# 1-D CNN (expressed as Conv2D with height-1 kernels) for 3-class
# classification of inputs shaped (1, Len, 8).
# NOTE(review): `Len` is defined elsewhere in the script — presumably the
# time-axis length of each sample; confirm against the data-loading code.
model = Sequential()
model.add(Conv2D(32, (1, 12), input_shape=(1, Len, 8), activation='relu'))
model.add(MaxPool2D(pool_size=(1, 2), strides=(1, 2)))
model.add(Conv2D(32, (1, 6), activation='relu'))
model.add(MaxPool2D(pool_size=(1, 2), strides=(1, 2)))
model.add(Conv2D(32, (1, 3), activation='relu'))
model.add(MaxPool2D(pool_size=(1, 2), strides=(1, 2)))
model.add(Conv2D(32, (1, 3), activation='relu'))
model.add(MaxPool2D(pool_size=(1, 2), strides=(1, 2)))
model.add(Conv2D(32, (1, 3), activation='relu'))
model.add(MaxPool2D(pool_size=(1, 2), strides=(1, 2)))
# Heavy dropout before the dense head to fight overfitting.
model.add(Dropout(0.7))
model.add(Flatten())
model.add(Dense(150, activation='relu'))
model.add(Dropout(0.5))
# 3-way softmax output; matches the one-hot (3,) labels and the
# categorical_crossentropy loss below.
model.add(Dense(3, activation='softmax'))
# `lr` is a deprecated alias (removed in recent Keras); use `learning_rate`.
adam = optimizers.Adam(learning_rate=0.001)
model.compile(optimizer=adam, loss='categorical_crossentropy', metrics=["accuracy"])
このようなモデルを作って3クラスの分類を行ったのですが、val_accuracy がすべて 1.0000 で一定になってしまいます。なぜこのようになってしまうのか教えていただきたいです。
# Keras's `validation_split` takes the LAST 20% of the arrays as given;
# `shuffle=True` only reshuffles the *training* portion each epoch, never the
# validation slice.  With ordered / temporally-correlated samples (adjacent
# windows from the same recordings), the tail slice is trivially easy and
# val_accuracy pins at 1.0.  Shuffle once up front so the split is random.
# NOTE(review): assumes train_data / label_data are numpy arrays (fancy
# indexing) — the printed dumps suggest so; confirm.
import numpy as np
_perm = np.random.permutation(len(train_data))
train_data = train_data[_perm]
label_data = label_data[_perm]
history = model.fit(train_data, label_data, batch_size=batch_size,
                    epochs=epochs, verbose=1, validation_split=0.2,
                    shuffle=True)
このコードでモデルを学習させています（Keras を使用）。
# train_data の最初の3件
[[[[ 6.91957676e-03 8.85816846e-03 9.83053455e-03 ... 1.17698875e-02
1.63501876e-02 2.23292213e-02]
[ 2.73652506e-02 2.97222532e-02 2.94192975e-02 ... 2.34445072e-02
1.90044310e-02 1.15221407e-02]
[ 2.03428260e-03 -6.21843661e-03 -9.62839980e-03 ... 7.61158803e-03
1.08164477e-02 9.19921719e-03]
...
[-5.84158376e-03 -5.40712117e-04 7.89350269e-03 ... 4.54523296e-03
-5.19621123e-03 -1.23342302e-02]
[-1.41354261e-02 -1.12439548e-02 -6.80933959e-03 ... -6.84408885e-03
-8.20570704e-03 -7.27328592e-03]
[-3.84972361e-03 1.39095776e-03 6.93063360e-03 ... 9.52208753e-03
5.19444845e-03 1.27566357e-04]]]
[[[ 6.33010794e-03 8.66293757e-04 -5.65776885e-05 ... 7.39951987e-03
6.67156364e-03 5.54482443e-03]
[ 5.72909577e-03 7.27491832e-03 8.62522041e-03 ... -5.21118089e-04
-4.28482901e-03 -4.77614733e-03]
[-2.09951547e-03 1.51217801e-03 3.41576270e-03 ... -3.46178239e-03
-3.57482331e-03 -1.47187294e-04]
...
[ 4.53099509e-02 4.60491407e-02 4.34434603e-02 ... 2.84958143e-03
-9.05766419e-03 -8.92232713e-03]
[ 4.87520340e-03 2.74575681e-02 5.01069829e-02 ... 6.90538450e-02
6.49820956e-02 6.01188067e-02]
[ 5.23581397e-02 3.90500170e-02 2.10108801e-02 ... -1.13102894e-02
-5.52333541e-03 4.20713561e-03]]]
[[[-1.73806680e-03 -5.97454320e-04 1.87099769e-03 ... 7.24767983e-03
9.32374218e-03 1.30392710e-02]
[ 1.69545814e-02 1.78353978e-02 1.34362322e-02 ... -4.73423358e-03
-3.50194929e-05 9.11966329e-03]
[ 1.75902911e-02 2.05988467e-02 1.66422334e-02 ... -3.54559323e-03
-2.09747127e-03 2.71754968e-03]
...
[ 2.84029384e-02 2.47564109e-02 1.83898681e-02 ... -1.00556158e-04
-3.12461217e-03 -3.83489494e-03]
[-2.81460016e-03 -7.94667639e-04 1.76763854e-03 ... 6.71201642e-03
7.25159510e-03 7.59902451e-03]
[ 6.66918958e-03 3.61755616e-03 -6.17837660e-04 ... 3.55181043e-04
6.51861409e-03 1.19279672e-02]]]]
#label_data 最初の3つ
[[0. 1. 0.]
[0. 0. 1.]
[1. 0. 0.]]
#出力結果
Epoch 1/50
16/16 [==============================] - 2s 50ms/step - loss: 1.0649 - accuracy: 0.3554 - val_loss: 0.8286 - val_accuracy: 0.9474
Epoch 2/50
16/16 [==============================] - 0s 13ms/step - loss: 0.8012 - accuracy: 0.6721 - val_loss: 0.2265 - val_accuracy: 0.9737
Epoch 3/50
16/16 [==============================] - 0s 11ms/step - loss: 0.5103 - accuracy: 0.7682 - val_loss: 0.1046 - val_accuracy: 1.0000
Epoch 4/50
16/16 [==============================] - 0s 11ms/step - loss: 0.3200 - accuracy: 0.8927 - val_loss: 0.0214 - val_accuracy: 1.0000
Epoch 5/50
16/16 [==============================] - 0s 15ms/step - loss: 0.3217 - accuracy: 0.9127 - val_loss: 0.0192 - val_accuracy: 1.0000
Epoch 6/50
16/16 [==============================] - 0s 11ms/step - loss: 0.1944 - accuracy: 0.9307 - val_loss: 0.0080 - val_accuracy: 1.0000
Epoch 7/50
16/16 [==============================] - 0s 11ms/step - loss: 0.0695 - accuracy: 0.9818 - val_loss: 0.0065 - val_accuracy: 1.0000
Epoch 8/50
16/16 [==============================] - 0s 12ms/step - loss: 0.0828 - accuracy: 0.9520 - val_loss: 0.0023 - val_accuracy: 1.0000
Epoch 9/50
16/16 [==============================] - 0s 13ms/step - loss: 0.0436 - accuracy: 0.9805 - val_loss: 0.0026 - val_accuracy: 1.0000
Epoch 10/50
16/16 [==============================] - 0s 16ms/step - loss: 0.0336 - accuracy: 0.9877 - val_loss: 9.2347e-04 - val_accuracy: 1.0000
Epoch 11/50
16/16 [==============================] - 0s 15ms/step - loss: 0.0282 - accuracy: 0.9976 - val_loss: 6.5329e-04 - val_accuracy: 1.0000
Epoch 12/50
16/16 [==============================] - 0s 16ms/step - loss: 0.0497 - accuracy: 0.9934 - val_loss: 0.0012 - val_accuracy: 1.0000
Epoch 13/50
16/16 [==============================] - 0s 14ms/step - loss: 0.0186 - accuracy: 1.0000 - val_loss: 2.0497e-04 - val_accuracy: 1.0000
Epoch 14/50
16/16 [==============================] - 0s 15ms/step - loss: 0.0067 - accuracy: 1.0000 - val_loss: 4.1923e-05 - val_accuracy: 1.0000
Epoch 15/50
16/16 [==============================] - 0s 15ms/step - loss: 0.0162 - accuracy: 0.9944 - val_loss: 3.3526e-05 - val_accuracy: 1.0000
Epoch 16/50
16/16 [==============================] - 0s 11ms/step - loss: 0.0118 - accuracy: 0.9931 - val_loss: 1.0029e-04 - val_accuracy: 1.0000
Epoch 17/50
16/16 [==============================] - 0s 12ms/step - loss: 0.0293 - accuracy: 0.9917 - val_loss: 9.2682e-05 - val_accuracy: 1.0000
Epoch 18/50
16/16 [==============================] - 0s 11ms/step - loss: 0.0075 - accuracy: 1.0000 - val_loss: 9.8395e-05 - val_accuracy: 1.0000
Epoch 19/50
16/16 [==============================] - 0s 11ms/step - loss: 0.0395 - accuracy: 0.9920 - val_loss: 0.0022 - val_accuracy: 1.0000
Epoch 20/50
16/16 [==============================] - 0s 11ms/step - loss: 0.0203 - accuracy: 1.0000 - val_loss: 1.2478e-04 - val_accuracy: 1.0000
Epoch 21/50
16/16 [==============================] - 0s 13ms/step - loss: 0.0057 - accuracy: 1.0000 - val_loss: 2.4402e-05 - val_accuracy: 1.0000
Epoch 22/50
16/16 [==============================] - 0s 11ms/step - loss: 0.0018 - accuracy: 1.0000 - val_loss: 9.0217e-06 - val_accuracy: 1.0000
Epoch 23/50
16/16 [==============================] - 0s 11ms/step - loss: 0.0117 - accuracy: 0.9941 - val_loss: 1.5141e-04 - val_accuracy: 1.0000
Epoch 24/50
16/16 [==============================] - 0s 12ms/step - loss: 0.0284 - accuracy: 0.9969 - val_loss: 6.1338e-04 - val_accuracy: 1.0000
Epoch 25/50
16/16 [==============================] - 0s 11ms/step - loss: 0.0080 - accuracy: 1.0000 - val_loss: 7.0094e-05 - val_accuracy: 1.0000
Epoch 26/50
16/16 [==============================] - 0s 11ms/step - loss: 0.0163 - accuracy: 0.9885 - val_loss: 1.4289e-04 - val_accuracy: 1.0000
Epoch 27/50
16/16 [==============================] - 0s 11ms/step - loss: 0.0165 - accuracy: 0.9969 - val_loss: 3.4287e-05 - val_accuracy: 1.0000
Epoch 28/50
16/16 [==============================] - 0s 11ms/step - loss: 0.0098 - accuracy: 0.9941 - val_loss: 4.3478e-06 - val_accuracy: 1.0000
Epoch 29/50
16/16 [==============================] - 0s 11ms/step - loss: 0.0029 - accuracy: 1.0000 - val_loss: 9.2543e-07 - val_accuracy: 1.0000
Epoch 30/50
16/16 [==============================] - 0s 15ms/step - loss: 0.0270 - accuracy: 0.9957 - val_loss: 6.4333e-05 - val_accuracy: 1.0000
#追記
train_label
[[0. 1. 0.]
[0. 0. 1.]
[1. 0. 0.]
[1. 0. 0.]
[1. 0. 0.]
[1. 0. 0.]
[0. 0. 1.]
[0. 1. 0.]
[0. 1. 0.]
[1. 0. 0.]
[1. 0. 0.]
[1. 0. 0.]
[1. 0. 0.]
[1. 0. 0.]
[1. 0. 0.]
[1. 0. 0.]
[1. 0. 0.]
[1. 0. 0.]
[1. 0. 0.]
[1. 0. 0.]
[0. 1. 0.]
[1. 0. 0.]
[1. 0. 0.]
[1. 0. 0.]
......
[0. 1. 0.]
[0. 1. 0.]
[0. 1. 0.]
[0. 1. 0.]
[0. 0. 1.]
[0. 0. 1.]
[0. 0. 1.]
[1. 0. 0.]
[1. 0. 0.]
[1. 0. 0.]
[1. 0. 0.]
[0. 0. 1.]
[0. 0. 1.]
[0. 0. 1.]
[0. 1. 0.]
[0. 1. 0.]
[0. 1. 0.]
[0. 1. 0.]
[0. 1. 0.]
[0. 1. 0.]
[0. 0. 1.]
[0. 0. 1.]
[0. 0. 1.]
[1. 0. 0.]
[1. 0. 0.]
[1. 0. 0.]
[1. 0. 0.]
[0. 0. 1.]
[0. 0. 1.]
[0. 0. 1.]
[0. 1. 0.]
[0. 1. 0.]
[0. 1. 0.]
[0. 1. 0.]
[0. 1. 0.]
[0. 0. 1.]
[0. 0. 1.]
[1. 0. 0.]
[1. 0. 0.]
[1. 0. 0.]
[1. 0. 0.]
[1. 0. 0.]
[1. 0. 0.]
[0. 0. 1.]
[0. 0. 1.]
[0. 1. 0.]
[0. 1. 0.]
[0. 1. 0.]
[0. 1. 0.]
[0. 0. 1.]
[1. 0. 0.]
[1. 0. 0.]
[1. 0. 0.]
[1. 0. 0.]
[0. 0. 1.]
[0. 1. 0.]
[0. 1. 0.]
[0. 1. 0.]
[0. 1. 0.]
[0. 0. 1.]
[1. 0. 0.]
[1. 0. 0.]
[1. 0. 0.]
[1. 0. 0.]
[1. 0. 0.]
[1. 0. 0.]
[0. 0. 1.]
[0. 1. 0.]
[0. 1. 0.]
[0. 1. 0.]
[0. 1. 0.]
[0. 0. 1.]
[1. 0. 0.]
[1. 0. 0.]
[1. 0. 0.]
[1. 0. 0.]
[1. 0. 0.]
[1. 0. 0.]
[0. 0. 1.]
[0. 1. 0.]
[0. 1. 0.]
[0. 1. 0.]
[0. 1. 0.]
[0. 0. 1.]
[0. 0. 1.]
[1. 0. 0.]
[1. 0. 0.]
[1. 0. 0.]
[1. 0. 0.]
[0. 0. 1.]
[0. 0. 1.]
[0. 1. 0.]
[0. 1. 0.]]
回答1件
あなたの回答
tips
プレビュー