Question edit history

Revision 2: Partial fix

title: no changes

body: changed
```diff
@@ -30,7 +30,7 @@
 import keras
 
 IMAGE_SIZE = 224
-N_CATEGORIES = 22
+N_CATEGORIES = 20
 BATCH_SIZE = 64
 NUM_EPOCHS = 20
 
```
Revision 1: Added source code

title: no changes

body: changed
The following addendum and source code were appended after the question's closing sentence, "I apologize for asking such a basic question, but I would appreciate your help.":

## Addendum
The source code is based on the following websites:
- https://qiita.com/tomo_20180402/items/e8c55bdca648f4877188
- https://spjai.com/keras-fine-tuning/

```python
import os
import numpy as np
from keras.preprocessing.image import ImageDataGenerator
from keras import models
from keras import layers
from keras import optimizers
from keras.layers import Conv2D, MaxPooling2D, Input
from keras.layers import Dense, Dropout, Flatten
import matplotlib.pyplot as plt
import keras

IMAGE_SIZE = 224
N_CATEGORIES = 22
BATCH_SIZE = 64
NUM_EPOCHS = 20

train_data_dir = ''
validation_data_dir = ''

NUM_TRAINING = 80000
NUM_VALIDATION = 12000


model = models.Sequential()
model.add(layers.Conv2D(32, (3, 3), activation="relu", input_shape=(224, 224, 3)))
model.add(layers.MaxPooling2D((2, 2)))
model.add(layers.Conv2D(64, (3, 3), activation="relu"))
model.add(layers.MaxPooling2D((2, 2)))
model.add(layers.Conv2D(128, (3, 3), activation="relu"))
model.add(layers.MaxPooling2D((2, 2)))
model.add(layers.Conv2D(256, (3, 3), activation="relu"))
model.add(layers.MaxPooling2D((2, 2)))


model.add(layers.Flatten())
model.add(layers.Dense(128, activation="relu"))
model.add(layers.Dropout(0.5))
model.add(layers.Dense(N_CATEGORIES, activation="softmax"))


model.compile(loss='categorical_crossentropy',
              optimizer=optimizers.SGD(lr=1e-4, momentum=0.9),
              metrics=['acc'])
model.summary()


train_datagen = ImageDataGenerator(
    rescale=1.0 / 255,
    shear_range=0.2,
    zoom_range=0.2,
    horizontal_flip=True,
    rotation_range=10)
test_datagen = ImageDataGenerator(
    rescale=1.0 / 255,
)

train_generator = train_datagen.flow_from_directory(
    train_data_dir,
    target_size=(IMAGE_SIZE, IMAGE_SIZE),
    batch_size=BATCH_SIZE,
    class_mode='categorical',
    shuffle=True
)
validation_generator = test_datagen.flow_from_directory(
    validation_data_dir,
    target_size=(IMAGE_SIZE, IMAGE_SIZE),
    batch_size=BATCH_SIZE,
    class_mode='categorical',
    shuffle=True
)

history = model.fit_generator(train_generator,
                              steps_per_epoch=NUM_TRAINING // BATCH_SIZE,
                              epochs=NUM_EPOCHS,
                              verbose=1,
                              validation_data=validation_generator,
                              validation_steps=NUM_VALIDATION // BATCH_SIZE,
                              )


model.save('model.h5')


# Plot training curves
# Accuracy
plt.plot(range(1, NUM_EPOCHS + 1), history.history['acc'], "o-")
plt.plot(range(1, NUM_EPOCHS + 1), history.history['val_acc'], "o-")
plt.title('model accuracy')
plt.ylabel('accuracy')  # Y-axis label
plt.xlabel('epoch')  # X-axis label
plt.legend(['train', 'test'], loc='upper left')
plt.show()
# loss
plt.plot(range(1, NUM_EPOCHS + 1), history.history['loss'], "o-")
plt.plot(range(1, NUM_EPOCHS + 1), history.history['val_loss'], "o-")
plt.title('model loss')
plt.ylabel('loss')  # Y-axis label
plt.xlabel('epoch')  # X-axis label
plt.legend(['train', 'test'], loc='upper right')
plt.show()

```
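For reference, the value the revisions keep adjusting (N_CATEGORIES, changed from 22 to 20) sets the size of the final softmax Dense layer, so it has to match the number of class subfolders that flow_from_directory discovers under train_data_dir. Below is a minimal sketch of one way to check that, not part of the original question; the directory path is a hypothetical placeholder, since the question leaves train_data_dir empty.

```python
import os

from keras.preprocessing.image import ImageDataGenerator

# Hypothetical path for illustration only; the question leaves train_data_dir empty.
train_data_dir = 'data/train'
IMAGE_SIZE = 224
BATCH_SIZE = 64

# Count the class subfolders on disk directly...
n_classes_on_disk = len([d for d in os.listdir(train_data_dir)
                         if os.path.isdir(os.path.join(train_data_dir, d))])

# ...and compare with what the generator discovers.
train_generator = ImageDataGenerator(rescale=1.0 / 255).flow_from_directory(
    train_data_dir,
    target_size=(IMAGE_SIZE, IMAGE_SIZE),
    batch_size=BATCH_SIZE,
    class_mode='categorical')

N_CATEGORIES = len(train_generator.class_indices)
assert N_CATEGORIES == n_classes_on_disk

# N_CATEGORIES can then be passed to the final layer of the model in the question:
# model.add(layers.Dense(N_CATEGORIES, activation="softmax"))
```

Deriving N_CATEGORIES from the generator this way keeps the Dense output size consistent with the one-hot labels that categorical_crossentropy expects, instead of hard-coding 22 or 20.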