Question edit history

2

Typo fix

2021/01/01 17:20

Posted

velonica74

Score 11

test CHANGED
File without changes
test CHANGED
@@ -86,13 +86,11 @@
  names_test=['pixels']
- df_test=pd.read_csv('/hoge/test.csv',names=names_test, na_filter=False)
+ df_test=pd.read_csv('./hoge/test.csv',names=names_test, na_filter=False)
  df_test=df_test.drop([0],axis=0)
- df_test.head(10)
- df_train=pd.read_csv('/hoge/train.csv',names=names_train, na_filter=False)
+ df_train=pd.read_csv('./hoge/train.csv',names=names_train, na_filter=False)
  df_train=df_train.drop([0],axis=0)

1

Added source code

2021/01/01 17:19

Posted

velonica74

Score 11

test CHANGED
File without changes
test CHANGED
@@ -1,4 +1,4 @@
- I built a CNN with TensorFlow (Keras) in Jupyter, and the kernel crashes the moment I run the fitting code.
+ I am new to machine learning. I built a CNN with TensorFlow (Keras) in Jupyter, and the kernel crashes the moment I run the fitting code.
@@ -46,286 +46,200 @@
- I am using a Mac, and the package list of my Anaconda virtual environment is as follows:
-
- ```enter the language here
- # Name Version Build Channel
- _tflow_select 2.3.0 mkl
- absl-py 0.11.0 pyhd3eb1b0_1
- appnope 0.1.2 py36hecd8cb5_1001
- argon2-cffi 20.1.0 py36haf1e3a3_1
- astor 0.8.1 py36_0
- async_generator 1.10 py36h28b3542_0
- attrs 20.3.0 pyhd3eb1b0_0
- backcall 0.2.0 py_0
- blas 1.0 mkl
- bleach 3.2.1 py_0
- brotlipy 0.7.0 py36h9ed2024_1003
- c-ares 1.17.1 h9ed2024_0
- ca-certificates 2020.12.8 hecd8cb5_0
- certifi 2020.12.5 py36hecd8cb5_0
- cffi 1.14.4 py36h2125817_0
- chardet 4.0.0 py36hecd8cb5_1003
- cryptography 3.3.1 py36hbcfaee0_0
- cycler 0.10.0 py36hecd8cb5_0
- dbus 1.13.18 h18a8e69_0
- decorator 4.4.2 py_0
- defusedxml 0.6.0 py_0
- entrypoints 0.3 py36_0
- expat 2.2.10 hb1e8313_2
- freetype 2.10.4 ha233b18_0
- gast 0.2.2 py36_0
- gettext 0.19.8.1 hb0f4f8b_2
- glib 2.66.1 h9bbe63b_0
- google-pasta 0.2.0 py_0
- grpcio 1.31.0 py36h7580e61_0
- h5py 2.10.0 py36h0601b69_1
- hdf5 1.10.6 hdbbcd12_0
- icu 58.2 h0a44026_3
- idna 2.10 py_0
- imageio 2.9.0 pypi_0 pypi
- importlib-metadata 2.0.0 py_1
- importlib_metadata 2.0.0 1
- intel-openmp 2019.4 233
- ipykernel 5.3.4 py36h5ca1d4c_0
- ipython 7.16.1 py36h5ca1d4c_0
- ipython_genutils 0.2.0 pyhd3eb1b0_1
- ipywidgets 7.6.0 pyhd3eb1b0_1
- jedi 0.18.0 py36hecd8cb5_0
- jinja2 2.11.2 py_0
- joblib 1.0.0 pypi_0 pypi
- jpeg 9b he5867d9_2
- json5 0.9.5 py_0
- jsonschema 3.2.0 py_2
- jupyter 1.0.0 py36_7
- jupyter_client 6.1.7 py_0
- jupyter_console 6.2.0 py_0
- jupyter_core 4.7.0 py36hecd8cb5_0
- jupyterlab 2.2.6 py_0
- jupyterlab_pygments 0.1.2 py_0
- jupyterlab_server 1.2.0 py_0
- keras-applications 1.0.8 py_1
- keras-preprocessing 1.1.0 py_1
- kiwisolver 1.3.0 py36h23ab428_0
- lcms2 2.11 h92f6f08_0
- libcxx 10.0.0 1
- libedit 3.1.20191231 h1de35cc_1
- libffi 3.3 hb1e8313_2
- libgfortran 3.0.1 h93005f0_2
- libiconv 1.16 h1de35cc_0
- libpng 1.6.37 ha441bb4_0
- libprotobuf 3.13.0.1 hab81aa3_0
- libsodium 1.0.18 h1de35cc_0
- libtiff 4.1.0 hcb84e12_1
- lz4-c 1.9.2 h79c402e_3
- markdown 3.3.3 py36hecd8cb5_0
- markupsafe 1.1.1 py36h1de35cc_0
- matplotlib 3.3.2 hecd8cb5_0
- matplotlib-base 3.3.2 py36h181983e_0
- mistune 0.8.4 py36h1de35cc_0
- mkl 2019.5 intel_281 intel
- mkl-service 2.3.0 py36h9ed2024_0
- mkl_fft 1.2.0 py36hc64f4ea_0
- mkl_random 1.1.1 py36h959d312_0
- nbclient 0.5.1 py_0
- nbconvert 6.0.7 py36_0
- nbformat 5.0.8 py_0
- ncurses 6.2 h0a44026_1
- nest-asyncio 1.4.3 pyhd3eb1b0_0
- notebook 6.1.6 py36hecd8cb5_0
- numpy 1.19.2 py36h456fd55_0
- numpy-base 1.19.2 py36hcfb5961_0
- olefile 0.46 py36_0
- openssl 1.1.1i h9ed2024_0
- opt_einsum 3.1.0 py_0
- packaging 20.8 pyhd3eb1b0_0
- pandas 1.1.5 py36hb2f4e1b_0
- pandoc 2.11 h0dc7051_0
- pandocfilters 1.4.3 py36hecd8cb5_1
- parso 0.7.0 py_0
- pcre 8.44 hb1e8313_0
- pexpect 4.8.0 pyhd3eb1b0_3
- pickleshare 0.7.5 pyhd3eb1b0_1003
- pillow 8.0.1 py36h5270095_0
- pip 20.3.3 py36hecd8cb5_0
- prometheus_client 0.9.0 pyhd3eb1b0_0
- prompt-toolkit 3.0.8 py_0
- prompt_toolkit 3.0.8 0
- protobuf 3.13.0.1 py36hb1e8313_1
- ptyprocess 0.6.0 pyhd3eb1b0_2
- pycparser 2.20 py_2
- pygments 2.7.3 pyhd3eb1b0_0
- pyopenssl 20.0.1 pyhd3eb1b0_1
- pyparsing 2.4.7 py_0
- pyqt 5.9.2 py36h655552a_2
- pyrsistent 0.17.3 py36haf1e3a3_0
- pysocks 1.7.1 py36hecd8cb5_0
- python 3.6.12 h26836e1_2
- python-dateutil 2.8.1 py_0
- pytz 2020.5 pyhd3eb1b0_0
- pyzmq 20.0.0 py36h23ab428_1
- qt 5.9.7 h468cd18_1
- qtconsole 4.7.7 py_0
- qtpy 1.9.0 py_0
- readline 8.0 h1de35cc_0
- requests 2.25.1 pyhd3eb1b0_0
- scikit-learn 0.24.0 pypi_0 pypi
- scipy 1.5.2 py36h912ce22_0
- send2trash 1.5.0 pyhd3eb1b0_1
- setuptools 51.0.0 py36hecd8cb5_2
- sip 4.19.8 py36h0a44026_0
- six 1.15.0 py36hecd8cb5_0
- sklearn 0.0 pypi_0 pypi
- sqlite 3.33.0 hffcf06c_0
- tbb 2019.6 intel_233 intel
- tensorboard 2.0.0 pyhb38c66f_1
- tensorflow-base 2.0.0 mkl_py36h66b1bf0_0
- tensorflow-estimator 2.0.0 pyh2649769_0
- termcolor 1.1.0 py36_1
- terminado 0.9.1 py36_0
- testpath 0.4.4 py_0
- threadpoolctl 2.1.0 pypi_0 pypi
- ---
+ The code is as follows.
+
+ ```Python
+ import numpy as np
+ import pandas as pd
+ import matplotlib.pyplot as plt
+ %matplotlib inline
+ import matplotlib.image as mpimg
+
+ import tensorflow as tf
+ from tensorflow import keras
+ from sklearn.model_selection import train_test_split
+
+ import os
+ os.environ['KMP_DUPLICATE_LIB_OK']='TRUE'
+
+ label_map = ['Anger', 'Disgust', 'Fear', 'Happy', 'Sad', 'Surprise', 'Neutral']
+ names_train=['emotion','pixels']
+ names_test=['pixels']
+ df_test=pd.read_csv('/hoge/test.csv',names=names_test, na_filter=False)
+ df_test=df_test.drop([0],axis=0)
+ df_test.head(10)
+ df_train=pd.read_csv('/hoge/train.csv',names=names_train, na_filter=False)
+ df_train=df_train.drop([0],axis=0)
+
+ def gray_to_rgb(im):
+     w, h = im.shape
+     ret = np.empty((w, h, 3), dtype=np.uint8)
+     ret[:, :, 2] = ret[:, :, 1] = ret[:, :, 0] = im
+     return ret
+
+ def convert_to_image(pixels, mode="save", t="gray"):
+     if type(pixels) == str:
+         pixels = np.array([int(i) for i in pixels.split()])
+     if mode == "show":
+         if t == "gray":
+             return pixels.reshape(48,48)
+         else:
+             return gray_to_rgb(pixels.reshape(48,48))
+     else:
+         return pixels
+
+ df_train["pixels"] = df_train["pixels"].apply(lambda x : convert_to_image(x, mode="show", t="gray"))
+ df_test["pixels"] = df_test["pixels"].apply(lambda x : convert_to_image(x, mode="show", t="gray"))
+
+ X_train, X_val, y_train, y_val = train_test_split(df_train["pixels"], df_train["emotion"], test_size=0.2, random_state=1)
+
+ X_train = np.array(list(X_train[:]), dtype=np.float)
+ X_val = np.array(list(X_val[:]), dtype=np.float)
+
+ y_train = np.array(list(y_train[:]), dtype=np.float)
+ y_val = np.array(list(y_val[:]), dtype=np.float)
+
+ X_train = X_train.reshape(X_train.shape[0], 48, 48, 1)
+ X_val = X_val.reshape(X_val.shape[0], 48, 48, 1)
+
+ X_test=np.array(list(df_test['pixels']), dtype=np.float)
+
+ X_test = X_test.reshape(X_test.shape[0], 48, 48, 1)
+
+ IMG_SIZE=48
+
+ model = keras.models.Sequential([
+     keras.layers.Conv2D(32, kernel_size=(3, 3), activation='relu', input_shape=(IMG_SIZE,IMG_SIZE,1)),
+     keras.layers.BatchNormalization(axis=1),
+     keras.layers.MaxPooling2D(pool_size=(2, 2)),
+
+     keras.layers.Conv2D(64, kernel_size=(3, 3), activation='relu'),
+     keras.layers.BatchNormalization(axis=1),
+     keras.layers.MaxPooling2D(pool_size=(2, 2)),
+
+     keras.layers.Conv2D(64, kernel_size=(3, 3), activation='relu'),
+     keras.layers.BatchNormalization(axis=1),
+     keras.layers.MaxPooling2D(pool_size=(2, 2)),
+
+     keras.layers.Conv2D(32, kernel_size=(3, 3), activation='relu'),
+     keras.layers.BatchNormalization(axis=1),
+     keras.layers.MaxPooling2D(pool_size=(2, 2)),
+
+     keras.layers.Flatten(),
+     keras.layers.Dense(512, activation='relu'),
+     keras.layers.Dropout(0.2),
+     keras.layers.Dense(512, activation='relu'),
+     keras.layers.Dropout(0.2),
+     keras.layers.Dense(7, activation='softmax')
+ ])
+
+ model.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy'])
+
+ model.fit(X_train,y_train,epochs=50,batch_size=64,validation_data=(X_val,y_val))
  ```
 
- That is the environment list.
 
  I apologize for such a basic question, but any help would be greatly appreciated.