Question edit history

1

Re-inserted the source code

2019/09/18 09:31

Posted

Dpulex

Score: 9

test CHANGED
File without changes
test CHANGED
@@ -22,206 +22,210 @@
  
  Error message
  
+ Traceback (most recent call last):
+   File "lstm_tokutyou.py", line 55, in <module>
+     model.add(Dense(n_out, kernel_initializer=weight_variable))
+   File "/usr/local/lib/python3.7/site-packages/keras/engine/sequential.py", line 182, in add
+     output_tensor = layer(self.outputs[0])
+   File "/usr/local/lib/python3.7/site-packages/keras/engine/base_layer.py", line 463, in __call__
+     self.build(unpack_singleton(input_shapes))
+   File "/usr/local/lib/python3.7/site-packages/keras/layers/core.py", line 895, in build
+     constraint=self.kernel_constraint)
+   File "/usr/local/lib/python3.7/site-packages/keras/engine/base_layer.py", line 279, in add_weight
+     weight = K.variable(initializer(shape, dtype=dtype),
+ TypeError: weight_variable() got an unexpected keyword argument 'dtype'
  ```
  
- Traceback (most recent call last):
-   File "lstm_tokutyou.py", line 55, in <module>
-     model.add(Dense(n_out, kernel_initializer=weight_variable))
-   File "/usr/local/lib/python3.7/site-packages/keras/engine/sequential.py", line 182, in add
-     output_tensor = layer(self.outputs[0])
-   File "/usr/local/lib/python3.7/site-packages/keras/engine/base_layer.py", line 463, in __call__
-     self.build(unpack_singleton(input_shapes))
-   File "/usr/local/lib/python3.7/site-packages/keras/layers/core.py", line 895, in build
-     constraint=self.kernel_constraint)
-   File "/usr/local/lib/python3.7/site-packages/keras/engine/base_layer.py", line 279, in add_weight
-     weight = K.variable(initializer(shape, dtype=dtype),
- TypeError: weight_variable() got an unexpected keyword argument 'dtype'
  
  ### Relevant source code
  
- ```enter the language name here
+ ```python
  
  Source code
  
+ import csv
+ import re
+ import datetime
+ import time
+ import numpy as np
+ import matplotlib.pyplot as plt
+ from keras.models import Sequential
+ from keras.layers.core import Dense, Activation
+ from keras.layers.recurrent import LSTM
+ from keras.layers.wrappers import Bidirectional
+ from keras.optimizers import Adam
+ from keras.callbacks import EarlyStopping
+ from sklearn import datasets
+ from sklearn.model_selection import train_test_split
+ from sklearn.utils import shuffle
+ 
+ data = np.loadtxt("lstm_tokutyou1002.csv",delimiter=",")
+ X = data[:,:1105]
+ y = data[:,-1]
+ Y = np.eye(2)[y.astype(int)]
+ del y
+ X_train, X_test, Y_train, Y_test = \
+     train_test_split(X, Y, test_size=0.3)
+ 
+ X_train, X_validation, Y_train, Y_validation = \
+     train_test_split(X_train, Y_train, test_size=0.2)
+ 
+ '''
+ Model settings
+ '''
+ n_in = 151186
+ n_long = 1105
+ n_hidden = 128
+ n_out = 10
+ 
+ def weight_variable(shape, name=None):
+     return np.random.normal(scale=.01, size=shape)
+ 
+ early_stopping = EarlyStopping(monitor='val_loss', patience=10, verbose=1)
+ 
+ model = Sequential()
+ model.add(Bidirectional(LSTM(n_hidden),
+                         input_shape=(n_long,n_in)))
+ model.add(Dense(n_out, kernel_initializer=weight_variable))
+ model.add(Activation('softmax'))
+ 
+ model.compile(loss='categorical_crossentropy',
+               optimizer=Adam(lr=0.001, beta_1=0.9, beta_2=0.999),
+               metrics=['accuracy'])
+ 
+ '''
+ Model training
+ '''
+ epochs = 300
+ batch_size = 250
+ 
+ hist = model.fit(X_train, Y_train,
+                  batch_size=batch_size,
+                  epochs=epochs,
+                  validation_data=(X_validation, Y_validation),
+                  callbacks=[early_stopping])
+ 
+ '''
+ Training progress
+ '''
+ acc = hist.history['val_acc']
+ loss = hist.history['val_loss']
+ 
+ plt.rc('font', family='serif')
+ fig = plt.figure()
+ plt.plot(range(len(loss)), loss,
+          label='loss', color='black')
+ plt.xlabel('epochs')
+ plt.show()
+ 
+ '''
+ Evaluation of prediction accuracy
+ '''
+ loss_and_metrics = model.evaluate(X_test, Y_test)
+ print(loss_and_metrics)
  ```
  
- import csv
- import re
- import datetime
- import time
- import numpy as np
- import matplotlib.pyplot as plt
- from keras.models import Sequential
- from keras.layers.core import Dense, Activation
- from keras.layers.recurrent import LSTM
- from keras.layers.wrappers import Bidirectional
- from keras.optimizers import Adam
- from keras.callbacks import EarlyStopping
- from sklearn import datasets
- from sklearn.model_selection import train_test_split
- from sklearn.utils import shuffle
- 
- data = np.loadtxt("lstm_tokutyou1002.csv",delimiter=",")
- X = data[:,:1105]
- y = data[:,-1]
- Y = np.eye(2)[y.astype(int)]
- del y
- X_train, X_test, Y_train, Y_test = \
-     train_test_split(X, Y, test_size=0.3)
- 
- X_train, X_validation, Y_train, Y_validation = \
-     train_test_split(X_train, Y_train, test_size=0.2)
- 
- '''
- Model settings
- '''
- n_in = 151186
- n_long = 1105
- n_hidden = 128
- n_out = 10
- 
- def weight_variable(shape, name=None):
-     return np.random.normal(scale=.01, size=shape)
- 
- early_stopping = EarlyStopping(monitor='val_loss', patience=10, verbose=1)
- 
- model = Sequential()
- model.add(Bidirectional(LSTM(n_hidden),
-                         input_shape=(n_long,n_in)))
- model.add(Dense(n_out, kernel_initializer=weight_variable))
- model.add(Activation('softmax'))
- 
- model.compile(loss='categorical_crossentropy',
-               optimizer=Adam(lr=0.001, beta_1=0.9, beta_2=0.999),
-               metrics=['accuracy'])
- 
- '''
- Model training
- '''
- epochs = 300
- batch_size = 250
- 
- hist = model.fit(X_train, Y_train,
-                  batch_size=batch_size,
-                  epochs=epochs,
-                  validation_data=(X_validation, Y_validation),
-                  callbacks=[early_stopping])
- 
- '''
- Training progress
- '''
- acc = hist.history['val_acc']
- loss = hist.history['val_loss']
- 
- plt.rc('font', family='serif')
- fig = plt.figure()
- plt.plot(range(len(loss)), loss,
-          label='loss', color='black')
- plt.xlabel('epochs')
- plt.show()
- 
- '''
- Evaluation of prediction accuracy
- '''
- loss_and_metrics = model.evaluate(X_test, Y_test)
- print(loss_and_metrics)
  
  ### What I tried
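
For reference, the TypeError recorded in the diff above is raised because Keras calls whatever is passed as `kernel_initializer` with a `dtype` keyword (the last traceback frame shows `initializer(shape, dtype=dtype)` inside `add_weight`), while the question's `weight_variable(shape, name=None)` does not accept one. The sketch below only illustrates a signature that tolerates that call, keeping the same small random-normal weights as the question; it is an assumption-based illustration, not a verified fix for the rest of the script.

```python
import numpy as np

# Sketch only: a callable used as kernel_initializer must accept the dtype
# keyword that Keras forwards via add_weight (initializer(shape, dtype=dtype)).
def weight_variable(shape, dtype=None):
    # Same small random-normal weights as in the question; dtype is accepted
    # here simply so the call from Keras no longer raises TypeError.
    return np.random.normal(scale=0.01, size=shape)
```

A built-in initializer such as `keras.initializers.RandomNormal(stddev=0.01)` accepts the same call signature and would avoid defining a custom function altogether.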