Question edit history

1

Text revision

2020/06/22 02:15

Posted

os-t

Score: 20

test CHANGED
@@ -1 +1 @@
- I want to draw the separating hyperplanes in SVM multiclass classification
+ I want to know the meaning of an error message
test CHANGED
@@ -1,8 +1,8 @@
  ### Background / What I want to achieve
 
+ I want to know the meaning of the error message
 
 
- I want to draw the separating hyperplanes in SVM multiclass classification
 
  ### Problem / Error message
 
@@ -14,520 +14,4 @@
 
  ```
 
-
-
- ### Relevant source code
-
- ```Python
- import pandas as pd
- import numpy as np
- #import seaborn as sns
- #sns.set_style("darkgrid")
- # module for 3D plotting
- from mpl_toolkits.mplot3d import Axes3D
- import matplotlib.pyplot as plt
- from matplotlib.colors import LinearSegmentedColormap
-
-
- def generate_cmap(colors, num):
-     """Return a user-defined colormap."""
-     values = range(num)
-     vmax = np.ceil(np.max(values))
-     color_list = []
-     for v, c in zip(values, colors):
-         color_list.append((v / vmax, c))
-     return LinearSegmentedColormap.from_list('custom_cmap', color_list)
-
-
- def plot_classifier2(cls, sc, X_train, y_train, X_test, y_test, y_pred, clis_f, CL_lis, MK_lis, St, num):
-     num1 = len(y_test)
-     dum1 = 0
-     for i in range(num1):
-         if y_test[i] == y_pred[i]:
-             dum1 += 1
-     print("accuracy rate (percent):")
-     accu = (1.0 * dum1) / (1.0 * num1) * 100.0
-     print(accu)
-
-     fig = plt.figure()
-     ax = Axes3D(fig)
-
-     x1_min, x1_max = -2.5, 2.5
-     x2_min, x2_max = -2.5, 2.5
-     """resolution controls the smoothness of the boundary"""
-     resolution = 0.02
-     xx1, xx2 = np.meshgrid(np.arange(x1_min, x1_max, resolution),
-                            np.arange(x2_min, x2_max, resolution))
-     #print(xx1)
-     Xp = np.array([xx1.ravel(), xx2.ravel()]).T
-     #print(Xp)
-
-     """sc is the standardizer (defined further down)"""
-     Xp1 = sc.transform(Xp)
-
-     """what does cls mean?"""
-     Z = cls.predict(Xp1)
-
-     clis = []
-     """unique removes duplicate elements"""
-     yc = np.unique(Z)
-     yt = np.unique(y_train)
-     """append adds an element"""
-     for i in range(len(np.unique(yc))):
-         for j in range(len(np.unique(yt))):
-             if yc[i] == yt[j]:
-                 clis.append(clis_f[j])
-
-     cm = generate_cmap(clis, len(np.unique(Z)))
-     # reshape: change the array shape, with the number of columns inferred automatically
-     Z1 = Z.reshape(xx1.shape)
-     ax.contourf(xx1, xx2, Z1, alpha=0.3, cmap=cm)
-
-     ax.set_xlabel("V1-V2 pair density (normalized)")
-     ax.set_ylabel("V1-V6 pair density (normalized)")
-     ax.set_zlabel("V1-V4 pair density (normalized)")
-
-     for idx in range(len(np.unique(y0))):
-         ax.plot(X_train[y_train==idx+1, 0], X_train[y_train==idx+1, 1], X_train[y_train==idx+1, 2],
-                 color=CL_lis[idx], marker=MK_lis[idx], label=LB_lis[idx], linestyle='None')
-         #ZZ = -(w[idx:, 0] * xx1 + w[idx, 1] * xx2 + b) / w[idx, 2]
-         #ax.plot_surface(xx1, xx2, ZZ, alpha=0.3, cmap=cm)
-
-     ax.plot(X_test[y_test==y_pred, 0], X_test[y_test==y_pred, 1], X_test[y_test==y_pred, 2],
-             color='black', marker='o', label="test(true)", linestyle='None')
-     ax.plot(X_test[y_test!=y_pred, 0], X_test[y_test!=y_pred, 1], X_test[y_test!=y_pred, 2],
-             color='black', marker='x', label="test(false)", linestyle='None')
-
-     plt.legend(loc='upper left')
-     plt.show()
-     return accu
-
-
- df0 = pd.read_csv('C:\sample\xxxxx.csv', header=None)
- df0.columns = ['V1V2', 'V1V35', 'V1V6', 'V1V4', 'KAM', 'class label']
- df0.head()
-
- y0 = df0.iloc[0:, 5].values
- # column 1
- X0_V1V2 = np.copy(df0.iloc[0:, 0].values)
- # column 2
- X0_V1V35 = np.copy(df0.iloc[0:, 1].values)
- # column 3
- X0_V1V6 = np.copy(df0.iloc[0:, 2].values)
- # column 4
- X0_V1V4 = np.copy(df0.iloc[0:, 3].values)
- # column 5
- X0_KAM = np.copy(df0.iloc[0:, 4].values)
-
- X0 = df0.iloc[0:, [0, 2, 3]].values
-
- from sklearn.model_selection import train_test_split
- X_train, X_test, y_train, y_test = train_test_split(X0, y0, test_size=0.2, random_state=0)
-
- from sklearn.preprocessing import StandardScaler
- # standardization
- sc = StandardScaler()
- X_train_std = sc.fit_transform(X_train)
- X_test_std = sc.transform(X_test)
-
- from sklearn.svm import SVC
- svm = SVC(kernel='linear', C=1, random_state=0)
- svm.fit(X_train_std, y_train)
- y_pred = svm.predict(X_test_std)
- #plot_classifier(svm, sc, X0, y0, y_train, clis_f, CL_lis, MK_lis, St)
-
- accu_t = 0.0
- kk = 5
- z = 0.0
- CL_lis = ['red', 'blue', 'green', 'grey', 'cyan', 'pink', 'salmon']
- MK_lis = ['o', 'v', '^', '+', 'D', 'o', 'D']
- LB_lis = ['T1', 'T2', 'T3/T4/T5', 'T6', 'T7', 'T8', 'H']
- St = ""
- clis_f = ['red', 'blue', 'green', 'grey', 'cyan', 'yellow']
-
- for i in range(kk):
-     X_train, X_test, y_train, y_test = train_test_split(X0, y0, test_size=0.2, random_state=i)
-     sc = StandardScaler()
-     X_train_std = sc.fit_transform(X_train)
-     X_test_std = sc.transform(X_test)
-     svm = SVC(kernel='linear', C=10.0, random_state=i)
-     svm.fit(X_train_std, y_train)
-     y_pred = svm.predict(X_test_std)
-     plot_classifier2(svm, sc, X_train, y_train, X_test, y_test, y_pred, clis_f, CL_lis, MK_lis, St, num=i)
-
-     num1 = len(y_test)
-     dum1 = 0
-     for i in range(num1):
-         if y_test[i] == y_pred[i]:
-             dum1 += 1
-     accu_t = (1.0 * dum1) / (1.0 * num1) * 100.0
-     #print("accu. = ", accu_t)
-     z += accu_t
-
- print("total accu. = ", z / kk)
-
-
- import numpy as np
- X = X0_V1V2
- Y = X0_V1V6
- Z = X0_V1V4
-
- # code only meant to make the graph prettier with seaborn
- #sns.set_style("darkgrid")
-
- # build the figure frame
- fig = plt.figure()
- ax = Axes3D(fig)
-
- # add these lines if you want axis labels
- ax.set_xlabel("V1-V2 pair density (normalized)")
- ax.set_ylabel("V1-V6 pair density (normalized)")
- ax.set_zlabel("V1-V4 pair density (normalized)")
-
- # draw with .plot
- # without linestyle='None' a line is drawn by default, which usually just gets in the way in a 3D scatter plot
- # a plain circle marker is a safe choice
- for idx in range(len(np.unique(y0))):
-     ax.plot(X[y0==idx+1], Y[y0==idx+1], Z[y0==idx+1],
-             color=CL_lis[idx], marker=MK_lis[idx], label=LB_lis[idx], linestyle='None')
-
- '''ax.plot(X_test[y_test==y_pred, 0], X_test[y_test==y_pred, 1],
-         color='black', marker='o', label="test(true)", linestyle='None')
- ax.plot(X_test[y_test!=y_pred, 0], X_test[y_test!=y_pred, 1],
-         color='black', marker='x', label="test(false)", linestyle='None')'''
- plt.legend(loc='upper left')
- # finally call .show() to display the figure
- plt.show()
- ```
-
- ### What I tried
-
- I think ax.plot_surface is probably what I need, but it did not seem to work out, so I tried to draw the boundary on each plane using Axes3D.contourf from https://matplotlib.org/mpl_toolkits/mplot3d/tutorial.html; when I ran it, the error above occurred. I would appreciate it if you could at least explain what the error message means.
-
  ### Supplementary information (tool versions, etc.)
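
The removed "What I tried" section, together with the commented-out `ZZ = -(w[idx, 0] * xx1 + w[idx, 1] * xx2 + b) / w[idx, 2]` lines in the removed code, points at drawing each separating plane of a linear SVM with `ax.plot_surface`. Below is a minimal, self-contained sketch of that idea only; it is not the asker's code. The `make_blobs` data, the generic feature names, and the SVC settings are stand-in assumptions, since the original CSV is not available.

```Python
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D  # registers the 3D projection
from sklearn.datasets import make_blobs
from sklearn.preprocessing import StandardScaler
from sklearn.svm import SVC

# Stand-in data (assumption): 3 features, 3 classes, instead of the original CSV.
X, y = make_blobs(n_samples=150, n_features=3, centers=3, random_state=0)

sc = StandardScaler()
X_std = sc.fit_transform(X)

svm = SVC(kernel='linear', C=1.0, random_state=0)
svm.fit(X_std, y)

fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')

# Scatter the standardized samples, one colour per class.
for label, color in zip(np.unique(y), ['red', 'blue', 'green']):
    pts = X_std[y == label]
    ax.plot(pts[:, 0], pts[:, 1], pts[:, 2],
            color=color, marker='o', linestyle='None', label='class %d' % label)

# Grid over the first two (standardized) features.
x1 = np.linspace(X_std[:, 0].min(), X_std[:, 0].max(), 20)
x2 = np.linspace(X_std[:, 1].min(), X_std[:, 1].max(), 20)
xx1, xx2 = np.meshgrid(x1, x2)

# For a linear multiclass SVC, coef_/intercept_ hold one hyperplane per class pair
# (one-vs-one). Solve w0*x1 + w1*x2 + w2*x3 + b = 0 for x3 and draw it as a surface.
for w, b in zip(svm.coef_, svm.intercept_):
    if abs(w[2]) < 1e-6:
        continue  # plane is (almost) parallel to the x3 axis; skip it
    zz = -(w[0] * xx1 + w[1] * xx2 + b) / w[2]
    ax.plot_surface(xx1, xx2, zz, alpha=0.3)

ax.set_xlabel('feature 1 (standardized)')
ax.set_ylabel('feature 2 (standardized)')
ax.set_zlabel('feature 3 (standardized)')
ax.legend(loc='upper left')
plt.show()
```

Because each row of `coef_`/`intercept_` of a linear `SVC` is a one-vs-one pairwise hyperplane, the picture fills up quickly as the number of classes grows; with a non-linear kernel there is no single plane at all, and drawing 2-D `contourf` slices at fixed values of the third feature is probably the more practical route.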