前提
開発環境はmacOS、Spyderです。
発生している問題・エラーメッセージ
Start /ユーザー名/anaconda3/lib/python3.10/site-packages/sklearn/datasets/_openml.py:1002: FutureWarning: The default value of `parser` will change from `'liac-arff'` to `'auto'` in 1.4. You can set `parser='auto'` to silence this warning. Therefore, an `ImportError` will be raised from 1.4 if the dataset is dense and pandas is not installed. Note that the pandas parser may return different data types. See the Notes Section in fetch_openml's API doc for details. warn( epoch 0 Traceback (most recent call last): File ~/anaconda3/lib/python3.10/site-packages/spyder_kernels/py3compat.py:356 in compat_exec exec(code, globals, locals) File ~/Library/CloudStorage/保存先名/spyder/保存ファイル名.py:48 x = Variable(x_all[indexes[i : i + BATCHSIZE]]) File ~/anaconda3/lib/python3.10/site-packages/pandas/core/frame.py:3813 in __getitem__ indexer = self.columns._get_indexer_strict(key, "columns")[1] File ~/anaconda3/lib/python3.10/site-packages/pandas/core/indexes/base.py:6070 in _get_indexer_strict self._raise_if_missing(keyarr, indexer, axis_name) File ~/anaconda3/lib/python3.10/site-packages/pandas/core/indexes/base.py:6130 in _raise_if_missing raise KeyError(f"None of [{key}] are in the [{axis_name}]") KeyError: "None of [Int64Index([35591, 20294, 18337, 16931, 13072, 69126, 39634, 25769, 11346,\n 51243, 21720, 20385, 62344, 43301, 26070, 34432, 34125, 68804,\n 51753, 36023, 69494, 28196, 9979, 6770, 32787, 41588, 68411,\n 11848, 25625, 7457, 28528, 24608, 9720, 58788, 29144, 64967,\n 24248, 3573, 20452, 40200, 789, 69971, 61182, 64594, 21509,\n 69578, 64162, 24641, 22162, 29223, 57707, 16098, 27149, 37215,\n 29198, 53891, 26750, 28734, 60063, 21480, 40913, 39801, 65790,\n 50183, 57479, 17439, 793, 35548, 15707, 3294, 62658, 44846,\n 3386, 7345, 53687, 19329, 69516, 52891, 1857, 24471, 49636,\n 62473, 65023, 66143, 8098, 59403, 36681, 60229, 34429, 53443,\n 38653, 13604, 53509, 13794, 67996, 11017, 51464, 35539, 69727,\n 13377],\n dtype='int64')] are in the [columns]"
試したこと
おそらくこの部分がおかしいのではないかと思い,.valuesを加えたりと試したのですが解消されませんでした
for i in range(0, DATASIZE, BATCHSIZE): x = Variable(x_all[indexes[i : i + BATCHSIZE]]) t = Variable(y_all[indexes[i : i + BATCHSIZE]])
該当のソースコード
python
import numpy as np
from chainer import Variable, optimizers, serializers
from chainer import Chain
import chainer.functions as F
import chainer.links as L
from sklearn.datasets import fetch_openml


class MyMLP(Chain):
    """3-layer fully-connected MLP for MNIST (784 -> 100 -> 100 -> 10)."""

    def __init__(self, n_in=784, n_units=100, n_out=10):
        super(MyMLP, self).__init__(
            l1=L.Linear(n_in, n_units),
            l2=L.Linear(n_units, n_units),
            l3=L.Linear(n_units, n_out),
        )

    def __call__(self, x):
        # Forward pass; returns raw logits (softmax is applied inside
        # F.softmax_cross_entropy during training).
        h1 = F.relu(self.l1(x))
        h2 = F.relu(self.l2(h1))
        y = self.l3(h2)
        return y


print('Start')

# BUG FIX: since scikit-learn 0.24, fetch_openml returns a pandas
# DataFrame by default, so `x_all[indexes[...]]` was interpreted as a
# COLUMN lookup and raised
#   KeyError: "None of [Int64Index([...])] are in the [columns]".
# as_frame=False makes it return plain numpy arrays, which support
# integer-array ROW indexing.  parser='auto' silences the FutureWarning
# seen in the traceback.
mnist_X, mnist_y = fetch_openml(
    'mnist_784', version=1, data_home=".",
    return_X_y=True, as_frame=False, parser='auto',
)

x_all = mnist_X.astype(np.float32) / 255  # scale pixel values to [0, 1]
y_all = mnist_y.astype(np.int32)          # labels as integer class ids

model = MyMLP()
optimizer = optimizers.SGD()
optimizer.setup(model)

BATCHSIZE = 100
DATASIZE = 70000  # full MNIST (train + test) from OpenML

for epoch in range(20):
    print('epoch %d' % epoch)
    # Reshuffle sample order at every epoch.
    indexes = np.random.permutation(DATASIZE)
    for i in range(0, DATASIZE, BATCHSIZE):
        x = Variable(x_all[indexes[i : i + BATCHSIZE]])
        t = Variable(y_all[indexes[i : i + BATCHSIZE]])

        model.zerograds()

        y = model(x)

        loss = F.softmax_cross_entropy(y, t)

        loss.backward()

        optimizer.update()

serializers.save_npz("mymodel.npz", model)

print('Finish')

回答1件
あなたの回答
tips
プレビュー