I can't figure out the dtype issue and can't get rid of this error. Any help would be appreciated!
Code
import sys
import numpy
import csv

numpy.seterr(all='ignore')


def sigmoid(x):
    return 1. / (1 + numpy.exp(-x))


class dA(object):
    # Initialize the parameters
    def __init__(self, input=None, n_visible=2, n_hidden=3, \
                 W=None, hbias=None, vbias=None, numpy_rng=None):
        self.n_visible = n_visible  # number of units in the input layer
        self.n_hidden = n_hidden    # number of units in the hidden layer

        if numpy_rng is None:
            numpy_rng = numpy.random.RandomState(1234)

        if W is None:
            a = 1. / n_visible
            initial_W = numpy.array(numpy_rng.uniform(  # initialize the weights W uniformly
                low=-a,
                high=a,
                size=(n_visible, n_hidden)))
            W = initial_W

        if hbias is None:
            hbias = numpy.zeros(n_hidden)  # initialize the hidden-layer bias to 0

        if vbias is None:
            vbias = numpy.zeros(n_visible)  # initialize the visible-layer bias to 0

        self.numpy_rng = numpy_rng
        self.x = input
        self.W = W
        self.W_prime = self.W.T
        self.hbias = hbias
        self.vbias = vbias
        # self.params = [self.W, self.hbias, self.vbias]

    def get_corrupted_input(self, input, corruption_level):
        assert corruption_level < 1
        # print (self.numpy_rng.binomial(size=input.shape, n=1, p=1-corruption_level))
        return self.numpy_rng.binomial(size=input.shape,
                                       n=1,
                                       p=1-corruption_level) * input

    # Encode
    def get_hidden_values(self, input):
        print (sigmoid(numpy.dot(input, self.W) + self.hbias))
        return sigmoid(numpy.dot(input, self.W) + self.hbias)  # sigmoid(weighted sum of input * W)

    # Decode
    def get_reconstructed_input(self, hidden):
        return sigmoid(numpy.dot(hidden, self.W_prime) + self.vbias)  # sigmoid(weighted sum of hidden * W')

    def train(self, lr=0.1, corruption_level=0.3, input=None):
        if input is not None:
            self.x = input

        x = self.x
        tilde_x = self.get_corrupted_input(x, corruption_level)
        y = self.get_hidden_values(tilde_x)
        z = self.get_reconstructed_input(y)

        L_h2 = x - z
        L_h1 = numpy.dot(L_h2, self.W) * y * (1 - y)

        L_vbias = L_h2
        L_hbias = L_h1
        L_W = numpy.dot(tilde_x.T, L_h1) + numpy.dot(L_h2.T, y)

        self.W += lr * L_W
        self.hbias += lr * numpy.mean(L_hbias, axis=0)
        self.vbias += lr * numpy.mean(L_vbias, axis=0)

    def negative_log_likelihood(self, corruption_level=0.3):
        tilde_x = self.get_corrupted_input(self.x, corruption_level)
        y = self.get_hidden_values(tilde_x)
        z = self.get_reconstructed_input(y)

        cross_entropy = - numpy.mean(
            numpy.sum(self.x * numpy.log(z) +
                      (1 - self.x) * numpy.log(1 - z),
                      axis=1))

        return cross_entropy

    def reconstruct(self, x):
        y = self.get_hidden_values(x)
        z = self.get_reconstructed_input(y)
        return z


def test_dA(learning_rate=0.1, corruption_level=0.3, training_epochs=50):
    data = numpy.array([])
    with open('train.csv', 'r') as f:
        reader = csv.reader(f)
        # header = next(x)
        for row in reader:
            data = numpy.append(data, row)

    rng = numpy.random.RandomState(123)

    # construct dA
    da = dA(input=data, n_visible=5, n_hidden=1, numpy_rng=rng)

    # train
    for epoch in range(training_epochs):
        da.train(lr=learning_rate, corruption_level=corruption_level)
        # cost = da.negative_log_likelihood(corruption_level=corruption_level)
        # print >> sys.stderr, 'Training epoch %d, cost is ' % epoch, cost
        # learning_rate *= 0.95

    # test
    x = numpy.array([])
    with open('test.csv', 'r') as f:
        reader = csv.reader(f)
        # header = next(x)
        for row in reader:
            x = numpy.append(x, row)
    print (da.reconstruct(x))


if __name__ == "__main__":
    test_dA()
Error message
Traceback (most recent call last):
  File "C:\Users\yossi\Documents\sotuken\Autoencoder.py", line 159, in <module>
  File "C:\Users\yossi\Documents\sotuken\Autoencoder.py", line 140, in test_dA
  File "C:\Users\yossi\Documents\sotuken\Autoencoder.py", line 87, in train
  File "C:\Users\yossi\Documents\sotuken\Autoencoder.py", line 70, in get_corrupted_input
TypeError: ufunc 'multiply' did not contain a loop with signature matching types dtype('<U32') dtype('<U32') dtype('<U32')
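For reference, dtype('<U32') means the arrays hold Unicode strings rather than numbers: csv.reader returns every field as a string, and numpy.append promotes the initially empty float array together with those string rows to a string dtype. The binomial mask built in get_corrupted_input is an integer array, and NumPy has no multiply loop for integers times strings, hence the TypeError. Below is a minimal sketch of the cause and of one cast that avoids it; it assumes train.csv contains only numeric fields, and the reshape to 5 columns (to match n_visible=5) is likewise an assumption about the file's layout, not something stated in the question.

import numpy

# csv.reader yields strings, so appending rows to an empty float array
# promotes the whole array to a Unicode dtype such as '<U32'.
rows = [['0.1', '0.9', '0.3', '0.2', '0.5']]   # stand-in for csv.reader output
data = numpy.array([])
for row in rows:
    data = numpy.append(data, row)
print(data.dtype)   # <U32 -- strings, not numbers

# Multiplying the integer corruption mask by this string array reproduces
# the reported error:
#   mask = numpy.random.RandomState(123).binomial(size=data.shape, n=1, p=0.7)
#   mask * data   # TypeError: ufunc 'multiply' did not contain a loop ...

# Casting to float (and shaping to n_visible columns) before constructing dA
# avoids it:
data = data.astype(numpy.float64).reshape(-1, 5)
print(data.dtype, data.shape)   # float64 (1, 5)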
