hyperasを使用してMLPのパラメータチューニングを自動調整したいのですがエラーが出てプログラムを回すことができません.エラーを消すにはどのようにすればよいでしょうか?
以下にソースコードとエラー,参照したurlを添付します.
python
1コード 2"""importするもの""" 3!pip install hyperas 4!pip install hyperopt 5 6from hyperopt import Trials, STATUS_OK, tpe 7from hyperas import optim 8from hyperas.distributions import choice 9 10import tensorflow as tf 11from tensorflow.keras.models import Sequential 12from tensorflow.keras.layers import Activation, Dense, Dropout, LeakyReLU 13from tensorflow.keras import regularizers 14from tensorflow.keras.optimizers import Adagrad 15from tensorflow.keras.optimizers import Adam 16from tensorflow.keras.models import load_model 17from keras.regularizers import l1, l2 18from keras import regularizers 19from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint, CSVLogger 20 21from sklearn.model_selection import train_test_split 22import numpy as np 23import os 24import time 25import csv 26import cv2 27import math 28 29#ファイルの入出力 30os.chdir('/content/drive/My Drive/Colab Notebooks/my_drive/deep_learning_google') 31 32X=[] 33Y=[] 34 35#画像(入力データ) 36X = np.load('data/numpy/pictures_120x120.npy') 37#座標(出力データ) 38location = np.loadtxt("data/coordinates_data/sigma_x/max_stress_coordinates_a.csv",delimiter=",",skiprows=0) 39Y.extend(location) 40location = np.loadtxt("data/coordinates_data/sigma_x/max_stress_coordinates_b.csv",delimiter=",",skiprows=0) 41Y.extend(location) 42location = np.loadtxt("data/coordinates_data/sigma_x/max_stress_coordinates_c.csv",delimiter=",",skiprows=0) 43Y.extend(location) 44location = np.loadtxt("data/coordinates_data/sigma_x/max_stress_coordinates_d.csv",delimiter=",",skiprows=0) 45Y.extend(location) 46location = np.loadtxt("data/coordinates_data/sigma_x/max_stress_coordinates_e.csv",delimiter=",",skiprows=0) 47Y.extend(location) 48location = np.loadtxt("data/coordinates_data/sigma_x/max_stress_coordinates_f.csv",delimiter=",",skiprows=0) 49Y.extend(location) 50location = np.loadtxt("data/coordinates_data/sigma_x/max_stress_coordinates_g.csv",delimiter=",",skiprows=0) 51Y.extend(location) 52location = np.loadtxt("data/coordinates_data/sigma_x/max_stress_coordinates_h.csv",delimiter=",",skiprows=0) 53Y.extend(location) 54location = np.loadtxt("data/coordinates_data/sigma_x/max_stress_coordinates_i.csv",delimiter=",",skiprows=0) 55Y.extend(location) 56 57Y = np.array(Y) 58 59def prepare_data(): 60 x_train,x_test,y_train,y_test = train_test_split(X,Y,test_size=0.3) 61 return x_train, y_train, x_test, y_test 62 63def create_model(x_train, y_train, x_test, y_test): 64 65 model = Sequential() 66 model.add(Dense({{choice([1000,2000,3000])}}, 67 input_dim=14400,kernel_initializer='random_normal',bias_initializer='zeros')) 68 69 model.add(LeakyReLU()) 70 model.add(Dropout({{uniform(0,1)}})) 71 72 model.add(Dense({{choice([100,200,300,400,500,600,700,800,900,1000])}}, 73 kernel_initializer='random_normal',bias_initializer='zeros')) 74 model.add(LeakyReLU()) 75 model.add(Dropout({{uniform(0,1)}})) 76 77 model.add(Dense({{choice([100,200,300,400,500,600,700,800,900,1000])}}, 78 kernel_initializer='random_normal', 79 bias_initializer='zeros')) 80 model.add(LeakyReLU()) 81 model.add(Dropout(({{uniform(0,1)}}))) 82 83 model.add(Dense({{choice([50,150,200,250,300,350,400,450])}}, 84 kernel_initializer='random_normal',bias_initializer='zeros')) 85 model.add(LeakyReLU()) 86 model.add(Dropout(({{uniform(0,1)}}))) 87 88 model.add(Dense({{choice([10,20,30,40,50,60,70,80,90,100])}}, 89 kernel_initializer='random_normal',bias_initializer='zeros')) 90 model.add(LeakyReLU()) 91 model.add(Dropout(({{uniform(0,1)}}))) 92 93 model.add(Dense({{choice([5,10,15,20,25,30,40,45,50])}}, 94 kernel_initializer='random_normal',bias_initializer='zeros')) 95 model.add(LeakyReLU()) 96 model.add(Dropout(({{uniform(0,1)}}))) 97 98 model.add(Dense(2)) 99 model.add(Activation("linear")) 100 101 opt = Adam(lr=0.0001) 102 model.compile(loss="mean_absolute_percentage_error", 103 optimizer=opt) 104 history = model.fit(x_train,y_train, 105 nb_epoch=200, 106 batch_size=32, 107 verbose=1, 108 validation_data=(x_test,y_test)) 109 return {'loss': val_loss, 'status': STATUS_OK, 'model': model} 110 111if __name__=="__main__": 112 113 best_run,best_model = optim.minimize(model=create_model, 114 data=prepare_data, 115 algo=tpe.suggest, 116 max_evals=200, 117 trials=Trials()) 118 print(best_model.summary()) 119 print(best_run) 120 _, _, x_test, y_test = prepare_data() 121 val_loss, val_acc = best_model.evaluate(x_test, y_test) 122 print("val_loss: ", val_loss) 123 124エラー内容 125FileNotFoundError Traceback (most recent call last) 126<ipython-input-10-d5b8b93952c3> in <module>() 127 118 algo=tpe.suggest, 128 119 max_evals=200, 129--> 120 trials=Trials()) 130 121 print(best_model.summary()) 131 122 print(best_run) 132 1333 frames 134/usr/local/lib/python3.6/dist-packages/hyperas/optim.py in minimize(model, data, algo, max_evals, trials, functions, rseed, notebook_name, verbose, eval_space, return_space, keep_temp) 135 67 notebook_name=notebook_name, 136 68 verbose=verbose, 137---> 69 keep_temp=keep_temp) 138 70 139 71 best_model = None 140 141/usr/local/lib/python3.6/dist-packages/hyperas/optim.py in base_minimizer(model, data, functions, algo, max_evals, trials, rseed, full_model_string, notebook_name, verbose, stack, keep_temp) 142 96 model_str = full_model_string 143 97 else: 144---> 98 model_str = get_hyperopt_model_string(model, data, functions, notebook_name, verbose, stack) 145 99 temp_file = './temp_model.py' 146 100 write_temp_files(model_str, temp_file) 147 148/usr/local/lib/python3.6/dist-packages/hyperas/optim.py in get_hyperopt_model_string(model, data, functions, notebook_name, verbose, stack) 149 183 else: 150 184 calling_script_file = os.path.abspath(inspect.stack()[stack][1]) 151--> 185 with open(calling_script_file, 'r') as f: 152 186 source = f.read() 153 187 154 155FileNotFoundError: [Errno 2] No such file or directory: '/content/drive/My Drive/Colab Notebooks/my_drive/deep_learning_google/<ipython-input-10-d5b8b93952c3>' 156 157参照url 158https://qiita.com/UpAllNight/items/773396f3070eb9b3382d
あなたの回答
tips
プレビュー