
Question edit history

1

Added a photo

2021/05/05 04:39

Posted

taiyo.

Score 2

title CHANGED
File without changes
body CHANGED
@@ -96,4 +96,75 @@
 
 mac/latest,jupyter/latest

- BEST REGARDS.
+ BEST REGARDS.
+
+
+ ![Image description](606b3e862f8144ad2723f6d6bfc5974a.png)
+
+ After the change
+ ```
+
+ import torch
+ import torch.nn as nn
+ import torch.nn.functional as F
+ from torch.utils.data import Dataset, DataLoader
+ from sklearn.model_selection import train_test_split
+
+ import pandas as pd
+ import matplotlib.pyplot as plt
+ %matplotlib inline
+
+ class Model(nn.Module):
+     def __init__(self, in_features=4, h1=8, h2=9, out_features=3):
+         super().__init__()
+         self.fc1 = nn.Linear(in_features, h1)
+         self.fc2 = nn.Linear(h1, h2)
+         self.out = nn.Linear(h2, out_features)  # output layer maps to the 3 classes, not back to in_features
+
+     def forward(self, x):
+         x = F.relu(self.fc1(x))
+         x = F.relu(self.fc2(x))
+         x = self.out(x)
+         return x
+
+
+ torch.manual_seed(32)
+ model = Model()
+
+ df = pd.read_csv('../Data/iris.csv')
+ df.head()
+
+ X = df.drop('target', axis=1).values
+ y = df['target'].values
+
+ X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=33)
+
+ X_train = torch.FloatTensor(X_train)
+ X_test = torch.FloatTensor(X_test)
+ # y_train = F.one_hot(torch.LongTensor(y_train))  # not needed with Cross Entropy Loss
+ # y_test = F.one_hot(torch.LongTensor(y_test))
+ y_train = torch.LongTensor(y_train)
+ y_test = torch.LongTensor(y_test)
+
+ criterion = nn.CrossEntropyLoss()
+ optimizer = torch.optim.Adam(model.parameters(), lr=0.01)
+
+
+ model.parameters()
+
+ epochs = 100
+ losses = []
+
+ for i in range(epochs):
+
+     i += 1
+     y_pred = model.forward(X_train)
+     loss = criterion(y_pred, y_train)  # CrossEntropyLoss takes (predictions, targets); train against y_train, not y_test
+     losses.append(loss.item())  # store the scalar value so the list can be plotted later
+
+     if i % 10 == 1:
+         print(f'epoch: {i:2}  loss: {loss.item():10.8f}')
+     optimizer.zero_grad()
+     loss.backward()
+     optimizer.step()
+ ```