# neural-network-using-pytrorch

PyTorch is a popular open-source machine learning library used for applications such as computer vision and natural language processing. Developed by Facebook's AI Research lab, it provides a flexible and dynamic interface for building and training neural networks. This README walks through building, training, evaluating, and saving a small fully connected classifier for the Iris dataset.
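As a quick illustration of that dynamic, define-by-run style (a minimal sketch, separate from the Iris walkthrough below):

```python
import torch

# Operations build the computation graph as they run,
# and gradients are available immediately via autograd.
x = torch.tensor([1.0, 2.0, 3.0], requires_grad=True)
y = (x ** 2).sum()
y.backward()
print(x.grad)  # tensor([2., 4., 6.])
```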

```python
import torch
import torch.nn as nn
import torch.nn.functional as F

import pandas as pd
import matplotlib.pyplot as plt

from sklearn.model_selection import train_test_split

# Jupyter notebook magic so plots render inline
%matplotlib inline
```

```python
class Model(nn.Module):
    # Input layer (4 features of the flower) -->
    # Hidden Layer 1 (number of neurons) -->
    # Hidden Layer 2 (number of neurons) -->
    # Output layer (3 classes of iris flowers)
    def __init__(self, in_features=4, h1=8, h2=9, out_features=3):
        super().__init__()  # instantiate our nn.Module
        self.fc1 = nn.Linear(in_features, h1)
        self.fc2 = nn.Linear(h1, h2)
        self.out = nn.Linear(h2, out_features)

    def forward(self, x):
        x = F.relu(self.fc1(x))
        x = F.relu(self.fc2(x))
        x = self.out(x)
        return x

# Pick a manual seed for reproducibility
torch.manual_seed(41)

# Create an instance of the model
model = Model()
```

```python
# Load the Iris dataset
url = 'https://gist.githubusercontent.com/netj/8836201/raw/6f9306ad21398ea43cba4f7d537619d0e07d5ae3/iris.csv'
my_df = pd.read_csv(url)
my_df.tail()

# Encode the string labels as numbers
my_df['variety'] = my_df['variety'].replace('Setosa', 0.0)
my_df['variety'] = my_df['variety'].replace('Versicolor', 1.0)
my_df['variety'] = my_df['variety'].replace('Virginica', 2.0)
my_df

# Split into features (X) and labels (y)
X = my_df.drop('variety', axis=1)
y = my_df['variety']

# Convert to NumPy arrays
X = X.values
y = y.values

# Train/test split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=41)

# Convert to PyTorch tensors
X_train = torch.FloatTensor(X_train)
X_test = torch.FloatTensor(X_test)
y_train = torch.LongTensor(y_train)
y_test = torch.LongTensor(y_test)
```

```python
# Set the criterion to measure the error between predictions and labels
criterion = nn.CrossEntropyLoss()

# Choose the Adam optimizer; lr = learning rate
# (if the error doesn't go down after a bunch of iterations (epochs), lower the learning rate)
optimizer = torch.optim.Adam(model.parameters(), lr=0.01)

# Displays the bound method; in a notebook its repr also shows the model's layers
model.parameters
```
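The comment above suggests lowering the learning rate by hand if the loss stops improving. As an optional sketch (not part of the original notebook), a scheduler such as `ReduceLROnPlateau` can automate that:

```python
# Optional: automatically reduce the learning rate when the loss plateaus
scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, mode='min', factor=0.5, patience=10)

# Inside the training loop, call this after computing each epoch's loss:
# scheduler.step(loss)
```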

```python
epochs = 100
losses = []
for i in range(epochs):
    # Go forward and get a prediction
    y_pred = model.forward(X_train)  # Get predicted results

    # Measure the loss/error (it will be high at first)
    loss = criterion(y_pred, y_train)  # predicted values vs y_train

    # Keep track of our losses
    losses.append(loss.detach().numpy())

    # Print every 10 epochs
    if i % 10 == 0:
        print(f'Epoch: {i} and loss: {loss}')

    # Do some back propagation: take the error from forward propagation and feed it back
    # through the network to fine-tune the weights
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()
```

```python
plt.plot(range(epochs), losses)
plt.ylabel('Loss')
plt.xlabel('Epoch')
```

```python
# Evaluate the model on the test data set (data it has not seen during training)
with torch.no_grad():  # Turn off gradient tracking (no back propagation needed here)
    y_eval = model.forward(X_test)  # X_test are features from our test set; y_eval will be predictions
    loss = criterion(y_eval, y_test)  # Find the loss or error

loss
```

```python
correct = 0
with torch.no_grad():
    for i, data in enumerate(X_test):
        y_val = model.forward(data)

        if y_test[i] == 0:
            x = 'Setosa'
        elif y_test[i] == 1:
            x = 'Versicolor'
        else:
            x = 'Virginica'

        # Will tell us what type of flower class our network thinks it is
        print(f'{i+1}.)  {str(y_val)} \t {y_test[i]} \t {y_val.argmax().item()}')

        # Correct or not
        if y_val.argmax().item() == y_test[i]:
            correct += 1

print(f'We got {correct} correct!')
```
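For comparison, the same correctness check can be done in a single vectorized pass (a sketch, not part of the original notebook):

```python
with torch.no_grad():
    preds = model(X_test).argmax(dim=1)          # predicted class per test row
    accuracy = (preds == y_test).float().mean()  # fraction of correct predictions
print(f'Test accuracy: {accuracy.item():.2%}')
```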

```python
new_iris = torch.tensor([4.7, 3.2, 1.3, 0.2])

with torch.no_grad():
    print(f'New Iris: {new_iris} \t {model.forward(new_iris)}')

newer_iris = torch.tensor([5.9, 3.0, 5.1, 1.8])

with torch.no_grad():
    print(f'New Iris: {newer_iris} \t {model.forward(newer_iris)}')
```

```python
with torch.no_grad():
    print(model(newer_iris))

# Save the model's learned weights (the state dict)
torch.save(model.state_dict(), 'my_really_awesome_iris_model.pt')

# Load the saved weights into a fresh instance of the network
new_model = Model()
new_model.load_state_dict(torch.load('my_really_awesome_iris_model.pt'))
```

```python
# Put the reloaded model in evaluation mode
new_model.eval()

# The same network rebuilt with the layer sizes hard-coded instead of passed as arguments
class Model(nn.Module):
    def __init__(self):
        super().__init__()
        self.fc1 = nn.Linear(4, 8)
        self.fc2 = nn.Linear(8, 9)
        self.out = nn.Linear(9, 3)

    def forward(self, x):
        # Apply the same ReLU activations used during training so loaded weights behave identically
        x = F.relu(self.fc1(x))
        x = F.relu(self.fc2(x))
        x = self.out(x)
        return x
```