Boston Housing dataset with PyTorch

In this notebook, we use the Boston Housing dataset to train a simple linear regression model with PyTorch. The goal is to walk through the full workflow: loading and scaling the data, converting it to tensors, and defining and training the model. The dataset was removed from scikit-learn's built-in loaders, so we fetch it from OpenML instead.

# imports
import torch
import torch.nn as nn
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.datasets import fetch_openml
from sklearn.model_selection import train_test_split
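One note before loading the data: the loss values printed later depend on the random initialization of the model's weights, so your numbers may differ slightly from those shown. If you want reproducible runs, seed PyTorch up front; the seed value here is arbitrary.

torch.manual_seed(0)  # arbitrary seed, for reproducible weight initialization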
# fetch the dataset from OpenML as a pandas DataFrame
data = fetch_openml("boston", version=1, as_frame=True)
X, Y = data.data, data.target

print(X.shape, Y.shape)
print(Y.dtype)
X.head(4)
(506, 13) (506,)
float64
CRIM ZN INDUS CHAS NOX RM AGE DIS RAD TAX PTRATIO B LSTAT
0 0.00632 18.0 2.31 0 0.538 6.575 65.2 4.0900 1 296.0 15.3 396.90 4.98
1 0.02731 0.0 7.07 0 0.469 6.421 78.9 4.9671 2 242.0 17.8 396.90 9.14
2 0.02729 0.0 7.07 0 0.469 7.185 61.1 4.9671 2 242.0 17.8 392.83 4.03
3 0.03237 0.0 2.18 0 0.458 6.998 45.8 6.0622 3 222.0 18.7 394.63 2.94
# What are the types of each column in X?
print(X.dtypes)
CRIM        float64
ZN          float64
INDUS       float64
CHAS       category
NOX         float64
RM          float64
AGE         float64
DIS         float64
RAD        category
TAX         float64
PTRATIO     float64
B           float64
LSTAT       float64
dtype: object
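Two columns, CHAS and RAD, come back as pandas category dtype rather than floats. The scaler below happened to accept them in this run, but categorical columns can trip up scikit-learn transformers depending on your pandas/scikit-learn versions, so casting everything to float first is a safe guard; a minimal sketch:

# cast the category columns (CHAS, RAD) to plain floats so the scaler
# sees a purely numeric matrix; a no-op for the columns already float64
X = X.astype("float64")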
# standardize the features to zero mean and unit variance
from sklearn.preprocessing import StandardScaler
scaler = StandardScaler()
X_scaled = scaler.fit_transform(X)

pd.DataFrame(X_scaled, columns=X.columns).head(4)
CRIM ZN INDUS CHAS NOX RM AGE DIS RAD TAX PTRATIO B LSTAT
0 -0.419782 0.284830 -1.287909 -0.272599 -0.144217 0.413672 -0.120013 0.140214 -0.982843 -0.666608 -1.459000 0.441052 -1.075562
1 -0.417339 -0.487722 -0.593381 -0.272599 -0.740262 0.194274 0.367166 0.557160 -0.867883 -0.987329 -0.303094 0.441052 -0.492439
2 -0.417342 -0.487722 -0.593381 -0.272599 -0.740262 1.282714 -0.265812 0.557160 -0.867883 -0.987329 -0.303094 0.396427 -1.208727
3 -0.416750 -0.487722 -1.306878 -0.272599 -0.835284 1.016303 -0.809889 1.077737 -0.752922 -1.106115 0.113032 0.416163 -1.361517
# now we can split the data into train and test sets
X_train, X_test, Y_train, Y_test = train_test_split(X_scaled, Y, test_size=0.2, random_state=42)
# convert to torch tensors
X_train = torch.tensor(X_train, dtype=torch.float32)
Y_train = torch.tensor(Y_train.values, dtype=torch.float32).view(-1, 1)
X_test = torch.tensor(X_test, dtype=torch.float32)
Y_test = torch.tensor(Y_test.values, dtype=torch.float32).view(-1, 1)
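A caveat on the scaling step above: the scaler was fit on all 506 rows before splitting, so the test rows influence the normalization statistics. At this scale the leak is harmless, but the cleaner pattern is to split first and fit the scaler on the training rows only. A sketch of that variant (hypothetical rework, new variable names for clarity):

# leak-free variant: split the raw features, then fit the scaler on train only
X_tr_raw, X_te_raw, Y_tr, Y_te = train_test_split(X, Y, test_size=0.2, random_state=42)
scaler = StandardScaler().fit(X_tr_raw)   # mean/std from training rows only
X_tr = scaler.transform(X_tr_raw)
X_te = scaler.transform(X_te_raw)         # test set scaled with train statistics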
# Define the model
model = nn.Linear(X_train.shape[1], 1)
# Define the loss function and optimizer
criterion = nn.MSELoss()
optimizer = torch.optim.SGD(model.parameters(), lr=0.01)

# define the training loop (full-batch gradient descent: one optimizer step per epoch)
def train(model, criterion, optimizer, X_train, Y_train, X_test, Y_test, epochs=100):
    train_losses = []
    test_losses = []
    model.reset_parameters()  # re-initialize weights in place so reruns start fresh (the optimizer keeps tracking the same tensors)
    for epoch in range(epochs):
        model.train()
        optimizer.zero_grad()
        outputs = model(X_train)
        loss = criterion(outputs, Y_train)
        loss.backward()
        optimizer.step()
        train_losses.append(loss.item())

        model.eval()
        with torch.no_grad():
            test_outputs = model(X_test)
            test_loss = criterion(test_outputs, Y_test)
            test_losses.append(test_loss.item())

        if (epoch + 1) % 10 == 0:
            print(f"Epoch [{epoch+1}/{epochs}], Train Loss: {loss.item():.4f}, Test Loss: {test_loss.item():.4f}")

    return train_losses, test_losses
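Each epoch above takes one full-batch gradient step over all 404 training rows, which is fine at this size. On larger datasets you would typically switch the inner loop to mini-batches with torch.utils.data.DataLoader; a sketch of what that inner loop could look like (batch size chosen arbitrarily):

from torch.utils.data import DataLoader, TensorDataset

# hypothetical mini-batch version of the per-epoch update
loader = DataLoader(TensorDataset(X_train, Y_train), batch_size=64, shuffle=True)
for xb, yb in loader:
    optimizer.zero_grad()
    loss = criterion(model(xb), yb)   # loss on this mini-batch only
    loss.backward()
    optimizer.step()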
train_losses, test_losses = train(model, criterion, optimizer, X_train, Y_train, X_test, Y_test, epochs=1000)
Epoch [10/1000], Train Loss: 393.2908, Test Loss: 354.0582
Epoch [20/1000], Train Loss: 264.5840, Test Loss: 250.5241
Epoch [30/1000], Train Loss: 182.5464, Test Loss: 180.0341
Epoch [40/1000], Train Loss: 128.8590, Test Loss: 131.8857
Epoch [50/1000], Train Loss: 93.4891, Test Loss: 99.0665
Epoch [60/1000], Train Loss: 70.0907, Test Loss: 76.6803
Epoch [70/1000], Train Loss: 54.5545, Test Loss: 61.3754
Epoch [80/1000], Train Loss: 44.2011, Test Loss: 50.8788
Epoch [90/1000], Train Loss: 37.2759, Test Loss: 43.6532
Epoch [100/1000], Train Loss: 32.6250, Test Loss: 38.6577
Epoch [110/1000], Train Loss: 29.4872, Test Loss: 35.1864
Epoch [120/1000], Train Loss: 27.3591, Test Loss: 32.7597
Epoch [130/1000], Train Loss: 25.9063, Test Loss: 31.0509
Epoch [140/1000], Train Loss: 24.9067, Test Loss: 29.8372
Epoch [150/1000], Train Loss: 24.2122, Test Loss: 28.9661
Epoch [160/1000], Train Loss: 23.7236, Test Loss: 28.3331
Epoch [170/1000], Train Loss: 23.3748, Test Loss: 27.8662
Epoch [180/1000], Train Loss: 23.1213, Test Loss: 27.5159
Epoch [190/1000], Train Loss: 22.9331, Test Loss: 27.2478
Epoch [200/1000], Train Loss: 22.7901, Test Loss: 27.0383
Epoch [210/1000], Train Loss: 22.6786, Test Loss: 26.8705
Epoch [220/1000], Train Loss: 22.5893, Test Loss: 26.7331
Epoch [230/1000], Train Loss: 22.5160, Test Loss: 26.6177
Epoch [240/1000], Train Loss: 22.4543, Test Loss: 26.5187
Epoch [250/1000], Train Loss: 22.4012, Test Loss: 26.4319
Epoch [260/1000], Train Loss: 22.3547, Test Loss: 26.3544
Epoch [270/1000], Train Loss: 22.3133, Test Loss: 26.2842
Epoch [280/1000], Train Loss: 22.2760, Test Loss: 26.2198
Epoch [290/1000], Train Loss: 22.2420, Test Loss: 26.1599
Epoch [300/1000], Train Loss: 22.2108, Test Loss: 26.1038
Epoch [310/1000], Train Loss: 22.1820, Test Loss: 26.0509
Epoch [320/1000], Train Loss: 22.1553, Test Loss: 26.0007
Epoch [330/1000], Train Loss: 22.1303, Test Loss: 25.9529
Epoch [340/1000], Train Loss: 22.1070, Test Loss: 25.9073
Epoch [350/1000], Train Loss: 22.0852, Test Loss: 25.8636
Epoch [360/1000], Train Loss: 22.0647, Test Loss: 25.8216
Epoch [370/1000], Train Loss: 22.0454, Test Loss: 25.7812
Epoch [380/1000], Train Loss: 22.0273, Test Loss: 25.7424
Epoch [390/1000], Train Loss: 22.0102, Test Loss: 25.7049
Epoch [400/1000], Train Loss: 21.9941, Test Loss: 25.6688
Epoch [410/1000], Train Loss: 21.9789, Test Loss: 25.6339
Epoch [420/1000], Train Loss: 21.9645, Test Loss: 25.6003
Epoch [430/1000], Train Loss: 21.9509, Test Loss: 25.5678
Epoch [440/1000], Train Loss: 21.9381, Test Loss: 25.5364
Epoch [450/1000], Train Loss: 21.9259, Test Loss: 25.5060
Epoch [460/1000], Train Loss: 21.9144, Test Loss: 25.4767
Epoch [470/1000], Train Loss: 21.9034, Test Loss: 25.4483
Epoch [480/1000], Train Loss: 21.8931, Test Loss: 25.4208
Epoch [490/1000], Train Loss: 21.8832, Test Loss: 25.3942
Epoch [500/1000], Train Loss: 21.8739, Test Loss: 25.3685
Epoch [510/1000], Train Loss: 21.8650, Test Loss: 25.3436
Epoch [520/1000], Train Loss: 21.8565, Test Loss: 25.3195
Epoch [530/1000], Train Loss: 21.8485, Test Loss: 25.2961
Epoch [540/1000], Train Loss: 21.8408, Test Loss: 25.2734
Epoch [550/1000], Train Loss: 21.8335, Test Loss: 25.2515
Epoch [560/1000], Train Loss: 21.8266, Test Loss: 25.2302
Epoch [570/1000], Train Loss: 21.8200, Test Loss: 25.2096
Epoch [580/1000], Train Loss: 21.8137, Test Loss: 25.1896
Epoch [590/1000], Train Loss: 21.8076, Test Loss: 25.1702
Epoch [600/1000], Train Loss: 21.8019, Test Loss: 25.1514
Epoch [610/1000], Train Loss: 21.7964, Test Loss: 25.1331
Epoch [620/1000], Train Loss: 21.7911, Test Loss: 25.1154
Epoch [630/1000], Train Loss: 21.7861, Test Loss: 25.0982
Epoch [640/1000], Train Loss: 21.7812, Test Loss: 25.0814
Epoch [650/1000], Train Loss: 21.7766, Test Loss: 25.0652
Epoch [660/1000], Train Loss: 21.7722, Test Loss: 25.0494
Epoch [670/1000], Train Loss: 21.7680, Test Loss: 25.0340
Epoch [680/1000], Train Loss: 21.7639, Test Loss: 25.0191
Epoch [690/1000], Train Loss: 21.7600, Test Loss: 25.0046
Epoch [700/1000], Train Loss: 21.7563, Test Loss: 24.9905
Epoch [710/1000], Train Loss: 21.7527, Test Loss: 24.9767
Epoch [720/1000], Train Loss: 21.7492, Test Loss: 24.9634
Epoch [730/1000], Train Loss: 21.7459, Test Loss: 24.9504
Epoch [740/1000], Train Loss: 21.7427, Test Loss: 24.9377
Epoch [750/1000], Train Loss: 21.7396, Test Loss: 24.9253
Epoch [760/1000], Train Loss: 21.7367, Test Loss: 24.9133
Epoch [770/1000], Train Loss: 21.7338, Test Loss: 24.9016
Epoch [780/1000], Train Loss: 21.7311, Test Loss: 24.8902
Epoch [790/1000], Train Loss: 21.7285, Test Loss: 24.8791
Epoch [800/1000], Train Loss: 21.7259, Test Loss: 24.8682
Epoch [810/1000], Train Loss: 21.7235, Test Loss: 24.8576
Epoch [820/1000], Train Loss: 21.7211, Test Loss: 24.8473
Epoch [830/1000], Train Loss: 21.7188, Test Loss: 24.8372
Epoch [840/1000], Train Loss: 21.7166, Test Loss: 24.8274
Epoch [850/1000], Train Loss: 21.7145, Test Loss: 24.8178
Epoch [860/1000], Train Loss: 21.7124, Test Loss: 24.8084
Epoch [870/1000], Train Loss: 21.7104, Test Loss: 24.7993
Epoch [880/1000], Train Loss: 21.7085, Test Loss: 24.7904
Epoch [890/1000], Train Loss: 21.7066, Test Loss: 24.7816
Epoch [900/1000], Train Loss: 21.7048, Test Loss: 24.7731
Epoch [910/1000], Train Loss: 21.7031, Test Loss: 24.7648
Epoch [920/1000], Train Loss: 21.7014, Test Loss: 24.7566
Epoch [930/1000], Train Loss: 21.6997, Test Loss: 24.7487
Epoch [940/1000], Train Loss: 21.6982, Test Loss: 24.7409
Epoch [950/1000], Train Loss: 21.6966, Test Loss: 24.7333
Epoch [960/1000], Train Loss: 21.6951, Test Loss: 24.7258
Epoch [970/1000], Train Loss: 21.6937, Test Loss: 24.7186
Epoch [980/1000], Train Loss: 21.6923, Test Loss: 24.7114
Epoch [990/1000], Train Loss: 21.6909, Test Loss: 24.7045
Epoch [1000/1000], Train Loss: 21.6896, Test Loss: 24.6976
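The losses flatten out well before epoch 1000, which is easier to see on a plot; the matplotlib import at the top of the notebook finally earns its keep here:

# plot the per-epoch loss curves returned by train()
plt.plot(train_losses, label="train MSE")
plt.plot(test_losses, label="test MSE")
plt.xlabel("epoch")
plt.ylabel("MSE loss")
plt.legend()
plt.show()

As a sanity check on the final numbers: the target is the median home value in thousands of dollars, so a test MSE of about 24.7 corresponds to an RMSE of roughly 5, i.e. predictions off by about $5,000 on average:

# RMSE in the target's original units ($1000s)
with torch.no_grad():
    rmse = torch.sqrt(criterion(model(X_test), Y_test)).item()
print(f"Test RMSE: {rmse:.2f}")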
