This post collects the code for another article:

⚡再啃-Deep-Learning

A notebook version is also provided: code link

---

Neural Networks

Perceptron

def AND(x1, x2):
    w1, w2, theta = 0.5, 0.5, 0.7
    tmp = x1 * w1 + x2 * w2
    if tmp <= theta:
        return 0
    else:
        return 1


def OR(x1, x2):
    w1, w2, theta = 0.5, 0.5, 0.2
    tmp = x1 * w1 + x2 * w2
    if tmp <= theta:
        return 0
    else:
        return 1


# The NOT gate only looks at the first input and ignores the other
def NOT(x1, x2):
    w1, w2, theta = -1, 0, 0
    tmp = x1 * w1 + x2 * w2 + 1
    return tmp


# XOR is not linearly separable, so it needs a combination of perceptrons (a multi-layer perceptron)
def XOR(x1, x2):
    # XOR = AND(OR, NAND); `not AND(...)` plays the role of NAND here,
    # since the resulting bool is treated as 0/1 inside AND
    return AND(OR(x1, x2), not AND(x1, x2))


print(AND(0, 1), AND(1, 1), OR(0, 1), OR(0, 0))
print(NOT(0, 1), NOT(1, 1), NOT(0, 0), NOT(1, 0))
print(XOR(0, 1), XOR(1, 1), XOR(0, 0), XOR(1, 0))

0 1 1 0
1 0 1 0
1 0 0 1
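
The XOR comment above mentions a NAND gate, which the code emulates with Python's `not`. As a small sketch not taken from the original post (NAND and XOR2 are names added here for illustration), NAND can be written as one more single-layer perceptron, and XOR then becomes a two-layer combination of the gates defined above:

# NAND: same structure as AND, with the weights and threshold negated
def NAND(x1, x2):
    w1, w2, theta = -0.5, -0.5, -0.7
    tmp = x1 * w1 + x2 * w2
    if tmp <= theta:
        return 0
    else:
        return 1


def XOR2(x1, x2):
    # two layers: (OR, NAND) feed into AND
    return AND(OR(x1, x2), NAND(x1, x2))


print(XOR2(0, 1), XOR2(1, 1), XOR2(0, 0), XOR2(1, 0))  # expected: 1 0 0 1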

---

PyTorch

MNIST Handwritten Digit Recognition

The quintessential beginner example [1]; for detailed walkthroughs, see [2] and [3].

import torch
import torchvision
from torch.utils.data import DataLoader
import matplotlib.pyplot as plt

# Hyperparameters
n_epochs = 3
batch_size_train = 64
batch_size_test = 1000
learning_rate = 0.01
momentum = 0.5
log_interval = 10
random_seed = 42
torch.manual_seed(random_seed)
MNIST_mean = 0.1307
MNIST_std = 0.3081

train_loader = torch.utils.data.DataLoader(
    torchvision.datasets.MNIST(
        './_data_set/',
        train=True,
        download=True,
        transform=torchvision.transforms.Compose([
            torchvision.transforms.ToTensor(),
            torchvision.transforms.Normalize((MNIST_mean, ), (MNIST_std, ))
        ])),
    batch_size=batch_size_train,
    shuffle=True)
test_loader = torch.utils.data.DataLoader(
    torchvision.datasets.MNIST(
        './_data_set/',
        train=False,
        download=True,
        transform=torchvision.transforms.Compose([
            torchvision.transforms.ToTensor(),
            torchvision.transforms.Normalize((MNIST_mean, ), (MNIST_std, ))
        ])),
    batch_size=batch_size_test,
    shuffle=True)
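
The constants 0.1307 and 0.3081 passed to Normalize are the mean and standard deviation of the MNIST training pixels scaled to [0, 1]. As a quick sketch that is not part of the original notebook, they can be recomputed from the raw training images:

# Recompute the normalization constants from the raw training images
raw_train = torchvision.datasets.MNIST('./_data_set/', train=True, download=True)
pixels = raw_train.data.float() / 255.0           # (60000, 28, 28), values in [0, 1]
print(pixels.mean().item(), pixels.std().item())  # roughly 0.1307 and 0.3081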

batch_idx, (example_data, example_targets) = next(enumerate(train_loader))
# Each batch holds 64 single-channel 28x28 images
print(example_data.shape)
# The ground-truth digit label of each image
print(example_targets)

fig = plt.figure()
for i in range(9):
    plt.subplot(3, 3, i + 1)
    plt.tight_layout()
    plt.imshow(example_data[i][0], cmap='gray', interpolation='none')
    plt.title(f'number: {example_targets[i]}')
    plt.xticks([])
    plt.yticks([])
plt.show()
torch.Size([64, 1, 28, 28])
tensor([1, 2, 8, 5, 2, 6, 9, 9, 9, 4, 0, 3, 9, 9, 5, 6, 7, 8, 8, 9, 2, 6, 9, 3,
        0, 5, 0, 7, 6, 1, 2, 0, 7, 4, 6, 0, 6, 9, 7, 0, 7, 3, 2, 5, 9, 0, 4, 8,
        3, 6, 4, 0, 3, 2, 6, 6, 3, 2, 2, 3, 6, 7, 8, 4])

[Figure: 3x3 grid of example MNIST digits with their labels]

import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim


class Net(nn.Module):
    def __init__(self):
        super(Net, self).__init__()
        # Convolutional layers
        self.conv1 = nn.Conv2d(1, 10, kernel_size=5)
        self.conv2 = nn.Conv2d(10, 20, kernel_size=5)
        # Dropout layer (randomly drops units) to reduce overfitting; use the
        # nn.Dropout2d module rather than calling F.dropout2d directly, so that
        # model.train()/model.eval() toggle it automatically
        self.conv2_drop = nn.Dropout2d()
        # Fully connected layers
        self.fc1 = nn.Linear(320, 50)
        self.fc2 = nn.Linear(50, 10)

    def forward(self, x):
        # input 28x28x1 -> conv 24x24x10 -> max-pool 12x12x10 -> ReLU 12x12x10
        x = F.relu(F.max_pool2d(self.conv1(x), 2))
        # 12x12x10 -> conv 8x8x20 -> max-pool 4x4x20 -> ReLU 4x4x20
        x = F.relu(F.max_pool2d(self.conv2_drop(self.conv2(x)), 2))
        # 320 = 4x4x20: flatten the tensor into a vector (total features unchanged)
        # to prepare it for the fully connected layers
        x = x.view(-1, 320)
        x = F.relu(self.fc1(x))
        x = F.dropout(x, training=self.training)
        x = self.fc2(x)
        # dim=1 avoids the "implicit dimension choice" deprecation warning
        return F.log_softmax(x, dim=1)
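
The shape arithmetic in the forward comments can be sanity-checked with a dummy batch. This is a scratch check, not part of the original notebook (constructing an extra Net advances the global random seed, so run it in a separate session if you want to reproduce the logs below exactly):

# Dummy batch of two 28x28 grayscale images -> one log-probability per digit class
probe = Net()
print(probe(torch.randn(2, 1, 28, 28)).shape)  # torch.Size([2, 10])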


network = Net()
optimizer = optim.SGD(network.parameters(),
                      lr=learning_rate,
                      momentum=momentum)

train_losses = []
train_counter = []
test_losses = []
test_counter = [i * len(train_loader.dataset) for i in range(n_epochs + 1)]


def train(epoch):
    network.train()
    for batch_idx, (data, target) in enumerate(train_loader):
        optimizer.zero_grad()
        output = network(data)
        loss = F.nll_loss(output, target)
        loss.backward()
        optimizer.step()
        if batch_idx % log_interval == 0:
            print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(
                epoch + 1, batch_idx * len(data), len(train_loader.dataset),
                100. * batch_idx / len(train_loader), loss.item()))
            train_losses.append(loss.item())
            train_counter.append((batch_idx * 64) +
                                 (epoch * len(train_loader.dataset)))
            torch.save(network.state_dict(), './_data_set/MNIST/model.pth')
            torch.save(optimizer.state_dict(),
                       './_data_set/MNIST/optimizer.pth')


def test():
    network.eval()
    test_loss = 0
    correct = 0
    # The test set needs no backpropagation, so disable gradient tracking with torch.no_grad()
    with torch.no_grad():
        for data, target in test_loader:
            output = network(data)
            test_loss += F.nll_loss(output, target, reduction='sum').item()
            pred = output.data.max(1, keepdim=True)[1]
            correct += pred.eq(target.data.view_as(pred)).sum()
    test_loss /= len(test_loader.dataset)
    test_losses.append(test_loss)
    print('\nTest set: Avg. loss: {:.4f}, Accuracy: {}/{} ({:.0f}%)\n'.format(
        test_loss, correct, len(test_loader.dataset),
        100. * correct / len(test_loader.dataset)))


if __name__ == '__main__':
    # train(0)
    # Run test() once before training; without this initial entry the plot below
    # fails with "x and y must be the same size"
    test()
    for epoch in range(n_epochs):
        train(epoch)
        test()


Train Epoch: 1 [0/60000 (0%)]	Loss: 2.306355
Train Epoch: 1 [640/60000 (1%)]	Loss: 2.309732
Train Epoch: 1 [1280/60000 (2%)]	Loss: 2.263060
Train Epoch: 1 [1920/60000 (3%)]	Loss: 2.253021
Train Epoch: 1 [2560/60000 (4%)]	Loss: 2.239486
Train Epoch: 1 [3200/60000 (5%)]	Loss: 2.232780
Train Epoch: 1 [3840/60000 (6%)]	Loss: 2.223558
Train Epoch: 1 [4480/60000 (7%)]	Loss: 2.174626
Train Epoch: 1 [5120/60000 (9%)]	Loss: 2.122881
Train Epoch: 1 [5760/60000 (10%)]	Loss: 2.025848
Train Epoch: 1 [6400/60000 (11%)]	Loss: 1.923471
Train Epoch: 1 [7040/60000 (12%)]	Loss: 1.832063
Train Epoch: 1 [7680/60000 (13%)]	Loss: 1.906025
Train Epoch: 1 [8320/60000 (14%)]	Loss: 1.673950
Train Epoch: 1 [8960/60000 (15%)]	Loss: 1.537203
Train Epoch: 1 [9600/60000 (16%)]	Loss: 1.439621
Train Epoch: 1 [10240/60000 (17%)]	Loss: 1.275429
Train Epoch: 1 [10880/60000 (18%)]	Loss: 1.183242
Train Epoch: 1 [11520/60000 (19%)]	Loss: 1.182200
Train Epoch: 1 [12160/60000 (20%)]	Loss: 1.154036
Train Epoch: 1 [12800/60000 (21%)]	Loss: 0.921464
Train Epoch: 1 [13440/60000 (22%)]	Loss: 0.877538
Train Epoch: 1 [14080/60000 (23%)]	Loss: 0.917646
Train Epoch: 1 [14720/60000 (25%)]	Loss: 0.928683
Train Epoch: 1 [15360/60000 (26%)]	Loss: 0.883320
Train Epoch: 1 [16000/60000 (27%)]	Loss: 0.829271
Train Epoch: 1 [16640/60000 (28%)]	Loss: 0.843372
Train Epoch: 1 [17280/60000 (29%)]	Loss: 0.962177
Train Epoch: 1 [17920/60000 (30%)]	Loss: 0.816695
Train Epoch: 1 [18560/60000 (31%)]	Loss: 0.803838
Train Epoch: 1 [19200/60000 (32%)]	Loss: 0.706732
Train Epoch: 1 [19840/60000 (33%)]	Loss: 0.697531
Train Epoch: 1 [20480/60000 (34%)]	Loss: 0.748919
Train Epoch: 1 [21120/60000 (35%)]	Loss: 0.599384
Train Epoch: 1 [21760/60000 (36%)]	Loss: 0.873525
Train Epoch: 1 [22400/60000 (37%)]	Loss: 0.730187
Train Epoch: 1 [23040/60000 (38%)]	Loss: 0.780188
Train Epoch: 1 [23680/60000 (39%)]	Loss: 0.688149
Train Epoch: 1 [24320/60000 (41%)]	Loss: 0.578381
Train Epoch: 1 [24960/60000 (42%)]	Loss: 0.679022
Train Epoch: 1 [25600/60000 (43%)]	Loss: 0.597318
Train Epoch: 1 [26240/60000 (44%)]	Loss: 0.885319
Train Epoch: 1 [26880/60000 (45%)]	Loss: 0.586243
Train Epoch: 1 [27520/60000 (46%)]	Loss: 0.832649
Train Epoch: 1 [28160/60000 (47%)]	Loss: 0.541850
Train Epoch: 1 [28800/60000 (48%)]	Loss: 0.997807
Train Epoch: 1 [29440/60000 (49%)]	Loss: 0.680030
Train Epoch: 1 [30080/60000 (50%)]	Loss: 0.546325
Train Epoch: 1 [30720/60000 (51%)]	Loss: 0.812084
Train Epoch: 1 [31360/60000 (52%)]	Loss: 0.659495
Train Epoch: 1 [32000/60000 (53%)]	Loss: 0.880883
Train Epoch: 1 [32640/60000 (54%)]	Loss: 0.788217
Train Epoch: 1 [33280/60000 (55%)]	Loss: 0.810931
Train Epoch: 1 [33920/60000 (57%)]	Loss: 0.572216
Train Epoch: 1 [34560/60000 (58%)]	Loss: 0.481873
Train Epoch: 1 [35200/60000 (59%)]	Loss: 0.502513
Train Epoch: 1 [35840/60000 (60%)]	Loss: 0.410055
Train Epoch: 1 [36480/60000 (61%)]	Loss: 0.489813
Train Epoch: 1 [37120/60000 (62%)]	Loss: 0.507667
Train Epoch: 1 [37760/60000 (63%)]	Loss: 0.497175
Train Epoch: 1 [38400/60000 (64%)]	Loss: 0.433524
Train Epoch: 1 [39040/60000 (65%)]	Loss: 0.448550
Train Epoch: 1 [39680/60000 (66%)]	Loss: 0.451818
Train Epoch: 1 [40320/60000 (67%)]	Loss: 0.501129
Train Epoch: 1 [40960/60000 (68%)]	Loss: 0.605211
Train Epoch: 1 [41600/60000 (69%)]	Loss: 0.332027
Train Epoch: 1 [42240/60000 (70%)]	Loss: 0.483988
Train Epoch: 1 [42880/60000 (71%)]	Loss: 0.887791
Train Epoch: 1 [43520/60000 (72%)]	Loss: 0.273152
Train Epoch: 1 [44160/60000 (74%)]	Loss: 0.590317
Train Epoch: 1 [44800/60000 (75%)]	Loss: 0.459316
Train Epoch: 1 [45440/60000 (76%)]	Loss: 0.492083
Train Epoch: 1 [46080/60000 (77%)]	Loss: 0.477218
Train Epoch: 1 [46720/60000 (78%)]	Loss: 0.421333
Train Epoch: 1 [47360/60000 (79%)]	Loss: 0.576612
Train Epoch: 1 [48000/60000 (80%)]	Loss: 0.359032
Train Epoch: 1 [48640/60000 (81%)]	Loss: 0.434221
Train Epoch: 1 [49280/60000 (82%)]	Loss: 0.506898
Train Epoch: 1 [49920/60000 (83%)]	Loss: 0.342816
Train Epoch: 1 [50560/60000 (84%)]	Loss: 0.287471
Train Epoch: 1 [51200/60000 (85%)]	Loss: 0.250604
Train Epoch: 1 [51840/60000 (86%)]	Loss: 0.386828
Train Epoch: 1 [52480/60000 (87%)]	Loss: 0.331759
Train Epoch: 1 [53120/60000 (88%)]	Loss: 0.388772
Train Epoch: 1 [53760/60000 (90%)]	Loss: 0.509017
Train Epoch: 1 [54400/60000 (91%)]	Loss: 0.517717
Train Epoch: 1 [55040/60000 (92%)]	Loss: 0.393285
Train Epoch: 1 [55680/60000 (93%)]	Loss: 0.341990
Train Epoch: 1 [56320/60000 (94%)]	Loss: 0.434098
Train Epoch: 1 [56960/60000 (95%)]	Loss: 0.342924
Train Epoch: 1 [57600/60000 (96%)]	Loss: 0.617892
Train Epoch: 1 [58240/60000 (97%)]	Loss: 0.488426
Train Epoch: 1 [58880/60000 (98%)]	Loss: 0.356584
Train Epoch: 1 [59520/60000 (99%)]	Loss: 0.548745

Test set: Avg. loss: 0.1674, Accuracy: 9512/10000 (95%)

Train Epoch: 2 [0/60000 (0%)]	Loss: 0.315766
Train Epoch: 2 [640/60000 (1%)]	Loss: 0.468051
Train Epoch: 2 [1280/60000 (2%)]	Loss: 0.464737
Train Epoch: 2 [1920/60000 (3%)]	Loss: 0.357223
Train Epoch: 2 [2560/60000 (4%)]	Loss: 0.317739
Train Epoch: 2 [3200/60000 (5%)]	Loss: 0.684018
Train Epoch: 2 [3840/60000 (6%)]	Loss: 0.464855
Train Epoch: 2 [4480/60000 (7%)]	Loss: 0.420857
Train Epoch: 2 [5120/60000 (9%)]	Loss: 0.508732
Train Epoch: 2 [5760/60000 (10%)]	Loss: 0.318191
Train Epoch: 2 [6400/60000 (11%)]	Loss: 0.346217
Train Epoch: 2 [7040/60000 (12%)]	Loss: 0.559674
Train Epoch: 2 [7680/60000 (13%)]	Loss: 0.409686
Train Epoch: 2 [8320/60000 (14%)]	Loss: 0.535685
Train Epoch: 2 [8960/60000 (15%)]	Loss: 0.480220
Train Epoch: 2 [9600/60000 (16%)]	Loss: 0.519283
Train Epoch: 2 [10240/60000 (17%)]	Loss: 0.329807
Train Epoch: 2 [10880/60000 (18%)]	Loss: 0.393071
Train Epoch: 2 [11520/60000 (19%)]	Loss: 0.444064
Train Epoch: 2 [12160/60000 (20%)]	Loss: 0.533698
Train Epoch: 2 [12800/60000 (21%)]	Loss: 0.511128
Train Epoch: 2 [13440/60000 (22%)]	Loss: 0.346371
Train Epoch: 2 [14080/60000 (23%)]	Loss: 0.307755
Train Epoch: 2 [14720/60000 (25%)]	Loss: 0.536841
Train Epoch: 2 [15360/60000 (26%)]	Loss: 0.310219
Train Epoch: 2 [16000/60000 (27%)]	Loss: 0.493684
Train Epoch: 2 [16640/60000 (28%)]	Loss: 0.395575
Train Epoch: 2 [17280/60000 (29%)]	Loss: 0.252499
Train Epoch: 2 [17920/60000 (30%)]	Loss: 0.238181
Train Epoch: 2 [18560/60000 (31%)]	Loss: 0.323044
Train Epoch: 2 [19200/60000 (32%)]	Loss: 0.421634
Train Epoch: 2 [19840/60000 (33%)]	Loss: 0.409919
Train Epoch: 2 [20480/60000 (34%)]	Loss: 0.325331
Train Epoch: 2 [21120/60000 (35%)]	Loss: 0.396984
Train Epoch: 2 [21760/60000 (36%)]	Loss: 0.395612
Train Epoch: 2 [22400/60000 (37%)]	Loss: 0.378807
Train Epoch: 2 [23040/60000 (38%)]	Loss: 0.368943
Train Epoch: 2 [23680/60000 (39%)]	Loss: 0.525262
Train Epoch: 2 [24320/60000 (41%)]	Loss: 0.227805
Train Epoch: 2 [24960/60000 (42%)]	Loss: 0.278575
Train Epoch: 2 [25600/60000 (43%)]	Loss: 0.404145
Train Epoch: 2 [26240/60000 (44%)]	Loss: 0.467817
Train Epoch: 2 [26880/60000 (45%)]	Loss: 0.279880
Train Epoch: 2 [27520/60000 (46%)]	Loss: 0.539638
Train Epoch: 2 [28160/60000 (47%)]	Loss: 0.257472
Train Epoch: 2 [28800/60000 (48%)]	Loss: 0.220950
Train Epoch: 2 [29440/60000 (49%)]	Loss: 0.448459
Train Epoch: 2 [30080/60000 (50%)]	Loss: 0.280728
Train Epoch: 2 [30720/60000 (51%)]	Loss: 0.321694
Train Epoch: 2 [31360/60000 (52%)]	Loss: 0.234594
Train Epoch: 2 [32000/60000 (53%)]	Loss: 0.249877
Train Epoch: 2 [32640/60000 (54%)]	Loss: 0.443569
Train Epoch: 2 [33280/60000 (55%)]	Loss: 0.358985
Train Epoch: 2 [33920/60000 (57%)]	Loss: 0.350358
Train Epoch: 2 [34560/60000 (58%)]	Loss: 0.240932
Train Epoch: 2 [35200/60000 (59%)]	Loss: 0.400578
Train Epoch: 2 [35840/60000 (60%)]	Loss: 0.451478
Train Epoch: 2 [36480/60000 (61%)]	Loss: 0.366277
Train Epoch: 2 [37120/60000 (62%)]	Loss: 0.464619
Train Epoch: 2 [37760/60000 (63%)]	Loss: 0.219236
Train Epoch: 2 [38400/60000 (64%)]	Loss: 0.310383
Train Epoch: 2 [39040/60000 (65%)]	Loss: 0.318048
Train Epoch: 2 [39680/60000 (66%)]	Loss: 0.598020
Train Epoch: 2 [40320/60000 (67%)]	Loss: 0.248990
Train Epoch: 2 [40960/60000 (68%)]	Loss: 0.153452
Train Epoch: 2 [41600/60000 (69%)]	Loss: 0.155354
Train Epoch: 2 [42240/60000 (70%)]	Loss: 0.249885
Train Epoch: 2 [42880/60000 (71%)]	Loss: 0.198647
Train Epoch: 2 [43520/60000 (72%)]	Loss: 0.394697
Train Epoch: 2 [44160/60000 (74%)]	Loss: 0.450222
Train Epoch: 2 [44800/60000 (75%)]	Loss: 0.202541
Train Epoch: 2 [45440/60000 (76%)]	Loss: 0.268529
Train Epoch: 2 [46080/60000 (77%)]	Loss: 0.348173
Train Epoch: 2 [46720/60000 (78%)]	Loss: 0.359101
Train Epoch: 2 [47360/60000 (79%)]	Loss: 0.522160
Train Epoch: 2 [48000/60000 (80%)]	Loss: 0.488034
Train Epoch: 2 [48640/60000 (81%)]	Loss: 0.350620
Train Epoch: 2 [49280/60000 (82%)]	Loss: 0.523037
Train Epoch: 2 [49920/60000 (83%)]	Loss: 0.124925
Train Epoch: 2 [50560/60000 (84%)]	Loss: 0.299917
Train Epoch: 2 [51200/60000 (85%)]	Loss: 0.302186
Train Epoch: 2 [51840/60000 (86%)]	Loss: 0.323338
Train Epoch: 2 [52480/60000 (87%)]	Loss: 0.256383
Train Epoch: 2 [53120/60000 (88%)]	Loss: 0.313699
Train Epoch: 2 [53760/60000 (90%)]	Loss: 0.442143
Train Epoch: 2 [54400/60000 (91%)]	Loss: 0.234823
Train Epoch: 2 [55040/60000 (92%)]	Loss: 0.356765
Train Epoch: 2 [55680/60000 (93%)]	Loss: 0.425722
Train Epoch: 2 [56320/60000 (94%)]	Loss: 0.260159
Train Epoch: 2 [56960/60000 (95%)]	Loss: 0.307709
Train Epoch: 2 [57600/60000 (96%)]	Loss: 0.368729
Train Epoch: 2 [58240/60000 (97%)]	Loss: 0.554495
Train Epoch: 2 [58880/60000 (98%)]	Loss: 0.151344
Train Epoch: 2 [59520/60000 (99%)]	Loss: 0.332558

Test set: Avg. loss: 0.1095, Accuracy: 9664/10000 (97%)

Train Epoch: 3 [0/60000 (0%)]	Loss: 0.606901
Train Epoch: 3 [640/60000 (1%)]	Loss: 0.217843
Train Epoch: 3 [1280/60000 (2%)]	Loss: 0.225650
Train Epoch: 3 [1920/60000 (3%)]	Loss: 0.359506
Train Epoch: 3 [2560/60000 (4%)]	Loss: 0.380754
Train Epoch: 3 [3200/60000 (5%)]	Loss: 0.283387
Train Epoch: 3 [3840/60000 (6%)]	Loss: 0.318688
Train Epoch: 3 [4480/60000 (7%)]	Loss: 0.335678
Train Epoch: 3 [5120/60000 (9%)]	Loss: 0.157370
Train Epoch: 3 [5760/60000 (10%)]	Loss: 0.220007
Train Epoch: 3 [6400/60000 (11%)]	Loss: 0.291860
Train Epoch: 3 [7040/60000 (12%)]	Loss: 0.464692
Train Epoch: 3 [7680/60000 (13%)]	Loss: 0.229839
Train Epoch: 3 [8320/60000 (14%)]	Loss: 0.342332
Train Epoch: 3 [8960/60000 (15%)]	Loss: 0.313431
Train Epoch: 3 [9600/60000 (16%)]	Loss: 0.341073
Train Epoch: 3 [10240/60000 (17%)]	Loss: 0.208354
Train Epoch: 3 [10880/60000 (18%)]	Loss: 0.188568
Train Epoch: 3 [11520/60000 (19%)]	Loss: 0.213437
Train Epoch: 3 [12160/60000 (20%)]	Loss: 0.327615
Train Epoch: 3 [12800/60000 (21%)]	Loss: 0.240687
Train Epoch: 3 [13440/60000 (22%)]	Loss: 0.347590
Train Epoch: 3 [14080/60000 (23%)]	Loss: 0.367349
Train Epoch: 3 [14720/60000 (25%)]	Loss: 0.270027
Train Epoch: 3 [15360/60000 (26%)]	Loss: 0.549701
Train Epoch: 3 [16000/60000 (27%)]	Loss: 0.265491
Train Epoch: 3 [16640/60000 (28%)]	Loss: 0.264681
Train Epoch: 3 [17280/60000 (29%)]	Loss: 0.259617
Train Epoch: 3 [17920/60000 (30%)]	Loss: 0.193016
Train Epoch: 3 [18560/60000 (31%)]	Loss: 0.405925
Train Epoch: 3 [19200/60000 (32%)]	Loss: 0.230640
Train Epoch: 3 [19840/60000 (33%)]	Loss: 0.298929
Train Epoch: 3 [20480/60000 (34%)]	Loss: 0.508493
Train Epoch: 3 [21120/60000 (35%)]	Loss: 0.253897
Train Epoch: 3 [21760/60000 (36%)]	Loss: 0.201565
Train Epoch: 3 [22400/60000 (37%)]	Loss: 0.276484
Train Epoch: 3 [23040/60000 (38%)]	Loss: 0.447977
Train Epoch: 3 [23680/60000 (39%)]	Loss: 0.542686
Train Epoch: 3 [24320/60000 (41%)]	Loss: 0.340117
Train Epoch: 3 [24960/60000 (42%)]	Loss: 0.238209
Train Epoch: 3 [25600/60000 (43%)]	Loss: 0.151151
Train Epoch: 3 [26240/60000 (44%)]	Loss: 0.365213
Train Epoch: 3 [26880/60000 (45%)]	Loss: 0.362948
Train Epoch: 3 [27520/60000 (46%)]	Loss: 0.237704
Train Epoch: 3 [28160/60000 (47%)]	Loss: 0.398583
Train Epoch: 3 [28800/60000 (48%)]	Loss: 0.397255
Train Epoch: 3 [29440/60000 (49%)]	Loss: 0.262200
Train Epoch: 3 [30080/60000 (50%)]	Loss: 0.152108
Train Epoch: 3 [30720/60000 (51%)]	Loss: 0.372339
Train Epoch: 3 [31360/60000 (52%)]	Loss: 0.372472
Train Epoch: 3 [32000/60000 (53%)]	Loss: 0.259656
Train Epoch: 3 [32640/60000 (54%)]	Loss: 0.435441
Train Epoch: 3 [33280/60000 (55%)]	Loss: 0.378388
Train Epoch: 3 [33920/60000 (57%)]	Loss: 0.177515
Train Epoch: 3 [34560/60000 (58%)]	Loss: 0.283689
Train Epoch: 3 [35200/60000 (59%)]	Loss: 0.131174
Train Epoch: 3 [35840/60000 (60%)]	Loss: 0.171783
Train Epoch: 3 [36480/60000 (61%)]	Loss: 0.104585
Train Epoch: 3 [37120/60000 (62%)]	Loss: 0.389055
Train Epoch: 3 [37760/60000 (63%)]	Loss: 0.360526
Train Epoch: 3 [38400/60000 (64%)]	Loss: 0.259271
Train Epoch: 3 [39040/60000 (65%)]	Loss: 0.525336
Train Epoch: 3 [39680/60000 (66%)]	Loss: 0.196208
Train Epoch: 3 [40320/60000 (67%)]	Loss: 0.374461
Train Epoch: 3 [40960/60000 (68%)]	Loss: 0.216166
Train Epoch: 3 [41600/60000 (69%)]	Loss: 0.221857
Train Epoch: 3 [42240/60000 (70%)]	Loss: 0.223318
Train Epoch: 3 [42880/60000 (71%)]	Loss: 0.185287
Train Epoch: 3 [43520/60000 (72%)]	Loss: 0.382659
Train Epoch: 3 [44160/60000 (74%)]	Loss: 0.262684
Train Epoch: 3 [44800/60000 (75%)]	Loss: 0.388742
Train Epoch: 3 [45440/60000 (76%)]	Loss: 0.286496
Train Epoch: 3 [46080/60000 (77%)]	Loss: 0.301708
Train Epoch: 3 [46720/60000 (78%)]	Loss: 0.451360
Train Epoch: 3 [47360/60000 (79%)]	Loss: 0.279125
Train Epoch: 3 [48000/60000 (80%)]	Loss: 0.197034
Train Epoch: 3 [48640/60000 (81%)]	Loss: 0.240700
Train Epoch: 3 [49280/60000 (82%)]	Loss: 0.594597
Train Epoch: 3 [49920/60000 (83%)]	Loss: 0.108885
Train Epoch: 3 [50560/60000 (84%)]	Loss: 0.199601
Train Epoch: 3 [51200/60000 (85%)]	Loss: 0.184297
Train Epoch: 3 [51840/60000 (86%)]	Loss: 0.175885
Train Epoch: 3 [52480/60000 (87%)]	Loss: 0.365624
Train Epoch: 3 [53120/60000 (88%)]	Loss: 0.236363
Train Epoch: 3 [53760/60000 (90%)]	Loss: 0.508030
Train Epoch: 3 [54400/60000 (91%)]	Loss: 0.379145
Train Epoch: 3 [55040/60000 (92%)]	Loss: 0.304528
Train Epoch: 3 [55680/60000 (93%)]	Loss: 0.184158
Train Epoch: 3 [56320/60000 (94%)]	Loss: 0.323510
Train Epoch: 3 [56960/60000 (95%)]	Loss: 0.192909
Train Epoch: 3 [57600/60000 (96%)]	Loss: 0.275473
Train Epoch: 3 [58240/60000 (97%)]	Loss: 0.397780
Train Epoch: 3 [58880/60000 (98%)]	Loss: 0.159153
Train Epoch: 3 [59520/60000 (99%)]	Loss: 0.231473

Test set: Avg. loss: 0.0909, Accuracy: 9709/10000 (97%)
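
A side note on the loss: the model returns F.log_softmax outputs that are fed to F.nll_loss, which is numerically the same as applying F.cross_entropy (nn.CrossEntropyLoss) to the raw fc2 logits. A minimal sketch of that equivalence, not from the original post, using a local generator so the global training seed stays untouched:

# log_softmax + nll_loss == cross_entropy on raw logits
g = torch.Generator().manual_seed(0)
logits = torch.randn(4, 10, generator=g)       # stand-in for a batch of fc2 outputs
targets = torch.tensor([3, 1, 7, 0])           # stand-in labels
a = F.nll_loss(F.log_softmax(logits, dim=1), targets)
b = F.cross_entropy(logits, targets)
print(torch.allclose(a, b))  # True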
plt.figure(figsize=(15, 12))
plt.plot(train_counter, train_losses, color='blue')
plt.scatter(test_counter, test_losses, color='red')
plt.legend(['Train Loss', 'Test Loss'], loc='upper right')
plt.xlabel('number of training examples seen')
plt.ylabel('negative log likelihood loss')
plt.show()

[Figure: training loss (blue curve) and test loss (red points) versus number of training examples seen]

continued_network = Net()
# Bind the new optimizer to continued_network (not the old network) before loading its state
continued_optimizer = optim.SGD(continued_network.parameters(),
                                lr=learning_rate,
                                momentum=momentum)

network_state_dict = torch.load('./_data_set/MNIST/model.pth')
continued_network.load_state_dict(network_state_dict)
optimizer_state_dict = torch.load('./_data_set/MNIST/optimizer.pth')
continued_optimizer.load_state_dict(optimizer_state_dict)

# train()/test() still use the global network/optimizer, whose in-memory state matches
# the checkpoint just loaded, so this simply continues training from epoch 4 onwards
for i in range(3, 9):
    train(i)
    test()
    # the test after epoch i + 1 has seen (i + 1) * 60000 training examples
    test_counter.append((i + 1) * len(train_loader.dataset))


Train Epoch: 4 [0/60000 (0%)]	Loss: 0.180916
Train Epoch: 4 [640/60000 (1%)]	Loss: 0.188782
Train Epoch: 4 [1280/60000 (2%)]	Loss: 0.250786
Train Epoch: 4 [1920/60000 (3%)]	Loss: 0.185484
Train Epoch: 4 [2560/60000 (4%)]	Loss: 0.295455
Train Epoch: 4 [3200/60000 (5%)]	Loss: 0.172000
Train Epoch: 4 [3840/60000 (6%)]	Loss: 0.117650
Train Epoch: 4 [4480/60000 (7%)]	Loss: 0.423349
Train Epoch: 4 [5120/60000 (9%)]	Loss: 0.285250
Train Epoch: 4 [5760/60000 (10%)]	Loss: 0.360192
Train Epoch: 4 [6400/60000 (11%)]	Loss: 0.362748
Train Epoch: 4 [7040/60000 (12%)]	Loss: 0.292238
Train Epoch: 4 [7680/60000 (13%)]	Loss: 0.238687
Train Epoch: 4 [8320/60000 (14%)]	Loss: 0.150868
Train Epoch: 4 [8960/60000 (15%)]	Loss: 0.427452
Train Epoch: 4 [9600/60000 (16%)]	Loss: 0.230040
Train Epoch: 4 [10240/60000 (17%)]	Loss: 0.275437
Train Epoch: 4 [10880/60000 (18%)]	Loss: 0.158874
Train Epoch: 4 [11520/60000 (19%)]	Loss: 0.162058
Train Epoch: 4 [12160/60000 (20%)]	Loss: 0.177609
Train Epoch: 4 [12800/60000 (21%)]	Loss: 0.416331
Train Epoch: 4 [13440/60000 (22%)]	Loss: 0.255415
Train Epoch: 4 [14080/60000 (23%)]	Loss: 0.150464
Train Epoch: 4 [14720/60000 (25%)]	Loss: 0.285846
Train Epoch: 4 [15360/60000 (26%)]	Loss: 0.175966
Train Epoch: 4 [16000/60000 (27%)]	Loss: 0.385556
Train Epoch: 4 [16640/60000 (28%)]	Loss: 0.217486
Train Epoch: 4 [17280/60000 (29%)]	Loss: 0.243101
Train Epoch: 4 [17920/60000 (30%)]	Loss: 0.182876
Train Epoch: 4 [18560/60000 (31%)]	Loss: 0.094987
Train Epoch: 4 [19200/60000 (32%)]	Loss: 0.225338
Train Epoch: 4 [19840/60000 (33%)]	Loss: 0.145325
Train Epoch: 4 [20480/60000 (34%)]	Loss: 0.165757
Train Epoch: 4 [21120/60000 (35%)]	Loss: 0.218088
Train Epoch: 4 [21760/60000 (36%)]	Loss: 0.085633
Train Epoch: 4 [22400/60000 (37%)]	Loss: 0.122317
Train Epoch: 4 [23040/60000 (38%)]	Loss: 0.272857
Train Epoch: 4 [23680/60000 (39%)]	Loss: 0.155769
Train Epoch: 4 [24320/60000 (41%)]	Loss: 0.212894
Train Epoch: 4 [24960/60000 (42%)]	Loss: 0.117393
Train Epoch: 4 [25600/60000 (43%)]	Loss: 0.211204
Train Epoch: 4 [26240/60000 (44%)]	Loss: 0.071398
Train Epoch: 4 [26880/60000 (45%)]	Loss: 0.091613
Train Epoch: 4 [27520/60000 (46%)]	Loss: 0.267689
Train Epoch: 4 [28160/60000 (47%)]	Loss: 0.194783
Train Epoch: 4 [28800/60000 (48%)]	Loss: 0.207981
Train Epoch: 4 [29440/60000 (49%)]	Loss: 0.367123
Train Epoch: 4 [30080/60000 (50%)]	Loss: 0.179464
Train Epoch: 4 [30720/60000 (51%)]	Loss: 0.204890
Train Epoch: 4 [31360/60000 (52%)]	Loss: 0.219245
Train Epoch: 4 [32000/60000 (53%)]	Loss: 0.229217
Train Epoch: 4 [32640/60000 (54%)]	Loss: 0.139216
Train Epoch: 4 [33280/60000 (55%)]	Loss: 0.300938
Train Epoch: 4 [33920/60000 (57%)]	Loss: 0.133212
Train Epoch: 4 [34560/60000 (58%)]	Loss: 0.352936
Train Epoch: 4 [35200/60000 (59%)]	Loss: 0.153349
Train Epoch: 4 [35840/60000 (60%)]	Loss: 0.208317
Train Epoch: 4 [36480/60000 (61%)]	Loss: 0.164447
Train Epoch: 4 [37120/60000 (62%)]	Loss: 0.227367
Train Epoch: 4 [37760/60000 (63%)]	Loss: 0.241028
Train Epoch: 4 [38400/60000 (64%)]	Loss: 0.307894
Train Epoch: 4 [39040/60000 (65%)]	Loss: 0.461609
Train Epoch: 4 [39680/60000 (66%)]	Loss: 0.381326
Train Epoch: 4 [40320/60000 (67%)]	Loss: 0.135251
Train Epoch: 4 [40960/60000 (68%)]	Loss: 0.518683
Train Epoch: 4 [41600/60000 (69%)]	Loss: 0.210409
Train Epoch: 4 [42240/60000 (70%)]	Loss: 0.187576
Train Epoch: 4 [42880/60000 (71%)]	Loss: 0.115259
Train Epoch: 4 [43520/60000 (72%)]	Loss: 0.259883
Train Epoch: 4 [44160/60000 (74%)]	Loss: 0.177242
Train Epoch: 4 [44800/60000 (75%)]	Loss: 0.345635
Train Epoch: 4 [45440/60000 (76%)]	Loss: 0.213670
Train Epoch: 4 [46080/60000 (77%)]	Loss: 0.193606
Train Epoch: 4 [46720/60000 (78%)]	Loss: 0.173757
Train Epoch: 4 [47360/60000 (79%)]	Loss: 0.215879
Train Epoch: 4 [48000/60000 (80%)]	Loss: 0.171952
Train Epoch: 4 [48640/60000 (81%)]	Loss: 0.119270
Train Epoch: 4 [49280/60000 (82%)]	Loss: 0.324480
Train Epoch: 4 [49920/60000 (83%)]	Loss: 0.104771
Train Epoch: 4 [50560/60000 (84%)]	Loss: 0.163072
Train Epoch: 4 [51200/60000 (85%)]	Loss: 0.160245
Train Epoch: 4 [51840/60000 (86%)]	Loss: 0.249122
Train Epoch: 4 [52480/60000 (87%)]	Loss: 0.282949
Train Epoch: 4 [53120/60000 (88%)]	Loss: 0.152365
Train Epoch: 4 [53760/60000 (90%)]	Loss: 0.102036
Train Epoch: 4 [54400/60000 (91%)]	Loss: 0.162332
Train Epoch: 4 [55040/60000 (92%)]	Loss: 0.182912
Train Epoch: 4 [55680/60000 (93%)]	Loss: 0.468722
Train Epoch: 4 [56320/60000 (94%)]	Loss: 0.421446
Train Epoch: 4 [56960/60000 (95%)]	Loss: 0.254925
Train Epoch: 4 [57600/60000 (96%)]	Loss: 0.345340
Train Epoch: 4 [58240/60000 (97%)]	Loss: 0.293855
Train Epoch: 4 [58880/60000 (98%)]	Loss: 0.200609
Train Epoch: 4 [59520/60000 (99%)]	Loss: 0.045524

Test set: Avg. loss: 0.0696, Accuracy: 9772/10000 (98%)

Train Epoch: 5 [0/60000 (0%)]	Loss: 0.270343
Train Epoch: 5 [640/60000 (1%)]	Loss: 0.275511
Train Epoch: 5 [1280/60000 (2%)]	Loss: 0.209185
Train Epoch: 5 [1920/60000 (3%)]	Loss: 0.172124
Train Epoch: 5 [2560/60000 (4%)]	Loss: 0.176122
Train Epoch: 5 [3200/60000 (5%)]	Loss: 0.469673
Train Epoch: 5 [3840/60000 (6%)]	Loss: 0.113855
Train Epoch: 5 [4480/60000 (7%)]	Loss: 0.241289
Train Epoch: 5 [5120/60000 (9%)]	Loss: 0.123443
Train Epoch: 5 [5760/60000 (10%)]	Loss: 0.153661
Train Epoch: 5 [6400/60000 (11%)]	Loss: 0.069777
Train Epoch: 5 [7040/60000 (12%)]	Loss: 0.372212
Train Epoch: 5 [7680/60000 (13%)]	Loss: 0.158761
Train Epoch: 5 [8320/60000 (14%)]	Loss: 0.231457
Train Epoch: 5 [8960/60000 (15%)]	Loss: 0.235761
Train Epoch: 5 [9600/60000 (16%)]	Loss: 0.208866
Train Epoch: 5 [10240/60000 (17%)]	Loss: 0.511828
Train Epoch: 5 [10880/60000 (18%)]	Loss: 0.333603
Train Epoch: 5 [11520/60000 (19%)]	Loss: 0.254337
Train Epoch: 5 [12160/60000 (20%)]	Loss: 0.260536
Train Epoch: 5 [12800/60000 (21%)]	Loss: 0.263866
Train Epoch: 5 [13440/60000 (22%)]	Loss: 0.483610
Train Epoch: 5 [14080/60000 (23%)]	Loss: 0.178215
Train Epoch: 5 [14720/60000 (25%)]	Loss: 0.137731
Train Epoch: 5 [15360/60000 (26%)]	Loss: 0.177714
Train Epoch: 5 [16000/60000 (27%)]	Loss: 0.132543
Train Epoch: 5 [16640/60000 (28%)]	Loss: 0.146177
Train Epoch: 5 [17280/60000 (29%)]	Loss: 0.398970
Train Epoch: 5 [17920/60000 (30%)]	Loss: 0.120811
Train Epoch: 5 [18560/60000 (31%)]	Loss: 0.147756
Train Epoch: 5 [19200/60000 (32%)]	Loss: 0.317457
Train Epoch: 5 [19840/60000 (33%)]	Loss: 0.310727
Train Epoch: 5 [20480/60000 (34%)]	Loss: 0.144468
Train Epoch: 5 [21120/60000 (35%)]	Loss: 0.515887
Train Epoch: 5 [21760/60000 (36%)]	Loss: 0.113731
Train Epoch: 5 [22400/60000 (37%)]	Loss: 0.326223
Train Epoch: 5 [23040/60000 (38%)]	Loss: 0.275539
Train Epoch: 5 [23680/60000 (39%)]	Loss: 0.261113
Train Epoch: 5 [24320/60000 (41%)]	Loss: 0.198070
Train Epoch: 5 [24960/60000 (42%)]	Loss: 0.380463
Train Epoch: 5 [25600/60000 (43%)]	Loss: 0.130256
Train Epoch: 5 [26240/60000 (44%)]	Loss: 0.234225
Train Epoch: 5 [26880/60000 (45%)]	Loss: 0.284392
Train Epoch: 5 [27520/60000 (46%)]	Loss: 0.168175
Train Epoch: 5 [28160/60000 (47%)]	Loss: 0.346845
Train Epoch: 5 [28800/60000 (48%)]	Loss: 0.206521
Train Epoch: 5 [29440/60000 (49%)]	Loss: 0.149305
Train Epoch: 5 [30080/60000 (50%)]	Loss: 0.248684
Train Epoch: 5 [30720/60000 (51%)]	Loss: 0.245173
Train Epoch: 5 [31360/60000 (52%)]	Loss: 0.201854
Train Epoch: 5 [32000/60000 (53%)]	Loss: 0.182244
Train Epoch: 5 [32640/60000 (54%)]	Loss: 0.160925
Train Epoch: 5 [33280/60000 (55%)]	Loss: 0.251155
Train Epoch: 5 [33920/60000 (57%)]	Loss: 0.137573
Train Epoch: 5 [34560/60000 (58%)]	Loss: 0.305431
Train Epoch: 5 [35200/60000 (59%)]	Loss: 0.260936
Train Epoch: 5 [35840/60000 (60%)]	Loss: 0.298855
Train Epoch: 5 [36480/60000 (61%)]	Loss: 0.209435
Train Epoch: 5 [37120/60000 (62%)]	Loss: 0.152100
Train Epoch: 5 [37760/60000 (63%)]	Loss: 0.322987
Train Epoch: 5 [38400/60000 (64%)]	Loss: 0.245070
Train Epoch: 5 [39040/60000 (65%)]	Loss: 0.079185
Train Epoch: 5 [39680/60000 (66%)]	Loss: 0.286104
Train Epoch: 5 [40320/60000 (67%)]	Loss: 0.134519
Train Epoch: 5 [40960/60000 (68%)]	Loss: 0.256614
Train Epoch: 5 [41600/60000 (69%)]	Loss: 0.230492
Train Epoch: 5 [42240/60000 (70%)]	Loss: 0.081532
Train Epoch: 5 [42880/60000 (71%)]	Loss: 0.174270
Train Epoch: 5 [43520/60000 (72%)]	Loss: 0.123896
Train Epoch: 5 [44160/60000 (74%)]	Loss: 0.126517
Train Epoch: 5 [44800/60000 (75%)]	Loss: 0.333949
Train Epoch: 5 [45440/60000 (76%)]	Loss: 0.369903
Train Epoch: 5 [46080/60000 (77%)]	Loss: 0.207948
Train Epoch: 5 [46720/60000 (78%)]	Loss: 0.080965
Train Epoch: 5 [47360/60000 (79%)]	Loss: 0.117344
Train Epoch: 5 [48000/60000 (80%)]	Loss: 0.419863
Train Epoch: 5 [48640/60000 (81%)]	Loss: 0.265613
Train Epoch: 5 [49280/60000 (82%)]	Loss: 0.256468
Train Epoch: 5 [49920/60000 (83%)]	Loss: 0.509733
Train Epoch: 5 [50560/60000 (84%)]	Loss: 0.144000
Train Epoch: 5 [51200/60000 (85%)]	Loss: 0.174094
Train Epoch: 5 [51840/60000 (86%)]	Loss: 0.335760
Train Epoch: 5 [52480/60000 (87%)]	Loss: 0.363208
Train Epoch: 5 [53120/60000 (88%)]	Loss: 0.199396
Train Epoch: 5 [53760/60000 (90%)]	Loss: 0.276358
Train Epoch: 5 [54400/60000 (91%)]	Loss: 0.063482
Train Epoch: 5 [55040/60000 (92%)]	Loss: 0.107777
Train Epoch: 5 [55680/60000 (93%)]	Loss: 0.273694
Train Epoch: 5 [56320/60000 (94%)]	Loss: 0.111036
Train Epoch: 5 [56960/60000 (95%)]	Loss: 0.242194
Train Epoch: 5 [57600/60000 (96%)]	Loss: 0.192879
Train Epoch: 5 [58240/60000 (97%)]	Loss: 0.140068
Train Epoch: 5 [58880/60000 (98%)]	Loss: 0.053538
Train Epoch: 5 [59520/60000 (99%)]	Loss: 0.282342

Test set: Avg. loss: 0.0648, Accuracy: 9794/10000 (98%)

Train Epoch: 6 [0/60000 (0%)]	Loss: 0.157271
Train Epoch: 6 [640/60000 (1%)]	Loss: 0.125083
Train Epoch: 6 [1280/60000 (2%)]	Loss: 0.324760
Train Epoch: 6 [1920/60000 (3%)]	Loss: 0.233483
Train Epoch: 6 [2560/60000 (4%)]	Loss: 0.305379
Train Epoch: 6 [3200/60000 (5%)]	Loss: 0.133055
Train Epoch: 6 [3840/60000 (6%)]	Loss: 0.308374
Train Epoch: 6 [4480/60000 (7%)]	Loss: 0.361411
Train Epoch: 6 [5120/60000 (9%)]	Loss: 0.263687
Train Epoch: 6 [5760/60000 (10%)]	Loss: 0.136558
Train Epoch: 6 [6400/60000 (11%)]	Loss: 0.187681
Train Epoch: 6 [7040/60000 (12%)]	Loss: 0.249220
Train Epoch: 6 [7680/60000 (13%)]	Loss: 0.206636
Train Epoch: 6 [8320/60000 (14%)]	Loss: 0.151353
Train Epoch: 6 [8960/60000 (15%)]	Loss: 0.195330
Train Epoch: 6 [9600/60000 (16%)]	Loss: 0.361656
Train Epoch: 6 [10240/60000 (17%)]	Loss: 0.366321
Train Epoch: 6 [10880/60000 (18%)]	Loss: 0.166131
Train Epoch: 6 [11520/60000 (19%)]	Loss: 0.075402
Train Epoch: 6 [12160/60000 (20%)]	Loss: 0.151120
Train Epoch: 6 [12800/60000 (21%)]	Loss: 0.115874
Train Epoch: 6 [13440/60000 (22%)]	Loss: 0.144778
Train Epoch: 6 [14080/60000 (23%)]	Loss: 0.063986
Train Epoch: 6 [14720/60000 (25%)]	Loss: 0.174532
Train Epoch: 6 [15360/60000 (26%)]	Loss: 0.261803
Train Epoch: 6 [16000/60000 (27%)]	Loss: 0.251123
Train Epoch: 6 [16640/60000 (28%)]	Loss: 0.145080
Train Epoch: 6 [17280/60000 (29%)]	Loss: 0.354523
Train Epoch: 6 [17920/60000 (30%)]	Loss: 0.187603
Train Epoch: 6 [18560/60000 (31%)]	Loss: 0.082521
Train Epoch: 6 [19200/60000 (32%)]	Loss: 0.154904
Train Epoch: 6 [19840/60000 (33%)]	Loss: 0.165881
Train Epoch: 6 [20480/60000 (34%)]	Loss: 0.115822
Train Epoch: 6 [21120/60000 (35%)]	Loss: 0.603689
Train Epoch: 6 [21760/60000 (36%)]	Loss: 0.164415
Train Epoch: 6 [22400/60000 (37%)]	Loss: 0.107442
Train Epoch: 6 [23040/60000 (38%)]	Loss: 0.196783
Train Epoch: 6 [23680/60000 (39%)]	Loss: 0.220013
Train Epoch: 6 [24320/60000 (41%)]	Loss: 0.207349
Train Epoch: 6 [24960/60000 (42%)]	Loss: 0.178466
Train Epoch: 6 [25600/60000 (43%)]	Loss: 0.149325
Train Epoch: 6 [26240/60000 (44%)]	Loss: 0.169802
Train Epoch: 6 [26880/60000 (45%)]	Loss: 0.205438
Train Epoch: 6 [27520/60000 (46%)]	Loss: 0.121831
Train Epoch: 6 [28160/60000 (47%)]	Loss: 0.169774
Train Epoch: 6 [28800/60000 (48%)]	Loss: 0.138925
Train Epoch: 6 [29440/60000 (49%)]	Loss: 0.183873
Train Epoch: 6 [30080/60000 (50%)]	Loss: 0.108401
Train Epoch: 6 [30720/60000 (51%)]	Loss: 0.087508
Train Epoch: 6 [31360/60000 (52%)]	Loss: 0.114378
Train Epoch: 6 [32000/60000 (53%)]	Loss: 0.124145
Train Epoch: 6 [32640/60000 (54%)]	Loss: 0.368228
Train Epoch: 6 [33280/60000 (55%)]	Loss: 0.353432
Train Epoch: 6 [33920/60000 (57%)]	Loss: 0.239896
Train Epoch: 6 [34560/60000 (58%)]	Loss: 0.341117
Train Epoch: 6 [35200/60000 (59%)]	Loss: 0.280842
Train Epoch: 6 [35840/60000 (60%)]	Loss: 0.303140
Train Epoch: 6 [36480/60000 (61%)]	Loss: 0.340119
Train Epoch: 6 [37120/60000 (62%)]	Loss: 0.165236
Train Epoch: 6 [37760/60000 (63%)]	Loss: 0.113621
Train Epoch: 6 [38400/60000 (64%)]	Loss: 0.313772
Train Epoch: 6 [39040/60000 (65%)]	Loss: 0.258854
Train Epoch: 6 [39680/60000 (66%)]	Loss: 0.158878
Train Epoch: 6 [40320/60000 (67%)]	Loss: 0.160731
Train Epoch: 6 [40960/60000 (68%)]	Loss: 0.166837
Train Epoch: 6 [41600/60000 (69%)]	Loss: 0.226772
Train Epoch: 6 [42240/60000 (70%)]	Loss: 0.160613
Train Epoch: 6 [42880/60000 (71%)]	Loss: 0.157473
Train Epoch: 6 [43520/60000 (72%)]	Loss: 0.279009
Train Epoch: 6 [44160/60000 (74%)]	Loss: 0.157638
Train Epoch: 6 [44800/60000 (75%)]	Loss: 0.202747
Train Epoch: 6 [45440/60000 (76%)]	Loss: 0.123593
Train Epoch: 6 [46080/60000 (77%)]	Loss: 0.164986
Train Epoch: 6 [46720/60000 (78%)]	Loss: 0.285264
Train Epoch: 6 [47360/60000 (79%)]	Loss: 0.211086
Train Epoch: 6 [48000/60000 (80%)]	Loss: 0.125422
Train Epoch: 6 [48640/60000 (81%)]	Loss: 0.076380
Train Epoch: 6 [49280/60000 (82%)]	Loss: 0.274299
Train Epoch: 6 [49920/60000 (83%)]	Loss: 0.128968
Train Epoch: 6 [50560/60000 (84%)]	Loss: 0.100312
Train Epoch: 6 [51200/60000 (85%)]	Loss: 0.173260
Train Epoch: 6 [51840/60000 (86%)]	Loss: 0.375576
Train Epoch: 6 [52480/60000 (87%)]	Loss: 0.359121
Train Epoch: 6 [53120/60000 (88%)]	Loss: 0.137199
Train Epoch: 6 [53760/60000 (90%)]	Loss: 0.280192
Train Epoch: 6 [54400/60000 (91%)]	Loss: 0.345643
Train Epoch: 6 [55040/60000 (92%)]	Loss: 0.213638
Train Epoch: 6 [55680/60000 (93%)]	Loss: 0.142949
Train Epoch: 6 [56320/60000 (94%)]	Loss: 0.188716
Train Epoch: 6 [56960/60000 (95%)]	Loss: 0.067698
Train Epoch: 6 [57600/60000 (96%)]	Loss: 0.227284
Train Epoch: 6 [58240/60000 (97%)]	Loss: 0.141719
Train Epoch: 6 [58880/60000 (98%)]	Loss: 0.166098
Train Epoch: 6 [59520/60000 (99%)]	Loss: 0.251494

Test set: Avg. loss: 0.0615, Accuracy: 9814/10000 (98%)

Train Epoch: 7 [0/60000 (0%)]	Loss: 0.091176
Train Epoch: 7 [640/60000 (1%)]	Loss: 0.269873
Train Epoch: 7 [1280/60000 (2%)]	Loss: 0.132484
Train Epoch: 7 [1920/60000 (3%)]	Loss: 0.181744
Train Epoch: 7 [2560/60000 (4%)]	Loss: 0.119089
Train Epoch: 7 [3200/60000 (5%)]	Loss: 0.172001
Train Epoch: 7 [3840/60000 (6%)]	Loss: 0.288416
Train Epoch: 7 [4480/60000 (7%)]	Loss: 0.282734
Train Epoch: 7 [5120/60000 (9%)]	Loss: 0.274102
Train Epoch: 7 [5760/60000 (10%)]	Loss: 0.224655
Train Epoch: 7 [6400/60000 (11%)]	Loss: 0.227839
Train Epoch: 7 [7040/60000 (12%)]	Loss: 0.202125
Train Epoch: 7 [7680/60000 (13%)]	Loss: 0.176258
Train Epoch: 7 [8320/60000 (14%)]	Loss: 0.198535
Train Epoch: 7 [8960/60000 (15%)]	Loss: 0.119568
Train Epoch: 7 [9600/60000 (16%)]	Loss: 0.204526
Train Epoch: 7 [10240/60000 (17%)]	Loss: 0.259647
Train Epoch: 7 [10880/60000 (18%)]	Loss: 0.216371
Train Epoch: 7 [11520/60000 (19%)]	Loss: 0.098282
Train Epoch: 7 [12160/60000 (20%)]	Loss: 0.156163
Train Epoch: 7 [12800/60000 (21%)]	Loss: 0.194898
Train Epoch: 7 [13440/60000 (22%)]	Loss: 0.172166
Train Epoch: 7 [14080/60000 (23%)]	Loss: 0.231052
Train Epoch: 7 [14720/60000 (25%)]	Loss: 0.135253
Train Epoch: 7 [15360/60000 (26%)]	Loss: 0.144452
Train Epoch: 7 [16000/60000 (27%)]	Loss: 0.162540
Train Epoch: 7 [16640/60000 (28%)]	Loss: 0.136042
Train Epoch: 7 [17280/60000 (29%)]	Loss: 0.236265
Train Epoch: 7 [17920/60000 (30%)]	Loss: 0.208569
Train Epoch: 7 [18560/60000 (31%)]	Loss: 0.158493
Train Epoch: 7 [19200/60000 (32%)]	Loss: 0.161767
Train Epoch: 7 [19840/60000 (33%)]	Loss: 0.066611
Train Epoch: 7 [20480/60000 (34%)]	Loss: 0.322254
Train Epoch: 7 [21120/60000 (35%)]	Loss: 0.233296
Train Epoch: 7 [21760/60000 (36%)]	Loss: 0.086176
Train Epoch: 7 [22400/60000 (37%)]	Loss: 0.125919
Train Epoch: 7 [23040/60000 (38%)]	Loss: 0.148241
Train Epoch: 7 [23680/60000 (39%)]	Loss: 0.108953
Train Epoch: 7 [24320/60000 (41%)]	Loss: 0.223288
Train Epoch: 7 [24960/60000 (42%)]	Loss: 0.243924
Train Epoch: 7 [25600/60000 (43%)]	Loss: 0.224430
Train Epoch: 7 [26240/60000 (44%)]	Loss: 0.230509
Train Epoch: 7 [26880/60000 (45%)]	Loss: 0.326988
Train Epoch: 7 [27520/60000 (46%)]	Loss: 0.229315
Train Epoch: 7 [28160/60000 (47%)]	Loss: 0.315817
Train Epoch: 7 [28800/60000 (48%)]	Loss: 0.137005
Train Epoch: 7 [29440/60000 (49%)]	Loss: 0.116640
Train Epoch: 7 [30080/60000 (50%)]	Loss: 0.167090
Train Epoch: 7 [30720/60000 (51%)]	Loss: 0.173242
Train Epoch: 7 [31360/60000 (52%)]	Loss: 0.191092
Train Epoch: 7 [32000/60000 (53%)]	Loss: 0.134305
Train Epoch: 7 [32640/60000 (54%)]	Loss: 0.138862
Train Epoch: 7 [33280/60000 (55%)]	Loss: 0.125831
Train Epoch: 7 [33920/60000 (57%)]	Loss: 0.129372
Train Epoch: 7 [34560/60000 (58%)]	Loss: 0.120971
Train Epoch: 7 [35200/60000 (59%)]	Loss: 0.268926
Train Epoch: 7 [35840/60000 (60%)]	Loss: 0.170120
Train Epoch: 7 [36480/60000 (61%)]	Loss: 0.455412
Train Epoch: 7 [37120/60000 (62%)]	Loss: 0.182606
Train Epoch: 7 [37760/60000 (63%)]	Loss: 0.085986
Train Epoch: 7 [38400/60000 (64%)]	Loss: 0.229982
Train Epoch: 7 [39040/60000 (65%)]	Loss: 0.165970
Train Epoch: 7 [39680/60000 (66%)]	Loss: 0.125090
Train Epoch: 7 [40320/60000 (67%)]	Loss: 0.149905
Train Epoch: 7 [40960/60000 (68%)]	Loss: 0.382724
Train Epoch: 7 [41600/60000 (69%)]	Loss: 0.155642
Train Epoch: 7 [42240/60000 (70%)]	Loss: 0.059763
Train Epoch: 7 [42880/60000 (71%)]	Loss: 0.305220
Train Epoch: 7 [43520/60000 (72%)]	Loss: 0.209007
Train Epoch: 7 [44160/60000 (74%)]	Loss: 0.309034
Train Epoch: 7 [44800/60000 (75%)]	Loss: 0.049236
Train Epoch: 7 [45440/60000 (76%)]	Loss: 0.305584
Train Epoch: 7 [46080/60000 (77%)]	Loss: 0.187595
Train Epoch: 7 [46720/60000 (78%)]	Loss: 0.117563
Train Epoch: 7 [47360/60000 (79%)]	Loss: 0.147911
Train Epoch: 7 [48000/60000 (80%)]	Loss: 0.233994
Train Epoch: 7 [48640/60000 (81%)]	Loss: 0.198659
Train Epoch: 7 [49280/60000 (82%)]	Loss: 0.302784
Train Epoch: 7 [49920/60000 (83%)]	Loss: 0.204405
Train Epoch: 7 [50560/60000 (84%)]	Loss: 0.053611
Train Epoch: 7 [51200/60000 (85%)]	Loss: 0.635807
Train Epoch: 7 [51840/60000 (86%)]	Loss: 0.101712
Train Epoch: 7 [52480/60000 (87%)]	Loss: 0.097702
Train Epoch: 7 [53120/60000 (88%)]	Loss: 0.257620
Train Epoch: 7 [53760/60000 (90%)]	Loss: 0.121270
Train Epoch: 7 [54400/60000 (91%)]	Loss: 0.191442
Train Epoch: 7 [55040/60000 (92%)]	Loss: 0.127309
Train Epoch: 7 [55680/60000 (93%)]	Loss: 0.148693
Train Epoch: 7 [56320/60000 (94%)]	Loss: 0.073504
Train Epoch: 7 [56960/60000 (95%)]	Loss: 0.184949
Train Epoch: 7 [57600/60000 (96%)]	Loss: 0.072307
Train Epoch: 7 [58240/60000 (97%)]	Loss: 0.296258
Train Epoch: 7 [58880/60000 (98%)]	Loss: 0.415298
Train Epoch: 7 [59520/60000 (99%)]	Loss: 0.309931

Test set: Avg. loss: 0.0576, Accuracy: 9823/10000 (98%)

Train Epoch: 8 [0/60000 (0%)]	Loss: 0.411129
Train Epoch: 8 [640/60000 (1%)]	Loss: 0.082472
Train Epoch: 8 [1280/60000 (2%)]	Loss: 0.232263
Train Epoch: 8 [1920/60000 (3%)]	Loss: 0.184606
Train Epoch: 8 [2560/60000 (4%)]	Loss: 0.082728
Train Epoch: 8 [3200/60000 (5%)]	Loss: 0.173117
Train Epoch: 8 [3840/60000 (6%)]	Loss: 0.111649
Train Epoch: 8 [4480/60000 (7%)]	Loss: 0.442360
Train Epoch: 8 [5120/60000 (9%)]	Loss: 0.227383
Train Epoch: 8 [5760/60000 (10%)]	Loss: 0.288936
Train Epoch: 8 [6400/60000 (11%)]	Loss: 0.227669
Train Epoch: 8 [7040/60000 (12%)]	Loss: 0.238741
Train Epoch: 8 [7680/60000 (13%)]	Loss: 0.276109
Train Epoch: 8 [8320/60000 (14%)]	Loss: 0.173926
Train Epoch: 8 [8960/60000 (15%)]	Loss: 0.167442
Train Epoch: 8 [9600/60000 (16%)]	Loss: 0.178728
Train Epoch: 8 [10240/60000 (17%)]	Loss: 0.181267
Train Epoch: 8 [10880/60000 (18%)]	Loss: 0.213851
Train Epoch: 8 [11520/60000 (19%)]	Loss: 0.130871
Train Epoch: 8 [12160/60000 (20%)]	Loss: 0.074143
Train Epoch: 8 [12800/60000 (21%)]	Loss: 0.090848
Train Epoch: 8 [13440/60000 (22%)]	Loss: 0.262355
Train Epoch: 8 [14080/60000 (23%)]	Loss: 0.112236
Train Epoch: 8 [14720/60000 (25%)]	Loss: 0.237863
Train Epoch: 8 [15360/60000 (26%)]	Loss: 0.128822
Train Epoch: 8 [16000/60000 (27%)]	Loss: 0.325940
Train Epoch: 8 [16640/60000 (28%)]	Loss: 0.059681
Train Epoch: 8 [17280/60000 (29%)]	Loss: 0.297807
Train Epoch: 8 [17920/60000 (30%)]	Loss: 0.193296
Train Epoch: 8 [18560/60000 (31%)]	Loss: 0.101481
Train Epoch: 8 [19200/60000 (32%)]	Loss: 0.319415
Train Epoch: 8 [19840/60000 (33%)]	Loss: 0.221697
Train Epoch: 8 [20480/60000 (34%)]	Loss: 0.128780
Train Epoch: 8 [21120/60000 (35%)]	Loss: 0.365089
Train Epoch: 8 [21760/60000 (36%)]	Loss: 0.114066
Train Epoch: 8 [22400/60000 (37%)]	Loss: 0.178602
Train Epoch: 8 [23040/60000 (38%)]	Loss: 0.198497
Train Epoch: 8 [23680/60000 (39%)]	Loss: 0.113458
Train Epoch: 8 [24320/60000 (41%)]	Loss: 0.084631
Train Epoch: 8 [24960/60000 (42%)]	Loss: 0.220370
Train Epoch: 8 [25600/60000 (43%)]	Loss: 0.112375
Train Epoch: 8 [26240/60000 (44%)]	Loss: 0.180620
Train Epoch: 8 [26880/60000 (45%)]	Loss: 0.174218
Train Epoch: 8 [27520/60000 (46%)]	Loss: 0.220530
Train Epoch: 8 [28160/60000 (47%)]	Loss: 0.322495
Train Epoch: 8 [28800/60000 (48%)]	Loss: 0.108935
Train Epoch: 8 [29440/60000 (49%)]	Loss: 0.302106
Train Epoch: 8 [30080/60000 (50%)]	Loss: 0.143926
Train Epoch: 8 [30720/60000 (51%)]	Loss: 0.132183
Train Epoch: 8 [31360/60000 (52%)]	Loss: 0.295584
Train Epoch: 8 [32000/60000 (53%)]	Loss: 0.153446
Train Epoch: 8 [32640/60000 (54%)]	Loss: 0.356207
Train Epoch: 8 [33280/60000 (55%)]	Loss: 0.049660
Train Epoch: 8 [33920/60000 (57%)]	Loss: 0.134523
Train Epoch: 8 [34560/60000 (58%)]	Loss: 0.107794
Train Epoch: 8 [35200/60000 (59%)]	Loss: 0.152601
Train Epoch: 8 [35840/60000 (60%)]	Loss: 0.183556
Train Epoch: 8 [36480/60000 (61%)]	Loss: 0.139419
Train Epoch: 8 [37120/60000 (62%)]	Loss: 0.217243
Train Epoch: 8 [37760/60000 (63%)]	Loss: 0.067120
Train Epoch: 8 [38400/60000 (64%)]	Loss: 0.140370
Train Epoch: 8 [39040/60000 (65%)]	Loss: 0.118206
Train Epoch: 8 [39680/60000 (66%)]	Loss: 0.068067
Train Epoch: 8 [40320/60000 (67%)]	Loss: 0.390102
Train Epoch: 8 [40960/60000 (68%)]	Loss: 0.362602
Train Epoch: 8 [41600/60000 (69%)]	Loss: 0.151053
Train Epoch: 8 [42240/60000 (70%)]	Loss: 0.329660
Train Epoch: 8 [42880/60000 (71%)]	Loss: 0.030132
Train Epoch: 8 [43520/60000 (72%)]	Loss: 0.214005
Train Epoch: 8 [44160/60000 (74%)]	Loss: 0.177053
Train Epoch: 8 [44800/60000 (75%)]	Loss: 0.270380
Train Epoch: 8 [45440/60000 (76%)]	Loss: 0.160427
Train Epoch: 8 [46080/60000 (77%)]	Loss: 0.152326
Train Epoch: 8 [46720/60000 (78%)]	Loss: 0.173051
Train Epoch: 8 [47360/60000 (79%)]	Loss: 0.213168
Train Epoch: 8 [48000/60000 (80%)]	Loss: 0.148429
Train Epoch: 8 [48640/60000 (81%)]	Loss: 0.179701
Train Epoch: 8 [49280/60000 (82%)]	Loss: 0.115524
Train Epoch: 8 [49920/60000 (83%)]	Loss: 0.203157
Train Epoch: 8 [50560/60000 (84%)]	Loss: 0.092355
Train Epoch: 8 [51200/60000 (85%)]	Loss: 0.241119
Train Epoch: 8 [51840/60000 (86%)]	Loss: 0.214614
Train Epoch: 8 [52480/60000 (87%)]	Loss: 0.133498
Train Epoch: 8 [53120/60000 (88%)]	Loss: 0.139286
Train Epoch: 8 [53760/60000 (90%)]	Loss: 0.271059
Train Epoch: 8 [54400/60000 (91%)]	Loss: 0.126945
Train Epoch: 8 [55040/60000 (92%)]	Loss: 0.098850
Train Epoch: 8 [55680/60000 (93%)]	Loss: 0.224279
Train Epoch: 8 [56320/60000 (94%)]	Loss: 0.166734
Train Epoch: 8 [56960/60000 (95%)]	Loss: 0.143847
Train Epoch: 8 [57600/60000 (96%)]	Loss: 0.146182
Train Epoch: 8 [58240/60000 (97%)]	Loss: 0.226856
Train Epoch: 8 [58880/60000 (98%)]	Loss: 0.049229
Train Epoch: 8 [59520/60000 (99%)]	Loss: 0.534476

Test set: Avg. loss: 0.0554, Accuracy: 9817/10000 (98%)

Train Epoch: 9 [0/60000 (0%)]	Loss: 0.054686
Train Epoch: 9 [640/60000 (1%)]	Loss: 0.235064
Train Epoch: 9 [1280/60000 (2%)]	Loss: 0.106654
Train Epoch: 9 [1920/60000 (3%)]	Loss: 0.234180
Train Epoch: 9 [2560/60000 (4%)]	Loss: 0.198757
Train Epoch: 9 [3200/60000 (5%)]	Loss: 0.162267
Train Epoch: 9 [3840/60000 (6%)]	Loss: 0.271285
Train Epoch: 9 [4480/60000 (7%)]	Loss: 0.080265
Train Epoch: 9 [5120/60000 (9%)]	Loss: 0.122332
Train Epoch: 9 [5760/60000 (10%)]	Loss: 0.217631
Train Epoch: 9 [6400/60000 (11%)]	Loss: 0.185639
Train Epoch: 9 [7040/60000 (12%)]	Loss: 0.168876
Train Epoch: 9 [7680/60000 (13%)]	Loss: 0.112571
Train Epoch: 9 [8320/60000 (14%)]	Loss: 0.125877
Train Epoch: 9 [8960/60000 (15%)]	Loss: 0.188810
Train Epoch: 9 [9600/60000 (16%)]	Loss: 0.105145
Train Epoch: 9 [10240/60000 (17%)]	Loss: 0.188370
Train Epoch: 9 [10880/60000 (18%)]	Loss: 0.117349
Train Epoch: 9 [11520/60000 (19%)]	Loss: 0.069228
Train Epoch: 9 [12160/60000 (20%)]	Loss: 0.162447
Train Epoch: 9 [12800/60000 (21%)]	Loss: 0.108819
Train Epoch: 9 [13440/60000 (22%)]	Loss: 0.263229
Train Epoch: 9 [14080/60000 (23%)]	Loss: 0.292624
Train Epoch: 9 [14720/60000 (25%)]	Loss: 0.248468
Train Epoch: 9 [15360/60000 (26%)]	Loss: 0.119649
Train Epoch: 9 [16000/60000 (27%)]	Loss: 0.109907
Train Epoch: 9 [16640/60000 (28%)]	Loss: 0.580101
Train Epoch: 9 [17280/60000 (29%)]	Loss: 0.177047
Train Epoch: 9 [17920/60000 (30%)]	Loss: 0.178420
Train Epoch: 9 [18560/60000 (31%)]	Loss: 0.221444
Train Epoch: 9 [19200/60000 (32%)]	Loss: 0.133854
Train Epoch: 9 [19840/60000 (33%)]	Loss: 0.282360
Train Epoch: 9 [20480/60000 (34%)]	Loss: 0.136910
Train Epoch: 9 [21120/60000 (35%)]	Loss: 0.280332
Train Epoch: 9 [21760/60000 (36%)]	Loss: 0.174279
Train Epoch: 9 [22400/60000 (37%)]	Loss: 0.083278
Train Epoch: 9 [23040/60000 (38%)]	Loss: 0.188579
Train Epoch: 9 [23680/60000 (39%)]	Loss: 0.102640
Train Epoch: 9 [24320/60000 (41%)]	Loss: 0.163085
Train Epoch: 9 [24960/60000 (42%)]	Loss: 0.158967
Train Epoch: 9 [25600/60000 (43%)]	Loss: 0.192901
Train Epoch: 9 [26240/60000 (44%)]	Loss: 0.163242
Train Epoch: 9 [26880/60000 (45%)]	Loss: 0.157237
Train Epoch: 9 [27520/60000 (46%)]	Loss: 0.088420
Train Epoch: 9 [28160/60000 (47%)]	Loss: 0.094397
Train Epoch: 9 [28800/60000 (48%)]	Loss: 0.156636
Train Epoch: 9 [29440/60000 (49%)]	Loss: 0.150083
Train Epoch: 9 [30080/60000 (50%)]	Loss: 0.077640
Train Epoch: 9 [30720/60000 (51%)]	Loss: 0.172498
Train Epoch: 9 [31360/60000 (52%)]	Loss: 0.228058
Train Epoch: 9 [32000/60000 (53%)]	Loss: 0.162368
Train Epoch: 9 [32640/60000 (54%)]	Loss: 0.278008
Train Epoch: 9 [33280/60000 (55%)]	Loss: 0.256715
Train Epoch: 9 [33920/60000 (57%)]	Loss: 0.196984
Train Epoch: 9 [34560/60000 (58%)]	Loss: 0.205926
Train Epoch: 9 [35200/60000 (59%)]	Loss: 0.103880
Train Epoch: 9 [35840/60000 (60%)]	Loss: 0.053308
Train Epoch: 9 [36480/60000 (61%)]	Loss: 0.117932
Train Epoch: 9 [37120/60000 (62%)]	Loss: 0.080018
Train Epoch: 9 [37760/60000 (63%)]	Loss: 0.423203
Train Epoch: 9 [38400/60000 (64%)]	Loss: 0.088841
Train Epoch: 9 [39040/60000 (65%)]	Loss: 0.157737
Train Epoch: 9 [39680/60000 (66%)]	Loss: 0.196193
Train Epoch: 9 [40320/60000 (67%)]	Loss: 0.164245
Train Epoch: 9 [40960/60000 (68%)]	Loss: 0.199979
Train Epoch: 9 [41600/60000 (69%)]	Loss: 0.188702
Train Epoch: 9 [42240/60000 (70%)]	Loss: 0.199731
Train Epoch: 9 [42880/60000 (71%)]	Loss: 0.102235
Train Epoch: 9 [43520/60000 (72%)]	Loss: 0.087075
Train Epoch: 9 [44160/60000 (74%)]	Loss: 0.128069
Train Epoch: 9 [44800/60000 (75%)]	Loss: 0.148430
Train Epoch: 9 [45440/60000 (76%)]	Loss: 0.232641
Train Epoch: 9 [46080/60000 (77%)]	Loss: 0.082571
Train Epoch: 9 [46720/60000 (78%)]	Loss: 0.283418
Train Epoch: 9 [47360/60000 (79%)]	Loss: 0.233733
Train Epoch: 9 [48000/60000 (80%)]	Loss: 0.096537
Train Epoch: 9 [48640/60000 (81%)]	Loss: 0.109687
Train Epoch: 9 [49280/60000 (82%)]	Loss: 0.216523
Train Epoch: 9 [49920/60000 (83%)]	Loss: 0.181914
Train Epoch: 9 [50560/60000 (84%)]	Loss: 0.139312
Train Epoch: 9 [51200/60000 (85%)]	Loss: 0.238586
Train Epoch: 9 [51840/60000 (86%)]	Loss: 0.109351
Train Epoch: 9 [52480/60000 (87%)]	Loss: 0.076942
Train Epoch: 9 [53120/60000 (88%)]	Loss: 0.221999
Train Epoch: 9 [53760/60000 (90%)]	Loss: 0.178757
Train Epoch: 9 [54400/60000 (91%)]	Loss: 0.219890
Train Epoch: 9 [55040/60000 (92%)]	Loss: 0.136030
Train Epoch: 9 [55680/60000 (93%)]	Loss: 0.238617
Train Epoch: 9 [56320/60000 (94%)]	Loss: 0.195722
Train Epoch: 9 [56960/60000 (95%)]	Loss: 0.252661
Train Epoch: 9 [57600/60000 (96%)]	Loss: 0.149977
Train Epoch: 9 [58240/60000 (97%)]	Loss: 0.061803
Train Epoch: 9 [58880/60000 (98%)]	Loss: 0.105117
Train Epoch: 9 [59520/60000 (99%)]	Loss: 0.054359

Test set: Avg. loss: 0.0508, Accuracy: 9843/10000 (98%)
plt.figure(figsize=(15, 12))
plt.plot(train_counter, train_losses, color='blue')
plt.scatter(test_counter, test_losses, color='red')
plt.legend(['Train Loss', 'Test Loss'], loc='upper right')
plt.xlabel('number of training examples seen')
plt.ylabel('negative log likelihood loss')
plt.show()

[Figure: loss curves after continued training; training loss (blue curve) and test loss (red points) versus number of training examples seen]
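
Finally, as a sketch that is not in the original post (inference_net is a name added here for illustration), the checkpoint saved during training can be reloaded later and used for prediction on a batch from the test set:

# Reload the saved weights into a fresh model and predict a test batch
inference_net = Net()
inference_net.load_state_dict(torch.load('./_data_set/MNIST/model.pth'))
inference_net.eval()
with torch.no_grad():
    images, labels = next(iter(test_loader))
    preds = inference_net(images).argmax(dim=1)
print('predicted:', preds[:10].tolist())
print('actual:   ', labels[:10].tolist())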

---

References

[1]: 用 PyTorch 实现 MNIST 手写数字识别(非常详细)

[2]: PyTorch 深度学习入门与实战 2022 最简明易懂的 PyTorch 代码精讲 最新版本 PyTorch PyTorch 安装

[3]: PyTorch 中的 nn.Conv1d 与 nn.Conv2d