import torch
import torchvision
from torch import nn
from torch.utils.data import DataLoader
from torch.utils.tensorboard import SummaryWriter

# CIFAR10 datasets (the test split must use train=False)
train_data = torchvision.datasets.CIFAR10('data', train=True, download=True,
                                           transform=torchvision.transforms.ToTensor())
test_data = torchvision.datasets.CIFAR10('data', train=False, download=True,
                                          transform=torchvision.transforms.ToTensor())

train_loader = DataLoader(train_data, batch_size=64, shuffle=True)
test_loader = DataLoader(test_data, batch_size=64, shuffle=False)

train_size = len(train_data)
test_size = len(test_data)

# Simple CNN: three conv + max-pool stages, then two linear layers
class Tudui(nn.Module):
    def __init__(self):
        super().__init__()
        self.model = nn.Sequential(
            nn.Conv2d(3, 32, 5, padding=2),
            nn.MaxPool2d(2),
            nn.Conv2d(32, 32, 5, padding=2),
            nn.MaxPool2d(2),
            nn.Conv2d(32, 64, 5, padding=2),
            nn.MaxPool2d(2),
            nn.Flatten(),
            nn.Linear(1024, 64),
            nn.Linear(64, 10)
        )

    def forward(self, x):
        output = self.model(x)
        return output

tudui = Tudui()
tudui = tudui.cuda()

loss_func = nn.CrossEntropyLoss()
loss_func = loss_func.cuda()

optimizer = torch.optim.SGD(tudui.parameters(), lr=0.01)

epoch = 20
train_cnt = 0

writer = SummaryWriter("nn_model")

for i in range(epoch):
    print("---------- Epoch {} started ----------".format(i + 1))

    # Training loop
    tudui.train()
    train_loss = 0
    for data in train_loader:
        imgs, labels = data
        imgs = imgs.cuda()
        labels = labels.cuda()
        output = tudui(imgs)
        loss = loss_func(output, labels)

        optimizer.zero_grad()
        loss.backward()
        optimizer.step()

        train_loss += loss.item()
        train_cnt += 1
        if train_cnt % 100 == 0:
            print("Training step {}, loss: {}".format(train_cnt, loss.item()))
            writer.add_scalar('train_loss', loss.item(), train_cnt)

    # Evaluation loop
    tudui.eval()
    test_loss = 0
    acc = 0
    with torch.no_grad():
        for data in test_loader:
            imgs, labels = data
            imgs = imgs.cuda()
            labels = labels.cuda()
            output = tudui(imgs)
            loss = loss_func(output, labels)
            acc += (output.argmax(1) == labels).sum().item()
            test_loss += loss.item()

    print("Test loss after epoch {}: {}".format(i + 1, test_loss))
    print("Test accuracy after epoch {}: {}".format(i + 1, acc / test_size))
    writer.add_scalar("test_loss", test_loss, i)

    torch.save(tudui, 'vgg_{}.pth'.format(i))

writer.close()