### torch_logistic_regression

*   Hand-coded a quick logistic_regression.

```python
import torch
import torch.nn as nn
import torchvision
import torchvision.transforms as transforms

# Hyper-parameters
input_size = 28 * 28
num_classes = 10
num_epochs = 20
batch_size = 100
learning_rate = 0.001

# MNIST dataset
train_dataset = torchvision.datasets.MNIST(root='../../data',
                                           train=True,
                                           transform=transforms.ToTensor(),
                                           download=True)
test_dataset = torchvision.datasets.MNIST(root='../../data',
                                          train=False,
                                          transform=transforms.ToTensor())

train_loader = torch.utils.data.DataLoader(dataset=train_dataset,
                                           batch_size=batch_size,
                                           shuffle=True)
test_loader = torch.utils.data.DataLoader(dataset=test_dataset,
                                          batch_size=batch_size,
                                          shuffle=False)

# Logistic regression is just a single linear layer; the softmax lives in the loss
model = nn.Linear(input_size, num_classes)

# Loss and optimizer
# nn.CrossEntropyLoss() computes softmax internally
criterion = nn.CrossEntropyLoss()
# Tried Adam for the optimizer here; it works quite well and slightly beats SGD on this task
optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)

# Train the model
total_step = len(train_loader)
for epoch in range(num_epochs):
    for i, (images, labels) in enumerate(train_loader):
        # Reshape images to (batch_size, input_size)
        images = images.reshape(-1, input_size)

        # Forward pass
        outputs = model(images)
        loss = criterion(outputs, labels)

        # Backward and optimize
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()

        if (i + 1) % 100 == 0:
            print('Epoch [{}/{}], Step [{}/{}], Loss: {:.4f}'.format(
                epoch + 1, num_epochs, i + 1, total_step, loss.item()))

# Test the model; no gradients are needed during evaluation
with torch.no_grad():
    correct = 0
    total = 0
    for images, labels in test_loader:
        images = images.reshape(-1, input_size)
        outputs = model(images)
        # Take the class with the largest logit as the prediction
        _, predicted = torch.max(outputs, 1)
        total += labels.size(0)
        correct += (predicted == labels).sum().item()

    # Compute accuracy
    print('Accuracy of the model on the 10000 test images: {} %'.format(100 * correct / total))

# Save the model checkpoint
torch.save(model.state_dict(), 'model.ckpt')
```
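The comment above notes that `nn.CrossEntropyLoss()` computes softmax internally, which is why the model is a bare `nn.Linear` with no activation on top. A minimal sketch (with made-up logits and targets) checking that `CrossEntropyLoss` matches `LogSoftmax` followed by `NLLLoss`:

```python
import torch
import torch.nn as nn

logits = torch.randn(4, 10)           # fake batch: 4 samples, 10 classes
targets = torch.tensor([1, 5, 0, 9])  # fake integer class labels

# CrossEntropyLoss applied directly to raw logits
ce = nn.CrossEntropyLoss()(logits, targets)

# Equivalent two-step version: log-softmax, then negative log-likelihood
nll = nn.NLLLoss()(nn.LogSoftmax(dim=1)(logits), targets)

print(torch.allclose(ce, nll))  # True
```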
还没有评论,来说两句吧...
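The script ends by saving only the `state_dict`, not the whole module, so loading it back requires rebuilding the same architecture first. A sketch of reloading the checkpoint for inference, assuming the same `model.ckpt` path and layer shape as above (the random input tensor stands in for a real flattened MNIST image):

```python
import torch
import torch.nn as nn

# Rebuild the same architecture, then load the saved weights into it
model = nn.Linear(28 * 28, 10)
model.load_state_dict(torch.load('model.ckpt'))
model.eval()  # not strictly required for a bare Linear layer, but a good habit

# Predict on a single flattened image
x = torch.randn(1, 28 * 28)
with torch.no_grad():
    pred = model(x).argmax(dim=1)
print(pred)
```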