LogisticRegression

LogisticRegression 逻辑斯蒂回归

PyTorch torchvision

torchvision工具包提供数据集

import torchvision

# MNIST dataset: handwritten digit recognition (10 classes of 28x28 grayscale images).
# train=True selects the 60k training split, train=False the 10k test split;
# download=True fetches the data into `root` if it is not already there.
train_set = torchvision.datasets.MNIST(root='./datast/minist',train=True,download=True)
test_set = torchvision.datasets.MNIST(root='./datast/minist',train=False,download=True)


# CIFAR-10 dataset: 10-class color images (cat, dog, ...).
train_set = torchvision.datasets.CIFAR10(root='./datast/cifar10',train=True,download=True)
# BUG FIX: the original assigned the train=False split to train_set as well,
# silently overwriting the CIFAR-10 training set; the test split goes in test_set.
test_set = torchvision.datasets.CIFAR10(root='./datast/cifar10',train=False,download=True)

基本概念

在线性模型的基础上,使用Sigmoid函数,将线性模型的结果压缩到[0,1]之间,使其拥有概率意义。

# -*- coding: UTF-8 -*-
import torch
import torch.nn as nn

# Toy training data: y = 1 only for x = 3, so the model must learn a
# decision boundary somewhere between x = 2 and x = 3.
x_data = torch.Tensor([[1.0],[2.0],[3.0]])
y_data = torch.Tensor([[0.],[0.],[1.]])


class LogisticRegressionModel(nn.Module):
    """Logistic regression: a 1-in/1-out linear layer squashed by sigmoid.

    The sigmoid compresses the linear model's output into (0, 1) so it
    can be read as the probability P(y = 1 | x).
    """

    def __init__(self):
        super(LogisticRegressionModel,self).__init__()
        self.linear = nn.Linear(1,1)  # one weight + one bias

    def forward(self,x):
        # Returns a tensor of per-sample probabilities, same leading shape as x.
        return torch.sigmoid(self.linear(x))


model = LogisticRegressionModel()
# Binary cross-entropy; reduction='sum' totals the loss over the batch
# instead of averaging. (Fixed typo: loss_fuction -> loss_function.)
loss_function = nn.BCELoss(reduction='sum')
optimizer = torch.optim.SGD(model.parameters(),lr=0.01)

# Plain full-batch gradient descent: forward, loss, backward, update.
for epoch in range(1000):
    y_pred = model(x_data)
    loss = loss_function(y_pred,y_data)

    optimizer.zero_grad()  # gradients accumulate by default; clear them first
    loss.backward()
    optimizer.step()

# Query the trained model at an unseen point x = 4 (should be near 1).
x_test = torch.Tensor([[4.0]])
y_test = model(x_test)
print('after training:',4,y_test.data.item())