# NOTE(review): scraped-site boilerplate removed (ad header: "remember the
# Changjiang Shuwu site address") — not part of the original source code.
# --- Training setup: data, model, loss, optimizer, LR schedule, logging ---

# Evaluation loader; shuffle=False keeps test-sample order deterministic.
testDataLoader = DataLoader(TestDataSet(), batch_size=batch_size, shuffle=False)

epoch_num = 200
lr = 0.001

# Model moved to the target device (presumably CUDA if available — device is
# defined elsewhere in this file).
net = VGGBaseSimpleS2().to(device)
print(net)

# Multi-class classification loss (expects raw logits + integer class labels).
loss_func = nn.CrossEntropyLoss()

# Optimizer. OCR artifact fixed: "torch.optiAdam" -> "torch.optim.Adam".
optimizer = torch.optim.Adam(net.parameters(), lr=lr)
# optimizer = torch.optim.SGD(net.parameters(), lr=lr, momentum=0.9, weight_decay=5e-4)

# Multiply the learning rate by 0.9 every 5 epochs.
# OCR artifact fixed: "torch.optilr_scheduler" -> "torch.optim.lr_scheduler".
scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=5, gamma=0.9)

# TensorBoard log directory (curly quotes from the scrape replaced with ASCII
# quotes; "ifnotos" split back into "if not os").
if not os.path.exists("logCNN"):
    os.mkdir("logCNN")
writer = tensorboardX.SummaryWriter("logCNN")
# --- Training loop (this chunk shows only the start of the epoch body) ---
# OCR artifacts fixed: "forepochinrange" / "fori,datainenumerate" split back
# into valid syntax; ")pu()" restored to ".cpu()"; indentation reconstructed.
for epoch in range(epoch_num):
    # Per-epoch running statistics: loss, correct count, and confusion-matrix
    # cells (tp / tn / fp / fn), reset at the start of every epoch.
    train_sum_loss = 0
    train_sum_correct = 0
    train_sum_fp = 0
    train_sum_fn = 0
    train_sum_tp = 0
    train_sum_tn = 0
    for i, data in enumerate(trainDataLoader):
        net.train()
        inputs, labels = data
        # Insert a channel axis ((N, H, W) -> (N, 1, H, W)) and cast to
        # float32 for the conv layers. NOTE(review): assumes single-channel
        # input — confirm against the dataset definition.
        inputs = inputs.unsqueeze(1).to(torch.float32)
        # CrossEntropyLoss requires int64 class indices.
        labels = labels.type(torch.LongTensor)
        inputs, labels = inputs.to(device), labels.to(device)

        # Standard forward / backward / update step.
        outputs = net(inputs)
        loss = loss_func(outputs, labels)
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()

        # Predicted class = argmax over the logit dimension.
        _, pred = torch.max(outputs.data, dim=1)
        # Number of correct predictions in this batch, moved to CPU.
        acc = pred.eq(labels.data).cpu().sum()
        # All-ones / all-zeros tensors, evidently for the tp/tn/fp/fn
        # bookkeeping that follows.
        one = torch.ones_like(labels)
        zero = torch.zeros_like(labels)
        # NOTE(review): the loop body is truncated at this chunk boundary;
        # the confusion-matrix updates and logging presumably follow —
        # confirm against the full source.
# NOTE(review): scraped-site boilerplate removed (pagination notice: "chapter
# not finished, tap next chapter / exit reader mode") — not part of the code.