Assignment (one with a slightly academic feel this time): run an accuracy ablation study of an Inception network on CIFAR-10, introducing a residual mechanism and a CBAM module and ablating each separately.

import torch
import torch.nn as nn
import torch.optim as optim
import torch.nn.functional as F
from torchvision import datasets, transforms
from torch.utils.data import DataLoader
import torchvision.models as models
import time
import copy

# Set the random seed for reproducibility
torch.manual_seed(42)

# Data preprocessing
transform_train = transforms.Compose([
    transforms.RandomCrop(32, padding=4),
    transforms.RandomHorizontalFlip(),
    transforms.ToTensor(),
    transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
])

transform_test = transforms.Compose([
    transforms.ToTensor(),
    transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
])

# Load the CIFAR-10 dataset
trainset = datasets.CIFAR10(root='./data', train=True,
                            download=True, transform=transform_train)
trainloader = DataLoader(trainset, batch_size=128,
                         shuffle=True, num_workers=2)

testset = datasets.CIFAR10(root='./data', train=False,
                           download=True, transform=transform_test)
testloader = DataLoader(testset, batch_size=100,
                        shuffle=False, num_workers=2)

# CBAM modules
class ChannelAttention(nn.Module):
    def __init__(self, in_channels, reduction_ratio=16):
        super(ChannelAttention, self).__init__()
        self.avg_pool = nn.AdaptiveAvgPool2d(1)
        self.max_pool = nn.AdaptiveMaxPool2d(1)
        self.fc = nn.Sequential(
            nn.Conv2d(in_channels, in_channels // reduction_ratio, 1, bias=False),
            nn.ReLU(),
            nn.Conv2d(in_channels // reduction_ratio, in_channels, 1, bias=False)
        )

    def forward(self, x):
        avg_out = self.fc(self.avg_pool(x))
        max_out = self.fc(self.max_pool(x))
        out = avg_out + max_out
        return torch.sigmoid(out)


class SpatialAttention(nn.Module):
    def __init__(self, kernel_size=7):
        super(SpatialAttention, self).__init__()
        self.conv = nn.Conv2d(2, 1, kernel_size, padding=kernel_size // 2, bias=False)

    def forward(self, x):
        avg_out = torch.mean(x, dim=1, keepdim=True)
        max_out, _ = torch.max(x, dim=1, keepdim=True)
        out = torch.cat([avg_out, max_out], dim=1)
        out = self.conv(out)
        return torch.sigmoid(out)


class CBAM(nn.Module):
    def __init__(self, in_channels, reduction_ratio=16, kernel_size=7):
        super(CBAM, self).__init__()
        self.channel_att = ChannelAttention(in_channels, reduction_ratio)
        self.spatial_att = SpatialAttention(kernel_size)

    def forward(self, x):
        x = x * self.channel_att(x)
        x = x * self.spatial_att(x)
        return x


# Inception module
class InceptionModule(nn.Module):
    def __init__(self, in_channels, ch1x1, ch3x3red, ch3x3, ch5x5red, ch5x5, pool_proj):
        super(InceptionModule, self).__init__()
        # 1x1 convolution branch
        self.branch1 = nn.Sequential(
            nn.Conv2d(in_channels, ch1x1, kernel_size=1),
            nn.BatchNorm2d(ch1x1),
            nn.ReLU(True),
        )
        # 1x1 convolution -> 3x3 convolution branch
        self.branch2 = nn.Sequential(
            nn.Conv2d(in_channels, ch3x3red, kernel_size=1),
            nn.BatchNorm2d(ch3x3red),
            nn.ReLU(True),
            nn.Conv2d(ch3x3red, ch3x3, kernel_size=3, padding=1),
            nn.BatchNorm2d(ch3x3),
            nn.ReLU(True),
        )
        # 1x1 convolution -> 5x5 convolution branch
        self.branch3 = nn.Sequential(
            nn.Conv2d(in_channels, ch5x5red, kernel_size=1),
            nn.BatchNorm2d(ch5x5red),
            nn.ReLU(True),
            nn.Conv2d(ch5x5red, ch5x5, kernel_size=5, padding=2),
            nn.BatchNorm2d(ch5x5),
            nn.ReLU(True),
        )
        # 3x3 max-pool -> 1x1 convolution branch
        self.branch4 = nn.Sequential(
            nn.MaxPool2d(kernel_size=3, stride=1, padding=1),
            nn.Conv2d(in_channels, pool_proj, kernel_size=1),
            nn.BatchNorm2d(pool_proj),
            nn.ReLU(True),
        )

    def forward(self, x):
        branch1 = self.branch1(x)
        branch2 = self.branch2(x)
        branch3 = self.branch3(x)
        branch4 = self.branch4(x)
        outputs = [branch1, branch2, branch3, branch4]
        return torch.cat(outputs, 1)
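# --- Added sanity check (not part of the original assignment code; safe to delete):
# --- verify that CBAM preserves the tensor shape and that InceptionModule concatenates
# --- its branches to ch1x1 + ch3x3 + ch5x5 + pool_proj channels (here 32 + 64 + 16 + 16 = 128).
_probe = torch.randn(2, 64, 32, 32)
assert CBAM(64)(_probe).shape == _probe.shape
assert InceptionModule(64, 32, 48, 64, 8, 16, 16)(_probe).shape == (2, 128, 32, 32)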
# Baseline Inception network
class BasicInception(nn.Module):
    def __init__(self, num_classes=10):
        super(BasicInception, self).__init__()
        self.conv1 = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1)
        self.bn1 = nn.BatchNorm2d(64)
        self.relu1 = nn.ReLU(True)
        self.inception1 = InceptionModule(64, 32, 48, 64, 8, 16, 16)
        self.pool1 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.inception2 = InceptionModule(128, 64, 64, 96, 16, 48, 32)
        self.pool2 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.inception3 = InceptionModule(240, 96, 48, 104, 8, 24, 32)
        self.global_pool = nn.AdaptiveAvgPool2d((1, 1))
        self.fc = nn.Linear(256, num_classes)

    def forward(self, x):
        x = self.relu1(self.bn1(self.conv1(x)))
        x = self.inception1(x)
        x = self.pool1(x)
        x = self.inception2(x)
        x = self.pool2(x)
        x = self.inception3(x)
        x = self.global_pool(x)
        x = x.view(x.size(0), -1)
        x = self.fc(x)
        return x


# Inception network with residual connections
class ResidualInception(nn.Module):
    def __init__(self, num_classes=10):
        super(ResidualInception, self).__init__()
        self.conv1 = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1)
        self.bn1 = nn.BatchNorm2d(64)
        self.relu1 = nn.ReLU(True)
        self.inception1 = InceptionModule(64, 32, 48, 64, 8, 16, 16)
        self.pool1 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.inception2 = InceptionModule(128, 64, 64, 96, 16, 48, 32)
        self.pool2 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.inception3 = InceptionModule(240, 96, 48, 104, 8, 24, 32)
        self.global_pool = nn.AdaptiveAvgPool2d((1, 1))
        self.fc = nn.Linear(256, num_classes)
        # 1x1 convolutions for the residual shortcuts (stride 2 matches the pooling)
        self.res_conv1 = nn.Conv2d(64, 128, kernel_size=1, stride=2)
        self.res_conv2 = nn.Conv2d(128, 240, kernel_size=1, stride=2)

    def forward(self, x):
        x = self.relu1(self.bn1(self.conv1(x)))  # 64 x 32 x 32
        # Take the shortcut after the stem and add it after pooling, so that the
        # channel count (64 -> 128) and spatial size (32 -> 16) both match.
        identity = self.res_conv1(x)             # 128 x 16 x 16
        x = self.inception1(x)                   # 128 x 32 x 32
        x = self.pool1(x)                        # 128 x 16 x 16
        x = F.relu(x + identity)

        identity = self.res_conv2(x)             # 240 x 8 x 8
        x = self.inception2(x)                   # 240 x 16 x 16
        x = self.pool2(x)                        # 240 x 8 x 8
        x = F.relu(x + identity)

        x = self.inception3(x)                   # 256 x 8 x 8
        x = self.global_pool(x)
        x = x.view(x.size(0), -1)
        x = self.fc(x)
        return x


# Inception network with CBAM modules
class CBAMInception(nn.Module):
    def __init__(self, num_classes=10):
        super(CBAMInception, self).__init__()
        self.conv1 = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1)
        self.bn1 = nn.BatchNorm2d(64)
        self.relu1 = nn.ReLU(True)
        self.inception1 = InceptionModule(64, 32, 48, 64, 8, 16, 16)
        self.cbam1 = CBAM(128)
        self.pool1 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.inception2 = InceptionModule(128, 64, 64, 96, 16, 48, 32)
        self.cbam2 = CBAM(240)
        self.pool2 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.inception3 = InceptionModule(240, 96, 48, 104, 8, 24, 32)
        self.cbam3 = CBAM(256)
        self.global_pool = nn.AdaptiveAvgPool2d((1, 1))
        self.fc = nn.Linear(256, num_classes)

    def forward(self, x):
        x = self.relu1(self.bn1(self.conv1(x)))
        x = self.inception1(x)
        x = self.cbam1(x)
        x = self.pool1(x)
        x = self.inception2(x)
        x = self.cbam2(x)
        x = self.pool2(x)
        x = self.inception3(x)
        x = self.cbam3(x)
        x = self.global_pool(x)
        x = x.view(x.size(0), -1)
        x = self.fc(x)
        return x
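# --- Added helper (not in the original assignment): report each variant's parameter
# --- count, so the ablation's accuracy differences can be read alongside model size.
def count_parameters(model):
    return sum(p.numel() for p in model.parameters() if p.requires_grad)

for _name, _cls in [('Basic', BasicInception),
                    ('Residual', ResidualInception),
                    ('CBAM', CBAMInception)]:
    print(f'{_name} Inception parameters: {count_parameters(_cls()):,}')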
# Training function
def train_model(model, criterion, optimizer, scheduler, num_epochs=25):
    since = time.time()
    best_model_wts = copy.deepcopy(model.state_dict())
    best_acc = 0.0

    for epoch in range(num_epochs):
        print(f'Epoch {epoch}/{num_epochs - 1}')
        print('-' * 10)

        model.train()  # training mode
        running_loss = 0.0
        running_corrects = 0

        # Iterate over the training data
        for inputs, labels in trainloader:
            inputs = inputs.to(device)
            labels = labels.to(device)

            # Zero the gradients
            optimizer.zero_grad()

            # Forward pass; track history only while training
            with torch.set_grad_enabled(True):
                outputs = model(inputs)
                _, preds = torch.max(outputs, 1)
                loss = criterion(outputs, labels)

                # Backward pass + optimization
                loss.backward()
                optimizer.step()

            # Statistics
            running_loss += loss.item() * inputs.size(0)
            running_corrects += torch.sum(preds == labels.data)

        scheduler.step()

        epoch_loss = running_loss / len(trainset)
        epoch_acc = running_corrects.double() / len(trainset)

        print(f'Train Loss: {epoch_loss:.4f} Acc: {epoch_acc:.4f}')

        # Deep-copy the best model so far
        if epoch_acc > best_acc:
            best_acc = epoch_acc
            best_model_wts = copy.deepcopy(model.state_dict())

        print()

    time_elapsed = time.time() - since
    print(f'Training complete in {time_elapsed // 60:.0f}m {time_elapsed % 60:.0f}s')
    print(f'Best train Acc: {best_acc:.4f}')

    # Load the best model weights
    model.load_state_dict(best_model_wts)
    return model


# Evaluation function
def evaluate_model(model):
    model.eval()
    correct = 0
    total = 0
    with torch.no_grad():
        for inputs, labels in testloader:
            inputs = inputs.to(device)
            labels = labels.to(device)
            outputs = model(inputs)
            _, predicted = torch.max(outputs, 1)
            total += labels.size(0)
            correct += (predicted == labels).sum().item()

    accuracy = 100 * correct / total
    print(f'Accuracy of the network on the 10000 test images: {accuracy:.2f}%')
    return accuracy


# Device setup
device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
print(f'Using device: {device}')

# Record the experiment results
results = {}

# Experiment 1: baseline Inception network
print('===== Experiment 1: baseline Inception network =====')
model_basic = BasicInception().to(device)
criterion = nn.CrossEntropyLoss()
optimizer = optim.SGD(model_basic.parameters(), lr=0.001, momentum=0.9)
scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=7, gamma=0.1)

model_basic = train_model(model_basic, criterion, optimizer, scheduler, num_epochs=10)
basic_accuracy = evaluate_model(model_basic)
results['Basic Inception'] = basic_accuracy

# Experiment 2: Inception network with residual connections
print('\n===== Experiment 2: Inception network with residual connections =====')
model_residual = ResidualInception().to(device)
criterion = nn.CrossEntropyLoss()
optimizer = optim.SGD(model_residual.parameters(), lr=0.001, momentum=0.9)
scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=7, gamma=0.1)

model_residual = train_model(model_residual, criterion, optimizer, scheduler, num_epochs=10)
residual_accuracy = evaluate_model(model_residual)
results['Residual Inception'] = residual_accuracy

# Experiment 3: Inception network with CBAM modules
print('\n===== Experiment 3: Inception network with CBAM modules =====')
model_cbam = CBAMInception().to(device)
criterion = nn.CrossEntropyLoss()
optimizer = optim.SGD(model_cbam.parameters(), lr=0.001, momentum=0.9)
scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=7, gamma=0.1)

model_cbam = train_model(model_cbam, criterion, optimizer, scheduler, num_epochs=10)
cbam_accuracy = evaluate_model(model_cbam)
results['CBAM Inception'] = cbam_accuracy

# Print the result comparison
print('\n===== Result comparison =====')
print('{:20} {:10}'.format('Model', 'Accuracy (%)'))
print('-' * 30)
for model, acc in results.items():
    print('{:20} {:10.2f}'.format(model, acc))
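A final note on reproducibility: torch.manual_seed(42) alone does not pin down the CUDA RNG, the Python/NumPy RNGs, or cuDNN's autotuned kernel choices, so repeated runs of the ablation can still differ slightly. Below is a minimal sketch of a stricter setup that could replace the single call near the top of the script; the helper name seed_everything is my own, not part of the original assignment.

import random
import numpy as np
import torch

def seed_everything(seed: int = 42) -> None:
    # Seed every RNG the training loop may touch.
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
    torch.cuda.manual_seed_all(seed)
    # cuDNN may otherwise pick non-deterministic kernels; trade speed for determinism.
    torch.backends.cudnn.deterministic = True
    torch.backends.cudnn.benchmark = False

seed_everything(42)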