当前位置: 首页 > news >正文

环球易购招聘网站建设中山网上房地产

"""GoogLeNet (Inception v1) in PyTorch.

Background (translated from the original article): GoogLeNet (2014) improves
accuracy not only by making the network deeper — as AlexNet/VGG do, which
brings overfitting and vanishing/exploding gradients — but by making it
*wider* with the Inception module: several convolution/pooling branches run
in parallel and their feature maps are concatenated.  1x1 convolutions are
inserted in each branch to reduce channel depth and keep the parameter count
manageable.  Two auxiliary classifiers are attached to intermediate layers
during training only.
"""
import torch
import torch.nn as nn
import torch.nn.functional as F


class BasicConv2d(nn.Module):
    """A Conv2d followed by an in-place ReLU (the basic building block)."""

    def __init__(self, in_channels, out_channels, **kwargs):
        super(BasicConv2d, self).__init__()
        self.conv = nn.Conv2d(in_channels, out_channels, **kwargs)
        self.relu = nn.ReLU(inplace=True)

    def forward(self, x):
        x = self.conv(x)
        x = self.relu(x)
        return x


class Inception(nn.Module):
    """Inception module: four parallel branches concatenated on the channel dim.

    Branches: 1x1 conv | 1x1 -> 3x3 conv | 1x1 -> 5x5 conv | 3x3 maxpool -> 1x1 conv.
    The 1x1 "reduce" convolutions shrink channel depth before the expensive
    3x3/5x5 convolutions.  Padding keeps every branch's spatial size equal to
    the input's so the outputs can be concatenated.
    """

    def __init__(self, in_channels, ch1x1, ch3x3red, ch3x3, ch5x5red, ch5x5, pool_proj):
        super(Inception, self).__init__()
        self.branch1 = BasicConv2d(in_channels, ch1x1, kernel_size=1)
        self.branch2 = nn.Sequential(
            BasicConv2d(in_channels, ch3x3red, kernel_size=1),
            BasicConv2d(ch3x3red, ch3x3, kernel_size=3, padding=1)   # output size == input size
        )
        self.branch3 = nn.Sequential(
            BasicConv2d(in_channels, ch5x5red, kernel_size=1),
            BasicConv2d(ch5x5red, ch5x5, kernel_size=5, padding=2)   # output size == input size
        )
        self.branch4 = nn.Sequential(
            nn.MaxPool2d(kernel_size=3, stride=1, padding=1),
            BasicConv2d(in_channels, pool_proj, kernel_size=1)
        )

    def forward(self, x):
        branch1 = self.branch1(x)
        branch2 = self.branch2(x)
        branch3 = self.branch3(x)
        branch4 = self.branch4(x)
        outputs = [branch1, branch2, branch3, branch4]
        return torch.cat(outputs, 1)


class InceptionAux(nn.Module):
    """Auxiliary classifier head used during training only.

    NOTE(review): the hard-coded fc1 input (2048 = 128 * 4 * 4) assumes a
    14x14 input feature map (aux1: 512ch, aux2: 528ch), i.e. a 224x224
    network input — confirm before reusing with other input sizes.
    """

    def __init__(self, in_channels, num_classes):
        super(InceptionAux, self).__init__()
        self.averagePool = nn.AvgPool2d(kernel_size=5, stride=3)
        self.conv = BasicConv2d(in_channels, 128, kernel_size=1)  # output [batch, 128, 4, 4]
        self.fc1 = nn.Linear(2048, 1024)
        self.fc2 = nn.Linear(1024, num_classes)

    def forward(self, x):
        # aux1: N x 512 x 14 x 14, aux2: N x 528 x 14 x 14
        x = self.averagePool(x)
        # aux1: N x 512 x 4 x 4, aux2: N x 528 x 4 x 4
        x = self.conv(x)
        # N x 128 x 4 x 4
        x = torch.flatten(x, 1)
        x = F.dropout(x, 0.5, training=self.training)
        # N x 2048
        x = F.relu(self.fc1(x), inplace=True)
        x = F.dropout(x, 0.5, training=self.training)
        # N x 1024
        x = self.fc2(x)
        # N x num_classes
        return x


class GoogLeNet(nn.Module):
    """GoogLeNet backbone for 3 x 224 x 224 input.

    Args:
        num_classes: size of the final classification layer.
        aux_logits: if True, build the two auxiliary classifiers; in
            training mode forward() then returns (main, aux2, aux1).
        init_weights: if True, apply Kaiming init to convs and N(0, 0.01)
            to linear layers.
    """

    def __init__(self, num_classes=1000, aux_logits=True, init_weights=False):
        super(GoogLeNet, self).__init__()
        self.aux_logits = aux_logits

        self.conv1 = BasicConv2d(3, 64, kernel_size=7, stride=2, padding=3)
        self.maxpool1 = nn.MaxPool2d(3, stride=2, ceil_mode=True)
        self.conv2 = BasicConv2d(64, 64, kernel_size=1)
        self.conv3 = BasicConv2d(64, 192, kernel_size=3, padding=1)
        self.maxpool2 = nn.MaxPool2d(3, stride=2, ceil_mode=True)

        # Inception(in, ch1x1, ch3x3red, ch3x3, ch5x5red, ch5x5, pool_proj)
        self.inception3a = Inception(192, 64, 96, 128, 16, 32, 32)
        self.inception3b = Inception(256, 128, 128, 192, 32, 96, 64)
        self.maxpool3 = nn.MaxPool2d(3, stride=2, ceil_mode=True)

        self.inception4a = Inception(480, 192, 96, 208, 16, 48, 64)
        self.inception4b = Inception(512, 160, 112, 224, 24, 64, 64)
        self.inception4c = Inception(512, 128, 128, 256, 24, 64, 64)
        self.inception4d = Inception(512, 112, 144, 288, 32, 64, 64)
        self.inception4e = Inception(528, 256, 160, 320, 32, 128, 128)
        self.maxpool4 = nn.MaxPool2d(3, stride=2, ceil_mode=True)

        self.inception5a = Inception(832, 256, 160, 320, 32, 128, 128)
        self.inception5b = Inception(832, 384, 192, 384, 48, 128, 128)

        if self.aux_logits:
            self.aux1 = InceptionAux(512, num_classes)   # after inception4a
            self.aux2 = InceptionAux(528, num_classes)   # after inception4d

        self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
        self.dropout = nn.Dropout(0.4)
        self.fc = nn.Linear(1024, num_classes)
        if init_weights:
            self._initialize_weights()

    def forward(self, x):
        # N x 3 x 224 x 224
        x = self.conv1(x)
        # N x 64 x 112 x 112
        x = self.maxpool1(x)
        # N x 64 x 56 x 56
        x = self.conv2(x)
        # N x 64 x 56 x 56
        x = self.conv3(x)
        # N x 192 x 56 x 56
        x = self.maxpool2(x)

        # N x 192 x 28 x 28
        x = self.inception3a(x)
        # N x 256 x 28 x 28
        x = self.inception3b(x)
        # N x 480 x 28 x 28
        x = self.maxpool3(x)
        # N x 480 x 14 x 14
        x = self.inception4a(x)
        # N x 512 x 14 x 14
        if self.training and self.aux_logits:    # eval model skips this layer
            aux1 = self.aux1(x)

        x = self.inception4b(x)
        # N x 512 x 14 x 14
        x = self.inception4c(x)
        # N x 512 x 14 x 14
        x = self.inception4d(x)
        # N x 528 x 14 x 14
        if self.training and self.aux_logits:    # eval model skips this layer
            aux2 = self.aux2(x)

        x = self.inception4e(x)
        # N x 832 x 14 x 14
        x = self.maxpool4(x)
        # N x 832 x 7 x 7
        x = self.inception5a(x)
        # N x 832 x 7 x 7
        x = self.inception5b(x)
        # N x 1024 x 7 x 7

        x = self.avgpool(x)
        # N x 1024 x 1 x 1
        x = torch.flatten(x, 1)
        # N x 1024
        x = self.dropout(x)
        x = self.fc(x)
        # N x 1000 (num_classes)
        if self.training and self.aux_logits:    # eval model skips this layer
            return x, aux2, aux1
        return x

    def _initialize_weights(self):
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
                if m.bias is not None:
                    nn.init.constant_(m.bias, 0)
            elif isinstance(m, nn.Linear):
                nn.init.normal_(m.weight, 0, 0.01)
                nn.init.constant_(m.bias, 0)
http://www.dnsts.com.cn/news/155565.html

相关文章:

  • 石家庄站内换乘示意图wordpress管理员插件
  • 网站建设的案例wordpress get_pages 输出格式
  • 网站设计好 如何将本地网站发布手机网站优化公司
  • 洛阳高新区做网站公司淘宝店采用哪些方法做网站推广
  • 新网网站做网站需要的大图
  • 网站空间管理权限西安关键词排名推广
  • 网站建设为什么需要备案知乎seo
  • 网站开发的论文怎么写wordpress 过滤html
  • 高中男女做那个视频网站南京网站制作西安
  • vi设计网站运动康复网站建设与管理知识点
  • 网站如何加速深圳国际物流公司排名前十
  • 现在搜什么关键词能搜到网站做网站 租服务器吗
  • 马鞍山建设集团有限公司网站家装e站
  • 宁波网站推广方法专业杭州网站建设
  • 典型的四大综合门户网站手机网站的建设价格
  • 做衣服的网站新媒体seo培训
  • 网站建设的公司怎么收费做网站要用到的技术
  • 做看电视电影的网站赚钱重庆建设教育网站
  • 宽屏网站欣赏南宁论坛
  • 学校网站网页制作wordpress内容替换
  • 大形电商网站开发费用商业网站开发与设计
  • 西宁做网站_君博相约宁波做简单网站的
  • 茶叶企业网站建设可以做任务的网站
  • 摩洛哥网站后缀地方门户网站管理系统
  • 网站的物理结构招生门户网站建设方案
  • 河南南阳油田网站建设做网站多大上行速度
  • 优秀甜品网站上海鸿鹄设计公司
  • 网站图片上传不了怎么办效果图是怎么做出来的
  • 建设工程招标公告在哪个网站建盏公司最新消息
  • 做运营常用的网站舆情分析报告格式