当前位置: 首页 > news >正文

.net开发手机网站网站诚信认证电话销售

"""ResNet-50 backbone combined with a Feature Pyramid Network (FPN).

The outputs of ResNet ``layer1``..``layer4`` are merged top-down by an FPN
("Feature Pyramid Networks for Object Detection",
https://arxiv.org/abs/1612.03144) into four same-channel pyramid levels plus
an extra max-pooled level, strengthening the backbone for object detection.

NOTE(review): reconstructed from a scraped copy in which every ``=``, quote
and comparison operator had been stripped by the HTML-to-text conversion.
"""
import torch
from torch import Tensor  # noqa: F401  (kept from the original import block)
from collections import OrderedDict
import torch.nn.functional as F
from torch import nn
# torch.jit.annotations simply re-exports these typing names; import the
# stable equivalents directly (torch.jit.annotations is an internal module).
from typing import Tuple, List, Dict


class Bottleneck(nn.Module):
    """Standard ResNet bottleneck block: 1x1 reduce -> 3x3 -> 1x1 expand (x4)."""

    expansion = 4  # conv3 multiplies the channel count by this factor

    def __init__(self, in_channel, out_channel, stride=1, downsample=None, norm_layer=None):
        super(Bottleneck, self).__init__()
        if norm_layer is None:
            norm_layer = nn.BatchNorm2d
        # 1x1 conv: squeeze channels
        self.conv1 = nn.Conv2d(in_channels=in_channel, out_channels=out_channel,
                               kernel_size=(1, 1), stride=(1, 1), bias=False)
        self.bn1 = norm_layer(out_channel)
        # 3x3 conv: carries the (possible) spatial stride of the block
        self.conv2 = nn.Conv2d(in_channels=out_channel, out_channels=out_channel,
                               kernel_size=(3, 3), stride=(stride, stride),
                               bias=False, padding=(1, 1))
        self.bn2 = norm_layer(out_channel)
        # 1x1 conv: unsqueeze channels
        self.conv3 = nn.Conv2d(in_channels=out_channel,
                               out_channels=out_channel * self.expansion,
                               kernel_size=(1, 1), stride=(1, 1), bias=False)
        self.bn3 = norm_layer(out_channel * self.expansion)
        self.relu = nn.ReLU(inplace=True)
        self.downsample = downsample  # projection shortcut, or None for identity

    def forward(self, x):
        identity = x
        if self.downsample is not None:
            identity = self.downsample(x)

        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)

        out = self.conv2(out)
        out = self.bn2(out)
        out = self.relu(out)

        out = self.conv3(out)
        out = self.bn3(out)

        out += identity
        out = self.relu(out)

        return out


class ResNet(nn.Module):
    def __init__(self, block, blocks_num, num_classes=1000, include_top=True, norm_layer=None):
        """
        :param block: residual block class (e.g. Bottleneck)
        :param blocks_num: number of blocks per stage, e.g. [3, 4, 6, 3] for ResNet-50
        :param num_classes: classifier output size (only used when include_top)
        :param include_top: whether to append the avg-pool + fc classification head
        :param norm_layer: normalization layer factory (defaults to BatchNorm2d)
        """
        super(ResNet, self).__init__()
        if norm_layer is None:
            norm_layer = nn.BatchNorm2d
        self._norm_layer = norm_layer

        self.include_top = include_top
        self.in_channel = 64  # running channel count, updated by _make_layer

        self.conv1 = nn.Conv2d(in_channels=3, out_channels=self.in_channel,
                               kernel_size=(7, 7), stride=(2, 2),
                               padding=(3, 3), bias=False)
        self.bn1 = norm_layer(self.in_channel)
        self.relu = nn.ReLU(inplace=True)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.layer1 = self._make_layer(block, 64, blocks_num[0])
        self.layer2 = self._make_layer(block, 128, blocks_num[1], stride=2)
        self.layer3 = self._make_layer(block, 256, blocks_num[2], stride=2)
        self.layer4 = self._make_layer(block, 512, blocks_num[3], stride=2)
        if self.include_top:
            self.avgpool = nn.AdaptiveAvgPool2d((1, 1))  # output size (1, 1)
            self.fc = nn.Linear(512 * block.expansion, num_classes)

        # weight initialization
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')

    def _make_layer(self, block, channel, block_num, stride=1):
        norm_layer = self._norm_layer
        downsample = None
        # projection shortcut when the spatial size or channel count changes
        if stride != 1 or self.in_channel != channel * block.expansion:
            downsample = nn.Sequential(
                nn.Conv2d(self.in_channel, channel * block.expansion,
                          kernel_size=(1, 1), stride=(stride, stride), bias=False),
                norm_layer(channel * block.expansion))

        layers = []
        layers.append(block(self.in_channel, channel, downsample=downsample,
                            stride=stride, norm_layer=norm_layer))
        self.in_channel = channel * block.expansion

        for _ in range(1, block_num):
            layers.append(block(self.in_channel, channel, norm_layer=norm_layer))

        return nn.Sequential(*layers)

    def forward(self, x):
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.maxpool(x)

        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)

        if self.include_top:
            x = self.avgpool(x)
            x = torch.flatten(x, 1)
            x = self.fc(x)

        return x


class IntermediateLayerGetter(nn.ModuleDict):
    """Module wrapper that returns intermediate layers from a model.

    It has a strong assumption that the modules have been registered
    into the model in the same order as they are used.
    This means that one should **not** reuse the same nn.Module
    twice in the forward if you want this to work.

    Additionally, it is only able to query submodules that are directly
    assigned to the model. So if ``model`` is passed, ``model.feature1``
    can be returned, but not ``model.feature1.layer2``.

    Arguments:
        model (nn.Module): model on which we will extract the features
        return_layers (Dict[name, new_name]): a dict containing the names
            of the modules for which the activations will be returned as
            the key of the dict, and the value of the dict is the name
            of the returned activation (which the user can specify).
    """
    __annotations__ = {
        "return_layers": Dict[str, str],
    }

    def __init__(self, model, return_layers):
        if not set(return_layers).issubset([name for name, _ in model.named_children()]):
            raise ValueError("return_layers are not present in model")

        # e.g. {'layer1': '0', 'layer2': '1', 'layer3': '2', 'layer4': '3'}
        orig_return_layers = return_layers
        return_layers = {k: v for k, v in return_layers.items()}
        layers = OrderedDict()
        # Walk the children in registration order and keep everything up to
        # (and including) the last requested layer; later modules are dropped.
        for name, module in model.named_children():
            layers[name] = module
            if name in return_layers:
                del return_layers[name]
            if not return_layers:
                break

        super(IntermediateLayerGetter, self).__init__(layers)
        self.return_layers = orig_return_layers

    def forward(self, x):
        out = OrderedDict()
        # Run the retained submodules in order, capturing the outputs of
        # layer1, layer2, layer3 and layer4 under their new names.
        for name, module in self.named_children():
            x = module(x)
            if name in self.return_layers:
                out_name = self.return_layers[name]
                out[out_name] = x
        return out


class FeaturePyramidNetwork(nn.Module):
    """Module that adds an FPN on top of a set of feature maps.

    Based on "Feature Pyramid Network for Object Detection"
    <https://arxiv.org/abs/1612.03144>. The feature maps are currently
    supposed to be in increasing depth order. The input to the model is
    expected to be an OrderedDict[Tensor], containing the feature maps on
    top of which the FPN will be added.

    Arguments:
        in_channels_list (list[int]): number of channels for each feature map
            that is passed to the module
        out_channels (int): number of channels of the FPN representation
        extra_blocks (ExtraFPNBlock or None): if provided, extra operations
            will be performed. It is expected to take the fpn features, the
            original features and the names of the original features as input,
            and returns a new list of feature maps and their corresponding names
    """

    def __init__(self, in_channels_list, out_channels, extra_blocks=None):
        super(FeaturePyramidNetwork, self).__init__()
        # 1x1 convs that project each backbone map (layer1..4) to out_channels
        self.inner_blocks = nn.ModuleList()
        # 3x3 convs that turn each projected map into the final prediction map
        self.layer_blocks = nn.ModuleList()
        for in_channels in in_channels_list:
            if in_channels == 0:
                continue
            inner_block_module = nn.Conv2d(in_channels, out_channels, (1, 1))
            layer_block_module = nn.Conv2d(out_channels, out_channels, (3, 3), padding=(1, 1))
            self.inner_blocks.append(inner_block_module)
            self.layer_blocks.append(layer_block_module)

        # initialize parameters now to avoid modifying the initialization of top_blocks
        for m in self.children():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_uniform_(m.weight, a=1)
                nn.init.constant_(m.bias, 0)

        self.extra_blocks = extra_blocks

    def get_result_from_inner_blocks(self, x, idx):
        # type: (Tensor, int) -> Tensor
        """Equivalent to ``self.inner_blocks[idx](x)``; written as a loop
        because torchscript doesn't support ModuleList indexing yet."""
        num_blocks = len(self.inner_blocks)
        if idx < 0:
            idx += num_blocks
        i = 0
        out = x
        for module in self.inner_blocks:
            if i == idx:
                out = module(x)
            i += 1
        return out

    def get_result_from_layer_blocks(self, x, idx):
        # type: (Tensor, int) -> Tensor
        """Equivalent to ``self.layer_blocks[idx](x)``; written as a loop
        because torchscript doesn't support ModuleList indexing yet."""
        num_blocks = len(self.layer_blocks)
        if idx < 0:
            idx += num_blocks
        i = 0
        out = x
        for module in self.layer_blocks:
            if i == idx:
                out = module(x)
            i += 1
        return out

    def forward(self, x):
        # type: (Dict[str, Tensor]) -> Dict[str, Tensor]
        """Computes the FPN for a set of feature maps.

        Arguments:
            x (OrderedDict[Tensor]): feature maps for each feature level.

        Returns:
            results (OrderedDict[Tensor]): feature maps after FPN layers.
                They are ordered from highest resolution first.
        """
        # unpack OrderedDict into two lists for easier handling
        names = list(x.keys())
        x = list(x.values())

        # project layer4 (the deepest map) to out_channels ...
        last_inner = self.get_result_from_inner_blocks(x[-1], -1)
        # results holds one prediction map per pyramid level
        results = []
        # ... and produce its prediction map through the 3x3 conv
        results.append(self.get_result_from_layer_blocks(last_inner, -1))

        # top-down pathway over the remaining levels: layer3 -> layer2 -> layer1
        # (layer4 was handled above); each lateral 1x1 projection is summed with
        # the nearest-neighbour-upsampled deeper map
        for idx in range(len(x) - 2, -1, -1):
            inner_lateral = self.get_result_from_inner_blocks(x[idx], idx)
            feat_shape = inner_lateral.shape[-2:]
            inner_top_down = F.interpolate(last_inner, size=feat_shape, mode="nearest")
            last_inner = inner_lateral + inner_top_down
            results.insert(0, self.get_result_from_layer_blocks(last_inner, idx))

        # optionally derive an extra level (e.g. max-pool of the layer4 map)
        if self.extra_blocks is not None:
            results, names = self.extra_blocks(results, names)

        # make it back an OrderedDict
        out = OrderedDict([(k, v) for k, v in zip(names, results)])

        return out


class LastLevelMaxPool(torch.nn.Module):
    """Applies a stride-2 max_pool2d on top of the last feature map,
    adding one extra pyramid level named ``pool``."""

    def forward(self, x, names):
        # type: (List[Tensor], List[str]) -> Tuple[List[Tensor], List[str]]
        names.append("pool")
        x.append(F.max_pool2d(x[-1], 1, 2, 0))  # kernel 1, stride 2, padding 0
        return x, names


class BackboneWithFPN(nn.Module):
    """Adds an FPN on top of a model.

    Internally, it uses torchvision.models._utils.IntermediateLayerGetter to
    extract a submodel that returns the feature maps specified in
    return_layers. The same limitations of IntermediateLayerGetter apply here.

    Arguments:
        backbone (nn.Module)
        return_layers (Dict[name, new_name]): a dict containing the names
            of the modules for which the activations will be returned as
            the key of the dict, and the value of the dict is the name
            of the returned activation (which the user can specify).
        in_channels_list (List[int]): number of channels for each feature map
            that is returned, in the order they are present in the OrderedDict
        out_channels (int): number of channels in the FPN.

    Attributes:
        out_channels (int): the number of channels in the FPN
    """

    def __init__(self, backbone, return_layers, in_channels_list, out_channels):
        """
        :param backbone: feature-extracting network
        :param return_layers: mapping of backbone layer names to output names
        :param in_channels_list: channel count of each returned feature map
        :param out_channels: channel count of every FPN output level
        """
        super(BackboneWithFPN, self).__init__()
        # feature extractor returning an OrderedDict of intermediate maps
        self.body = IntermediateLayerGetter(backbone, return_layers=return_layers)
        self.fpn = FeaturePyramidNetwork(
            in_channels_list=in_channels_list,
            out_channels=out_channels,
            extra_blocks=LastLevelMaxPool(),
        )
        self.out_channels = out_channels

    def forward(self, x):
        x = self.body(x)
        x = self.fpn(x)
        return x


def resnet50_fpn_backbone():
    """Build a ResNet-50 + FPN backbone with layer1 and earlier frozen.

    Returns a BackboneWithFPN whose forward yields an OrderedDict of five
    256-channel feature maps named '0'..'3' (from layer1..layer4) plus 'pool'.
    """
    # FrozenBatchNorm2d behaves like BatchNorm2d but its parameters never
    # update; pass norm_layer=misc.FrozenBatchNorm2d to use it instead.
    resnet_backbone = ResNet(Bottleneck, [3, 4, 6, 3],
                             include_top=False)

    # freeze layer1 and everything before it: generic low-level features
    for name, parameter in resnet_backbone.named_parameters():
        if 'layer2' not in name and 'layer3' not in name and 'layer4' not in name:
            # frozen weights do not participate in training
            parameter.requires_grad_(False)

    # map backbone layer names to the FPN level names
    return_layers = {'layer1': '0', 'layer2': '1', 'layer3': '2', 'layer4': '3'}

    # in_channel is layer4's output channel count (2048)
    in_channels_stage2 = resnet_backbone.in_channel // 8
    in_channels_list = [
        in_channels_stage2,      # layer1 out_channel = 256
        in_channels_stage2 * 2,  # layer2 out_channel = 512
        in_channels_stage2 * 4,  # layer3 out_channel = 1024
        in_channels_stage2 * 8,  # layer4 out_channel = 2048
    ]
    out_channels = 256
    return BackboneWithFPN(resnet_backbone, return_layers, in_channels_list, out_channels)


if __name__ == '__main__':
    net = resnet50_fpn_backbone()
    x = torch.randn(1, 3, 224, 224)
    for key, value in net(x).items():
        print(key, value.shape)
http://www.hkea.cn/news/14405679/

相关文章:

  • 企业手机网站cms系统加强网站内容建设
  • 徐州市城乡建设局门户网站帝国cms更改网站ico
  • 班级网页设计图片淮安网站排名优化公司
  • 写论文的网站网站开发费用怎么做账
  • 网站服务器制作上海人才市场档案存放中心
  • 网站建设的色彩搭配wordpress安装不了 404
  • 备案号查询网站网址网站设计哪家比较好
  • 在淘宝做网站和网络公司做网站区别企业服务有哪些
  • 网站建设合同任使用cms建设网站安全吗
  • 做商城网站需要在北京注册公司吗旅游型网站的建设背景图片
  • 有哪些做课件的网站男通网站哪个好用
  • 网站建设的相关职位设计与绘制一个网站首页
  • 湖南茶叶品牌网站建设wordpress悬浮音乐播放器插件
  • icp备案添加网站磁力屋torrentkitty
  • 北京企业做网站报价莱芜网站快排
  • 打车网站开发济宁做网站
  • 做网站优化如何遍文章可以做微网站的第三方平台有哪些
  • 伊春网站推广青岛建设局网站首页
  • 网站开发验收申请报告wordpress内容查看
  • 长春网站上排名网站建设和推广话术
  • 网站建设公司网站源码如何免费申请自己的网站
  • dedeai网站最新网页素材及网站架构制作
  • 企业网站添加栏目网上买购物的软件有哪些
  • 峡江网站建设建设工程询价网站有哪些
  • 做app的网站有哪些泰安网页设计公司
  • html网页设计网站网站建设优化方法
  • 阿里云需要网站建设方案书网站点内页还是首页
  • 建设免费网站登录网址抚州的电子商务网站建设公司
  • ps网站导航怎么做韩国今天新闻
  • 上海校园兼职网站建设网站布局优化