您的位置:首页 > 其它

paddle学习之复写ResNet

2020-08-24 00:06 387 查看

ResNet的意义

神经网络并不是层数越多越好,由于层数增多且每层都会带来信息损失,因此很可能导致后层网络出现梯度消失的情况。残差网络的思想是利用一系列残差块,其中每一个残差块的输出都是由输入该残差块的数据以及在该残差块处理后的数据进行对应叠加后所得到的,这样就有效地避免了信息的损失。

实现

import paddle
from paddle import fluid
from paddle.fluid.dygraph.nn import Conv2D,Pool2D,Linear,BatchNorm
from paddle.fluid.layers import reshape,mean,accuracy,elementwise_add
from paddle.fluid.layer_helper import LayerHelper

class ConvwithBatchnorm(fluid.dygraph.Layer):
    """Conv2D followed by BatchNorm, with the activation applied after BN.

    Args:
        num_channels: number of input channels.
        num_filters: number of output channels (filters).
        filter_size: square kernel size; `(filter_size - 1) // 2` padding keeps
            spatial size unchanged for stride 1.
        act: activation name (e.g. "relu") applied by BatchNorm, or None.
        stride: conv stride; defaults to 1 so callers building 1x1 convs may
            omit it (the original signature had no default, so such calls raised
            TypeError).
    """
    def __init__(self, num_channels, num_filters, filter_size, act, stride=1):
        # BUG FIX: original did `super(ConvwithBatchnorm, self)` without
        # calling __init__(), so the Layer was never initialized.
        super(ConvwithBatchnorm, self).__init__()
        # Activation is deliberately left off the conv (act=None) and applied
        # once, after batch norm — the original applied `act` in both places.
        self.conv = Conv2D(num_channels=num_channels,
                           num_filters=num_filters,
                           filter_size=filter_size,
                           stride=stride,
                           padding=(filter_size - 1) // 2,
                           act=None)
        # BUG FIX: original wrote `Batchnorm(filter_size, ...)` — the class is
        # spelled BatchNorm and it must be sized by the channel count
        # (num_filters), not the kernel size.
        self.bn = BatchNorm(num_filters, act=act)

    def forward(self, inputs):
        # BUG FIX: original body referenced undefined name `x`.
        return self.bn(self.conv(inputs))

class BottleneckBlock(fluid.dygraph.Layer):
    """ResNet bottleneck residual block: 1x1 reduce -> 3x3 -> 1x1 expand (x4),
    summed with a shortcut path and passed through ReLU.

    Args:
        num_channels: channels of the block's input tensor.
        num_filters: bottleneck width; the block outputs `4 * num_filters`
            channels (exposed as `_num_channels_out` for the network builder).
        stride: stride of the middle 3x3 conv (2 for down-sampling stages).
        shortcut: True when the input already has `4 * num_filters` channels
            and matching spatial size, so the identity can be added directly;
            False adds a 1x1 projection conv on the shortcut path.
            (Renamed from `short_cut` to match the keyword the RestNet builder
            actually passes.)
    """
    def __init__(self, num_channels, num_filters, stride, shortcut=False):
        # BUG FIX: original never called __init__ on super().
        super(BottleneckBlock, self).__init__()
        # stride is passed explicitly everywhere; the original omitted it for
        # conv1/conv3 (TypeError against the no-default signature).
        self.conv1 = ConvwithBatchnorm(num_channels=num_channels,
                                       num_filters=num_filters,
                                       filter_size=1, act="relu", stride=1)
        self.conv2 = ConvwithBatchnorm(num_channels=num_filters,
                                       num_filters=num_filters,
                                       filter_size=3, act="relu", stride=stride)
        # No activation before the residual add — ReLU comes after the sum.
        self.conv3 = ConvwithBatchnorm(num_channels=num_filters,
                                       num_filters=4 * num_filters,
                                       filter_size=1, act=None, stride=1)
        if not shortcut:
            # BUG FIX: projection must consume the block INPUT
            # (num_channels, not num_filters), and `act=act` referenced an
            # undefined name — a projection shortcut uses no activation.
            self.short = ConvwithBatchnorm(num_channels=num_channels,
                                           num_filters=4 * num_filters,
                                           filter_size=1, act=None,
                                           stride=stride)
        self.shortcut = shortcut
        # Read by the network builder to thread channel counts between blocks
        # (the original never set it, so the builder crashed).
        self._num_channels_out = 4 * num_filters

    def forward(self, x):
        out = self.conv3(self.conv2(self.conv1(x)))
        # BUG FIX: identity branch referenced undefined `Input`.
        short = x if self.shortcut else self.short(x)
        y = elementwise_add(out, short)
        # Apply ReLU after the residual addition.
        layer_helper = LayerHelper(self.full_name(), act='relu')
        return layer_helper.append_activation(y)

class RestNet(fluid.dygraph.Layer):
    """ResNet-50/101/152 built from BottleneckBlock, ending in a linear head.

    Args:
        layers: network depth; must be one of 50, 101, 152.
        class_dim: output dimension of the final Linear layer.

    Raises:
        AssertionError: if `layers` is not a supported depth.
    """
    def __init__(self, layers=50, class_dim=1):
        super(RestNet, self).__init__()
        import math  # used once below for the classifier init scale

        supported_layers = [50, 101, 152]
        assert layers in supported_layers, \
            "supported layers are {} but input layer is {}".format(supported_layers, layers)
        num_filters = [64, 128, 256, 512]
        # Blocks per stage for each supported depth. The assert above already
        # guarantees the key exists, so the original's bare try/except (which
        # swallowed the error and left `depth` undefined) is removed.
        layer_accordingto_depth = {
            50: [3, 4, 6, 3],
            101: [3, 4, 23, 3],
            152: [3, 8, 36, 3],
        }
        depth = layer_accordingto_depth[layers]

        # Stem: 7x7/2 conv + 3x3/2 max pool.
        # BUG FIX: `num_filter_size` is not a Conv2D argument — it is
        # `filter_size`; padding=3 keeps the standard ResNet stem geometry.
        self.conv1 = Conv2D(num_channels=3, num_filters=64, filter_size=7,
                            stride=2, padding=3, act="relu")
        self.pool2d = Pool2D(pool_size=3, pool_stride=2, pool_padding=1,
                             pool_type="max")

        # BUG FIX: the original mixed `botten_block_list`/`bottleneck_block_list`
        # and `botten_block`/`bottleneck_block` — every name is now consistent.
        self.bottleneck_block_list = []
        num_channels = 64
        for block in range(len(depth)):
            shortcut = False  # first block of each stage needs a projection
            for i in range(depth[block]):
                bottleneck_block = self.add_sublayer(
                    "bb_%d_%d" % (block, i),
                    BottleneckBlock(
                        num_channels=num_channels,
                        num_filters=num_filters[block],
                        # Down-sample at the start of every stage but the first.
                        stride=2 if i == 0 and block != 0 else 1,
                        shortcut=shortcut))
                # A bottleneck block outputs 4x its filter width.
                num_channels = num_filters[block] * 4
                self.bottleneck_block_list.append(bottleneck_block)
                shortcut = True
        self.pool2d_avg = Pool2D(pool_size=7, pool_type='avg',
                                 global_pooling=True)

        # Uniform init scaled by the classifier fan-in (512 * 4 = 2048).
        stdv = 1.0 / math.sqrt(2048 * 1.0)
        self.out = Linear(input_dim=2048, output_dim=class_dim,
                          param_attr=fluid.param_attr.ParamAttr(
                              initializer=fluid.initializer.Uniform(-stdv, stdv)))

    def forward(self, inputs):
        """Run the stem, all bottleneck blocks, global pooling, and the head."""
        y = self.pool2d(self.conv1(inputs))
        for bottleneck_block in self.bottleneck_block_list:
            y = bottleneck_block(y)
        y = self.pool2d_avg(y)
        # Flatten (N, C, 1, 1) -> (N, C) for the Linear layer.
        y = reshape(y, [y.shape[0], -1])
        return self.out(y)
内容来自用户分享和网络整理,不保证内容的准确性,如有侵权内容,可联系管理员处理 点击这里给我发消息
标签: