From ResNet101 to ResNet50

I had been training SSD with VGG the whole time. A few days ago I wanted to see how ResNet would do, since the SSD source code ships Python code that builds the ResNet network structures, namely ResNet101 and ResNet152. I took ResNet101 and trained with it directly on a GTX 1060; the batch size had to drop all the way to 2 before it would even run, and, as expected, it never converged. Here is the implementation in model_libs.py:

def ResNet101Body(net, from_layer, use_pool5=True, use_dilation_conv5=False, **bn_param):
    conv_prefix = ''
    conv_postfix = ''
    bn_prefix = 'bn_'
    bn_postfix = ''
    scale_prefix = 'scale_'
    scale_postfix = ''
    ConvBNLayer(net, from_layer, 'conv1', use_bn=True, use_relu=True,
        num_output=64, kernel_size=7, pad=3, stride=2,
        conv_prefix=conv_prefix, conv_postfix=conv_postfix,
        bn_prefix=bn_prefix, bn_postfix=bn_postfix,
        scale_prefix=scale_prefix, scale_postfix=scale_postfix, **bn_param)

    net.pool1 = L.Pooling(net.conv1, pool=P.Pooling.MAX, kernel_size=3, stride=2)

    ResBody(net, 'pool1', '2a', out2a=64, out2b=64, out2c=256, stride=1, use_branch1=True, **bn_param)
    ResBody(net, 'res2a', '2b', out2a=64, out2b=64, out2c=256, stride=1, use_branch1=False, **bn_param)
    ResBody(net, 'res2b', '2c', out2a=64, out2b=64, out2c=256, stride=1, use_branch1=False, **bn_param)

    ResBody(net, 'res2c', '3a', out2a=128, out2b=128, out2c=512, stride=2, use_branch1=True, **bn_param)

    from_layer = 'res3a'
    for i in xrange(1, 4):
        block_name = '3b{}'.format(i)
        ResBody(net, from_layer, block_name, out2a=128, out2b=128, out2c=512, stride=1, use_branch1=False, **bn_param)
        from_layer = 'res{}'.format(block_name)

    ResBody(net, from_layer, '4a', out2a=256, out2b=256, out2c=1024, stride=2, use_branch1=True, **bn_param)

    from_layer = 'res4a'
    for i in xrange(1, 23):
        block_name = '4b{}'.format(i)
        ResBody(net, from_layer, block_name, out2a=256, out2b=256, out2c=1024, stride=1, use_branch1=False, **bn_param)
        from_layer = 'res{}'.format(block_name)

    stride = 2
    dilation = 1
    if use_dilation_conv5:
        stride = 1
        dilation = 2

    ResBody(net, from_layer, '5a', out2a=512, out2b=512, out2c=2048, stride=stride, use_branch1=True, dilation=dilation, **bn_param)
    ResBody(net, 'res5a', '5b', out2a=512, out2b=512, out2c=2048, stride=1, use_branch1=False, dilation=dilation, **bn_param)
    ResBody(net, 'res5b', '5c', out2a=512, out2b=512, out2c=2048, stride=1, use_branch1=False, dilation=dilation, **bn_param)

    if use_pool5:
        net.pool5 = L.Pooling(net.res5c, pool=P.Pooling.AVE, global_pooling=True)
    return net

ResNet152Body is:

def ResNet152Body(net, from_layer, use_pool5=True, use_dilation_conv5=False, **bn_param):
    conv_prefix = ''
    conv_postfix = ''
    bn_prefix = 'bn_'
    bn_postfix = ''
    scale_prefix = 'scale_'
    scale_postfix = ''
    ConvBNLayer(net, from_layer, 'conv1', use_bn=True, use_relu=True,
        num_output=64, kernel_size=7, pad=3, stride=2,
        conv_prefix=conv_prefix, conv_postfix=conv_postfix,
        bn_prefix=bn_prefix, bn_postfix=bn_postfix,
        scale_prefix=scale_prefix, scale_postfix=scale_postfix, **bn_param)

    net.pool1 = L.Pooling(net.conv1, pool=P.Pooling.MAX, kernel_size=3, stride=2)

    ResBody(net, 'pool1', '2a', out2a=64, out2b=64, out2c=256, stride=1, use_branch1=True, **bn_param)
    ResBody(net, 'res2a', '2b', out2a=64, out2b=64, out2c=256, stride=1, use_branch1=False, **bn_param)
    ResBody(net, 'res2b', '2c', out2a=64, out2b=64, out2c=256, stride=1, use_branch1=False, **bn_param)

    ResBody(net, 'res2c', '3a', out2a=128, out2b=128, out2c=512, stride=2, use_branch1=True, **bn_param)

    from_layer = 'res3a'
    for i in xrange(1, 8):
        block_name = '3b{}'.format(i)
        ResBody(net, from_layer, block_name, out2a=128, out2b=128, out2c=512, stride=1, use_branch1=False, **bn_param)
        from_layer = 'res{}'.format(block_name)

    ResBody(net, from_layer, '4a', out2a=256, out2b=256, out2c=1024, stride=2, use_branch1=True, **bn_param)

    from_layer = 'res4a'
    for i in xrange(1, 36):
        block_name = '4b{}'.format(i)
        ResBody(net, from_layer, block_name, out2a=256, out2b=256, out2c=1024, stride=1, use_branch1=False, **bn_param)
        from_layer = 'res{}'.format(block_name)

    stride = 2
    dilation = 1
    if use_dilation_conv5:
        stride = 1
        dilation = 2

    ResBody(net, from_layer, '5a', out2a=512, out2b=512, out2c=2048, stride=stride, use_branch1=True, dilation=dilation, **bn_param)
    ResBody(net, 'res5a', '5b', out2a=512, out2b=512, out2c=2048, stride=1, use_branch1=False, dilation=dilation, **bn_param)
    ResBody(net, 'res5b', '5c', out2a=512, out2b=512, out2c=2048, stride=1, use_branch1=False, dilation=dilation, **bn_param)

    if use_pool5:
        net.pool5 = L.Pooling(net.res5c, pool=P.Pooling.AVE, global_pooling=True)
    return net

Each call to ResBody creates four convolution layers when use_branch1=True and three when use_branch1=False. The only difference between ResNet101Body and ResNet152Body is the iteration count of the two for loops, and that is where the 51 layers separating the 101-layer and 152-layer networks come from: (7 - 3) + (35 - 22) = 17 extra bottleneck blocks, each with 3 convolutions, gives 51 layers. That makes writing a ResNet50Body much easier: just modify the code above to match the ResNet_50_train_val.prototxt that comes with the model downloaded from the web. With 50 layers and batchsize=4, training converged right away. Of course there are several ways to train; you can also use the existing ResNet_50_train_val.prototxt directly.
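For reference, here is a minimal sketch of what such a ResNet50Body could look like, assuming it is added next to the functions above in model_libs.py (so ConvBNLayer, ResBody, and the caffe layers/params imports L and P are already in scope). The function name ResNet50Body and the res3b{i}/res4b{i} block naming follow the ResNet101Body convention and are my own choices; the released ResNet-50 prototxt typically names its blocks res3b...res3d and res4b...res4f, so the names would need adjusting before loading those pretrained weights.

# Sketch only: same helpers as ResNet101Body/ResNet152Body above.
# Layer count: 1 (conv1) + (3+4+6+3) blocks * 3 convs = 49 conv layers;
# the original classifier's fc layer brings it to 50.
def ResNet50Body(net, from_layer, use_pool5=True, use_dilation_conv5=False, **bn_param):
    ConvBNLayer(net, from_layer, 'conv1', use_bn=True, use_relu=True,
        num_output=64, kernel_size=7, pad=3, stride=2,
        conv_prefix='', conv_postfix='', bn_prefix='bn_', bn_postfix='',
        scale_prefix='scale_', scale_postfix='', **bn_param)
    net.pool1 = L.Pooling(net.conv1, pool=P.Pooling.MAX, kernel_size=3, stride=2)

    # conv2_x: 3 bottleneck blocks, same as the deeper variants
    ResBody(net, 'pool1', '2a', out2a=64, out2b=64, out2c=256, stride=1, use_branch1=True, **bn_param)
    ResBody(net, 'res2a', '2b', out2a=64, out2b=64, out2c=256, stride=1, use_branch1=False, **bn_param)
    ResBody(net, 'res2b', '2c', out2a=64, out2b=64, out2c=256, stride=1, use_branch1=False, **bn_param)

    # conv3_x: 4 bottleneck blocks, same loop as ResNet101Body
    ResBody(net, 'res2c', '3a', out2a=128, out2b=128, out2c=512, stride=2, use_branch1=True, **bn_param)
    from_layer = 'res3a'
    for i in xrange(1, 4):
        block_name = '3b{}'.format(i)
        ResBody(net, from_layer, block_name, out2a=128, out2b=128, out2c=512, stride=1, use_branch1=False, **bn_param)
        from_layer = 'res{}'.format(block_name)

    # conv4_x: 6 bottleneck blocks -- the only loop that shrinks (22 -> 5 repeats)
    ResBody(net, from_layer, '4a', out2a=256, out2b=256, out2c=1024, stride=2, use_branch1=True, **bn_param)
    from_layer = 'res4a'
    for i in xrange(1, 6):
        block_name = '4b{}'.format(i)
        ResBody(net, from_layer, block_name, out2a=256, out2b=256, out2c=1024, stride=1, use_branch1=False, **bn_param)
        from_layer = 'res{}'.format(block_name)

    # conv5_x: 3 bottleneck blocks, optionally dilated as in the functions above
    stride = 2
    dilation = 1
    if use_dilation_conv5:
        stride = 1
        dilation = 2
    ResBody(net, from_layer, '5a', out2a=512, out2b=512, out2c=2048, stride=stride, use_branch1=True, dilation=dilation, **bn_param)
    ResBody(net, 'res5a', '5b', out2a=512, out2b=512, out2c=2048, stride=1, use_branch1=False, dilation=dilation, **bn_param)
    ResBody(net, 'res5b', '5c', out2a=512, out2b=512, out2c=2048, stride=1, use_branch1=False, dilation=dilation, **bn_param)

    if use_pool5:
        net.pool5 = L.Pooling(net.res5c, pool=P.Pooling.AVE, global_pooling=True)

    return net

The call site is the same as for ResNet101Body in the SSD example scripts, e.g. something like ResNet50Body(net, from_layer='data', use_pool5=False, use_dilation_conv5=True, **bn_param); the exact arguments depend on your training script.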