# YOLOv5-style Caffe deploy prototxt (NNIE target): 1x3x320x320 input, three detection scales.
# NOTE(review): the original file began with stray Gitee web-page text (a repository
# force-sync warning), which is not valid prototxt and would break protobuf text parsing.
# It has been replaced by this comment header.
# Network input: a single image tensor in NCHW layout, 1 x 3 x 320 x 320.
layer {
name: "images"
type: "Input"
top: "images"
input_param {
shape {
dim: 1
dim: 3
dim: 320
dim: 320
}
}
}
layer {
name: "Conv_0"
type: "Convolution"
bottom: "images"
top: "147"
convolution_param {
num_output: 16
bias_term: true
group: 1
pad_h: 1
pad_w: 1
kernel_h: 3
kernel_w: 3
stride_h: 2
stride_w: 2
dilation: 1
}
}
layer {
name: "Conv_1"
type: "Convolution"
bottom: "147"
top: "148"
convolution_param {
num_output: 32
bias_term: true
group: 1
pad_h: 1
pad_w: 1
kernel_h: 3
kernel_w: 3
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_2"
type: "ReLU"
bottom: "148"
top: "149"
}
layer {
name: "Conv_3"
type: "Convolution"
bottom: "149"
top: "150"
convolution_param {
num_output: 64
bias_term: true
group: 1
pad_h: 1
pad_w: 1
kernel_h: 3
kernel_w: 3
stride_h: 2
stride_w: 2
dilation: 1
}
}
layer {
name: "Relu_4"
type: "ReLU"
bottom: "150"
top: "151"
}
layer {
name: "Conv_5"
type: "Convolution"
bottom: "151"
top: "152"
convolution_param {
num_output: 32
bias_term: true
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_6"
type: "ReLU"
bottom: "152"
top: "153"
}
layer {
name: "Conv_7"
type: "Convolution"
bottom: "153"
top: "154"
convolution_param {
num_output: 64
bias_term: true
group: 1
pad_h: 1
pad_w: 1
kernel_h: 3
kernel_w: 3
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_8"
type: "ReLU"
bottom: "154"
top: "155"
}
layer {
name: "Add_9"
type: "Eltwise"
bottom: "151"
bottom: "155"
top: "156"
eltwise_param {
operation: SUM
}
}
layer {
name: "Conv_10"
type: "Convolution"
bottom: "156"
top: "157"
convolution_param {
num_output: 128
bias_term: true
group: 1
pad_h: 1
pad_w: 1
kernel_h: 3
kernel_w: 3
stride_h: 2
stride_w: 2
dilation: 1
}
}
layer {
name: "Relu_11"
type: "ReLU"
bottom: "157"
top: "158"
}
layer {
name: "Conv_12"
type: "Convolution"
bottom: "158"
top: "159"
convolution_param {
num_output: 64
bias_term: true
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_13"
type: "ReLU"
bottom: "159"
top: "160"
}
layer {
name: "Conv_14"
type: "Convolution"
bottom: "160"
top: "161"
convolution_param {
num_output: 64
bias_term: true
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_15"
type: "ReLU"
bottom: "161"
top: "162"
}
layer {
name: "Conv_16"
type: "Convolution"
bottom: "162"
top: "163"
convolution_param {
num_output: 64
bias_term: true
group: 1
pad_h: 1
pad_w: 1
kernel_h: 3
kernel_w: 3
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_17"
type: "ReLU"
bottom: "163"
top: "164"
}
layer {
name: "Add_18"
type: "Eltwise"
bottom: "160"
bottom: "164"
top: "165"
eltwise_param {
operation: SUM
}
}
layer {
name: "Conv_19"
type: "Convolution"
bottom: "165"
top: "166"
convolution_param {
num_output: 64
bias_term: true
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_20"
type: "ReLU"
bottom: "166"
top: "167"
}
layer {
name: "Conv_21"
type: "Convolution"
bottom: "167"
top: "168"
convolution_param {
num_output: 64
bias_term: true
group: 1
pad_h: 1
pad_w: 1
kernel_h: 3
kernel_w: 3
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_22"
type: "ReLU"
bottom: "168"
top: "169"
}
layer {
name: "Add_23"
type: "Eltwise"
bottom: "165"
bottom: "169"
top: "170"
eltwise_param {
operation: SUM
}
}
layer {
name: "Conv_24"
type: "Convolution"
bottom: "170"
top: "171"
convolution_param {
num_output: 64
bias_term: true
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_25"
type: "ReLU"
bottom: "171"
top: "172"
}
layer {
name: "Conv_26"
type: "Convolution"
bottom: "172"
top: "173"
convolution_param {
num_output: 64
bias_term: true
group: 1
pad_h: 1
pad_w: 1
kernel_h: 3
kernel_w: 3
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_27"
type: "ReLU"
bottom: "173"
top: "174"
}
layer {
name: "Add_28"
type: "Eltwise"
bottom: "170"
bottom: "174"
top: "175"
eltwise_param {
operation: SUM
}
}
layer {
name: "Conv_29"
type: "Convolution"
bottom: "175"
top: "176"
convolution_param {
num_output: 64
bias_term: false
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Conv_30"
type: "Convolution"
bottom: "158"
top: "177"
convolution_param {
num_output: 64
bias_term: false
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Concat_31"
type: "Concat"
bottom: "176"
bottom: "177"
top: "178"
concat_param {
axis: 1
}
}
layer {
name: "BatchNormalization_32_bn"
type: "BatchNorm"
bottom: "178"
top: "179"
batch_norm_param {
use_global_stats: true
eps: 9.999999747378752e-05
}
}
layer {
name: "BatchNormalization_32"
type: "Scale"
bottom: "179"
top: "179"
scale_param {
bias_term: true
}
}
layer {
name: "Relu_33"
type: "ReLU"
bottom: "179"
top: "180"
}
layer {
name: "Conv_34"
type: "Convolution"
bottom: "180"
top: "181"
convolution_param {
num_output: 128
bias_term: true
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_35"
type: "ReLU"
bottom: "181"
top: "182"
}
layer {
name: "Conv_36"
type: "Convolution"
bottom: "182"
top: "183"
convolution_param {
num_output: 256
bias_term: true
group: 1
pad_h: 1
pad_w: 1
kernel_h: 3
kernel_w: 3
stride_h: 2
stride_w: 2
dilation: 1
}
}
layer {
name: "Relu_37"
type: "ReLU"
bottom: "183"
top: "184"
}
layer {
name: "Conv_38"
type: "Convolution"
bottom: "184"
top: "185"
convolution_param {
num_output: 128
bias_term: true
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_39"
type: "ReLU"
bottom: "185"
top: "186"
}
layer {
name: "Conv_40"
type: "Convolution"
bottom: "186"
top: "187"
convolution_param {
num_output: 128
bias_term: true
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_41"
type: "ReLU"
bottom: "187"
top: "188"
}
layer {
name: "Conv_42"
type: "Convolution"
bottom: "188"
top: "189"
convolution_param {
num_output: 128
bias_term: true
group: 1
pad_h: 1
pad_w: 1
kernel_h: 3
kernel_w: 3
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_43"
type: "ReLU"
bottom: "189"
top: "190"
}
layer {
name: "Add_44"
type: "Eltwise"
bottom: "186"
bottom: "190"
top: "191"
eltwise_param {
operation: SUM
}
}
layer {
name: "Conv_45"
type: "Convolution"
bottom: "191"
top: "192"
convolution_param {
num_output: 128
bias_term: true
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_46"
type: "ReLU"
bottom: "192"
top: "193"
}
layer {
name: "Conv_47"
type: "Convolution"
bottom: "193"
top: "194"
convolution_param {
num_output: 128
bias_term: true
group: 1
pad_h: 1
pad_w: 1
kernel_h: 3
kernel_w: 3
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_48"
type: "ReLU"
bottom: "194"
top: "195"
}
layer {
name: "Add_49"
type: "Eltwise"
bottom: "191"
bottom: "195"
top: "196"
eltwise_param {
operation: SUM
}
}
layer {
name: "Conv_50"
type: "Convolution"
bottom: "196"
top: "197"
convolution_param {
num_output: 128
bias_term: true
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_51"
type: "ReLU"
bottom: "197"
top: "198"
}
layer {
name: "Conv_52"
type: "Convolution"
bottom: "198"
top: "199"
convolution_param {
num_output: 128
bias_term: true
group: 1
pad_h: 1
pad_w: 1
kernel_h: 3
kernel_w: 3
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_53"
type: "ReLU"
bottom: "199"
top: "200"
}
layer {
name: "Add_54"
type: "Eltwise"
bottom: "196"
bottom: "200"
top: "201"
eltwise_param {
operation: SUM
}
}
layer {
name: "Conv_55"
type: "Convolution"
bottom: "201"
top: "202"
convolution_param {
num_output: 128
bias_term: false
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Conv_56"
type: "Convolution"
bottom: "184"
top: "203"
convolution_param {
num_output: 128
bias_term: false
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Concat_57"
type: "Concat"
bottom: "202"
bottom: "203"
top: "204"
concat_param {
axis: 1
}
}
layer {
name: "BatchNormalization_58_bn"
type: "BatchNorm"
bottom: "204"
top: "205"
batch_norm_param {
use_global_stats: true
eps: 9.999999747378752e-05
}
}
layer {
name: "BatchNormalization_58"
type: "Scale"
bottom: "205"
top: "205"
scale_param {
bias_term: true
}
}
layer {
name: "Relu_59"
type: "ReLU"
bottom: "205"
top: "206"
}
layer {
name: "Conv_60"
type: "Convolution"
bottom: "206"
top: "207"
convolution_param {
num_output: 256
bias_term: true
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_61"
type: "ReLU"
bottom: "207"
top: "208"
}
layer {
name: "Conv_62"
type: "Convolution"
bottom: "208"
top: "209"
convolution_param {
num_output: 512
bias_term: true
group: 1
pad_h: 1
pad_w: 1
kernel_h: 3
kernel_w: 3
stride_h: 2
stride_w: 2
dilation: 1
}
}
layer {
name: "Relu_63"
type: "ReLU"
bottom: "209"
top: "210"
}
layer {
name: "Conv_64"
type: "Convolution"
bottom: "210"
top: "211"
convolution_param {
num_output: 256
bias_term: true
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_65"
type: "ReLU"
bottom: "211"
top: "212"
}
# SPP-style pooling pyramid over blob "212": three parallel stride-1 MAX pools
# (5x5 pad 2, 9x9 pad 4, 13x13 pad 6 -- all spatial-size-preserving), whose
# outputs are concatenated with the identity branch along the channel axis
# (axis 1) into blob "216", quadrupling the channel count.
layer {
name: "MaxPool_66"
type: "Pooling"
bottom: "212"
top: "213"
pooling_param {
pool: MAX
kernel_h: 5
kernel_w: 5
stride_h: 1
stride_w: 1
pad_h: 2
pad_w: 2
}
}
layer {
name: "MaxPool_67"
type: "Pooling"
bottom: "212"
top: "214"
pooling_param {
pool: MAX
kernel_h: 9
kernel_w: 9
stride_h: 1
stride_w: 1
pad_h: 4
pad_w: 4
}
}
layer {
name: "MaxPool_68"
type: "Pooling"
bottom: "212"
top: "215"
pooling_param {
pool: MAX
kernel_h: 13
kernel_w: 13
stride_h: 1
stride_w: 1
pad_h: 6
pad_w: 6
}
}
layer {
name: "Concat_69"
type: "Concat"
bottom: "212"
bottom: "213"
bottom: "214"
bottom: "215"
top: "216"
concat_param {
axis: 1
}
}
layer {
name: "Conv_70"
type: "Convolution"
bottom: "216"
top: "217"
convolution_param {
num_output: 512
bias_term: true
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_71"
type: "ReLU"
bottom: "217"
top: "218"
}
layer {
name: "Conv_72"
type: "Convolution"
bottom: "218"
top: "219"
convolution_param {
num_output: 256
bias_term: true
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_73"
type: "ReLU"
bottom: "219"
top: "220"
}
layer {
name: "Conv_74"
type: "Convolution"
bottom: "220"
top: "221"
convolution_param {
num_output: 256
bias_term: true
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_75"
type: "ReLU"
bottom: "221"
top: "222"
}
layer {
name: "Conv_76"
type: "Convolution"
bottom: "222"
top: "223"
convolution_param {
num_output: 256
bias_term: true
group: 1
pad_h: 1
pad_w: 1
kernel_h: 3
kernel_w: 3
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_77"
type: "ReLU"
bottom: "223"
top: "224"
}
layer {
name: "Add_78"
type: "Eltwise"
bottom: "220"
bottom: "224"
top: "225"
eltwise_param {
operation: SUM
}
}
layer {
name: "Conv_79"
type: "Convolution"
bottom: "225"
top: "226"
convolution_param {
num_output: 256
bias_term: true
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_80"
type: "ReLU"
bottom: "226"
top: "227"
}
layer {
name: "Conv_81"
type: "Convolution"
bottom: "227"
top: "228"
convolution_param {
num_output: 256
bias_term: true
group: 1
pad_h: 1
pad_w: 1
kernel_h: 3
kernel_w: 3
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_82"
type: "ReLU"
bottom: "228"
top: "229"
}
layer {
name: "Add_83"
type: "Eltwise"
bottom: "225"
bottom: "229"
top: "230"
eltwise_param {
operation: SUM
}
}
layer {
name: "Conv_84"
type: "Convolution"
bottom: "230"
top: "231"
convolution_param {
num_output: 256
bias_term: false
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Conv_85"
type: "Convolution"
bottom: "218"
top: "232"
convolution_param {
num_output: 256
bias_term: false
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Concat_86"
type: "Concat"
bottom: "231"
bottom: "232"
top: "233"
concat_param {
axis: 1
}
}
layer {
name: "BatchNormalization_87_bn"
type: "BatchNorm"
bottom: "233"
top: "234"
batch_norm_param {
use_global_stats: true
eps: 9.999999747378752e-05
}
}
layer {
name: "BatchNormalization_87"
type: "Scale"
bottom: "234"
top: "234"
scale_param {
bias_term: true
}
}
layer {
name: "Relu_88"
type: "ReLU"
bottom: "234"
top: "235"
}
layer {
name: "Conv_89"
type: "Convolution"
bottom: "235"
top: "236"
convolution_param {
num_output: 512
bias_term: true
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_90"
type: "ReLU"
bottom: "236"
top: "237"
}
layer {
name: "Conv_91"
type: "Convolution"
bottom: "237"
top: "238"
convolution_param {
num_output: 256
bias_term: true
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_92"
type: "ReLU"
bottom: "238"
top: "239"
}
layer {
name: "Conv_93"
type: "Convolution"
bottom: "239"
top: "240"
convolution_param {
num_output: 256
bias_term: true
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_94"
type: "ReLU"
bottom: "240"
top: "241"
}
layer {
name: "Conv_95"
type: "Convolution"
bottom: "241"
top: "242"
convolution_param {
num_output: 256
bias_term: true
group: 1
pad_h: 1
pad_w: 1
kernel_h: 3
kernel_w: 3
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_96"
type: "ReLU"
bottom: "242"
top: "243"
}
layer {
name: "Conv_97"
type: "Convolution"
bottom: "243"
top: "244"
convolution_param {
num_output: 256
bias_term: false
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Conv_98"
type: "Convolution"
bottom: "237"
top: "245"
convolution_param {
num_output: 256
bias_term: false
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Concat_99"
type: "Concat"
bottom: "244"
bottom: "245"
top: "246"
concat_param {
axis: 1
}
}
layer {
name: "BatchNormalization_100_bn"
type: "BatchNorm"
bottom: "246"
top: "247"
batch_norm_param {
use_global_stats: true
eps: 9.999999747378752e-05
}
}
layer {
name: "BatchNormalization_100"
type: "Scale"
bottom: "247"
top: "247"
scale_param {
bias_term: true
}
}
layer {
name: "Relu_101"
type: "ReLU"
bottom: "247"
top: "248"
}
layer {
name: "Conv_102"
type: "Convolution"
bottom: "248"
top: "249"
convolution_param {
num_output: 512
bias_term: true
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_103"
type: "ReLU"
bottom: "249"
top: "250"
}
# Detection head for the coarsest (stride-32, 10x10) feature map: 1x1 conv
# producing 159 channels. 159 = 3 x 53, matching Reshape_184 below
# (presumably 3 anchors x (4 box + 1 objectness + 48 classes) -- TODO confirm
# the class count against the training config).
layer {
name: "Conv_104"
type: "Convolution"
bottom: "250"
top: "251"
convolution_param {
num_output: 159
bias_term: true
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "ConvTranspose_105"
type: "Deconvolution"
bottom: "250"
top: "252"
convolution_param {
num_output: 512
bias_term: true
group: 512
pad_h: 0
pad_w: 0
kernel_h: 2
kernel_w: 2
stride_h: 2
stride_w: 2
}
}
layer {
name: "Concat_106"
type: "Concat"
bottom: "252"
bottom: "208"
top: "253"
concat_param {
axis: 1
}
}
layer {
name: "Conv_107"
type: "Convolution"
bottom: "253"
top: "254"
convolution_param {
num_output: 256
bias_term: true
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_108"
type: "ReLU"
bottom: "254"
top: "255"
}
layer {
name: "Conv_109"
type: "Convolution"
bottom: "255"
top: "256"
convolution_param {
num_output: 128
bias_term: true
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_110"
type: "ReLU"
bottom: "256"
top: "257"
}
layer {
name: "Conv_111"
type: "Convolution"
bottom: "257"
top: "258"
convolution_param {
num_output: 128
bias_term: true
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_112"
type: "ReLU"
bottom: "258"
top: "259"
}
layer {
name: "Conv_113"
type: "Convolution"
bottom: "259"
top: "260"
convolution_param {
num_output: 128
bias_term: true
group: 1
pad_h: 1
pad_w: 1
kernel_h: 3
kernel_w: 3
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_114"
type: "ReLU"
bottom: "260"
top: "261"
}
layer {
name: "Conv_115"
type: "Convolution"
bottom: "261"
top: "262"
convolution_param {
num_output: 128
bias_term: false
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Conv_116"
type: "Convolution"
bottom: "255"
top: "263"
convolution_param {
num_output: 128
bias_term: false
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Concat_117"
type: "Concat"
bottom: "262"
bottom: "263"
top: "264"
concat_param {
axis: 1
}
}
layer {
name: "BatchNormalization_118_bn"
type: "BatchNorm"
bottom: "264"
top: "265"
batch_norm_param {
use_global_stats: true
eps: 9.999999747378752e-05
}
}
layer {
name: "BatchNormalization_118"
type: "Scale"
bottom: "265"
top: "265"
scale_param {
bias_term: true
}
}
layer {
name: "Relu_119"
type: "ReLU"
bottom: "265"
top: "266"
}
layer {
name: "Conv_120"
type: "Convolution"
bottom: "266"
top: "267"
convolution_param {
num_output: 256
bias_term: true
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_121"
type: "ReLU"
bottom: "267"
top: "268"
}
# Detection head for the middle (stride-16, 20x20) feature map: 1x1 conv
# producing 159 channels (3 x 53 per spatial cell; see Reshape_169 below).
layer {
name: "Conv_122"
type: "Convolution"
bottom: "268"
top: "269"
convolution_param {
num_output: 159
bias_term: true
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "ConvTranspose_123"
type: "Deconvolution"
bottom: "268"
top: "270"
convolution_param {
num_output: 256
bias_term: true
group: 256
pad_h: 0
pad_w: 0
kernel_h: 2
kernel_w: 2
stride_h: 2
stride_w: 2
}
}
layer {
name: "Concat_124"
type: "Concat"
bottom: "270"
bottom: "182"
top: "271"
concat_param {
axis: 1
}
}
layer {
name: "Conv_125"
type: "Convolution"
bottom: "271"
top: "272"
convolution_param {
num_output: 128
bias_term: true
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_126"
type: "ReLU"
bottom: "272"
top: "273"
}
layer {
name: "Conv_127"
type: "Convolution"
bottom: "273"
top: "274"
convolution_param {
num_output: 64
bias_term: true
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_128"
type: "ReLU"
bottom: "274"
top: "275"
}
layer {
name: "Conv_129"
type: "Convolution"
bottom: "275"
top: "276"
convolution_param {
num_output: 64
bias_term: true
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_130"
type: "ReLU"
bottom: "276"
top: "277"
}
layer {
name: "Conv_131"
type: "Convolution"
bottom: "277"
top: "278"
convolution_param {
num_output: 64
bias_term: true
group: 1
pad_h: 1
pad_w: 1
kernel_h: 3
kernel_w: 3
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_132"
type: "ReLU"
bottom: "278"
top: "279"
}
layer {
name: "Conv_133"
type: "Convolution"
bottom: "279"
top: "280"
convolution_param {
num_output: 64
bias_term: false
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Conv_134"
type: "Convolution"
bottom: "273"
top: "281"
convolution_param {
num_output: 64
bias_term: false
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Concat_135"
type: "Concat"
bottom: "280"
bottom: "281"
top: "282"
concat_param {
axis: 1
}
}
layer {
name: "BatchNormalization_136_bn"
type: "BatchNorm"
bottom: "282"
top: "283"
batch_norm_param {
use_global_stats: true
eps: 9.999999747378752e-05
}
}
layer {
name: "BatchNormalization_136"
type: "Scale"
bottom: "283"
top: "283"
scale_param {
bias_term: true
}
}
layer {
name: "Relu_137"
type: "ReLU"
bottom: "283"
top: "284"
}
layer {
name: "Conv_138"
type: "Convolution"
bottom: "284"
top: "285"
convolution_param {
num_output: 128
bias_term: true
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
layer {
name: "Relu_139"
type: "ReLU"
bottom: "285"
top: "286"
}
# Detection head for the finest (stride-8, 40x40) feature map: 1x1 conv
# producing 159 channels (3 x 53 per spatial cell; see Reshape_154 below).
layer {
name: "Conv_140"
type: "Convolution"
bottom: "286"
top: "287"
convolution_param {
num_output: 159
bias_term: true
group: 1
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
dilation: 1
}
}
# Final outputs: reshape each head's (N, 159, H, W) blob to (N, 3, 53, H*W),
# i.e. one slice of 53 values per anchor per cell. For the 320x320 input the
# spatial sizes are 40x40 -> 1600 (stride 8), 20x20 -> 400 (stride 16), and
# 10x10 -> 100 (stride 32). dim: 0 keeps the batch dimension unchanged.
layer {
name: "Reshape_154"
type: "Reshape"
bottom: "287"
top: "305"
reshape_param {
shape {
dim: 0
dim: 3
dim: 53
dim: 1600
}
}
}
layer {
name: "Reshape_169"
type: "Reshape"
bottom: "269"
top: "324"
reshape_param {
shape {
dim: 0
dim: 3
dim: 53
dim: 400
}
}
}
layer {
name: "Reshape_184"
type: "Reshape"
bottom: "251"
top: "343"
reshape_param {
shape {
dim: 0
dim: 3
dim: 53
dim: 100
}
}
}
# End of network definition.
# NOTE(review): stray Gitee web-page moderation text followed the last layer in the
# original file; it was not part of the prototxt and would break parsing, so it has
# been removed and replaced by this comment.