# examples/SSDH-VGG16-Avg-48
# VGG-16 (ILSVRC) deploy network with average pooling over pool5 and a
# 48-unit sigmoid latent layer (SSDH-style 48-bit hashing head).
name: "VGG_ILSVRC_16_layers"
# Deploy-style input: batch of 10, 3-channel 224x224 images.
input: "data"
input_dim: 10
input_dim: 3
input_dim: 224
input_dim: 224
# --- Block 1: two 3x3 convs (64 channels) + 2x2 max pool -> 112x112 ---
layer {
  name: "conv1_1"
  type: "Convolution"
  bottom: "data"
  top: "conv1_1"
  convolution_param {
    num_output: 64
    pad: 1
    kernel_size: 3
  }
}
layer {
  name: "relu1_1"
  type: "ReLU"
  bottom: "conv1_1"
  top: "conv1_1"
}
layer {
  name: "conv1_2"
  type: "Convolution"
  bottom: "conv1_1"
  top: "conv1_2"
  convolution_param {
    num_output: 64
    pad: 1
    kernel_size: 3
  }
}
layer {
  name: "relu1_2"
  type: "ReLU"
  bottom: "conv1_2"
  top: "conv1_2"
}
layer {
  name: "pool1"
  type: "Pooling"
  bottom: "conv1_2"
  top: "pool1"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
# --- Block 2: two 3x3 convs (128 channels) + 2x2 max pool -> 56x56 ---
layer {
  name: "conv2_1"
  type: "Convolution"
  bottom: "pool1"
  top: "conv2_1"
  convolution_param {
    num_output: 128
    pad: 1
    kernel_size: 3
  }
}
layer {
  name: "relu2_1"
  type: "ReLU"
  bottom: "conv2_1"
  top: "conv2_1"
}
layer {
  name: "conv2_2"
  type: "Convolution"
  bottom: "conv2_1"
  top: "conv2_2"
  convolution_param {
    num_output: 128
    pad: 1
    kernel_size: 3
  }
}
layer {
  name: "relu2_2"
  type: "ReLU"
  bottom: "conv2_2"
  top: "conv2_2"
}
layer {
  name: "pool2"
  type: "Pooling"
  bottom: "conv2_2"
  top: "pool2"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
# --- Block 3: three 3x3 convs (256 channels) + 2x2 max pool -> 28x28 ---
layer {
  name: "conv3_1"
  type: "Convolution"
  bottom: "pool2"
  top: "conv3_1"
  convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
  }
}
layer {
  name: "relu3_1"
  type: "ReLU"
  bottom: "conv3_1"
  top: "conv3_1"
}
layer {
  name: "conv3_2"
  type: "Convolution"
  bottom: "conv3_1"
  top: "conv3_2"
  convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
  }
}
layer {
  name: "relu3_2"
  type: "ReLU"
  bottom: "conv3_2"
  top: "conv3_2"
}
layer {
  name: "conv3_3"
  type: "Convolution"
  bottom: "conv3_2"
  top: "conv3_3"
  # NOTE(review): conv3_3 is the only conv layer carrying explicit
  # lr_mult/decay_mult params (weights lr x1, bias lr x2, no bias decay);
  # the others use Caffe defaults — confirm this asymmetry is intentional.
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
  }
}
layer {
  name: "relu3_3"
  type: "ReLU"
  bottom: "conv3_3"
  top: "conv3_3"
}
layer {
  name: "pool3"
  type: "Pooling"
  bottom: "conv3_3"
  top: "pool3"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
# --- Block 4: three 3x3 convs (512 channels) + 2x2 max pool -> 14x14 ---
layer {
  name: "conv4_1"
  type: "Convolution"
  bottom: "pool3"
  top: "conv4_1"
  convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
  }
}
layer {
  name: "relu4_1"
  type: "ReLU"
  bottom: "conv4_1"
  top: "conv4_1"
}
layer {
  name: "conv4_2"
  type: "Convolution"
  bottom: "conv4_1"
  top: "conv4_2"
  convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
  }
}
layer {
  name: "relu4_2"
  type: "ReLU"
  bottom: "conv4_2"
  top: "conv4_2"
}
layer {
  name: "conv4_3"
  type: "Convolution"
  bottom: "conv4_2"
  top: "conv4_3"
  convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
  }
}
layer {
  name: "relu4_3"
  type: "ReLU"
  bottom: "conv4_3"
  top: "conv4_3"
}
layer {
  name: "pool4"
  type: "Pooling"
  bottom: "conv4_3"
  top: "pool4"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
# --- Block 5: three 3x3 convs (512 channels), feature maps stay 14x14 ---
layer {
  name: "conv5_1"
  type: "Convolution"
  bottom: "pool4"
  top: "conv5_1"
  convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
  }
}
layer {
  name: "relu5_1"
  type: "ReLU"
  bottom: "conv5_1"
  top: "conv5_1"
}
layer {
  name: "conv5_2"
  type: "Convolution"
  bottom: "conv5_1"
  top: "conv5_2"
  convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
  }
}
layer {
  name: "relu5_2"
  type: "ReLU"
  bottom: "conv5_2"
  top: "conv5_2"
}
layer {
  name: "conv5_3"
  type: "Convolution"
  bottom: "conv5_2"
  top: "conv5_3"
  convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
  }
}
layer {
  name: "relu5_3"
  type: "ReLU"
  bottom: "conv5_3"
  top: "conv5_3"
}
# Global average pooling: after four 2x2/stride-2 max pools the 224x224
# input gives 14x14 maps, so kernel_size 14 averages each of the 512
# channels down to a single value (the "Avg" variant of this model).
layer {
  name: "pool5"
  type: "Pooling"
  bottom: "conv5_3"
  top: "pool5"
  pooling_param {
    # pool: MAX
    pool: AVE
    kernel_size: 14
    stride: 1
  }
}
# Original VGG fully-connected head (fc6/fc7 + dropout), disabled here:
# the latent hashing layer below consumes pool5 directly.
#layer {
#  name: "fc6"
#  type: "InnerProduct"
#  bottom: "pool5"
#  top: "fc6"
#  param {
#    lr_mult: 1
#    decay_mult: 1
#  }
#  param {
#    lr_mult: 2
#    decay_mult: 0
#  }
#  inner_product_param {
#    num_output: 4096
#  }
#}
#layer {
#  name: "relu6"
#  type: "ReLU"
#  bottom: "fc6"
#  top: "fc6"
#}
#layer {
#  name: "drop6"
#  type: "Dropout"
#  bottom: "fc6"
#  top: "fc6"
#  dropout_param {
#    dropout_ratio: 0.5
#  }
#}
#layer {
#  name: "fc7"
#  type: "InnerProduct"
#  bottom: "fc6"
#  top: "fc7"
#  param {
#    lr_mult: 1
#    decay_mult: 1
#  }
#  param {
#    lr_mult: 2
#    decay_mult: 0
#  }
#  inner_product_param {
#    num_output: 4096
#  }
#}
#layer {
#  name: "relu7"
#  type: "ReLU"
#  bottom: "fc7"
#  top: "fc7"
#}
#layer {
#  name: "drop7"
#  type: "Dropout"
#  bottom: "fc7"
#  top: "fc7"
#  dropout_param {
#    dropout_ratio: 0.5
#  }
#}
# 48-D latent embedding on top of pool5 (matches the "-48" in the model
# path — presumably one unit per hash bit; confirm against the trainer).
layer {
  name: "latent_layer"
  type: "InnerProduct"
  bottom: "pool5"
  top: "latent_layer"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  inner_product_param {
    num_output: 48
    weight_filler {
      type: "gaussian"
      std: 0.005
    }
    bias_filler {
      type: "constant"
      value: 1
    }
  }
}
# Sigmoid squashes the latent activations into (0, 1); presumably these
# are later thresholded into binary codes — confirm in downstream code.
layer {
  name: "encode_neuron"
  bottom: "latent_layer"
  top: "encode_neuron"
  type: "Sigmoid"
}