Commit cb6305e

fix resnet model
1 parent a9e8e58 commit cb6305e

3 files changed, +46 -9 lines changed


caffe/models/JAN/alexnet/train_val.prototxt

Lines changed: 20 additions & 2 deletions
@@ -488,7 +488,7 @@ layer {
   include: { phase: TRAIN }
 }
 layer {
-  name: "fc7_mmd_loss"
+  name: "fc7_jmmd_loss"
   type: "JMMDLoss"
   bottom: "source_fc7"
   bottom: "target_fc7"
@@ -501,7 +501,25 @@ layer {
     kernel_mul: 2.0
     label_kernel_num: 1
     label_kernel_mul: 2.0
-    sigma: 1.3
+    sigma: 1.68
+  }
+  include: { phase: TRAIN }
+}
+layer {
+  name: "fc8_jmmd_loss"
+  type: "JMMDLoss"
+  bottom: "softmax_source"
+  bottom: "softmax_target"
+  bottom: "softmax_source"
+  bottom: "softmax_target"
+  loss_weight: 0.3
+  top: "fc8_jmmd_loss"
+  jmmd_param {
+    kernel_num: 5
+    kernel_mul: 2.0
+    label_kernel_num: 1
+    label_kernel_mul: 2.0
+    sigma: 1.68
   }
   include: { phase: TRAIN }
 }
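
For reference, the JMMDLoss layers touched by this commit appear to follow the same pattern: the first two bottoms carry the source/target feature blobs and the last two carry the source/target softmax outputs, with separate kernel settings for each pair. Below is a minimal annotated sketch of such a layer; blob names and values are taken from the diff above, while the comments are one reading of the parameters rather than documented semantics of the custom JMMDLoss layer.

layer {
  name: "fc7_jmmd_loss"
  type: "JMMDLoss"            # custom layer provided by this repo, not stock Caffe
  bottom: "source_fc7"        # source-domain feature blob
  bottom: "target_fc7"        # target-domain feature blob
  bottom: "softmax_source"    # source-domain class probabilities
  bottom: "softmax_target"    # target-domain class probabilities
  top: "fc7_jmmd_loss"
  loss_weight: 0.3            # weight of this term relative to the classification loss
  jmmd_param {
    kernel_num: 5             # number of Gaussian kernels used on the feature blobs
    kernel_mul: 2.0           # multiplicative step between successive kernel bandwidths
    label_kernel_num: 1       # kernels used on the softmax (label) blobs
    label_kernel_mul: 2.0
    sigma: 1.68               # base kernel bandwidth
  }
  include: { phase: TRAIN }   # the loss is only active during training
}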

caffe/models/JAN/resnet/solver.prototxt

Lines changed: 1 addition & 1 deletion
@@ -5,7 +5,7 @@ base_lr: 0.0003
 lr_policy: "inv"
 gamma: 0.001
 power: 0.75
-display: 500
+display: 1000
 max_iter: 30000
 momentum: 0.9
 weight_decay: 0.0005
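
The solver change for the resnet model only affects logging frequency, not the optimization itself; the learning-rate schedule is governed by the surrounding "inv"-policy parameters. For context, an annotated excerpt of these solver settings, where the comments reflect standard Caffe solver semantics rather than anything stated in the commit:

base_lr: 0.0003        # initial learning rate
lr_policy: "inv"       # lr = base_lr * (1 + gamma * iter) ^ (-power)
gamma: 0.001
power: 0.75
display: 1000          # print loss and learning rate to the log every 1000 iterations
max_iter: 30000        # total number of solver iterations
momentum: 0.9          # SGD momentum
weight_decay: 0.0005   # L2 regularization strength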

caffe/models/JAN/resnet/train_val.prototxt

Lines changed: 25 additions & 6 deletions
@@ -2484,19 +2484,38 @@ layer {
   include: { phase: TRAIN }
 }
 layer {
-  name: "jmmd_loss"
+  name: "fc7_jmmd_loss"
   type: "JMMDLoss"
   bottom: "bottleneck_source"
   bottom: "bottleneck_target"
   bottom: "source_softmax"
   bottom: "target_softmax"
-  loss_weight: 0.3 # best 0.3 for our tasks, can be tuned within [0.1, 1.0]
-  top: "jmmd_loss"
+  loss_weight: 0.3
+  top: "fc7_jmmd_loss"
+  jmmd_param {
+    kernel_num: 5
+    kernel_mul: 2.0
+    label_kernel_num: 1
+    label_kernel_mul: 2.0
+    sigma: 1.68
+  }
   include: { phase: TRAIN }
 }
 layer {
-  name: "silence_loss_value"
-  type: "Silence"
-  bottom: "jmmd_loss"
+  name: "fc8_jmmd_loss"
+  type: "JMMDLoss"
+  bottom: "source_softmax"
+  bottom: "target_softmax"
+  bottom: "source_softmax"
+  bottom: "target_softmax"
+  loss_weight: 0.3
+  top: "fc7_jmmd_loss"
+  jmmd_param {
+    kernel_num: 1
+    kernel_mul: 2.0
+    label_kernel_num: 1
+    label_kernel_mul: 2.0
+    sigma: 1.68
+  }
   include: { phase: TRAIN }
 }
