Thanks to visit codestin.com
Credit goes to github.com

Skip to content

Commit e348efd

Browse files
committed
minor fix
1 parent f358568 commit e348efd

File tree

18 files changed

+17321
-148
lines changed

18 files changed

+17321
-148
lines changed

caffe/.gitignore

Lines changed: 0 additions & 103 deletions
This file was deleted.

caffe/include/caffe/layers/jmmd_layer.hpp

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -60,6 +60,7 @@ class JMMDLossLayer : public LossLayer<Dtype> {
6060
Blob<Dtype> delta_;
6161
int label_kernel_num_;
6262
Dtype label_kernel_mul_;
63+
int train_iter_num_;
6364
};
6465

6566
} // namespace caffe

caffe/kmake.sh

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,4 @@
1+
cd src
2+
protoc ./caffe/proto/caffe.proto --cpp_out=../include/
3+
cd ..
4+
make -j48

caffe/models/DAN/alexnet/train_val.prototxt

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ layer {
55
top: "source_data"
66
top: "lp_labels"
77
image_data_param {
8-
source: "./data/office/webcam_list.txt"
8+
source: "./data/office/amazon_list.txt"
99
batch_size: 64
1010
shuffle: true
1111
new_height: 256
@@ -24,7 +24,7 @@ layer {
2424
top: "target_data"
2525
top: "target_label"
2626
image_data_param {
27-
source: "./data/office/amazon_list.txt"
27+
source: "./data/office/webcam_list.txt"
2828
batch_size: 64
2929
shuffle: true
3030
new_height: 256
@@ -49,7 +49,7 @@ layer {
4949
top: "data"
5050
top: "lp_labels"
5151
image_data_param {
52-
source: "./data/office/amazon_list.txt"
52+
source: "./data/office/webcam_list.txt"
5353
batch_size: 1
5454
shuffle: false
5555
new_height: 256

caffe/models/JAN/alexnet/solver.prototxt

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
net: "models/JAN/alexnet/train_val.prototxt"
2-
test_iter: 2817
2+
test_iter: 795 # target domain: amazon 2817, webcam 795, dslr 498
33
test_interval: 500
44
base_lr: 0.001
55
lr_policy: "inv"
@@ -12,4 +12,5 @@ weight_decay: 0.0005
1212
snapshot: 60000
1313
snapshot_prefix: "models/JAN/alexnet/trained_model"
1414
snapshot_after_train: false
15-
solver_mode: GPU
15+
solver_mode: GPU
16+

caffe/models/JAN/alexnet/train_val.prototxt

Lines changed: 44 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -5,15 +5,15 @@ layer {
55
top: "source_data"
66
top: "source_label"
77
image_data_param {
8-
source: "./data/office/webcam_list.txt"
8+
source: "./data/office/amazon_list.txt"
99
batch_size: 64
1010
new_height: 256
1111
new_width: 256
1212
shuffle: true
1313
}
1414
transform_param {
1515
crop_size: 227
16-
mirror: true
16+
mirror: true
1717
mean_file: "./data/ilsvrc12/imagenet_mean.binaryproto"
1818
}
1919
include: { phase: TRAIN }
@@ -24,7 +24,7 @@ layer {
2424
top: "target_data"
2525
top: "target_label"
2626
image_data_param {
27-
source: "./data/office/amazon_list.txt"
27+
source: "./data/office/webcam_list.txt"
2828
batch_size: 64
2929
new_height: 256
3030
new_width: 256
@@ -49,7 +49,7 @@ layer {
4949
top: "data"
5050
top: "label"
5151
image_data_param {
52-
source: "./data/office/amazon_list.txt"
52+
source: "./data/office/webcam_list.txt"
5353
batch_size: 1
5454
new_height: 256
5555
new_width: 256
@@ -381,7 +381,7 @@ layer {
381381
type: "InnerProduct"
382382
bottom: "fc7"
383383
top: "fc8"
384-
# blobs_lr is set to higher than for other layers, because this layer is starting from random while the others are already trained
384+
# blobs_lr is set higher than at other layers, as this layer is trained from random weights while the others are fine-tuned
385385
param {
386386
lr_mult: 1
387387
decay_mult: 1
@@ -421,12 +421,6 @@ layer {
421421
}
422422
include: { phase: TRAIN }
423423
}
424-
layer {
425-
name: "silence"
426-
type: "Silence"
427-
bottom: "fc8_target"
428-
include: { phase: TRAIN }
429-
}
430424
layer {
431425
name: "softmax_loss"
432426
type: "SoftmaxWithLoss"
@@ -435,24 +429,19 @@ layer {
435429
top: "softmax_loss"
436430
include: { phase: TRAIN }
437431
}
432+
layer {
433+
name: "silence_fc8_target"
434+
type: "Silence"
435+
bottom: "fc8_target"
436+
include: { phase: TRAIN }
437+
}
438438
layer {
439439
name: "fc8_softmax"
440440
type: "Softmax"
441441
bottom: "fc8"
442442
top: "fc8_softmax"
443443
include: { phase: TRAIN }
444444
}
445-
layer {
446-
name: "slice_fc7"
447-
type: "Slice"
448-
bottom: "fc7"
449-
top: "fc7_source"
450-
top: "fc7_target"
451-
slice_param {
452-
slice_dim: 0
453-
}
454-
include: { phase: TRAIN }
455-
}
456445
layer {
457446
name: "slice_softmax"
458447
type: "Slice"
@@ -467,18 +456,46 @@ layer {
467456
layer {
468457
name: "jmmd_loss"
469458
type: "JMMDLoss"
470-
bottom: "fc7_source"
471-
bottom: "fc7_target"
472459
bottom: "source_softmax"
473460
bottom: "target_softmax"
474-
loss_weight: 0.3 #best 0.3 for our tasks, can be tuned within [0.1, 1.0]
461+
bottom: "source_softmax"
462+
bottom: "target_softmax"
463+
loss_weight: 0.3 # best 0.3 for our tasks, can be tuned within [0.1, 1.0]
475464
top: "jmmd_loss"
476465
include: { phase: TRAIN }
477466
}
478467
layer {
479-
name: "silence_loss_value"
468+
name: "silence_jmmd_loss"
480469
type: "Silence"
481470
bottom: "jmmd_loss"
482471
include: { phase: TRAIN }
483-
# we don't calculate jmmd loss value in our code
484472
}
473+
# The following three layers could be added for better performance
474+
#layer {
475+
# name: "slice_fc7"
476+
# type: "Slice"
477+
# bottom: "fc7"
478+
# top: "fc7_source"
479+
# top: "fc7_target"
480+
# slice_param {
481+
# slice_dim: 0
482+
# }
483+
# include: { phase: TRAIN }
484+
#}
485+
#layer {
486+
# name: "jmmd_loss_fc7"
487+
# type: "JMMDLoss"
488+
# bottom: "fc7_source"
489+
# bottom: "fc7_target"
490+
# bottom: "source_softmax"
491+
# bottom: "target_softmax"
492+
# loss_weight: 0.3 # best 0.3 for our tasks, can be tuned within [0.1, 1.0]
493+
# top: "jmmd_loss_fc7"
494+
# include: { phase: TRAIN }
495+
#}
496+
#layer {
497+
# name: "silence_jmmd_loss_fc7"
498+
# type: "Silence"
499+
# bottom: "jmmd_loss_fc7"
500+
# include: { phase: TRAIN }
501+
#}

caffe/models/JAN/resnet/solver.prototxt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
net: "models/JAN/resnet/train_val.prototxt"
2-
test_iter: 2817
2+
test_iter: 795 # target domain: amazon 2817, webcam 795, dslr 498
33
test_interval: 500
44
base_lr: 0.0003
55
lr_policy: "inv"

caffe/models/JAN/resnet/train_val.prototxt

Lines changed: 5 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ layer {
55
top: "source_data"
66
top: "source_label"
77
image_data_param {
8-
source: "./data/office/webcam_list.txt"
8+
source: "./data/office/amazon_list.txt"
99
batch_size: 16
1010
new_height: 224
1111
new_width: 224
@@ -24,7 +24,7 @@ layer {
2424
top: "target_data"
2525
top: "target_label"
2626
image_data_param {
27-
source: "./data/office/amazon_list.txt"
27+
source: "./data/office/webcam_list.txt"
2828
batch_size: 16
2929
new_height: 224
3030
new_width: 224
@@ -49,7 +49,7 @@ layer {
4949
top: "data"
5050
top: "label"
5151
image_data_param {
52-
source: "./data/office/amazon_list.txt"
52+
source: "./data/office/webcam_list.txt"
5353
batch_size: 1
5454
new_height: 224
5555
new_width: 224
@@ -2400,7 +2400,7 @@ layer {
24002400
type: "InnerProduct"
24012401
bottom: "bottleneck"
24022402
top: "fc8"
2403-
# blobs_lr is set to higher than for other layers, because this layer is starting from random while the others are already trained
2403+
# blobs_lr is set higher than for other layers, as this layer is trained from random weights while the others are fine-tuned
24042404
param {
24052405
lr_mult: 1
24062406
decay_mult: 1
@@ -2490,7 +2490,7 @@ layer {
24902490
bottom: "bottleneck_target"
24912491
bottom: "source_softmax"
24922492
bottom: "target_softmax"
2493-
loss_weight: 0.3 #best 0.3 for our tasks, can be tuned within [0.1, 1.0]
2493+
loss_weight: 0.3 # best 0.3 for our tasks, can be tuned within [0.1, 1.0]
24942494
top: "jmmd_loss"
24952495
include: { phase: TRAIN }
24962496
}
@@ -2499,5 +2499,4 @@ layer {
24992499
type: "Silence"
25002500
bottom: "jmmd_loss"
25012501
include: { phase: TRAIN }
2502-
# we don't calculate jmmd loss value in our code
25032502
}

caffe/models/RTN/alexnet/solver.prototxt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
net: "./models/RTN/alexnet/train_val.prototxt"
2-
test_iter: 2817
2+
test_iter: 795 # target domain: amazon 2817, webcam 795, dslr 498
33
test_interval: 500
44
base_lr: 0.001
55
momentum: 0.9

caffe/models/RTN/alexnet/train_val.prototxt

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,11 @@
1-
name: "outer_product_amazon_to_webcam"
1+
name: "amazon_to_webcam"
22
layer {
33
name: "source_data"
44
type: "ImageData"
55
top: "source_data"
66
top: "lp_labels"
77
image_data_param {
8-
source: "./data/office/webcam_list.txt"
8+
source: "./data/office/amazon_list.txt"
99
batch_size: 64
1010
shuffle: true
1111
new_height: 256
@@ -24,7 +24,7 @@ layer {
2424
top: "target_data"
2525
top: "target_label"
2626
image_data_param {
27-
source: "./data/office/amazon_list.txt"
27+
source: "./data/office/webcam_list.txt"
2828
batch_size: 64
2929
shuffle: true
3030
new_height: 256
@@ -43,7 +43,7 @@ layer {
4343
top: "data"
4444
top: "lp_labels"
4545
image_data_param {
46-
source: "./data/office/amazon_list.txt"
46+
source: "./data/office/webcam_list.txt"
4747
batch_size: 1
4848
shuffle: true
4949
new_height: 256

0 commit comments

Comments
 (0)