From e18af4ad0d5a451e3b3721f037724b2b65631635 Mon Sep 17 00:00:00 2001
From: chenliu-wustl <31680460+chenliu-wustl@users.noreply.github.com>
Date: Thu, 21 Feb 2019 06:31:53 +0800
Subject: [PATCH 1/3] Update README.md

---
 README.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/README.md b/README.md
index 07c33e3..b29c21d 100644
--- a/README.md
+++ b/README.md
@@ -6,6 +6,7 @@ This is the PyTorch implementation for our [technical report](https://arxiv.org/
 ```
 pip install -r requirements.txt
 ```
+We are using Python 3.5.2. And as pointed out by [Issue #3](https://github.com/art-programmer/MASC/issues/3), please consider using Python 3.6 and refer to [SparseConvNet](https://github.com/facebookresearch/SparseConvNet) for related issues.
 
 ## Data preparation
 To prepare training data from ScanNet mesh models, please run:

From a70cfa23e80a9787f057fd2c4bd0ba68f26a35c0 Mon Sep 17 00:00:00 2001
From: art-programmer
Date: Mon, 25 Feb 2019 14:58:06 +0800
Subject: [PATCH 2/3] Update inference.py

---
 inference.py | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)

diff --git a/inference.py b/inference.py
index 6ba5d33..31f76c1 100644
--- a/inference.py
+++ b/inference.py
@@ -187,11 +187,7 @@ def write(filename, model, validator, test_dir, num_scales, augment=True, num_cr
     filenames = []
 
     split = options.split
-    if split == 'test':
-        data_folder = '/gruvi/Data/chenliu/ScanNet/scans_test/'
-    else:
-        data_folder = '/gruvi/Data/chenliu/ScanNet/scans/'
-        pass
+    data_folder = options.dataFolder
 
     test_dir = 'test/' + options.keyname + '/inference/' + split + '/'
     if not os.path.exists(test_dir):

From 05204159c48941d21bb4d6b06c282bf4a9312f23 Mon Sep 17 00:00:00 2001
From: art-programmer
Date: Thu, 14 Mar 2019 21:22:25 -0700
Subject: [PATCH 3/3] Update README.md

---
 README.md | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/README.md b/README.md
index b29c21d..d2011a9 100644
--- a/README.md
+++ b/README.md
@@ -11,7 +11,7 @@ We are using Python 3.5.2. And as pointed out by [Issue #3](https://github.com/a
 ## Data preparation
 To prepare training data from ScanNet mesh models, please run:
 ```
-python train.py --task=prepare --dataFolder=[SCANNET_PATH] ----labelFile=[SCANNET_LABEL_FILE_PATH (i.e., scannetv2-labels.combined.tsv)]
+python train.py --task=prepare --dataFolder=[SCANNET_PATH] --labelFile=[SCANNET_LABEL_FILE_PATH (i.e., scannetv2-labels.combined.tsv)]
 ```
 
 ## Training
@@ -43,13 +43,13 @@ The "task" option can contain any combinations of these three tasks, but the ear
 ## Write results for the final evaluation
 To train the instance confidence model, please first generate the instance segmentation results:
 ```
-python inference.py --dataFolder=[SCANNET_PATH] --task=predict_cluster split=val
-python inference.py --dataFolder=[SCANNET_PATH] --task=predict_cluster split=train
+python inference.py --dataFolder=[SCANNET_PATH] --task=predict_cluster --split=val
+python inference.py --dataFolder=[SCANNET_PATH] --task=predict_cluster --split=train
```
 
 Then train the confidence model:
 ```
-python train.py --restore=0 --dataFolder=[SCANNET_PATH]
+python train_confidence.py --restore=0 --dataFolder=[SCANNET_PATH]
 ```
 
 Predict instance confidence, add additional instances for certain semantic labels, and write instance segmentation results:
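
Note on PATCH 2/3: after it is applied, inference.py reads the scan directory from `options.dataFolder` instead of the hard-coded `/gruvi/...` paths. A minimal, hypothetical sketch of how such an option could be exposed with argparse is shown below; this is illustrative only and is not the repository's actual options code, whose flags beyond `--dataFolder`, `--split`, `--task`, and `--keyname` (all visible in the diffs above) are assumptions.

```python
import argparse

# Hypothetical sketch only: MASC's real option parsing may differ.
parser = argparse.ArgumentParser(description='MASC inference (illustrative)')
parser.add_argument('--dataFolder', required=True,
                    help='path to the ScanNet scans directory ([SCANNET_PATH])')
parser.add_argument('--split', default='val', choices=['train', 'val', 'test'],
                    help='dataset split to run inference on')
parser.add_argument('--task', default='predict_cluster')
parser.add_argument('--keyname', default='MASC')
options = parser.parse_args()

# With PATCH 2/3 applied, inference.py uses this value directly
# instead of the previously hard-coded /gruvi/... paths.
data_folder = options.dataFolder
```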