Commit e9b35a2

Author: csguestp
Commit message: clean up
1 parent e346e21 · commit e9b35a2

10 files changed: +1750 −1066 lines

datasets/scannet_dataset.py (+17 −25)
@@ -10,6 +10,7 @@
 blur1=np.ones((1,3,1)).astype('float32')/3
 blur2=np.ones((1,1,3)).astype('float32')/3
 
+## Elastic augmentation from https://github.com/facebookresearch/SparseConvNet/blob/master/examples/ScanNet/data.py
 def elastic(x,gran,mag):
     bb=np.abs(x).max(0).astype(np.int32)//gran+3
     noise=[np.random.randn(bb[0],bb[1],bb[2]).astype('float32') for _ in range(3)]
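
Note: the elastic augmentation named in the new comment displaces every point by band-limited noise. A minimal self-contained sketch of the idea, following the linked SparseConvNet example (the helper name and the exact blur loop here are this note's own, not part of the commit):

import numpy as np
import scipy.interpolate
import scipy.ndimage

def elastic_sketch(x, gran, mag):
    """Displace points x (N, 3, float) by smooth noise: draw Gaussian noise on a
    coarse grid of granularity gran, blur it, interpolate it at each point, and
    add the result scaled by mag."""
    blur = [np.ones((3, 1, 1), np.float32) / 3,
            np.ones((1, 3, 1), np.float32) / 3,
            np.ones((1, 1, 3), np.float32) / 3]
    bb = np.abs(x).max(0).astype(np.int32) // gran + 3
    noise = [np.random.randn(bb[0], bb[1], bb[2]).astype(np.float32) for _ in range(3)]
    for _ in range(2):  # two rounds of separable box blur along each axis
        for b in blur:
            noise = [scipy.ndimage.convolve(n, b, mode='constant', cval=0) for n in noise]
    ax = [np.linspace(-(b - 1) * gran, (b - 1) * gran, b) for b in bb]
    interp = [scipy.interpolate.RegularGridInterpolator(ax, n, bounds_error=False, fill_value=0)
              for n in noise]
    return x + np.hstack([i(x)[:, None] for i in interp]) * mag

The commented-out call further down in __getitem__ (elastic(coords, 20 * scale // 50, 160 * scale / 50)) shows the granularity and magnitude the authors tie to the voxel scale.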
@@ -27,8 +28,8 @@ def g(x_):
     noise = g(x)
     return x+g(x)*mag
 
-## ScanNet dataset class
 class ScanNetDataset(Dataset):
+    """ ScanNet data loader """
     def __init__(self, options, split, load_confidence=False, random=True):
         self.options = options
         self.split = split
@@ -37,19 +38,23 @@ def __init__(self, options, split, load_confidence=False, random=True):
         self.dataFolder = options.dataFolder
         self.load_confidence = load_confidence
 
-        with open('split_' + split + '.txt', 'r') as f:
+        with open('datasets/split_' + split + '.txt', 'r') as f:
             for line in f:
                 scene_id = line.strip()
                 if len(scene_id) < 5 or scene_id[:5] != 'scene':
                     continue
                 if options.scene_id != '' and options.scene_id not in scene_id:
                     continue
+                if load_confidence:
+                    confidence_filename = options.test_dir + '/inference/' + split + '/cache/' + scene_id + '.pth'
+                    if not os.path.exists(confidence_filename):
+                        continue
+                    pass
                 filename = self.dataFolder + '/' + scene_id + '/' + scene_id + '_vh_clean_2.pth'
                 if os.path.exists(filename):
                     info = torch.load(filename)
                     if len(info) == 5:
                         self.imagePaths.append(filename)
-
                         #np.savetxt('semantic_val/' + scene_id + '.txt', info[2], fmt='%d')
                         pass
                     pass
@@ -58,7 +63,8 @@ def __init__(self, options, split, load_confidence=False, random=True):
                 continue
             pass
 
-        #self.imagePaths = [filename for filename in self.imagePaths if 'scene0217_00' in filename]
+        #self.imagePaths = [filename for filename in self.imagePaths if 'scene0217_00' in filename]
+        print('the number of images', split, len(self.imagePaths))
 
         if options.numTrainingImages > 0 and split == 'train':
             self.numImages = options.numTrainingImages
@@ -89,15 +95,10 @@ def __getitem__(self, index):
             pass
 
         coords, colors, labels, instances, faces = torch.load(self.imagePaths[index])
-        invalid_instances, = torch.load(self.imagePaths[index].replace('.pth', '_invalid.pth'))
+        #invalid_instances, = torch.load(self.imagePaths[index].replace('.pth', '_invalid.pth'))
 
         labels = remapper[labels]
 
-        #neighbor_gt = torch.load(self.imagePaths[index].replace('.pth', '_neighbor.pth'))
-        #print(neighbor_gt[0])
-        #exit(1)
-        #neighbor_gt = 1
-        #print(coords.min(0), coords.max(0))
         if self.split == 'train':
             m = np.eye(3) + np.random.randn(3,3) * 0.1
             m[0][0] *= np.random.randint(2) * 2 - 1
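
Note: the train-time branch starting here builds a random linear transform; the hunk is cut off, but the visible lines match the SparseConvNet ScanNet recipe (jittered identity plus a random x flip, typically followed by scaling and a random rotation about the vertical axis). A hedged sketch of that full recipe, with the scaling and rotation steps assumed from the upstream example rather than shown in this diff:

import numpy as np

def random_transform_sketch(coords, scale=50.0):
    """Apply a random train-time transform to coords (N, 3, float)."""
    m = np.eye(3) + np.random.randn(3, 3) * 0.1   # small random scale/shear jitter
    m[0][0] *= np.random.randint(2) * 2 - 1       # flip the x axis with probability 0.5
    m *= scale                                    # assumed: voxelization scale, as upstream
    theta = np.random.rand() * 2 * np.pi          # assumed: random rotation about the up axis
    rot = np.array([[np.cos(theta), np.sin(theta), 0],
                    [-np.sin(theta), np.cos(theta), 0],
                    [0, 0, 1]])
    return coords @ (m @ rot)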
@@ -117,6 +118,7 @@ def __getitem__(self, index):
             #coords = elastic(coords, 20 * scale // 50, 160 * scale / 50)
             pass
 
+        ## Load normals as input
         if 'normal' in self.options.suffix:
             points_1 = coords[faces[:, 0]]
             points_2 = coords[faces[:, 1]]
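
Note: the newly commented "Load normals as input" branch gathers the three vertices of every mesh face; the rest of the hunk is truncated here, but the natural continuation is a cross product per triangle. A sketch under that assumption (the function name and the epsilon guard are illustrative):

import numpy as np

def face_normals_sketch(coords, faces):
    """Per-face unit normals for a triangle mesh: coords (N, 3), faces (F, 3) int."""
    points_1 = coords[faces[:, 0]]
    points_2 = coords[faces[:, 1]]
    points_3 = coords[faces[:, 2]]
    normals = np.cross(points_2 - points_1, points_3 - points_1)
    norms = np.linalg.norm(normals, axis=-1, keepdims=True)
    return normals / np.maximum(norms, 1e-4)  # guard against degenerate faces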
@@ -134,10 +136,11 @@ def __getitem__(self, index):
         if self.split == 'train':
             colors[:, :3] = colors[:, :3] + np.random.randn(3) * 0.1
             pass
-
+
+        ## Load instance segmentation results to train the confidence prediction network
         if self.load_confidence:
             scene_id = self.imagePaths[index].split('/')[-1].split('_vh_clean_2')[0]
-            info = torch.load('test/output_normal_augment_2_' + self.split + '/cache/' + scene_id + '.pth')
+            info = torch.load(self.options.test_dir + '/inference/' + self.split + '/cache/' + scene_id + '.pth')
             if len(info) == 2:
                 semantic_pred, instance_pred = info
             else:
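
Note: this hunk redirects the confidence-training cache from a hard-coded 'test/output_normal_augment_2_…' folder to options.test_dir + '/inference/<split>/cache/<scene_id>.pth', matching the existence check added in __init__. A hypothetical sketch of the save/load round-trip implied by that layout (the writer side is not part of this commit, and both function names are this note's own):

import os
import torch

def save_predictions_sketch(test_dir, split, scene_id, semantic_pred, instance_pred):
    """An inference script would cache per-scene predictions like this."""
    cache_dir = os.path.join(test_dir, 'inference', split, 'cache')
    os.makedirs(cache_dir, exist_ok=True)
    torch.save((semantic_pred, instance_pred), os.path.join(cache_dir, scene_id + '.pth'))

def load_predictions_sketch(test_dir, split, scene_id):
    """The dataset reloads the cached tuple; a len-2 tuple is the simple case."""
    info = torch.load(os.path.join(test_dir, 'inference', split, 'cache', scene_id + '.pth'))
    if len(info) == 2:
        semantic_pred, instance_pred = info
        return semantic_pred, instance_pred
    return info  # longer tuples carry extra fields not shown in this hunk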
@@ -203,20 +206,9 @@ def __getitem__(self, index):
             pass
 
         coords = np.round(coords)
-        if False:
-            idxs = (coords.min(1) >= 0) * (coords.max(1) < full_scale)
-            coords = coords[idxs]
-            colors = colors[idxs]
-            labels = labels[idxs]
-            instances = instances[idxs]
-            invalid_instances = invalid_instances[idxs]
-        else:
-            #print(coords.min(0), coords.max(0))
-            #exit(1)
-            coords = np.clip(coords, 0, full_scale - 1)
-            pass
+        coords = np.clip(coords, 0, full_scale - 1)
 
         coords = np.concatenate([coords, np.full((coords.shape[0], 1), fill_value=index)], axis=-1)
         #coords = np.concatenate([coords, np.expand_dims(instances, -1)], axis=-1)
-        sample = [coords.astype(np.int64), colors.astype(np.float32), faces.astype(np.int64), labels.astype(np.int64), instances.astype(np.int64), invalid_instances.astype(np.int64), self.imagePaths[index]]
+        sample = [coords.astype(np.int64), colors.astype(np.float32), faces.astype(np.int64), labels.astype(np.int64), instances.astype(np.int64), self.imagePaths[index]]
        return sample
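
Note: after this cleanup, out-of-bounds points are clipped into the grid instead of dropped (the dead "if False" filter is gone), and invalid_instances no longer travels in the sample. The final coordinate layout, with a per-sample batch index appended as a fourth column, is what sparse-convolution inputs typically expect so that points from different scenes in one batch stay separable. A minimal sketch, assuming full_scale is the voxel grid resolution (4096 in the SparseConvNet ScanNet example):

import numpy as np

def prepare_coords_sketch(coords, index, full_scale=4096):
    """Round, clip, and tag coords (N, 3, float) with the sample index."""
    coords = np.round(coords)
    coords = np.clip(coords, 0, full_scale - 1)             # keep every point inside the grid
    batch_column = np.full((coords.shape[0], 1), fill_value=index)
    return np.concatenate([coords, batch_column], axis=-1).astype(np.int64)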

datasets/split_test.txt (+101)

@@ -0,0 +1,101 @@
+scene0707_00
+scene0708_00
+scene0709_00
+scene0710_00
+scene0711_00
+scene0712_00
+scene0713_00
+scene0714_00
+scene0715_00
+scene0716_00
+scene0717_00
+scene0718_00
+scene0719_00
+scene0720_00
+scene0721_00
+scene0722_00
+scene0723_00
+scene0724_00
+scene0725_00
+scene0726_00
+scene0727_00
+scene0728_00
+scene0729_00
+scene0730_00
+scene0731_00
+scene0732_00
+scene0733_00
+scene0734_00
+scene0735_00
+scene0736_00
+scene0737_00
+scene0738_00
+scene0739_00
+scene0740_00
+scene0741_00
+scene0742_00
+scene0743_00
+scene0744_00
+scene0745_00
+scene0746_00
+scene0747_00
+scene0748_00
+scene0749_00
+scene0750_00
+scene0751_00
+scene0752_00
+scene0753_00
+scene0754_00
+scene0755_00
+scene0756_00
+scene0757_00
+scene0758_00
+scene0759_00
+scene0760_00
+scene0761_00
+scene0762_00
+scene0763_00
+scene0764_00
+scene0765_00
+scene0766_00
+scene0767_00
+scene0768_00
+scene0769_00
+scene0770_00
+scene0771_00
+scene0772_00
+scene0773_00
+scene0774_00
+scene0775_00
+scene0776_00
+scene0777_00
+scene0778_00
+scene0779_00
+scene0780_00
+scene0781_00
+scene0782_00
+scene0783_00
+scene0784_00
+scene0785_00
+scene0786_00
+scene0787_00
+scene0788_00
+scene0789_00
+scene0790_00
+scene0791_00
+scene0792_00
+scene0793_00
+scene0794_00
+scene0795_00
+scene0796_00
+scene0797_00
+scene0798_00
+scene0799_00
+scene0800_00
+scene0801_00
+scene0802_00
+scene0803_00
+scene0804_00
+scene0805_00
+scene0806_00
+