+import argparse
+import os
+from operator import itemgetter
+
 import numpy as np
-import os, glob, argparse
 import torch
-from operator import itemgetter
-import cv2
-import glob

+# yapf:disable
 COLOR_DETECTRON2 = np.array(
     [
         0.000, 0.447, 0.741,
         0.857, 0.857, 0.857,
         # 1.000, 1.000, 1.000
     ]).astype(np.float32).reshape(-1, 3) * 255
+# yapf:enable
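 # Detectron2-style palette (RGB rows scaled to 0-255); the instance branches below
 # pick a row per instance via COLOR_DETECTRON2[idx % len(COLOR_DETECTRON2)].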

 SEMANTIC_IDXS = np.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 14, 16, 24, 28, 33, 34, 36, 39])
-SEMANTIC_NAMES = np.array(['wall', 'floor', 'cabinet', 'bed', 'chair', 'sofa', 'table', 'door', 'window', 'bookshelf', 'picture', 'counter',
-                           'desk', 'curtain', 'refridgerator', 'shower curtain', 'toilet', 'sink', 'bathtub', 'otherfurniture'])
+SEMANTIC_NAMES = np.array([
+    'wall', 'floor', 'cabinet', 'bed', 'chair', 'sofa', 'table', 'door', 'window', 'bookshelf',
+    'picture', 'counter', 'desk', 'curtain', 'refridgerator', 'shower curtain', 'toilet', 'sink',
+    'bathtub', 'otherfurniture'
+])
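 # The 20 names above follow the order of SEMANTIC_IDXS and double as the keys of CLASS_COLOR.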
 CLASS_COLOR = {
     'unannotated': [0, 0, 0],
     'floor': [143, 223, 142],
     'sink': [110, 128, 143],
     'otherfurniture': [80, 83, 160]
 }
-SEMANTIC_IDX2NAME = {1: 'wall', 2: 'floor', 3: 'cabinet', 4: 'bed', 5: 'chair', 6: 'sofa', 7: 'table', 8: 'door', 9: 'window', 10: 'bookshelf', 11: 'picture',
-                     12: 'counter', 14: 'desk', 16: 'curtain', 24: 'refridgerator', 28: 'shower curtain', 33: 'toilet', 34: 'sink', 36: 'bathtub', 39: 'otherfurniture'}
+SEMANTIC_IDX2NAME = {
+    1: 'wall',
+    2: 'floor',
+    3: 'cabinet',
+    4: 'bed',
+    5: 'chair',
+    6: 'sofa',
+    7: 'table',
+    8: 'door',
+    9: 'window',
+    10: 'bookshelf',
+    11: 'picture',
+    12: 'counter',
+    14: 'desk',
+    16: 'curtain',
+    24: 'refridgerator',
+    28: 'shower curtain',
+    33: 'toilet',
+    34: 'sink',
+    36: 'bathtub',
+    39: 'otherfurniture'
+}
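 # SEMANTIC_IDX2NAME maps raw ScanNet label ids back to class names; the instance_pred
 # branch uses it to print the predicted class of each mask.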


 def get_coords_color(opt):
     if opt.dataset == 's3dis':
         assert opt.data_split in ['Area_1', 'Area_2', 'Area_3', 'Area_4', 'Area_5', 'Area_6'],\
             'data_split for s3dis should be one of [Area_1, Area_2, Area_3, Area_4, Area_5, Area_6]'
-        input_file = os.path.join('dataset', opt.dataset, 'preprocess', opt.room_name + '_inst_nostuff.pth')
+        input_file = os.path.join('dataset', opt.dataset, 'preprocess',
+                                  opt.room_name + '_inst_nostuff.pth')
         assert os.path.isfile(input_file), 'File not exist - {}.'.format(input_file)
         xyz, rgb, label, inst_label, _, _ = torch.load(input_file)
         # update variable to match scannet format
         opt.data_split = os.path.join('val', opt.data_split)
     else:
-        input_file = os.path.join('dataset', opt.dataset, opt.data_split, opt.room_name + '_inst_nostuff.pth')
+        input_file = os.path.join('dataset', opt.dataset, opt.data_split,
+                                  opt.room_name + '_inst_nostuff.pth')
         assert os.path.isfile(input_file), 'File not exist - {}.'.format(input_file)
         if opt.data_split == 'test':
             xyz, rgb = torch.load(input_file)
@@ -136,26 +163,30 @@ def get_coords_color(opt):
         assert opt.data_split != 'test'
         label = label.astype(np.int)
         label_rgb = np.zeros(rgb.shape)
-        label_rgb[label >= 0] = np.array(itemgetter(*SEMANTIC_NAMES[label[label >= 0]])(CLASS_COLOR))
+        label_rgb[label >= 0] = np.array(
+            itemgetter(*SEMANTIC_NAMES[label[label >= 0]])(CLASS_COLOR))
         rgb = label_rgb

     elif (opt.task == 'semantic_pred'):
         assert opt.data_split != 'train'
-        semantic_file = os.path.join(opt.prediction_path, opt.data_split, 'semantic', opt.room_name + '.npy')
+        semantic_file = os.path.join(opt.prediction_path, opt.data_split, 'semantic',
+                                     opt.room_name + '.npy')
         assert os.path.isfile(semantic_file), 'No semantic result - {}.'.format(semantic_file)
         label_pred = np.load(semantic_file).astype(np.int)  # 0~19
         label_pred_rgb = np.array(itemgetter(*SEMANTIC_NAMES[label_pred])(CLASS_COLOR))
         rgb = label_pred_rgb

     elif (opt.task == 'offset_semantic_pred'):
         assert opt.data_split != 'train'
-        semantic_file = os.path.join(opt.prediction_path, opt.data_split, 'semantic', opt.room_name + '.npy')
+        semantic_file = os.path.join(opt.prediction_path, opt.data_split, 'semantic',
+                                     opt.room_name + '.npy')
         assert os.path.isfile(semantic_file), 'No semantic result - {}.'.format(semantic_file)
         label_pred = np.load(semantic_file).astype(np.int)  # 0~19
         label_pred_rgb = np.array(itemgetter(*SEMANTIC_NAMES[label_pred])(CLASS_COLOR))
         rgb = label_pred_rgb

-        offset_file = os.path.join(opt.prediction_path, opt.data_split, 'coords_offsets', opt.room_name + '.npy')
+        offset_file = os.path.join(opt.prediction_path, opt.data_split, 'coords_offsets',
+                                   opt.room_name + '.npy')
         assert os.path.isfile(offset_file), 'No offset result - {}.'.format(offset_file)
         offset_coords = np.load(offset_file)
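         # columns 0-2 hold the point coordinates and columns 3-5 the predicted offsets,
         # so each point is shifted by its predicted offset before visualization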
         xyz = offset_coords[:, :3] + offset_coords[:, 3:]
@@ -164,16 +195,16 @@ def get_coords_color(opt):
     elif (opt.task == 'instance_gt'):
         assert opt.data_split != 'test'
         inst_label = inst_label.astype(np.int)
-        print("Instance number: {}".format(inst_label.max() + 1))
+        print('Instance number: {}'.format(inst_label.max() + 1))
         inst_label_rgb = np.zeros(rgb.shape)
-        object_idx = (inst_label >= 0)
         ins_num = inst_label.max() + 1
         ins_pointnum = np.zeros(ins_num)
         for _ins_id in range(ins_num):
             ins_pointnum[_ins_id] = (inst_label == _ins_id).sum()
         sort_idx = np.argsort(ins_pointnum)[::-1]
         for _sort_id in range(ins_num):
-            inst_label_rgb[inst_label == sort_idx[_sort_id] ] = COLOR_DETECTRON2[_sort_id % len(COLOR_DETECTRON2)]
+            inst_label_rgb[inst_label == sort_idx[_sort_id]] = COLOR_DETECTRON2[
+                _sort_id % len(COLOR_DETECTRON2)]
         rgb = inst_label_rgb

     # same color order according to instance pointnum
@@ -201,17 +232,19 @@ def get_coords_color(opt):
                 continue
             mask = np.loadtxt(mask_path).astype(np.int)
             if opt.dataset == 'scannet':
-                print('{} {}: {} pointnum: {}'.format(i, masks[i], SEMANTIC_IDX2NAME[int(masks[i][1])], mask.sum()))
+                print('{} {}: {} pointnum: {}'.format(i,
+                                                      masks[i], SEMANTIC_IDX2NAME[int(masks[i][1])],
+                                                      mask.sum()))
             else:
                 print('{} {}: pointnum: {}'.format(i, masks[i], mask.sum()))
             ins_pointnum[i] = mask.sum()
-            inst_label[mask == 1] = i
+            inst_label[mask == 1] = i
         sort_idx = np.argsort(ins_pointnum)[::-1]
         for _sort_id in range(ins_num):
-            inst_label_pred_rgb[inst_label == sort_idx[_sort_id] ] = COLOR_DETECTRON2[_sort_id % len(COLOR_DETECTRON2)]
+            inst_label_pred_rgb[inst_label == sort_idx[_sort_id]] = COLOR_DETECTRON2[
+                _sort_id % len(COLOR_DETECTRON2)]
         rgb = inst_label_pred_rgb

-
     if opt.data_split != 'test':
         sem_valid = (label != -100)
         xyz = xyz[sem_valid]
@@ -240,23 +273,33 @@ def write_ply(verts, colors, indices, output_file):
     file.write('property list uchar uint vertex_indices\n')
     file.write('end_header\n')
     for vert, color in zip(verts, colors):
-        file.write('{:f} {:f} {:f} {:d} {:d} {:d}\n'.format(
-            vert[0], vert[1], vert[2], int(color[0] * 255),
-            int(color[1] * 255), int(color[2] * 255)))
+        file.write('{:f} {:f} {:f} {:d} {:d} {:d}\n'.format(vert[0], vert[1], vert[2],
+                                                            int(color[0] * 255),
+                                                            int(color[1] * 255),
+                                                            int(color[2] * 255)))
     for ind in indices:
         file.write('3 {:d} {:d} {:d}\n'.format(ind[0], ind[1], ind[2]))
     file.close()


 if __name__ == '__main__':
     parser = argparse.ArgumentParser()
-    parser.add_argument('--dataset', choices=['scannet', 's3dis'], help='dataset for visualization', default='scannet')
-    parser.add_argument('--prediction_path', help='path to the prediction results',
-                        default='./exp/scannetv2/softgroup/softgroup_default_scannet/result')
-    parser.add_argument('--data_split', help='train/val/test for scannet or Area_ID for s3dis', default='val')
+    parser.add_argument(
+        '--dataset',
+        choices=['scannet', 's3dis'],
+        help='dataset for visualization',
+        default='scannet')
+    parser.add_argument(
+        '--prediction_path',
+        help='path to the prediction results',
+        default='./exp/scannetv2/softgroup/softgroup_default_scannet/result')
+    parser.add_argument(
+        '--data_split', help='train/val/test for scannet or Area_ID for s3dis', default='val')
     parser.add_argument('--room_name', help='room_name', default='scene0011_00')
-    parser.add_argument('--task', help='input / semantic_gt / semantic_pred / offset_semantic_pred / instance_gt / instance_pred',
-                        default='instance_pred')
+    parser.add_argument(
+        '--task',
+        help='input/semantic_gt/semantic_pred/offset_semantic_pred/instance_gt/instance_pred',
+        default='instance_pred')
     parser.add_argument('--out', help='output point cloud file in FILE.ply format')
     opt = parser.parse_args()
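     # Example invocation (illustrative; assumes this file is saved as visualization.py and
     # that predictions already exist under --prediction_path):
     #   python visualization.py --dataset scannet --data_split val --room_name scene0011_00 \
     #       --task instance_pred --out scene0011_00.ply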