-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathcore.py
128 lines (102 loc) · 3.39 KB
/
core.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
import os
import logging
from torch.utils import data
import numpy as np
import yaml
import torch
class VoxelDataset(data.Dataset):
    '''Dataset of voxel representations of simple shapes (spheres, pens, cubes).

    Every sample is a single ``.npz`` file — the first file found in each
    non-empty subdirectory of ``dataset_folder`` — expected to contain the
    keys ``voxel``, ``occ``, ``points``, ``size``, ``yaw_pitch_roll`` and
    ``transl``.
    '''

    def __init__(self, dataset_folder, return_idx=False, n_sample_points=5000):
        ''' Collects the sample file paths.

        Args:
            dataset_folder (str): root folder, walked recursively; the first
                file of every non-empty directory becomes one sample.
            return_idx (bool): kept for interface compatibility; the sample
                index is currently always included in the returned dict.
            n_sample_points (int): number of query points drawn (with
                replacement) per item for the 'points' / 'points.occ' fields.
        '''
        print("Init Dataset")
        self.return_idx = return_idx
        self.n_sample_points = n_sample_points
        self.sample_paths = []
        for root, dirs, files in os.walk(dataset_folder):
            if not files:
                continue
            # Only the first file of each directory is used as a sample.
            self.sample_paths.append(os.path.join(root, files[0]))
        self.sample_paths = sorted(self.sample_paths)
        self.len = len(self.sample_paths)

    def __len__(self):
        ''' Returns the number of samples in the dataset.
        '''
        return self.len

    def __getitem__(self, idx):
        ''' Returns one data point.

        Args:
            idx (int): ID of data point

        Returns:
            dict: voxel input, a random subset of the query points with their
            occupancies, the full IoU point set, and the pose metadata
            (size, yaw/pitch/roll, translation).
        '''
        sample = np.load(self.sample_paths[idx])
        points_iou = sample['points']
        points_iou_occ = sample['occ']
        ypr = sample['yaw_pitch_roll']

        # Random subset (with replacement) of the query points for training.
        # Uses the worker-seeded global NumPy RNG (see worker_init_fn).
        sel = np.random.choice(points_iou.shape[0], self.n_sample_points)
        points = points_iou[sel]
        occ = points_iou_occ[sel]

        # NOTE: ``np.float`` was removed in NumPy 1.24 — use an explicit dtype.
        inputs = np.asarray(sample['voxel'], dtype=np.float64)

        return {
            'inputs': torch.tensor(inputs, dtype=torch.float32),
            'points': torch.tensor(points, dtype=torch.float32),
            'points.occ': torch.tensor(occ, dtype=torch.float32),
            'points_iou': torch.tensor(points_iou, dtype=torch.float32),
            'points_iou.occ': torch.tensor(points_iou_occ, dtype=torch.float32),
            'idx': idx,
            'size': sample['size'],
            'yaw': ypr[0],
            'pitch': ypr[1],
            'roll': ypr[2],
            'transl': sample['transl'],
        }
def collate_remove_none(batch):
    ''' Collate function that drops ``None`` entries before delegating to
    PyTorch's default collation.

    Args:
        batch (list): list of samples, possibly containing ``None`` entries
            (e.g. from samples that failed to load).
    '''
    valid_samples = [sample for sample in batch if sample is not None]
    return data.dataloader.default_collate(valid_samples)
def collate_to_numpy(batch):
    ''' Collate function that drops ``None`` entries and stacks the rest
    into a NumPy array.

    Args:
        batch (list): list of samples, possibly containing ``None`` entries.
    '''
    valid_samples = [sample for sample in batch if sample is not None]
    return np.array(valid_samples)
def worker_init_fn(worker_id):
    ''' DataLoader worker init hook: give each worker its own NumPy seed.

    Draws fresh OS entropy so forked workers do not share an RNG stream,
    and offsets by the worker id so concurrently started workers differ.

    Args:
        worker_id (int): id of the DataLoader worker process.
    '''
    entropy = int.from_bytes(os.urandom(4), byteorder="big")
    np.random.seed(entropy + worker_id)