Commit

code commit

aboulch committed Jan 5, 2022
1 parent 72a69f7 commit 5ed80ca
Showing 193 changed files with 70,541 additions and 0 deletions.
5 changes: 5 additions & 0 deletions .gitignore
@@ -0,0 +1,5 @@
__pycache__
build
data
results
*.so
11 changes: 11 additions & 0 deletions configs/config_abc.yaml
@@ -0,0 +1,11 @@
dataset_name: ABC
dataset_root: data/3d_shapes_abc_training

manifold_points: 3000
non_manifold_points: 2000
iter_nbr: 600000
training_random_rotation_x: 180
training_random_rotation_y: 180
training_random_rotation_z: 180

val_interval: 5
45 changes: 45 additions & 0 deletions configs/config_default.yaml
@@ -0,0 +1,45 @@
experiment_name: null
dataset_name: null
dataset_root: null
save_dir: 'results'
train_split: 'training'
val_split: 'validation'
test_split: 'test'
filter_name: null

manifold_points: 2048
non_manifold_points: 2048
random_noise: null # 0.1
normals: False

#training
training_random_scale: null # 0.1
training_random_rotation_x: null # 180
training_random_rotation_y: null # 180
training_random_rotation_z: null # 180
training_batch_size: 16
training_iter_nbr: 100000
training_lr_start: 0.001

resume: false

network_backbone: FKAConv
network_latent_size: 32
network_decoder: InterpAttentionKHeadsNet
network_decoder_k: 64
network_n_labels: 2

device: "cuda"
threads: 8
log_mode: "no_log"
logging: INFO

val_num_mesh: null
val_interval: 1







8 changes: 8 additions & 0 deletions configs/config_shapenet.yaml
@@ -0,0 +1,8 @@
dataset_name: ShapeNet
dataset_root: data/ShapeNet

manifold_points: 3000
non_manifold_points: 2048
random_noise: 0.005

iter_nbr: 600000
12 changes: 12 additions & 0 deletions configs/config_synthetic.yaml
@@ -0,0 +1,12 @@
dataset_name: SyntheticRooms
dataset_root: data/synthetic_room_dataset

manifold_points: 10000
non_manifold_points: 2048
random_noise: 0.005

training_batch_size: 8
iter_nbr: 600000

val_interval: 5
val_num_mesh: 20
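
The dataset-specific files above set only the keys that differ from configs/config_default.yaml; the null entries in the default file mark values expected to be overridden per dataset. A minimal sketch of how such a two-level configuration could be merged (this loader is an illustration, not code from this commit):

import yaml

def load_config(dataset_config_path):
    # Start from the defaults, then overlay the dataset-specific values.
    with open("configs/config_default.yaml") as f:
        config = yaml.safe_load(f)
    with open(dataset_config_path) as f:
        config.update(yaml.safe_load(f))
    return config

config = load_config("configs/config_synthetic.yaml")  # e.g. the file above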
15 changes: 15 additions & 0 deletions datasets/__init__.py
@@ -0,0 +1,15 @@
from .shapenet import ShapeNet

from .synthetic_room import ShapeNetSyntheticRooms as SyntheticRooms

from .scenenet import SceneNet
from .scenenet import SceneNet as SceneNet20
from .scenenet import SceneNet as SceneNet100
from .scenenet import SceneNet as SceneNet500
from .scenenet import SceneNet as SceneNet1000

from .abc import ABCTrain as ABC
from .abc_test import ABCTest, ABCTestNoiseFree, ABCTestExtraNoise
from .real_world import RealWorld
from .famous_test import FamousTest, FamousTestNoiseFree, FamousTestExtraNoisy, FamousTestSparse, FamousTestDense
from .thingi10k_test import Thingi10kTest, Thingi10kTestNoiseFree, Thingi10kTestExtraNoisy, Thingi10kTestSparse, Thingi10kTestDense
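
The aliases above (e.g. SceneNet20 through SceneNet1000, all bound to SceneNet) let a configuration's dataset_name string be resolved directly to a class. A hypothetical lookup sketch (the get_dataset helper is not part of this commit):

import datasets

def get_dataset(config):
    # Resolve e.g. "ABC" or "SyntheticRooms" to the matching dataset class.
    dataset_class = getattr(datasets, config["dataset_name"])
    return dataset_class(root=config["dataset_root"],
                         num_non_manifold_points=config["non_manifold_points"])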
103 changes: 103 additions & 0 deletions datasets/abc.py
@@ -0,0 +1,103 @@
from torch_geometric.data import Dataset
from lightconvpoint.datasets.data import Data
import os
import numpy as np
import torch
import logging


class ABCTrain(Dataset):

    def __init__(self, root, split="training", transform=None, filter_name=None,
                 num_non_manifold_points=2048, dataset_size=None, **kwargs):

        super().__init__(root, transform, None)

        logging.info(f"Dataset - ABC Training - {split} - {dataset_size}")

        self.root = os.path.join(self.root, "abc_train")

        self.filenames = []
        if split in ["train", "training"]:
            split_file = os.path.join(self.root, "trainset.txt")
        elif split in ["val", "validation"]:
            split_file = os.path.join(self.root, "valset.txt")
        else:
            raise ValueError("Unknown split")

        with open(split_file) as f:
            content = f.readlines()
        content = [line.split("\n")[0] for line in content]  # strip trailing newlines
        content = [os.path.join(self.root, "04_pts", line) for line in content]
        self.filenames += content
        self.filenames.sort()

        if dataset_size is not None:
            self.filenames = self.filenames[:dataset_size]

        logging.info(f"Dataset - len {len(self.filenames)}")

    def get_category(self, f_id):
        return self.filenames[f_id].split("/")[-2]

    def get_object_name(self, f_id):
        return self.filenames[f_id].split("/")[-1]

    def get_class_name(self, f_id):
        # NOTE: assumes a `self.metadata` mapping populated elsewhere; it is
        # not defined in this class, so calling it as-is will raise.
        return self.metadata[self.get_category(f_id)]["name"]

    @property
    def raw_file_names(self):
        return []

    @property
    def processed_file_names(self):
        return []

    def _download(self):  # override _download to remove makedirs
        pass

    def download(self):
        pass

    def process(self):
        pass

    def _process(self):
        pass

    def len(self):
        return len(self.filenames)

    def get_data_for_evaluation(self, idx):
        # Evaluation data is not provided for this dataset.
        raise NotImplementedError
        # Unreachable reference code kept from the original:
        # filename = self.filenames[idx]
        # data_shape = np.load(os.path.join(filename, "pointcloud.npz"))
        # data_space = np.load(os.path.join(filename, "points.npz"))
        # return data_shape, data_space

    def get(self, idx):
        """Get item."""
        filename = self.filenames[idx]

        # Points sampled on the shape surface.
        pts_shp = np.load(filename + ".xyz.npy")

        # Query points in space around the shape.
        filename = filename.replace("04_pts", "05_query_pts")
        pts_space = np.load(filename + ".ply.npy")

        # Signed query distances, thresholded into binary occupancy labels.
        filename = filename.replace("05_query_pts", "05_query_dist")
        occupancies = np.load(filename + ".ply.npy")
        occupancies = (occupancies > 0).astype(np.int64)

        pts_shp = torch.tensor(pts_shp, dtype=torch.float)
        pts_space = torch.tensor(pts_space, dtype=torch.float)
        occupancies = torch.tensor(occupancies, dtype=torch.long)

        data = Data(x=torch.ones_like(pts_shp),
                    shape_id=idx,
                    pos=pts_shp,
                    pos_non_manifold=pts_space,
                    occupancies=occupancies)

        return data
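
A minimal usage sketch for ABCTrain (the paths and loader choice are illustrative assumptions; the Data objects follow the torch_geometric convention, so the standard PyG DataLoader should batch them):

from torch_geometric.loader import DataLoader
from datasets.abc import ABCTrain

dataset = ABCTrain("data/3d_shapes_abc_training", split="training")
loader = DataLoader(dataset, batch_size=16, shuffle=True)

for batch in loader:
    # batch.pos: surface points, batch.pos_non_manifold: query points,
    # batch.occupancies: binary occupancy labels for the query points.
    print(batch.pos.shape, batch.occupancies.shape)
    break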
119 changes: 119 additions & 0 deletions datasets/abc_test.py
@@ -0,0 +1,119 @@
from torch_geometric.data import Dataset
from lightconvpoint.datasets.data import Data
import os
import numpy as np
import torch
import glob
import logging


class ABCTest(Dataset):

    def __init__(self, root, split="training", transform=None, filter_name=None,
                 num_non_manifold_points=2048, dataset_size=None,
                 variant_directory="abc", **kwargs):
        super().__init__(root, transform, None)

        logging.info(f"Dataset - ABC Test - Test only - {dataset_size}")

        self.root = os.path.join(self.root, variant_directory)

        self.filenames = []
        split_file = os.path.join(self.root, "testset.txt")

        with open(split_file) as f:
            content = f.readlines()
        content = [line.split("\n")[0] for line in content]  # strip trailing newlines
        content = [os.path.join(self.root, "04_pts", line) for line in content]
        self.filenames += content
        self.filenames.sort()

        if dataset_size is not None:
            self.filenames = self.filenames[:dataset_size]

        logging.info(f"Dataset - len {len(self.filenames)}")

    def get_category(self, f_id):
        return self.filenames[f_id].split("/")[-2]

    def get_object_name(self, f_id):
        return self.filenames[f_id].split("/")[-1]

    def get_class_name(self, f_id):
        # NOTE: assumes a `self.metadata` mapping populated elsewhere; it is
        # not defined in this class, so calling it as-is will raise.
        return self.metadata[self.get_category(f_id)]["name"]

    @property
    def raw_file_names(self):
        return []

    @property
    def processed_file_names(self):
        return []

    def _download(self):  # override _download to remove makedirs
        pass

    def download(self):
        pass

    def process(self):
        pass

    def _process(self):
        pass

    def len(self):
        return len(self.filenames)

    def get_data_for_evaluation(self, idx):
        # Evaluation data is not provided for this dataset.
        raise NotImplementedError
        # Unreachable reference code kept from the original:
        # filename = self.filenames[idx]
        # data_shape = np.load(os.path.join(filename, "pointcloud.npz"))
        # data_space = np.load(os.path.join(filename, "points.npz"))
        # return data_shape, data_space

    def get(self, idx):
        """Get item."""
        filename = self.filenames[idx]

        # Points sampled on the shape surface.
        pts_shp = np.load(filename + ".xyz.npy")

        pts_shp = torch.tensor(pts_shp, dtype=torch.float)
        # The test set carries no query points or labels; fill the fields
        # expected by Data with single-entry placeholders.
        pts_space = torch.ones((1, 3), dtype=torch.float)
        occupancies = torch.ones((1,), dtype=torch.long)

        data = Data(x=torch.ones_like(pts_shp),
                    shape_id=idx,
                    pos=pts_shp,
                    pos_non_manifold=pts_space,
                    occupancies=occupancies)

        return data


class ABCTestNoiseFree(ABCTest):

    def __init__(self, root, split="training", transform=None, filter_name=None,
                 num_non_manifold_points=2048, variant_directory="abc_noisefree",
                 dataset_size=None, **kwargs):
        super().__init__(root,
                         split=split,
                         transform=transform,
                         filter_name=filter_name,
                         num_non_manifold_points=num_non_manifold_points,
                         variant_directory=variant_directory,
                         dataset_size=dataset_size, **kwargs)


class ABCTestExtraNoise(ABCTest):

    def __init__(self, root, split="training", transform=None, filter_name=None,
                 num_non_manifold_points=2048, variant_directory="abc_extra_noisy",
                 dataset_size=None, **kwargs):
        super().__init__(root,
                         split=split,
                         transform=transform,
                         filter_name=filter_name,
                         num_non_manifold_points=num_non_manifold_points,
                         variant_directory=variant_directory,
                         dataset_size=dataset_size, **kwargs)
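
Since the subclasses differ only in variant_directory, all ABC test variants share one code path. A hypothetical selection sketch (the root path is an illustrative assumption):

from datasets.abc_test import ABCTest, ABCTestNoiseFree, ABCTestExtraNoise

# Each variant reads the same layout from a different directory under root.
variants = {
    "abc": ABCTest,
    "abc_noisefree": ABCTestNoiseFree,
    "abc_extra_noisy": ABCTestExtraNoise,
}
test_set = variants["abc_noisefree"]("data")
print(len(test_set))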