Absolute import and linting
parent 2f991920e6
commit f6debcb679
1 changed file with 33 additions and 24 deletions

@@ -4,13 +4,14 @@ import typing
 
 import numpy as np
 
-from colmap_wrapper import run_colmap
-import colmap_read_model as read_model
+from nerf_homemade.poses.colmap_wrapper import run_colmap
+import nerf_homemade.poses.colmap_read_model as read_model
 
 FORMAT = "%(asctime)s %(levelname)s \t %(message)s"
 logging.basicConfig(format=FORMAT, level=logging.DEBUG)
 
-def gen_poses(basedir: str, match_type: str='exhaustive') -> None:
+
+def gen_poses(basedir: str, match_type: str = 'exhaustive') -> None:
     """
     Generate or retrieve camera poses.
 
@@ -29,18 +30,19 @@ def gen_poses(basedir: str, match_type: str='exhaustive') -> None:
     """
 
     files_needed = ["cameras.bin", "images.bin", "points3D.bin"]
 
     path_to_sparse = os.path.join(basedir, "sparse/0")
     if os.path.exists(path_to_sparse):
         existing_files = os.listdir(path_to_sparse)
     else:
         existing_files = []
 
     if not all([f in existing_files for f in files_needed]):
         logging.info("Running COLMAP")
         run_colmap(basedir, match_type)
     else:
-        logging.info("Files generated by COLMAP found. Skipping running COLMAP.")
+        logging.info(
+            "Files generated by COLMAP found. Skipping running COLMAP.")
 
     logging.debug("Loading COLMAP data")
     poses, points_3d, perm = load_colmap_data(basedir)
@@ -48,6 +50,7 @@ def gen_poses(basedir: str, match_type: str='exhaustive') -> None:
     logging.debug("Saving COLMAP data to npy")
     save_poses(basedir, poses, points_3d, perm)
 
+
 def load_colmap_data(basedir: str) -> (np.ndarray, dict, np.ndarray):
     """
     Load data from a COLMAP output directory.
@@ -80,9 +83,9 @@ def load_colmap_data(basedir: str) -> (np.ndarray, dict, np.ndarray):
     camera_h = cameras_data[1].height
     camera_w = cameras_data[1].width
     camera_f = cameras_data[1].params[0]
-    hwf = np.array([camera_h, camera_w, camera_f]).reshape([3,1])
+    hwf = np.array([camera_h, camera_w, camera_f]).reshape([3, 1])
     logging.debug(f"Number of cameras: {len(cameras_data)}")
 
     # read images data
     images_file = os.path.join(basedir, "images.bin")
     images_data = read_model.read_images_binary(images_file)
@@ -92,32 +95,35 @@ def load_colmap_data(basedir: str) -> (np.ndarray, dict, np.ndarray):
 
     # sort by name
     names = [images_data[k].name for k in images_data]
-    perm = np.argsort(names)
-    # equivalent to a range since dict is indexed by number from 1 to N
+    perm = np.argsort(names)
+    # equivalent to a range since dict is indexed by number from 1 to N
 
     # create camera matrix
     for k in images_data:
         im = images_data[k]
         R = im.qvec2rotmat()
-        t = im.tvec.reshape([3,1])
+        t = im.tvec.reshape([3, 1])
         m = np.concatenate([np.concatenate([R, t], 1), bottom], 0)
         w2c_mats.append(m)
 
     w2c_mats = np.stack(w2c_mats, 0)
     c2w_mats = np.linalg.inv(w2c_mats)
 
-    poses = c2w_mats[:, :3, :4].transpose([1,2,0])
-    poses = np.concatenate([poses, np.tile(hwf[..., np.newaxis], [1,1,poses.shape[-1]])], 1)
+    poses = c2w_mats[:, :3, :4].transpose([1, 2, 0])
+    poses = np.concatenate(
+        [poses, np.tile(hwf[..., np.newaxis], [1, 1, poses.shape[-1]])], 1)
 
     # read 3d points data
     pts3d_file = os.path.join(basedir, "points3D.bin")
     pts3d = read_model.read_points3d_binary(pts3d_file)
 
     # must switch to [-u, r, -t] from [r, -u, t], NOT [r, u, -t]
-    poses = np.concatenate([poses[:, 1:2, :], poses[:, 0:1, :], -poses[:, 2:3, :], poses[:, 3:4, :], poses[:, 4:5, :]], 1)
+    poses = np.concatenate([poses[:, 1:2, :], poses[:, 0:1, :], -
+                            poses[:, 2:3, :], poses[:, 3:4, :], poses[:, 4:5, :]], 1)
 
     return poses, pts3d, perm
 
+
 def save_poses(basedir, poses, pts3d, perm) -> None:
     """
     Save the COLMAP data in a `.npy` format.
@@ -142,7 +148,8 @@ def save_poses(basedir, poses, pts3d, perm) -> None:
         cams = [0] * poses.shape[-1]
         for ind in pts3d[k].image_ids:
             if len(cams) < ind - 1:
-                logging.error("The correct camera poses for current points cannot be accessed")
+                logging.error(
+                    "The correct camera poses for current points cannot be accessed")
                 return
             cams[ind - 1] = 1
         vis_arr.append(cams)
@@ -150,21 +157,23 @@ def save_poses(basedir, poses, pts3d, perm) -> None:
     pts_arr = np.array(pts_arr)
     vis_arr = np.array(vis_arr)
     logging.info(f"Points {pts_arr.shape} Visibility {vis_arr.shape}")
 
-    zvals = np.sum(-(pts_arr[:, np.newaxis, :].transpose([2,0,1]) - poses[:3, 3:4, :]) * poses[:3, 2:3, :], 0)
+    zvals = np.sum(-(pts_arr[:, np.newaxis, :].transpose([2, 0, 1]
+                                                         ) - poses[:3, 3:4, :]) * poses[:3, 2:3, :], 0)
     valid_z = zvals[vis_arr == 1]
-    logging.info(f"Depths stats - min: {valid_z.min()} max: {valid_z.max()} mean: {valid_z.mean()}")
+    logging.info(
+        f"Depths stats - min: {valid_z.min()} max: {valid_z.max()} mean: {valid_z.mean()}")
 
     save_arr = []
     for i in perm:
         vis = vis_arr[:, i]
         zs = zvals[:, i]
-        zs = zs[vis==1]
+        zs = zs[vis == 1]
         close_depth, inf_depth = np.percentile(zs, .1), np.percentile(zs, 99.9)
 
-        save_arr.append(np.concatenate([poses[..., i].ravel(), np.array([close_depth, inf_depth])], 0))
+        save_arr.append(np.concatenate(
+            [poses[..., i].ravel(), np.array([close_depth, inf_depth])], 0))
     save_arr = np.array(save_arr)
 
     save_path = os.path.join(basedir, "poses_bounds.npy")
     np.save(save_path, save_arr)
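
For context, a minimal usage sketch of the module after this change. The entry-point script below is not part of this commit, and the `pose_utils` module name and the data path are assumptions (the changed file's name is not shown on this page); only the `nerf_homemade.poses` package path, the `gen_poses` signature, and the `poses_bounds.npy` output come from the diff above.

# Hypothetical driver script (not part of this commit) using the absolute
# import path introduced above instead of the old bare "colmap_wrapper" import.
import logging

# "pose_utils" is an assumed name for the module changed in this commit.
from nerf_homemade.poses.pose_utils import gen_poses

logging.basicConfig(level=logging.DEBUG)

if __name__ == "__main__":
    # basedir is assumed to hold the scene images for COLMAP; gen_poses()
    # runs COLMAP only if cameras.bin/images.bin/points3D.bin are missing,
    # then writes poses_bounds.npy into basedir via save_poses().
    gen_poses("data/my_scene", match_type="exhaustive")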