Commit

Add from gitlab
benemer committed Feb 25, 2024
1 parent 85e6b80 commit e80b376
Showing 6 changed files with 124 additions and 104 deletions.
5 changes: 2 additions & 3 deletions pyproject.toml
@@ -8,7 +8,7 @@ authors = [
]
license_files = "LICENSE"
dependencies = [
    "kiss-icp>=0.2.10",
    "kiss-icp>=0.3.0",
    "diskcache>=5.3.0",
    "pytorch_lightning>=1.6.4",
]
@@ -30,8 +30,7 @@ Homepage = "https://github.com/PRBonn/MapMOS"

[build-system]
requires = [
    "scikit_build_core>=0.3.3",
    "pybind11",
    "scikit_build_core", "pybind11",
]
build-backend = "scikit_build_core.build"

123 changes: 61 additions & 62 deletions scripts/cache_to_ply.py
@@ -52,12 +52,12 @@ def cache_to_ply(
help="The directory where the cache should be created",
show_default=False,
),
sequence: List[str] = typer.Option(
sequence: Optional[str] = typer.Option(
None,
"--sequence",
"-s",
show_default=False,
help="[Optional] Cache specific sequences",
help="[Optional] For some dataloaders, you need to specify a given sequence",
rich_help_panel="Additional Options",
),
config: Optional[Path] = typer.Option(
@@ -74,66 +74,65 @@
        print(f'open3d is not installed on your system, run "pip install open3d"')
        exit(1)

    for seq in sequence:
        # Run
        cfg = load_config(config)

        data_iterable = DataLoader(
            MapMOSDataset(
                dataloader=dataloader,
                data_dir=data,
                config=cfg,
                sequences=seq,
                cache_dir=cache_dir,
            ),
            batch_size=1,
            collate_fn=collate_fn,
            shuffle=False,
            num_workers=0,
            batch_sampler=None,
        )

        dataset_sequence = (
            data_iterable.dataset.datasets[seq].sequence_id
            if hasattr(data_iterable.dataset.datasets[seq], "sequence_id")
            else os.path.basename(data_iterable.dataset.datasets[seq].data_dir)
        )
        path = os.path.join("ply", dataset_sequence)
        os.makedirs(path, exist_ok=True)

        for idx, batch in enumerate(
            tqdm(data_iterable, desc="Writing data to ply", unit=" items", dynamic_ncols=True)
        ):
            mask_scan = batch[:, 4] == idx
            scan_points = batch[mask_scan, 1:4]
            scan_labels = batch[mask_scan, 6]

            map_points = batch[~mask_scan, 1:4]
            map_timestamps = batch[~mask_scan, 5]
            map_labels = batch[~mask_scan, 6]

            min_time = torch.min(batch[:, 5])
            max_time = torch.max(batch[:, 5])

            pcd_scan = o3d.geometry.PointCloud(
                o3d.utility.Vector3dVector(scan_points.numpy())
            ).paint_uniform_color([0, 0, 1])
            scan_colors = np.array(pcd_scan.colors)
            scan_colors[scan_labels == 1] = [1, 0, 0]
            pcd_scan.colors = o3d.utility.Vector3dVector(scan_colors)

            pcd_map = o3d.geometry.PointCloud(
                o3d.utility.Vector3dVector(map_points.numpy())
            ).paint_uniform_color([0, 0, 0])
            map_colors = np.array(pcd_map.colors)
            map_timestamps_norm = (map_timestamps - min_time) / (max_time - min_time)
            for i in range(len(map_colors)):
                t = map_timestamps_norm[i]
                map_colors[i, :] = [t, t, t]
            map_colors[map_labels == 1] = [1, 0, 0]
            pcd_map.colors = o3d.utility.Vector3dVector(map_colors)

            o3d.io.write_point_cloud(os.path.join(path, f"{idx:06}.ply"), pcd_scan + pcd_map)
    # Run
    cfg = load_config(config)

    data_iterable = DataLoader(
        MapMOSDataset(
            dataloader=dataloader,
            data_dir=data,
            config=cfg,
            sequences=sequence,
            cache_dir=cache_dir,
        ),
        batch_size=1,
        collate_fn=collate_fn,
        shuffle=False,
        num_workers=0,
        batch_sampler=None,
    )

    dataset_sequence = (
        data_iterable.dataset.datasets[sequence].sequence_id
        if hasattr(data_iterable.dataset.datasets[sequence], "sequence_id")
        else os.path.basename(data_iterable.dataset.datasets[sequence].data_dir)
    )
    path = os.path.join("ply", dataset_sequence)
    os.makedirs(path, exist_ok=True)

    for idx, batch in enumerate(
        tqdm(data_iterable, desc="Writing data to ply", unit=" items", dynamic_ncols=True)
    ):
        mask_scan = batch[:, 4] == idx
        scan_points = batch[mask_scan, 1:4]
        scan_labels = batch[mask_scan, 6]

        map_points = batch[~mask_scan, 1:4]
        map_timestamps = batch[~mask_scan, 5]
        map_labels = batch[~mask_scan, 6]

        min_time = torch.min(batch[:, 5])
        max_time = torch.max(batch[:, 5])

        pcd_scan = o3d.geometry.PointCloud(
            o3d.utility.Vector3dVector(scan_points.numpy())
        ).paint_uniform_color([0, 0, 1])
        scan_colors = np.array(pcd_scan.colors)
        scan_colors[scan_labels == 1] = [1, 0, 0]
        pcd_scan.colors = o3d.utility.Vector3dVector(scan_colors)

        pcd_map = o3d.geometry.PointCloud(
            o3d.utility.Vector3dVector(map_points.numpy())
        ).paint_uniform_color([0, 0, 0])
        map_colors = np.array(pcd_map.colors)
        map_timestamps_norm = (map_timestamps - min_time) / (max_time - min_time)
        for i in range(len(map_colors)):
            t = map_timestamps_norm[i]
            map_colors[i, :] = [t, t, t]
        map_colors[map_labels == 1] = [1, 0, 0]
        pcd_map.colors = o3d.utility.Vector3dVector(map_colors)

        o3d.io.write_point_cloud(os.path.join(path, f"{idx:06}.ply"), pcd_scan + pcd_map)


if __name__ == "__main__":
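
Note: the per-point loop above colors each map point by its normalized timestamp. A vectorized sketch of the same coloring, assuming NumPy array inputs (the function name and signature are illustrative, not part of this commit):

import numpy as np

def timestamp_colors(timestamps, labels, t_min, t_max):
    # Grayscale by normalized timestamp; moving points (label == 1) in red.
    t = (timestamps - t_min) / (t_max - t_min)       # normalize to [0, 1]
    colors = np.repeat(t.reshape(-1, 1), 3, axis=1)  # [t, t, t] per point
    colors[labels == 1] = [1.0, 0.0, 0.0]
    return colors
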
44 changes: 25 additions & 19 deletions src/mapmos/config/parser.py
@@ -23,11 +23,13 @@
# NOTE: This module was contributed by Markus Pielmeier on PR #63
from __future__ import annotations

import yaml
import importlib
import sys

from pathlib import Path
from typing import Any, Dict, Optional

from pydantic import BaseSettings, PrivateAttr
from pydantic_settings import BaseSettings

from mapmos.config.config import (
    DataConfig,
@@ -43,31 +45,35 @@ class MapMOSConfig(BaseSettings):
    odometry: OdometryConfig = OdometryConfig()
    mos: MOSConfig = MOSConfig()
    training: TrainingConfig = TrainingConfig()
    _config_file: Optional[Path] = PrivateAttr()

    def __init__(self, config_file: Optional[Path] = None, *args, **kwargs):
        self._config_file = config_file
        super().__init__(*args, **kwargs)

    def _yaml_source(self) -> Dict[str, Any]:
        data = None
        if self._config_file is not None:
            with open(self._config_file) as cfg_file:
                data = yaml.safe_load(cfg_file)
        return data or {}

    class Config:
        @classmethod
        def customise_sources(cls, init_settings, env_settings, file_secret_settings):
            return init_settings, MapMOSConfig._yaml_source

def _yaml_source(config_file: Optional[Path]) -> Dict[str, Any]:
    data = None
    if config_file is not None:
        try:
            yaml = importlib.import_module("yaml")
        except ModuleNotFoundError:
            print(
                "Custom configuration file specified but PyYAML is not installed on your system,"
                ' run `pip install "kiss-icp[all]"`. You can also modify the config.py if your '
                "system does not support PyYaml "
            )
            sys.exit(1)
        with open(config_file) as cfg_file:
            data = yaml.safe_load(cfg_file)
    return data or {}


def load_config(config_file: Optional[Path]) -> MapMOSConfig:
    """Load configuration from an Optional yaml file."""
    config = MapMOSConfig(config_file=config_file)
    config = MapMOSConfig(**_yaml_source(config_file))
    return config


def write_config(config: MapMOSConfig, filename: str):
    with open(filename, "w") as outfile:
        yaml.dump(config.dict(), outfile, default_flow_style=False)
        try:
            yaml = importlib.import_module("yaml")
            yaml.dump(config.model_dump(), outfile, default_flow_style=False)
        except ModuleNotFoundError:
            outfile.write(str(config.model_dump()))
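
Note: after this change, the YAML file is read up front and its keys are passed to pydantic as ordinary init kwargs instead of going through a custom pydantic v1 settings source. A minimal usage sketch of the two entry points shown above (file names are placeholders):

from pathlib import Path

from mapmos.config.parser import load_config, write_config

config = load_config(Path("my_config.yaml"))  # passing None falls back to the defaults
write_config(config, "config_dump.yaml")
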
4 changes: 2 additions & 2 deletions src/mapmos/datasets/mapmos_dataset.py
@@ -216,8 +216,8 @@ def get_scan_and_map(
        scan_labels = scan_labels[valid_mask]

        if self.sequence != sequence:
            data_config = DataConfig().parse_obj(data_config_dict)
            odometry_config = OdometryConfig().parse_obj(odometry_config_dict)
            data_config = DataConfig().model_validate(data_config_dict)
            odometry_config = OdometryConfig().model_validate(odometry_config_dict)

            self.odometry = Odometry(data_config, odometry_config)
            self.gt_map = VoxelHashMap(odometry_config.voxel_size, data_config.max_range)
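
Note: parse_obj() and dict() are the pydantic v1 spellings; pydantic v2 renames them to model_validate() and model_dump(). A toy illustration (the field value is assumed for the example):

from mapmos.config.config import DataConfig

# pydantic v2 renames: parse_obj() -> model_validate(), dict() -> model_dump()
data_config = DataConfig.model_validate({"max_range": 100.0})
print(data_config.model_dump())
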
26 changes: 17 additions & 9 deletions src/mapmos/paper_pipeline.py
@@ -73,11 +73,13 @@ def _run_pipeline(self):
        map_points, map_indices = self.odometry.get_map_points()
        scan_points = self.odometry.register_points(local_scan, timestamps, scan_index)

        scan_mask = self._preprocess(scan_points)
        min_range_mos = self.config.mos.min_range_mos
        max_range_mos = self.config.mos.max_range_mos
        scan_mask = self._preprocess(scan_points, min_range_mos, max_range_mos)
        scan_points = torch.tensor(scan_points[scan_mask], dtype=torch.float32, device="cuda")
        gt_labels = gt_labels[scan_mask]

        map_mask = self._preprocess(map_points)
        map_mask = self._preprocess(map_points, min_range_mos, max_range_mos)
        map_points = torch.tensor(map_points[map_mask], dtype=torch.float32, device="cuda")
        map_indices = torch.tensor(map_indices[map_mask], dtype=torch.float32, device="cuda")

@@ -123,12 +125,20 @@ def _run_pipeline(self):
            torch.tensor(gt_labels, dtype=torch.int32),
        )

        # Probabilistic volumetric fusion with scan and moving map predictions
        map_mask = pred_logits_map > 0
        points_stacked = np.vstack([scan_points, map_points[map_mask]])
        # Probabilistic Volumetric Fusion of predictions within the belief range
        map_mask_belief = pred_logits_map > 0
        map_mask_belief = np.logical_and(
            map_mask_belief, self._preprocess(map_points, 0.0, self.config.mos.max_range_belief)
        )
        scan_mask_belief = self._preprocess(scan_points, 0.0, self.config.mos.max_range_belief)
        points_stacked = np.vstack([scan_points[scan_mask_belief], map_points[map_mask_belief]])
        logits_stacked = np.vstack(
            [pred_logits_scan.reshape(-1, 1), pred_logits_map[map_mask].reshape(-1, 1)]
            [
                pred_logits_scan[scan_mask_belief].reshape(-1, 1),
                pred_logits_map[map_mask_belief].reshape(-1, 1),
            ]
        ).reshape(-1)

        start_time = time.perf_counter_ns()
        self.belief.update_belief(points_stacked, logits_stacked)
        belief_with_map = self.belief.get_belief(scan_points)
@@ -144,9 +154,7 @@
            torch.tensor(gt_labels, dtype=torch.int32),
        )

        belief_scan = self.belief.get_belief(scan_points)
        self.times_belief.append(time.perf_counter_ns() - start_time)
        belief_labels_scan = self.model.to_label(belief_scan)
        belief_labels_scan = belief_labels_with_map
        if self.visualize:
            belief_map = self.belief.get_belief(map_points)
            belief_labels_map = self.model.to_label(belief_map)
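
Note: update_belief() and get_belief() fuse the stacked per-point logits into a volumetric belief that is later thresholded into labels. A toy log-odds accumulator over hashed voxels illustrates the principle; it is a simplified stand-in, not the belief structure this pipeline actually uses:

import numpy as np

class LogOddsBelief:
    # Toy voxel-hashed log-odds belief; positive belief means "moving".
    def __init__(self, voxel_size):
        self.voxel_size = voxel_size
        self.log_odds = {}

    def _keys(self, points):
        # Quantize points to integer voxel coordinates usable as dict keys.
        return map(tuple, np.floor(points / self.voxel_size).astype(int))

    def update_belief(self, points, logits):
        for key, logit in zip(self._keys(points), logits):
            self.log_odds[key] = self.log_odds.get(key, 0.0) + logit

    def get_belief(self, points):
        return np.array([self.log_odds.get(key, 0.0) for key in self._keys(points)])
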
26 changes: 17 additions & 9 deletions src/mapmos/pipeline.py
@@ -114,11 +114,10 @@ def run(self):
        self._write_log()
        return self.results

    def _preprocess(self, points):
    def _preprocess(self, points, min_range, max_range):
        ranges = np.linalg.norm(points - self.odometry.current_location(), axis=1)
        max_range = self.config.mos.max_range_mos
        mask = ranges <= max_range if max_range > 0 else np.ones_like(ranges, dtype=bool)
        mask = np.logical_and(mask, ranges >= self.config.mos.min_range_mos)
        mask = np.logical_and(mask, ranges >= min_range)
        return mask

    # Private interface ------
@@ -129,11 +128,13 @@ def _run_pipeline(self):
        map_points, map_indices = self.odometry.get_map_points()
        scan_points = self.odometry.register_points(local_scan, timestamps, scan_index)

        scan_mask = self._preprocess(scan_points)
        min_range_mos = self.config.mos.min_range_mos
        max_range_mos = self.config.mos.max_range_mos
        scan_mask = self._preprocess(scan_points, min_range_mos, max_range_mos)
        scan_points = torch.tensor(scan_points[scan_mask], dtype=torch.float32, device="cuda")
        gt_labels = gt_labels[scan_mask]

        map_mask = self._preprocess(map_points)
        map_mask = self._preprocess(map_points, min_range_mos, max_range_mos)
        map_points = torch.tensor(map_points[map_mask], dtype=torch.float32, device="cuda")
        map_indices = torch.tensor(map_indices[map_mask], dtype=torch.float32, device="cuda")

@@ -156,11 +157,18 @@
        pred_labels_scan = self.model.to_label(pred_logits_scan)
        pred_labels_map = self.model.to_label(pred_logits_map)

        # Probabilistic Volumetric Fusion
        map_mask = pred_logits_map > 0
        points_stacked = np.vstack([scan_points, map_points[map_mask]])
        # Probabilistic Volumetric Fusion of predictions within the belief range
        map_mask_belief = pred_logits_map > 0
        map_mask_belief = np.logical_and(
            map_mask_belief, self._preprocess(map_points, 0.0, self.config.mos.max_range_belief)
        )
        scan_mask_belief = self._preprocess(scan_points, 0.0, self.config.mos.max_range_belief)
        points_stacked = np.vstack([scan_points[scan_mask_belief], map_points[map_mask_belief]])
        logits_stacked = np.vstack(
            [pred_logits_scan.reshape(-1, 1), pred_logits_map[map_mask].reshape(-1, 1)]
            [
                pred_logits_scan[scan_mask_belief].reshape(-1, 1),
                pred_logits_map[map_mask_belief].reshape(-1, 1),
            ]
        ).reshape(-1)

        start_time = time.perf_counter_ns()
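
Note: the refactored _preprocess() takes the range bounds explicitly, so the same helper now gates both the MOS range and the belief range. A self-contained sketch of that gating, with an explicit sensor origin in place of self.odometry.current_location():

import numpy as np

def range_mask(points, origin, min_range, max_range):
    # Keep points whose distance from origin lies in [min_range, max_range];
    # max_range <= 0 disables the upper bound, as in _preprocess() above.
    ranges = np.linalg.norm(points - origin, axis=1)
    mask = ranges <= max_range if max_range > 0 else np.ones_like(ranges, dtype=bool)
    return np.logical_and(mask, ranges >= min_range)
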
