Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

HOTFIX: use pdal instead of laspy for interpolation #8

Merged
merged 2 commits into from
Mar 23, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -49,15 +49,15 @@ source bash/setup_environment/setup_env.sh # with cudatoolkit
source bash/setup_environment/setup_env_cpu_only.sh # cpu version only

# activate using
conda activate lidar_deep_segmentation
conda activate lidar_multiclass
```

### Run inference from package
If you are interested in running inference from anywhere, you can install the code as a package in your virtual environment.

```
# activate an env matching ./bash/setup_env.sh requirements.
conda activate lidar_deep_segmentation
conda activate lidar_multiclass

# install the package
pip install --upgrade https://github.com/IGNF/lidar-deep-segmentation/tarball/main # from github directly
Expand Down
6 changes: 3 additions & 3 deletions bash/setup_environment/requirements.yml
Original file line number Diff line number Diff line change
@@ -1,17 +1,17 @@
name: lidar_deep_segmentation
name: lidar_multiclass
channels:
- conda-forge
- anaconda
- comet_ml
- pytorch
dependencies:
- python==3.9
- python==3.9.*
- pip
# --------- numpy --------- #
- numpy==1.20
# --------- geo --------- #
- laspy
- pygeos
- laspy
- python-pdal # useful for data preparation
# --------- loggers --------- #
- comet_ml
Expand Down
4 changes: 2 additions & 2 deletions bash/setup_environment/setup_env.sh
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ set -e
# SETUP
conda install -y mamba -n base -c conda-forge
mamba env create -f bash/setup_environment/requirements.yml
conda activate lidar_deep_segmentation
conda activate lidar_multiclass

# INSTALL
export PYTORCHVERSION="1.10.1"
Expand All @@ -13,4 +13,4 @@ mamba install -y pytorch==$PYTORCHVERSION torchvision==$TORCHVISIONVERSION cudat
mamba install -y pyg==2.0.3 -c pytorch -c pyg -c conda-forge
FORCE_CUDA=1 pip install torch-points-kernels --no-cache
pip install numba==0.55.1 numpy==1.20.0 # revert inconsistent torch-points-kernel dependencies
mamba install pytorch-lightning==1.5.9 -c conda-forge
mamba install -y pytorch-lightning==1.5.9 -c conda-forge
2 changes: 1 addition & 1 deletion bash/setup_environment/setup_env_cpu_only.sh
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ set -e
# SETUP
conda install -y mamba -n base -c conda-forge
mamba env create -f bash/setup_environment/requirements.yml
conda activate lidar_deep_segmentation
conda activate lidar_multiclass

# INSTALL
export PYTORCHVERSION="1.10.1"
Expand Down
47 changes: 24 additions & 23 deletions lidar_multiclass/datamodules/interpolation.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
from tokenize import Number
from typing import Dict, List, Optional

import laspy
import pdal
import numpy as np
import torch
from torch_geometric.nn.pool import knn
Expand Down Expand Up @@ -47,32 +47,27 @@ def __init__(

def _load_las(self, filepath: str):
"""Load a LAS and add necessary extradim."""

self.las = laspy.read(filepath)
self.current_f = filepath
pipeline = pdal.Reader.las(filename=filepath)

coln = ChannelNames.PredictedClassification.value
param = laspy.ExtraBytesParams(name=coln, type=int)
self.las.add_extra_dim(param)
self.las[coln][:] = 0

param = laspy.ExtraBytesParams(
name=ChannelNames.ProbasEntropy.value, type=float
)
self.las.add_extra_dim(param)
self.las[ChannelNames.ProbasEntropy.value][:] = 0.0

for class_name in self.probas_names:
param = laspy.ExtraBytesParams(name=class_name, type=float)
self.las.add_extra_dim(param)
self.las[class_name][:] = 0.0

new_dims = self.probas_names + [
ChannelNames.PredictedClassification.value,
ChannelNames.ProbasEntropy.value,
]
for new_dim in new_dims:
pipeline |= pdal.Filter.ferry(
dimensions=f"=>{new_dim}"
) | pdal.Filter.assign(value=f"{new_dim}=0")
pipeline.execute()
self.las = pipeline.arrays[0] # named array

# TODO: check scale of x versus X ?
self.pos_las = torch.from_numpy(
np.asarray(
[
self.las.x,
self.las.y,
self.las.z,
self.las["X"],
self.las["Y"],
self.las["Z"],
],
dtype=np.float32,
).transpose()
Expand Down Expand Up @@ -177,7 +172,13 @@ def _write(self, interpolation):
self.las[ChannelNames.ProbasEntropy.value][:] = entropy

log.info(f"Saving...")
self.las.write(out_f)

pipeline = pdal.Writer.las(
filename=out_f,
extra_dims=f"all",
minor_version=4,
).pipeline(self.las)
pipeline.execute()
log.info(f"Saved.")

return out_f
Expand Down
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@

setup(
name="lidar_multiclass",
version="1.6.3",
version="1.6.10",
description="Multiclass Semantic Segmentation for Lidar Point Cloud",
author="Charles GAYDON",
author_email="",
Expand Down