Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Remove additional dimensions in main script + add buffer #20

Merged
merged 2 commits into from
Jul 17, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -9,3 +9,6 @@ __pycache__

*.egg-info
*.idea

# test output
tmp
5 changes: 2 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -31,9 +31,8 @@ The code is structured as:
├── macro # Python module with ready-to-use filters combinations
│   ├── __init__.py
│   ├── macro.py
│   └── version.py
├── scripts
│   ├── *.py # Example scripts to use the plugin filters + the filters combinations contained in `macro`
│   ├── version.py
│   └── *.py # Example scripts to use the plugin filters + the filters combinations contained in `macro`
├── test
├── CMakeLists.txt
├── environment*.yml
Expand Down
2 changes: 1 addition & 1 deletion environment.yml
Original file line number Diff line number Diff line change
Expand Up @@ -18,4 +18,4 @@ dependencies:
# --------- pip & pip libraries --------- #
- pip
- pip:
- ign-pdal-tools
- ign-pdal-tools==1.7.1
4 changes: 2 additions & 2 deletions environment_docker.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,5 +11,5 @@ dependencies:
# --------- pip & pip libraries --------- #
- pip
- pip:
- ign-pdal-tools
- ign-pdal-tools==1.7.1

Original file line number Diff line number Diff line change
@@ -1,6 +1,10 @@
import argparse
import shutil
import tempfile

import pdal
from pdaltools.las_add_buffer import run_on_buffered_las
from pdaltools.las_remove_dimensions import remove_dimensions_from_las

from pdal_ign_macro import macro

Expand Down Expand Up @@ -39,23 +43,55 @@ def parse_args():
parser.add_argument(
"--output_dtm", "-t", type=str, required=False, default="", help="Output dtm tiff file"
)
parser.add_argument(
"--keep_temporary_dims",
"-k",
action="store_true",
help="If set, do not delete temporary dimensions",
)
parser.add_argument(
"--skip_buffer",
"-s",
action="store_true",
help="If set, skip adding a buffer from the neighbor tiles based on their name",
)
parser.add_argument(
"--buffer_width",
type=float,
default=25,
help="width of the border to add to the tile (in meters)",
)
parser.add_argument(
"--spatial_ref",
type=str,
default="EPSG:2154",
help="spatial reference for the writer (required when running with a buffer)",
)
parser.add_argument(
"--tile_width",
type=int,
default=1000,
action="store_true",
help="width of tiles in meters (required when running with a buffer)",
)
parser.add_argument(
"--tile_coord_scale",
type=int,
default=1000,
action="store_true",
help="scale used in the filename to describe coordinates in meters (required when running with a buffer)",
)

return parser.parse_args()


def mark_points_to_use_for_digital_models_with_new_dimension(
input_las, output_las, dsm_dimension, dtm_dimension, output_dsm, output_dtm
):
def define_marking_pipeline(input_las, output_las, dsm_dimension, dtm_dimension):
pipeline = pdal.Pipeline() | pdal.Reader.las(input_las)

# 0 - ajout de dimensions temporaires et de sortie
added_dimensions = [
dtm_dimension,
dsm_dimension,
"PT_VEG_DSM",
"PT_ON_BRIDGE",
"PT_ON_BUILDING",
"PT_ON_VEGET",
]
temporary_dimensions = ["PT_VEG_DSM", "PT_ON_BRIDGE", "PT_ON_BUILDING", "PT_ON_VEGET"]
added_dimensions = [dtm_dimension, dsm_dimension] + temporary_dimensions

pipeline |= pdal.Filter.ferry(dimensions="=>" + ", =>".join(added_dimensions))

# 1 - recherche des points max de végétation (4,5) sur une grille régulière, avec prise en
Expand Down Expand Up @@ -220,31 +256,99 @@ def mark_points_to_use_for_digital_models_with_new_dimension(
)
# ERREUR EN 4!###############################################################################################!
# 5 - export du nuage et des DSM
# TODO: n'ajouter que les dimensions de sortie utiles !

pipeline |= pdal.Writer.las(extra_dims="all", forward="all", filename=output_las)

if output_dtm:
pipeline |= pdal.Writer.gdal(
gdaldriver="GTiff",
output_type="max",
resolution=0.5,
filename=output_dtm,
where=f"{dtm_dimension}==1",
return pipeline, temporary_dimensions


def mark_points_to_use_for_digital_models_with_new_dimension(
    input_las,
    output_las,
    dsm_dimension,
    dtm_dimension,
    output_dsm,
    output_dtm,
    keep_temporary_dimensions=False,
):
    """Mark the points to use for DSM/DTM generation, writing markers to new dimensions.

    Runs the marking pipeline into a temporary las, optionally writes DSM/DTM rasters,
    then writes the final las either with or without the pipeline's temporary dimensions.

    Args:
        input_las (str): path to the input las file
        output_las (str): path to the output las file
        dsm_dimension (str): name of the output dimension marking points to use for the DSM
        dtm_dimension (str): name of the output dimension marking points to use for the DTM
        output_dsm (str): path to the DSM geotiff output; empty string to skip writing it
        output_dtm (str): path to the DTM geotiff output; empty string to skip writing it
        keep_temporary_dimensions (bool): if True, keep the temporary marking dimensions
            in the output las instead of removing them
    """
    # Work in a temporary file so the temporary dimensions never reach output_las
    # unless explicitly requested.
    with tempfile.NamedTemporaryFile(suffix="_with_temporary_dims.las", dir=".") as tmp_las:
        pipeline, temporary_dimensions = define_marking_pipeline(
            input_las,
            tmp_las.name,
            dsm_dimension,
            dtm_dimension,
        )

        if output_dtm:
            pipeline |= pdal.Writer.gdal(
                gdaldriver="GTiff",
                output_type="max",
                resolution=0.5,
                filename=output_dtm,
                where=f"{dtm_dimension}==1",
            )

        if output_dsm:
            pipeline |= pdal.Writer.gdal(
                gdaldriver="GTiff",
                output_type="max",
                resolution=0.5,
                filename=output_dsm,
                where=f"{dsm_dimension}==1",
            )

        pipeline.execute()

        if keep_temporary_dimensions:
            shutil.copy(tmp_las.name, output_las)
        else:
            # NOTE(review): SRC_DOMAIN/REF_DOMAIN/radius_search are presumably added by
            # upstream filters — confirm against define_marking_pipeline's full body.
            remove_dimensions_from_las(
                tmp_las.name,
                temporary_dimensions + ["SRC_DOMAIN", "REF_DOMAIN", "radius_search"],
                output_las,
            )


def main(
    input_las,
    output_las,
    dsm_dimension,
    dtm_dimension,
    output_dsm,
    output_dtm,
    keep_temporary_dimensions=False,
    skip_buffer=False,
    buffer_width=25,
    spatial_ref="EPSG:2154",
    tile_width=1000,
    tile_coord_scale=1000,
):
    """Entry point: run the marking step, optionally on a buffered version of the tile.

    Args:
        input_las (str): path to the input las file
        output_las (str): path to the output las file
        dsm_dimension (str): name of the dimension marking points to use for the DSM
        dtm_dimension (str): name of the dimension marking points to use for the DTM
        output_dsm (str): path to the DSM geotiff output; empty string to skip writing it
        output_dtm (str): path to the DTM geotiff output; empty string to skip writing it
        keep_temporary_dimensions (bool): if True, keep temporary dimensions in the output
        skip_buffer (bool): if True, run directly on the tile without adding a buffer
        buffer_width (float): width of the border added from neighbor tiles (meters)
        spatial_ref (str): spatial reference for the writer (used when buffering)
        tile_width (int): tile width in meters (used when buffering)
        tile_coord_scale (int): scale used in filenames to describe coordinates in meters
    """
    if skip_buffer:
        mark_points_to_use_for_digital_models_with_new_dimension(
            input_las,
            output_las,
            dsm_dimension,
            dtm_dimension,
            output_dsm,
            output_dtm,
            keep_temporary_dimensions,
        )
    else:
        # Decorate the marking function so that it runs on a buffered copy of the
        # tile (buffer built from neighbor tiles found via the filename convention).
        mark_with_buffer = run_on_buffered_las(
            buffer_width, spatial_ref, tile_width, tile_coord_scale
        )(mark_points_to_use_for_digital_models_with_new_dimension)

        mark_with_buffer(
            input_las,
            output_las,
            dsm_dimension,
            dtm_dimension,
            output_dsm,
            output_dtm,
            keep_temporary_dimensions,
        )


if __name__ == "__main__":
    # Parsed arguments map 1:1 onto main()'s signature.
    args = parse_args()
    main(**vars(args))
Binary file not shown.
Binary file not shown.
Binary file not shown.
Original file line number Diff line number Diff line change
Expand Up @@ -4,11 +4,12 @@
import pdal

from pdal_ign_macro.mark_points_to_use_for_digital_models_with_new_dimension import (
main,
mark_points_to_use_for_digital_models_with_new_dimension,
)


def test_main():
def test_mark_points_to_use_for_digital_models_with_new_dimension():
ini_las = "test/data/4_6.las"
dsm_dimension = "dsm_marker"
dtm_dimension = "dtm_marker"
Expand All @@ -17,9 +18,102 @@ def test_main():
ini_las, las_output.name, dsm_dimension, dtm_dimension, "", ""
)
pipeline = pdal.Pipeline()
pipeline |= pdal.Reader.las(ini_las)
input_dimensions = set(pipeline.quickinfo["readers.las"]["dimensions"].split(", "))
pipeline = pdal.Pipeline()
pipeline |= pdal.Reader.las(las_output.name)
output_dimensions = set(pipeline.quickinfo["readers.las"]["dimensions"].split(", "))
assert output_dimensions == input_dimensions.union([dsm_dimension, dtm_dimension])

pipeline.execute()
arr = pipeline.arrays[0]
assert np.any(arr[dsm_dimension] == 1)
assert np.any(arr[dtm_dimension] == 1)


def test_mark_points_to_use_for_digital_models_with_new_dimension_keep_dimensions():
    """Temporary dimensions must survive in the output when keep_temporary_dimensions=True."""
    input_file = "test/data/4_6.las"
    dsm_dimension = "dsm_marker"
    dtm_dimension = "dtm_marker"
    with tempfile.NamedTemporaryFile(suffix="_mark_points_output.las") as las_output:
        mark_points_to_use_for_digital_models_with_new_dimension(
            input_file,
            las_output.name,
            dsm_dimension,
            dtm_dimension,
            "",
            "",
            keep_temporary_dimensions=True,
        )
        reader = pdal.Pipeline() | pdal.Reader.las(las_output.name)
        dims = set(reader.quickinfo["readers.las"]["dimensions"].split(", "))

        # Both marker dimensions are always written.
        assert dsm_dimension in dims
        assert dtm_dimension in dims
        # The temporary dimensions are preserved because of the flag.
        assert {"PT_VEG_DSM", "PT_ON_BRIDGE", "PT_ON_BUILDING", "PT_ON_VEGET"} <= dims

        reader.execute()
        points = reader.arrays[0]
        assert np.any(points[dsm_dimension] == 1)
        assert np.any(points[dtm_dimension] == 1)


def test_main_no_buffer():
    """main() with skip_buffer=True writes both marker dimensions with some marked points."""
    input_file = "test/data/4_6.las"
    dsm_dimension = "dsm_marker"
    dtm_dimension = "dtm_marker"
    with tempfile.NamedTemporaryFile(suffix="_mark_points_output.las") as las_output:
        main(
            input_file,
            las_output.name,
            dsm_dimension,
            dtm_dimension,
            "",
            "",
            keep_temporary_dimensions=False,
            skip_buffer=True,
        )
        reader = pdal.Pipeline() | pdal.Reader.las(las_output.name)
        dims = reader.quickinfo["readers.las"]["dimensions"].split(", ")
        assert dsm_dimension in dims
        assert dtm_dimension in dims

        reader.execute()
        points = reader.arrays[0]
        # At least one point is marked for each digital model.
        assert np.any(points[dsm_dimension] == 1)
        assert np.any(points[dtm_dimension] == 1)


def test_main_with_buffer():
ini_las = "test/data/buffer/test_data_77055_627755_LA93_IGN69.laz"
dsm_dimension = "dsm_marker"
dtm_dimension = "dtm_marker"
with tempfile.NamedTemporaryFile(suffix="_mark_points_output.las") as las_output:
main(
ini_las,
las_output.name,
dsm_dimension,
dtm_dimension,
"",
"",
keep_temporary_dimensions=False,
skip_buffer=False,
buffer_width=10,
tile_width=50,
tile_coord_scale=10,
)
pipeline = pdal.Pipeline()
pipeline |= pdal.Reader.las(las_output.name)
assert dsm_dimension in pipeline.quickinfo["readers.las"]["dimensions"].split(", ")
assert dtm_dimension in pipeline.quickinfo["readers.las"]["dimensions"].split(", ")
output_dimensions = pipeline.quickinfo["readers.las"]["dimensions"].split(", ")
assert dsm_dimension in output_dimensions
assert dtm_dimension in output_dimensions

pipeline.execute()
arr = pipeline.arrays[0]
Expand Down