Add test for mark_points script
leavauchier committed Jun 6, 2024
1 parent b9bc166 commit a5ef9cd
Showing 2 changed files with 49 additions and 26 deletions.
52 changes: 26 additions & 26 deletions scripts/mark_points_to_use_for_digital_models_with_new_dimension.py
@@ -15,7 +15,7 @@ def parse_args():
         "Tool to apply pdal pipelines to select points for DSM and DTM calculation"
         + "(add dimensions with positive values for the selected points)"
     )
-    parser.add_argument("--input", "-i", type=str, required=True, help="Input las file")
+    parser.add_argument("--input_las", "-i", type=str, required=True, help="Input las file")
     parser.add_argument(
         "--output_las", "-o", type=str, required=True, help="Output cloud las file"
     )
@@ -42,18 +42,11 @@ def parse_args():
     return parser.parse_args()
 
 
-if __name__ == "__main__":
-    args = parse_args()
-
-    pipeline = pdal.Pipeline() | pdal.Reader.las(args.input)
-    dsm_dim = args.dsm_dimension
-    dtm_dim = args.dtm_dimension
-
-    # Retrieve the dimensions of the input file
-    input_dimensions = pipeline.quickinfo["readers.las"]["dimensions"].split(", ")
+def main(input_las, output_las, dsm_dimension, dtm_dimension, output_dsm, output_dtm):
+    pipeline = pdal.Pipeline() | pdal.Reader.las(input_las)
 
     # 0 - add temporary and output dimensions
-    added_dimensions = [dtm_dim, dsm_dim, "PT_VEG_DSM", "PT_ON_BRIDGE"]
+    added_dimensions = [dtm_dimension, dsm_dimension, "PT_VEG_DSM", "PT_ON_BRIDGE"]
     pipeline |= pdal.Filter.ferry(dimensions="=>" + ", =>".join(added_dimensions))
 
     # 1 - find the max vegetation points (classes 4, 5) on a regular grid, taking into
@@ -95,32 +88,34 @@ def parse_args():
 
     # max of the vegetation points (PT_VEG_DSM==1) on a regular grid:
     pipeline |= pdal.Filter.gridDecimation(
-        resolution=0.75, value=f"{dsm_dim}=1", output_type="max", where="PT_VEG_DSM==1"
+        resolution=0.75, value=f"{dsm_dimension}=1", output_type="max", where="PT_VEG_DSM==1"
     )
 
     # 2 - select the points for the DTM and the DSM
 
     # select DTM points (max) on a regular grid
     pipeline |= pdal.Filter.gridDecimation(
-        resolution=0.5, value=f"{dtm_dim}=1", output_type="max", where="Classification==2"
+        resolution=0.5, value=f"{dtm_dimension}=1", output_type="max", where="Classification==2"
     )
 
     # select DSM points (max) on a regular grid
     pipeline |= pdal.Filter.gridDecimation(
         resolution=0.5,
-        value=f"{dsm_dim}=1",
+        value=f"{dsm_dimension}=1",
         output_type="max",
-        where="(" + macro.build_condition("Classification", [6, 9, 17, 64]) + f") || {dsm_dim}==1",
+        where="("
+        + macro.build_condition("Classification", [6, 9, 17, 64])
+        + f") || {dsm_dimension}==1",
     )
 
     # assign selected ground points: points close to vegetation, bridges, water, class 64
     pipeline = macro.add_radius_assign(
         pipeline,
         1.5,
         False,
-        condition_src=f"{dtm_dim}==1",
+        condition_src=f"{dtm_dimension}==1",
         condition_ref=macro.build_condition("Classification", [4, 5, 6, 9, 17, 64]),
-        condition_out=f"{dsm_dim}=1",
+        condition_out=f"{dsm_dimension}=1",
     )
 
     # 3 - handle bridges
@@ -145,32 +140,37 @@ def parse_args():
         condition_ref=macro.build_condition("Classification", [2, 3, 4, 5]),
         condition_out="PT_ON_BRIDGE=0",
     )
-    pipeline |= pdal.Filter.assign(value=[f"{dsm_dim}=0 WHERE PT_ON_BRIDGE==1"])
+    pipeline |= pdal.Filter.assign(value=[f"{dsm_dimension}=0 WHERE PT_ON_BRIDGE==1"])
 
     # 4 - points selected for the DTM are also used for the DSM
-    pipeline |= pdal.Filter.assign(value=[f"{dsm_dim}=1 WHERE {dtm_dim}==1"])
+    pipeline |= pdal.Filter.assign(value=[f"{dsm_dimension}=1 WHERE {dtm_dimension}==1"])
 
     # 5 - export the point cloud and the DSMs
     # TODO: only add the output dimensions that are actually needed!
 
-    pipeline |= pdal.Writer.las(extra_dims="all", forward="all", filename=args.output_las)
+    pipeline |= pdal.Writer.las(extra_dims="all", forward="all", filename=output_las)
 
-    if args.output_dtm:
+    if output_dtm:
         pipeline |= pdal.Writer.gdal(
             gdaldriver="GTiff",
             output_type="max",
             resolution=2.0,
-            filename=args.output_dtm,
-            where=f"{dtm_dim}==1",
+            filename=output_dtm,
+            where=f"{dtm_dimension}==1",
         )
 
-    if args.output_dsm:
+    if output_dsm:
         pipeline |= pdal.Writer.gdal(
             gdaldriver="GTiff",
             output_type="max",
             resolution=2.0,
-            filename=args.output_dsm,
-            where=f"{dsm_dim}==1",
+            filename=output_dsm,
+            where=f"{dsm_dimension}==1",
         )
 
     pipeline.execute()
+
+
+if __name__ == "__main__":
+    args = parse_args()
+    main(**vars(args))
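
With this change the processing pipeline lives in a main() function instead of the if __name__ == "__main__": block, so the script can be driven from Python directly (which is what the new test does) as well as from the command line. A minimal sketch of a direct call; the keyword names come from the new main() signature, while the file paths and marker dimension names below are purely illustrative:

from scripts.mark_points_to_use_for_digital_models_with_new_dimension import main

# Illustrative paths and dimension names; only the parameter names are taken from the diff.
main(
    input_las="input.las",
    output_las="marked_points.las",
    dsm_dimension="dsm_marker",
    dtm_dimension="dtm_marker",
    output_dsm="dsm.tif",  # pass "" to skip the DSM raster writer
    output_dtm="dtm.tif",  # pass "" to skip the DTM raster writer
)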
23 changes: 23 additions & 0 deletions (new test file)
@@ -0,0 +1,23 @@
+import tempfile
+
+import numpy as np
+import pdal
+
+from scripts.mark_points_to_use_for_digital_models_with_new_dimension import main
+
+
+def test_main():
+    ini_las = "test/data/4_6.las"
+    dsm_dimension = "dsm_marker"
+    dtm_dimension = "dtm_marker"
+    with tempfile.NamedTemporaryFile(suffix="_mark_points_output.las") as las_output:
+        main(ini_las, las_output.name, dsm_dimension, dtm_dimension, "", "")
+        pipeline = pdal.Pipeline()
+        pipeline |= pdal.Reader.las(las_output.name)
+        assert dsm_dimension in pipeline.quickinfo["readers.las"]["dimensions"].split(", ")
+        assert dtm_dimension in pipeline.quickinfo["readers.las"]["dimensions"].split(", ")
+
+        pipeline.execute()
+        arr = pipeline.arrays[0]
+        assert np.any(arr[dsm_dimension] == 1)
+        assert np.any(arr[dtm_dimension] == 1)
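
The test passes empty strings for output_dsm and output_dtm, so only the marked LAS is written and the two GDAL writers are skipped; the assertions then re-read the output cloud and require at least one point marked for each surface model. If a rough count of the marked points is useful while debugging, it can be read from the same structured array; a small sketch using the variables already defined in the test:

# Illustrative follow-up: count how many points were selected for each model.
n_dsm = int((arr[dsm_dimension] == 1).sum())
n_dtm = int((arr[dtm_dimension] == 1).sum())
print(f"DSM points: {n_dsm}, DTM points: {n_dtm}")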
