diff --git a/.clang-format b/.clang-format
new file mode 100644
index 0000000..a8c84af
--- /dev/null
+++ b/.clang-format
@@ -0,0 +1,6 @@
+---
+Language: Cpp
+BasedOnStyle: LLVM
+ColumnLimit: 100
+...
+
diff --git a/.flake8 b/.flake8
index 1536be0..a2f02e5 100644
--- a/.flake8
+++ b/.flake8
@@ -2,7 +2,7 @@
 max_line_length = 99
 show_source = True
 format = pylint
-extend-ignore = E203,E501
+extend-ignore = E501
 exclude =
     .git
     __pycache__
diff --git a/.github/workflows/cicd_docker.yml b/.github/workflows/cicd_docker.yml
new file mode 100644
index 0000000..f89233f
--- /dev/null
+++ b/.github/workflows/cicd_docker.yml
@@ -0,0 +1,27 @@
+name: cicd_docker
+
+on:
+  # Run tests for pull requests on main
+  pull_request:
+    branches:
+      - main
+
+env:
+  DOCKER_IMAGE_NAME: pdal_ign_plugin
+
+jobs:
+  build_docker_image_and_run_tests:
+
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout branch
+        uses: actions/checkout@v3
+
+      - name: Build docker image
+        run: docker build -t ${{ env.DOCKER_IMAGE_NAME }}:test .
+
+      - name: Run tests in docker image
+        run: docker run ${{ env.DOCKER_IMAGE_NAME }}:test python -m pytest
diff --git a/.github/workflows/cicd_test.yml b/.github/workflows/cicd_test.yml
index 62f2df9..4a92fc0 100644
--- a/.github/workflows/cicd_test.yml
+++ b/.github/workflows/cicd_test.yml
@@ -1,7 +1,7 @@
 name: cicd_test
 
 on:
-  # Run each time some code are push on any branch
+  # Run each time some code is pushed on any branch
   push:
     branches:
       - '**'
@@ -25,8 +25,8 @@ jobs:
         activate-environment: pdal_ign_plugin
         environment-file: ./environment.yml
         auto-activate-base: true
-
-      - name: compil_plugins
+
+      - name: compile_plugins
        run: source ./ci/build.sh
 
       - name: test
diff --git a/.gitignore b/.gitignore
index d8c4899..ec967c4 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,5 +1,7 @@
 xcode
+.vscode
+build
 install
-__pycache__
+*/__pycache__
 test/__pycache_
 test/.idea
diff --git a/Dockerfile b/Dockerfile
index 3a1cbfd..f082bce 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,27 +1,33 @@
 FROM mambaorg/micromamba:bullseye-slim as build
-
+
 COPY environment_docker.yml /environment_docker.yml
-
-USER root
-RUN micromamba env create -f /environment_docker.yml
+
+USER root
+RUN micromamba env create -f /environment_docker.yml
 SHELL ["micromamba", "run", "-n", "pdal_ign_plugin", "/bin/bash", "-c"]
 
 RUN apt-get update && apt-get install --no-install-recommends -y cmake make build-essential g++ && rm -rf /var/lib/apt/lists/*
-
-COPY src src
+
+COPY src src
 COPY CMakeLists.txt CMakeLists.txt
-COPY macro macro
-
-RUN cmake -G"Unix Makefiles" -DCONDA_PREFIX=$CONDA_PREFIX -DCMAKE_BUILD_TYPE=Release
+RUN cmake -G"Unix Makefiles" -DCONDA_PREFIX=$CONDA_PREFIX -DCMAKE_BUILD_TYPE=Release
 RUN make -j4 install
-
+
 FROM debian:bullseye-slim
-
+
 COPY --from=build /opt/conda/envs/pdal_ign_plugin /opt/conda/envs/pdal_ign_plugin
-RUN mkdir -p /pdal_ign_plugin
+RUN mkdir -p /pdal_ign_plugin
 COPY --from=build /tmp/install/lib /pdal_ign_plugin/install/lib
-COPY --from=build /tmp/macro /macro
-ENV PATH=$PATH:/opt/conda/envs/pdal_ign_plugin/bin/
-ENV PROJ_LIB=/opt/conda/envs/pdal_ign_plugin/share/proj/
+ENV PATH=$PATH:/opt/conda/envs/pdal_ign_plugin/bin/
+ENV PROJ_LIB=/opt/conda/envs/pdal_ign_plugin/share/proj/
 ENV PDAL_DRIVER_PATH=/pdal_ign_plugin/install/lib
+
+# Install python macro module
+COPY macro /pdal_ign_plugin/macro
+COPY pyproject.toml /pdal_ign_plugin/pyproject.toml
+WORKDIR /pdal_ign_plugin
+RUN pip install .
+
+# Add example scripts + test data (so that the tests can be run inside the docker image)
+COPY scripts /pdal_ign_plugin/scripts
+COPY test /pdal_ign_plugin/test
diff --git a/README.md b/README.md
index c98a2f5..553ed59 100755
--- a/README.md
+++ b/README.md
@@ -2,40 +2,50 @@
 
 ## Compile
 
-You need to have conda !
+You need to have conda!
+
+Create the `pdal_ign_plugin` conda environment using the `environment.yml` file
+to be able to run the compilation in this environment.
 
 ### linux/mac
 
 run ci/build.sh
 
-### Windows
+### Windows
 
 one day, maybe...
 
 ## Architecture of the code
 
-The code is structured as :
+The code is structured as:
 
 ```
 ├── src
-│   ├── plugins forlder
-│   │   ├── plufinFilter.cpp
-│   │   ├── plufinFilter.h
+│   ├── plugin folder
+│   │   ├── pluginFilter.cpp
+│   │   ├── pluginFilter.h
 │   │   ├── CMakeLists.txt
 ├── doc
 │   ├── pluginFilter.md
 ├── ci
+├── macro   # Python module with ready-to-use filter combinations
+│   ├── __init__.py
+│   ├── macro.py
+│   └── version.py
+├── scripts
+│   ├── *.py   # Example scripts using the plugin filters and the filter combinations from `macro`
 ├── test
 ├── CMakeLists.txt
 ├── environment*.yml
-├── Dockerfile
-├── .github
+├── Dockerfile
+├── pyproject.toml   # Setup file to install the `macro` python module with pip
+├── .github
 └── .gitignore
 ```
 
 ## Run the tests
 
-Each plugin should have his own test. To run test :
+Each plugin should have its own test. To run all tests:
 
 ```
 python -m pytest -s
@@ -43,27 +53,28 @@ python -m pytest -s
 ```
 
 ## List of Filters
 
-[grid decimation](./doc/grid_decimation.md)
+[grid decimation](./doc/grid_decimation.md) [Deprecated: use the gridDecimation filter from the pdal repository]
 
 [radius assign](./doc/radius_assign.md)
 
 ## Adding a filter
 
-In order to add a filter, you have to add a new folder in the src directory :
+In order to add a filter, you have to add a new folder in the src directory:
 
 ```
 ├── src
 │   ├── filter_my_new_PI
 │   │   ├── my_new_PI_Filter.cpp
 │   │   ├── my_new_PI_Filter.h
-│   │   ├── CMakeLisits.txt
+│   │   ├── CMakeLists.txt
 ```
 
 The name of the folder informs of the plugIN nature (reader, writer, filter).
 
-The code should respect the documentation purpose by pdal : [build a pdal plugin](https://pdal.io/en/2.6.0/development/plugins.html). Be careful to change if the plugIn is a reader, a writer or a filter.
+The code should respect the documentation proposed by pdal: [build a pdal plugin](https://pdal.io/en/2.6.0/development/plugins.html).
+Be careful to adapt it depending on whether the plugIN is a reader, a writer or a filter.
 
-The CMakeList should contains :
+The CMakeList should contain:
 
 ```
 file( GLOB_RECURSE GD_SRCS ${CMAKE_SOURCE_DIR} *)
@@ -78,15 +89,44 @@ PDAL_CREATE_PLUGIN(
 
 install(TARGETS pdal_plugin_filter_my_new_PI)
 ```
 
-You should complet the principal CMakeList by adding the new plugIN :
-
+You should complete the main CMakeList by adding the new plugIN:
 
 ```
 add_subdirectory(src/filter_my_new_PI)
 ```
 
-Each plugIN has his own md file in the doc directory, structured as the [model](./doc/_doc_model_plugIN.md).
+Each plugIN has its own md file in the doc directory, structured as the [model](./doc/_doc_model_plugIN.md).
+
+Don't forget to update [the list](#list-of-filters) with a link to the documentation.
+
+## `macro` python module usage
+
+The `macro` python module is installed in the project docker image so that it can be imported from anywhere in the
+docker image.
+
+### Syntax to use it in a python script
+
+```python
+from macro import macro
+
+macro.my_macro(...)
+```
+
+See the `scripts` folder for example usages of this module.
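+
+As a fuller sketch (editor's example: the file names and filter arguments are illustrative, and
+`my_macro` above is a placeholder), a pipeline combining a reader, one of the macros and a writer
+could look like this:
+
+```python
+import pdal
+
+from macro import macro
+
+pipeline = pdal.Pipeline() | pdal.Reader.las("input.las")
+# reclassify ground points (2) that have high vegetation (4 or 5) closer than 1 m in 2d
+pipeline = macro.add_radius_assign(
+    pipeline,
+    1,
+    False,
+    condition_src="Classification==2",
+    condition_ref=macro.build_condition("Classification", [4, 5]),
+    condition_out="Classification=3",
+)
+pipeline |= pdal.Writer.las(extra_dims="all", forward="all", filename="output.las")
+pipeline.execute()
+```
+
+The `radius_assign` filter used by this macro is one of the plugins of this repository: PDAL finds
+it through the `PDAL_DRIVER_PATH` environment variable, which is already set in the docker image.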
-D'ont forget to update [the list](#list-of-filters) with a link with the documentation.
+### Usage from outside the docker image
+
+If you have a python script on your computer, you can mount its containing folder as a volume in order to
+run it in the docker image.
+
+Example:
+
+```bash
+docker run \
+    -v /my/data/folder:/data \
+    -v /my/output/folder:/output \
+    -v /my/script/folder:/scripts \
+    pdal_ign_plugin \
+    python /scripts/my_script.py --input /data/my_data_file.las -o /output/my_output.las
+```
-
diff --git a/ci/build.sh b/ci/build.sh
index b226faf..a870025 100755
--- a/ci/build.sh
+++ b/ci/build.sh
@@ -1,7 +1,9 @@
 #!/bin/sh
 
+set -e
+
 FILE=~/anaconda3/etc/profile.d/conda.sh
-if [ -e ~/anaconda3/etc/profile.d/conda.sh ]
+if [ -e ~/anaconda3/etc/profile.d/conda.sh ]
 then
     source ~/anaconda3/etc/profile.d/conda.sh
 elif [ -e ~/miniconda3/etc/profile.d/conda.sh ]
@@ -10,8 +12,8 @@ then
     source ~/miniconda3/etc/profile.d/conda.sh
 elif [ -e /usr/share/miniconda/etc/profile.d/conda.sh ]
 then
     source /usr/share/miniconda/etc/profile.d/conda.sh
-elif [ -e ~/miniforge3/etc/profile.d/conda.sh ]
-then
+elif [ -e ~/miniforge3/etc/profile.d/conda.sh ]
+then
    source ~/miniforge3/etc/profile.d/conda.sh
 elif [[ -z "${CONDASH}" ]]; then
   echo ERROR: Failed to load conda.sh : ~/anaconda3/etc/profile.d/conda.sh or ~/miniforge3/etc/profile.d/conda.sh or env CONDASH
@@ -28,10 +30,10 @@ echo conda is $CONDA_PREFIX
 
 mkdir build
 cd build
-cmake -G"Unix Makefiles" -DCONDA_PREFIX=$CONDA_PREFIX -DCMAKE_BUILD_TYPE=Release ../
+cmake -G"Unix Makefiles" -DCONDA_PREFIX=$CONDA_PREFIX -DCMAKE_BUILD_TYPE=Release ../
 make install
 conda deactivate
 cd ..
-rm -rf build
\ No newline at end of file
+rm -rf build
\ No newline at end of file
diff --git a/environment.yml b/environment.yml
index 695737c..9182325 100755
--- a/environment.yml
+++ b/environment.yml
@@ -13,7 +13,7 @@ dependencies:
   - isort # import sorting
   - flake8 # code analysis
   - pytest
-# --------- pip & pip librairies --------- #
+# --------- pip & pip libraries --------- #
   - pip
   - pip:
     - ign-pdal-tools
diff --git a/environment_docker.yml b/environment_docker.yml
index 7e23ef6..6d00e85 100755
--- a/environment_docker.yml
+++ b/environment_docker.yml
@@ -6,8 +6,10 @@ dependencies:
   - pdal
   - python-pdal
   - gdal
-# --------- pip & pip librairies --------- #
+  - pytest
+
+  # --------- pip & pip libraries --------- #
   - pip
   - pip:
     - ign-pdal-tools
-
+
diff --git a/macro/ex_filtering_points_with_add_dimensions.py b/macro/ex_filtering_points_with_add_dimensions.py
deleted file mode 100755
index cf717e8..0000000
--- a/macro/ex_filtering_points_with_add_dimensions.py
+++ /dev/null
@@ -1,145 +0,0 @@
-import argparse
-
-import pdal
-
-import macro
-
-"""
-This tool shows how to use functions of macro in a pdal pipeline
-"""
-
-
-def parse_args():
-    parser = argparse.ArgumentParser(
-        "Tool to apply pdal pipelines for DSM and DTM calculation (with add dimensions for the concerned points)"
-    )
-    parser.add_argument("--input", "-i", type=str, required=True, help="Input las file")
-    parser.add_argument(
-        "--output_las", "-o", type=str, required=True, help="Output cloud las file"
-    )
-    parser.add_argument("--output_dsm", "-s", type=str, required=True, help="Output dsm tiff file")
-    parser.add_argument("--output_dtm", "-t", type=str, required=True, help="Output dtm tiff file")
-    return parser.parse_args()
-
-
-if __name__ == "__main__":
-    args = parse_args()
-
-    pipeline = pdal.Reader.las(args.input)
-
-    # 0 - ajout de dimensions temporaires
-    pipeline |= pdal.Filter.ferry(
-        dimensions="=>PT_GRID_DSM, =>PT_VEG_DSM, =>PT_GRID_DTM, =>PT_ON_BRIDGE"
-    )
-
-    # 1 - recherche des points max de végétation (4,5) sur une grille régulière, avec prise en compte des points sol (2) et basse
-    # vegetation (3) proche de la végétation
-    # pour le calcul du DSM
-
-    pipeline |= pdal.Filter.assign(
-        value=["PT_VEG_DSM = 1 WHERE " + macro.build_condition("Classification", [4, 5])]
-    )
-
-    # bouche trou : assigne les points sol à l'intérieur de la veget (4,5)
-    pipeline = macro.add_radius_assign(
-        pipeline,
-        1,
-        False,
-        condition_src="Classification==2",
-        condition_ref=macro.build_condition("Classification", [4, 5]),
-        condition_out="PT_VEG_DSM=1",
-    )
-    pipeline = macro.add_radius_assign(
-        pipeline,
-        1,
-        False,
-        condition_src="PT_VEG_DSM==1 && Classification==2",
-        condition_ref="Classification==2",
-        condition_out="PT_VEG_DSM=0",
-    )
-
-    # selection des points de veget basse proche de la veget haute
-    pipeline = macro.add_radius_assign(
-        pipeline,
-        1,
-        False,
-        condition_src="Classification==3",
-        condition_ref="Classification==5",
-        condition_out="PT_VEG_DSM=1",
-    )
-
-    # max des points de veget (PT_VEG_DSM==1) sur une grille régulière :
-    pipeline |= pdal.Filter.gridDecimation(
-        resolution=0.75, value="PT_GRID_DSM=1", output_type="max", where="PT_VEG_DSM==1"
-    )
-
-    # 2 - sélection des points pour DTM et DSM
-
-    # selection de points DTM (max) sur une grille régulière
-    pipeline |= pdal.Filter.gridDecimation(
-        resolution=0.5, value="PT_GRID_DTM=1", output_type="max", where="Classification==2"
-    )
-
-    # selection de points DSM (max) sur une grille régulière
-    pipeline |= pdal.Filter.gridDecimation(
-        resolution=0.5,
-        value="PT_GRID_DSM=1",
-        output_type="max",
-        where="("
-        + macro.build_condition("Classification", [6, 9, 17, 64])
-        + ") || PT_GRID_DSM==1",
-    )
-
-    # assigne des points sol sélectionnés : les points proches de la végétation, des ponts, de l'eau, 64
-    pipeline = macro.add_radius_assign(
-        pipeline,
-        1.5,
-        False,
-        condition_src="PT_GRID_DTM==1",
-        condition_ref=macro.build_condition("Classification", [4, 5, 6, 9, 17, 64]),
-        condition_out="PT_GRID_DSM=1",
-    )
-
-    # 3 - gestion des ponts
-    # bouche trou : on filtre les points (2,3,4,5,9) au milieu du pont en les mettant à PT_ON_BRIDGE=1
-
-    pipeline = macro.add_radius_assign(
-        pipeline,
-        1.5,
-        False,
-        condition_src=macro.build_condition("Classification", [2, 3, 4, 5, 9]),
-        condition_ref="Classification==17",
-        condition_out="PT_ON_BRIDGE=1",
-    )
-    pipeline = macro.add_radius_assign(
-        pipeline,
-        1.5,
-        False,
-        condition_src="PT_ON_BRIDGE==1",
-        condition_ref=macro.build_condition("Classification", [2, 3, 4, 5]),
-        condition_out="PT_ON_BRIDGE=0",
-    )
-    pipeline |= pdal.Filter.assign(value=["PT_GRID_DSM=0 WHERE PT_ON_BRIDGE==1"])
-
-    # 4 - point pour DTM servent au DSM également
-    pipeline |= pdal.Filter.assign(value=["PT_GRID_DSM=1 WHERE PT_GRID_DTM==1"])
-
-    # 5 - export du nuage et des DSM
-
-    pipeline |= pdal.Writer.las(extra_dims="all", forward="all", filename=args.output_las)
-    pipeline |= pdal.Writer.gdal(
-        gdaldriver="GTiff",
-        output_type="max",
-        resolution=2.0,
-        filename=args.output_dtm,
-        where="PT_GRID_DTM==1",
-    )
-    pipeline |= pdal.Writer.gdal(
-        gdaldriver="GTiff",
-        output_type="max",
-        resolution=2.0,
-        filename=args.output_dsm,
-        where="PT_GRID_DSM==1",
-    )
-
-    pipeline.execute()
diff --git a/macro/macro.py b/macro/macro.py
index 89f66d4..08739c6 100755
--- a/macro/macro.py
+++ b/macro/macro.py
@@ -5,18 +5,41 @@
 """
 
 
-def add_radius_assign(pipeline, radius, search_3d, condition_src, condition_ref, condition_out):
+def add_radius_assign(
+    pipeline: pdal.Pipeline,
+    radius: float,
+    search_3d: bool,
+    condition_src: str,
+    condition_ref: str,
+    condition_out: str,
+    max2d_above: float = -1,
+    max2d_below: float = -1,
+) -> pdal.Pipeline:
     """
-    search points from "condition_src" closed from "condition_ref", and reassign them to "condition_out"
+    Search points from "condition_src" that are closer than "radius" from points that
+    belong to "condition_ref" and modify them with "condition_out"
+
     This combination is equivalent to the CloseBy macro of TerraScan
-    radius : the search distance
-    search_3d : the distance reseach is in 3d if True
-    condition_src, condition_ref, condition_out : a pdal condition as "Classification==2"
+
+    Args:
+        pipeline (pdal.Pipeline): pdal pipeline
+        radius (float): search distance
+        search_3d (bool): the distance search is in 3d if True (2d otherwise)
+        condition_src (str): pdal condition for the points to apply the modification to (e.g. "Classification==2")
+        condition_ref (str): pdal condition for the potential neighbors to search for (e.g. "Classification==4")
+        condition_out (str): pdal condition to apply to the points that belong to "condition_src" and
+            have a point from "condition_ref" closer than "radius" (e.g. "Classification==2")
+        max2d_above (float, optional): in case of 2d search, upward limit for potential neighbors. Defaults to -1.
+        max2d_below (float, optional): in case of 2d search, downward limit for potential neighbors. Defaults to -1.
+
+    Returns:
+        pdal.Pipeline: output pipeline with the radius_assign steps added.
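+
+    Example (editor's sketch, arguments are illustrative):
+        # reclassify ground points (2) that have high vegetation (5) closer than 1 m in 2d
+        pipeline = add_radius_assign(
+            pipeline,
+            1,
+            False,
+            condition_src="Classification==2",
+            condition_ref="Classification==5",
+            condition_out="Classification=3",
+        )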
     """
+
     pipeline |= pdal.Filter.ferry(dimensions="=>REF_DOMAIN, =>SRC_DOMAIN, =>radius_search")
     pipeline |= pdal.Filter.assign(
         value=[
-            "SRS_DOMAIN = 0",
+            "SRC_DOMAIN = 0",
             f"SRC_DOMAIN = 1 WHERE {condition_src}",
             "REF_DOMAIN = 0",
             f"REF_DOMAIN = 1 WHERE {condition_ref}",
@@ -29,19 +52,21 @@
         reference_domain="REF_DOMAIN",
         output_dimension="radius_search",
         is3d=search_3d,
+        max2d_above=max2d_above,
+        max2d_below=max2d_below,
     )
     pipeline |= pdal.Filter.assign(value=condition_out, where="radius_search==1")
     return pipeline
 
 
-def classify_hgt_ground(pipeline, hmin, hmax, condition, condition_out):
+def classify_hgt_ground(pipeline, h_min, h_max, condition, condition_out):
     """
-    reassign points from "condition" between "hmin" and "hmax" of the ground to "condition_out"
+    reassign points from "condition" between "h_min" and "h_max" of the ground to "condition_out"
     This combination is equivalent to the ClassifyHgtGrd macro of TerraScan
     condition, condition_out : a pdal condition as "Classification==2"
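+
+    Example (editor's sketch): reclassify ground points (2) lying between 0.5 m and 1.5 m
+    above the ground surface as low vegetation (3):
+        pipeline = classify_hgt_ground(pipeline, 0.5, 1.5, "Classification==2", "Classification=3")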
     """
     pipeline |= pdal.Filter.hag_delaunay(allow_extrapolation=True)
-    condition_h = f"HeightAboveGround>{hmin} && HeightAboveGround<={hmax}"
+    condition_h = f"HeightAboveGround>{h_min} && HeightAboveGround<={h_max}"
     condition_h += " && " + condition
     pipeline |= pdal.Filter.assign(value=condition_out, where=condition_h)
     return pipeline
diff --git a/pyproject.toml b/pyproject.toml
index 57a5583..165ffe3 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,2 +1,27 @@
+[project]
+name = "pdal_ign_macro"
+dynamic = ["version"]
+dependencies = []
+
+[tool.setuptools.dynamic]
+version = { attr = "macro.version.__version__" }
+
+[tool.setuptools]
+packages = ["macro"]
+
 [tool.black]
 line-length = 99
+include = '\.pyi?$'
+exclude = '''
+/(
+    \.toml
+    |\.sh
+    |\.git
+    |\.ini
+    |\.bat
+    | data
+)/
+'''
+
+[tool.isort]
+profile = "black"
diff --git a/macro/ex_filtering_points.py b/scripts/ex_filtering_points.py
similarity index 99%
rename from macro/ex_filtering_points.py
rename to scripts/ex_filtering_points.py
index a4c3f2f..37987c9 100755
--- a/macro/ex_filtering_points.py
+++ b/scripts/ex_filtering_points.py
@@ -2,7 +2,7 @@
 
 import pdal
 
-import macro
+from macro import macro
 
 """
 This tool shows how to use functions of macro in a pdal pipeline
diff --git a/scripts/mark_points_to_use_for_digital_models_with_new_dimension.py b/scripts/mark_points_to_use_for_digital_models_with_new_dimension.py
new file mode 100755
index 0000000..18aed19
--- /dev/null
+++ b/scripts/mark_points_to_use_for_digital_models_with_new_dimension.py
@@ -0,0 +1,195 @@
+import argparse
+
+import pdal
+
+from macro import macro
+
+"""
+This tool applies a pdal pipeline to select points for DSM and DTM calculation
+It adds dimensions with positive values for the selected points
+"""
+
+
+def parse_args():
+    parser = argparse.ArgumentParser(
+        "Tool to apply pdal pipelines to select points for DSM and DTM calculation "
+        + "(add dimensions with positive values for the selected points)"
+    )
+    parser.add_argument("--input_las", "-i", type=str, required=True, help="Input las file")
+    parser.add_argument(
+        "--output_las", "-o", type=str, required=True, help="Output cloud las file"
+    )
+    parser.add_argument(
+        "--dsm_dimension",
+        type=str,
+        required=False,
+        default="dsm_marker",
+        help="Dimension name for the output DSM marker",
+    )
+    parser.add_argument(
+        "--dtm_dimension",
+        type=str,
+        required=False,
+        default="dtm_marker",
+        help="Dimension name for the output DTM marker",
+    )
+    parser.add_argument(
+        "--output_dsm", "-s", type=str, required=False, default="", help="Output dsm tiff file"
+    )
+    parser.add_argument(
+        "--output_dtm", "-t", type=str, required=False, default="", help="Output dtm tiff file"
+    )
+    return parser.parse_args()
+
+
+def main(input_las, output_las, dsm_dimension, dtm_dimension, output_dsm, output_dtm):
+    pipeline = pdal.Pipeline() | pdal.Reader.las(input_las)
+
+    # 0 - add temporary and output dimensions
+    added_dimensions = [dtm_dimension, dsm_dimension, "PT_VEG_DSM", "PT_ON_BRIDGE"]
+    pipeline |= pdal.Filter.ferry(dimensions="=>" + ", =>".join(added_dimensions))
+
+    # 1 - find the highest vegetation points (4, 5) on a regular grid, taking into account
+    # the ground (2) and low vegetation (3) points close to the vegetation,
+    # for the DSM computation
+
+    pipeline |= pdal.Filter.assign(
+        value=["PT_VEG_DSM = 1 WHERE " + macro.build_condition("Classification", [4, 5])]
+    )
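+    # NB: macro.build_condition("Classification", [4, 5]) presumably expands to
+    # "Classification==4 || Classification==5" (elsewhere in this script it is wrapped in
+    # parentheses before being combined with other conditions)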
output_type="max", where="PT_VEG_DSM==1" + # ) + pipeline |= pdal.Filter.grid_decimation_deprecated( + resolution=0.75, output_dimension=dsm_dimension, output_type="max", where="PT_VEG_DSM==1" + ) + + # 2 - sélection des points pour DTM et DSM + + # selection de points DTM (max) sur une grille régulière + # TODO: remplacer par GridDecimation une fois le correctif mergé dans PDAL + # pipeline |= pdal.Filter.GridDecimation( + # resolution=0.5, value=f"{dtm_dimension}=1", output_type="max", where="Classification==2" + # ) + pipeline |= pdal.Filter.grid_decimation_deprecated( + resolution=0.5, + output_dimension=dtm_dimension, + output_type="max", + where="Classification==2", + ) + + # selection de points DSM (max) sur une grille régulière + # TODO: remplacer par GridDecimation une fois le correctif mergé dans PDAL + # pipeline |= pdal.Filter.GridDecimation( + # resolution=0.5, + # value=f"{dsm_dimension}=1", + # output_type="max", + # where="(" + # + macro.build_condition("Classification", [6, 9, 17, 64]) + # + f") || {dsm_dimension}==1", + # ) + pipeline |= pdal.Filter.grid_decimation_deprecated( + resolution=0.5, + output_dimension=dsm_dimension, + output_type="max", + where="(" + + macro.build_condition("Classification", [6, 9, 17, 64]) + + f") || {dsm_dimension}==1", + ) + + # assigne des points sol sélectionnés : les points proches de la végétation, des ponts, de l'eau, 64 + pipeline = macro.add_radius_assign( + pipeline, + 1.5, + False, + condition_src=f"{dtm_dimension}==1", + condition_ref=macro.build_condition("Classification", [4, 5, 6, 9, 17, 64]), + condition_out=f"{dsm_dimension}=1", + ) + + # 3 - gestion des ponts + # bouche trou : on filtre les points (2,3,4,5,9) au milieu du pont en les mettant à PT_ON_BRIDGE=1 + + pipeline = macro.add_radius_assign( + pipeline, + 1.5, + False, + condition_src=macro.build_condition("Classification", [2, 3, 4, 5, 9]), + condition_ref="Classification==17", + condition_out="PT_ON_BRIDGE=1", + max2d_above=0, # ne pas prendre les points qui sont au dessus des points pont (condition_ref) + max2d_below=-1, # prendre tous les points qui sont en dessous des points pont (condition_ref) + ) + pipeline = macro.add_radius_assign( + pipeline, + 1.5, + False, + condition_src="PT_ON_BRIDGE==1", + condition_ref=macro.build_condition("Classification", [2, 3, 4, 5]), + condition_out="PT_ON_BRIDGE=0", + ) + pipeline |= pdal.Filter.assign(value=[f"{dsm_dimension}=0 WHERE PT_ON_BRIDGE==1"]) + + # 4 - point pour DTM servent au DSM également + pipeline |= pdal.Filter.assign(value=[f"{dsm_dimension}=1 WHERE {dtm_dimension}==1"]) + + # 5 - export du nuage et des DSM + # TODO: n'ajouter que les dimensions de sortie utiles ! 
+
+    pipeline |= pdal.Writer.las(extra_dims="all", forward="all", filename=output_las)
+
+    if output_dtm:
+        pipeline |= pdal.Writer.gdal(
+            gdaldriver="GTiff",
+            output_type="max",
+            resolution=2.0,
+            filename=output_dtm,
+            where=f"{dtm_dimension}==1",
+        )
+
+    if output_dsm:
+        pipeline |= pdal.Writer.gdal(
+            gdaldriver="GTiff",
+            output_type="max",
+            resolution=2.0,
+            filename=output_dsm,
+            where=f"{dsm_dimension}==1",
+        )
+
+    pipeline.execute()
+
+
+if __name__ == "__main__":
+    args = parse_args()
+    main(**vars(args))
diff --git a/test/data/4_6.las b/test/data/4_6.las
index b69ac5c..79be2aa 100644
Binary files a/test/data/4_6.las and b/test/data/4_6.las differ
diff --git a/test/scripts/test_mark_points_to_use_for_digital_models_with_new_dimension.py b/test/scripts/test_mark_points_to_use_for_digital_models_with_new_dimension.py
new file mode 100644
index 0000000..570d247
--- /dev/null
+++ b/test/scripts/test_mark_points_to_use_for_digital_models_with_new_dimension.py
@@ -0,0 +1,23 @@
+import tempfile
+
+import numpy as np
+import pdal
+
+from scripts.mark_points_to_use_for_digital_models_with_new_dimension import main
+
+
+def test_main():
+    ini_las = "test/data/4_6.las"
+    dsm_dimension = "dsm_marker"
+    dtm_dimension = "dtm_marker"
+    with tempfile.NamedTemporaryFile(suffix="_mark_points_output.las") as las_output:
+        main(ini_las, las_output.name, dsm_dimension, dtm_dimension, "", "")
+        pipeline = pdal.Pipeline()
+        pipeline |= pdal.Reader.las(las_output.name)
+        assert dsm_dimension in pipeline.quickinfo["readers.las"]["dimensions"].split(", ")
+        assert dtm_dimension in pipeline.quickinfo["readers.las"]["dimensions"].split(", ")
+
+        pipeline.execute()
+        arr = pipeline.arrays[0]
+        assert np.any(arr[dsm_dimension] == 1)
+        assert np.any(arr[dtm_dimension] == 1)