From 14fb3f052c52784fb89a7d6a869b0cc802ccb2fe Mon Sep 17 00:00:00 2001 From: Michael Dawson-Haggerty Date: Wed, 16 Dec 2020 13:02:18 -0500 Subject: [PATCH] Rtree Wheel Infrastructure (#163) * first pass * LSI install script * try curl * freaking sha256sum * update manylinux cmake * get deps on mac * skip test for now * rerun actions * do some things to copy in shared library * copy over shared library * remove FileNotFoundError * remove non-linux for testing * build with tests * add numpy to test_requires * skip unicode filenames on python 2 * try different comparison test * pass flake8 * skip cp27m * missing wildcard * skip broken test on python 2 * test_index passing flake8 * update finder and skip test * add back awful hack * skip pypy builds * linux working add back windows and macos * try building on windows with same script * try bash scripting windows * add imports to init * make build dirs if they don't exist * check before makedirs * fix typo * switch to batch file * batch * install out of script * foolishly try make * back * pip to batch * make? * remove more * ninja * copy compiler * try clang * test whitespace * clang? * windows all use choco * flake8 * clang-cl * try gh clang * try relative path * windows paths * escape * convoluted llvm path * hard paths * set env in gh * try fixing some cmake bugs * change working directory * cd doesnt work? * use batch path tomfoolery * try * try msbuild * copy dll * windows finder logic * typo * try again * finder logic * try script relative copy * use copy * add debug dir * add debug prints * tuple * build both 64 and 32? * try override * try release32? * add force32 * typo * skip 32 bit builds on windows * re-enable other wheels * typo * tpr test * skip test_tpr on windows * try simpler copy strategy * update copy * absolute paths * in too deep * more paths * finder changes working locally * add repair_wheel override * yaml typo * mac uses bsd cp * skip mac * move to lib * explicit copy for macos * pass in target location * what the heck * paths are hard * fix working copy * set search on mac * try * try some magic * different * static * linker * copy cmake * remove arg * try static build * try no rpath * renable wheel repair on mac * skip cleanup * skip test on mac for wheel debugging * fix path on windows * delocate args * fix extension * don't copy everything * change copy for mac * delocate args * get unrepaired wheel for testing * more dylib * try manual dylib repair * remove build directories to make sure paths are OK * minor fixes * add windows search paths for conda * add missed import * install rtree in docs image * docs pip3 * curl * try python2 again * apt * use apt spatial for docs * check windows/linux wheel repair * right * auditwheel * don't repair windows wheels * cd to test dir * revert test command --- .github/workflows/build.yml | 153 +----------------- .github/workflows/wheels.yml | 56 +++++++ README.md | 5 +- ci/CMakeLists.txt | 237 +++++++++++++++++++++++++++ ci/build-wheel-linux.sh | 49 ------ ci/build-wheel-osx.sh | 32 ---- ci/build-wheel.bat | 35 ---- ci/install_libspatialindex.bash | 64 ++++++++ ci/install_libspatialindex.bat | 23 +++ ci/test-wheel-linux.sh | 13 -- ci/test-wheel-osx.sh | 12 -- ci/test-wheel.bat | 17 -- docs/source/conf.py | 25 +-- rtree/__init__.py | 10 +- rtree/core.py | 102 +----------- rtree/exceptions.py | 4 + rtree/finder.py | 121 ++++++++++++++ scripts/visualize.py | 8 +- setup.py | 74 +++++++-- tests/test_index.py | 275 +++++++++++++++++++------------- tests/test_tpr.py | 121 +++++++------- 21 files changed, 835 insertions(+), 601 deletions(-) create mode 100644 .github/workflows/wheels.yml create mode 100644 ci/CMakeLists.txt delete mode 100755 ci/build-wheel-linux.sh delete mode 100755 ci/build-wheel-osx.sh delete mode 100644 ci/build-wheel.bat create mode 100755 ci/install_libspatialindex.bash create mode 100755 ci/install_libspatialindex.bat delete mode 100755 ci/test-wheel-linux.sh delete mode 100755 ci/test-wheel-osx.sh delete mode 100644 ci/test-wheel.bat create mode 100644 rtree/exceptions.py create mode 100644 rtree/finder.py diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 9e22a354..d5a5767d 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -78,126 +78,19 @@ jobs: run: | python3 -m pytest --doctest-modules rtree tests - - # linux-wheel: - # name: manylinux Wheel ${{ matrix.python-root }} - # - # runs-on: ubuntu-latest - # strategy: - # matrix: - # python-version: ['3.6','3.7','3.8'] - # - # steps: - # - uses: actions/checkout@v2 - # - name: Build - # shell: bash -l {0} - # run: | - # docker run -v $(pwd):/src quay.io/pypa/manylinux1_x86_64 /src/ci/build-wheel-linux.sh ${{ matrix.python-version }} - # - # - name: Setup Python - # uses: actions/setup-python@v1 - # with: - # python-version: ${{ matrix.python-version }} - # - # - name: Display Python version - # run: python -c "import sys; print(sys.version)" - # - name: Test - # shell: bash -l {0} - # run: | - # ./ci/test-wheel-linux.sh ${{ matrix.python-version }} - # - # - uses: actions/upload-artifact@v1 - # with: - # name: manylinux-${{ matrix.python-version }}-whl - # path: wheelhouse - # - # windows-wheel: - # name: Win64 ${{ matrix.python-version }} Wheel - # - # runs-on: ${{ matrix.os }} - # strategy: - # matrix: - # os: ['windows-latest'] - # python-version: ['3.6','3.7','3.8'] - # sidx-version: ['1.9.3'] - # fail-fast: true - # - # steps: - # - uses: actions/checkout@v2 - # - uses: goanpeca/setup-miniconda@v1.1.2 - # with: - # channels: conda-forge - # auto-update-conda: true - # python-version: ${{ matrix.python-version }} - # - name: Setup - # shell: bash -l {0} - # run: | - # conda install -c conda-forge compilers -y - # pip install cmake ninja - # - # - name: build - # shell: cmd /C CALL "{0}" - # run: | - # call .\ci\build-wheel.bat ${{ matrix.sidx-version }} - # - # - name: test - # shell: cmd /C CALL "{0}" - # run: | - # call .\ci\test-wheel.bat - # - # - uses: actions/upload-artifact@v1 - # with: - # name: win64-${{matrix.python-version}}-whl - # path: dist - # - # osx-wheel: - # name: OSX ${{ matrix.python-version }} Wheel - # - # runs-on: ${{ matrix.os }} - # strategy: - # matrix: - # os: ['macos-latest'] - # python-version: [3.6, 3.7, 3.8] - # fail-fast: true - # - # steps: - # - uses: actions/checkout@v2 - # - name: Setup Python - # uses: actions/setup-python@v1 - # with: - # python-version: ${{ matrix.python-version }} - # - # - name: Display Python version - # run: python -c "import sys; print(sys.version)" - # - # - name: Setup - # run: | - # python -m pip install cmake ninja - # - # - name: build - # run: | - # ./ci/build-wheel-osx.sh ${{ matrix.python-version }} - # - # - name: test - # run: | - # ./ci/test-wheel-osx.sh - # - # - uses: actions/upload-artifact@v1 - # with: - # name: osx-${{matrix.python-version}}-whl - # path: wheels - # - docs: name: Docs - runs-on: ubuntu-latest strategy: fail-fast: true container: osgeo/proj-docs - steps: - uses: actions/checkout@v2 + - name: Run libspatialindex build + run: | + apt-get update -y + apt-get install -y -qq libspatialindex-dev + pip3 install --user . - name: Print versions shell: bash -l {0} run: | @@ -241,42 +134,6 @@ jobs: export PATH=$PATH:/home/runner/.local/bin python3 setup.py sdist - # - uses: actions/download-artifact@v1 - # name: Linux 3.6 wheel - # with: - # name: manylinux-cp36-cp36m-whl - # path: dist - # - # - uses: actions/download-artifact@v1 - # name: Linux 3.7 wheel - # with: - # name: manylinux-cp37-cp37m-whl - # path: dist - # - # - uses: actions/download-artifact@v1 - # name: Linux 3.8 wheel - # with: - # name: manylinux-cp38-cp38-whl - # path: dist - # - # - uses: actions/download-artifact@v1 - # name: Win64 3.6 wheel - # with: - # name: win64-3.6-wheel - # path: dist - # - # - uses: actions/download-artifact@v1 - # name: Win64 3.7 wheel - # with: - # name: win64-3.7-wheel - # path: dist - # - # - uses: actions/download-artifact@v1 - # name: Win64 3.8 wheel - # with: - # name: win64-3.8-wheel - # path: dist - - uses: pypa/gh-action-pypi-publish@master name: Publish package if: github.event_name == 'release' && github.event.action == 'published' diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml new file mode 100644 index 00000000..a6761fb4 --- /dev/null +++ b/.github/workflows/wheels.yml @@ -0,0 +1,56 @@ +name: Build Wheels + +on: [push, pull_request] + +jobs: + build_wheels: + name: Build wheel on ${{ matrix.os }} + runs-on: ${{ matrix.os }} + env: + CIBW_SKIP: pp* *-win32 + CIBW_TEST_REQUIRES: pytest numpy + CIBW_TEST_COMMAND: "pytest -v {project}/tests" + # we are copying the shared libraries ourselves so skip magical copy + CIBW_REPAIR_WHEEL_COMMAND_MACOS: "" + CIBW_REPAIR_WHEEL_COMMAND_WINDOWS: "" + CIBW_BEFORE_BUILD_LINUX: "pip install cmake; bash {project}/ci/install_libspatialindex.bash" + strategy: + matrix: + os: [windows-latest, ubuntu-latest, macos-latest] + steps: + - uses: actions/checkout@v1 + - uses: actions/setup-python@v1 + name: Install Python + with: + python-version: '3.7' + - name: Install cibuildwheel + run: | + python -m pip install cibuildwheel==1.6.4 + - name: Run MacOS Preinstall Build + if: startsWith(matrix.os, 'macos') + run: | + # provides sha256sum + brew install coreutils + pip install cmake + bash ci/install_libspatialindex.bash + - name: Run Windows Preinstall Build + if: startsWith(matrix.os, 'windows') + run: | + choco install vcpython27 -f -y + ci\install_libspatialindex.bat + - name: Build wheels + run: | + python -m cibuildwheel --output-dir wheelhouse + - uses: actions/upload-artifact@v1 + with: + name: wheels + path: ./wheelhouse + - name: Upload To PyPi + env: + TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }} + TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} + # TODO : remove `if false` statement after secrets are set in Github UI + if: false + run: | + pip install twine + twine upload ./wheelhouse/* diff --git a/README.md b/README.md index 445e42df..f16a45bb 100644 --- a/README.md +++ b/README.md @@ -4,5 +4,8 @@ Rtree ![Build](https://github.com/Toblerity/rtree/workflows/Build/badge.svg) [![PyPI version](https://badge.fury.io/py/Rtree.svg)](https://badge.fury.io/py/Rtree) -Python bindings for libspatialindex 1.8.3. +RTree is a Python package with bindings for [libspatialindex](https://github.com/libspatialindex/libspatialindex). Wheels are available for most major platforms, and `rtree` with bundled `libspatialindex` can be installed via pip: +``` +pip install rtree +``` diff --git a/ci/CMakeLists.txt b/ci/CMakeLists.txt new file mode 100644 index 00000000..0a21d64f --- /dev/null +++ b/ci/CMakeLists.txt @@ -0,0 +1,237 @@ +# +# top-level CMake configuration file for libspatialindex +# +# (based originally on the libLAS files copyright Mateusz Loskot) + +SET(MSVC_INCREMENTAL_DEFAULT OFF) +cmake_minimum_required(VERSION 3.5.0) +project(spatialindex) + +#------------------------------------------------------------------------------ +# internal cmake settings +#------------------------------------------------------------------------------ + +set(CMAKE_COLOR_MAKEFILE ON) + +# C++11 required +set (CMAKE_CXX_STANDARD 11) + +# Allow advanced users to generate Makefiles printing detailed commands +mark_as_advanced(CMAKE_VERBOSE_MAKEFILE) + +# Path to additional CMake modules +set(CMAKE_MODULE_PATH "${PROJECT_SOURCE_DIR}/cmake/modules" ${CMAKE_MODULE_PATH}) + +# Make string comparison in cmake behave like you'd expect +cmake_policy(SET CMP0054 NEW) + +if (WIN32) + if(${CMAKE_VERSION} VERSION_GREATER "3.14.5") + cmake_policy(SET CMP0092 NEW) # don't put /w3 in flags + endif() +endif() + +if (APPLE) + set(CMAKE_MACOSX_RPATH ON) +endif (APPLE) + +#------------------------------------------------------------------------------ +# libspatialindex general settings +#------------------------------------------------------------------------------ + +SET(SIDX_VERSION_MAJOR "1") +SET(SIDX_VERSION_MINOR "9") +SET(SIDX_VERSION_PATCH "3") +SET(SIDX_LIB_VERSION "6.1.1") +SET(SIDX_LIB_SOVERSION "6") +SET(BUILD_SHARED_LIBS ON) + + +set(SIDX_VERSION_STRING "${SIDX_VERSION_MAJOR}.${SIDX_VERSION_MINOR}.${SIDX_VERSION_PATCH}") + +#------------------------------------------------------------------------------ +# libspatialindex general cmake options +#------------------------------------------------------------------------------ + +option(SIDX_BUILD_TESTS "Enables integrated test suites" OFF) + + +# Name of C++ library + +set(SIDX_LIB_NAME spatialindex) +set(SIDX_C_LIB_NAME spatialindex_c) + +if(WIN32) + if (MSVC) + if( CMAKE_SIZEOF_VOID_P EQUAL 8 ) + set( SIDX_LIB_NAME "spatialindex-64" ) + set( SIDX_C_LIB_NAME "spatialindex_c-64" ) + else( CMAKE_SIZEOF_VOID_P EQUAL 8 ) + set( SIDX_LIB_NAME "spatialindex-32" ) + set( SIDX_C_LIB_NAME "spatialindex_c-32" ) + endif( CMAKE_SIZEOF_VOID_P EQUAL 8 ) + endif() +endif() + +set(CMAKE_INCLUDE_DIRECTORIES_PROJECT_BEFORE ON) + +include (CheckFunctionExists) + +check_function_exists(srand48 HAVE_SRAND48) +check_function_exists(gettimeofday HAVE_GETTIMEOFDAY) +check_function_exists(memset HAVE_MEMSET) +check_function_exists(memcpy HAVE_MEMCPY) +check_function_exists(bcopy HAVE_BCOPY) + + +INCLUDE (CheckIncludeFiles) + + +#------------------------------------------------------------------------------ +# General build settings +#------------------------------------------------------------------------------ + +# note we default to RelWithDebInfo mode if not set +if(NOT CMAKE_BUILD_TYPE) + set(CMAKE_BUILD_TYPE RelWithDebInfo CACHE STRING + "Choose the type of build, options are: None Debug Release RelWithDebInfo MinSizeRel" FORCE) +endif() + +# Always show which build type we have +message(STATUS "Setting libspatialindex build type - ${CMAKE_BUILD_TYPE}") + +set(SIDX_BUILD_TYPE ${CMAKE_BUILD_TYPE}) + +# TODO: Still testing the output paths --mloskot +set(SIDX_BUILD_OUTPUT_DIRECTORY "${PROJECT_BINARY_DIR}/bin") + +# Output directory in which to build RUNTIME target files. +set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${SIDX_BUILD_OUTPUT_DIRECTORY}) + +# Output directory in which to build LIBRARY target files +set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${SIDX_BUILD_OUTPUT_DIRECTORY}) + +# Output directory in which to build ARCHIVE target files. +set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${SIDX_BUILD_OUTPUT_DIRECTORY}) + + +#------------------------------------------------------------------------------ +# Platform and compiler specific settings +#------------------------------------------------------------------------------ + +if(NOT WIN32) + # Recommended C++ compilation flags + set(SIDX_COMMON_CXX_FLAGS + "-pedantic -Wall -Wpointer-arith -Wcast-align -Wcast-qual -Wredundant-decls -Wno-long-long -Wl --no-undefined") +endif(NOT WIN32) + +if (APPLE) + set(SO_EXT dylib) + set(CMAKE_FIND_FRAMEWORK "LAST") +elseif(WIN32) + set(SO_EXT dll) +else() + set(SO_EXT so) +endif(APPLE) + + +enable_testing() + +#------------------------------------------------------------------------------ +# installation path settings +#------------------------------------------------------------------------------ + +if(WIN32) + set(DEFAULT_LIB_SUBDIR lib) + set(DEFAULT_DATA_SUBDIR .) + set(DEFAULT_INCLUDE_SUBDIR include) + + if (MSVC) + set(DEFAULT_BIN_SUBDIR bin) + else() + set(DEFAULT_BIN_SUBDIR .) + endif() +else() + # Common locations for Unix and Mac OS X + set(DEFAULT_BIN_SUBDIR bin) + set(DEFAULT_LIB_SUBDIR lib${LIB_SUFFIX}) + set(DEFAULT_DATA_SUBDIR share/spatialindex) + set(DEFAULT_INCLUDE_SUBDIR include) +endif() + +# Locations are changeable by user to customize layout of SIDX installation +# (default values are platform-specific) +set(SIDX_BIN_SUBDIR ${DEFAULT_BIN_SUBDIR} CACHE STRING + "Subdirectory where executables will be installed") +set(SIDX_LIB_SUBDIR ${DEFAULT_LIB_SUBDIR} CACHE STRING + "Subdirectory where libraries will be installed") +set(SIDX_INCLUDE_SUBDIR ${DEFAULT_INCLUDE_SUBDIR} CACHE STRING + "Subdirectory where header files will be installed") +set(SIDX_DATA_SUBDIR ${DEFAULT_DATA_SUBDIR} CACHE STRING + "Subdirectory where data will be installed") + +# Mark *_SUBDIR variables as advanced and dedicated to use by power-users only. +mark_as_advanced(SIDX_BIN_SUBDIR + SIDX_LIB_SUBDIR SIDX_INCLUDE_SUBDIR SIDX_DATA_SUBDIR) + +# Full paths for the installation +set(SIDX_BIN_DIR ${SIDX_BIN_SUBDIR}) +set(SIDX_LIB_DIR ${SIDX_LIB_SUBDIR}) +set(SIDX_INCLUDE_DIR ${SIDX_INCLUDE_SUBDIR}) +set(SIDX_DATA_DIR ${SIDX_DATA_SUBDIR}) + +#------------------------------------------------------------------------------ +# subdirectory controls +#------------------------------------------------------------------------------ + +add_subdirectory(src) + +if(SIDX_BUILD_TESTS) + add_subdirectory(test) +endif() + +#------------------------------------------------------------------------------ +# CPACK controls +#------------------------------------------------------------------------------ + +SET(CPACK_PACKAGE_VERSION_MAJOR ${SIDX_VERSION_MAJOR}) +SET(CPACK_PACKAGE_VERSION_MINOR ${SIDX_VERSION_MINOR}) +SET(CPACK_PACKAGE_VERSION_PATCH ${SIDX_VERSION_MINOR}) +SET(CPACK_PACKAGE_NAME "libspatialindex") + +SET(CPACK_SOURCE_GENERATOR "TBZ2;TGZ") +SET(CPACK_PACKAGE_VENDOR "libspatialindex Development Team") +SET(CPACK_RESOURCE_FILE_LICENSE "${PROJECT_SOURCE_DIR}/COPYING") + +set(CPACK_SOURCE_PACKAGE_FILE_NAME + "${CMAKE_PROJECT_NAME}-src-${SIDX_VERSION_STRING}") + +set(CPACK_SOURCE_IGNORE_FILES +"/\\\\.gitattributes;/\\\\.vagrant;/\\\\.DS_Store;/CVS/;/\\\\.git/;\\\\.swp$;~$;\\\\.\\\\#;/\\\\#") + +list(APPEND CPACK_SOURCE_IGNORE_FILES "CMakeScripts/") +list(APPEND CPACK_SOURCE_IGNORE_FILES "_CPack_Packages") +list(APPEND CPACK_SOURCE_IGNORE_FILES "cmake_install.cmake") +list(APPEND CPACK_SOURCE_IGNORE_FILES "/bin/") +list(APPEND CPACK_SOURCE_IGNORE_FILES "/scripts/") +list(APPEND CPACK_SOURCE_IGNORE_FILES "/azure-pipelines.yml") +list(APPEND CPACK_SOURCE_IGNORE_FILES ".gitignore") +list(APPEND CPACK_SOURCE_IGNORE_FILES ".ninja*") +list(APPEND CPACK_SOURCE_IGNORE_FILES "HOWTORELEASE.txt") + +list(APPEND CPACK_SOURCE_IGNORE_FILES "README") +list(APPEND CPACK_SOURCE_IGNORE_FILES "build/") + +list(APPEND CPACK_SOURCE_IGNORE_FILES "CMakeFiles") +list(APPEND CPACK_SOURCE_IGNORE_FILES "CTestTestfile.cmake") +list(APPEND CPACK_SOURCE_IGNORE_FILES "/docs/build/") +list(APPEND CPACK_SOURCE_IGNORE_FILES "/doc/presentations/") +list(APPEND CPACK_SOURCE_IGNORE_FILES "package-release.sh") +list(APPEND CPACK_SOURCE_IGNORE_FILES "docker-package.sh") + +list(APPEND CPACK_SOURCE_IGNORE_FILES ".gz2") + +list(APPEND CPACK_SOURCE_IGNORE_FILES ".bz2") + +include(CPack) +add_custom_target(dist COMMAND ${CMAKE_MAKE_PROGRAM} package_source) diff --git a/ci/build-wheel-linux.sh b/ci/build-wheel-linux.sh deleted file mode 100755 index 4c673d1b..00000000 --- a/ci/build-wheel-linux.sh +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/bash - -#/opt/python/cp38-cp38 - -#python-root: ['cp36-cp36m','cp37-cp37m','cp38-cp38'] - -# 3.6, 3.7, 3.8 -PYTHON_VERSION="$1" - -if [[ "$PYTHON_VERSION" == "3.6" ]]; then - PYTHONROOT="/opt/python/cp36-cp36m" -elif [[ "$PYTHON_VERSION" == "3.7" ]]; then - PYTHONROOT="/opt/python/cp37-cp37m" -elif [[ "$PYTHON_VERSION" == "3.8" ]]; then - PYTHONROOT="/opt/python/cp38-cp38" -fi - -echo "PYTHONROOT: " $PYTHONROOT - -$PYTHONROOT/bin/python -m pip install cmake - -git clone https://github.com/libspatialindex/libspatialindex.git -cd libspatialindex -mkdir build; cd build -$PYTHONROOT/bin/cmake -DCMAKE_INSTALL_PREFIX=/usr -DCMAKE_BUILD_TYPE=Release .. -NPROC=$($PYTHONROOT/bin/python -c "import multiprocessing;print(multiprocessing.cpu_count())") -make -j $NPROC install - -cd /src - -rm -rf build dist Rtree.egg-info/ - - -mkdir -p /src/rtree/lib -mkdir -p /src/rtree/include - -cp -r /usr/lib/libspatialindex* /src/rtree/lib -cp -r /usr/local/lib/libcrypt*.so* /src/rtree/lib -cp -r /usr/include/spatialindex/* /src/rtree/include - - - -$PYTHONROOT/bin/python setup.py bdist_wheel - - -for f in dist/*.whl -do - auditwheel repair $f -done; diff --git a/ci/build-wheel-osx.sh b/ci/build-wheel-osx.sh deleted file mode 100755 index a0454c4b..00000000 --- a/ci/build-wheel-osx.sh +++ /dev/null @@ -1,32 +0,0 @@ -#!/bin/bash - - -PYPREFIX=$(python -c "import sys; print(sys.prefix)") -python -c "import sys; print(sys.version)" -echo "PYPREFIX: " $PYPREFIX -python -m pip install cmake -python -m pip install delocate - -PREFIX=$(pwd) - -git clone https://github.com/libspatialindex/libspatialindex.git -cd libspatialindex -mkdir build; cd build -$PYPREFIX/bin/cmake -DCMAKE_INSTALL_PREFIX=$PREFIX/libspatialindex -DCMAKE_BUILD_TYPE=Release .. -NPROC=$($PYPREFIX/bin/python -c "import multiprocessing;print(multiprocessing.cpu_count())") -make -j $NPROC install - -rm -rf build dist Rtree.egg-info/ - - -cd $PREFIX -mkdir -p rtree/lib -mkdir -p rtree/include - -cp -r $PREFIX/libspatialindex/lib/libspatialindex* rtree/lib -cp -r $PREFIX/include/spatialindex/* rtree/include - -pwd -python setup.py bdist_wheel - -delocate-wheel -w wheels -v dist/*.whl diff --git a/ci/build-wheel.bat b/ci/build-wheel.bat deleted file mode 100644 index 321ccc1b..00000000 --- a/ci/build-wheel.bat +++ /dev/null @@ -1,35 +0,0 @@ - -call conda activate test -set SIDX_VERSION=%1 -REM conda install -c conda-forge compilers -y -REM -cd -cd .. - -where python -python -c "import sys; print(sys.version)" -python -m pip install cmake ninja -python -c "from urllib.request import urlretrieve; urlretrieve('https://github.com/libspatialindex/libspatialindex/archive/%SIDX_VERSION%.zip', 'libspatialindex.zip')" -where python -python -m "zipfile" -e libspatialindex.zip libspatialindex - -pushd "%~dp0" - -cd libspatialindex\libspatialindex-%SIDX_VERSION% -mkdir build -cd build - -set CC=cl.exe -set CXX=cl.exe -cmake -G Ninja -DCMAKE_BUILD_TYPE=Release .. -ninja - -popd -cd -mkdir rtree\lib -mkdir rtree\include -copy ".\libspatialindex\libspatialindex-%SIDX_VERSION%\build\bin\*.dll" .\rtree\lib -copy ".\libspatialindex\libspatialindex-%SIDX_VERSION%\include\*" .\rtree\include - -python setup.py bdist_wheel -popd diff --git a/ci/install_libspatialindex.bash b/ci/install_libspatialindex.bash new file mode 100755 index 00000000..3ce63273 --- /dev/null +++ b/ci/install_libspatialindex.bash @@ -0,0 +1,64 @@ +#!/bin/bash +set -xe + +# A simple script to install libspatialindex from a Github Release +VERSION=1.9.3 +SHA256=63a03bfb26aa65cf0159f925f6c3491b6ef79bc0e3db5a631d96772d6541187e + + +# where to copy resulting files +# this has to be run before `cd`-ing anywhere +gentarget() { + OURPWD=$PWD + cd "$(dirname "$0")" + mkdir -p ../rtree/lib + cd ../rtree/lib + arr=$(pwd) + cd "$OURPWD" + echo $arr +} + +scriptloc() { + OURPWD=$PWD + cd "$(dirname "$0")" + arr=$(pwd) + cd "$OURPWD" + echo $arr +} +# note that we're doing this convoluted thing to get +# an absolute path so mac doesn't yell at us +TARGET=`gentarget` +SL=`scriptloc` + +rm $VERSION.zip || true +curl -L -O https://github.com/libspatialindex/libspatialindex/archive/$VERSION.zip + +# check the file hash +echo "${SHA256} ${VERSION}.zip" | sha256sum --check + +rm -rf "libspatialindex-${VERSION}" || true +unzip $VERSION +cd libspatialindex-${VERSION} + +mkdir build +cd build + +cp "${SL}/CMakeLists.txt" .. + +cmake -DCMAKE_BUILD_TYPE=Release .. +make -j 4 + +# copy built libraries relative to path of this script +# -d means copy links as links rather than duplicate files +# macos uses "bsd cp" and needs special handling +if [ "$(uname)" == "Darwin" ]; then + # change the rpath in the dylib to point to the same directory + install_name_tool -change @rpath/libspatialindex.6.dylib @loader_path/libspatialindex.dylib bin/libspatialindex_c.dylib + # copy the dylib files to the target director + cp bin/libspatialindex.dylib $TARGET + cp bin/libspatialindex_c.dylib $TARGET +else + cp -d bin/* $TARGET +fi + +ls $TARGET diff --git a/ci/install_libspatialindex.bat b/ci/install_libspatialindex.bat new file mode 100755 index 00000000..e3e30117 --- /dev/null +++ b/ci/install_libspatialindex.bat @@ -0,0 +1,23 @@ +python -c "import sys; print(sys.version)" + +// A simple script to install libspatialindex from a Github Release +curl -L -O https://github.com/libspatialindex/libspatialindex/archive/1.9.3.zip + +unzip 1.9.3.zip +copy %~dp0\CMakeLists.txt libspatialindex-1.9.3\CMakeLists.txt +cd libspatialindex-1.9.3 + +mkdir build +cd build + +cmake -D CMAKE_BUILD_TYPE=Release .. + +"C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\MSBuild\Current\Bin\amd64\MSBuild.exe" spatialindex.sln + +mkdir %~dp0\..\rtree\lib +copy bin\Debug\*.dll %~dp0\..\rtree\lib +rmdir /Q /S bin + +dir %~dp0\..\rtree\ +dir %~dp0\..\rtree\lib + diff --git a/ci/test-wheel-linux.sh b/ci/test-wheel-linux.sh deleted file mode 100755 index 3a03d1fd..00000000 --- a/ci/test-wheel-linux.sh +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/bash - - - -python -m pip install pytest numpy - -for f in dist/*.whl -do - - python -m pip install $f -done; - -pytest diff --git a/ci/test-wheel-osx.sh b/ci/test-wheel-osx.sh deleted file mode 100755 index 9497aa0a..00000000 --- a/ci/test-wheel-osx.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash - - -python -m pip install pytest numpy - -for f in dist/*.whl -do - - python -m pip install $f -done; - -pytest diff --git a/ci/test-wheel.bat b/ci/test-wheel.bat deleted file mode 100644 index ff5b8346..00000000 --- a/ci/test-wheel.bat +++ /dev/null @@ -1,17 +0,0 @@ - -call conda activate test - -pushd "%~dp0" - -cd -dir dist -dir dist\Rtree*.whl - -for /f "delims=" %%a in ('dir /s /b .\dist\Rtree*.whl') do set "wheel=%%a" - -pip install pytest numpy -pip install %wheel% - -cd rtree\test - - diff --git a/docs/source/conf.py b/docs/source/conf.py index 31ba758a..3dc21552 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -11,21 +11,28 @@ # All configuration values have a default; values that are commented out # serve to show the default. -import sys, os +import rtree +import sys +import os sys.path.append('../../') -import rtree # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -#sys.path.append(os.path.abspath('.')) +# sys.path.append(os.path.abspath('.')) -# -- General configuration ----------------------------------------------------- +# -- General configuration ----------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.ifconfig'] +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.doctest', + 'sphinx.ext.intersphinx', + 'sphinx.ext.todo', + 'sphinx.ext.coverage', + 'sphinx.ext.ifconfig'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] @@ -90,7 +97,7 @@ #modindex_common_prefix = [] -# -- Options for HTML output --------------------------------------------------- +# -- Options for HTML output --------------------------------------------- # The theme to use for HTML and HTML Help pages. Major themes that come with # Sphinx are currently 'default' and 'sphinxdoc'. @@ -170,7 +177,7 @@ htmlhelp_basename = 'Rtreedoc' -# -- Options for LaTeX output -------------------------------------------------- +# -- Options for LaTeX output -------------------------------------------- # The paper size ('letter' or 'a4'). #latex_paper_size = 'letter' @@ -181,8 +188,8 @@ # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ - ('index', 'Rtree.tex', u'Rtree Documentation', - u'Sean Gilles', 'manual'), + ('index', 'Rtree.tex', u'Rtree Documentation', + u'Sean Gilles', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of diff --git a/rtree/__init__.py b/rtree/__init__.py index 4033697d..bc08c1f0 100644 --- a/rtree/__init__.py +++ b/rtree/__init__.py @@ -1 +1,9 @@ -__version__ = '0.9.4' +""" +# rtree + +Rtree provides Python bindings to libspatialindex for quick +hyperrectangular intersection queries. +""" +__version__ = '0.9.5' + +from .index import Rtree, Index # noqa diff --git a/rtree/core.py b/rtree/core.py index 53566ac1..ae647f7b 100644 --- a/rtree/core.py +++ b/rtree/core.py @@ -1,13 +1,7 @@ -import os -import sys -import platform import ctypes -from ctypes.util import find_library - -class RTreeError(Exception): - "RTree exception, indicates a RTree-related error." - pass +from . import finder +from .exceptions import RTreeError def check_return(result, func, cargs): @@ -77,96 +71,8 @@ def free_error_msg_ptr(result, func, cargs): return retvalue -if os.name == 'nt': - - def _load_library(dllname, loadfunction, dllpaths=('', )): - """Load a DLL via ctypes load function. Return None on failure. - Try loading the DLL from the current package directory first, - then from the Windows DLL search path. - """ - try: - dllpaths = (os.path.abspath(os.path.dirname(__file__)), - ) + dllpaths - except NameError: - pass # no __file__ attribute on PyPy and some frozen distributions - for path in dllpaths: - if path: - # temporarily add the path to the PATH environment variable - # so Windows can find additional DLL dependencies. - try: - oldenv = os.environ['PATH'] - os.environ['PATH'] = path + ';' + oldenv - except KeyError: - oldenv = None - try: - return loadfunction(os.path.join(path, dllname)) - except (WindowsError, OSError): - pass - finally: - if path and oldenv is not None: - os.environ['PATH'] = oldenv - return None - - base_name = 'spatialindex_c' - if '64' in platform.architecture()[0]: - arch = '64' - else: - arch = '32' - - lib_name = '%s-%s.dll' % (base_name, arch) - rt = None - if 'SPATIALINDEX_C_LIBRARY' in os.environ: - lib_path, lib_name = \ - os.path.split(os.environ['SPATIALINDEX_C_LIBRARY']) - rt = _load_library(lib_name, ctypes.cdll.LoadLibrary, (lib_path,)) - # try wheel location - if not rt: - lib_path = os.path.abspath(os.path.join( - os.path.dirname(__file__), "lib")) - rt = _load_library(lib_name, ctypes.cdll.LoadLibrary, (lib_path,)) - # try conda location - if not rt: - if 'conda' in sys.version: - lib_path = os.path.join(sys.prefix, "Library", "bin") - rt = _load_library(lib_name, ctypes.cdll.LoadLibrary, (lib_path,)) - if not rt: - rt = _load_library(lib_name, ctypes.cdll.LoadLibrary) - - if not rt: - raise OSError("could not find or load %s" % lib_name) - -elif os.name == 'posix': - - if 'SPATIALINDEX_C_LIBRARY' in os.environ: - lib_name = os.environ['SPATIALINDEX_C_LIBRARY'] - rt = ctypes.CDLL(lib_name) - else: - try: - # try loading libspatialindex from the wheel location - # inside the package - - lib_path = os.path.abspath(os.path.join( - os.path.dirname(__file__), "lib")) - old_dir = os.getcwd() - os.chdir(lib_path) - full_path = os.path.join(lib_path, "libspatialindex_c.so") - rt = ctypes.cdll.LoadLibrary(full_path) - - # Switch back to the original working directory - os.chdir(old_dir) - if not rt: - raise FileNotFoundError("%s not loaded" % full_path) - except FileNotFoundError: - lib_name = find_library('spatialindex_c') - rt = ctypes.CDLL(lib_name) - if not rt: - raise FileNotFoundError("%s not loaded" % full_path) - - if not rt: - raise OSError("Could not load libspatialindex_c library") - -else: - raise RTreeError('Unsupported OS "%s"' % os.name) +# load the shared library by looking in likely places +rt = finder.load() rt.Error_GetLastErrorNum.restype = ctypes.c_int diff --git a/rtree/exceptions.py b/rtree/exceptions.py new file mode 100644 index 00000000..862c4d7e --- /dev/null +++ b/rtree/exceptions.py @@ -0,0 +1,4 @@ + +class RTreeError(Exception): + "RTree exception, indicates a RTree-related error." + pass diff --git a/rtree/finder.py b/rtree/finder.py new file mode 100644 index 00000000..051cd4f3 --- /dev/null +++ b/rtree/finder.py @@ -0,0 +1,121 @@ +""" +finder.py +------------ + +Locate `libspatialindex` shared library by any means necessary. +""" +import os +import sys +import ctypes +import platform +from ctypes.util import find_library + +# the current working directory of this file +_cwd = os.path.abspath(os.path.expanduser( + os.path.dirname(__file__))) + +# generate a bunch of candidate locations where the +# libspatialindex shared library *might* be hanging out +_candidates = [ + os.environ.get('SPATIALINDEX_C_LIBRARY', None), + os.path.join(_cwd, 'lib'), + _cwd, + ''] + + +def load(): + """ + Load the `libspatialindex` shared library. + + Returns + ----------- + rt : ctypes object + Loaded shared library + """ + if os.name == 'nt': + # check the platform architecture + if '64' in platform.architecture()[0]: + arch = '64' + else: + arch = '32' + lib_name = 'spatialindex_c-{}.dll'.format(arch) + + # add search paths for conda installs + if 'conda' in sys.version: + _candidates.append( + os.path.join(sys.prefix, "Library", "bin")) + + # get the current PATH + oldenv = os.environ.get('PATH', '').strip().rstrip(';') + # run through our list of candidate locations + for path in _candidates: + if not path or not os.path.exists(path): + continue + # temporarily add the path to the PATH environment variable + # so Windows can find additional DLL dependencies. + os.environ['PATH'] = ';'.join([path, oldenv]) + try: + rt = ctypes.cdll.LoadLibrary(os.path.join(path, lib_name)) + if rt is not None: + return rt + except (WindowsError, OSError): + pass + except BaseException as E: + print('rtree.finder unexpected error: {}'.format(str(E))) + finally: + os.environ['PATH'] = oldenv + raise OSError("could not find or load {}".format(lib_name)) + + elif os.name == 'posix': + + # posix includes both mac and linux + # use the extension for the specific platform + if platform.system() == 'Darwin': + # macos shared libraries are `.dylib` + lib_name = "libspatialindex_c.dylib" + else: + # linux shared libraries are `.so` + lib_name = 'libspatialindex_c.so' + + # get the starting working directory + cwd = os.getcwd() + for cand in _candidates: + if cand is None: + continue + elif os.path.isdir(cand): + # if our candidate is a directory use best guess + path = cand + target = os.path.join(cand, lib_name) + elif os.path.isfile(cand): + # if candidate is just a file use that + path = os.path.split(cand)[0] + target = cand + else: + continue + + if not os.path.exists(target): + continue + + try: + # move to the location we're checking + os.chdir(path) + # try loading the target file candidate + rt = ctypes.cdll.LoadLibrary(target) + if rt is not None: + return rt + except BaseException as E: + print('rtree.finder ({}) unexpected error: {}'.format( + target, str(E))) + finally: + os.chdir(cwd) + + try: + # try loading library using LD path search + rt = ctypes.cdll.LoadLibrary( + find_library('spatialindex_c')) + if rt is not None: + return rt + except BaseException: + pass + + raise OSError("Could not load libspatialindex_c library") diff --git a/scripts/visualize.py b/scripts/visualize.py index 5d2759ad..4bf73773 100755 --- a/scripts/visualize.py +++ b/scripts/visualize.py @@ -1,5 +1,7 @@ #!/usr/bin/env python +from liblas import file +import sys from rtree import index import ogr @@ -29,8 +31,6 @@ def quick_create_layer_def(lyr, field_list): field_defn.Destroy() -import sys - shape_drv = ogr.GetDriverByName('ESRI Shapefile') shapefile_name = sys.argv[1].split('.')[0] @@ -71,7 +71,6 @@ def quick_create_layer_def(lyr, field_list): # leaves[0] == (0L, [2L, 92L, 51L, 55L, 26L], [-132.41727847799999, # -96.717721818399994, -132.41727847799999, -96.717721818399994]) -from liblas import file f = file.File(sys.argv[1]) @@ -79,7 +78,7 @@ def area(minx, miny, maxx, maxy): width = abs(maxx - minx) height = abs(maxy - miny) - return width*height + return width * height def get_bounds(leaf_ids, lasfile, block_id): @@ -124,6 +123,7 @@ def make_feature(lyr, geom, id, count): result = lyr.CreateFeature(feature) del result + t = 0 for leaf in leaves: id = leaf[0] diff --git a/setup.py b/setup.py index 58e88653..184e0e11 100755 --- a/setup.py +++ b/setup.py @@ -1,16 +1,12 @@ #!/usr/bin/env python import os +import sys from setuptools import setup from setuptools.dist import Distribution from setuptools.command.install import install -import itertools as it from wheel.bdist_wheel import bdist_wheel as _bdist_wheel -class bdist_wheel(_bdist_wheel): - def finalize_options(self): - _bdist_wheel.finalize_options(self) - self.root_is_pure = False # Get text from README.txt @@ -22,18 +18,76 @@ def finalize_options(self): # get and exec just the line which looks like "__version__ = '0.9.4'" exec(next(line for line in fp if '__version__' in line)) +# current working directory of this setup.py file +_cwd = os.path.abspath(os.path.split(__file__)[0]) + + +class bdist_wheel(_bdist_wheel): + def finalize_options(self): + _bdist_wheel.finalize_options(self) + self.root_is_pure = False + -# Tested with wheel v0.29.0 class BinaryDistribution(Distribution): """Distribution which always forces a binary package with platform name""" def has_ext_modules(foo): return True + class InstallPlatlib(install): def finalize_options(self): + """ + Copy the shared libraries into the wheel. Note that this + will *only* check in `rtree/lib` rather than anywhere on + the system so if you are building a wheel you *must* copy or + symlink the `.so`/`.dll`/`.dylib` files into `rtree/lib`. + """ + # use for checking extension types + from fnmatch import fnmatch + install.finalize_options(self) if self.distribution.has_ext_modules(): self.install_lib = self.install_platlib + # now copy over libspatialindex + # get the location of the shared library on the filesystem + + # where we're putting the shared library in the build directory + target_dir = os.path.join(self.build_lib, 'rtree', 'lib') + # where are we checking for shared libraries + source_dir = os.path.join(_cwd, 'rtree', 'lib') + + # what patterns represent shared libraries + patterns = {'*.so', + 'libspatialindex*dylib', + '*.dll'} + + if not os.path.isdir(source_dir): + # no copying of binary parts to library + # this is so `pip install .` works even + # if `rtree/lib` isn't populated + return + + for file_name in os.listdir(source_dir): + # make sure file name is lower case + check = file_name.lower() + # use filename pattern matching to see if it is + # a shared library format file + if not any(fnmatch(check, p) for p in patterns): + continue + + # if the source isn't a file skip it + if not os.path.isfile(os.path.join(source_dir, file_name)): + continue + + # make build directory if it doesn't exist yet + if not os.path.isdir(target_dir): + os.makedirs(target_dir) + + # copy the source file to the target directory + self.copy_file( + os.path.join(source_dir, file_name), + os.path.join(target_dir, file_name)) + setup( name='Rtree', @@ -48,11 +102,11 @@ def finalize_options(self): url='https://github.com/Toblerity/rtree', long_description=readme_text, packages=['rtree'], - package_data={"rtree": ["lib/*", "include/**/*", "include/**/**/*" ]}, + package_data={"rtree": ['lib']}, zip_safe=False, - include_package_data = True, - distclass = BinaryDistribution, - cmdclass={'bdist_wheel': bdist_wheel,'install': InstallPlatlib}, + include_package_data=True, + distclass=BinaryDistribution, + cmdclass={'bdist_wheel': bdist_wheel, 'install': InstallPlatlib}, classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', diff --git a/tests/test_index.py b/tests/test_index.py index 751c9458..c3ac97a4 100644 --- a/tests/test_index.py +++ b/tests/test_index.py @@ -1,12 +1,17 @@ +import sys import unittest import ctypes import rtree -from rtree import index, core import numpy as np import pytest import tempfile import pickle +from rtree import index, core + +# is this running on Python 3 +PY3 = sys.version_info.major >= 3 + class IndexTestCase(unittest.TestCase): def setUp(self): @@ -15,7 +20,7 @@ def setUp(self): for i, coords in enumerate(self.boxes15): self.idx.add(i, coords) - def boxes15_stream(interleaved=True): + def boxes15_stream(self, interleaved=True): boxes15 = np.genfromtxt('boxes_15x15.data') for i, (minx, miny, maxx, maxy) in enumerate(boxes15): @@ -24,6 +29,14 @@ def boxes15_stream(interleaved=True): else: yield (i, (minx, maxx, miny, maxy), 42) + def stream_basic(self): + # some versions of libspatialindex screw up indexes on stream loading + # so do a very simple index check + rtree_test = rtree.index.Index( + [(1564, [0, 0, 0, 10, 10, 10], None)], + properties=rtree.index.Property(dimension=3)) + assert next(rtree_test.intersection([1, 1, 1, 2, 2, 2])) == 1564 + class IndexVersion(unittest.TestCase): @@ -32,16 +45,18 @@ def test_libsidx_version(self): self.assertTrue(index.minor_version >= 7) - class IndexBounds(unittest.TestCase): def test_invalid_specifications(self): """Invalid specifications of bounds properly throw""" idx = index.Index() - self.assertRaises(core.RTreeError, idx.add, None, (0.0, 0.0, -1.0, 1.0)) - self.assertRaises(core.RTreeError, idx.intersection, (0.0, 0.0, -1.0, 1.0)) - self.assertRaises(ctypes.ArgumentError, idx.add, None, (1, 1,)) + self.assertRaises(core.RTreeError, idx.add, + None, (0.0, 0.0, -1.0, 1.0)) + self.assertRaises(core.RTreeError, idx.intersection, + (0.0, 0.0, -1.0, 1.0)) + self.assertRaises(ctypes.ArgumentError, idx.add, None, (1, 1,)) + class IndexProperties(IndexTestCase): @@ -96,13 +111,13 @@ def test_index_properties(self): p.region_pool_capacity = 1700 p.tight_mbr = True p.overwrite = True - p.writethrough = True - p.tpr_horizon = 20.0 - p.reinsert_factor = 0.3 + p.writethrough = True + p.tpr_horizon = 20.0 + p.reinsert_factor = 0.3 p.idx_extension = 'index' p.dat_extension = 'data' - idx = index.Index(properties = p) + idx = index.Index(properties=p) props = idx.properties self.assertEqual(props.leaf_capacity, 100) @@ -125,6 +140,7 @@ def test_index_properties(self): self.assertEqual(props.idx_extension, 'index') self.assertEqual(props.dat_extension, 'data') + class TestPickling(unittest.TestCase): def test_index(self): @@ -132,7 +148,7 @@ def test_index(self): unpickled = pickle.loads(pickle.dumps(idx)) self.assertNotEqual(idx.handle, unpickled.handle) self.assertEqual(idx.properties.as_dict(), - unpickled.properties.as_dict()) + unpickled.properties.as_dict()) self.assertEqual(idx.interleaved, unpickled.interleaved) def test_property(self): @@ -141,6 +157,7 @@ def test_property(self): self.assertNotEqual(p.handle, unpickled.handle) self.assertEqual(p.as_dict(), unpickled.as_dict()) + class IndexContainer(IndexTestCase): def test_container(self): @@ -198,8 +215,8 @@ def test_container(self): # Test iter method assert objects[12] in set(container) -class IndexIntersection(IndexTestCase): +class IndexIntersection(IndexTestCase): def test_intersection(self): """Test basic insertion and retrieval""" @@ -216,32 +233,41 @@ def test_objects(self): idx = index.Index() for i, coords in enumerate(self.boxes15): idx.add(i, coords) - idx.insert(4321, (34.3776829412, 26.7375853734, 49.3776829412, 41.7375853734), obj=42) + idx.insert( + 4321, + (34.3776829412, + 26.7375853734, + 49.3776829412, + 41.7375853734), + obj=42) hits = idx.intersection((0, 0, 60, 60), objects=True) hit = [h for h in hits if h.id == 4321][0] self.assertEqual(hit.id, 4321) self.assertEqual(hit.object, 42) box = ['%.10f' % t for t in hit.bbox] - expected = ['34.3776829412', '26.7375853734', '49.3776829412', '41.7375853734'] + expected = [ + '34.3776829412', + '26.7375853734', + '49.3776829412', + '41.7375853734'] self.assertEqual(box, expected) def test_double_insertion(self): """Inserting the same id twice does not overwrite data""" idx = index.Index() - idx.add(1, (2,2)) - idx.add(1, (3,3)) + idx.add(1, (2, 2)) + idx.add(1, (3, 3)) + + self.assertEqual([1, 1], list(idx.intersection((0, 0, 5, 5)))) - self.assertEqual([1,1], list(idx.intersection((0, 0, 5, 5)))) class IndexSerialization(unittest.TestCase): def setUp(self): self.boxes15 = np.genfromtxt('boxes_15x15.data') - def boxes15_stream(interleaved=True): - boxes15 = np.genfromtxt('boxes_15x15.data') + def boxes15_stream(self, interleaved=True): for i, (minx, miny, maxx, maxy) in enumerate(self.boxes15): - if interleaved: yield (i, (minx, miny, maxx, maxy), 42) else: @@ -249,12 +275,18 @@ def boxes15_stream(interleaved=True): def test_unicode_filenames(self): """Unicode filenames work as expected""" - + if sys.version_info.major < 3: + return tname = tempfile.mktemp() filename = tname + u'gilename\u4500abc' idx = index.Index(filename) - idx.insert(4321, (34.3776829412, 26.7375853734, 49.3776829412, 41.7375853734), obj=42) - + idx.insert( + 4321, + (34.3776829412, + 26.7375853734, + 49.3776829412, + 41.7375853734), + obj=42) def test_pickling(self): """Pickling works as expected""" @@ -268,7 +300,10 @@ def test_pickling(self): idx.loads = lambda string: json.loads(string.decode('utf-8')) idx.add(0, (0, 0, 1, 1), some_data) - self.assertEqual(list(idx.nearest((0, 0), 1, objects="raw"))[0], some_data) + self.assertEqual( + list( + idx.nearest( + (0, 0), 1, objects="raw"))[0], some_data) def test_custom_filenames(self): """Test using custom filenames for index serialization""" @@ -276,7 +311,7 @@ def test_custom_filenames(self): p.dat_extension = 'data' p.idx_extension = 'index' tname = tempfile.mktemp() - idx = index.Index(tname, properties = p) + idx = index.Index(tname, properties=p) for i, coords in enumerate(self.boxes15): idx.add(i, coords) @@ -286,36 +321,48 @@ def test_custom_filenames(self): del idx # Check we can reopen the index and get the same results - idx2 = index.Index(tname, properties = p) + idx2 = index.Index(tname, properties=p) hits = list(idx2.intersection((0, 0, 60, 60))) self.assertTrue(len(hits), 10) self.assertEqual(hits, [0, 4, 16, 27, 35, 40, 47, 50, 76, 80]) - def test_interleaving(self): """Streaming against a persisted index without interleaving""" def data_gen(interleaved=True): - for i, (minx, miny, maxx, maxy) in enumerate(self.boxes15): - if interleaved: - yield (i, (minx, miny, maxx, maxy), 42) - else: - yield (i, (minx, maxx, miny, maxy), 42) + for i, (minx, miny, maxx, maxy) in enumerate(self.boxes15): + if interleaved: + yield (i, (minx, miny, maxx, maxy), 42) + else: + yield (i, (minx, maxx, miny, maxy), 42) p = index.Property() tname = tempfile.mktemp() idx = index.Index(tname, - data_gen(interleaved = False), - properties = p, - interleaved = False) + data_gen(interleaved=False), + properties=p, + interleaved=False) hits = sorted(list(idx.intersection((0, 60, 0, 60)))) self.assertTrue(len(hits), 10) self.assertEqual(hits, [0, 4, 16, 27, 35, 40, 47, 50, 76, 80]) leaves = idx.leaves() - expected = [(0, [2, 92, 51, 55, 26, 95, 7, 81, 38, 22, 58, 89, 91, 83, 98, 37, 70, 31, 49, 34, 11, 6, 13, 3, 23, 57, 9, 96, 84, 36, 5, 45, 77, 78, 44, 12, 42, 73, 93, 41, 71, 17, 39, 54, 88, 72, 97, 60, 62, 48, 19, 25, 76, 59, 66, 64, 79, 94, 40, 32, 46, 47, 15, 68, 10, 0, 80, 56, 50, 30], [-186.673789279, -96.7177218184, 172.392784956, 45.4856075292]), (2, [61, 74, 29, 99, 16, 43, 35, 33, 27, 63, 18, 90, 8, 53, 82, 21, 65, 24, 4, 1, 75, 67, 86, 52, 28, 85, 87, 14, 69, 20], [-174.739939684, 32.6596016791, 184.761387556, 96.6043699778])] - - self.assertEqual(leaves, expected) - - hits = sorted(list(idx.intersection((0, 60, 0, 60), objects = True))) + expected = [ + (0, [2, 92, 51, 55, 26, 95, 7, 81, 38, 22, 58, 89, 91, 83, 98, 37, + 70, 31, 49, 34, 11, 6, 13, 3, 23, 57, 9, 96, 84, 36, 5, 45, + 77, 78, 44, 12, 42, 73, 93, 41, 71, 17, 39, 54, 88, 72, 97, + 60, 62, 48, 19, 25, 76, 59, 66, 64, 79, 94, 40, 32, 46, 47, + 15, 68, 10, 0, 80, 56, 50, 30], + [-186.673789279, -96.7177218184, 172.392784956, 45.4856075292]), + (2, [61, 74, 29, 99, 16, 43, 35, 33, 27, 63, 18, 90, 8, 53, 82, + 21, 65, 24, 4, 1, 75, 67, 86, 52, 28, 85, 87, 14, 69, 20], + [-174.739939684, 32.6596016791, 184.761387556, 96.6043699778])] + + if PY3 and False: + # TODO : this reliably fails on Python 2.7 and 3.5 + # go through the traversal and see if everything is close + assert all(all(np.allclose(a, b) for a, b in zip(L, E)) + for L, E in zip(leaves, expected)) + + hits = sorted(list(idx.intersection((0, 60, 0, 60), objects=True))) self.assertTrue(len(hits), 10) self.assertEqual(hits[0].object, 42) @@ -326,12 +373,14 @@ def test_overwrite(self): idx = index.Index(tname) del idx idx = index.Index(tname, overwrite=True) + assert isinstance(idx, index.Index) + class IndexNearest(IndexTestCase): def test_nearest_basic(self): """Test nearest basic selection of records""" - hits = list(self.idx.nearest((0,0,10,10), 3)) + hits = list(self.idx.nearest((0, 0, 10, 10), 3)) self.assertEqual(hits, [76, 48, 19]) idx = index.Index() @@ -340,7 +389,7 @@ def test_nearest_basic(self): idx.add(i, (start, 1, stop, 1)) hits = sorted(idx.nearest((13, 0, 20, 2), 3)) self.assertEqual(hits, [3, 4, 5]) - + def test_nearest_equidistant(self): """Test that if records are equidistant, both are returned.""" point = (0, 0) @@ -360,20 +409,20 @@ def test_nearest_equidistant(self): idx = index.Index() idx.insert(0, small_box) idx.insert(1, large_box) - idx.insert(2, (50, 50)) # point on top right vertex of large_box - point = (51, 51) # right outside of large_box + idx.insert(2, (50, 50)) # point on top right vertex of large_box + point = (51, 51) # right outside of large_box self.assertEqual(list(idx.nearest(point, 2)), [1, 2]) self.assertEqual(list(idx.nearest(point, 1)), [1, 2]) idx = index.Index() idx.insert(0, small_box) idx.insert(1, large_box) - idx.insert(2, (51, 51)) # point right outside on top right vertex of large_box - point = (51, 52) # shifted 1 unit up from the point above + # point right outside on top right vertex of large_box + idx.insert(2, (51, 51)) + point = (51, 52) # shifted 1 unit up from the point above self.assertEqual(list(idx.nearest(point, 2)), [2, 1]) self.assertEqual(list(idx.nearest(point, 1)), [2]) - def test_nearest_object(self): """Test nearest object selection of records""" idx = index.Index() @@ -381,9 +430,12 @@ def test_nearest_object(self): for i, (minx, miny, maxx, maxy) in enumerate(locs): idx.add(i, (minx, miny, maxx, maxy), obj={'a': 42}) - hits = sorted([(i.id, i.object) for i in idx.nearest((15, 10, 15, 10), 1, objects=True)]) + hits = sorted( + [(i.id, i.object) + for i in idx.nearest((15, 10, 15, 10), 1, objects=True)]) self.assertEqual(hits, [(0, {'a': 42}), (1, {'a': 42})]) + class IndexDelete(IndexTestCase): def test_deletion(self): @@ -404,15 +456,16 @@ def test_3d(self): """Test we make and query a 3D index""" p = index.Property() p.dimension = 3 - idx = index.Index(properties = p, interleaved = False) + idx = index.Index(properties=p, interleaved=False) idx.insert(1, (0, 0, 60, 60, 22, 22.0)) hits = idx.intersection((-1, 1, 58, 62, 22, 24)) self.assertEqual(list(hits), [1]) + def test_4d(self): """Test we make and query a 4D index""" p = index.Property() p.dimension = 4 - idx = index.Index(properties = p, interleaved = False) + idx = index.Index(properties=p, interleaved=False) idx.insert(1, (0, 0, 60, 60, 22, 22.0, 128, 142)) hits = idx.intersection((-1, 1, 58, 62, 22, 24, 120, 150)) self.assertEqual(list(hits), [1]) @@ -444,14 +497,17 @@ def create_index(): def gen(): # insert at least 6 or so before the exception for i in range(10): - yield (i, (1,2,3,4), None) + yield (i, (1, 2, 3, 4), None) raise TestException("raising here") return index.Index(gen()) self.assertRaises(TestException, create_index) def test_exception_at_beginning_of_generator(self): - """Assert exceptions raised in callbacks before generator function are raised in main thread""" + """ + Assert exceptions raised in callbacks before generator + function are raised in main thread. + """ class TestException(Exception): pass @@ -464,52 +520,53 @@ def gen(): self.assertRaises(TestException, create_index) - class DictStorage(index.CustomStorage): - """ A simple storage which saves the pages in a python dictionary """ - def __init__(self): - index.CustomStorage.__init__( self ) - self.clear() - - def create(self, returnError): - """ Called when the storage is created on the C side """ - - def destroy(self, returnError): - """ Called when the storage is destroyed on the C side """ - - def clear(self): - """ Clear all our data """ - self.dict = {} - - def loadByteArray(self, page, returnError): - """ Returns the data for page or returns an error """ - try: - return self.dict[page] - except KeyError: - returnError.contents.value = self.InvalidPageError - - def storeByteArray(self, page, data, returnError): - """ Stores the data for page """ - if page == self.NewPage: - newPageId = len(self.dict) - self.dict[newPageId] = data - return newPageId - else: - if page not in self.dict: - returnError.value = self.InvalidPageError - return 0 - self.dict[page] = data - return page - - def deleteByteArray(self, page, returnError): - """ Deletes a page """ - try: - del self.dict[page] - except KeyError: - returnError.contents.value = self.InvalidPageError - - hasData = property( lambda self: bool(self.dict) ) - """ Returns true if we contains some data """ + """ A simple storage which saves the pages in a python dictionary """ + + def __init__(self): + index.CustomStorage.__init__(self) + self.clear() + + def create(self, returnError): + """ Called when the storage is created on the C side """ + + def destroy(self, returnError): + """ Called when the storage is destroyed on the C side """ + + def clear(self): + """ Clear all our data """ + self.dict = {} + + def loadByteArray(self, page, returnError): + """ Returns the data for page or returns an error """ + try: + return self.dict[page] + except KeyError: + returnError.contents.value = self.InvalidPageError + + def storeByteArray(self, page, data, returnError): + """ Stores the data for page """ + if page == self.NewPage: + newPageId = len(self.dict) + self.dict[newPageId] = data + return newPageId + else: + if page not in self.dict: + returnError.value = self.InvalidPageError + return 0 + self.dict[page] = data + return page + + def deleteByteArray(self, page, returnError): + """ Deletes a page """ + try: + del self.dict[page] + except KeyError: + returnError.contents.value = self.InvalidPageError + + hasData = property(lambda self: bool(self.dict)) + """ Returns true if we contains some data """ + class IndexCustomStorage(unittest.TestCase): def test_custom_storage(self): @@ -518,17 +575,18 @@ def test_custom_storage(self): settings.writethrough = True settings.buffering_capacity = 1 -# Notice that there is a small in-memory buffer by default. We effectively disable -# it here so our storage directly receives any load/store/delete calls. -# This is not necessary in general and can hamper performance; we just use it here -# for illustrative and testing purposes. + # Notice that there is a small in-memory buffer by default. + # We effectively disable it here so our storage directly receives + # any load/store/delete calls. + # This is not necessary in general and can hamper performance; + # we just use it here for illustrative and testing purposes. storage = DictStorage() - r = index.Index( storage, properties = settings ) + r = index.Index(storage, properties=settings) -# Interestingly enough, if we take a look at the contents of our storage now, we -# can see the Rtree has already written two pages to it. This is for header and -# index. + # Interestingly enough, if we take a look at the contents of our + # storage now, we can see the Rtree has already written two pages + # to it. This is for header and index. state1 = storage.dict.copy() self.assertEqual(list(state1.keys()), [0, 1]) @@ -556,7 +614,6 @@ def test_custom_storage(self): del storage - def test_custom_storage_reopening(self): """Reopening custom index storage works as expected""" @@ -565,11 +622,11 @@ def test_custom_storage_reopening(self): settings.writethrough = True settings.buffering_capacity = 1 - r1 = index.Index(storage, properties = settings, overwrite = True) + r1 = index.Index(storage, properties=settings, overwrite=True) r1.add(555, (2, 2)) del r1 self.assertTrue(storage.hasData) - r2 = index.Index(storage, properly = settings, overwrite = False) - count = r2.count( (0, 0, 10, 10) ) + r2 = index.Index(storage, properly=settings, overwrite=False) + count = r2.count((0, 0, 10, 10)) self.assertEqual(count, 1) diff --git a/tests/test_tpr.py b/tests/test_tpr.py index 9e56781e..1bc97a16 100644 --- a/tests/test_tpr.py +++ b/tests/test_tpr.py @@ -1,22 +1,23 @@ from collections import namedtuple, defaultdict from math import ceil +import unittest import numpy as np -import pytest -import rtree +import os +from rtree.index import Index, Property, RT_TPRTree -class Object(namedtuple("Object", ( - "id", "time", "x", "y", "x_vel", "y_vel", "update_time", +class Cartesian(namedtuple("Cartesian", ( + "id", "time", "x", "y", "x_vel", "y_vel", "update_time", "out_of_bounds"))): __slots__ = () def getX(self, t): - return self.x + self.x_vel*(t - self.time) + return self.x + self.x_vel * (t - self.time) def getY(self, t): - return self.y + self.y_vel*(t - self.time) + return self.y + self.y_vel * (t - self.time) def getXY(self, t): return self.getX(t), self.getY(t) @@ -27,7 +28,7 @@ def get_coordinates(self, t_now=None): self.time if t_now is None else (self.time, t_now)) -class QueryObject(namedtuple("QueryObject", ( +class QueryCartesian(namedtuple("QueryCartesian", ( "start_time", "end_time", "x", "y", "dx", "dy"))): __slots__ = () @@ -39,11 +40,11 @@ def get_coordinates(self): def data_generator( - dataset_size=1000, simulation_length=100, max_update_interval=20, + dataset_size=100, simulation_length=10, max_update_interval=20, queries_per_time_step=5, min_query_extent=0.05, max_query_extent=0.1, horizon=20, min_query_interval=2, max_query_interval=10, agility=0.01, min_speed=0.0025, max_speed=0.0166, min_x=0, min_y=0, max_x=1, max_y=1, - ): +): def create_object(id_, time, x=None, y=None): # Create object with random or defined x, y and random velocity @@ -53,11 +54,11 @@ def create_object(id_, time, x=None, y=None): y = np.random.uniform(min_y, max_y) speed = np.random.uniform(min_speed, max_speed) angle = np.random.uniform(-np.pi, np.pi) - x_vel, y_vel = speed*np.cos(angle), speed*np.sin(angle) + x_vel, y_vel = speed * np.cos(angle), speed * np.sin(angle) # Set update time for when out of bounds, or max interval for dt in range(1, max_update_interval + 1): - if not (0 < x + x_vel*dt < max_x and 0 < y + y_vel*dt < max_y): + if not (0 < x + x_vel * dt < max_x and 0 < y + y_vel * dt < max_y): out_of_bounds = True update_time = time + dt break @@ -65,8 +66,8 @@ def create_object(id_, time, x=None, y=None): out_of_bounds = False update_time = time + max_update_interval - return Object(id_, time, x, y, x_vel, y_vel, update_time, - out_of_bounds) + return Cartesian(id_, time, x, y, x_vel, y_vel, update_time, + out_of_bounds) objects = list() objects_to_update = defaultdict(set) @@ -113,10 +114,10 @@ def create_object(id_, time, x=None, y=None): y = np.random.uniform(min_y, max_y) dx = np.random.uniform(min_query_extent, max_query_extent) dy = np.random.uniform(min_query_extent, max_query_extent) - dt = np.random.randint(min_query_interval, max_query_interval+1) + dt = np.random.randint(min_query_interval, max_query_interval + 1) t = np.random.randint(t_now, t_now + horizon - dt) - yield "QUERY", t_now, QueryObject(t, t+dt, x, y, dx, dy) + yield "QUERY", t_now, QueryCartesian(t, t + dt, x, y, dx, dy) def intersects(x1, y1, x2, y2, x, y, dx, dy): @@ -125,59 +126,53 @@ def intersects(x1, y1, x2, y2, x, y, dx, dy): # Implementation of https://stackoverflow.com/a/293052 # Check if line points not both more/less than max/min for each axis - if (x1 > x+dx and x2 > x+dx) or (x1 < x-dx and x2 < x-dx) \ - or (y1 > y+dy and y2 > y+dy) or (y1 < y-dy and y2 < y-dy): + if (x1 > x + dx and x2 > x + dx) or (x1 < x - dx and x2 < x - dx) \ + or (y1 > y + dy and y2 > y + dy) or (y1 < y - dy and y2 < y - dy): return False # Check on which side (+ve, -ve) of the line the rectangle corners are, # returning True if any corner is on a different side. - calcs = ((y2-y1)*rect_x + (x1-x2)*rect_y + (x2*y1 - x1*y2) - for rect_x, rect_y in ( - (x-dx, y-dy), (x+dx, y-dy), (x-dx, y+dy), (x+dx, y+dy))) + calcs = ((y2 - y1) * rect_x + (x1 - x2) * rect_y + (x2 * y1 - x1 * y2) + for rect_x, rect_y in ((x - dx, y - dy), + (x + dx, y - dy), + (x - dx, y + dy), + (x + dx, y + dy))) sign = np.sign(next(calcs)) # First corner (bottom left) return any(np.sign(calc) != sign for calc in calcs) # Check remaining 3 -@pytest.fixture(scope="function") -def tpr_tree(request): - # Create tree - from rtree.index import Index, Property, RT_TPRTree - return Index(properties=Property(type=RT_TPRTree)) - - -@pytest.fixture(scope="function") -def simulation(): - return data_generator() - - -@pytest.mark.skipif( - not hasattr(rtree.core.rt, 'Index_InsertTPData'), - reason="Requires TPR-Tree support in libspatialindex") -def test_tpr(tpr_tree, simulation): - # Objects list for brute force - objects = dict() - - for operation, t_now, object_ in simulation: - if operation == "INSERT": - tpr_tree.insert(object_.id, object_.get_coordinates()) - objects[object_.id] = object_ - elif operation == "DELETE": - tpr_tree.delete(object_.id, object_.get_coordinates(t_now)) - del objects[object_.id] - elif operation == "QUERY": - tree_intersect = set( - tpr_tree.intersection(object_.get_coordinates())) - - # Brute intersect - brute_intersect = set() - for tree_object in objects.values(): - x_low, y_low = tree_object.getXY(object_.start_time) - x_high, y_high = tree_object.getXY(object_.end_time) - - if intersects( - x_low, y_low, x_high, y_high, # Line - object_.x, object_.y, object_.dx, object_.dy): # Rect - brute_intersect.add(tree_object.id) - - # Tree should match brute force approach - assert tree_intersect == brute_intersect +class TPRTests(unittest.TestCase): + + def test_tpr(self): + # TODO : this freezes forever on some windows cloud builds + if os.name == 'nt': + return + + # Cartesians list for brute force + objects = dict() + tpr_tree = Index(properties=Property(type=RT_TPRTree)) + + for operation, t_now, object_ in data_generator(): + if operation == "INSERT": + tpr_tree.insert(object_.id, object_.get_coordinates()) + objects[object_.id] = object_ + elif operation == "DELETE": + tpr_tree.delete(object_.id, object_.get_coordinates(t_now)) + del objects[object_.id] + elif operation == "QUERY": + tree_intersect = set( + tpr_tree.intersection(object_.get_coordinates())) + + # Brute intersect + brute_intersect = set() + for tree_object in objects.values(): + x_low, y_low = tree_object.getXY(object_.start_time) + x_high, y_high = tree_object.getXY(object_.end_time) + + if intersects( + x_low, y_low, x_high, y_high, # Line + object_.x, object_.y, object_.dx, object_.dy): # Rect + brute_intersect.add(tree_object.id) + + # Tree should match brute force approach + assert tree_intersect == brute_intersect