diff --git a/.github/workflows/build_test.yml b/.github/workflows/build_test.yml deleted file mode 100644 index eba3f30d64..0000000000 --- a/.github/workflows/build_test.yml +++ /dev/null @@ -1,50 +0,0 @@ -name: Standard Build and Test - -on: - # allows us to run workflows manually - workflow_dispatch: - pull_request: - branches: - - master - paths-ignore: - - '.github/workflows/docker_publish.yml' - - push: - branches: - - master - paths-ignore: - - '.github/workflows/docker_publish.yml' - - -jobs: - BuildTest: - runs-on: ubuntu-latest - - container: - image: cyclus/cyclus-deps - - steps: - - name: Checkout repository - uses: actions/checkout@v2 - - - name: setup - run: | - echo "HOME=/github/home/" >> $GITHUB_ENV - echo "PATH=$PATH:${HOME}/.local/bin" >> $GITHUB_ENV - echo "LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${HOME}/.local/lib" >> $GITHUB_ENV - - name: Building Cyclus - run: | - mkdir -p ${HOME}/.local/lib/python3.7/site-packages/ - cd ${GITHUB_WORKSPACE} - python install.py -j 2 \ - --build-type=Release --core-version 999999.999999 \ - -DBLAS_LIBRARIES="/opt/conda/lib/libblas.so" \ - -DLAPACK_LIBRARIES="/opt/conda/lib/liblapack.so" - - - name: Unit Test - run: | - cyclus_unit_tests; exit $? - - - name: Nosetest - run: | - nosetests -w ${GITHUB_WORKSPACE}/tests; exit $? 
diff --git a/.github/workflows/build_test_publish.yml b/.github/workflows/build_test_publish.yml new file mode 100644 index 0000000000..5311b3fcd3 --- /dev/null +++ b/.github/workflows/build_test_publish.yml @@ -0,0 +1,56 @@ +name: Build, Test & Publish docker images for future CI and other users + +on: + # allows us to run workflows manually + workflow_dispatch: + pull_request: + push: + branches: + - main + +jobs: + build-dependency-and-test-img: + runs-on: ubuntu-latest + + strategy: + matrix: + ubuntu_versions : [ + 22.04, + ] + pkg_mgr : [ + apt, + conda, + ] + + name: Installing Dependencies, Building cyclus and running tests + steps: + - name: default environment + run: | + echo "tag-latest-on-default=false" >> "$GITHUB_ENV" + + - name: condition on trigger parameters + if: ${{ github.repository_owner == 'cyclus' && github.ref == 'refs/heads/main' }} + run: | + echo "tag-latest-on-default=true" >> "$GITHUB_ENV" + + - name: Log in to the Container registry + uses: docker/login-action@v2 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Checkout repository + uses: actions/checkout@v3 + + - name: Installing Dependencies in Docker image + uses: firehed/multistage-docker-build-action@v1 + with: + repository: ghcr.io/${{ github.repository_owner }}/cyclus_${{ matrix.ubuntu_versions }}_${{ matrix.pkg_mgr }} + stages: ${{ matrix.pkg_mgr }}-deps, cyclus + server-stage: cyclus-test + quiet: false + parallel: true + tag-latest-on-default: ${{ env.tag-latest-on-default }} + dockerfile: docker/Dockerfile + build-args: pkg_mgr=${{ matrix.pkg_mgr }} \ No newline at end of file diff --git a/.github/workflows/changelog_test.yml b/.github/workflows/changelog_test.yml index 87f3daba13..b8cdd106d3 100644 --- a/.github/workflows/changelog_test.yml +++ b/.github/workflows/changelog_test.yml @@ -10,7 +10,6 @@ env: jobs: changelog_update: - if: ${{ github.event_name == 'pull_request' }} runs-on: ubuntu-latest 
container: image: alpine:3.14 @@ -23,10 +22,17 @@ jobs: git --version - name: Checkout repository - uses: actions/checkout@v2 - - - name: Housekeeping - run: | + uses: actions/checkout@v3 + + - run: | + git config --global --add safe.directory ${GITHUB_WORKSPACE} cd $GITHUB_WORKSPACE - housekeeping_script/changelog_test.sh + git remote add cyclus https://github.com/cyclus/cyclus.git + git fetch cyclus + change=`git diff cyclus/main -- CHANGELOG.rst | wc -l` + git remote remove cyclus + if [ $change -eq 0 ]; then + echo "CHANGELOG.rst has not been updated" + exit 1 + fi diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 9dc54da862..880d1831db 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -22,7 +22,8 @@ Since last release **Changed:** * Moved to unified CHANGELOG Entry and check them with GithubAction (#1571) - +* Major update and modernization of build (#1587) +* Changed Json formatting for compatibility with current python standards (#1587) **Removed:** diff --git a/CMakeLists.txt b/CMakeLists.txt index 5fd0f711e4..646aa72154 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 3.16) #taken from http://geant4.cern.ch/support/source/geant4/CMakeLists.txt IF(${CMAKE_SOURCE_DIR} STREQUAL ${CMAKE_BINARY_DIR}) message(STATUS "Cyclus requires an out-of-source build.") @@ -10,20 +10,16 @@ IF(${CMAKE_SOURCE_DIR} STREQUAL ${CMAKE_BINARY_DIR}) message(FATAL_ERROR "in-source build detected") ENDIF() -# Set some policies -cmake_policy(SET CMP0040 OLD) -cmake_policy(SET CMP0042 OLD) - # This project name is cyclus. 
PROJECT(CYCLUS) -# check for and enable c++11 support (required for cyclus) +# check for and enable c++17 support (required for cyclus) INCLUDE(CheckCXXCompilerFlag) -CHECK_CXX_COMPILER_FLAG("-std=c++11" COMPILER_SUPPORTS_CXX11) -IF(COMPILER_SUPPORTS_CXX11) - SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11") +CHECK_CXX_COMPILER_FLAG("-std=c++17" COMPILER_SUPPORTS_CXX17) +IF(COMPILER_SUPPORTS_CXX17) + SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++17") ELSE() - MESSAGE(STATUS "The compiler ${CMAKE_CXX_COMPILER} has no C++11 support. Please use a different C++ compiler.") + MESSAGE(STATUS "The compiler ${CMAKE_CXX_COMPILER} has no C++17 support. Please use a different C++ compiler.") ENDIF() # enable assembly @@ -108,8 +104,9 @@ IF(NOT CYCLUS_DOC_ONLY) endif() # Tell CMake where the modules are - LIST(APPEND CMAKE_MODULE_PATH - "${CMAKE_DIR}/share/cmake-2.8/Modules" "${PROJECT_SOURCE_DIR}/cmake") + MESSAGE("-- CMAKE_MODULE_PATH: ${CMAKE_MODULE_PATH}") + LIST(APPEND CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} + "${PROJECT_SOURCE_DIR}/cmake") # Include macros INCLUDE(CopyWhenDiffMacro) @@ -162,24 +159,29 @@ IF(NOT CYCLUS_DOC_ONLY) MESSAGE("-- Dependency Binary Hints (DEPS_BIN_HINTS): ${DEPS_BIN_HINTS}") MESSAGE("-- Dependency Library Hints (DEPS_LIB_HINTS): ${DEPS_LIB_HINTS}") MESSAGE("-- Dependency Include Hints (DEPS_INCLUDE_HINTS): ${DEPS_INCLUDE_HINTS}") + MESSAGE("CMAKE_MODULE_PATH: ${CMAKE_MODULE_PATH}") + + # Search pkg-config utility first + find_package(PkgConfig REQUIRED) + # Debian installs useful LibXML2 files to /usr/include/libxml2/libxml # libxml2 is required for relaxng schema validation - FIND_PACKAGE(LibXml2 ${DEPS_HINTS}) - IF(NOT LIBXML2_LIBRARIES) - FIND_LIBRARY(LibXml2 REQUIRED ${DEPS_HINTS}) - ENDIF() + + FIND_PACKAGE(LibXml2 REQUIRED) ADD_DEFINITIONS(${LIBXML2_DEFINITIONS}) SET(LIBS ${LIBS} ${LIBXML2_LIBRARIES}) - - # Find LibXML++ and dependencies - FIND_PACKAGE(LibXML++) - IF(NOT LibXML++_LIBRARIES) - FIND_LIBRARY(LibXML++ REQUIRED 
${DEPS_HINTS}) - ENDIF() - SET(LIBS ${LIBS} ${LibXML++_LIBRARIES}) - message("-- LibXML++ Include Dir: ${LibXML++_INCLUDE_DIR}") - + message("-- LibXML2 Include Dir: ${LIBXML2_INCLUDE_DIR}") + + # Then use pkg-config for locate specific package + pkg_check_modules(LIBXMLXX IMPORTED_TARGET libxml++-4.0) + IF ( NOT LIBXMLXX_LIBRARIES ) + pkg_check_modules(LIBXMLXX REQUIRED IMPORTED_TARGET libxml++-2.6) + ENDIF ( NOT LIBXMLXX_LIBRARIES ) + SET(LIBS ${LIBS} ${LIBXMLXX_LIBRARIES}) + message("-- LibXML++ Include Dir: ${LIBXMLXX_INCLUDE_DIRS}") + message("-- LibXML++ Librarires: ${LIBXMLXX_LIBRARIES}") + # find lapack and link to it FIND_PACKAGE(LAPACK REQUIRED) set(LIBS ${LIBS} ${LAPACK_LIBRARIES}) @@ -188,8 +190,9 @@ IF(NOT CYCLUS_DOC_ONLY) MESSAGE("-- Found BLAS Libraries: ${BLAS_LIBRARIES}") # Find Sqlite3 - FIND_PACKAGE(Sqlite3 REQUIRED) - SET(LIBS ${LIBS} ${SQLITE3_LIBRARIES}) + FIND_PACKAGE(SQLite3 REQUIRED) + SET(LIBS ${LIBS} ${SQLite3_LIBRARIES}) + MESSAGE("-- Found SQLite3 Libraries: ${SQLite3_LIBRARIES}") # Find HDF5 FIND_PACKAGE(HDF5 REQUIRED COMPONENTS HL) @@ -300,8 +303,8 @@ IF(NOT CYCLUS_DOC_ONLY) # Cython & Python Bindings # # Use new Python library finder - find_package(PythonInterp) - find_package(PythonLibs) + find_package (Python3 COMPONENTS Interpreter Development NumPy) + execute_process(COMMAND "${PYTHON_EXECUTABLE}" -c "import site; print(site.getsitepackages(['${CMAKE_INSTALL_PREFIX}'])[0])" OUTPUT_VARIABLE PYTHON_SITE_PACKAGES @@ -321,7 +324,6 @@ IF(NOT CYCLUS_DOC_ONLY) endif() include(UseCython) - find_package(Numpy REQUIRED) find_package(Jinja2 REQUIRED) find_package(Pandas REQUIRED) @@ -362,10 +364,10 @@ IF(NOT CYCLUS_DOC_ONLY) # ${Glibmm_INCLUDE_DIRS} breaks Ubuntu 12.04 set(inc_dirs "${LIBXML2_INCLUDE_DIR}" - "${LibXML++_INCLUDE_DIR}" + "${LIBXMLXX_INCLUDE_DIRS}" "${Glibmm_INCLUDE_DIRS}" - "${LibXML++Config_INCLUDE_DIR}" - "${SQLITE3_INCLUDE_DIR}" + "${LIBXMLXXConfig_INCLUDE_DIR}" + "${SQLite3_INCLUDE_DIR}" "${HDF5_INCLUDE_DIRS}" 
"${Boost_INCLUDE_DIR}" "${COIN_INCLUDE_DIRS}") @@ -376,8 +378,7 @@ IF(NOT CYCLUS_DOC_ONLY) if(Cython_FOUND) - INCLUDE_DIRECTORIES(AFTER "${PYTHON_INCLUDE_DIRS}" - "${NUMPY_INCLUDE_DIRS}") + INCLUDE_DIRECTORIES(AFTER "${PYTHON_INCLUDE_DIRS}" "${_Python3_NumPy_INCLUDE_DIR}") endif(Cython_FOUND) # set core version, one way or the other IF(NOT "${CORE_VERSION}" STREQUAL "") @@ -403,7 +404,7 @@ IF(NOT CYCLUS_DOC_ONLY) if(Cython_FOUND) ADD_SUBDIRECTORY("${CYCLUS_PYSOURCE_DIR}") endif(Cython_FOUND) - + ############################################################################################## ####################################### end includes ######################################### ############################################################################################## @@ -489,7 +490,7 @@ IF(NOT CYCLUS_DOC_ONLY) SET(CPACK_DEBIAN_PACKAGE_DEPENDS "${CPACK_DEBIAN_PACKAGE_DEPENDS}, libboost-program-options-dev (>= 1.54.0)") SET(CPACK_DEBIAN_PACKAGE_DEPENDS "${CPACK_DEBIAN_PACKAGE_DEPENDS}, libboost-serialization-dev (>= 1.54.0)") SET(CPACK_DEBIAN_PACKAGE_DEPENDS "${CPACK_DEBIAN_PACKAGE_DEPENDS}, libhdf5-dev (>= 1.8.11)") - SET(CPACK_DEBIAN_PACKAGE_DEPENDS "${CPACK_DEBIAN_PACKAGE_DEPENDS}, libxml++2.6-dev (>= 2.36.0)") + SET(CPACK_DEBIAN_PACKAGE_DEPENDS "${CPACK_DEBIAN_PACKAGE_DEPENDS}, libxml++2.6-dev (>= 2.6.0)") SET(CPACK_DEBIAN_PACKAGE_DEPENDS "${CPACK_DEBIAN_PACKAGE_DEPENDS}, coinor-libcbc-dev (>= 2.8.7)") MESSAGE("CPACK_DEBIAN_PACKAGE_DEPENDS ${CPACK_DEBIAN_PACKAGE_DEPENDS}") diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 0bcf6210f6..7be4d0df16 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -12,9 +12,9 @@ General Notes * Use a branching workflow similar to the one described at http://progit.org/book/ch3-4.html. -* Keep your own "master" branch in sync with the mainline - repository's "master" branch. Specifically, do not push your - own commits directly to your "master" branch. 
+* Keep your own "main" branch in sync with the mainline + repository's "main" branch. Specifically, do not push your + own commits directly to your "main" branch. * Any commit should *pass all tests* (see `Running Tests`_). @@ -30,11 +30,11 @@ Issuing a Pull Request ====================== * When you are ready to move changes from one of your topic branches into the - "master" branch, it must be reviewed and accepted by another developer. + "main" branch, it must be reviewed and accepted by another developer. * You may want to review this `tutorial `_ before you make a - pull request to the master branch. + pull request to the main branch. Reviewing a Pull Request ======================== @@ -51,7 +51,7 @@ Reviewing a Pull Request * Click the green "Merge Pull Request" button * Note: if the button is not available, the requester needs to merge or rebase - from the current HEAD of the mainline "master" branch + from the current HEAD of the mainline "main" branch Running Tests ============= @@ -75,7 +75,7 @@ Cautions * **DO NOT** rebase any commits that have been pulled/pushed anywhere else other than your own fork (especially if those commits have been integrated into the blessed repository). You should NEVER rebase commits that are a part of the - 'master' branch. *If you do, we will never, ever accept your pull request*. + 'main' branch. *If you do, we will never, ever accept your pull request*. An Example ========== @@ -96,7 +96,7 @@ Acquiring Cyclus and Workflow ----------------------------- We begin with a fork of the mainline Cyclus repository. After initially forking -the repo, we will have the master branch in your fork. +the repo, we will have the main branch in your fork. 
Acquiring a Fork of the Cyclus Repository ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -111,9 +111,9 @@ First, let's make our "work" branch: .../cyclus_dir/$ git branch work .../cyclus_dir/$ git push origin work -We now have the following situation: there exists the mainline copy of the master -branch, there exists your fork's copy of the master and working branches, -*AND* there exists your *local* copy of the master and working branches. It is +We now have the following situation: there exists the mainline copy of the main +branch, there exists your fork's copy of the main and working branches, +*AND* there exists your *local* copy of the main and working branches. It is important now to note that you may wish to work from home or the office. If you keep your fork's branches up to date (i.e., "push" your changes before you leave), only your *local* copies of your branches may be different when you next sit down at the other location. @@ -128,22 +128,22 @@ work, finished, and successfully pushed your changes to your *Origin* repository. You are now at home and want to continue working a bit. To begin, let's update our *home's local branches*:: - .../cyclus_dir/$ git checkout master - .../cyclus_dir/$ git pull upstream master - .../cyclus_dir/$ git push origin master + .../cyclus_dir/$ git checkout main + .../cyclus_dir/$ git pull upstream main + .../cyclus_dir/$ git push origin main .../cyclus_dir/$ git checkout work .../cyclus_dir/$ git pull origin work - .../cyclus_dir/$ git rebase master + .../cyclus_dir/$ git rebase main .../cyclus_dir/$ git push origin work Perhaps a little explanation is required. We first want to make sure that this new local copy of -the master branch is up-to-date with respect to the remote origin's branch and remote upstream's +the main branch is up-to-date with respect to the remote origin's branch and remote upstream's branch. If there was a change from the remote upstream's branch, we want to push that to origin. 
We then follow the same process to update the work branch, except: #. we don't need to worry about the *upstream* repo because it doesn't have a work branch, and -#. we want to incorporate any changes which may have been introduced in the master branch update. +#. we want to incorporate any changes which may have been introduced in the main branch update. Workflow: The End ^^^^^^^^^^^^^^^^^ @@ -152,7 +152,7 @@ As time passes, you make some changes to files, and you commit those changes (to branch*). Eventually (hopefully) you come to a stopping point where you have finished your project on your work branch *AND* it compiles *AND* it runs input files correctly *AND* it passes all tests! Perhaps you have found Nirvana. In any case, you've performed the final commit to your work branch, -so it's time to make a pull request online and wait for our masterr friends to +so it's time to make a pull request online and wait for our main friends to review and accept it. Sometimes, your pull request will be held by the reviewer until further changes @@ -176,5 +176,5 @@ Releases If you are going through a release of Cyclus and Cycamore, check out the release procedure notes `here -`_ and +`_ and on the `website `_. diff --git a/README.rst b/README.rst index e33233ae7d..64b02e3572 100644 --- a/README.rst +++ b/README.rst @@ -12,13 +12,13 @@ Cyclus Projects Status ----------------------------------------------------------------------------------- **Branch** **Cyclus** **Cycamore** **Cymetric** ================ ================= =================== =================== -master |cyclus_master| |cycamore_master| |cymetric_master| +main |cyclus_main| |cycamore_main| |cymetric_main| ================ ================= =================== =================== -.. |cyclus_master| image:: https://circleci.com/gh/cyclus/cyclus/tree/master.png?&circle-token= 35d82ba8661d4f32e0f084b9d8a2388fa62c0262 -.. 
|cycamore_master| image:: https://circleci.com/gh/cyclus/cycamore/tree/master.png?&circle-token= 333211090d5d5a15110eed1adbe079a6f3a4a704 -.. |cymetric_master| image:: https://circleci.com/gh/cyclus/cymetric/tree/master.png?&circle-token= 72639b59387f077973af98e7ce72996eac18b96c +.. |cyclus_main| image:: https://circleci.com/gh/cyclus/cyclus/tree/main.png?&circle-token= 35d82ba8661d4f32e0f084b9d8a2388fa62c0262 +.. |cycamore_main| image:: https://circleci.com/gh/cyclus/cycamore/tree/main.png?&circle-token= 333211090d5d5a15110eed1adbe079a6f3a4a704 +.. |cymetric_main| image:: https://circleci.com/gh/cyclus/cymetric/tree/main.png?&circle-token= 72639b59387f077973af98e7ce72996eac18b96c diff --git a/circle.yml b/circle.yml deleted file mode 100644 index 0495b0a626..0000000000 --- a/circle.yml +++ /dev/null @@ -1,179 +0,0 @@ -version: 2 -jobs: - # Update docker container - deploy_latest: # Cyclus/dev -> Cyclus:latest - docker: - - image: circleci/ruby:2.4-node - working_directory: ~/cyclus - steps: - # Ensure your image has git (required by git to clone via SSH) so that CircleCI can clone your repo - - checkout - - run: - name: Place the proper Dockerfile - command: cp docker/cyclus-ci/Dockerfile . - - setup_remote_docker - - run: - name: log into Docker - command: | - docker login -u $DOCKER_USER -p $DOCKER_PASS - - run: - name: Build Docker container - command: docker build --rm=false -t cyclus/cyclus:latest . - - run: - name: Push on DockerHub - command: docker push cyclus/cyclus:latest # push to docker depot - - deploy_stable: - docker: # Cyclus/master -> Cyclus:stable - - image: circleci/ruby:2.4-node - working_directory: ~/cyclus - steps: - - checkout - - run: - name: Place the proper Dockerfile - command: cp docker/cyclus-ci/Dockerfile . 
- - setup_remote_docker - - run: - name: Log on DockerHub - command: | - docker login -u $DOCKER_USER -p $DOCKER_PASS - - run: - name: Tag and Push on DockerHub - command: | - docker tag cyclus/cyclus:latest cyclus/cyclus:stable # creation - docker push cyclus/cyclus:stable # push to docker depot - - - # Debian package generation (on master update) - deb_generation: - docker: - - image: circleci/ruby:2.4-node - working_directory: ~/cyclus - steps: - - checkout - - setup_remote_docker - - run: - name: Tag and Push on DockerHub - command: | - docker/deb-ci/build_upload_deb.sh 14 - docker/deb-ci/build_upload_deb.sh 16 - -# Checking Cycamore and Cymetric compatibilities with the changes - cycamore_master: ## Cycamore/master against Cyclus/dev - docker: - - image: cyclus/cyclus-deps - working_directory: /root - steps: - # Ensure your image has git (required by git to clone via SSH) so that CircleCI can clone your repo - - run: apt-get -qq update; apt-get -y install git openssh-client - - run: - name: save SHA to a file - command: echo $CIRCLE_SHA1 > .circle-sha - - restore_cache: - keys: - - v1-repo-{{ checksum ".circle-sha" }} - - run: - name: Checkout Cycamore Master - command: | - git clone https://github.com/cyclus/cycamore.git - cd cycamore - git fetch --all - git checkout master - - run: - name: Build Cycamore - command: | - cd cycamore - python install.py -j 2 --build-type=Release \ - -DBLAS_LIBRARIES="/opt/conda/lib/libblas.so" \ - -DLAPACK_LIBRARIES="/opt/conda/lib/liblapack.so" - - run: - name: Unit Test - command: /root/.local/bin/cycamore_unit_tests; exit $? - - run: - name: Nosetests - command: nosetests -w ~/cycamore/tests; exit $? 
- - cymetric_master: ## Cymetric/master against Cyclus/dev + Cycamore/dev - docker: - - image: cyclus/cyclus-deps - working_directory: /root - steps: - # Ensure your image has git (required by git to clone via SSH) so that CircleCI can clone your repo - - run: apt-get -qq update; apt-get -y install git openssh-client - - run: - name: save SHA to a file - command: echo $CIRCLE_SHA1 > .circle-sha - - restore_cache: - keys: - - v1-repo-{{ checksum ".circle-sha" }} - - run: - name: Checkout Cycamore Master - command: | - git clone https://github.com/cyclus/cycamore.git - cd cycamore - git fetch --all - git checkout master - - run: - name: Build Cycamore - command: | - cd cycamore - python install.py -j 2 --build-type=Release \ - -DBLAS_LIBRARIES="/opt/conda/lib/libblas.so" \ - -DLAPACK_LIBRARIES="/opt/conda/lib/liblapack.so" - - run: cd ~/ - - run: - name: Checkout Cymetric Master - command: | - git clone https://github.com/cyclus/cymetric.git - cd cymetric - git fetch --all - git checkout master - - run: - name: Build/Install Cymetric - command: | - cd cymetric - python setup.py install - - run: - name: Cymetric Nosetest - command: nosetests -w ~/cymetric/tests; exit $? - - # some external triggers - cyXX_trig: - machine: true - steps: - - run: - name: Cymetric/Cycamore Master Triggers - command: | - curl -X POST https://circleci.com/api/v1.1/project/github/cyclus/cycamore/tree/master?circle-token=$CYCAMORE_CIRCLE_TOKEN - curl -X POST https://circleci.com/api/v1.1/project/github/cyclus/cymetric/tree/master?circle-token=$CYMETRIC_CIRCLE_TOKEN - -workflows: - version: 2 #Needed ?? (already on the top of the file) - build_and_test: - jobs: - - # Merge on Master - - deploy_latest: - filters: - branches: - only: master - - cyXX_trig: - filters: - branches: - only: master - requires: - - deploy_latest - - # The following should now be done on version tag. 
- - deploy_stable: - filters: - branches: - ignore: /.*/ - tags: - only: /.*/ - - deb_generation: - filters: - branches: - ignore: /.*/ - tags: - only: /.*/ diff --git a/cli/CMakeLists.txt b/cli/CMakeLists.txt index 41f98a6e3c..ccc83f14f2 100644 --- a/cli/CMakeLists.txt +++ b/cli/CMakeLists.txt @@ -48,7 +48,7 @@ ADD_EXECUTABLE( ${CYCLUS_CORE_TEST_SOURCE} cyclus_unit_test_driver.cc ) - + TARGET_LINK_LIBRARIES( cyclus_unit_tests dl @@ -57,7 +57,9 @@ TARGET_LINK_LIBRARIES( agents ${CYCLUS_TEST_LIBRARIES} ) - + MESSAGE(STATUS "(cli) CYCLUS_TEST_LIBRARIES: ${CYCLUS_TEST_LIBRARIES}") + MESSAGE(STATUS "(cli) CYCLUS_AGENT_TEST_LIBRARIES: ${CYCLUS_AGENT_TEST_LIBRARIES}") + INSTALL( TARGETS cyclus_unit_tests RUNTIME DESTINATION bin diff --git a/cli/cyclus.cc b/cli/cyclus.cc index ba0c62f001..9e198240f1 100644 --- a/cli/cyclus.cc +++ b/cli/cyclus.cc @@ -240,14 +240,14 @@ int ParseCliArgs(ArgInfo* ai, int argc, char* argv[]) { ("restart", po::value(), "restart from the specified simulation snapshot [db-file]:[sim-id]:[timestep]") ("schema", - "dump the cyclus master schema including all installed module schemas") + "dump the cyclus main schema including all installed module schemas") ("agent-schema", po::value(), "dump the schema for the named agent") ("agent-version", po::value(), "print the version of the specified agent") ("schema-path", po::value(), - "manually specify the path to the cyclus master schema") - ("flat-schema", "use the flat master simulation schema") + "manually specify the path to the cyclus main schema") + ("flat-schema", "use the flat main simulation schema") ("agent-annotations", po::value(), "dump the annotations for the named agent") ("agent-listing,l", po::value(), diff --git a/cli/cycpp.py b/cli/cycpp.py index 6e8aa8a87e..5e72815a7d 100755 --- a/cli/cycpp.py +++ b/cli/cycpp.py @@ -48,7 +48,8 @@ import re import sys import uuid -from collections import Sequence, Mapping, MutableMapping, OrderedDict +from collections.abc import Sequence, Mapping, 
MutableMapping +from collections import OrderedDict from contextlib import contextmanager from itertools import takewhile from subprocess import Popen, PIPE diff --git a/cmake/FindCOIN.cmake b/cmake/FindCOIN.cmake index 5190103876..d57aa7469d 100644 --- a/cmake/FindCOIN.cmake +++ b/cmake/FindCOIN.cmake @@ -25,7 +25,7 @@ # IF(NOT DEFINED COIN_ROOT_DIR) SET(COIN_ROOT_DIR "$ENV{COIN_ROOT_DIR}") - MESSAGE("\tCOIN Root Dir: ${COIN_INCLUDE_DIR}") + MESSAGE("COIN Root Dir from ENV: ${COIN_INCLUDE_DIR}") ENDIF(NOT DEFINED COIN_ROOT_DIR) MESSAGE(STATUS "COIN_ROOT_DIR hint is : ${COIN_ROOT_DIR}") diff --git a/cmake/FindGlib.cmake b/cmake/FindGlib.cmake deleted file mode 100644 index 623ccf4d6b..0000000000 --- a/cmake/FindGlib.cmake +++ /dev/null @@ -1,50 +0,0 @@ -#pkg_check_modules(GLIB_PKG glib-2.0) -libfind_pkg_check_modules(GLIB_PKG glib-2.0) - -if(GLIB_PKG_FOUND) - find_path(GLIB_INCLUDE_DIR NAMES glib.h PATH_SUFFIXES glib-2.0 - ${DEPS_INCLUDE_HINTS} - PATHS - ${GLIB_PKG_INCLUDE_DIRS} - /usr/include/glib-2.0 - /usr/include - /usr/local/include - ) - find_path(GLIB_CONFIG_INCLUDE_DIR NAMES glibconfig.h - ${DEPS_INCLUDE_HINTS} - PATHS ${GLIB_PKG_LIBDIR} PATH_SUFFIXES glib-2.0/include) - - find_library(GLIB_LIBRARIES NAMES glib-2.0 - ${DEPS_LIB_HINTS} - PATHS - ${GLIB_PKG_LIBRARY_DIRS} - /usr/lib - /usr/local/lib - ) -else(GLIB_PKG_FOUND) - # Find Glib even if pkg-config is not working (eg. 
cross compiling to Windows) - find_library(GLIB_LIBRARIES NAMES glib-2.0 ${DEPS_LIB_HINTS}) - string(REGEX REPLACE "/[^/]*$" "" GLIB_LIBRARIES_DIR ${GLIB_LIBRARIES}) - - find_path(GLIB_INCLUDE_DIR NAMES glib.h - ${DEPS_INCLUDE_HINTS} - PATH_SUFFIXES glib-2.0 - ) - find_path(GLIB_CONFIG_INCLUDE_DIR NAMES glibconfig.h - ${DEPS_INCLUDE_HINTS} - PATHS ${GLIB_LIBRARIES_DIR} PATH_SUFFIXES glib-2.0/include) -endif(GLIB_PKG_FOUND) - -if(GLIB_INCLUDE_DIR AND GLIB_CONFIG_INCLUDE_DIR AND GLIB_LIBRARIES) - set(GLIB_INCLUDE_DIRS ${GLIB_INCLUDE_DIR} ${GLIB_CONFIG_INCLUDE_DIR}) -endif(GLIB_INCLUDE_DIR AND GLIB_CONFIG_INCLUDE_DIR AND GLIB_LIBRARIES) - -if(GLIB_INCLUDE_DIRS AND GLIB_LIBRARIES) - set(GLIB_FOUND TRUE CACHE INTERNAL "glib-2.0 found") - message(STATUS "Found glib-2.0: ${GLIB_INCLUDE_DIR}, ${GLIB_LIBRARIES}") -else(GLIB_INCLUDE_DIRS AND GLIB_LIBRARIES) - set(GLIB_FOUND FALSE CACHE INTERNAL "glib-2.0 found") - message(STATUS "glib-2.0 not found.") -endif(GLIB_INCLUDE_DIRS AND GLIB_LIBRARIES) - -mark_as_advanced(GLIB_INCLUDE_DIR GLIB_CONFIG_INCLUDE_DIR GLIB_INCLUDE_DIRS GLIB_LIBRARIES) diff --git a/cmake/FindGlibmm.cmake b/cmake/FindGlibmm.cmake deleted file mode 100644 index 3790e30164..0000000000 --- a/cmake/FindGlibmm.cmake +++ /dev/null @@ -1,45 +0,0 @@ -# - Try to find Glibmm-2.4 -# Once done, this will define -# -# Glibmm_FOUND - system has Glibmm -# Glibmm_INCLUDE_DIRS - the Glibmm include directories -# Glibmm_LIBRARIES - link these to use Glibmm - -include(LibFindMacros) - -# Dependencies -libfind_package(Glibmm Glib) -libfind_package(Glibmm SigC++) - -# Use pkg-config to get hints about paths -libfind_pkg_check_modules(Glibmm_PKGCONF glibmm-2.4) - -# Main include dir -find_path(Glibmm_INCLUDE_DIR - NAMES glibmm/main.h - ${DEPS_INCLUDE_HINTS} - PATHS ${Glibmm_PKGCONF_INCLUDE_DIRS} - PATH_SUFFIXES glibmm-2.4 include/glibmm-2.4 - ) - -# Glib-related libraries also use a separate config header, which is in lib dir -find_path(GlibmmConfig_INCLUDE_DIR - NAMES 
glibmmconfig.h - ${DEPS_INCLUDE_HINTS} - PATHS ${Glibmm_PKGCONF_INCLUDE_DIRS} /usr - PATH_SUFFIXES lib/glibmm-2.4/include - ) - -# find lib -find_path(Glibmm_PKGCONF_LIBRARY_DIRS - NAMES libglib-2.0.so - ${DEPS_LIB_HINTS} - PATH_SUFFIXES lib - ) -libfind_library(Glibmm glibmm 2.4) - -# Set the include dir variables and the libraries and let libfind_process do the rest. -# NOTE: Singular variables for this library, plural for libraries this this lib depends on. -set(Glibmm_PROCESS_INCLUDES Glibmm_INCLUDE_DIR GlibmmConfig_INCLUDE_DIR GLIB_INCLUDE_DIRS SigC++_INCLUDE_DIRS) -set(Glibmm_PROCESS_LIBS Glibmm_LIBRARY GLIB_LIBRARIES SigC++_LIBRARIES) -libfind_process(Glibmm) diff --git a/cmake/FindLibXML++.cmake b/cmake/FindLibXML++.cmake deleted file mode 100644 index 51485f6d63..0000000000 --- a/cmake/FindLibXML++.cmake +++ /dev/null @@ -1,57 +0,0 @@ -# - Try to find LibXML++ 2.6 -# Once done, this will define -# -# LibXML++_FOUND - system has LibXML++ -# LibXML++_INCLUDE_DIRS - the LibXML++ include directories -# LibXML++_LIBRARIES - link these to use LibXML++ - -include(LibFindMacros) - -# Dependencies -libfind_package(LibXML++ LibXml2 ${DEPS_HINTS}) -find_path(LIBXML2_INCLUDE_DIR - NAMES libxml - ${DEPS_INCLUDE_HINTS} - PATH_SUFFIXES libxml2 include/libxml2 - ) -libfind_package(LibXML++ Glibmm) - -# Use pkg-config to get hints about paths -libfind_pkg_check_modules(LibXML++_PKGCONF libxml++-2.6) - -# Main include dir -find_path(LibXML++_INCLUDE_DIR - NAMES libxml++/libxml++.h - ${DEPS_INCLUDE_HINTS} - PATHS ${LibXML++_PKGCONF_INCLUDE_DIRS} - PATH_SUFFIXES libxml++-2.6 include/libxml++-2.6 - ) - -# Glib-related libraries also use a separate config header, which is in lib dir -find_path(LibXML++Config_INCLUDE_DIR - NAMES libxml++config.h - ${DEPS_HINTS} - PATHS ${LibXML++_PKGCONF_INCLUDE_DIRS} /usr - PATH_SUFFIXES lib/libxml++-2.6/include - ) - -# find lib -find_path(LibXML++_PKGCONF_LIBRARY_DIRS - NAMES xml++-2.6 - ${DEPS_LIB_DIR} - PATH_SUFFIXES lib - ) 
-libfind_library(LibXML++ xml++ 2.6) - # Finally the library itself -#find_library(LibXML++_LIBRARY -# NAMES xml++-2.6 -# ${DEPS_LIB_DIR} -# PATHS ${LibXML++_PKGCONF_LIBRARY_DIRS} -# ) - -# Set the include dir variables and the libraries and let libfind_process do the rest. -# NOTE: Singular variables for this library, plural for libraries this this lib depends on. -set(LibXML++_PROCESS_INCLUDES LibXML++_INCLUDE_DIR LibXML++Config_INCLUDE_DIR LIBXML2_INCLUDE_DIR Glibmm_INCLUDE_DIRS) -set(LibXML++_PROCESS_LIBS LibXML++_LIBRARY LIBXML2_LIBRARIES Glibmm_LIBRARIES) - -libfind_process(LibXML++) diff --git a/cmake/FindNumpy.cmake b/cmake/FindNumpy.cmake deleted file mode 100644 index 136fce28cf..0000000000 --- a/cmake/FindNumpy.cmake +++ /dev/null @@ -1,87 +0,0 @@ -# - Find Numpy -# NumPy is the fundamental package needed for scientific computing with Python -# www.numpy.scipy.org -# -# The module defines the following variables: -# NUMPY_FOUND - the system has numpy -# NUMPY_INCLUDE_DIR - where to find numpy/arrayobject.h -# NUMPY_INCLUDE_DIRS - numpy include directories -# NUMPY_VERSION_STRING - version (ex. 1.2.3) -# NUMPY_MAJOR_VERSION - major version (ex. 1) -# NUMPY_MINOR_VERSION - minor version (ex. 2) -# NUMPY_PATCH_VERSION - patch version (ex. 3) - -#============================================================================= -# Copyright 2005-2012 EDF-EADS-Phimeca -# -# Distributed under the OSI-approved BSD License (the "License"); -# see accompanying file Copyright.txt for details. -# -# This software is distributed WITHOUT ANY WARRANTY; without even the -# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. -# See the License for more information. -#============================================================================= -# (To distributed this file outside of CMake, substitute the full -# License text for the above reference.) 
- -# set NUMPY_INCLUDE_DIR -find_package ( PythonInterp ) - -if ( PYTHONINTERP_FOUND ) - execute_process ( COMMAND ${PYTHON_EXECUTABLE} -c "import numpy; print(numpy.get_include())" - OUTPUT_VARIABLE NUMPY_INCLUDE_DIR - ERROR_QUIET - OUTPUT_STRIP_TRAILING_WHITESPACE ) -endif () - -# set NUMPY_INCLUDE_DIRS -set ( NUMPY_INCLUDE_DIRS ${NUMPY_INCLUDE_DIR} ) - -# version -if ( PYTHONINTERP_FOUND ) - execute_process ( COMMAND ${PYTHON_EXECUTABLE} -c "import numpy; print(numpy.__version__)" - OUTPUT_VARIABLE NUMPY_VERSION_STRING - OUTPUT_STRIP_TRAILING_WHITESPACE ) - - if ( NUMPY_VERSION_STRING ) - string ( REGEX REPLACE "([0-9]+)\\..*" "\\1" NUMPY_MAJOR_VERSION ${NUMPY_VERSION_STRING} ) - string ( REGEX REPLACE "[0-9]+\\.([0-9]+).*" "\\1" NUMPY_MINOR_VERSION ${NUMPY_VERSION_STRING} ) - string ( REGEX REPLACE "[0-9]+\\.[0-9]+\\.([0-9]+).*" "\\1" NUMPY_PATCH_VERSION ${NUMPY_VERSION_STRING} ) - endif () - -endif () - -# check version -set ( _NUMPY_VERSION_MATCH TRUE ) -if ( Numpy_FIND_VERSION AND NUMPY_VERSION ) - if ( Numpy_FIND_VERSION_EXACT ) - if ( Numpy_FIND_VERSION VERSION_EQUAL NUMPY_VERSION_STRING ) - else() - set ( _NUMPY_VERSION_MATCH FALSE) - endif () - else () - if ( Numpy_FIND_VERSION VERSION_GREATER NUMPY_VERSION_STRING ) - set ( _NUMPY_VERSION_MATCH FALSE ) - endif () - endif () -endif () - -message("-- NUMPY_VERSION_STRING = ${NUMPY_VERSION_STRING}") - -# handle REQUIRED and QUIET options -include ( FindPackageHandleStandardArgs ) -find_package_handle_standard_args ( Numpy DEFAULT_MSG - NUMPY_VERSION_STRING - _NUMPY_VERSION_MATCH - NUMPY_INCLUDE_DIR - NUMPY_INCLUDE_DIRS -) - -mark_as_advanced ( - NUMPY_VERSION_STRING - NUMPY_MAJOR_VERSION - NUMPY_MINOR_VERSION - NUMPY_PATCH_VERSION - NUMPY_INCLUDE_DIR - NUMPY_INCLUDE_DIRS -) diff --git a/cmake/FindSigC++.cmake b/cmake/FindSigC++.cmake deleted file mode 100644 index 20133b11aa..0000000000 --- a/cmake/FindSigC++.cmake +++ /dev/null @@ -1,41 +0,0 @@ -# - Try to find SigC++-2.0 -# Once done, this will define -# 
-# SigC++_FOUND - system has SigC++ -# SigC++_INCLUDE_DIRS - the SigC++ include directories -# SigC++_LIBRARIES - link these to use SigC++ - -include(LibFindMacros) - -# Use pkg-config to get hints about paths -libfind_pkg_check_modules(SigC++_PKGCONF sigc++-2.0) - -# Main include dir -find_path(SigC++_INCLUDE_DIR - NAMES sigc++/sigc++.h - ${DEPS_INCLUDE_HINTS} - PATHS ${SigC++_PKGCONF_INCLUDE_DIRS} ${SigC++_PKGCONF_INCLUDE_DIRS}/include - PATH_SUFFIXES include/sigc++-2.0 sigc++-2.0 - ) - -# Glib-related libraries also use a separate config header, which is in lib dir -find_path(SigC++Config_INCLUDE_DIR - NAMES sigc++config.h - ${DEPS_INCLUDE_HINTS} - PATHS ${SigC++_PKGCONF_INCLUDE_DIRS} /usr - PATH_SUFFIXES lib/sigc++-2.0/include - ) - -# find lib -find_path(SigC++_PKGCONF_LIBRARY_DIRS - NAMES libsigc-2.0.so - ${DEPS_LIB_HINTS} - PATH_SUFFIXES lib - ) -libfind_library(SigC++ sigc 2.0) - -# Set the include dir variables and the libraries and let libfind_process do the rest. -# NOTE: Singular variables for this library, plural for libraries this this lib depends on. -set(SigC++_PROCESS_INCLUDES SigC++_INCLUDE_DIR SigC++Config_INCLUDE_DIR) -set(SigC++_PROCESS_LIBS SigC++_LIBRARY) -libfind_process(SigC++) diff --git a/cmake/FindSqlite3.cmake b/cmake/FindSqlite3.cmake deleted file mode 100644 index 65064e7acc..0000000000 --- a/cmake/FindSqlite3.cmake +++ /dev/null @@ -1,58 +0,0 @@ -# - find Sqlite 3 -# SQLITE3_INCLUDE_DIR - Where to find Sqlite 3 header files (directory) -# SQLITE3_LIBRARIES - Sqlite 3 libraries -# SQLITE3_LIBRARY_RELEASE - Where the release library is -# SQLITE3_LIBRARY_DEBUG - Where the debug library is -# SQLITE3_FOUND - Set to TRUE if we found everything (library, includes and executable) - -# Copyright (c) 2010 Pau Garcia i Quiles, -# -# Redistribution and use is allowed according to the terms of the BSD license. -# For details see the accompanying COPYING-CMAKE-SCRIPTS file. 
-# -# Generated by CModuler, a CMake Module Generator - http://gitorious.org/cmoduler - -IF(SQLITE3_INCLUDE_DIR AND SQLITE3_LIBRARY_RELEASE AND SQLITE3_LIBRARY_DEBUG) - SET(SQLITE3_FIND_QUIETLY TRUE) -ENDIF(SQLITE3_INCLUDE_DIR AND SQLITE3_LIBRARY_RELEASE AND SQLITE3_LIBRARY_DEBUG) - -FIND_LIBRARY(SQLITE3_LIBRARY_RELEASE NAMES sqlite3 ${DEPS_LIB_HINTS}) -GET_FILENAME_COMPONENT(SQLITE3_INCLUDE_DIR "${SQLITE3_LIBRARY_RELEASE}" DIRECTORY) -GET_FILENAME_COMPONENT(SQLITE3_INCLUDE_DIR "${SQLITE3_INCLUDE_DIR}" DIRECTORY) -SET(SQLITE3_INCLUDE_DIR "${SQLITE3_INCLUDE_DIR}/include") - -FIND_LIBRARY(SQLITE3_LIBRARY_DEBUG NAMES sqlite3 sqlite3d - ${DEPS_LIB_HINTS} HINTS /usr/lib/debug/usr/lib/) - -IF(SQLITE3_LIBRARY_RELEASE OR SQLITE3_LIBRARY_DEBUG AND SQLITE3_INCLUDE_DIR) - SET(SQLITE3_FOUND TRUE) -ENDIF(SQLITE3_LIBRARY_RELEASE OR SQLITE3_LIBRARY_DEBUG AND SQLITE3_INCLUDE_DIR) - -IF(SQLITE3_LIBRARY_DEBUG AND SQLITE3_LIBRARY_RELEASE) - # if the generator supports configuration types then set - # optimized and debug libraries, or if the CMAKE_BUILD_TYPE has a value - IF(CMAKE_CONFIGURATION_TYPES OR CMAKE_BUILD_TYPE) - SET(SQLITE3_LIBRARIES optimized ${SQLITE3_LIBRARY_RELEASE} debug ${SQLITE3_LIBRARY_DEBUG}) - ELSE(CMAKE_CONFIGURATION_TYPES OR CMAKE_BUILD_TYPE) - # if there are no configuration types and CMAKE_BUILD_TYPE has no value - # then just use the release libraries - SET(SQLITE3_LIBRARIES ${SQLITE3_LIBRARY_RELEASE}) - ENDIF(CMAKE_CONFIGURATION_TYPES OR CMAKE_BUILD_TYPE) -ELSEIF(SQLITE3_LIBRARY_RELEASE) - SET(SQLITE3_LIBRARIES ${SQLITE3_LIBRARY_RELEASE}) -ELSE(SQLITE3_LIBRARY_DEBUG AND SQLITE3_LIBRARY_RELEASE) - SET(SQLITE3_LIBRARIES ${SQLITE3_LIBRARY_DEBUG}) -ENDIF(SQLITE3_LIBRARY_DEBUG AND SQLITE3_LIBRARY_RELEASE) - -IF(SQLITE3_FOUND) - IF(NOT SQLITE3_FIND_QUIETLY) - MESSAGE(STATUS "Found Sqlite3 header file in ${SQLITE3_INCLUDE_DIR}") - MESSAGE(STATUS "Found Sqlite3 libraries: ${SQLITE3_LIBRARIES}") - ENDIF(NOT SQLITE3_FIND_QUIETLY) -ELSE(SQLITE3_FOUND) - 
IF(SQLITE3_FIND_REQUIRED) - MESSAGE(FATAL_ERROR "Could not find Sqlite3") - ELSE(SQLITE3_FIND_REQUIRED) - MESSAGE(STATUS "Optional package Sqlite3 was not found") - ENDIF(SQLITE3_FIND_REQUIRED) -ENDIF(SQLITE3_FOUND) diff --git a/cmake/FindTcmalloc.cmake b/cmake/FindTcmalloc.cmake deleted file mode 100644 index ec549b5f3a..0000000000 --- a/cmake/FindTcmalloc.cmake +++ /dev/null @@ -1,39 +0,0 @@ -# - Find Tcmalloc -# Find the native Tcmalloc includes and library -# -# Tcmalloc_LIBRARIES - List of libraries when using Tcmalloc. -# Tcmalloc_FOUND - True if Tcmalloc found. - -if (USE_TCMALLOC) - set(Tcmalloc_NAMES tcmalloc) -else () - set(Tcmalloc_NAMES tcmalloc_minimal tcmalloc) -endif () - -find_library(Tcmalloc_LIBRARY NO_DEFAULT_PATH - NAMES ${Tcmalloc_NAMES} - ${DEPS_HINTS} - PATHS ${HT_DEPENDENCY_LIB_DIR} /lib /usr/lib /usr/local/lib /opt/local/lib -) - -if (Tcmalloc_LIBRARY) - set(Tcmalloc_FOUND TRUE) - set( Tcmalloc_LIBRARIES ${Tcmalloc_LIBRARY} ) -else () - set(Tcmalloc_FOUND FALSE) - set( Tcmalloc_LIBRARIES ) -endif () - -if (Tcmalloc_FOUND) - message(STATUS "Found Tcmalloc: ${Tcmalloc_LIBRARY}") -else () - message(STATUS "Not Found Tcmalloc: ${Tcmalloc_LIBRARY}") - if (Tcmalloc_FIND_REQUIRED) - message(STATUS "Looked for Tcmalloc libraries named ${Tcmalloc_NAMES}.") - message(FATAL_ERROR "Could NOT find Tcmalloc library") - endif () -endif () - -mark_as_advanced( - Tcmalloc_LIBRARY - ) diff --git a/cmake/UseCyclus.cmake b/cmake/UseCyclus.cmake index c99c7f8d0c..6ef69e8620 100644 --- a/cmake/UseCyclus.cmake +++ b/cmake/UseCyclus.cmake @@ -127,32 +127,29 @@ MACRO(USE_CYCLUS lib_root src_root) SET(CCOUT "${BUILD_DIR}/${src_root}.cc") SET(CCFLAG "-o=${CCOUT}") - # not sure if needed.. 
+ # do all processing for CC file - always needed IF(NOT EXISTS ${CCOUT}) - MESSAGE(STATUS "Executing ${CYCPP} ${CCIN} ${PREPROCESSOR} ${CCFLAG} ${ORIG} ${INCL_ARGS}") - EXECUTE_PROCESS(COMMAND ${CYCPP} ${CCIN} ${PREPROCESSOR} ${CCFLAG} - ${ORIG} ${INCL_ARGS} RESULT_VARIABLE res_var) - IF(NOT "${res_var}" STREQUAL "0") - message(FATAL_ERROR "cycpp failed on '${CCIN}' with exit code '${res_var}'") - ENDIF() + PREPROCESS_CYCLUS_FILE_(${CYCPP} ${CCIN} ${PREPROCESSOR} ${CCFLAG} ${ORIG} ${INCL_ARGS}) ENDIF(NOT EXISTS ${CCOUT}) SET( "${lib_root}_CC" "${${lib_root}_CC}" "${CCOUT}" CACHE INTERNAL "Agent impl" FORCE ) + + # check for existing of header file IF(EXISTS "${HIN}") - # not sure if we still need this... + # Do all processing for header file IF(NOT EXISTS ${HOUT}) - MESSAGE(STATUS "Executing ${CYCPP} ${HIN} ${PREPROCESSOR} ${HFLAG} ${ORIG} ${INCL_ARGS}") - EXECUTE_PROCESS(COMMAND ${CYCPP} ${HIN} ${PREPROCESSOR} ${HFLAG} ${ORIG} ${INCL_ARGS} - RESULT_VARIABLE res_var) - - IF(NOT "${res_var}" STREQUAL "0") - message(FATAL_ERROR "archetype preprocessing failed for ${HIN}, res_var = '${res_var}'") - ENDIF() - + PREPROCESS_CYCLUS_FILE_( ${CYCPP} ${HIN} ${PREPROCESSOR} ${HFLAG} ${ORIG} ${INCL_ARGS}) ENDIF(NOT EXISTS ${HOUT}) + SET( + "${lib_root}_H" + "${${lib_root}_H}" "${HOUT}" + CACHE INTERNAL "Agent header" FORCE + ) + + # make custom Makefile target for CC and H file together for joint dependency ADD_CUSTOM_COMMAND( OUTPUT ${CCOUT} OUTPUT ${HOUT} @@ -165,12 +162,9 @@ MACRO(USE_CYCLUS lib_root src_root) COMMENT "Executing ${CYCPP} ${HIN} ${PREPROCESSOR} ${HFLAG} ${ORIG} ${INCL_ARGS}" COMMENT "Executing ${CYCPP} ${CCIN} ${PREPROCESSOR} ${CCFLAG} ${ORIG} ${INCL_ARGS}" ) - SET( - "${lib_root}_H" - "${${lib_root}_H}" "${HOUT}" - CACHE INTERNAL "Agent header" FORCE - ) + SET(DEP_LIST ${DEP_LIST} ${CCOUT} ${HOUT}) ELSE(EXISTS "${HIN}") + # Make custom Makefile target for CC file alone if ho header ADD_CUSTOM_COMMAND( OUTPUT ${CCOUT} COMMAND ${CYCPP} ${CCIN} 
${PREPROCESSOR} ${CCFLAG} ${ORIG} ${INCL_ARGS} @@ -179,6 +173,7 @@ MACRO(USE_CYCLUS lib_root src_root) DEPENDS ${CYCLUS_CUSTOM_HEADERS} COMMENT "Executing ${CYCPP} ${CCIN} ${PREPROCESSOR} ${CCFLAG} ${ORIG} ${INCL_ARGS}" ) + SET(DEP_LIST ${CCOUT}) ENDIF(EXISTS "${HIN}") # add tests @@ -187,11 +182,20 @@ MACRO(USE_CYCLUS lib_root src_root) SET(HTIN "${CMAKE_CURRENT_SOURCE_DIR}/${src_root}_tests.h") SET(HTOUT "${BUILD_DIR}/${src_root}_tests.h") SET(CMD "cp") + IF(EXISTS "${CCTIN}") + MESSAGE(STATUS "Copying ${CCTIN} to ${CCTOUT}.") + EXECUTE_PROCESS(COMMAND ${CMD} ${CCTIN} ${CCTOUT}) + SET("${lib_root}_TEST_CC" "${${lib_root}_TEST_CC}" "${CCTOUT}" + CACHE INTERNAL "Agent test source" FORCE) + IF(EXISTS "${HTIN}") # install test headers MESSAGE(STATUS "Copying ${HTIN} to ${HTOUT}.") EXECUTE_PROCESS(COMMAND ${CMD} ${HTIN} ${HTOUT}) + SET("${lib_root}_TEST_H" "${${lib_root}_TEST_H}" "${HTOUT}" + CACHE INTERNAL "Agent test headers" FORCE) + # Create custom Makefile target for CC and H file together for joint dependency ADD_CUSTOM_COMMAND( OUTPUT ${HTOUT} OUTPUT ${CCTOUT} @@ -205,27 +209,34 @@ MACRO(USE_CYCLUS lib_root src_root) COMMENT "Copying ${HTIN} to ${HTOUT}." COMMENT "Copying ${CCTIN} to ${CCTOUT}." ) - SET("${lib_root}_TEST_H" "${${lib_root}_TEST_H}" "${HTOUT}" - CACHE INTERNAL "Agent test headers" FORCE) + SET(DEP_LIST ${DEP_LIST} ${HTOUT} ${CCTOUT}) + ELSE(EXISTS "${HTIN}") + # create custom Makefile target for CC only + ADD_CUSTOM_COMMAND( + OUTPUT ${CCTOUT} + COMMAND ${CMD} ${CCTIN} ${CCTOUT} + DEPENDS ${CCTIN} + DEPENDS ${CCIN} + DEPENDS ${CYCLUS_CUSTOM_HEADERS} + COMMENT "Copying ${CCTIN} to ${CCTOUT}." 
+ ) + SET(DEP_LIST ${DEP_LIST} ${CCTOUT}) ENDIF(EXISTS "${HTIN}") - - # install test impl - MESSAGE(STATUS "Copying ${CCTIN} to ${CCTOUT}.") - EXECUTE_PROCESS(COMMAND ${CMD} ${CCTIN} ${CCTOUT}) - ADD_CUSTOM_COMMAND( - OUTPUT ${CCTOUT} - COMMAND ${CMD} ${CCTIN} ${CCTOUT} - DEPENDS ${CCTIN} - DEPENDS ${CCIN} - DEPENDS ${CYCLUS_CUSTOM_HEADERS} - COMMENT "Copying ${CCTIN} to ${CCTOUT}." - ) - SET("${lib_root}_TEST_CC" "${${lib_root}_TEST_CC}" "${CCTOUT}" - CACHE INTERNAL "Agent test source" FORCE) ENDIF(EXISTS "${CCTIN}") + MESSAGE(STATUS "Finished construction of build files for agent: ${src_root}") ENDMACRO() +MACRO(PREPROCESS_CYCLUS_FILE_ cycpp filein preproc flags orig incl_args) + MESSAGE(STATUS "Executing ${cycpp} ${filein} ${preproc} ${flags} ${orig} ${incl_args}") + EXECUTE_PROCESS(COMMAND ${cycpp} ${filein} ${PREPROCESSOR} ${flags} + ${orig} ${incl_args} RESULT_VARIABLE res_var) + IF(NOT "${res_var}" STREQUAL "0") + message(FATAL_ERROR "${cycpp} failed on '${filein}' with exit code '${res_var}'") + ENDIF() + +ENDMACRO() + MACRO(INSTALL_CYCLUS_STANDALONE lib_root src_root lib_dir) # clear variables before starting SET("${lib_root}_H" "" CACHE INTERNAL "Agent header" FORCE) @@ -266,11 +277,14 @@ MACRO(INSTALL_CYCLUS_MODULE lib_root lib_dir) ENDMACRO() MACRO(INSTALL_AGENT_LIB_ lib_name lib_src lib_h inst_dir) + + ADD_CUSTOM_TARGET(${lib_name}-sources DEPENDS ${lib_src} ${lib_h}) + # add lib ADD_LIBRARY(${lib_name} ${lib_src}) TARGET_LINK_LIBRARIES(${lib_name} dl ${LIBS}) SET(CYCLUS_LIBRARIES ${CYCLUS_LIBRARIES} ${lib_root}) - ADD_DEPENDENCIES(${lib_name} ${lib_src} ${lib_h}) + ADD_DEPENDENCIES(${lib_name} ${lib_name}-sources) set(dest_ "lib/cyclus") string(COMPARE EQUAL "${inst_dir}" "" is_empty) diff --git a/conda-recipe/build.sh b/conda-recipe/build.sh deleted file mode 100644 index db4d8f06fc..0000000000 --- a/conda-recipe/build.sh +++ /dev/null @@ -1,55 +0,0 @@ -#!/bin/bash - -mkdir build -cd build -export LD_LIBRARY_PATH=$PREFIX/lib/ -export 
CMAKE_LIBRARY_PATH=$PREFIX/lib/ -export PATH=$PREFIX/bin:$PATH - -export MACOSX_DEPLOYMENT_TARGET= - -# -# Previous iterations have ahd trouble with hdf build vs. link -# versions. Something like the following has helped in the past. -# -#### hack fix for hdf5 issues -### if [[ `uname` == 'Linux' ]]; then -### ln -s $PREFIX/lib/libhdf5.so.9 $PREFIX/lib/libhdf5.so.8 -### ln -s $PREFIX/lib/libhdf5_hl.so.9 $PREFIX/lib/libhdf5_hl.so.8 -### else -### ln -s $PREFIX/lib/libhdf5.9.dylib $PREFIX/lib/libhdf5.8.dylib -### ln -s $PREFIX/lib/libhdf5_hl.9.dylib $PREFIX/lib/libhdf5_hl.8.dylib -### fi - -if [[ `uname` == 'Linux' ]]; then - cmake .. \ - -DCMAKE_INSTALL_PREFIX=$PREFIX \ - -DHDF5_ROOT=$PREFIX \ - -DBOOST_ROOT=$PREFIX \ - -DBOOST_LIBRARYDIR=$PREFIX/lib \ - -DBoost_NO_SYSTEM_PATHS=ON \ - -DCMAKE_BUILD_TYPE=Release \ - -DLAPACK_LIBRARIES=$PREFIX/lib/liblapack.so \ - -DBLAS_LIBRARIES=$PREFIX/lib/libblas.so -else - echo $CFLAGS - echo $LDFLAGS - export MACOSX_DEPLOYMENT_TARGET= - export DYLD_LIBRARY_PATH=$PREFIX/lib - export LDFLAGS="-headerpad_max_install_names -headerpad" - export CFLAGS="-headerpad_max_install_names -headerpad" - export CXXFLAGS= - cmake .. \ - -DCMAKE_INSTALL_PREFIX=$PREFIX \ - -DHDF5_ROOT=$PREFIX \ - -DCOIN_ROOT_DIR=$PREFIX \ - -DBOOST_ROOT=$PREFIX \ - -DCMAKE_BUILD_TYPE=Release \ - -DLAPACK_LIBRARIES=$PREFIX/lib/liblapack.dylib \ - -DBLAS_LIBRARIES=$PREFIX/lib/libblas.dylib -fi - -make VERBOSE=1 -make install - -cd .. diff --git a/conda-recipe/meta.yaml b/conda-recipe/meta.yaml deleted file mode 100644 index 51ade4fceb..0000000000 --- a/conda-recipe/meta.yaml +++ /dev/null @@ -1,51 +0,0 @@ -package: - name: cyclus - version: 0.0 - -# Only use fn and url for polyphemus compatability -source: - fn: cyclus-src.tar.gz # ["TRAVIS" not in environ] - url: https://github.com/cyclus/cyclus/archive/develop.tar.gz # ["TRAVIS" not in environ] - path: .. 
# ["TRAVIS" in environ] - -requirements: - build: - - sigcpp - - glibmm - - libxmlpp - - coincbc - - boost - - hdf5 - - mylibxml2 # [osx] - - libxml2 # [linux] - - myglib # [osx] - - libffi # [osx] - - gettext # [osx] - - pkg-config-lite # [osx] - - cmake - - python - run: - - sigcpp - - glibmm - - libxmlpp - - coincbc - - boost - - hdf5 - - mylibxml2 # [osx] - - libxml2 # [linux] - - myglib # [osx] - - libffi # [osx] - - gettext # [osx] - - pkg-config-lite # [osx] - -build: - string: nightly - -test: - requires: - - nose - - pytables - -about: - home: Cyclus - license: BSD Clause 3 diff --git a/conda-recipe/post-link.sh b/conda-recipe/post-link.sh deleted file mode 100755 index e9d444048d..0000000000 --- a/conda-recipe/post-link.sh +++ /dev/null @@ -1,50 +0,0 @@ -# This script replaces the cyclus and cyclus_unit_tests commands with simple -# wrappers that will modify the user's environment as needed to point -# cyclus-sepcific envrionment variables to the conda install location $PREFIX. -# Conda packaging has three phases which come to a head here. -# -# 1. builing the package on a builder's computer -# 2. installing the package on the user's machine, where this script is run -# 3. runtime, when the wrapper script(s) execute. -# -# At install time (2), the conda post-link phase will define some extra -# environment variables, such as $PREFIX, that are not available elsewhere. -# These variables are descriped at http://conda.pydata.org/docs/building/build-scripts.html -# Otherwise envrionment variables in the wrapper script (eg $CYCLUS_PATH) -# must be escaped here so that they are evaluated at run time (3) rather -# than at build (1) or install (2). 
-echo "post-link.sh, PREFIX: $PREFIX" - -mv $PREFIX/bin/cyclus $PREFIX/bin/cyclus_base -echo "#!/bin/bash -export CYCLUS_PATH=\"\$CYCLUS_PATH:\$HOME/.local/lib/cyclus:$PREFIX/lib/cyclus\" -if [ -z \"\$CYCLUS_NUC_DATA\" ]; then - export CYCLUS_NUC_DATA=\"$PREFIX/share/cyclus/cyclus_nuc_data.h5\" -fi -if [ -z \"\$CYCLUS_RNG_SCHEMA\" ]; then - export CYCLUS_RNG_SCHEMA=\"$PREFIX/share/cyclus/cyclus.rng.in\" -fi - -$PREFIX/bin/cyclus_base \$* -" > $PREFIX/bin/cyclus -chmod 755 $PREFIX/bin/cyclus - -# The library path modifications are here because cyclus installs -# libgtest and libbaseagentunittests into the lib/cyclus directory. -# We make this directory the last possible location to be searched. -mv $PREFIX/bin/cyclus_unit_tests $PREFIX/bin/cyclus_unit_tests_base -echo "#!/bin/bash -export LD_LIBRARY_PATH=\"\$LD_LIBRARY_PATH:$PREFIX/lib/cyclus\" -export DYLD_FALLBACK_LIBRARY_PATH=\"\$DYLD_FALLBACK_LIBRARY_PATH:$PREFIX/lib/cyclus\" -export CYCLUS_PATH=\"\$CYCLUS_PATH:\$HOME/.local/lib/cyclus:$PREFIX/lib/cyclus\" -if [ -z \"\$CYCLUS_NUC_DATA\" ]; then - export CYCLUS_NUC_DATA=\"$PREFIX/share/cyclus/cyclus_nuc_data.h5\" -fi -if [ -z \"\$CYCLUS_RNG_SCHEMA\" ]; then - export CYCLUS_RNG_SCHEMA=\"$PREFIX/share/cyclus/cyclus.rng.in\" -fi - -$PREFIX/bin/cyclus_unit_tests_base \$* -" > $PREFIX/bin/cyclus_unit_tests -chmod 755 $PREFIX/bin/cyclus_unit_tests - diff --git a/cyclus/agents.pyx b/cyclus/agents.pyx index 221a4ce8fb..9154d35691 100644 --- a/cyclus/agents.pyx +++ b/cyclus/agents.pyx @@ -17,7 +17,7 @@ from cpython cimport (PyObject, PyDict_New, PyDict_Contains, import json from inspect import getmro, getdoc from copy import deepcopy -from collections import Mapping +from collections.abc import Mapping from cyclus cimport cpp_cyclus from cyclus.cpp_cyclus cimport shared_ptr, reinterpret_pointer_cast diff --git a/cyclus/gentypesystem.py b/cyclus/gentypesystem.py index 506dca2a9d..f50a5e45b3 100644 --- a/cyclus/gentypesystem.py +++ b/cyclus/gentypesystem.py @@ -10,7 +10,6 
@@ import io import os import sys -import imp import json import argparse import platform @@ -700,7 +699,7 @@ def convert_to_cpp(self, x, t): '{valdecl}\n' 'cdef {type} cpp{var}\n', 'cpp{var} = {type}()\n' - 'if not isinstance({var}, collections.Mapping):\n' + 'if not isinstance({var}, collections.abc.Mapping):\n' ' {var} = dict({var})\n' 'for {keyname}, {valname} in {var}.items():\n' ' {keybody.indent4}\n' diff --git a/cyclus/jsoncpp.pyx b/cyclus/jsoncpp.pyx index 7df8ae7ff6..5135802d52 100644 --- a/cyclus/jsoncpp.pyx +++ b/cyclus/jsoncpp.pyx @@ -35,7 +35,7 @@ cdef cpp_jsoncpp.Value * tocppval(object doc) except NULL: cdef cpp_jsoncpp.Value * cval = NULL if isinstance(doc, Value): cval = new cpp_jsoncpp.Value( ( doc)._inst[0]) - elif isinstance(doc, collections.Mapping): + elif isinstance(doc, collections.abc.Mapping): cval = new cpp_jsoncpp.Value( cpp_jsoncpp.objectValue) for k, v in doc.items(): if not isinstance(k, basestring): diff --git a/cyclus/lib.pyx b/cyclus/lib.pyx index fe79b8337d..1512f280e5 100644 --- a/cyclus/lib.pyx +++ b/cyclus/lib.pyx @@ -22,7 +22,8 @@ from cpython.pycapsule cimport PyCapsule_GetPointer from binascii import hexlify import uuid import os -from collections import Mapping, Sequence, Iterable, defaultdict +from collections import defaultdict +from collections.abc import Mapping, Sequence, Iterable from importlib import import_module cimport numpy as np @@ -853,7 +854,7 @@ class XMLFileLoader(_XMLFileLoader): Create a new loader reading from the xml simulation input file and writing to and initializing the backends in the recorder. The recorder must - already have the backend registered. schema_file identifies the master + already have the backend registered. schema_file identifies the main xml rng schema used to validate the input file. The format specifies the input file format from one of: "none", "xml", "json", or "py". 
""" @@ -886,7 +887,7 @@ class XMLFlatLoader(_XMLFlatLoader): Create a new loader reading from the xml simulation input file and writing to and initializing the backends in the recorder. The recorder must - already have the backend registered. schema_file identifies the master + already have the backend registered. schema_file identifies the main xml rng schema used to validate the input file. The format specifies the input file format from one of: "none", "xml", "json", or "py". diff --git a/cyclus/main.py b/cyclus/main.py index 19af6e6091..21c17125c0 100644 --- a/cyclus/main.py +++ b/cyclus/main.py @@ -314,7 +314,7 @@ def make_parser(): help='restart from the specified simulation snapshot, ' 'not supported.') p.add_argument('--schema', action=Schema, - help='dump the cyclus master schema including all ' + help='dump the cyclus main schema including all ' 'installed module schemas') p.add_argument('--agent-schema', action=AgentSchema, dest='agent_schema', @@ -323,10 +323,10 @@ def make_parser(): dest='agent_version', help='dump the version for the named agent') p.add_argument('--schema-path', dest='schema_path', default=None, - help='manually specify the path to the cyclus master schema') + help='manually specify the path to the cyclus main schema') p.add_argument('--flat-schema', action='store_true', default=False, dest='flat_schema', - help='use the flat master simulation schema') + help='use the flat main simulation schema') p.add_argument('--agent-annotations', action=AgentAnnotations, dest='agent_annotations', help='dump the annotations for the named agent') @@ -364,7 +364,7 @@ def make_parser(): p.add_argument('--rng-schema', action=RngSchema, help='print the path to cyclus.rng.in') p.add_argument('--rng-print', action=RngPrint, - help='print the master schema for the input simulation') + help='print the main schema for the input simulation') p.add_argument('--nuc-data', action=NucData, help='print the path to cyclus_nuc_data.h5') 
p.add_argument('--json-to-xml', action=JsonToXml, @@ -407,8 +407,8 @@ def run_simulation(ns): state.si.context.sim_id) print(msg) -def print_master_schema(ns): - """Prints the master schema for the simulation""" +def print_main_schema(ns): + """Prints the main schema for the simulation""" state = SimState(input_file=ns.input_file, input_format=ns.format, output_path=ns.output_path, schema_path=ns.schema_path, flat_schema=ns.flat_schema, print_ms=True) @@ -422,7 +422,7 @@ def main(args=None): p = make_parser() ns = p.parse_args(args=args) if(ns.rng_print): - print_master_schema(ns) + print_main_schema(ns) elif ns.input_file is not None: run_simulation(ns) diff --git a/cyclus/memback.pyx b/cyclus/memback.pyx index 9f59a7fa75..3c6106d61e 100644 --- a/cyclus/memback.pyx +++ b/cyclus/memback.pyx @@ -100,7 +100,7 @@ cdef cppclass CyclusMemBack "CyclusMemBack" (cpp_cyclus.RecBackend): if key_exists: pyobval = PyDict_GetItem( this.cache, pyname) pyval = pyobval - results = pyval.append(results, ignore_index=True) + results = pd.concat([pyval, results], ignore_index=True) PyDict_SetItem( this.cache, pyname, results) std_string Name(): diff --git a/cyclus/simstate.py b/cyclus/simstate.py index 4ba3e9da5a..32923c0e81 100644 --- a/cyclus/simstate.py +++ b/cyclus/simstate.py @@ -58,9 +58,9 @@ class SimState(object): The initial registry to start the in-memory backend with. Defaults is True, which stores all of the tables. schema_path : str or None, optional: - The path to the cyclus master schema. + The path to the cyclus main schema. flat_schema : bool, optional - Whether or not to use the flat master simulation schema. + Whether or not to use the flat main simulation schema. frequency : int or float, optional The amount of time [sec] to sleep for in tight loops, default 1 ms. 
repeating_actions : list or None, optional diff --git a/doc/release_notes/v0.3.rst b/doc/release_notes/v0.3.rst index 8cc2454312..2f4c8f7c4a 100644 --- a/doc/release_notes/v0.3.rst +++ b/doc/release_notes/v0.3.rst @@ -85,7 +85,7 @@ New features - updated namespace name to reflect directory name - made mock fac/inst/region/market classes to be used with testing. added the stub facility in a new stub namespace and related tests. - added initial stub directory and adjusted cmake files to include their tests. -- moved all dynamic loading into xml_file_loader. Added a method for listing installed/discoverable dynamic modules to env class. added rng schema methods to test agents and removed rng files from them. removed rng installation from cmake module macro. added master and module schema dumping to cyclus binary. added schema agent test (that schema parses). moved heavy stuff out of xml-file-loader constructor. renamed LoadAll to LoadSim. +- moved all dynamic loading into xml_file_loader. Added a method for listing installed/discoverable dynamic modules to env class. added rng schema methods to test agents and removed rng files from them. removed rng installation from cmake module macro. added main and module schema dumping to cyclus binary. added schema agent test (that schema parses). moved heavy stuff out of xml-file-loader constructor. renamed LoadAll to LoadSim. - i think moving loglevel and the macros into the same namespace encapsulation is more promising... still unable to confirm. - finishes remaining doxygen warnings - I believe that this will fix the warning stemming from logger.h, but I do not see the warning on my machine, so I can't be sure @@ -201,7 +201,7 @@ New features - literal 0 -> 0.0 for fp compares - abs() -> fabs(), types they are a-changing. 
- fixes doc errors, should clear up @gonuke's cron job errors -- fixes master schema building +- fixes main schema building - updated test files so cycamore can build - ran all files in Core dir through astyle for style guide formatting - updated enrichment function names @@ -271,7 +271,7 @@ New features - first cyclus ns changes. - made buildSchema private. Used Agent class module type list instead of custom one. - removed cyclus.rng.in generation - now done dynamically in cyclus core -- modified XML loading to dynamically build the master schema by searching for installed modules +- modified XML loading to dynamically build the main schema by searching for installed modules - created csv backend. - fixed name erro - updated setup with localdir as default for some params diff --git a/docker/Dockerfile b/docker/Dockerfile new file mode 100644 index 0000000000..baa59ade8b --- /dev/null +++ b/docker/Dockerfile @@ -0,0 +1,129 @@ +ARG pkg_mgr=apt +ARG make_cores=2 + +FROM ubuntu:22.04 as common-base + +ENV TZ=America/Chicago +RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone + +RUN apt update --fix-missing + +FROM common-base as apt-deps + +RUN apt install -y \ + libssh-dev \ + g++ \ + gcc \ + cmake \ + make \ + libxml2-dev \ + libxml++2.6-dev \ + libblas-dev \ + liblapack-dev \ + pkg-config \ + coinor-libcbc-dev \ + coinor-libclp-dev \ + coinor-libcoinutils-dev \ + coinor-libosi-dev \ + libboost-all-dev \ + libhdf5-dev \ + libsqlite3-dev \ + libpcre2-dev \ + gettext-base \ + xz-utils \ + python3-setuptools \ + python3-pytest \ + python3-tables \ + python3-pandas \ + python3-jinja2 \ + cython3 \ + libwebsockets-dev \ + python3-pprintpp \ + && apt clean -y all + +RUN update-alternatives --install /usr/bin/python python /usr/bin/python3 10 + +FROM common-base as conda-deps + +RUN apt install -y \ + wget \ + bzip2 \ + ca-certificates \ + && apt clean -y all + +RUN echo 'export PATH=/opt/conda/bin:$PATH' > /etc/profile.d/conda.sh && \ + wget 
--quiet https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O ~/miniconda.sh && \ + /bin/bash ~/miniconda.sh -b -p /opt/conda && \ + rm ~/miniconda.sh + +ENV PATH /opt/conda/bin:$PATH + +RUN conda config --add channels conda-forge +RUN conda update -n base -c defaults conda +RUN conda install -y mamba +RUN conda update -y --all && \ + mamba install -y \ + openssh \ + gxx_linux-64 \ + gcc_linux-64 \ + cmake \ + make \ + docker-pycreds \ + git \ + xo \ + python-json-logger \ + glib \ + libxml2 \ + libxmlpp \ + libblas \ + libcblas \ + liblapack \ + pkg-config \ + coincbc \ + boost-cpp \ + hdf5 \ + sqlite \ + pcre \ + gettext \ + bzip2 \ + xz \ + setuptools \ + pytest \ + pytables \ + pandas \ + jinja2 \ + "cython<3" \ + websockets \ + pprintpp \ + && \ + mamba install -y --force-reinstall libsqlite && \ + conda clean -y --all +ENV CC /opt/conda/bin/x86_64-conda_cos6-linux-gnu-gcc +ENV CXX /opt/conda/bin/x86_64-conda_cos6-linux-gnu-g++ +ENV CPP /opt/conda/bin/x86_64-conda_cos6-linux-gnu-cpp +ENV PYTHONPATH "/root/.local/lib/python3.10/site-packages/" + +FROM ${pkg_mgr}-deps as cyclus +ARG make_cores=2 + +COPY . /cyclus +WORKDIR /cyclus + +# Uncomment the following line to run cmake in verbose mode. +# This is sometimes useful for debugging. +#ENV VERBOSE=1 + +# You may add the option "--cmake-debug" to the following command +# for further CMake debugging. 
+RUN python install.py -j ${make_cores} --build-type=Release --core-version 999999.999999 +ENV PATH /root/.local/bin:$PATH +ENV LD_LIBRARY_PATH /root/.local/lib:/root/.local/lib/cyclus + +FROM cyclus as cyclus-test + +RUN cyclus_unit_tests + +FROM cyclus-test as cyclus-pytest + +RUN cd tests && pytest + diff --git a/docker/README.md b/docker/README.md index 7639ab3634..7c6f4917d6 100644 --- a/docker/README.md +++ b/docker/README.md @@ -1,46 +1,12 @@ +This Dockerfile supports two paths for building a docker image with Cyclus, one +that uses `conda` to install depenendencies and one that uses Ubuntu's `apt` to +install dependencies. -Each subdirectory contains a dockerfile that does something useful: +All of the docker images must be built from the top level directory of the +Cyclus code space. -* ``cyclus-deps`` builds all cyclus dependencies. This is used as the base - image for other dockerfile's that build cyclus and should be updated only - occasionally as needed and pushed up to the docker hub - ``cyclus/cyclus-deps`` repository: +To build the docker image in using `conda`: +`docker build --build-arg pkg_mgr=conda -f docker/Dockerfile .` - ``` - cd cyclus-deps - docker build -t cyclus/cyclus-deps:X.X . - docker tag cyclus/cyclus-deps:X.X cyclus/cyclus-deps:latest - docker push cyclus/cyclus-deps - ``` - -* ``cyclus-ci`` is the dockerfile used for running cyclus on a continuous - integration service. This dockerfile assumes that the current working - directory is a cyclus repository - and that version of cyclus is copied into - the docker container and used for the build. The dockerfile in the cyclus - repository root is a symbolic link to this dockerfile. - -* ``deb-ci`` is the dockerfile used to generate the Cyclus debian installation - package. 
It contains 2 files, one Dockerfile_template and a script which: - * replace the template variables to the appropriate values: the major ubuntu - version require (provided as an parameter) and the commit hash tag in the - Dockerfile_template (recover by the script), - * runs the Dockerfile, - * extract the debian package, - * upload it on dory.fuelcycle.org. - -The script ``dockercyclus.sh`` downloads (if not already downloaded before) -the cyclus/cycamore docker image and passes all given arguments to an cyclus -command run inside a docker container. The current working directory is also -mounted inside the docker container so files in it (recursively) can be seen -by cyclus, and all output files end up in the host working directory. This is -an example of an alternative distribution mechanism for cyclus. - -The ``dockerbuild.sh`` script assumes the current working directory contains -the cyclus core repository and mounts it inside a docker container and builds -and installs the cyclus kernel. The built docker image is saved as -cyclus/cyclus:local - which can be used to run tests, etc. This could become -an easy way to onboard new kernel developers - they no longer have to set up a -fancy environment - all they have to do is clone cyclus and install docker. - -The ``dockerinst.sh`` script is similar to ``dockerbuild.sh`` except that it -uses ``install.py`` to build and install cyclus. \ No newline at end of file +To build the docker image in using `apt`: +`docker build --build-arg pkg_mgr=apt -f docker/Dockerfile .` diff --git a/docker/cyclus-ci/Dockerfile b/docker/cyclus-ci/Dockerfile deleted file mode 100644 index 18849df4aa..0000000000 --- a/docker/cyclus-ci/Dockerfile +++ /dev/null @@ -1,15 +0,0 @@ -FROM cyclus/cyclus-deps - -COPY . /cyclus -WORKDIR /cyclus - -# Uncomment the following line to run cmake in verbose mode. -# This is sometimes useful for debugging. 
-#ENV VERBOSE=1 - -# You may add the option "--cmake-debug" to the following command -# for further CMake debugging. -RUN python install.py -j 2 --build-type=Release --core-version 999999.999999 \ - -DBLAS_LIBRARIES="/opt/conda/lib/libblas.so" \ - -DLAPACK_LIBRARIES="/opt/conda/lib/liblapack.so" - diff --git a/docker/cyclus-deps/Dockerfile b/docker/cyclus-deps/Dockerfile deleted file mode 100644 index 6f83f90ec6..0000000000 --- a/docker/cyclus-deps/Dockerfile +++ /dev/null @@ -1,83 +0,0 @@ -FROM debian:9 - -RUN apt-get update --fix-missing && apt-get install -y wget bzip2 ca-certificates \ - libglib2.0-0 libxext6 libsm6 libxrender1 - -RUN echo 'export PATH=/opt/conda/bin:$PATH' > /etc/profile.d/conda.sh && \ - wget --quiet https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O ~/miniconda.sh && \ - /bin/bash ~/miniconda.sh -b -p /opt/conda && \ - rm ~/miniconda.sh - -RUN apt-get install -y curl grep sed dpkg && \ - TINI_VERSION=`curl https://github.com/krallin/tini/releases/latest | grep -o "/v.*\"" | sed 's:^..\(.*\).$:\1:'` && \ - curl -L "https://github.com/krallin/tini/releases/download/v${TINI_VERSION}/tini_${TINI_VERSION}.deb" > tini.deb && \ - dpkg -i tini.deb && \ - rm tini.deb && \ - apt-get clean - -ENV PATH /root/.local/bin:/opt/conda/bin:$PATH - -ENTRYPOINT [ "/usr/bin/tini", "--" ] -CMD [ "/bin/bash" ] - -# -# apt packages -# -RUN apt-get update && \ - apt-get install -y openssh-client \ - git \ - vim nano && \ - apt-get clean - -# -# conda packages -# -RUN conda config --add channels conda-forge -RUN conda update -n base -c defaults conda -RUN conda update -y --all && \ - conda install -y \ - openssh \ - gxx_linux-64 \ - gcc_linux-64 \ - cmake \ - make \ - docker-pycreds \ - git \ - xo \ - python-json-logger \ - glib=2.56 \ - libxml2 \ - libxmlpp \ - libblas \ - libcblas \ - liblapack \ - pkg-config \ - coincbc=2.9 \ - boost-cpp \ - hdf5 \ - sqlite \ - pcre \ - gettext \ - bzip2 \ - xz \ - setuptools \ - nose \ - pytables \ - pandas 
\ - jinja2 \ - "cython<=0.28.5" \ - websockets \ - pprintpp \ - && \ - conda clean -y --all -ENV CC /opt/conda/bin/x86_64-conda_cos6-linux-gnu-gcc -ENV CXX /opt/conda/bin/x86_64-conda_cos6-linux-gnu-g++ -ENV CPP /opt/conda/bin/x86_64-conda_cos6-linux-gnu-cpp -ENV PYTHONPATH "/home/conda/.local/lib/python3.7/site-packages/:/root/.local/lib/python3.7/site-packages/" -# required for the nosetest -ENV PYTHONWARNINGS ignore -RUN mkdir -p /root/.local/lib/python3.7/site-packages/ -# -# pip packages to overide conda -# -RUN pip install docker diff --git a/housekeeping_script/changelog_test.sh b/housekeeping_script/changelog_test.sh deleted file mode 100755 index d69c6a4f28..0000000000 --- a/housekeeping_script/changelog_test.sh +++ /dev/null @@ -1,29 +0,0 @@ -#!/bin/sh - -OWNER=cyclus -REPO=cyclus -CHANGELOG_FILE=CHANGELOG.rst - -# default main repo setup -PR_BASE_BRANCH=master -echo "Testing changelog against $PR_BASE_BRANCH branch" - -master_repo="https://github.com/${OWNER}/${REPO}.git" -default_branch=$PR_BASE_BRANCH - -# setup temp remote -git_remote_name=ci_changelog_`git log --pretty=format:'%h' -n 1` -git remote add ${git_remote_name} ${master_repo} -git fetch ${git_remote_name} - -# diff against temp remote -added_changelog_entry=$((`git diff ${git_remote_name}/${default_branch} -- ${CHANGELOG_FILE} |wc -l`)) - -# cleaning temp remote -git remote remove ${git_remote_name} - -# analysing the diff and returning accordingly -if [ $added_changelog_entry -eq 0 ]; then - echo "No new changelog entry detected, please update the ${CHANGELOG_FILE} according to your submited changes!" 
- exit 1 -fi diff --git a/src/infile_tree.cc b/src/infile_tree.cc index 252f1ce616..1540b4c7ff 100644 --- a/src/infile_tree.cc +++ b/src/infile_tree.cc @@ -10,6 +10,14 @@ namespace cyclus { +#if LIBXMLXX_MAJOR_VERSION == 2 + typedef xmlpp::NodeSet NodeSet; + typedef xmlpp::Node::NodeList const_NodeList; +#else + typedef xmlpp::Node::NodeSet NodeSet; + typedef xmlpp::Node::const_NodeList const_NodeList; +#endif + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - InfileTree::InfileTree(XMLParser& parser) : current_node_(0) { current_node_ = parser.Document()->get_root_node(); @@ -53,7 +61,6 @@ int InfileTree::NMatches(std::string query) { // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - std::string InfileTree::GetString(std::string query, int index) { using xmlpp::Node; - using xmlpp::NodeSet; using xmlpp::TextNode; using xmlpp::Element; const NodeSet nodeset = current_node_->find(query); @@ -73,7 +80,7 @@ std::string InfileTree::GetString(std::string query, int index) { " is not an Element node."); } - const Node::NodeList nodelist = element->get_children(); + const const_NodeList nodelist = element->get_children(); if (nodelist.size() != 1) { throw ValueError("Element node " + element->get_name() + " has more content than expected."); @@ -92,7 +99,7 @@ std::string InfileTree::GetString(std::string query, int index) { // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - std::string InfileTree::GetElementName(int index) { using xmlpp::Node; - using xmlpp::NodeSet; + std::vector elements; const Node::NodeList nodelist = current_node_->get_children(); Node::NodeList::const_iterator it; @@ -112,7 +119,7 @@ std::string InfileTree::GetElementName(int index) { // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - InfileTree* InfileTree::GetEngineFromQuery(std::string query, int index) { using xmlpp::Node; - using xmlpp::NodeSet; + const NodeSet nodeset = current_node_->find(query); if (nodeset.size() < 
index + 1) { diff --git a/src/pyinfile.pyx b/src/pyinfile.pyx index 993a5c7253..db50e5c92f 100644 --- a/src/pyinfile.pyx +++ b/src/pyinfile.pyx @@ -38,7 +38,7 @@ cdef public std_string py_to_json "CyclusPyToJson" (std_string cpp_infile) excep raise RuntimeError('simulation not found in python file.') if callable(sim): sim = sim() - from collections import Mapping + from collections.abc import Mapping if isinstance(sim, str): pass # assume in JSON format elif isinstance(sim, bytes): diff --git a/src/query_backend.h b/src/query_backend.h index e70783e25c..51725ea88b 100644 --- a/src/query_backend.h +++ b/src/query_backend.h @@ -23,7 +23,7 @@ namespace cyclus { -/// This is the master list of all supported database types. All types must +/// This is the primary list of all supported database types. All types must /// have a constant length unless they begin with the prefix VL_, which stands /// for "variable length" or are implicitly variable length, such as blob. /// Changing the order here may invalidate previously created databases. 
diff --git a/src/toolkit/infile_converters.cc b/src/toolkit/infile_converters.cc index c7f88e5f8f..ac49b780cd 100644 --- a/src/toolkit/infile_converters.cc +++ b/src/toolkit/infile_converters.cc @@ -157,7 +157,7 @@ std::string XmlToJson(std::string s) { jroot[rootname] = Value(Json::objectValue); AddXmlToJson(&xroot, jroot[rootname], rootname); Json::CustomWriter writer = Json::CustomWriter("{", "}", "[", "]", ": ", - ", ", " ", 80); + ",", " ", 1); return writer.write(jroot); } diff --git a/src/xml_parser.cc b/src/xml_parser.cc index 02a1949bbb..55975a6dbb 100644 --- a/src/xml_parser.cc +++ b/src/xml_parser.cc @@ -10,6 +10,14 @@ namespace cyclus { +#if LIBXMLXX_MAJOR_VERSION == 2 + typedef xmlpp::NodeSet NodeSet; + typedef xmlpp::Node::NodeList const_NodeList; +#else + typedef xmlpp::Node::NodeSet NodeSet; + typedef xmlpp::Node::const_NodeList const_NodeList; +#endif + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - XMLParser::XMLParser() : parser_(NULL) { parser_ = new xmlpp::DomParser(); @@ -53,13 +61,19 @@ void XMLParser::Validate(const std::stringstream& xml_schema_snippet) { xmlpp::Document* XMLParser::Document() { xmlpp::Document* doc = parser_->get_document(); // This adds the capability to have nice include semantics - doc->process_xinclude(); + bool generate_xinclude_nodes = true; + bool fixup_base_uris = false; + #if LIBXMLXX_MAJOR_VERSION == 2 + doc->process_xinclude(generate_xinclude_nodes); + #else + doc->process_xinclude(generate_xinclude_nodes, fixup_base_uris); + #endif // This removes the stupid xml:base attribute that including adds, // but which is unvalidatable. The web is truly cobbled together // by a race of evil gnomes. 
xmlpp::Element* root = doc->get_root_node(); - xmlpp::NodeSet have_base = root->find("//*[@xml:base]"); - xmlpp::NodeSet::iterator it = have_base.begin(); + NodeSet have_base = root->find("//*[@xml:base]"); + NodeSet::iterator it = have_base.begin(); for (; it != have_base.end(); ++it) { reinterpret_cast(*it)->remove_attribute("base", "xml"); } diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index ffec6ecfaa..1e54fddd4d 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -110,13 +110,14 @@ INSTALL(FILES ${test_agents} # read tests after building the driver, and add them to ctest set(tgt "cyclus_unit_tests") -add_custom_command(TARGET ${tgt} - POST_BUILD + +add_custom_target( + ${tgt}-post-build + DEPENDS ${tgt} COMMAND python "${CMAKE_CURRENT_SOURCE_DIR}/generate_gtest_macros.py" "--executable=${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${tgt}" "--output=${CYCLUS_BINARY_DIR}/CTestTestfile.cmake" COMMENT "adding tests from ${tgt}" - DEPENDS VERBATIM ) diff --git a/tests/cycpp_tests.py b/tests/cycpp_test.py similarity index 88% rename from tests/cycpp_tests.py rename to tests/cycpp_test.py index b201294d28..78c1f6bb64 100644 --- a/tests/cycpp_tests.py +++ b/tests/cycpp_test.py @@ -3,11 +3,11 @@ import uuid import pprint import tempfile +import pytest + from collections import OrderedDict from subprocess import Popen, PIPE, STDOUT -import nose -from nose.tools import assert_equal, assert_true, assert_false, assert_raises cycdir = os.path.dirname(os.path.dirname(__file__)) sys.path.insert(0, os.path.join(cycdir, 'cli')) @@ -28,8 +28,6 @@ import cycpp -assert_equal.__self__.maxDiff = None - class MockMachine(object): def __init__(self): self.depth = 0 @@ -65,117 +63,118 @@ def test_tffilt(): """Test TypedefFilter""" m = MockMachine() f = TypedefFilter(m) - yield assert_false, f.isvalid("mis typedef kind") - yield assert_false, f.isvalid("typedef kind") + assert not f.isvalid("mis typedef kind") + assert not f.isvalid("typedef kind") statement, sep = "typedef 
double db", ";" - yield assert_true, f.isvalid(statement) + assert f.isvalid(statement) f.transform(statement, sep) - yield assert_equal, len(m.aliases), 1 - yield assert_equal, (0, "double", "db"), m.aliases.pop() + assert len(m.aliases) == 1 + assert (0, "double", "db") == m.aliases.pop() statement, sep = "typedef struct {int a; int b;} S, *pS", ";" - yield assert_true, f.isvalid(statement) + assert f.isvalid(statement) def test_uffilt(): """Test UsingFilter""" m = MockMachine() f = UsingFilter(m) - yield assert_false, f.isvalid("not using namespace") + assert not f.isvalid("not using namespace") statement, sep = "using std::cout", "" - yield assert_true, f.isvalid(statement) + assert f.isvalid(statement) f.transform(statement, sep) - yield assert_equal, len(m.aliases), 1 - yield assert_equal, (0, "std::cout", "cout"), m.aliases.pop() + assert len(m.aliases) == 1 + assert (0, "std::cout", "cout") == m.aliases.pop() def test_nsfilt(): """Test NamespaceFilter""" m = MockMachine() f = NamespaceFilter(m) - yield assert_false, f.isvalid("olzhas is not a namespace") + assert not f.isvalid("olzhas is not a namespace") # anonymous namespaces statement, sep = " namespace ", "{" - yield assert_true, f.isvalid(statement) + assert f.isvalid(statement) f.transform(statement, sep) - yield assert_equal, len(m.namespaces), 1 - yield assert_equal, m.namespaces[0], (0, '') + assert len(m.namespaces) == 1 + assert m.namespaces[0] == (0, '') f.revert(statement, sep) - yield assert_equal, len(m.namespaces), 0 + assert len(m.namespaces) == 0 # nymous namespace statement, sep = "namespace gorgus ", "{" - yield assert_true, f.isvalid(statement) + assert f.isvalid(statement) f.transform(statement, sep) - yield assert_equal, len(m.namespaces), 1 - yield assert_equal, m.namespaces[0], (0, "gorgus") + assert len(m.namespaces) == 1 + assert m.namespaces[0] == (0, "gorgus") f.revert(statement, sep) - yield assert_equal, len(m.namespaces), 0 + assert len(m.namespaces) == 0 def test_unfilt(): 
"""Test UsingNamespaseFilter""" m = MockMachine() f = UsingNamespaceFilter(m) - yield assert_false, f.isvalid("using cycamore") + assert not f.isvalid("using cycamore") statement, sep = "using namespace std", "" - yield assert_true, f.isvalid(statement) + assert f.isvalid(statement) f.transform(statement, sep) - yield assert_equal, len(m.using_namespaces), 1 - yield assert_equal, (0, "std"), m.using_namespaces.pop() + assert len(m.using_namespaces) == 1 + assert (0, "std") == m.using_namespaces.pop() f.revert(statement, sep) - yield assert_equal, len(m.using_namespaces), 0 + assert len(m.using_namespaces) == 0 def test_nafilter(): """Test NamespaceAliasFilter""" m = MockMachine() f = NamespaceAliasFilter(m) - yield assert_false, f.isvalid("namespace cycamore") + assert not f.isvalid("namespace cycamore") statement, sep = "namespace cycamore = cm", "" - yield assert_true, f.isvalid(statement) + assert f.isvalid(statement) f.transform(statement, sep) - yield assert_equal, len(m.aliases), 1 - yield assert_equal, (0, "cm", "cycamore"), m.aliases.pop() + assert len(m.aliases) == 1 + assert (0, "cm", "cycamore") == m.aliases.pop() def test_cfilter(): """Test ClassFilter""" m = MockMachine() f = ClassFilter(m) - yield assert_false, f.isvalid("class ") + assert not f.isvalid("class ") statement, sep = "class Cyclus", "" - yield assert_true, f.isvalid(statement) + assert f.isvalid(statement) f.transform(statement, sep) - yield assert_equal, len(m.classes), 1 - yield assert_equal, m.classes[0], (0, "Cyclus") - yield assert_equal, m.access[tuple(m.classes)], "private" + assert len(m.classes) == 1 + assert m.classes[0] == (0, "Cyclus") + assert m.access[tuple(m.classes)] == "private" f.revert(statement, sep) - yield assert_equal, len(m.classes), 0 + assert len(m.classes) == 0 def test_afilter(): """Test AccessFilter""" m = MockMachine() f = AccessFilter(m) - yield assert_false, f.isvalid("new private") + assert not f.isvalid("new private") statement, sep = "private:", "" - 
yield assert_true, f.isvalid(statement) + assert f.isvalid(statement) f.transform(statement, sep) - yield assert_equal, m.access[tuple(m.classes)], "private" + assert m.access[tuple(m.classes)] == "private" def test_synerror(): """Test PragmaCyclusErrorFilter""" m = MockMachine() f = PragmaCyclusErrorFilter(m) - yield assert_false, f.isvalid("#pragma cyclus var {}") - yield assert_false, f.isvalid("#pragma cyclus") + assert not f.isvalid("#pragma cyclus var {}") + assert not f.isvalid("#pragma cyclus") - yield assert_true, f.isvalid('#pragma cyclus nooooo') + assert f.isvalid('#pragma cyclus nooooo') statement, sep = "#pragma cyclus var{}", "\n" - yield assert_true, f.isvalid(statement) - yield assert_raises, SyntaxError, f.transform, statement, sep + assert f.isvalid(statement) + with pytest.raises(SyntaxError): + f.transform(statement, sep) # # pass 2 Filters @@ -184,26 +183,26 @@ def test_vdecorfilter(): """Test VarDecorationFilter""" m = MockMachine() f = VarDecorationFilter(m) - yield assert_false, f.isvalid("#pragma cyclus") + assert not f.isvalid("#pragma cyclus") statement, sep = "#pragma cyclus var {'name': 'James Bond'} ", "\n" - yield assert_true, f.isvalid(statement) + assert f.isvalid(statement) f.transform(statement, sep) - yield assert_equal, m.var_annotations, {'name': 'James Bond'} + assert m.var_annotations == {'name': 'James Bond'} def test_vdeclarfilter(): """Test VarDeclarationFilter""" m = MockMachine() f = VarDeclarationFilter(m) - yield assert_false, f.isvalid("one ") + assert not f.isvalid("one ") statement, sep = "one two", "\n" - yield assert_true, f.isvalid(statement) + assert f.isvalid(statement) m.classes = [(0, "trader")] m.access = {"trader": "public"} # m.var_annotations = {'name': 'James Bond'} f.transform(statement, sep) - yield assert_equal, m.var_annotations, None + assert m.var_annotations == None def test_vdeclarfilter_canonize_alias(): m = MockMachine() @@ -231,7 +230,7 @@ def test_vdeclarfilter_canonize_alias(): ] for exp, 
t, name, alias in cases: obs = f.canonize_alias(t, name, alias=alias) - yield assert_equal, exp, obs + assert exp == obs def test_vdeclarfilter_canonize_ui(): m = MockMachine() @@ -256,33 +255,33 @@ def test_vdeclarfilter_canonize_ui(): ] for exp, t, name, x in cases: obs = f.canonize_uilabel(t, name, uilabel=x) - yield assert_equal, exp, obs + assert exp == obs obs = f.canonize_tooltip(t, name, tooltip=x) - yield assert_equal, exp, obs + assert exp == obs def test_execfilter(): """Test ExecFilter""" m = MockMachine() f = ExecFilter(m) - yield assert_false, f.isvalid("#pragma cyclus") + assert not f.isvalid("#pragma cyclus") statement, sep = "#pragma cyclus exec x = 42", "\n" - yield assert_true, f.isvalid(statement) + assert f.isvalid(statement) f.transform(statement, sep) # What are the other possible tests - yield assert_equal, m.execns["x"], 42 + assert m.execns["x"] == 42 def test_notefilter(): """Test NoteDecorationFilter""" m = MockMachine() f = NoteDecorationFilter(m) - yield assert_false, f.isvalid("#pragma cyclus") + assert not f.isvalid("#pragma cyclus") statement, sep = "#pragma cyclus note {'doc': 'string'} ", "\n" - yield assert_true, f.isvalid(statement) + assert f.isvalid(statement) f.transform(statement, sep) - yield assert_equal, m.context['']['doc'], 'string' + assert m.context['']['doc'] == 'string' class MockAliasCodeGenMachine(object): """Mock machine for testing aliasing on pass 3 filters""" @@ -328,7 +327,7 @@ def test_canon_type(): ] for t, exp in cases: obs = sa.canonize_type(t) - yield assert_equal, exp, obs + assert exp == obs # # pass 3 Filters @@ -373,7 +372,7 @@ def test_clonefilter(): impl = f.impl() exp_impl = " MyFactory* m = new MyFactory(context());\n" + \ " m->InitFrom(this);\n return m;\n" - assert_equal(exp_impl, impl) + assert exp_impl == impl def test_ifcfilter(): """Test InitFromCopyFilter""" @@ -383,7 +382,7 @@ def test_ifcfilter(): args = f.methodargs() exp_args = "MyFactory* m" - yield assert_equal, exp_args, args + 
assert exp_args == args impl = f.impl() exp_impl = (' int rawcycpp_shape_y[1] = {42};\n' @@ -391,7 +390,7 @@ def test_ifcfilter(): 'rawcycpp_shape_y + 1);\n' " x = m->x;\n" "y=m -> y;\n") - yield assert_equal, exp_impl, impl + assert exp_impl == impl def test_ifdbfilter(): """Test InitFromDbFilter""" @@ -401,7 +400,7 @@ def test_ifdbfilter(): args = f.methodargs() exp_args = "cyclus::QueryableBackend* b" - yield assert_equal, exp_args, args + assert exp_args == args impl = f.impl() exp_impl = (' int rawcycpp_shape_y[1] = {42};\n' @@ -410,31 +409,31 @@ def test_ifdbfilter(): ' cyclus::QueryResult qr = b->Query("Info", NULL);\n' ' x = qr.GetVal("x");\n' "WAKKA JAWAKA") - yield assert_equal, exp_impl, impl + assert exp_impl == impl def test_aliasing_schemafilter(): impl = setup_alias(SchemaFilter) - assert_true('foo_alias' in impl) - assert_false('bar_var' in impl) - assert_true('foo_map_alias' in impl) - assert_false('bar_map_var' in impl) + assert('foo_alias' in impl) + assert not('bar_var' in impl) + assert('foo_map_alias' in impl) + assert not('bar_map_var' in impl) def test_aliasing_snapshotfilter(): impl = setup_alias(SnapshotFilter) - assert_false('foo_alias' in impl) - assert_true('bar_var' in impl) - assert_false('foo_map_alias' in impl) - assert_true('bar_map_var' in impl) + assert not('foo_alias' in impl) + assert('bar_var' in impl) + assert not('foo_map_alias' in impl) + assert('bar_map_var' in impl) def test_aliasing_infiletodbfilter(): impl = setup_alias(InfileToDbFilter) - assert_true('foo_alias' in impl) - assert_true('bar_var' in impl) - assert_true('foo_map_alias' in impl) - assert_true('bar_map_var' in impl) + assert('foo_alias' in impl) + assert('bar_var' in impl) + assert('foo_map_alias' in impl) + assert('bar_map_var' in impl) def setup_alias(filt): m = MockAliasCodeGenMachine() @@ -450,14 +449,14 @@ def test_itdbfilter(): args = f.methodargs() exp_args = "cyclus::InfileTree* tree, cyclus::DbInit di" - yield assert_equal, exp_args, args + assert 
exp_args == args impl = f.impl() exp_impl = ( ' int rawcycpp_shape_y[1] = {42};\n cycpp_shape_y = std::vector(rawcycpp_shape_y, rawcycpp_shape_y + 1);\n cyclus::InfileTree* sub = tree->SubTree("config/*");\n int i;\n int n;\n {\n int x_val = cyclus::Query(sub, "x");\n x = x_val;\n }\nTHINGFISH\n di.NewDatum("Info")\n ->AddVal("x", x)\nABSOLUTELY FREE\n ->Record();\n' ) - yield assert_equal, exp_impl, impl + assert exp_impl == impl def test_itdbfilter_val(): """Test InfileToDbFilter._val() Defaults""" @@ -608,7 +607,7 @@ def test_itdbfilter_val(): for t, v, name, uitype, exp in cases: obs = f._val(t, val=v, name=name, uitype=uitype) - yield assert_equal, exp, obs + assert exp == obs def test_schemafilter(): """Test SchemaFilter""" @@ -618,7 +617,7 @@ def test_schemafilter(): args = f.methodargs() exp_args = "" - yield assert_equal, exp_args, args + assert exp_args == args impl = f.impl() exp_impl = (' return ""\n' @@ -628,12 +627,12 @@ def test_schemafilter(): ' " \\n"\n' ' " FREAK OUT\\n"\n' ' "\\n";\n') - yield assert_equal, exp_impl, impl + assert exp_impl == impl # schema type tests - yield assert_equal, 'string', f._type('std::string') - yield assert_equal, 'boolean', f._type('bool') - yield assert_equal, 'token', f._type('std::string', 'token') + assert 'string' == f._type('std::string') + assert 'boolean' == f._type('bool') + assert 'token' == f._type('std::string', 'token') m.context = {"MyFactory": OrderedDict([('vars', OrderedDict([ ('x', {'type': ('std::map', 'int', 'double')}), @@ -658,7 +657,7 @@ def test_schemafilter(): ' " \\n"\n' ' " \\n"\n' ' "\\n";\n') - yield assert_equal, exp_impl, impl + assert exp_impl == impl def test_annotationsfilter(): """Test SchemaFilter""" @@ -668,10 +667,10 @@ def test_annotationsfilter(): args = f.methodargs() exp_args = "" - yield assert_equal, exp_args, args + assert exp_args == args impl = f.impl() - yield assert_true, isinstance(impl, str) + assert isinstance(impl, str) def test_snapshotfilter(): """Test 
SnapshotFilter""" @@ -681,14 +680,14 @@ def test_snapshotfilter(): args = f.methodargs() exp_args = 'cyclus::DbInit di' - yield assert_equal, exp_args, args + assert exp_args == args impl = f.impl() exp_impl = (' di.NewDatum("Info")\n' ' ->AddVal("x", x)\n' 'JUST ANOTHER BAND FROM LA\n' ' ->Record();\n') - yield assert_equal, exp_impl, impl + assert exp_impl == impl def test_sshinvfilter(): """Test SnapshotInvFilter""" @@ -698,19 +697,19 @@ def test_sshinvfilter(): args = f.methodargs() exp_args = '' - yield assert_equal, exp_args, args + assert exp_args == args impl = f.impl() exp_impl = (" cyclus::Inventories invs;\n" " return invs;\n") - yield assert_equal, exp_impl, impl + assert exp_impl == impl f = SnapshotInvFilter(m) f.given_classname = 'MyFactory' f.mode = 'impl' impl = f.impl() exp_impl = (" cyclus::Inventories invs;\n") - yield assert_equal, exp_impl, impl + assert exp_impl == impl def test_intinvfilter(): """Test InitInvFilter""" @@ -720,11 +719,11 @@ def test_intinvfilter(): args = f.methodargs() exp_args = "cyclus::Inventories& inv" - yield assert_equal, exp_args, args + assert exp_args == args impl = f.impl() exp_impl = '' - yield assert_equal, exp_impl, impl + assert exp_impl == impl def test_defpragmafilter(): """Test DefaultPragmaFilter""" @@ -751,7 +750,7 @@ def test_schemafilter_buildschema(): '' '') obs = f._buildschema(cpptype, schematype, uitype, names) - yield assert_equal, exp, obs + assert exp == obs cpptype = ['std::map', 'std::string', ['std::vector', 'double']] names = ['streams', 'name', ['efficiencies', 'val']] @@ -761,7 +760,7 @@ def test_schemafilter_buildschema(): '' '') obs = f._buildschema(cpptype, schematype, uitype, names) - yield assert_equal, exp, obs + assert exp == obs # test item aliasing cpptype = ['std::map', 'std::string', ['std::vector', 'double']] @@ -772,7 +771,7 @@ def test_schemafilter_buildschema(): '' '') obs = f._buildschema(cpptype, schematype, uitype, names) - yield assert_equal, exp, obs + assert exp == obs 
def test_escape_xml(): """Test escape_xml""" @@ -790,7 +789,7 @@ def test_escape_xml(): ' " \\n"\n' \ ' "\\n"' - yield assert_equal, s, got + assert s == got def test_infiletodb_read_member1(): m = MockCodeGenMachine() @@ -902,7 +901,7 @@ def test_infiletodb_read_member1(): #print(gen) #print(exp_gen) - yield assert_equal, exp_gen, gen + assert exp_gen == gen def test_infiletodb_read_member2(): m = MockCodeGenMachine() @@ -966,7 +965,7 @@ def test_infiletodb_read_member2(): # print() # print(gen) # print(exp_gen) - yield assert_equal, exp_gen, gen + assert exp_gen == gen def test_infiletodb_read_map(): m = MockCodeGenMachine() @@ -1003,7 +1002,7 @@ def test_infiletodb_read_map(): ' mymap = mymap_in;\n' ' }\n') - yield assert_equal, exp, obs + assert exp == obs def test_internal_schema(): cases = [ @@ -1050,7 +1049,7 @@ def test_internal_schema(): msg = 'case {0} failed\n ---- got ----\n {1}\n ---- want ----\n {2}'.format(i + 1, impl.replace('\n', '\n '), want.replace('\n', '\n ')) if want != impl: pprint.pprint(impl) - assert_true(False, msg) + assert(False, msg) def test_internal_infiletodb(): # the expected output (i.e. 'want':...) is set as 'throw' if the @@ -1132,7 +1131,7 @@ def test_internal_infiletodb(): except: haderr = True msg = 'case {0} failed: expected raised exception, got none.' 
- assert_true(haderr, msg) + assert(haderr, msg) continue else: impl = f.impl() @@ -1140,7 +1139,7 @@ def test_internal_infiletodb(): msg = 'case {0} failed\n ---- got ----\n {1}\n ---- want ----\n {2}'.format(i + 1, impl.replace('\n', '\n '), want.replace('\n', '\n ')) if want != impl: pprint.pprint(impl) - assert_true(False, msg) + assert(False, msg) def test_nuclide_uitype(): m = MockCodeGenMachine() @@ -1159,7 +1158,7 @@ def test_nuclide_uitype(): ' " \\n"\n' ' " \\n"\n' ' "\\n";\n') - yield assert_equal, exp_impl, impl + assert exp_impl == impl # test infiletodb updates f = InfileToDbFilter(m) @@ -1167,7 +1166,7 @@ def test_nuclide_uitype(): impl = f.impl() exp_impl = ' cyclus::InfileTree* sub = tree->SubTree("config/*");\n int i;\n int n;\n {\n int x_val = pyne::nucname::id(cyclus::Query(sub, "x"));\n x = x_val;\n }\n di.NewDatum("Info")\n ->AddVal("x", x)\n ->Record();\n' - yield assert_equal, exp_impl, impl + assert exp_impl == impl # test bad uitypes values fail m.context = {"MyFactory": OrderedDict([('vars', OrderedDict([ @@ -1176,7 +1175,8 @@ def test_nuclide_uitype(): ])} f = SchemaFilter(m) f.given_classname = 'MyFactory' - yield assert_raises, TypeError, f.impl + with pytest.raises(TypeError): + f.impl() def test_integration(): inf = os.path.join(os.path.dirname(__file__), 'cycpp_tests.h') @@ -1188,7 +1188,5 @@ def test_integration(): else: cmd = 'cycpp.py {} -o {} --cpp-path `which g++`'.format(inf, outf.name) p = Popen(cmd, shell=True, stdin=PIPE, stdout=PIPE, stderr=STDOUT, close_fds=True) - assert_equal('', p.stdout.read().decode()) + assert '' == p.stdout.read().decode() -if __name__ == "__main__": - nose.runmodule() diff --git a/tests/hdf5_back_gen_tests.py b/tests/hdf5_back_gen_test.py similarity index 88% rename from tests/hdf5_back_gen_tests.py rename to tests/hdf5_back_gen_test.py index a79bd6da55..dde86939cd 100644 --- a/tests/hdf5_back_gen_tests.py +++ b/tests/hdf5_back_gen_test.py @@ -4,10 +4,9 @@ import subprocess from random import 
randint import uuid -import nose -from nose.plugins.skip import SkipTest import pandas as pd -from pandas.util.testing import assert_frame_equal +from pandas.testing import assert_frame_equal +import pytest from cyclus.lib import Hdf5Back, Recorder import cyclus.typesystem as ts @@ -55,6 +54,8 @@ def setup(): CANON_TO_DB[canon] = db CANON_TO_VL[canon] = is_vl +setup() + def make_bytes(string): return string.encode() @@ -241,39 +242,41 @@ def get_shape(meta): shape.extend(get_shape(i)) return shape + +@pytest.fixture(params=CANON_TYPES) +def canon_type(request): + ret = request.param + yield ret + ROW_NUM = 3 PATH = 'gen_db.h5' -def generate_and_test(): +def test_generate(canon_type): """Generate and run tests for supported Hdf5 datatypes.""" if sys.version_info[0] == 2: msg = 'Hdf5 backend gen tests do not support Python 2.x' - raise SkipTest(msg) + pytest.skip(msg) if os.path.isfile(PATH): os.remove(PATH) - for i in CANON_TYPES: - print(CANON_TO_DB[i],'\n') - rec = Recorder(inject_sim_id=False) - back = Hdf5Back(PATH) - rec.register_backend(back) - data_meta = generate_meta(i) - shape = get_shape(data_meta) - print("shape: ", shape) - data = [] - for j in range(ROW_NUM): - data.append(populate(data_meta)) - exp = pd.DataFrame({'col0': data}, columns=['col0']) - print("expected: \n", exp) - for j in data: - d = rec.new_datum("test0") - d.add_val("col0", j, shape=shape, type=ts.IDS[CANON_TO_DB[i]]) - d.record() - rec.flush() - obs = back.query("test0") - print("observed: \n", obs) - yield assert_frame_equal, exp, obs - rec.close() - os.remove(PATH) + print(CANON_TO_DB[canon_type],'\n') + rec = Recorder(inject_sim_id=False) + back = Hdf5Back(PATH) + rec.register_backend(back) + data_meta = generate_meta(canon_type) + shape = get_shape(data_meta) + print("shape: ", shape) + data = [] + for j in range(ROW_NUM): + data.append(populate(data_meta)) + exp = pd.DataFrame({'col0': data}, columns=['col0']) + print("expected: \n", exp) + for j in data: + d = 
rec.new_datum("test0") + d.add_val("col0", j, shape=shape, type=ts.IDS[CANON_TO_DB[canon_type]]) + d.record() + rec.flush() + obs = back.query("test0") + print("observed: \n", obs) + assert_frame_equal, exp, obs + rec.close() -if __name__ == "__main__": - nose.runmodule() diff --git a/tests/test_abi.py b/tests/test_abi.py index 3eb100fc17..e772ef933c 100644 --- a/tests/test_abi.py +++ b/tests/test_abi.py @@ -1,10 +1,8 @@ import os import sys import subprocess +import pytest -import nose -from nose.tools import assert_equal, assert_true, assert_false, assert_raises -from nose.plugins.skip import SkipTest cycdir = os.path.dirname(os.path.dirname(__file__)) reldir = os.path.join(cycdir, 'release') @@ -19,20 +17,18 @@ def test_abi_stability(): - raise SkipTest('manually remove this skip to test ABI stability') + pytest.skip('manually remove this skip to test ABI stability') if smbchk is None: - raise SkipTest('Could not import smbchk!') + pytest.skip('Could not import smbchk!') if os.name != 'posix': - raise SkipTest('can only check for ABI stability on posix systems.') + pytest.skip('can only check for ABI stability on posix systems.') libcyc = os.path.join(cycdir, 'build', 'lib', 'libcyclus.so') if not os.path.exists(libcyc): - raise SkipTest('libcyclus could not be found, ' + pytest.skip('libcyclus could not be found, ' 'cannot check for ABI stability') args = '--update -t HEAD --no-save --check'.split() with tools.indir(reldir): obs = smbchk.main(args=args) - assert_true(obs) + assert(obs) -if __name__ == "__main__": - nose.runmodule() diff --git a/tests/test_bear_deploy.py b/tests/test_bear_deploy.py index d797014263..8756f97dd2 100644 --- a/tests/test_bear_deploy.py +++ b/tests/test_bear_deploy.py @@ -3,7 +3,6 @@ import json import subprocess -from nose.tools import assert_in, assert_true, assert_greater_equal inputfile = { 'simulation': { @@ -55,11 +54,11 @@ def test_bear_deploy(): s = subprocess.check_output(['cyclus', '-o', 'bears.h5', 'bears.json'], 
universal_newlines=True, env=env) # test that the institution deploys a BearStore - assert_in("New fac: BearStore", s) + assert ("New fac: BearStore" in s) # test that the first agents exist with right minimum production. agents = re.compile('Agent \d+ 8\.0') all_agents = set(agents.findall(s)) - assert_greater_equal(len(all_agents), 9) + assert (len(all_agents) >= 9) if os.path.exists('bears.json'): os.remove('bears.json') if os.path.exists('bears.h5'): diff --git a/tests/test_cycluslib.py b/tests/test_cycluslib.py index 9f649f950d..12860b9a6f 100644 --- a/tests/test_cycluslib.py +++ b/tests/test_cycluslib.py @@ -3,9 +3,6 @@ import subprocess from functools import wraps -import nose -from nose.tools import assert_equal, assert_less - from cyclus import lib from tools import libcyclus_setup, dbtest @@ -16,7 +13,7 @@ @dbtest def test_name(db, fname, backend): obs = db.name - assert_equal(fname, obs) + assert fname == obs @dbtest @@ -25,23 +22,23 @@ def test_simid(db, fname, backend): simid = df['SimId'] exp = simid[0] for obs in simid: - assert_equal(exp, obs) + assert exp == obs @dbtest def test_conds_ae(db, fname, backend): obs = db.query("AgentEntry", [('Kind', '==', 'Region')]) - assert_equal(1, len(obs)) - assert_equal('Region', obs['Kind'][0]) - assert_equal(':agents:NullRegion', obs['Spec'][0]) + assert 1 == len(obs) + assert 'Region' == obs['Kind'][0] + assert ':agents:NullRegion' == obs['Spec'][0] @dbtest def test_conds_comp(db, fname, backend): conds = [('NucId', '==', 922350000), ('MassFrac', '<=', 0.0072)] df = db.query("Compositions", conds) - assert_less(0, len(df)) + assert (0 < len(df)) for row in df['MassFrac']: - assert_less(row, 0.00720000001) + assert (row < 0.00720000001) @dbtest @@ -51,25 +48,23 @@ def test_dbopen(db, fname, backend): @dbtest def test_schema(db, fname, backend): schema = db.schema("AgentEntry") - assert_equal(8, len(schema)) + assert 8 == len(schema) cols = ["SimId", "AgentId", "Kind", "Spec", "Prototype", "ParentId", 
"Lifetime", "EnterTime"] dbs = [7, 1, 5, 5, 5, 1, 1, 1] for i, ci in enumerate(schema): - assert_equal("AgentEntry", ci.table) - assert_equal(cols[i], ci.col) - assert_equal(dbs[i], ci.dbtype) - assert_equal(i, ci.index) - assert_equal(1, len(ci.shape)) - assert_equal(-1, ci.shape) + assert "AgentEntry" == ci.table + assert cols[i] == ci.col + assert dbs[i] == ci.dbtype + assert i == ci.index + assert 1 == len(ci.shape) + assert -1 == ci.shape def test_position(): p1 = lib.Position(42.65, 28.6) p2 = lib.Position(42.65, 28.6) d = p1.distance(p2) - assert_equal(0.0, d) + assert 0.0 == d -if __name__ == "__main__": - nose.runmodule() diff --git a/tests/test_dynamic_modules.py b/tests/test_dynamic_modules.py index 3ae18d86f1..2f3e91dce1 100644 --- a/tests/test_dynamic_modules.py +++ b/tests/test_dynamic_modules.py @@ -1,31 +1,30 @@ from __future__ import print_function, unicode_literals -from nose.tools import assert_equal, assert_true from cyclus import lib def test_agent_spec_empty(): spec = lib.AgentSpec() - yield assert_equal, spec.path, "" - yield assert_equal, spec.lib, "" - yield assert_equal, spec.agent, "" - yield assert_equal, spec.alias, "" + assert spec.path == "" + assert spec.lib == "" + assert spec.agent == "" + assert spec.alias == "" def test_agent_spec_spec(): spec = lib.AgentSpec(":wakka:Jawaka") - yield assert_equal, spec.path, "" - yield assert_equal, spec.lib, "wakka" - yield assert_equal, spec.agent, "Jawaka" - yield assert_equal, spec.alias, "Jawaka" + assert spec.path == "" + assert spec.lib == "wakka" + assert spec.agent == "Jawaka" + assert spec.alias == "Jawaka" def test_agent_spec_full(): spec = lib.AgentSpec("why", "not", "me", "?") - yield assert_equal, spec.path, "why" - yield assert_equal, spec.lib, "not" - yield assert_equal, spec.agent, "me" - yield assert_equal, spec.alias, "?" + assert spec.path == "why" + assert spec.lib == "not" + assert spec.agent == "me" + assert spec.alias == "?" 
def test_dm_exists(): @@ -33,4 +32,4 @@ def test_dm_exists(): print(spec) dm = lib.DynamicModule() obs = dm.exists(spec) - assert_true(obs) + assert(obs) diff --git a/tests/test_env.py b/tests/test_env.py index 47db0fbd32..2fba65ba5d 100644 --- a/tests/test_env.py +++ b/tests/test_env.py @@ -1,35 +1,34 @@ """Tests Python wrapping on Env object.""" from __future__ import print_function, unicode_literals -from nose.tools import assert_equal, assert_true from cyclus import lib ENV = lib.Env() def test_path_base(): - yield assert_equal, ENV.path_base("/home/cyclus"), "/home" + assert ENV.path_base("/home/cyclus") == "/home" def test_paths(): - yield assert_true, len(ENV.install_path) > 0 - yield assert_true, len(ENV.build_path) > 0 - yield assert_true, len(ENV.get('HOME')) > 0 - yield assert_true, len(ENV.rng_schema()) > 0 + assert len(ENV.install_path) > 0 + assert len(ENV.build_path) > 0 + assert len(ENV.get('HOME')) > 0 + assert len(ENV.rng_schema()) > 0 # cyclus_path cp = ENV.cyclus_path - yield assert_true, len(cp) > 0 + assert len(cp) > 0 for path in cp: - yield assert_true, isinstance(path, str) - yield assert_true, len(ENV.env_delimiter) > 0 - yield assert_true, len(ENV.path_delimiter) > 0 - yield assert_true, len(ENV.find_module('agents')) > 0 + assert isinstance(path, str) + assert len(ENV.env_delimiter) > 0 + assert len(ENV.path_delimiter) > 0 + assert len(ENV.find_module('libagents.so')) > 0 def test_nuc_data(): - yield assert_true, len(ENV.nuc_data) > 0 + assert len(ENV.nuc_data) > 0 ENV.set_nuc_data_path(ENV.nuc_data) def test_allow_milps(): - assert_true(isinstance(ENV.allow_milps, bool)) + assert(isinstance(ENV.allow_milps, bool)) diff --git a/tests/test_error.py b/tests/test_error.py index faa40e3612..4c5e7c11ac 100644 --- a/tests/test_error.py +++ b/tests/test_error.py @@ -1,22 +1,21 @@ """Tests Python wrapping for cyclus errors.""" from __future__ import print_function, unicode_literals -from nose.tools import assert_equal, assert_true from cyclus 
import lib def test_warn_limit(): orig = lib.get_warn_limit() - yield assert_true, orig >= 0 + assert orig >= 0 lib.set_warn_limit(42) - yield assert_equal, 42, lib.get_warn_limit() + assert 42 == lib.get_warn_limit() lib.set_warn_limit(orig) def test_warn_as_error(): orig = lib.get_warn_as_error() - yield assert_true, isinstance(orig, bool) + assert isinstance(orig, bool) lib.set_warn_as_error(True) - yield assert_true, lib.get_warn_as_error() + assert lib.get_warn_as_error() lib.set_warn_as_error(orig) diff --git a/tests/tests_hdf5_back_gen.py b/tests/test_hdf5_back_gen.py similarity index 91% rename from tests/tests_hdf5_back_gen.py rename to tests/test_hdf5_back_gen.py index 95545992aa..6dfa5fac83 100644 --- a/tests/tests_hdf5_back_gen.py +++ b/tests/test_hdf5_back_gen.py @@ -2,8 +2,6 @@ import sys import pprint -import nose -from nose.tools import assert_equal, assert_true, assert_false, assert_raises cycdir = os.path.dirname(os.path.dirname(__file__)) sys.path.insert(0, os.path.join(cycdir, 'src')) @@ -21,19 +19,19 @@ def test_node_pretty(): exp = "Node()" n = Node() obs = PRETTY.visit(n) - assert_equal(exp, obs) + assert exp == obs def test_var_pretty(): exp = "Var(\n name='x'\n)" n = Var(name="x") obs = PRETTY.visit(n) - assert_equal(exp, obs) + assert exp == obs def test_decl_pretty(): exp = "Decl(\n type='x',\n name='y'\n)" n = Decl(type="x", name="y") obs = PRETTY.visit(n) - assert_equal(exp, obs) + assert exp == obs #cppgen tests @@ -41,43 +39,43 @@ def test_cppgen_var(): exp = "x" n = Var(name="x") obs = CPPGEN.visit(n) - assert_equal(exp, obs) + assert exp == obs def test_cppgen_type(): exp = "std::string" n = Type(cpp="std::string") obs = CPPGEN.visit(n) - assert_equal(exp, obs) + assert exp == obs def test_cppgen_decl(): exp = "std::string s" n = Decl(type=Type(cpp="std::string"), name=Var(name="s")) obs = CPPGEN.visit(n) - assert_equal(exp, obs) + assert exp == obs def test_cppgen_assign(): exp = "x=y" n = Assign(target=Var(name="x"), 
value=Var(name="y")) obs = CPPGEN.visit(n) - assert_equal(exp, obs) + assert exp == obs def test_cppgen_binop(): exp = "x+y" n = BinOp(x=Var(name="x"), op="+", y=Var(name="y")) obs = CPPGEN.visit(n) - assert_equal(exp, obs) + assert exp == obs def test_cppgen_leftunaryop(): exp = "++x" n = LeftUnaryOp(op="++", name=Var(name="x")) obs = CPPGEN.visit(n) - assert_equal(exp, obs) + assert exp == obs def test_cppgen_rightunaryop(): exp = "x++" n = RightUnaryOp(name=Var(name="x"), op="++") obs = CPPGEN.visit(n) - assert_equal(exp, obs) + assert exp == obs def test_cppgen_if(): exp = """ @@ -93,7 +91,7 @@ def test_cppgen_if(): elifs=[(BinOp(x=Var(name="x"), op=">", y=Var(name="y")), [ExprStmt(child=Assign(target=Var(name="x"), value=Raw(code="2")))])],\ el=ExprStmt(child=Assign(target=Var(name="x"), value=Raw(code="3")))) obs = CPPGEN.visit(n) - assert_equal(exp, obs) + assert exp == obs def test_cppgen_for(): exp = """ @@ -110,7 +108,7 @@ def test_cppgen_for(): ExprStmt(child=Assign(target=RightUnaryOp(name=Var(name="c"), op="[i]"), value=BinOp(x=Var(name="a"), op="+", y=Var(name="b"))))]) obs = CPPGEN.visit(n) - assert_equal(exp, obs) + assert exp == obs def test_cppgen_funccall(): exp = """ @@ -119,7 +117,7 @@ def test_cppgen_funccall(): args=[Var(name="a"), Var(name="b")],\ targs=[Type(cpp="std::string"), Var(name="STRING")]) obs = CPPGEN.visit(n) - assert_equal(exp, obs) + assert exp == obs def test_cppgen_case(): exp = """case 3: { @@ -130,7 +128,7 @@ def test_cppgen_case(): body=[ExprStmt(child=RightUnaryOp(name=Var(name="b"), op="++")), Raw(code="break;")]) obs = CPPGEN.visit(n) - assert_equal(exp, obs) + assert exp == obs def test_cppgen_block(): exp = """int x=5; @@ -150,7 +148,7 @@ def test_cppgen_block(): op="+", y=Var(name="y"))))])]) obs = CPPGEN.visit(n) - assert_equal(exp, obs) + assert exp == obs def test_cppgen_funcdef(): exp = """template<> @@ -176,7 +174,7 @@ def test_cppgen_funcdef(): y=Var(name="y"))))], tspecial=True) obs = CPPGEN.visit(n) - 
assert_equal(exp, obs) + assert exp == obs #test various node structures @@ -196,9 +194,9 @@ def test_get_item_size(): db="VECTOR_DOUBLE", canon=("VECTOR","DOUBLE")), [0,1]) - assert_equal(exp1, obs1) - assert_equal(exp2, obs2) - assert_equal(exp3, obs3) + assert exp1 == obs1 + assert exp2 == obs2 + assert exp3 == obs3 diff --git a/tests/test_include_recipe.py b/tests/test_include_recipe.py index ccfb7cd1bd..98da509dc5 100644 --- a/tests/test_include_recipe.py +++ b/tests/test_include_recipe.py @@ -1,6 +1,5 @@ #! /usr/bin/env python -from nose.tools import assert_false, assert_true, assert_equal import os import tables import numpy as np @@ -21,7 +20,7 @@ def test_include_recipe(): holdsrtn = [1] # needed because nose does not send() to test generator outfile = which_outfile() cmd = ["cyclus", "-o", outfile, "--input-file", sim_input] - yield check_cmd, cmd, CWD, holdsrtn + check_cmd(cmd, CWD, holdsrtn) rtn = holdsrtn[0] if rtn != 0: return # don't execute further commands diff --git a/tests/test_inventories.py b/tests/test_inventories.py index 339242f649..3342c59eb8 100644 --- a/tests/test_inventories.py +++ b/tests/test_inventories.py @@ -1,6 +1,5 @@ #! 
/usr/bin/env python -from nose.tools import assert_false, assert_true, assert_equal from numpy.testing import assert_array_equal import os import tables @@ -24,13 +23,13 @@ def test_inventories_false(): holdsrtn = [1] # needed because nose does not send() to test generator outfile = sqliteout cmd = ["cyclus", "-o", outfile, "--input-file", sim] - yield check_cmd, cmd, '.', holdsrtn + check_cmd(cmd, '.', holdsrtn) rtn = holdsrtn[0] if rtn != 0: return # don't execute further commands # Ensure tables do not exist - assert_false, tables_exist(outfile, path) + assert not tables_exist(outfile, path) if tables_exist(outfile, path): print('Inventory table exists despite false entry in control section of input file.') outfile.close() @@ -49,13 +48,13 @@ def test_inventories(): holdsrtn = [1] # needed because nose does not send() to test generator outfile = sqliteout cmd = ["cyclus", "-o", outfile, "--input-file", sim] - yield check_cmd, cmd, '.', holdsrtn + check_cmd(cmd, '.', holdsrtn) rtn = holdsrtn[0] if rtn != 0: return # don't execute further commands # Check if inventory tables exist - assert_true, tables_exist(outfile, path) + assert tables_exist(outfile, path) if not tables_exist(outfile, path): print('Inventory table does not exist despite true entry in control section of input file.') outfile.close() diff --git a/tests/test_logger.py b/tests/test_logger.py index 9c0709432d..b942d3d953 100644 --- a/tests/test_logger.py +++ b/tests/test_logger.py @@ -1,7 +1,6 @@ """Tests Python wrapping on Env object.""" from __future__ import print_function, unicode_literals -from nose.tools import assert_equal, assert_true from cyclus import lib @@ -9,10 +8,10 @@ def test_report_level(): orig = LOGGER.report_level - yield assert_true, orig >= lib.LEV_ERROR - yield assert_true, orig <= lib.LEV_DEBUG5 + assert orig >= lib.LEV_ERROR + assert orig <= lib.LEV_DEBUG5 LOGGER.report_level = 4 - yield assert_true, LOGGER.report_level == 4 + assert LOGGER.report_level == 4 
LOGGER.report_level = orig @@ -30,9 +29,9 @@ def test_no_mem(): def test_to_log_level_string(): s = LOGGER.to_string(lib.LEV_ERROR) - yield assert_true, isinstance(s, str) + assert isinstance(s, str) level = LOGGER.to_log_level(s) - yield assert_true, isinstance(level, int) - yield assert_equal, lib.LEV_ERROR, level + assert isinstance(level, int) + assert lib.LEV_ERROR == level diff --git a/tests/test_lotka_volterra.py b/tests/test_lotka_volterra.py index 8769ca8097..533ada1672 100644 --- a/tests/test_lotka_volterra.py +++ b/tests/test_lotka_volterra.py @@ -1,8 +1,6 @@ #! /usr/bin/env python from __future__ import print_function -import nose -from nose.tools import assert_equal, assert_almost_equal, assert_true from numpy.testing import assert_array_equal import os import tables @@ -32,11 +30,11 @@ def test_predator_only(): outfile = which_outfile() cmd = ["cyclus", "-o", outfile, "--input-file", sim_input] - yield check_cmd, cmd, '.', holdsrtn + check_cmd(cmd, '.', holdsrtn) rtn = holdsrtn[0] print("Confirming valid Cyclus execution.") - assert_equal(rtn, 0) + assert rtn == 0 series = agent_time_series([prey, pred]) print("Prey:", series[prey], "Predators:", series[pred]) @@ -44,8 +42,8 @@ def test_predator_only(): prey_exp = [0 for n in range(10)] pred_exp = [1, 1] + [0 for n in range(8)] - assert_equal(series[prey], prey_exp) - assert_equal(series[pred], pred_exp) + assert series[prey] == prey_exp + assert series[pred] == pred_exp clean_outs() @@ -60,11 +58,11 @@ def test_prey_only(): outfile = which_outfile() cmd = ["cyclus", "-o", outfile, "--input-file", sim_input] - yield check_cmd, cmd, '.', holdsrtn + check_cmd(cmd, '.', holdsrtn) rtn = holdsrtn[0] print("Confirming valid Cyclus execution.") - assert_equal(rtn, 0) + assert rtn == 0 series = agent_time_series([prey, pred]) print("Prey:", series[prey], "Predators:", series[pred]) @@ -72,8 +70,8 @@ def test_prey_only(): prey_exp = [2**n for n in range(10)] pred_exp = [0 for n in range(10)] - 
assert_equal(series[prey], prey_exp) - assert_equal(series[pred], pred_exp) + assert series[prey] == prey_exp + assert series[pred] == pred_exp clean_outs() @@ -97,11 +95,11 @@ def test_lotka_volterra(): outfile = which_outfile() cmd = ["cyclus", "-o", outfile, "--input-file", sim_input] - yield check_cmd, cmd, '.', holdsrtn + check_cmd(cmd, '.', holdsrtn) rtn = holdsrtn[0] print("Confirming valid Cyclus execution.") - assert_equal(rtn, 0) + assert rtn == 0 series = agent_time_series([prey, pred]) print("Prey:", series[prey], "Predators:", series[pred]) @@ -110,9 +108,7 @@ def test_lotka_volterra(): pred_max = series[pred].index(max(series[pred])) print("t_prey_max:", prey_max, "t_pred_max:", pred_max) - assert_true(prey_max < pred_max) + assert(prey_max < pred_max) clean_outs() -if __name__ == "__main__": - nose.runmodule() diff --git a/tests/test_main.py b/tests/test_main.py index c5e0781f58..e0e81d866b 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -1,40 +1,43 @@ """Tests Python main CLI for Cyclus.""" from __future__ import print_function, unicode_literals - -from nose.tools import assert_equal, assert_true +import pytest from cyclus.main import main +cases = [ + ['-V'], + ['--schema'], + ['--flat-schema', '--schema'], + ['--agent-schema', ':agents:KFacility'], + ['--agent-version', ':agents:KFacility'], + ['--schema-path', '/path/to/schema'], + ['--agent-annotations', ':agents:NullRegion'], + ['--agent-listing', ':agents'], + ['--no-agent'], + ['--no-mem'], + ['-v', '5'], + ['--warn-limit', '21'], + ['-p'], + ['--include'], + ['--install-path'], + ['--cmake-module-path'], + ['--build-path'], + ['--rng-schema'], + ['--nuc-data'], + ] + + +@pytest.fixture(params=cases) +def case(request): + yield request.param -def test_main(): - cases = [ - ['-V'], - ['--schema'], - ['--flat-schema', '--schema'], - ['--agent-schema', ':agents:KFacility'], - ['--agent-version', ':agents:KFacility'], - ['--schema-path', '/path/to/schema'], - ['--agent-annotations', 
':agents:NullRegion'], - ['--agent-listing', ':agents'], - ['--no-agent'], - ['--no-mem'], - ['-v', '5'], - ['--warn-limit', '21'], - ['-p'], - ['--include'], - ['--install-path'], - ['--cmake-module-path'], - ['--build-path'], - ['--rng-schema'], - ['--nuc-data'], - ] - for case in cases: - try: - main(args=case) - res = True - except Exception: - res = False - raise - yield assert_true, res +def test_main(case): + try: + main(args=case) + res = True + except Exception: + res = False + raise + assert res diff --git a/tests/test_memback.py b/tests/test_memback.py index 103c7c5379..aac617e3e6 100644 --- a/tests/test_memback.py +++ b/tests/test_memback.py @@ -1,17 +1,13 @@ """Tests Python memory backend.""" from __future__ import print_function, unicode_literals -import nose -from nose.tools import assert_equal, assert_true, assert_is_instance, \ - assert_in, assert_false, assert_not_in, assert_is, assert_is_not - from cyclus import memback from cyclus import lib from cyclus import typesystem as ts import numpy as np import pandas as pd -from pandas.util.testing import assert_frame_equal +from pandas.testing import assert_frame_equal def make_rec_back(inject_sim_id=False): @@ -34,11 +30,11 @@ def test_simple(): exp = pd.DataFrame({"col0": [1], "col1": [42.0], "col2": ["wakka"]}, columns=['col0', 'col1', 'col2']) obs = back.query("test") - yield assert_frame_equal, exp, obs + assert_frame_equal, exp, obs rec.close() # test covert to JSON - yield assert_is_instance, obs.to_json(), str + assert isinstance(obs.to_json(), str) def test_simple_with_sim_id(): @@ -54,7 +50,7 @@ def test_simple_with_sim_id(): rec.close() # test covert to JSON - yield assert_is_instance, obs.to_json(default_handler=str), str + assert isinstance(obs.to_json(default_handler=str), str) def test_many_rows_one_table(): @@ -99,7 +95,7 @@ def test_two_tables_interleaved(): "col2": ["wakka"*i for i in range(0, n, 2)]}, columns=['col0', 'col1', 'col2']) obs0 = back.query("test0") - yield 
assert_frame_equal, exp0, obs0 + assert_frame_equal, exp0, obs0 exp1 = pd.DataFrame({ "col0": list(range(1, n, 2)), @@ -107,7 +103,7 @@ def test_two_tables_interleaved(): "col2": ["wakka"*i for i in range(1, n, 2)]}, columns=['col0', 'col1', 'col2']) obs1 = back.query("test1") - yield assert_frame_equal, exp1, obs1 + assert_frame_equal, exp1, obs1 rec.close() @@ -134,7 +130,7 @@ def test_three_tables_grouped(): "col2": ["wakka"*i for i in range(n)]}, columns=['col1', 'col2']) obs0 = back.query("test0") - yield assert_frame_equal, exp0, obs0 + assert_frame_equal, exp0, obs0 j = 1 exp1 = pd.DataFrame({ @@ -142,7 +138,7 @@ def test_three_tables_grouped(): "col2": ["wakka"*i for i in range(n)]}, columns=['col0', 'col2']) obs1 = back.query("test1") - yield assert_frame_equal, exp1, obs1 + assert_frame_equal, exp1, obs1 j = 2 exp2 = pd.DataFrame({ @@ -150,7 +146,7 @@ def test_three_tables_grouped(): "col1": [42.0*i*j for i in range(n)]}, columns=['col0', 'col1']) obs2 = back.query("test2") - yield assert_frame_equal, exp2, obs2 + assert_frame_equal, exp2, obs2 rec.close() @@ -236,55 +232,55 @@ def test_many_cols_one_table(): def test_registry_operations(): n = 10 rec, back = make_rec_back() - yield assert_true, back.store_all_tables + assert back.store_all_tables rec.flush() # test empty datalist # test storing only one table back.registry = ["test0"] - yield assert_false, back.store_all_tables - yield assert_is_instance, back.registry, frozenset - yield assert_equal, 1, len(back.registry) - yield assert_equal, 0, len(back.cache) + assert not back.store_all_tables + assert isinstance(back.registry, frozenset) + assert 1 == len(back.registry) + assert 0 == len(back.cache) make_two_interleaved(rec, n) - yield assert_equal, 1, len(back.cache) - yield assert_in, "test0", back.cache + assert 1 == len(back.cache) + assert ("test0" in back.cache) # test removing registry with False back.registry = False - yield assert_false, back.store_all_tables - yield assert_is_instance, 
back.registry, frozenset - yield assert_equal, 0, len(back.cache) + assert not back.store_all_tables + assert isinstance(back.registry, frozenset) + assert 0 == len(back.cache) rec.flush() # test partial registry back.registry = ["test0", "test1"] - yield assert_false, back.store_all_tables - yield assert_is_instance, back.registry, frozenset - yield assert_equal, 2, len(back.registry) - yield assert_equal, 0, len(back.cache) + assert not back.store_all_tables + assert isinstance(back.registry, frozenset) + assert 2 == len(back.registry) + assert 0 == len(back.cache) make_two_interleaved(rec, n) - yield assert_equal, 2, len(back.cache) - yield assert_in, "test0", back.cache - yield assert_in, "test1", back.cache + assert 2 == len(back.cache) + assert ("test0" in back.cache) + assert ("test1" in back.cache) # stop following test1 back.registry = ["test0", "test42", "test43"] - yield assert_equal, 3, len(back.registry) - yield assert_equal, 1, len(back.cache) - yield assert_in, "test0", back.cache - yield assert_not_in, "test1", back.cache + assert 3 == len(back.registry) + assert 1 == len(back.cache) + assert ("test0" in back.cache) + assert ("test1" not in back.cache) # test removing registry with None back.registry = None - yield assert_false, back.store_all_tables - yield assert_is_instance, back.registry, frozenset - yield assert_equal, 0, len(back.cache) + assert not back.store_all_tables + assert isinstance(back.registry, frozenset) + assert 0 == len(back.cache) rec.close() def test_no_fallback(): back = memback.MemBack() - yield assert_is, back.fallback, None - yield assert_is, back.query("yo"), None + assert (back.fallback is None) + assert (back.query("yo") is None) class FallBackend(object): @@ -303,14 +299,14 @@ def query(self, table, conds=None): def test_fallback(): fallback = FallBackend() back = memback.MemBack(fallback=fallback) - yield assert_is_not, back.fallback, None + assert (back.fallback is not None) n = 10 x = pd.DataFrame({ "col0": 
list(range(n)), "col1": [42.0*i for i in range(n)], "col2": ["wakka"*i for i in range(n)]}, columns=['col0', 'col1', 'col2']) - yield assert_frame_equal, x, back.query("yo") + assert_frame_equal, x, back.query("yo") def test_query(): @@ -325,51 +321,46 @@ def test_query(): # test == obs = back.query('x', [('col0', '==', 4)]) - yield assert_equal, 1, len(obs) - yield assert_equal, 4, obs['col0'].loc[4] + assert 1 == len(obs) + assert 4 == obs['col0'].loc[4] # test != obs = back.query('x', [('col2', '!=', 'wakka')]) - yield assert_equal, n-1, len(obs) - yield assert_not_in, 1, obs['col0'] + assert n-1 == len(obs) + assert (1 not in obs['col0']) # test < obs = back.query('x', [('col1', '<', 42.0*6.0)]) - yield assert_equal, 6, len(obs) - yield assert_frame_equal, x[x.col1 < 42.0*6.0], obs + assert 6 == len(obs) + assert_frame_equal, x[x.col1 < 42.0*6.0], obs # test <= obs = back.query('x', [('col1', '<=', 42.0*3.1)]) - yield assert_equal, 4, len(obs) - yield assert_frame_equal, x[x.col1 <= 42.0*3.1], obs + assert 4 == len(obs) + assert_frame_equal, x[x.col1 <= 42.0*3.1], obs # test < obs = back.query('x', [('col1', '>', 42.0*6.0)]) - yield assert_equal, 3, len(obs) - yield assert_frame_equal, x[x.col1 > 42.0*6.0], obs + assert 3 == len(obs) + assert_frame_equal, x[x.col1 > 42.0*6.0], obs # test <= obs = back.query('x', [('col1', '>=', 42.0*3.1)]) - yield assert_equal, 6, len(obs) - yield assert_frame_equal, x[x.col1 >= 42.0*3.1], obs + assert 6 == len(obs) + assert_frame_equal, x[x.col1 >= 42.0*3.1], obs # Test two conds obs = back.query('x', [('col1', '<', 42.0*6.0), ('col1', '>=', 42.0*3.1)]) - yield assert_equal, 2, len(obs) - yield assert_frame_equal, x[(x.col1 < 42.0*6.0) & (x.col1 >= 42.0*3.1)], obs + assert 2 == len(obs) + assert_frame_equal, x[(x.col1 < 42.0*6.0) & (x.col1 >= 42.0*3.1)], obs # Test three conds obs = back.query('x', [('col1', '<', 42.0*6.0), ('col1', '>=', 42.0*3.1), ('col2', '!=', 'wakka')]) - yield assert_equal, 2, len(obs) - yield 
assert_frame_equal, x[(x.col1 < 42.0*6.0) & (x.col1 >= 42.0*3.1)], obs + assert 2 == len(obs) + assert_frame_equal, x[(x.col1 < 42.0*6.0) & (x.col1 >= 42.0*3.1)], obs # test convert to JSON obs.to_json() - - - -if __name__ == "__main__": - nose.runmodule() \ No newline at end of file diff --git a/tests/test_minimal_cycle.py b/tests/test_minimal_cycle.py index c6431e1d72..536c31115f 100644 --- a/tests/test_minimal_cycle.py +++ b/tests/test_minimal_cycle.py @@ -1,15 +1,14 @@ #! /usr/bin/env python -import nose -from nose.tools import assert_equal, assert_almost_equal, assert_true -from nose.plugins.skip import SkipTest from numpy.testing import assert_array_equal import os import sqlite3 import tables import numpy as np +import pytest + from tools import check_cmd, cyclus_has_coin from helper import tables_exist, find_ids, exit_times, create_sim_input, \ h5out, sqliteout, clean_outs, sha1array, to_ary, which_outfile @@ -100,7 +99,7 @@ def test_minimal_cycle(): This equation is used to test each transaction amount. 
""" if not cyclus_has_coin(): - raise SkipTest("Cyclus does not have COIN") + pytest.skip("Cyclus does not have COIN") # A reference simulation input for minimal cycle with different commodities ref_input = os.path.join(INPUT, "minimal_cycle.xml") @@ -115,7 +114,7 @@ def test_minimal_cycle(): holdsrtn = [1] # needed b/c nose does not send() to test generator outfile = which_outfile() cmd = ["cyclus", "-o", outfile, "--input-file", sim_input] - yield check_cmd, cmd, '.', holdsrtn + check_cmd(cmd, '.', holdsrtn) rtn = holdsrtn[0] if rtn != 0: return # don't execute further commands @@ -124,7 +123,7 @@ def test_minimal_cycle(): paths = ["/AgentEntry", "/Resources", "/Transactions", "/Info"] # Check if these tables exist - yield assert_true, tables_exist(outfile, paths) + assert tables_exist(outfile, paths) if not tables_exist(outfile, paths): outfile.close() clean_outs() @@ -159,18 +158,18 @@ def test_minimal_cycle(): facility_id = find_ids(":agents:KFacility", spec, agent_ids) # Test for two KFacility - yield assert_equal, len(facility_id), 2 + assert len(facility_id) == 2 # Test for one Facility A and Facility B facility_a = find_ids("FacilityA", agent_protos, agent_ids) facility_b = find_ids("FacilityB", agent_protos, agent_ids) - yield assert_equal, len(facility_a), 1 - yield assert_equal, len(facility_b), 1 + assert len(facility_a) == 1 + assert len(facility_b) == 1 # Test if both facilities are KFracilities # Assume FacilityA is deployed first according to the schema - yield assert_equal, facility_a[0], facility_id[0] - yield assert_equal, facility_b[0], facility_id[1] + assert facility_a[0] == facility_id[0] + assert facility_b[0] == facility_id[1] # Test if the transactions are strictly between Facility A and # Facility B. There are no Facility A to Facility A or vice versa. 
@@ -186,23 +185,23 @@ def test_minimal_cycle(): pattern_a = pattern_two pattern_b = pattern_one - yield assert_array_equal, \ + assert_array_equal, \ np.where(sender_ids == facility_a[0])[0], \ pattern_a, "Fac A Pattern A" - yield assert_array_equal, \ + assert_array_equal, \ np.where(receiver_ids == facility_a[0])[0], \ pattern_b, "Fac A Pattern B" # reverse pattern when acted as a receiver - yield assert_array_equal, \ + assert_array_equal, \ np.where(sender_ids == facility_b[0])[0], \ pattern_b, "Fac B Pattern A" - yield assert_array_equal, \ + assert_array_equal, \ np.where(receiver_ids == facility_b[0])[0], \ pattern_a, "Fac B Pattern B" # reverse pattern when acted as a receiver # Transaction ids must be equal range from 1 to the number of rows expected_trans_ids = np.arange(sender_ids.size) - yield assert_array_equal, \ + assert_array_equal, \ to_ary(transactions, "TransactionId"), \ expected_trans_ids @@ -213,7 +212,7 @@ def test_minimal_cycle(): # there must be (2 * duration) number of transactions. 
exp = 2 * duration obs = sender_ids.size - yield assert_equal, exp, obs, "number of transactions, {} != {}".format(exp, obs) + assert exp == obs, f"number of transactions, {exp} != {obs}" # Track transacted resources quantities = to_ary(resources, "Quantity") @@ -223,18 +222,16 @@ def test_minimal_cycle(): init_capacity_b = quantities[1] j = 0 for p in pattern_a: - yield assert_almost_equal, quantities[p], \ + assert pytest.approx(quantities[p], abs=1e-7) == \ init_capacity_a * k_factor_a ** j j += 1 j = 0 for p in pattern_b: - yield assert_almost_equal, quantities[p], \ + assert pytest.approx(quantities[p], abs=1e-7) == \ init_capacity_b * k_factor_b ** j j += 1 clean_outs() os.remove(sim_input) -if __name__ == "__main__": - nose.runmodule() diff --git a/tests/test_null_sink.py b/tests/test_null_sink.py index 0e122d6ec0..52d73991aa 100644 --- a/tests/test_null_sink.py +++ b/tests/test_null_sink.py @@ -2,11 +2,7 @@ import os import sqlite3 - -from nose.tools import assert_false, assert_true, assert_equal -from nose.plugins.skip import SkipTest - - +import pytest import numpy as np import tables from helper import tables_exist, find_ids, exit_times, \ @@ -17,22 +13,31 @@ INPUT = os.path.join(os.path.dirname(__file__), "input") -def check_null_sink(fname, given_spec): +@pytest.fixture(params=[("null_sink.xml", ":agents:Sink"), + ("null_sink.py", ":cyclus.pyagents:Sink")]) +def null_sink_case(request): + yield request.param + + +def test_null_sink(null_sink_case): """Testing for null sink case without a source facility. No transactions are expected in this test; therefore, a table with transaction records must not exist in order to pass this test. 
""" + + fname, given_spec = null_sink_case + clean_outs() if not cyclus_has_coin(): - raise SkipTest("Cyclus does not have COIN") + pytest.skip("Cyclus does not have COIN") # Cyclus simulation input for null sink testing sim_input = os.path.join(INPUT, fname) holdsrtn = [1] # needed because nose does not send() to test generator outfile = which_outfile() cmd = ["cyclus", "-o", outfile, "--input-file", sim_input] - yield check_cmd, cmd, '.', holdsrtn + check_cmd(cmd, '.', holdsrtn) rtn = holdsrtn[0] if rtn != 0: return # don't execute further commands @@ -40,7 +45,7 @@ def check_null_sink(fname, given_spec): legal_paths = ["/AgentEntry", "/Info"] illegal_paths = ["/Transactions"] # this must contain tables to test # Check if these tables exist - yield assert_true, tables_exist(outfile, legal_paths) + assert tables_exist(outfile, legal_paths) if not tables_exist(outfile, legal_paths): outfile.close() clean_outs() @@ -68,18 +73,10 @@ def check_null_sink(fname, given_spec): sink_id = find_ids(given_spec, spec, agent_ids) # Test if one SimpleSink is deployed - yield assert_equal, len(sink_id), 1 + assert len(sink_id) == 1 # No resource exchange is expected - yield assert_false, tables_exist(outfile, illegal_paths) - + assert not tables_exist(outfile, illegal_paths) clean_outs() -def test_null_sink(): - cases = [("null_sink.xml", ":agents:Sink"), - ("null_sink.py", ":cyclus.pyagents:Sink")] - for case in cases: - for x in check_null_sink(*case): - yield x - diff --git a/tests/test_record_time_series.py b/tests/test_record_time_series.py index 8cf4fc2a0c..08fe2c82d2 100644 --- a/tests/test_record_time_series.py +++ b/tests/test_record_time_series.py @@ -2,7 +2,6 @@ import subprocess import os -from nose.tools import assert_in inputfile = {'simulation': {'archetypes': {'spec': [ {'lib': 'dummy_power_recorder', 'name': 'DummyPowerRecorder'}, @@ -29,7 +28,7 @@ def test_record_time_series(): env = dict(os.environ) env['PYTHONPATH'] = "." 
s = subprocess.check_output(['cyclus', '-o', 'dummy.h5', 'dummy.json'], universal_newlines=True, env=env) - assert_in("The power is 10", s) + assert ("The power is 10" in s) if os.path.exists('dummy.json'): os.remove('dummy.json') if os.path.exists('dummy.h5'): diff --git a/tests/test_smbchk.py b/tests/test_smbchk.py index 631221470d..5ac9301528 100644 --- a/tests/test_smbchk.py +++ b/tests/test_smbchk.py @@ -3,11 +3,7 @@ import platform import sys from argparse import Namespace - -import nose -from nose.plugins.deprecated import DeprecatedTest -from nose.tools import assert_equal, assert_true, assert_false, assert_raises, \ - assert_in +import pytest from tools import skip_then_continue @@ -21,16 +17,16 @@ except ImportError: smbchk = False +@pytest.mark.skip(reason="symbol test has been deprecated") def test_load(): - raise DeprecatedTest("symbol test has been deprecated") if not smbchk: return ns = Namespace(filename=os.path.join(reldir, 'symbols.json')) db = smbchk.load(ns) - assert_true(isinstance(db, list)) + assert(isinstance(db, list)) +@pytest.mark.skip(reason="symbol test has been deprecated") def test_nm(): - raise DeprecatedTest("symbol test has been deprecated") if platform.system() == 'Darwin': skip_then_continue("Skipping for Mac") if not smbchk: @@ -39,10 +35,10 @@ def test_nm(): return ns = Namespace(prefix=blddir) syms = smbchk.nm(ns) - assert_in("cyclus::Agent::Agent(cyclus::Context*)", syms) + assert ("cyclus::Agent::Agent(cyclus::Context*)" in syms) +@pytest.mark.skip(reason="symbol test has been deprecated") def test_diff(): - raise DeprecatedTest("symbol test has been deprecated") if not smbchk: return db = [{'symbols': ["cyclus::Agent::Agent(cyclus::Context*)"], @@ -51,10 +47,10 @@ def test_diff(): "cyclus::Agent::~Agent()"], 'version': 'Y', 'date': 'y.y.y'},] obs = smbchk.diff(db, 0, 1) - assert_true(len(obs) > 0) + assert(len(obs) > 0) +@pytest.mark.skip(reason="symbol test has been deprecated") def test_check(): - raise 
DeprecatedTest("symbol test has been deprecated") if not smbchk: return # adds to API @@ -64,13 +60,11 @@ def test_check(): "cyclus::Agent::~Agent()"], 'version': 'Y', 'date': 'y.y.y'},] obs = smbchk.check(db) - assert_true(obs) + assert(obs) # removes from API db.append({'symbols': ["cyclus::Agent::~Agent()"], 'version': 'Z', 'date': 'z.z.z'}) obs = smbchk.check(db) - assert_false(obs) + assert not(obs) -if __name__ == "__main__": - nose.runmodule() diff --git a/tests/test_source_to_sink.py b/tests/test_source_to_sink.py index 6bdf692727..2e83ea4459 100644 --- a/tests/test_source_to_sink.py +++ b/tests/test_source_to_sink.py @@ -1,26 +1,35 @@ #! /usr/bin/env python -from nose.tools import assert_equal, assert_true -from nose.plugins.skip import SkipTest from numpy.testing import assert_array_equal import os import sqlite3 import tables import numpy as np +import pytest + from tools import check_cmd, cyclus_has_coin from helper import tables_exist, find_ids, exit_times, \ h5out, sqliteout, clean_outs, to_ary, which_outfile INPUT = os.path.join(os.path.dirname(__file__), "input") -def check_source_to_sink(fname, source_spec, sink_spec): +@pytest.fixture(params=[("source_to_sink.xml", ":agents:Source", ":agents:Sink"), + ("source_to_sink.py", ":cyclus.pyagents:Source", ":cyclus.pyagents:Sink"), + ]) +def source_to_sink_case(request): + yield request.param + +def test_source_to_sink(source_to_sink_case): """Tests linear growth of sink inventory by checking if the transactions were of equal quantities and only between sink and source facilities. 
""" clean_outs() + + fname, source_spec, sink_spec = source_to_sink_case + if not cyclus_has_coin(): - raise SkipTest("Cyclus does not have COIN") + pytest.skip("Cyclus does not have COIN") # Cyclus simulation input for Source and Sink sim_inputs = [os.path.join(INPUT, fname)] @@ -29,7 +38,7 @@ def check_source_to_sink(fname, source_spec, sink_spec): holdsrtn = [1] # needed because nose does not send() to test generator outfile = which_outfile() cmd = ["cyclus", "-o", outfile, "--input-file", sim_input] - yield check_cmd, cmd, '.', holdsrtn + check_cmd(cmd, '.', holdsrtn) rtn = holdsrtn[0] if rtn != 0: return # don't execute further commands @@ -37,7 +46,7 @@ def check_source_to_sink(fname, source_spec, sink_spec): # Tables of interest paths = ["/AgentEntry", "/Resources", "/Transactions", "/Info"] # Check if these tables exist - yield assert_true, tables_exist(outfile, paths) + assert tables_exist(outfile, paths) if not tables_exist(outfile, paths): clean_outs() return # don't execute further commands @@ -71,8 +80,8 @@ def check_source_to_sink(fname, source_spec, sink_spec): sink_id = find_ids(sink_spec, spec, agent_ids) # Test for only one source and one sink are deployed in the simulation - yield assert_equal, len(source_id), 1 - yield assert_equal, len(sink_id), 1 + assert len(source_id) == 1 + assert len(sink_id) == 1 # Check if transactions are only between source and sink sender_ids = to_ary(transactions, "SenderId") @@ -81,12 +90,12 @@ def check_source_to_sink(fname, source_spec, sink_spec): expected_sender_array.fill(source_id[0]) expected_receiver_array = np.empty(receiver_ids.size) expected_receiver_array.fill(sink_id[0]) - yield assert_array_equal, sender_ids, expected_sender_array - yield assert_array_equal, receiver_ids, expected_receiver_array + assert_array_equal, sender_ids, expected_sender_array + assert_array_equal, receiver_ids, expected_receiver_array # Transaction ids must be equal range from 1 to the number of rows expected_trans_ids = 
np.arange(0, sender_ids.size, 1) - yield assert_array_equal, \ - to_ary(transactions, "TransactionId"),\ - expected_trans_ids + assert_array_equal( + to_ary(transactions, "TransactionId"), + expected_trans_ids) @@ -97,15 +106,9 @@ def check_source_to_sink(fname, source_spec, sink_spec): # Expect that every transaction quantity is the same amount expected_quantities.fill(quantities[0]) - yield assert_array_equal, quantities, expected_quantities + assert_array_equal(quantities, expected_quantities) clean_outs() -def test_source_to_sink(): - cases = [("source_to_sink.xml", ":agents:Source", ":agents:Sink"), - ("source_to_sink.py", ":cyclus.pyagents:Source", ":cyclus.pyagents:Sink"), - ] - for case in cases: - for x in check_source_to_sink(*case): - yield x + diff --git a/tests/test_stubs.py b/tests/test_stubs.py index 059ef049a7..a7675500ec 100644 --- a/tests/test_stubs.py +++ b/tests/test_stubs.py @@ -3,10 +3,9 @@ import shutil import sys import tempfile -import io from contextlib import contextmanager +import pytest -from nose.plugins.skip import SkipTest @contextmanager def tmpdir(): @@ -16,7 +15,8 @@ def tmpdir(): @contextmanager def tmplog(fname): - yield io.open(fname, mode='w') + file_ptr = open(fname, mode='w') + yield file_ptr os.remove(fname) def test_stubs(): @@ -55,7 +55,7 @@ def test_stubs(): cwd=src, stdout=f, stderr=f) except subprocess.CalledProcessError as e: print(msg) - raise SkipTest(msg) # skip if we can't install for some reason. + pytest.skip(msg) # skip if we can't install for some reason. 
# run unit tests for stub cmd = tst_cmd.format(pth) diff --git a/tests/test_toaster.py b/tests/test_toaster.py index 433448c12c..2fc6c8b9ae 100644 --- a/tests/test_toaster.py +++ b/tests/test_toaster.py @@ -3,7 +3,6 @@ import json import subprocess -from nose.tools import assert_in, assert_true, assert_equals DEFAULTFILE = {'simulation': {'archetypes': {'spec': [ @@ -34,9 +33,9 @@ def test_pyagent_defaults(): s = subprocess.check_output(['cyclus', '-o', 'default-toaster.h5', 'default-toaster.json'], universal_newlines=True, env=env) # tests default value set on facility - assert_in("Bread is rye", s) + assert ("Bread is rye" in s) # tests that value in input file overrides default value - assert_in("Toast level is 10", s) + assert ("Toast level is 10" in s) if os.path.exists('default-toaster.json'): os.remove('default-toaster.json') if os.path.exists('default-toaster.h5'): @@ -75,22 +74,22 @@ def test_pyagent_attr_toasters(): info = s.split('=== Start AttrToaster ===\n')[-1].split('\n=== End AttrToaster ===')[0] info = json.loads(info) # test ids - assert_true(isinstance(info['id'], int)) - assert_true(isinstance(info['parent'], int)) - assert_true(info['parent'] != info['id']) - assert_true(0 <= info['parent'] < 100) - assert_true(info['id'] == info['hash']) + assert(isinstance(info['id'], int)) + assert(isinstance(info['parent'], int)) + assert(info['parent'] != info['id']) + assert(0 <= info['parent'] < 100) + assert(info['id'] == info['hash']) # test attrs - assert_true(info['str'].startswith('Facility_HappyToaster')) - assert_equals(info['kind'], 'Facility') - assert_equals(info['spec'], ':toaster:AttrToaster') - assert_equals(info['version'], '0.0.0') - assert_equals(info['prototype'], 'HappyToaster') - assert_equals(info['enter_time'], 0) - assert_equals(info['lifetime'], -1) - assert_equals(info['exit_time'], -1) - assert_equals(len(info['childern']), 0) - assert_true(len(info['annotations']) > 0) + assert(info['str'].startswith('Facility_HappyToaster')) + 
assert (info['kind'] == 'Facility') + assert (info['spec'] == ':toaster:AttrToaster') + assert (info['version'] == '0.0.0') + assert (info['prototype'] == 'HappyToaster') + assert (info['enter_time'] == 0) + assert (info['lifetime'] == -1) + assert (info['exit_time'] == -1) + assert (len(info['childern']) == 0) + assert(len(info['annotations']) > 0) # clean up if os.path.exists(iname): os.remove(iname) @@ -131,22 +130,22 @@ def test_pyagent_attr_toaster_company(): info = info.split('\n=== End AttrToasterCompany ===')[0] info = json.loads(info) # test ids - assert_true(isinstance(info['id'], int)) - assert_true(isinstance(info['parent'], int)) - assert_true(info['parent'] != info['id']) - assert_true(0 <= info['parent'] <= 100) - assert_true(info['id'] == info['hash']) + assert(isinstance(info['id'], int)) + assert(isinstance(info['parent'], int)) + assert(info['parent'] != info['id']) + assert(0 <= info['parent'] <= 100) + assert(info['id'] == info['hash']) # test attrs - assert_true(info['str'].startswith('Inst_FamousToastersLLC')) - assert_equals(info['kind'], 'Inst') - assert_equals(info['spec'], ':toaster:AttrToasterCompany') - assert_equals(info['version'], '0.0.0') - assert_equals(info['prototype'], 'FamousToastersLLC') - assert_equals(info['enter_time'], 0) - assert_equals(info['lifetime'], -1) - assert_equals(info['exit_time'], -1) - assert_equals(len(info['childern']), 1) - assert_true(len(info['annotations']) > 0) + assert(info['str'].startswith('Inst_FamousToastersLLC')) + assert (info['kind'] == 'Inst') + assert (info['spec'] == ':toaster:AttrToasterCompany') + assert (info['version'] == '0.0.0') + assert (info['prototype'] == 'FamousToastersLLC') + assert (info['enter_time'] == 0) + assert (info['lifetime'] == -1) + assert (info['exit_time'] == -1) + assert (len(info['childern']) == 1) + assert(len(info['annotations']) > 0) # clean up if os.path.exists(iname): os.remove(iname) @@ -187,22 +186,22 @@ def test_pyagent_attr_toaster_region(): info = 
info.split('\n=== End AttrToasterRegion ===')[0] info = json.loads(info) # test ids - assert_true(isinstance(info['id'], int)) - assert_true(isinstance(info['parent'], int)) - assert_true(info['parent'] != info['id']) - assert_equals(info['parent'], -1) - assert_true(info['id'] == info['hash']) + assert(isinstance(info['id'], int)) + assert(isinstance(info['parent'], int)) + assert(info['parent'] != info['id']) + assert (info['parent'] == -1) + assert(info['id'] == info['hash']) # test attrs - assert_true(info['str'].startswith('Region_RepublicOfToast')) - assert_equals(info['kind'], 'Region') - assert_equals(info['spec'], ':toaster:AttrToasterRegion') - assert_equals(info['version'], '0.0.0') - assert_equals(info['prototype'], 'RepublicOfToast') - assert_equals(info['enter_time'], 0) - assert_equals(info['lifetime'], -1) - assert_equals(info['exit_time'], -1) - assert_equals(len(info['childern']), 1) - assert_true(len(info['annotations']) > 0) + assert(info['str'].startswith('Region_RepublicOfToast')) + assert (info['kind'] == 'Region') + assert (info['spec'] == ':toaster:AttrToasterRegion') + assert (info['version'] == '0.0.0') + assert (info['prototype'] == 'RepublicOfToast') + assert (info['enter_time'] == 0) + assert (info['lifetime'] == -1) + assert (info['exit_time'] == -1) + assert (len(info['childern']) == 1) + assert(len(info['annotations']) > 0) # clean up if os.path.exists(iname): os.remove(iname) diff --git a/tests/test_trivial_cycle.py b/tests/test_trivial_cycle.py index a8c49d0e4e..25ec5b494f 100644 --- a/tests/test_trivial_cycle.py +++ b/tests/test_trivial_cycle.py @@ -1,12 +1,12 @@ #! 
/usr/bin/env python -from nose.tools import assert_equal, assert_almost_equal, assert_true -from nose.plugins.skip import SkipTest from numpy.testing import assert_array_equal import os import sqlite3 import tables import numpy as np +import pytest + from tools import check_cmd, cyclus_has_coin from helper import tables_exist, find_ids, exit_times, create_sim_input, \ h5out, sqliteout, clean_outs, to_ary, which_outfile @@ -26,7 +26,7 @@ def test_source_to_sink(): This equation is used to test each transaction amount. """ if not cyclus_has_coin(): - raise SkipTest("Cyclus does not have COIN") + pytest.skip("Cyclus does not have COIN") # A reference simulation input for the trivial cycle simulation. ref_input = os.path.join(INPUT, "trivial_cycle.xml") @@ -41,7 +41,7 @@ def test_source_to_sink(): holdsrtn = [1] # needed because nose does not send() to test generator outfile = which_outfile() cmd = ["cyclus", "-o", outfile, "--input-file", sim_input] - yield check_cmd, cmd, '.', holdsrtn + check_cmd(cmd, '.', holdsrtn) rtn = holdsrtn[0] if rtn != 0: return # don't execute further commands @@ -50,7 +50,7 @@ def test_source_to_sink(): paths = ["/AgentEntry", "/Resources", "/Transactions", "/Info"] # Check if these tables exist - yield assert_true, tables_exist(outfile, paths) + assert tables_exist(outfile, paths) if not tables_exist(outfile, paths): outfile.close() clean_outs() @@ -81,7 +81,7 @@ def test_source_to_sink(): facility_id = find_ids(":agents:KFacility", spec, agent_ids) # Test for only one KFacility - yield assert_equal, len(facility_id), 1 + assert len(facility_id) == 1 sender_ids = to_ary(transactions, "SenderId") receiver_ids = to_ary(transactions, "ReceiverId") @@ -89,12 +89,12 @@ def test_source_to_sink(): expected_sender_array.fill(facility_id[0]) expected_receiver_array = np.empty(receiver_ids.size) expected_receiver_array.fill(facility_id[0]) - yield assert_array_equal, sender_ids, expected_sender_array - yield assert_array_equal, receiver_ids, 
expected_receiver_array + assert_array_equal(sender_ids, expected_sender_array) + assert_array_equal(receiver_ids, expected_receiver_array) # Transaction ids must be equal range from 1 to the number of rows expected_trans_ids = np.arange(0, sender_ids.size, 1) - yield assert_array_equal, \ - to_ary(transactions, "TransactionId"), \ - expected_trans_ids + assert_array_equal( + to_ary(transactions, "TransactionId"), + expected_trans_ids) @@ -106,7 +106,7 @@ def test_source_to_sink(): i = 0 initial_capacity = quantities[0] for q in quantities: - yield assert_almost_equal, q, initial_capacity * k_factor ** i + assert pytest.approx(q, abs=1e-7) == initial_capacity * k_factor ** i i += 1 clean_outs() diff --git a/tests/toolkit/infile_converters_tests.cc b/tests/toolkit/infile_converters_tests.cc index 81f358eaf2..b1ac647763 100644 --- a/tests/toolkit/infile_converters_tests.cc +++ b/tests/toolkit/infile_converters_tests.cc @@ -86,11 +86,12 @@ TEST(InfileConverters, JsonPyRoundTrip) { cyclus::PyStart(); string inp = cyclus::toolkit::XmlToJson(MakeInput()); string p1 = cyclus::toolkit::JsonToPy(inp); - string j1 = cyclus::toolkit::PyToJson(p1); + string j1 = cyclus::toolkit::PyToJson(p1) + "\n"; string p2 = cyclus::toolkit::JsonToPy(j1); - string j2 = cyclus::toolkit::PyToJson(p2); + string j2 = cyclus::toolkit::PyToJson(p2) + "\n"; cyclus::PyStop(); + EXPECT_STREQ(inp.c_str(), j1.c_str()); EXPECT_STREQ(j1.c_str(), j2.c_str()); EXPECT_STREQ(p1.c_str(), p2.c_str()); } diff --git a/tests/tools.py b/tests/tools.py index fe6417cb5b..a3be48eefa 100644 --- a/tests/tools.py +++ b/tests/tools.py @@ -3,26 +3,20 @@ import os import re import sys -import imp +import importlib import shutil import unittest import subprocess import tempfile from contextlib import contextmanager -from functools import wraps +import pytest -from nose.tools import assert_true, assert_equal -from nose.plugins.attrib import attr -from nose.plugins.skip import SkipTest from cyclus import lib as libcyclus if sys.version_info[0] >= 3: basestring = str -unit = attr('unit') 
-integration = attr('integration') - INPUT = os.path.join(os.path.dirname(__file__), "input") CYCLUS_HAS_COIN = None @@ -55,7 +49,7 @@ def check_cmd(args, cwd, holdsrtn): print("STDOUT + STDERR:\n\n" + f.read().decode()) f.close() holdsrtn[0] = rtn - assert_equal(rtn, 0) + assert rtn == 0 def cyclus_has_coin(): @@ -79,7 +73,10 @@ def clean_import(name, paths=None): """ sys.path = paths + sys.path origmods = set(sys.modules.keys()) - mod = imp.load_module(name, *imp.find_module(name, paths)) + spec = importlib.machinery.PathFinder.find_spec(name, paths) + mod = importlib.util.module_from_spec(spec) + spec.loader.exec_module(mod) + sys.modules[name] = mod yield mod sys.path = sys.path[len(paths):] del mod @@ -121,7 +118,7 @@ def skip_then_continue(msg=""): and we may continue on our merry way. A message may be optionally passed to this function. """ - raise SkipTest(msg) + pytest.skip(msg) @contextmanager def indir(d): @@ -170,19 +167,17 @@ def libcyclus_setup(): def dbtest(f): - @wraps(f) def wrapper(): for fname, oname, backend in DBS: if os.path.exists(fname): os.remove(fname) shutil.copy(oname, fname) db = backend(fname) - yield f, db, fname, backend + f(db, fname, backend) return wrapper - # # Here there be Hackons! #