From 2b6470425d21a98e2b8fbd2a19ff3cdbae5c5206 Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Tue, 17 May 2022 10:17:25 -0500 Subject: [PATCH 01/82] Update CMake version --- CMakeLists.txt | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 5fd0f711e4..7f0925efae 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 3.16) #taken from http://geant4.cern.ch/support/source/geant4/CMakeLists.txt IF(${CMAKE_SOURCE_DIR} STREQUAL ${CMAKE_BINARY_DIR}) message(STATUS "Cyclus requires an out-of-source build.") @@ -10,10 +10,6 @@ IF(${CMAKE_SOURCE_DIR} STREQUAL ${CMAKE_BINARY_DIR}) message(FATAL_ERROR "in-source build detected") ENDIF() -# Set some policies -cmake_policy(SET CMP0040 OLD) -cmake_policy(SET CMP0042 OLD) - # This project name is cyclus. PROJECT(CYCLUS) From f0016c3bb2caffa30c733d328ace53d53daa1710 Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Tue, 17 May 2022 12:57:32 -0500 Subject: [PATCH 02/82] update CMakeLists and move unused Find* out of the way --- CMakeLists.txt | 32 ++++++++++++++-------------- {cmake => xCmake}/FindCOIN.cmake | 0 {cmake => xCmake}/FindGlib.cmake | 0 {cmake => xCmake}/FindGlibmm.cmake | 0 {cmake => xCmake}/FindJinja2.cmake | 0 {cmake => xCmake}/FindLibXML++.cmake | 0 {cmake => xCmake}/FindNumpy.cmake | 0 {cmake => xCmake}/FindPandas.cmake | 0 {cmake => xCmake}/FindSigC++.cmake | 0 {cmake => xCmake}/FindSqlite3.cmake | 0 {cmake => xCmake}/FindTcmalloc.cmake | 0 11 files changed, 16 insertions(+), 16 deletions(-) rename {cmake => xCmake}/FindCOIN.cmake (100%) rename {cmake => xCmake}/FindGlib.cmake (100%) rename {cmake => xCmake}/FindGlibmm.cmake (100%) rename {cmake => xCmake}/FindJinja2.cmake (100%) rename {cmake => xCmake}/FindLibXML++.cmake (100%) rename {cmake => xCmake}/FindNumpy.cmake (100%) rename {cmake => xCmake}/FindPandas.cmake (100%) rename {cmake => xCmake}/FindSigC++.cmake (100%) rename {cmake => xCmake}/FindSqlite3.cmake (100%) rename {cmake => xCmake}/FindTcmalloc.cmake (100%) diff --git a/CMakeLists.txt b/CMakeLists.txt index 7f0925efae..5f704777aa 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -104,8 +104,8 @@ IF(NOT CYCLUS_DOC_ONLY) endif() # Tell CMake where the modules are - LIST(APPEND CMAKE_MODULE_PATH - "${CMAKE_DIR}/share/cmake-2.8/Modules" "${PROJECT_SOURCE_DIR}/cmake") + LIST(APPEND CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} + "${CMAKE_DIR}/usr/share/cmake-3.22/Modules" "${PROJECT_SOURCE_DIR}/cmake") # Include macros INCLUDE(CopyWhenDiffMacro) @@ -158,21 +158,22 @@ IF(NOT CYCLUS_DOC_ONLY) MESSAGE("-- Dependency Binary Hints (DEPS_BIN_HINTS): ${DEPS_BIN_HINTS}") MESSAGE("-- Dependency Library Hints (DEPS_LIB_HINTS): ${DEPS_LIB_HINTS}") MESSAGE("-- Dependency Include Hints (DEPS_INCLUDE_HINTS): ${DEPS_INCLUDE_HINTS}") + MESSAGE("CMAKE_MODULE_PATH: ${CMAKE_MODULE_PATH}") + + # Search pkg-config utility first + find_package(PkgConfig REQUIRED) + # Debian installs useful LibXML2 files to /usr/include/libxml2/libxml # libxml2 is required for relaxng schema validation - FIND_PACKAGE(LibXml2 ${DEPS_HINTS}) - IF(NOT LIBXML2_LIBRARIES) - FIND_LIBRARY(LibXml2 REQUIRED ${DEPS_HINTS}) - ENDIF() + + FIND_PACKAGE(LibXml2 REQUIRED) ADD_DEFINITIONS(${LIBXML2_DEFINITIONS}) SET(LIBS ${LIBS} ${LIBXML2_LIBRARIES}) + message("-- LibXML2 Include Dir: ${LIBXML2_INCLUDE_DIR}") - # Find LibXML++ and dependencies - FIND_PACKAGE(LibXML++) - IF(NOT LibXML++_LIBRARIES) - FIND_LIBRARY(LibXML++ REQUIRED 
${DEPS_HINTS}) - ENDIF() + # Then use pkg-config for locate specific package + pkg_check_modules(LIBXMLXX REQUIRED IMPORTED_TARGET libxml++-2.6) SET(LIBS ${LIBS} ${LibXML++_LIBRARIES}) message("-- LibXML++ Include Dir: ${LibXML++_INCLUDE_DIR}") @@ -184,7 +185,7 @@ IF(NOT CYCLUS_DOC_ONLY) MESSAGE("-- Found BLAS Libraries: ${BLAS_LIBRARIES}") # Find Sqlite3 - FIND_PACKAGE(Sqlite3 REQUIRED) + pkg_check_modules(SQLITE3 REQUIRED IMPORTED_TARGET sqlite3) SET(LIBS ${LIBS} ${SQLITE3_LIBRARIES}) # Find HDF5 @@ -225,7 +226,7 @@ IF(NOT CYCLUS_DOC_ONLY) MESSAGE("-- Boost Serialization location: ${Boost_SERIALIZATION_LIBRARY}") # find coin and link to it - FIND_PACKAGE(COIN) + pkg_check_modules(COIN REQUIRED IMPORTED_TARGET cbc) MESSAGE("-- COIN Version: ${COIN_VERSION}") set(LIBS ${LIBS} ${COIN_LIBRARIES}) if(NOT COIN_FOUND) @@ -296,8 +297,8 @@ IF(NOT CYCLUS_DOC_ONLY) # Cython & Python Bindings # # Use new Python library finder - find_package(PythonInterp) - find_package(PythonLibs) + find_package (Python3 COMPONENTS Interpreter Development NumPy) + execute_process(COMMAND "${PYTHON_EXECUTABLE}" -c "import site; print(site.getsitepackages(['${CMAKE_INSTALL_PREFIX}'])[0])" OUTPUT_VARIABLE PYTHON_SITE_PACKAGES @@ -317,7 +318,6 @@ IF(NOT CYCLUS_DOC_ONLY) endif() include(UseCython) - find_package(Numpy REQUIRED) find_package(Jinja2 REQUIRED) find_package(Pandas REQUIRED) diff --git a/cmake/FindCOIN.cmake b/xCmake/FindCOIN.cmake similarity index 100% rename from cmake/FindCOIN.cmake rename to xCmake/FindCOIN.cmake diff --git a/cmake/FindGlib.cmake b/xCmake/FindGlib.cmake similarity index 100% rename from cmake/FindGlib.cmake rename to xCmake/FindGlib.cmake diff --git a/cmake/FindGlibmm.cmake b/xCmake/FindGlibmm.cmake similarity index 100% rename from cmake/FindGlibmm.cmake rename to xCmake/FindGlibmm.cmake diff --git a/cmake/FindJinja2.cmake b/xCmake/FindJinja2.cmake similarity index 100% rename from cmake/FindJinja2.cmake rename to xCmake/FindJinja2.cmake diff --git a/cmake/FindLibXML++.cmake b/xCmake/FindLibXML++.cmake similarity index 100% rename from cmake/FindLibXML++.cmake rename to xCmake/FindLibXML++.cmake diff --git a/cmake/FindNumpy.cmake b/xCmake/FindNumpy.cmake similarity index 100% rename from cmake/FindNumpy.cmake rename to xCmake/FindNumpy.cmake diff --git a/cmake/FindPandas.cmake b/xCmake/FindPandas.cmake similarity index 100% rename from cmake/FindPandas.cmake rename to xCmake/FindPandas.cmake diff --git a/cmake/FindSigC++.cmake b/xCmake/FindSigC++.cmake similarity index 100% rename from cmake/FindSigC++.cmake rename to xCmake/FindSigC++.cmake diff --git a/cmake/FindSqlite3.cmake b/xCmake/FindSqlite3.cmake similarity index 100% rename from cmake/FindSqlite3.cmake rename to xCmake/FindSqlite3.cmake diff --git a/cmake/FindTcmalloc.cmake b/xCmake/FindTcmalloc.cmake similarity index 100% rename from cmake/FindTcmalloc.cmake rename to xCmake/FindTcmalloc.cmake From 72244783cf2b07b5526e94f33f1d3e4b589628f9 Mon Sep 17 00:00:00 2001 From: Katie Mummah Date: Tue, 17 May 2022 13:40:23 -0500 Subject: [PATCH 03/82] slightly further along --- CMakeLists.txt | 3 +-- {xCmake => cmake}/FindJinja2.cmake | 0 {xCmake => cmake}/FindPandas.cmake | 0 3 files changed, 1 insertion(+), 2 deletions(-) rename {xCmake => cmake}/FindJinja2.cmake (100%) rename {xCmake => cmake}/FindPandas.cmake (100%) diff --git a/CMakeLists.txt b/CMakeLists.txt index 5f704777aa..830a16caf1 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -372,8 +372,7 @@ IF(NOT CYCLUS_DOC_ONLY) if(Cython_FOUND) - INCLUDE_DIRECTORIES(AFTER 
"${PYTHON_INCLUDE_DIRS}" - "${NUMPY_INCLUDE_DIRS}") + INCLUDE_DIRECTORIES(AFTER "${PYTHON_INCLUDE_DIRS}") endif(Cython_FOUND) # set core version, one way or the other IF(NOT "${CORE_VERSION}" STREQUAL "") diff --git a/xCmake/FindJinja2.cmake b/cmake/FindJinja2.cmake similarity index 100% rename from xCmake/FindJinja2.cmake rename to cmake/FindJinja2.cmake diff --git a/xCmake/FindPandas.cmake b/cmake/FindPandas.cmake similarity index 100% rename from xCmake/FindPandas.cmake rename to cmake/FindPandas.cmake From 786db847f0d3c9d5cc343549c88c89467e34a7b8 Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Tue, 17 May 2022 17:06:25 -0500 Subject: [PATCH 04/82] Move target creation to facilitate CMake changes --- cli/CMakeLists.txt | 9 +++------ tests/CMakeLists.txt | 5 +++++ 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/cli/CMakeLists.txt b/cli/CMakeLists.txt index 41f98a6e3c..0c24116159 100644 --- a/cli/CMakeLists.txt +++ b/cli/CMakeLists.txt @@ -42,13 +42,10 @@ INSTALL( ############################################################################################## ################################## begin cyclus unit tests ################################### ############################################################################################## - -ADD_EXECUTABLE( - cyclus_unit_tests - ${CYCLUS_CORE_TEST_SOURCE} - cyclus_unit_test_driver.cc +TARGET_SOURCES(cyclus_unit_tests + PRIVATE cyclus_unit_test_driver.cc ) - + TARGET_LINK_LIBRARIES( cyclus_unit_tests dl diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index ffec6ecfaa..3f5fc8c83a 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -110,6 +110,11 @@ INSTALL(FILES ${test_agents} # read tests after building the driver, and add them to ctest set(tgt "cyclus_unit_tests") +ADD_EXECUTABLE( + ${tgt} + ${CYCLUS_CORE_TEST_SOURCE} + ) + add_custom_command(TARGET ${tgt} POST_BUILD COMMAND python "${CMAKE_CURRENT_SOURCE_DIR}/generate_gtest_macros.py" From 3eb7dc706ab71419241d67787b429510bd00c7bc Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Tue, 17 May 2022 17:06:33 -0500 Subject: [PATCH 05/82] update for Python 3 --- cli/cycpp.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/cli/cycpp.py b/cli/cycpp.py index 6e8aa8a87e..5e72815a7d 100755 --- a/cli/cycpp.py +++ b/cli/cycpp.py @@ -48,7 +48,8 @@ import re import sys import uuid -from collections import Sequence, Mapping, MutableMapping, OrderedDict +from collections.abc import Sequence, Mapping, MutableMapping +from collections import OrderedDict from contextlib import contextmanager from itertools import takewhile from subprocess import Popen, PIPE From 928e5e0b523781c7e0635ac7f5759b78b32a3a41 Mon Sep 17 00:00:00 2001 From: "Paul P.H. 
Wilson" Date: Mon, 13 Mar 2023 09:29:38 -0500 Subject: [PATCH 06/82] Update deps dockerfile to modern ubuntu and conda-installed packages --- docker/cyclus-deps/Dockerfile | 28 +++++++++++----------------- 1 file changed, 11 insertions(+), 17 deletions(-) diff --git a/docker/cyclus-deps/Dockerfile b/docker/cyclus-deps/Dockerfile index 6f83f90ec6..0f8078dce4 100644 --- a/docker/cyclus-deps/Dockerfile +++ b/docker/cyclus-deps/Dockerfile @@ -1,24 +1,16 @@ -FROM debian:9 +FROM ubuntu:22.04 as base RUN apt-get update --fix-missing && apt-get install -y wget bzip2 ca-certificates \ libglib2.0-0 libxext6 libsm6 libxrender1 + RUN echo 'export PATH=/opt/conda/bin:$PATH' > /etc/profile.d/conda.sh && \ wget --quiet https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O ~/miniconda.sh && \ /bin/bash ~/miniconda.sh -b -p /opt/conda && \ rm ~/miniconda.sh -RUN apt-get install -y curl grep sed dpkg && \ - TINI_VERSION=`curl https://github.com/krallin/tini/releases/latest | grep -o "/v.*\"" | sed 's:^..\(.*\).$:\1:'` && \ - curl -L "https://github.com/krallin/tini/releases/download/v${TINI_VERSION}/tini_${TINI_VERSION}.deb" > tini.deb && \ - dpkg -i tini.deb && \ - rm tini.deb && \ - apt-get clean - ENV PATH /root/.local/bin:/opt/conda/bin:$PATH -ENTRYPOINT [ "/usr/bin/tini", "--" ] -CMD [ "/bin/bash" ] # # apt packages @@ -34,8 +26,9 @@ RUN apt-get update && \ # RUN conda config --add channels conda-forge RUN conda update -n base -c defaults conda +RUN conda install -y mamba RUN conda update -y --all && \ - conda install -y \ + mamba install -y \ openssh \ gxx_linux-64 \ gcc_linux-64 \ @@ -45,14 +38,14 @@ RUN conda update -y --all && \ git \ xo \ python-json-logger \ - glib=2.56 \ + glib \ libxml2 \ libxmlpp \ libblas \ libcblas \ liblapack \ pkg-config \ - coincbc=2.9 \ + coincbc \ boost-cpp \ hdf5 \ sqlite \ @@ -61,11 +54,11 @@ RUN conda update -y --all && \ bzip2 \ xz \ setuptools \ - nose \ + pytest \ pytables \ pandas \ jinja2 \ - "cython<=0.28.5" \ + cython \ websockets \ pprintpp \ && \ @@ -73,11 +66,12 @@ RUN conda update -y --all && \ ENV CC /opt/conda/bin/x86_64-conda_cos6-linux-gnu-gcc ENV CXX /opt/conda/bin/x86_64-conda_cos6-linux-gnu-g++ ENV CPP /opt/conda/bin/x86_64-conda_cos6-linux-gnu-cpp -ENV PYTHONPATH "/home/conda/.local/lib/python3.7/site-packages/:/root/.local/lib/python3.7/site-packages/" +ENV PYTHONPATH "/root/.local/lib/python3.10/site-packages/" # required for the nosetest ENV PYTHONWARNINGS ignore -RUN mkdir -p /root/.local/lib/python3.7/site-packages/ +RUN mkdir -p /root/.local/lib/python3.10/site-packages/ # # pip packages to overide conda # + RUN pip install docker From a972f2b8ea7f88e7891cf958cfad3cd98750a2a3 Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Mon, 13 Mar 2023 10:57:52 -0500 Subject: [PATCH 07/82] force libsqlite reinstall (why?) --- docker/cyclus-deps/Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/docker/cyclus-deps/Dockerfile b/docker/cyclus-deps/Dockerfile index 0f8078dce4..a36d5fca80 100644 --- a/docker/cyclus-deps/Dockerfile +++ b/docker/cyclus-deps/Dockerfile @@ -62,6 +62,7 @@ RUN conda update -y --all && \ websockets \ pprintpp \ && \ + mamba install -y --force-reinstall libsqlite && \ conda clean -y --all ENV CC /opt/conda/bin/x86_64-conda_cos6-linux-gnu-gcc ENV CXX /opt/conda/bin/x86_64-conda_cos6-linux-gnu-g++ From cd15181fe665a07b75d1f04fbd075dd0e4785a34 Mon Sep 17 00:00:00 2001 From: "Paul P.H. 
Wilson" Date: Mon, 13 Mar 2023 10:58:14 -0500 Subject: [PATCH 08/82] temporarily use cyclus-local image for deps --- docker/cyclus-ci/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/cyclus-ci/Dockerfile b/docker/cyclus-ci/Dockerfile index 18849df4aa..1cd95e966f 100644 --- a/docker/cyclus-ci/Dockerfile +++ b/docker/cyclus-ci/Dockerfile @@ -1,4 +1,4 @@ -FROM cyclus/cyclus-deps +FROM cyclus-local COPY . /cyclus WORKDIR /cyclus From d88226589b01a5890c8020d84c9d427e5fd8107a Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Mon, 13 Mar 2023 10:58:39 -0500 Subject: [PATCH 09/82] update xml++ version and trying to find SQLite3 --- CMakeLists.txt | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 830a16caf1..5653db17d9 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -104,8 +104,10 @@ IF(NOT CYCLUS_DOC_ONLY) endif() # Tell CMake where the modules are + MESSAGE("-- CMAKE_MODULE_PATH: ${CMAKE_MODULE_PATH}") LIST(APPEND CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} - "${CMAKE_DIR}/usr/share/cmake-3.22/Modules" "${PROJECT_SOURCE_DIR}/cmake") + # "/opt/conda/share/cmake-3.25/Modules" + "${PROJECT_SOURCE_DIR}/cmake") # Include macros INCLUDE(CopyWhenDiffMacro) @@ -173,7 +175,7 @@ IF(NOT CYCLUS_DOC_ONLY) message("-- LibXML2 Include Dir: ${LIBXML2_INCLUDE_DIR}") # Then use pkg-config for locate specific package - pkg_check_modules(LIBXMLXX REQUIRED IMPORTED_TARGET libxml++-2.6) + pkg_check_modules(LIBXMLXX REQUIRED IMPORTED_TARGET libxml++-4.0) SET(LIBS ${LIBS} ${LibXML++_LIBRARIES}) message("-- LibXML++ Include Dir: ${LibXML++_INCLUDE_DIR}") @@ -185,7 +187,7 @@ IF(NOT CYCLUS_DOC_ONLY) MESSAGE("-- Found BLAS Libraries: ${BLAS_LIBRARIES}") # Find Sqlite3 - pkg_check_modules(SQLITE3 REQUIRED IMPORTED_TARGET sqlite3) + FIND_PACKAGE(SQLite3 REQUIRED) SET(LIBS ${LIBS} ${SQLITE3_LIBRARIES}) # Find HDF5 From f1f7b162c44b29aa3252661acbbb5d2accdebcbf Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Tue, 14 Mar 2023 14:22:27 -0500 Subject: [PATCH 10/82] a little cleanup but no substantive changes --- cmake/UseCyclus.cmake | 85 ++++++++++++++++++++++++------------------- 1 file changed, 48 insertions(+), 37 deletions(-) diff --git a/cmake/UseCyclus.cmake b/cmake/UseCyclus.cmake index c99c7f8d0c..99d830120a 100644 --- a/cmake/UseCyclus.cmake +++ b/cmake/UseCyclus.cmake @@ -127,32 +127,29 @@ MACRO(USE_CYCLUS lib_root src_root) SET(CCOUT "${BUILD_DIR}/${src_root}.cc") SET(CCFLAG "-o=${CCOUT}") - # not sure if needed.. + # do all processing for CC file - always needed IF(NOT EXISTS ${CCOUT}) - MESSAGE(STATUS "Executing ${CYCPP} ${CCIN} ${PREPROCESSOR} ${CCFLAG} ${ORIG} ${INCL_ARGS}") - EXECUTE_PROCESS(COMMAND ${CYCPP} ${CCIN} ${PREPROCESSOR} ${CCFLAG} - ${ORIG} ${INCL_ARGS} RESULT_VARIABLE res_var) - IF(NOT "${res_var}" STREQUAL "0") - message(FATAL_ERROR "cycpp failed on '${CCIN}' with exit code '${res_var}'") - ENDIF() + PREPROCESS_CYCLUS_FILE_(${CYCPP} ${CCIN} ${PREPROCESSOR} ${CCFLAG} ${ORIG} ${INCL_ARGS}) ENDIF(NOT EXISTS ${CCOUT}) SET( "${lib_root}_CC" "${${lib_root}_CC}" "${CCOUT}" CACHE INTERNAL "Agent impl" FORCE ) + + # check for existing of header file IF(EXISTS "${HIN}") - # not sure if we still need this... 
+ # Do all processing for header file IF(NOT EXISTS ${HOUT}) - MESSAGE(STATUS "Executing ${CYCPP} ${HIN} ${PREPROCESSOR} ${HFLAG} ${ORIG} ${INCL_ARGS}") - EXECUTE_PROCESS(COMMAND ${CYCPP} ${HIN} ${PREPROCESSOR} ${HFLAG} ${ORIG} ${INCL_ARGS} - RESULT_VARIABLE res_var) - - IF(NOT "${res_var}" STREQUAL "0") - message(FATAL_ERROR "archetype preprocessing failed for ${HIN}, res_var = '${res_var}'") - ENDIF() - + PREPROCESS_CYCLUS_FILE_( ${CYCPP} ${HIN} ${PREPROCESSOR} ${HFLAG} ${ORIG} ${INCL_ARGS}) ENDIF(NOT EXISTS ${HOUT}) + SET( + "${lib_root}_H" + "${${lib_root}_H}" "${HOUT}" + CACHE INTERNAL "Agent header" FORCE + ) + + # make custom Makefile target for CC and H file together for joint dependency ADD_CUSTOM_COMMAND( OUTPUT ${CCOUT} OUTPUT ${HOUT} @@ -165,12 +162,9 @@ MACRO(USE_CYCLUS lib_root src_root) COMMENT "Executing ${CYCPP} ${HIN} ${PREPROCESSOR} ${HFLAG} ${ORIG} ${INCL_ARGS}" COMMENT "Executing ${CYCPP} ${CCIN} ${PREPROCESSOR} ${CCFLAG} ${ORIG} ${INCL_ARGS}" ) - SET( - "${lib_root}_H" - "${${lib_root}_H}" "${HOUT}" - CACHE INTERNAL "Agent header" FORCE - ) + SET(DEP_LIST ${DEP_LIST} ${CCOUT} ${HOUT}) ELSE(EXISTS "${HIN}") + # Make custom Makefile target for CC file alone if ho header ADD_CUSTOM_COMMAND( OUTPUT ${CCOUT} COMMAND ${CYCPP} ${CCIN} ${PREPROCESSOR} ${CCFLAG} ${ORIG} ${INCL_ARGS} @@ -179,6 +173,7 @@ MACRO(USE_CYCLUS lib_root src_root) DEPENDS ${CYCLUS_CUSTOM_HEADERS} COMMENT "Executing ${CYCPP} ${CCIN} ${PREPROCESSOR} ${CCFLAG} ${ORIG} ${INCL_ARGS}" ) + SET(DEP_LIST ${CCOUT}) ENDIF(EXISTS "${HIN}") # add tests @@ -187,11 +182,20 @@ MACRO(USE_CYCLUS lib_root src_root) SET(HTIN "${CMAKE_CURRENT_SOURCE_DIR}/${src_root}_tests.h") SET(HTOUT "${BUILD_DIR}/${src_root}_tests.h") SET(CMD "cp") + IF(EXISTS "${CCTIN}") + MESSAGE(STATUS "Copying ${CCTIN} to ${CCTOUT}.") + EXECUTE_PROCESS(COMMAND ${CMD} ${CCTIN} ${CCTOUT}) + SET("${lib_root}_TEST_CC" "${${lib_root}_TEST_CC}" "${CCTOUT}" + CACHE INTERNAL "Agent test source" FORCE) + IF(EXISTS "${HTIN}") # install test headers MESSAGE(STATUS "Copying ${HTIN} to ${HTOUT}.") EXECUTE_PROCESS(COMMAND ${CMD} ${HTIN} ${HTOUT}) + SET("${lib_root}_TEST_H" "${${lib_root}_TEST_H}" "${HTOUT}" + CACHE INTERNAL "Agent test headers" FORCE) + # Create custom Makefile target for CC and H file together for joint dependency ADD_CUSTOM_COMMAND( OUTPUT ${HTOUT} OUTPUT ${CCTOUT} @@ -205,27 +209,34 @@ MACRO(USE_CYCLUS lib_root src_root) COMMENT "Copying ${HTIN} to ${HTOUT}." COMMENT "Copying ${CCTIN} to ${CCTOUT}." ) - SET("${lib_root}_TEST_H" "${${lib_root}_TEST_H}" "${HTOUT}" - CACHE INTERNAL "Agent test headers" FORCE) + SET(DEP_LIST ${DEP_LIST} ${HTOUT} ${CCTOUT}) + ELSE(EXISTS "${HTIN}") + # create custom Makefile target for CC only + ADD_CUSTOM_COMMAND( + OUTPUT ${CCTOUT} + COMMAND ${CMD} ${CCTIN} ${CCTOUT} + DEPENDS ${CCTIN} + DEPENDS ${CCIN} + DEPENDS ${CYCLUS_CUSTOM_HEADERS} + COMMENT "Copying ${CCTIN} to ${CCTOUT}." + ) + SET(DEP_LIST ${DEP_LIST} ${CCTOUT}) ENDIF(EXISTS "${HTIN}") - - # install test impl - MESSAGE(STATUS "Copying ${CCTIN} to ${CCTOUT}.") - EXECUTE_PROCESS(COMMAND ${CMD} ${CCTIN} ${CCTOUT}) - ADD_CUSTOM_COMMAND( - OUTPUT ${CCTOUT} - COMMAND ${CMD} ${CCTIN} ${CCTOUT} - DEPENDS ${CCTIN} - DEPENDS ${CCIN} - DEPENDS ${CYCLUS_CUSTOM_HEADERS} - COMMENT "Copying ${CCTIN} to ${CCTOUT}." 
- ) - SET("${lib_root}_TEST_CC" "${${lib_root}_TEST_CC}" "${CCTOUT}" - CACHE INTERNAL "Agent test source" FORCE) ENDIF(EXISTS "${CCTIN}") + ADD_CUSTOM_TARGET(${lib_root}-target ${DEP_LIST}) MESSAGE(STATUS "Finished construction of build files for agent: ${src_root}") ENDMACRO() +MACRO(PREPROCESS_CYCLUS_FILE_ cycpp filein preproc flags orig incl_args) + MESSAGE(STATUS "Executing ${cycpp} ${filein} ${preproc} ${flags} ${orig} ${incl_args}") + EXECUTE_PROCESS(COMMAND ${cycpp} ${filein} ${PREPROCESSOR} ${flags} + ${orig} ${incl_args} RESULT_VARIABLE res_var) + IF(NOT "${res_var}" STREQUAL "0") + message(FATAL_ERROR "${cycpp} failed on '${filein}' with exit code '${res_var}'") +ENDIF() + +ENDMACRO() + MACRO(INSTALL_CYCLUS_STANDALONE lib_root src_root lib_dir) # clear variables before starting SET("${lib_root}_H" "" CACHE INTERNAL "Agent header" FORCE) From 462481ea8088971a0dd588c55f3bd07f6100a195 Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Tue, 14 Mar 2023 15:28:05 -0500 Subject: [PATCH 11/82] introduce custom target to pass along dependency on custom command --- cmake/UseCyclus.cmake | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/cmake/UseCyclus.cmake b/cmake/UseCyclus.cmake index 99d830120a..6ef69e8620 100644 --- a/cmake/UseCyclus.cmake +++ b/cmake/UseCyclus.cmake @@ -223,7 +223,7 @@ MACRO(USE_CYCLUS lib_root src_root) SET(DEP_LIST ${DEP_LIST} ${CCTOUT}) ENDIF(EXISTS "${HTIN}") ENDIF(EXISTS "${CCTIN}") - ADD_CUSTOM_TARGET(${lib_root}-target ${DEP_LIST}) + MESSAGE(STATUS "Finished construction of build files for agent: ${src_root}") ENDMACRO() @@ -233,7 +233,7 @@ MACRO(PREPROCESS_CYCLUS_FILE_ cycpp filein preproc flags orig incl_args) ${orig} ${incl_args} RESULT_VARIABLE res_var) IF(NOT "${res_var}" STREQUAL "0") message(FATAL_ERROR "${cycpp} failed on '${filein}' with exit code '${res_var}'") -ENDIF() + ENDIF() ENDMACRO() @@ -277,11 +277,14 @@ MACRO(INSTALL_CYCLUS_MODULE lib_root lib_dir) ENDMACRO() MACRO(INSTALL_AGENT_LIB_ lib_name lib_src lib_h inst_dir) + + ADD_CUSTOM_TARGET(${lib_name}-sources DEPENDS ${lib_src} ${lib_h}) + # add lib ADD_LIBRARY(${lib_name} ${lib_src}) TARGET_LINK_LIBRARIES(${lib_name} dl ${LIBS}) SET(CYCLUS_LIBRARIES ${CYCLUS_LIBRARIES} ${lib_root}) - ADD_DEPENDENCIES(${lib_name} ${lib_src} ${lib_h}) + ADD_DEPENDENCIES(${lib_name} ${lib_name}-sources) set(dest_ "lib/cyclus") string(COMPARE EQUAL "${inst_dir}" "" is_empty) From a9a7f28ce955aca3278ae2c1a121d77eb6d7ec1a Mon Sep 17 00:00:00 2001 From: "Paul P.H. 
Wilson" Date: Tue, 14 Mar 2023 17:04:29 -0500 Subject: [PATCH 12/82] update for XML++ 4.0 --- src/infile_tree.cc | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/src/infile_tree.cc b/src/infile_tree.cc index 252f1ce616..164f9aa94a 100644 --- a/src/infile_tree.cc +++ b/src/infile_tree.cc @@ -53,10 +53,9 @@ int InfileTree::NMatches(std::string query) { // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - std::string InfileTree::GetString(std::string query, int index) { using xmlpp::Node; - using xmlpp::NodeSet; using xmlpp::TextNode; using xmlpp::Element; - const NodeSet nodeset = current_node_->find(query); + const Node::NodeSet nodeset = current_node_->find(query); if (nodeset.empty()) { throw KeyError("Could not find a node by the name: " + query); } @@ -73,7 +72,7 @@ std::string InfileTree::GetString(std::string query, int index) { " is not an Element node."); } - const Node::NodeList nodelist = element->get_children(); + const Node::const_NodeList nodelist = element->get_children(); if (nodelist.size() != 1) { throw ValueError("Element node " + element->get_name() + " has more content than expected."); @@ -92,7 +91,6 @@ std::string InfileTree::GetString(std::string query, int index) { // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - std::string InfileTree::GetElementName(int index) { using xmlpp::Node; - using xmlpp::NodeSet; std::vector elements; const Node::NodeList nodelist = current_node_->get_children(); Node::NodeList::const_iterator it; @@ -112,8 +110,7 @@ std::string InfileTree::GetElementName(int index) { // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - InfileTree* InfileTree::GetEngineFromQuery(std::string query, int index) { using xmlpp::Node; - using xmlpp::NodeSet; - const NodeSet nodeset = current_node_->find(query); + const Node::NodeSet nodeset = current_node_->find(query); if (nodeset.size() < index + 1) { throw ValueError("Index exceeds number of nodes in query: " + query); From 6651f34b2c7aa8bff4ed9f6ec5138f0751ca001b Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Tue, 14 Mar 2023 17:19:10 -0500 Subject: [PATCH 13/82] update for xml++ 4.0 --- src/xml_parser.cc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/xml_parser.cc b/src/xml_parser.cc index 02a1949bbb..a41706826c 100644 --- a/src/xml_parser.cc +++ b/src/xml_parser.cc @@ -58,8 +58,8 @@ xmlpp::Document* XMLParser::Document() { // but which is unvalidatable. The web is truly cobbled together // by a race of evil gnomes. xmlpp::Element* root = doc->get_root_node(); - xmlpp::NodeSet have_base = root->find("//*[@xml:base]"); - xmlpp::NodeSet::iterator it = have_base.begin(); + xmlpp::Node::NodeSet have_base = root->find("//*[@xml:base]"); + xmlpp::Node::NodeSet::iterator it = have_base.begin(); for (; it != have_base.end(); ++it) { reinterpret_cast(*it)->remove_attribute("base", "xml"); } From fe4364c1a0cd871c24377ffc7db6e023d65ec34b Mon Sep 17 00:00:00 2001 From: "Paul P.H. 
Wilson" Date: Tue, 14 Mar 2023 17:19:29 -0500 Subject: [PATCH 14/82] require C++ 14 --- CMakeLists.txt | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 5653db17d9..25e972af22 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -15,11 +15,11 @@ PROJECT(CYCLUS) # check for and enable c++11 support (required for cyclus) INCLUDE(CheckCXXCompilerFlag) -CHECK_CXX_COMPILER_FLAG("-std=c++11" COMPILER_SUPPORTS_CXX11) +CHECK_CXX_COMPILER_FLAG("-std=c++14" COMPILER_SUPPORTS_CXX14) IF(COMPILER_SUPPORTS_CXX11) - SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11") + SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++14") ELSE() - MESSAGE(STATUS "The compiler ${CMAKE_CXX_COMPILER} has no C++11 support. Please use a different C++ compiler.") + MESSAGE(STATUS "The compiler ${CMAKE_CXX_COMPILER} has no C++14 support. Please use a different C++ compiler.") ENDIF() # enable assembly @@ -176,9 +176,10 @@ IF(NOT CYCLUS_DOC_ONLY) # Then use pkg-config for locate specific package pkg_check_modules(LIBXMLXX REQUIRED IMPORTED_TARGET libxml++-4.0) - SET(LIBS ${LIBS} ${LibXML++_LIBRARIES}) - message("-- LibXML++ Include Dir: ${LibXML++_INCLUDE_DIR}") - + SET(LIBS ${LIBS} ${LIBXMLXX_LIBRARIES}) + message("-- LibXML++ Include Dir: ${LIBXMLXX_INCLUDE_DIRS}") + message("-- LibXML++ Librarires: ${LIBXMLXX_LIBRARIES}") + # find lapack and link to it FIND_PACKAGE(LAPACK REQUIRED) set(LIBS ${LIBS} ${LAPACK_LIBRARIES}) @@ -360,9 +361,9 @@ IF(NOT CYCLUS_DOC_ONLY) # ${Glibmm_INCLUDE_DIRS} breaks Ubuntu 12.04 set(inc_dirs "${LIBXML2_INCLUDE_DIR}" - "${LibXML++_INCLUDE_DIR}" + "${LIBXMLXX_INCLUDE_DIRS}" "${Glibmm_INCLUDE_DIRS}" - "${LibXML++Config_INCLUDE_DIR}" + "${LIBXMLXXConfig_INCLUDE_DIR}" "${SQLITE3_INCLUDE_DIR}" "${HDF5_INCLUDE_DIRS}" "${Boost_INCLUDE_DIR}" @@ -486,7 +487,7 @@ IF(NOT CYCLUS_DOC_ONLY) SET(CPACK_DEBIAN_PACKAGE_DEPENDS "${CPACK_DEBIAN_PACKAGE_DEPENDS}, libboost-program-options-dev (>= 1.54.0)") SET(CPACK_DEBIAN_PACKAGE_DEPENDS "${CPACK_DEBIAN_PACKAGE_DEPENDS}, libboost-serialization-dev (>= 1.54.0)") SET(CPACK_DEBIAN_PACKAGE_DEPENDS "${CPACK_DEBIAN_PACKAGE_DEPENDS}, libhdf5-dev (>= 1.8.11)") - SET(CPACK_DEBIAN_PACKAGE_DEPENDS "${CPACK_DEBIAN_PACKAGE_DEPENDS}, libxml++2.6-dev (>= 2.36.0)") + SET(CPACK_DEBIAN_PACKAGE_DEPENDS "${CPACK_DEBIAN_PACKAGE_DEPENDS}, libxml++4.0-dev (>= 4.0.0)") SET(CPACK_DEBIAN_PACKAGE_DEPENDS "${CPACK_DEBIAN_PACKAGE_DEPENDS}, coinor-libcbc-dev (>= 2.8.7)") MESSAGE("CPACK_DEBIAN_PACKAGE_DEPENDS ${CPACK_DEBIAN_PACKAGE_DEPENDS}") From 5f2d4da84ebd38d7d59069bc2f2cebc30bda2f10 Mon Sep 17 00:00:00 2001 From: "Paul P.H. 
Wilson" Date: Wed, 15 Mar 2023 09:15:26 -0500 Subject: [PATCH 15/82] fix case on SQLite3 variables --- CMakeLists.txt | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 25e972af22..b867e5a98d 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -189,7 +189,8 @@ IF(NOT CYCLUS_DOC_ONLY) # Find Sqlite3 FIND_PACKAGE(SQLite3 REQUIRED) - SET(LIBS ${LIBS} ${SQLITE3_LIBRARIES}) + SET(LIBS ${LIBS} ${SQLite3_LIBRARIES}) + MESSAGE("-- Found SQLite3 Libraries: ${SQLite3_LIBRARIES}") # Find HDF5 FIND_PACKAGE(HDF5 REQUIRED COMPONENTS HL) @@ -364,7 +365,7 @@ IF(NOT CYCLUS_DOC_ONLY) "${LIBXMLXX_INCLUDE_DIRS}" "${Glibmm_INCLUDE_DIRS}" "${LIBXMLXXConfig_INCLUDE_DIR}" - "${SQLITE3_INCLUDE_DIR}" + "${SQLite3_INCLUDE_DIR}" "${HDF5_INCLUDE_DIRS}" "${Boost_INCLUDE_DIR}" "${COIN_INCLUDE_DIRS}") From 0112f7bab1b0a7072e8dcfd0b6f46fc2636bd20e Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Wed, 15 Mar 2023 09:42:22 -0500 Subject: [PATCH 16/82] add numpy includ dirs --- CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index b867e5a98d..fec9daa5e7 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -376,7 +376,7 @@ IF(NOT CYCLUS_DOC_ONLY) if(Cython_FOUND) - INCLUDE_DIRECTORIES(AFTER "${PYTHON_INCLUDE_DIRS}") + INCLUDE_DIRECTORIES(AFTER "${PYTHON_INCLUDE_DIRS}" "${_Python3_NumPy_INCLUDE_DIR}") endif(Cython_FOUND) # set core version, one way or the other IF(NOT "${CORE_VERSION}" STREQUAL "") From fb817abd75690c290073f12d75774bbfb5df17ec Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Wed, 15 Mar 2023 13:31:53 -0500 Subject: [PATCH 17/82] revert to original test building pattern (with failures) --- cli/CMakeLists.txt | 11 ++++++++--- tests/CMakeLists.txt | 4 ---- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/cli/CMakeLists.txt b/cli/CMakeLists.txt index 0c24116159..ccc83f14f2 100644 --- a/cli/CMakeLists.txt +++ b/cli/CMakeLists.txt @@ -42,8 +42,11 @@ INSTALL( ############################################################################################## ################################## begin cyclus unit tests ################################### ############################################################################################## -TARGET_SOURCES(cyclus_unit_tests - PRIVATE cyclus_unit_test_driver.cc + +ADD_EXECUTABLE( + cyclus_unit_tests + ${CYCLUS_CORE_TEST_SOURCE} + cyclus_unit_test_driver.cc ) TARGET_LINK_LIBRARIES( @@ -54,7 +57,9 @@ TARGET_LINK_LIBRARIES( agents ${CYCLUS_TEST_LIBRARIES} ) - + MESSAGE(STATUS "(cli) CYCLUS_TEST_LIBRARIES: ${CYCLUS_TEST_LIBRARIES}") + MESSAGE(STATUS "(cli) CYCLUS_AGENT_TEST_LIBRARIES: ${CYCLUS_AGENT_TEST_LIBRARIES}") + INSTALL( TARGETS cyclus_unit_tests RUNTIME DESTINATION bin diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index 3f5fc8c83a..c442a767a0 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -110,10 +110,6 @@ INSTALL(FILES ${test_agents} # read tests after building the driver, and add them to ctest set(tgt "cyclus_unit_tests") -ADD_EXECUTABLE( - ${tgt} - ${CYCLUS_CORE_TEST_SOURCE} - ) add_custom_command(TARGET ${tgt} POST_BUILD From eca26f574c6dfb9297fde512dada699bb03c5db7 Mon Sep 17 00:00:00 2001 From: "Paul P.H. 
Wilson" Date: Wed, 15 Mar 2023 17:05:28 -0500 Subject: [PATCH 18/82] revert to custom FindCOIN methods to ensure all libraries are included in linking --- CMakeLists.txt | 4 +- cmake/FindCOIN.cmake | 194 +++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 196 insertions(+), 2 deletions(-) create mode 100644 cmake/FindCOIN.cmake diff --git a/CMakeLists.txt b/CMakeLists.txt index fec9daa5e7..0214326431 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -230,7 +230,7 @@ IF(NOT CYCLUS_DOC_ONLY) MESSAGE("-- Boost Serialization location: ${Boost_SERIALIZATION_LIBRARY}") # find coin and link to it - pkg_check_modules(COIN REQUIRED IMPORTED_TARGET cbc) + FIND_PACKAGE(COIN) MESSAGE("-- COIN Version: ${COIN_VERSION}") set(LIBS ${LIBS} ${COIN_LIBRARIES}) if(NOT COIN_FOUND) @@ -402,7 +402,7 @@ IF(NOT CYCLUS_DOC_ONLY) if(Cython_FOUND) ADD_SUBDIRECTORY("${CYCLUS_PYSOURCE_DIR}") endif(Cython_FOUND) - + ############################################################################################## ####################################### end includes ######################################### ############################################################################################## diff --git a/cmake/FindCOIN.cmake b/cmake/FindCOIN.cmake new file mode 100644 index 0000000000..d57aa7469d --- /dev/null +++ b/cmake/FindCOIN.cmake @@ -0,0 +1,194 @@ +# agented after FindCOIN.cmake in the lemon project + +# Written by: Matthew Gidden +# Last updated: 12/17/12 +# Last updated: 16/08/12 + +# This cmake file is designed to locate coin-related +# dependencies on a filesystem. +# +# If the coin dependencies were installed in a non-standard +# directory, e.g. installed from source perhaps, then +# the user can provide a prefix hint via the COIN_ROOT_DIR +# cmake variable: +# $> cmake ../src -DCOIN_ROOT_DIR=/path/to/coin/root + +# To date, this install requires the following dev versions +# of the respective coin libraries: +# * coinor-libCbc-dev +# * coinor-libClp-dev +# * coinor-libcoinutils-dev +# * coinor-libOsi-dev + +# +# Get the root directory hint if provided +# +IF(NOT DEFINED COIN_ROOT_DIR) + SET(COIN_ROOT_DIR "$ENV{COIN_ROOT_DIR}") + MESSAGE("COIN Root Dir from ENV: ${COIN_INCLUDE_DIR}") +ENDIF(NOT DEFINED COIN_ROOT_DIR) +MESSAGE(STATUS "COIN_ROOT_DIR hint is : ${COIN_ROOT_DIR}") + +# +# Find the path based on a required header file +# +MESSAGE(STATUS "Coin multiple library dependency status:") +FIND_PATH(COIN_INCLUDE_DIR coin/CbcModel.hpp + HINTS "${COIN_INCLUDE_DIR}" + HINTS "${COIN_ROOT_DIR}/include" + ${DEPS_INCLUDE_HINTS} + HINTS /usr/ + HINTS /usr/include/ + HINTS /usr/local/ + HINTS /usr/local/include/ + HINTS /usr/coin/ + HINTS /usr/coin-Cbc/ + HINTS /usr/local/coin/ + HINTS /usr/local/coin-Cbc/ + ) +set(COIN_INCLUDE_DIR ${COIN_INCLUDE_DIR}/coin) +MESSAGE("\tCOIN Include Dir: ${COIN_INCLUDE_DIR}") + +# +# Find all coin library dependencies +# +FIND_LIBRARY(COIN_CBC_LIBRARY + NAMES Cbc libCbc #libCbc.so.0 + HINTS "${COIN_INCLUDE_DIR}/../../lib/" + HINTS "${COIN_ROOT_DIR}/lib" + ${DEPS_LIB_HINTS} + ) +MESSAGE("\tCOIN CBC: ${COIN_CBC_LIBRARY}") + +FIND_LIBRARY(COIN_CBC_SOLVER_LIBRARY + NAMES CbcSolver libCbcSolver libCbcSolver.so.0 + HINTS ${COIN_INCLUDE_DIR}/../../lib/ + HINTS "${COIN_ROOT_DIR}/lib" + ${DEPS_LIB_HINTS} + ) +MESSAGE("\tCOIN CBC solver: ${COIN_CBC_SOLVER_LIBRARY}") + +FIND_LIBRARY(COIN_CGL_LIBRARY + NAMES Cgl libCgl libCgl.so.0 + HINTS ${COIN_INCLUDE_DIR}/../../lib/ + HINTS "${COIN_ROOT_DIR}/lib" + ${DEPS_LIB_HINTS} + ) +MESSAGE("\tCOIN CGL: ${COIN_CGL_LIBRARY}") + 
+FIND_LIBRARY(COIN_CLP_SOLVER_LIBRARY + NAMES ClpSolver libClpSolver libClpSolver.so.0 + HINTS ${COIN_INCLUDE_DIR}/../../lib/ + HINTS "${COIN_ROOT_DIR}/lib" + ${DEPS_LIB_HINTS} + ) +MESSAGE("\tCOIN CLP SOLVER: ${COIN_CLP_SOLVER_LIBRARY}") + +FIND_LIBRARY(COIN_CLP_LIBRARY + NAMES Clp libClp libClp.so.0 + HINTS ${COIN_INCLUDE_DIR}/../../lib/ + HINTS "${COIN_ROOT_DIR}/lib" + ${DEPS_LIB_HINTS} + ) +MESSAGE("\tCOIN CLP: ${COIN_CLP_LIBRARY}") + +FIND_LIBRARY(COIN_COIN_UTILS_LIBRARY + NAMES CoinUtils libCoinUtils libCoinUtils.so.0 + HINTS ${COIN_INCLUDE_DIR}/../../lib/ + HINTS "${COIN_ROOT_DIR}/lib" + ${DEPS_LIB_HINTS} + ) +MESSAGE("\tCOIN UTILS: ${COIN_COIN_UTILS_LIBRARY}") + +FIND_LIBRARY(COIN_OSI_LIBRARY + NAMES Osi libOsi libOsi.so.0 + HINTS ${COIN_INCLUDE_DIR}/../../lib/ + HINTS "${COIN_ROOT_DIR}/lib" + ${DEPS_LIB_HINTS} + ) +MESSAGE("\tCOIN OSI: ${COIN_OSI_LIBRARY}") + +FIND_LIBRARY(COIN_OSI_CBC_LIBRARY + NAMES OsiCbc libOsiCbc #libOsiCbc.so.0 + HINTS ${COIN_INCLUDE_DIR}/../../lib/ + HINTS "${COIN_ROOT_DIR}/lib" + ${DEPS_LIB_HINTS} + ) +MESSAGE("\tCOIN OSI CBC: ${COIN_OSI_CBC_LIBRARY}") + +FIND_LIBRARY(COIN_OSI_CLP_LIBRARY + NAMES OsiClp libOsiClp libOsiClp.so.0 + HINTS ${COIN_INCLUDE_DIR}/../../lib/ + HINTS "${COIN_ROOT_DIR}/lib" + ${DEPS_LIB_HINTS} + ) +MESSAGE("\tCOIN OSI CLP: ${COIN_OSI_CLP_LIBRARY}") + +FIND_LIBRARY(COIN_ZLIB_LIBRARY + NAMES z libz libz.so.1 + HINTS ${COIN_ROOT_DIR}/lib + HINTS "${COIN_ROOT_DIR}/lib" + ${DEPS_LIB_HINTS} + ) +MESSAGE("\tCOIN ZLIB: ${COIN_ZLIB_LIBRARY}") + +FIND_LIBRARY(COIN_BZ2_LIBRARY + NAMES bz2 libz2 libz2.so.1 + HINTS ${COIN_ROOT_DIR}/lib + HINTS "${COIN_ROOT_DIR}/lib" + ${DEPS_LIB_HINTS} + ) +MESSAGE("\tCOIN BZ2: ${COIN_BZ2_LIBRARY}") + +INCLUDE(FindPackageHandleStandardArgs) +FIND_PACKAGE_HANDLE_STANDARD_ARGS(COIN DEFAULT_MSG + COIN_INCLUDE_DIR + COIN_CBC_LIBRARY + COIN_CBC_SOLVER_LIBRARY + COIN_CGL_LIBRARY + COIN_CLP_LIBRARY + COIN_COIN_UTILS_LIBRARY + COIN_OSI_LIBRARY + # Not required by cbc v2.5, but required by later versions + COIN_OSI_CBC_LIBRARY + COIN_OSI_CLP_LIBRARY + COIN_ZLIB_LIBRARY + COIN_BZ2_LIBRARY + ) + +# +# Set all required cmake variables based on our findings +# +IF(COIN_FOUND) + SET(COIN_INCLUDE_DIRS ${COIN_INCLUDE_DIR}) + #SET(COIN_CLP_LIBRARIES "${COIN_CLP_LIBRARY};${COIN_COIN_UTILS_LIBRARY};${COIN_ZLIB_LIBRARY};${COIN_CLP_SOLVER_LIBRARY}") + SET(COIN_CLP_LIBRARIES "${COIN_CLP_LIBRARY};${COIN_COIN_UTILS_LIBRARY};${COIN_ZLIB_LIBRARY}") + IF (COIN_CLP_SOLVER_LIBRARY) + SET(COIN_CLP_LIBRARIES "${COIN_CLP_LIBRARIES};${COIN_CLP_SOLVER_LIBRARY}") + ENDIF (COIN_CLP_SOLVER_LIBRARY) + IF(COIN_ZLIB_LIBRARY) + SET(COIN_CLP_LIBRARIES "${COIN_CLP_LIBRARIES};${COIN_ZLIB_LIBRARY}") + ENDIF(COIN_ZLIB_LIBRARY) + IF(COIN_BZ2_LIBRARY) + SET(COIN_CLP_LIBRARIES "${COIN_CLP_LIBRARIES};${COIN_BZ2_LIBRARY}") + ENDIF(COIN_BZ2_LIBRARY) + # Not required by cbc v2.5, but required by later versions in which case, + # the lower line should be commented out and this line used + #SET(COIN_CBC_LIBRARIES "${COIN_CBC_LIBRARY};${COIN_CBC_SOLVER_LIBRARY};${COIN_CGL_LIBRARY};${COIN_OSI_LIBRARY};${COIN_OSI_CBC_LIBRARY};${COIN_OSI_CLP_LIBRARY};${COIN_CLP_LIBRARIES}") + SET(COIN_CBC_LIBRARIES "${COIN_OSI_LIBRARY};${COIN_CLP_LIBRARIES};${COIN_OSI_CLP_LIBRARY};${COIN_CGL_LIBRARY};${COIN_CBC_SOLVER_LIBRARY};${COIN_CBC_LIBRARY};${COIN_OSI_CBC_LIBRARY}") + #SET(COIN_CBC_LIBRARIES "${COIN_CBC_LIBRARY};${COIN_CBC_SOLVER_LIBRARY};${COIN_CGL_LIBRARY};${COIN_OSI_LIBRARY};${COIN_OSI_CLP_LIBRARY};${COIN_CLP_LIBRARIES}") + SET(COIN_LIBRARIES "${COIN_CBC_LIBRARIES}") + + 
FILE(STRINGS "${COIN_INCLUDE_DIR}/CbcConfig.h" COIN_VERSION REGEX "define CBC_VERSION .*") + STRING(REPLACE "#define CBC_VERSION " "" COIN_VERSION "${COIN_VERSION}") + STRING(REPLACE "\"" "" COIN_VERSION "${COIN_VERSION}") +ENDIF(COIN_FOUND) + +# +# Report a synopsis of our findings +# +IF(COIN_INCLUDE_DIRS) + MESSAGE(STATUS "Found COIN Include Dirs: ${COIN_INCLUDE_DIRS}") +ELSE(COIN_INCLUDE_DIRS) + MESSAGE(STATUS "COIN Include Dirs NOT FOUND") +ENDIF(COIN_INCLUDE_DIRS) From d6a6cf04cab6199683726e892b132ec3282d0387 Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Wed, 15 Mar 2023 17:05:44 -0500 Subject: [PATCH 19/82] convert custom command to custom target --- tests/CMakeLists.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index c442a767a0..1e54fddd4d 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -111,13 +111,13 @@ INSTALL(FILES ${test_agents} # read tests after building the driver, and add them to ctest set(tgt "cyclus_unit_tests") -add_custom_command(TARGET ${tgt} - POST_BUILD +add_custom_target( + ${tgt}-post-build + DEPENDS ${tgt} COMMAND python "${CMAKE_CURRENT_SOURCE_DIR}/generate_gtest_macros.py" "--executable=${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${tgt}" "--output=${CYCLUS_BINARY_DIR}/CTestTestfile.cmake" COMMENT "adding tests from ${tgt}" - DEPENDS VERBATIM ) From de32221e26d94b03a8c978b4afc27dec56ed1ffe Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Fri, 17 Mar 2023 09:17:09 -0500 Subject: [PATCH 20/82] replace nose.tools asserts except yields --- tests/cycpp_tests.py | 38 ++++++------- tests/hdf5_back_gen_tests.py | 2 - tests/test_abi.py | 5 +- tests/test_bear_deploy.py | 5 +- tests/test_cycluslib.py | 32 +++++------ tests/test_dynamic_modules.py | 3 +- tests/test_env.py | 3 +- tests/test_error.py | 1 - tests/test_include_recipe.py | 1 - tests/test_inventories.py | 5 +- tests/test_logger.py | 1 - tests/test_lotka_volterra.py | 18 +++--- tests/test_main.py | 1 - tests/test_memback.py | 4 -- tests/test_minimal_cycle.py | 3 - tests/test_null_sink.py | 2 - tests/test_record_time_series.py | 3 +- tests/test_smbchk.py | 15 ++--- tests/test_source_to_sink.py | 2 - tests/test_stubs.py | 1 - tests/test_toaster.py | 95 ++++++++++++++++---------------- tests/test_trivial_cycle.py | 2 - tests/tests_hdf5_back_gen.py | 40 +++++++------- tests/tools.py | 5 +- 24 files changed, 121 insertions(+), 166 deletions(-) diff --git a/tests/cycpp_tests.py b/tests/cycpp_tests.py index b201294d28..6595daa670 100644 --- a/tests/cycpp_tests.py +++ b/tests/cycpp_tests.py @@ -6,8 +6,6 @@ from collections import OrderedDict from subprocess import Popen, PIPE, STDOUT -import nose -from nose.tools import assert_equal, assert_true, assert_false, assert_raises cycdir = os.path.dirname(os.path.dirname(__file__)) sys.path.insert(0, os.path.join(cycdir, 'cli')) @@ -28,7 +26,7 @@ import cycpp -assert_equal.__self__.maxDiff = None +# assert_equal.__self__.maxDiff = None !! this may not be necessary for pytest??? 
class MockMachine(object): def __init__(self): @@ -373,7 +371,7 @@ def test_clonefilter(): impl = f.impl() exp_impl = " MyFactory* m = new MyFactory(context());\n" + \ " m->InitFrom(this);\n return m;\n" - assert_equal(exp_impl, impl) + assert exp_impl == impl def test_ifcfilter(): """Test InitFromCopyFilter""" @@ -415,26 +413,26 @@ def test_ifdbfilter(): def test_aliasing_schemafilter(): impl = setup_alias(SchemaFilter) - assert_true('foo_alias' in impl) - assert_false('bar_var' in impl) - assert_true('foo_map_alias' in impl) - assert_false('bar_map_var' in impl) + assert('foo_alias' in impl) + assert not('bar_var' in impl) + assert('foo_map_alias' in impl) + assert not('bar_map_var' in impl) def test_aliasing_snapshotfilter(): impl = setup_alias(SnapshotFilter) - assert_false('foo_alias' in impl) - assert_true('bar_var' in impl) - assert_false('foo_map_alias' in impl) - assert_true('bar_map_var' in impl) + assert not('foo_alias' in impl) + assert('bar_var' in impl) + assert not('foo_map_alias' in impl) + assert('bar_map_var' in impl) def test_aliasing_infiletodbfilter(): impl = setup_alias(InfileToDbFilter) - assert_true('foo_alias' in impl) - assert_true('bar_var' in impl) - assert_true('foo_map_alias' in impl) - assert_true('bar_map_var' in impl) + assert('foo_alias' in impl) + assert('bar_var' in impl) + assert('foo_map_alias' in impl) + assert('bar_map_var' in impl) def setup_alias(filt): m = MockAliasCodeGenMachine() @@ -1050,7 +1048,7 @@ def test_internal_schema(): msg = 'case {0} failed\n ---- got ----\n {1}\n ---- want ----\n {2}'.format(i + 1, impl.replace('\n', '\n '), want.replace('\n', '\n ')) if want != impl: pprint.pprint(impl) - assert_true(False, msg) + assert(False, msg) def test_internal_infiletodb(): # the expected output (i.e. 'want':...) is set as 'throw' if the @@ -1132,7 +1130,7 @@ def test_internal_infiletodb(): except: haderr = True msg = 'case {0} failed: expected raised exception, got none.' 
- assert_true(haderr, msg) + assert(haderr, msg) continue else: impl = f.impl() @@ -1140,7 +1138,7 @@ def test_internal_infiletodb(): msg = 'case {0} failed\n ---- got ----\n {1}\n ---- want ----\n {2}'.format(i + 1, impl.replace('\n', '\n '), want.replace('\n', '\n ')) if want != impl: pprint.pprint(impl) - assert_true(False, msg) + assert(False, msg) def test_nuclide_uitype(): m = MockCodeGenMachine() @@ -1188,7 +1186,7 @@ def test_integration(): else: cmd = 'cycpp.py {} -o {} --cpp-path `which g++`'.format(inf, outf.name) p = Popen(cmd, shell=True, stdin=PIPE, stdout=PIPE, stderr=STDOUT, close_fds=True) - assert_equal('', p.stdout.read().decode()) + assert '' == p.stdout.read().decode() if __name__ == "__main__": nose.runmodule() diff --git a/tests/hdf5_back_gen_tests.py b/tests/hdf5_back_gen_tests.py index a79bd6da55..114ca93221 100644 --- a/tests/hdf5_back_gen_tests.py +++ b/tests/hdf5_back_gen_tests.py @@ -4,8 +4,6 @@ import subprocess from random import randint import uuid -import nose -from nose.plugins.skip import SkipTest import pandas as pd from pandas.util.testing import assert_frame_equal diff --git a/tests/test_abi.py b/tests/test_abi.py index 3eb100fc17..e80d703242 100644 --- a/tests/test_abi.py +++ b/tests/test_abi.py @@ -2,9 +2,6 @@ import sys import subprocess -import nose -from nose.tools import assert_equal, assert_true, assert_false, assert_raises -from nose.plugins.skip import SkipTest cycdir = os.path.dirname(os.path.dirname(__file__)) reldir = os.path.join(cycdir, 'release') @@ -31,7 +28,7 @@ def test_abi_stability(): args = '--update -t HEAD --no-save --check'.split() with tools.indir(reldir): obs = smbchk.main(args=args) - assert_true(obs) + assert(obs) if __name__ == "__main__": diff --git a/tests/test_bear_deploy.py b/tests/test_bear_deploy.py index d797014263..8756f97dd2 100644 --- a/tests/test_bear_deploy.py +++ b/tests/test_bear_deploy.py @@ -3,7 +3,6 @@ import json import subprocess -from nose.tools import assert_in, assert_true, assert_greater_equal inputfile = { 'simulation': { @@ -55,11 +54,11 @@ def test_bear_deploy(): s = subprocess.check_output(['cyclus', '-o', 'bears.h5', 'bears.json'], universal_newlines=True, env=env) # test that the institution deploys a BearStore - assert_in("New fac: BearStore", s) + assert ("New fac: BearStore" in s) # test that the first agents exist with right minimum production. 
agents = re.compile('Agent \d+ 8\.0') all_agents = set(agents.findall(s)) - assert_greater_equal(len(all_agents), 9) + assert (len(all_agents) >= 9) if os.path.exists('bears.json'): os.remove('bears.json') if os.path.exists('bears.h5'): diff --git a/tests/test_cycluslib.py b/tests/test_cycluslib.py index 9f649f950d..dfa853f194 100644 --- a/tests/test_cycluslib.py +++ b/tests/test_cycluslib.py @@ -3,8 +3,6 @@ import subprocess from functools import wraps -import nose -from nose.tools import assert_equal, assert_less from cyclus import lib @@ -16,7 +14,7 @@ @dbtest def test_name(db, fname, backend): obs = db.name - assert_equal(fname, obs) + assert fname == obs @dbtest @@ -25,23 +23,23 @@ def test_simid(db, fname, backend): simid = df['SimId'] exp = simid[0] for obs in simid: - assert_equal(exp, obs) + assert exp == obs @dbtest def test_conds_ae(db, fname, backend): obs = db.query("AgentEntry", [('Kind', '==', 'Region')]) - assert_equal(1, len(obs)) - assert_equal('Region', obs['Kind'][0]) - assert_equal(':agents:NullRegion', obs['Spec'][0]) + assert 1 == len(obs) + assert 'Region' == obs['Kind'][0] + assert ':agents:NullRegion' == obs['Spec'][0] @dbtest def test_conds_comp(db, fname, backend): conds = [('NucId', '==', 922350000), ('MassFrac', '<=', 0.0072)] df = db.query("Compositions", conds) - assert_less(0, len(df)) + assert (0 < len(df)) for row in df['MassFrac']: - assert_less(row, 0.00720000001) + assert (row < 0.00720000001) @dbtest @@ -51,23 +49,23 @@ def test_dbopen(db, fname, backend): @dbtest def test_schema(db, fname, backend): schema = db.schema("AgentEntry") - assert_equal(8, len(schema)) + assert 8 == len(schema) cols = ["SimId", "AgentId", "Kind", "Spec", "Prototype", "ParentId", "Lifetime", "EnterTime"] dbs = [7, 1, 5, 5, 5, 1, 1, 1] for i, ci in enumerate(schema): - assert_equal("AgentEntry", ci.table) - assert_equal(cols[i], ci.col) - assert_equal(dbs[i], ci.dbtype) - assert_equal(i, ci.index) - assert_equal(1, len(ci.shape)) - assert_equal(-1, ci.shape) + assert "AgentEntry" == ci.table + assert cols[i] == ci.col + assert dbs[i] == ci.dbtype + assert i == ci.index + assert 1 == len(ci.shape) + assert -1 == ci.shape def test_position(): p1 = lib.Position(42.65, 28.6) p2 = lib.Position(42.65, 28.6) d = p1.distance(p2) - assert_equal(0.0, d) + assert 0.0 == d if __name__ == "__main__": diff --git a/tests/test_dynamic_modules.py b/tests/test_dynamic_modules.py index 3ae18d86f1..41dbac5c0f 100644 --- a/tests/test_dynamic_modules.py +++ b/tests/test_dynamic_modules.py @@ -1,6 +1,5 @@ from __future__ import print_function, unicode_literals -from nose.tools import assert_equal, assert_true from cyclus import lib @@ -33,4 +32,4 @@ def test_dm_exists(): print(spec) dm = lib.DynamicModule() obs = dm.exists(spec) - assert_true(obs) + assert(obs) diff --git a/tests/test_env.py b/tests/test_env.py index 47db0fbd32..fdc4d78ac2 100644 --- a/tests/test_env.py +++ b/tests/test_env.py @@ -1,7 +1,6 @@ """Tests Python wrapping on Env object.""" from __future__ import print_function, unicode_literals -from nose.tools import assert_equal, assert_true from cyclus import lib @@ -32,4 +31,4 @@ def test_nuc_data(): def test_allow_milps(): - assert_true(isinstance(ENV.allow_milps, bool)) + assert(isinstance(ENV.allow_milps, bool)) diff --git a/tests/test_error.py b/tests/test_error.py index faa40e3612..c0023b1877 100644 --- a/tests/test_error.py +++ b/tests/test_error.py @@ -1,7 +1,6 @@ """Tests Python wrapping for cyclus errors.""" from __future__ import print_function, unicode_literals -from 
nose.tools import assert_equal, assert_true from cyclus import lib diff --git a/tests/test_include_recipe.py b/tests/test_include_recipe.py index ccfb7cd1bd..f266732923 100644 --- a/tests/test_include_recipe.py +++ b/tests/test_include_recipe.py @@ -1,6 +1,5 @@ #! /usr/bin/env python -from nose.tools import assert_false, assert_true, assert_equal import os import tables import numpy as np diff --git a/tests/test_inventories.py b/tests/test_inventories.py index 339242f649..61c826abce 100644 --- a/tests/test_inventories.py +++ b/tests/test_inventories.py @@ -1,6 +1,5 @@ #! /usr/bin/env python -from nose.tools import assert_false, assert_true, assert_equal from numpy.testing import assert_array_equal import os import tables @@ -30,7 +29,7 @@ def test_inventories_false(): return # don't execute further commands # Ensure tables do not exist - assert_false, tables_exist(outfile, path) + assert not tables_exist(outfile, path) if tables_exist(outfile, path): print('Inventory table exists despite false entry in control section of input file.') outfile.close() @@ -55,7 +54,7 @@ def test_inventories(): return # don't execute further commands # Check if inventory tables exist - assert_true, tables_exist(outfile, path) + assert tables_exist(outfile, path) if not tables_exist(outfile, path): print('Inventory table does not exist despite true entry in control section of input file.') outfile.close() diff --git a/tests/test_logger.py b/tests/test_logger.py index 9c0709432d..d88b9257c1 100644 --- a/tests/test_logger.py +++ b/tests/test_logger.py @@ -1,7 +1,6 @@ """Tests Python wrapping on Env object.""" from __future__ import print_function, unicode_literals -from nose.tools import assert_equal, assert_true from cyclus import lib diff --git a/tests/test_lotka_volterra.py b/tests/test_lotka_volterra.py index 8769ca8097..60d8589508 100644 --- a/tests/test_lotka_volterra.py +++ b/tests/test_lotka_volterra.py @@ -1,8 +1,6 @@ #! 
/usr/bin/env python from __future__ import print_function -import nose -from nose.tools import assert_equal, assert_almost_equal, assert_true from numpy.testing import assert_array_equal import os import tables @@ -36,7 +34,7 @@ def test_predator_only(): rtn = holdsrtn[0] print("Confirming valid Cyclus execution.") - assert_equal(rtn, 0) + assert rtn == 0 series = agent_time_series([prey, pred]) print("Prey:", series[prey], "Predators:", series[pred]) @@ -44,8 +42,8 @@ def test_predator_only(): prey_exp = [0 for n in range(10)] pred_exp = [1, 1] + [0 for n in range(8)] - assert_equal(series[prey], prey_exp) - assert_equal(series[pred], pred_exp) + assert series[prey] == prey_exp + assert series[pred] == pred_exp clean_outs() @@ -64,7 +62,7 @@ def test_prey_only(): rtn = holdsrtn[0] print("Confirming valid Cyclus execution.") - assert_equal(rtn, 0) + assert rtn == 0 series = agent_time_series([prey, pred]) print("Prey:", series[prey], "Predators:", series[pred]) @@ -72,8 +70,8 @@ def test_prey_only(): prey_exp = [2**n for n in range(10)] pred_exp = [0 for n in range(10)] - assert_equal(series[prey], prey_exp) - assert_equal(series[pred], pred_exp) + assert series[prey] == prey_exp + assert series[pred] == pred_exp clean_outs() @@ -101,7 +99,7 @@ def test_lotka_volterra(): rtn = holdsrtn[0] print("Confirming valid Cyclus execution.") - assert_equal(rtn, 0) + assert rtn == 0 series = agent_time_series([prey, pred]) print("Prey:", series[prey], "Predators:", series[pred]) @@ -110,7 +108,7 @@ def test_lotka_volterra(): pred_max = series[pred].index(max(series[pred])) print("t_prey_max:", prey_max, "t_pred_max:", pred_max) - assert_true(prey_max < pred_max) + assert(prey_max < pred_max) clean_outs() diff --git a/tests/test_main.py b/tests/test_main.py index c5e0781f58..10ee096e6f 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -1,7 +1,6 @@ """Tests Python main CLI for Cyclus.""" from __future__ import print_function, unicode_literals -from nose.tools import assert_equal, assert_true from cyclus.main import main diff --git a/tests/test_memback.py b/tests/test_memback.py index 103c7c5379..adf63e6760 100644 --- a/tests/test_memback.py +++ b/tests/test_memback.py @@ -1,10 +1,6 @@ """Tests Python memory backend.""" from __future__ import print_function, unicode_literals -import nose -from nose.tools import assert_equal, assert_true, assert_is_instance, \ - assert_in, assert_false, assert_not_in, assert_is, assert_is_not - from cyclus import memback from cyclus import lib from cyclus import typesystem as ts diff --git a/tests/test_minimal_cycle.py b/tests/test_minimal_cycle.py index c6431e1d72..6b0bd64f7f 100644 --- a/tests/test_minimal_cycle.py +++ b/tests/test_minimal_cycle.py @@ -1,9 +1,6 @@ #! 
/usr/bin/env python -import nose -from nose.tools import assert_equal, assert_almost_equal, assert_true -from nose.plugins.skip import SkipTest from numpy.testing import assert_array_equal import os diff --git a/tests/test_null_sink.py b/tests/test_null_sink.py index 0e122d6ec0..ed51739b07 100644 --- a/tests/test_null_sink.py +++ b/tests/test_null_sink.py @@ -3,8 +3,6 @@ import os import sqlite3 -from nose.tools import assert_false, assert_true, assert_equal -from nose.plugins.skip import SkipTest import numpy as np diff --git a/tests/test_record_time_series.py b/tests/test_record_time_series.py index 8cf4fc2a0c..08fe2c82d2 100644 --- a/tests/test_record_time_series.py +++ b/tests/test_record_time_series.py @@ -2,7 +2,6 @@ import subprocess import os -from nose.tools import assert_in inputfile = {'simulation': {'archetypes': {'spec': [ {'lib': 'dummy_power_recorder', 'name': 'DummyPowerRecorder'}, @@ -29,7 +28,7 @@ def test_record_time_series(): env = dict(os.environ) env['PYTHONPATH'] = "." s = subprocess.check_output(['cyclus', '-o', 'dummy.h5', 'dummy.json'], universal_newlines=True, env=env) - assert_in("The power is 10", s) + assert ("The power is 10" in s) if os.path.exists('dummy.json'): os.remove('dummy.json') if os.path.exists('dummy.h5'): diff --git a/tests/test_smbchk.py b/tests/test_smbchk.py index 631221470d..25ecc8ef23 100644 --- a/tests/test_smbchk.py +++ b/tests/test_smbchk.py @@ -4,11 +4,6 @@ import sys from argparse import Namespace -import nose -from nose.plugins.deprecated import DeprecatedTest -from nose.tools import assert_equal, assert_true, assert_false, assert_raises, \ - assert_in - from tools import skip_then_continue cycdir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) @@ -27,7 +22,7 @@ def test_load(): return ns = Namespace(filename=os.path.join(reldir, 'symbols.json')) db = smbchk.load(ns) - assert_true(isinstance(db, list)) + assert(isinstance(db, list)) def test_nm(): raise DeprecatedTest("symbol test has been deprecated") @@ -39,7 +34,7 @@ def test_nm(): return ns = Namespace(prefix=blddir) syms = smbchk.nm(ns) - assert_in("cyclus::Agent::Agent(cyclus::Context*)", syms) + assert ("cyclus::Agent::Agent(cyclus::Context*)" in syms) def test_diff(): raise DeprecatedTest("symbol test has been deprecated") @@ -51,7 +46,7 @@ def test_diff(): "cyclus::Agent::~Agent()"], 'version': 'Y', 'date': 'y.y.y'},] obs = smbchk.diff(db, 0, 1) - assert_true(len(obs) > 0) + assert(len(obs) > 0) def test_check(): raise DeprecatedTest("symbol test has been deprecated") @@ -64,13 +59,13 @@ def test_check(): "cyclus::Agent::~Agent()"], 'version': 'Y', 'date': 'y.y.y'},] obs = smbchk.check(db) - assert_true(obs) + assert(obs) # removes from API db.append({'symbols': ["cyclus::Agent::~Agent()"], 'version': 'Z', 'date': 'z.z.z'}) obs = smbchk.check(db) - assert_false(obs) + assert not(obs) if __name__ == "__main__": nose.runmodule() diff --git a/tests/test_source_to_sink.py b/tests/test_source_to_sink.py index 6bdf692727..e2e743a7be 100644 --- a/tests/test_source_to_sink.py +++ b/tests/test_source_to_sink.py @@ -1,7 +1,5 @@ #! 
/usr/bin/env python -from nose.tools import assert_equal, assert_true -from nose.plugins.skip import SkipTest from numpy.testing import assert_array_equal import os diff --git a/tests/test_stubs.py b/tests/test_stubs.py index 059ef049a7..d591194a48 100644 --- a/tests/test_stubs.py +++ b/tests/test_stubs.py @@ -6,7 +6,6 @@ import io from contextlib import contextmanager -from nose.plugins.skip import SkipTest @contextmanager def tmpdir(): diff --git a/tests/test_toaster.py b/tests/test_toaster.py index 433448c12c..2fc6c8b9ae 100644 --- a/tests/test_toaster.py +++ b/tests/test_toaster.py @@ -3,7 +3,6 @@ import json import subprocess -from nose.tools import assert_in, assert_true, assert_equals DEFAULTFILE = {'simulation': {'archetypes': {'spec': [ @@ -34,9 +33,9 @@ def test_pyagent_defaults(): s = subprocess.check_output(['cyclus', '-o', 'default-toaster.h5', 'default-toaster.json'], universal_newlines=True, env=env) # tests default value set on facility - assert_in("Bread is rye", s) + assert ("Bread is rye" in s) # tests that value in input file overrides default value - assert_in("Toast level is 10", s) + assert ("Toast level is 10" in s) if os.path.exists('default-toaster.json'): os.remove('default-toaster.json') if os.path.exists('default-toaster.h5'): @@ -75,22 +74,22 @@ def test_pyagent_attr_toasters(): info = s.split('=== Start AttrToaster ===\n')[-1].split('\n=== End AttrToaster ===')[0] info = json.loads(info) # test ids - assert_true(isinstance(info['id'], int)) - assert_true(isinstance(info['parent'], int)) - assert_true(info['parent'] != info['id']) - assert_true(0 <= info['parent'] < 100) - assert_true(info['id'] == info['hash']) + assert(isinstance(info['id'], int)) + assert(isinstance(info['parent'], int)) + assert(info['parent'] != info['id']) + assert(0 <= info['parent'] < 100) + assert(info['id'] == info['hash']) # test attrs - assert_true(info['str'].startswith('Facility_HappyToaster')) - assert_equals(info['kind'], 'Facility') - assert_equals(info['spec'], ':toaster:AttrToaster') - assert_equals(info['version'], '0.0.0') - assert_equals(info['prototype'], 'HappyToaster') - assert_equals(info['enter_time'], 0) - assert_equals(info['lifetime'], -1) - assert_equals(info['exit_time'], -1) - assert_equals(len(info['childern']), 0) - assert_true(len(info['annotations']) > 0) + assert(info['str'].startswith('Facility_HappyToaster')) + assert (info['kind'] == 'Facility') + assert (info['spec'] == ':toaster:AttrToaster') + assert (info['version'] == '0.0.0') + assert (info['prototype'] == 'HappyToaster') + assert (info['enter_time'] == 0) + assert (info['lifetime'] == -1) + assert (info['exit_time'] == -1) + assert (len(info['childern']) == 0) + assert(len(info['annotations']) > 0) # clean up if os.path.exists(iname): os.remove(iname) @@ -131,22 +130,22 @@ def test_pyagent_attr_toaster_company(): info = info.split('\n=== End AttrToasterCompany ===')[0] info = json.loads(info) # test ids - assert_true(isinstance(info['id'], int)) - assert_true(isinstance(info['parent'], int)) - assert_true(info['parent'] != info['id']) - assert_true(0 <= info['parent'] <= 100) - assert_true(info['id'] == info['hash']) + assert(isinstance(info['id'], int)) + assert(isinstance(info['parent'], int)) + assert(info['parent'] != info['id']) + assert(0 <= info['parent'] <= 100) + assert(info['id'] == info['hash']) # test attrs - assert_true(info['str'].startswith('Inst_FamousToastersLLC')) - assert_equals(info['kind'], 'Inst') - assert_equals(info['spec'], ':toaster:AttrToasterCompany') - 
assert_equals(info['version'], '0.0.0') - assert_equals(info['prototype'], 'FamousToastersLLC') - assert_equals(info['enter_time'], 0) - assert_equals(info['lifetime'], -1) - assert_equals(info['exit_time'], -1) - assert_equals(len(info['childern']), 1) - assert_true(len(info['annotations']) > 0) + assert(info['str'].startswith('Inst_FamousToastersLLC')) + assert (info['kind'] == 'Inst') + assert (info['spec'] == ':toaster:AttrToasterCompany') + assert (info['version'] == '0.0.0') + assert (info['prototype'] == 'FamousToastersLLC') + assert (info['enter_time'] == 0) + assert (info['lifetime'] == -1) + assert (info['exit_time'] == -1) + assert (len(info['childern']) == 1) + assert(len(info['annotations']) > 0) # clean up if os.path.exists(iname): os.remove(iname) @@ -187,22 +186,22 @@ def test_pyagent_attr_toaster_region(): info = info.split('\n=== End AttrToasterRegion ===')[0] info = json.loads(info) # test ids - assert_true(isinstance(info['id'], int)) - assert_true(isinstance(info['parent'], int)) - assert_true(info['parent'] != info['id']) - assert_equals(info['parent'], -1) - assert_true(info['id'] == info['hash']) + assert(isinstance(info['id'], int)) + assert(isinstance(info['parent'], int)) + assert(info['parent'] != info['id']) + assert (info['parent'] == -1) + assert(info['id'] == info['hash']) # test attrs - assert_true(info['str'].startswith('Region_RepublicOfToast')) - assert_equals(info['kind'], 'Region') - assert_equals(info['spec'], ':toaster:AttrToasterRegion') - assert_equals(info['version'], '0.0.0') - assert_equals(info['prototype'], 'RepublicOfToast') - assert_equals(info['enter_time'], 0) - assert_equals(info['lifetime'], -1) - assert_equals(info['exit_time'], -1) - assert_equals(len(info['childern']), 1) - assert_true(len(info['annotations']) > 0) + assert(info['str'].startswith('Region_RepublicOfToast')) + assert (info['kind'] == 'Region') + assert (info['spec'] == ':toaster:AttrToasterRegion') + assert (info['version'] == '0.0.0') + assert (info['prototype'] == 'RepublicOfToast') + assert (info['enter_time'] == 0) + assert (info['lifetime'] == -1) + assert (info['exit_time'] == -1) + assert (len(info['childern']) == 1) + assert(len(info['annotations']) > 0) # clean up if os.path.exists(iname): os.remove(iname) diff --git a/tests/test_trivial_cycle.py b/tests/test_trivial_cycle.py index a8c49d0e4e..8d32a60000 100644 --- a/tests/test_trivial_cycle.py +++ b/tests/test_trivial_cycle.py @@ -1,7 +1,5 @@ #! 
/usr/bin/env python -from nose.tools import assert_equal, assert_almost_equal, assert_true -from nose.plugins.skip import SkipTest from numpy.testing import assert_array_equal import os import sqlite3 diff --git a/tests/tests_hdf5_back_gen.py b/tests/tests_hdf5_back_gen.py index 95545992aa..6dfa5fac83 100644 --- a/tests/tests_hdf5_back_gen.py +++ b/tests/tests_hdf5_back_gen.py @@ -2,8 +2,6 @@ import sys import pprint -import nose -from nose.tools import assert_equal, assert_true, assert_false, assert_raises cycdir = os.path.dirname(os.path.dirname(__file__)) sys.path.insert(0, os.path.join(cycdir, 'src')) @@ -21,19 +19,19 @@ def test_node_pretty(): exp = "Node()" n = Node() obs = PRETTY.visit(n) - assert_equal(exp, obs) + assert exp == obs def test_var_pretty(): exp = "Var(\n name='x'\n)" n = Var(name="x") obs = PRETTY.visit(n) - assert_equal(exp, obs) + assert exp == obs def test_decl_pretty(): exp = "Decl(\n type='x',\n name='y'\n)" n = Decl(type="x", name="y") obs = PRETTY.visit(n) - assert_equal(exp, obs) + assert exp == obs #cppgen tests @@ -41,43 +39,43 @@ def test_cppgen_var(): exp = "x" n = Var(name="x") obs = CPPGEN.visit(n) - assert_equal(exp, obs) + assert exp == obs def test_cppgen_type(): exp = "std::string" n = Type(cpp="std::string") obs = CPPGEN.visit(n) - assert_equal(exp, obs) + assert exp == obs def test_cppgen_decl(): exp = "std::string s" n = Decl(type=Type(cpp="std::string"), name=Var(name="s")) obs = CPPGEN.visit(n) - assert_equal(exp, obs) + assert exp == obs def test_cppgen_assign(): exp = "x=y" n = Assign(target=Var(name="x"), value=Var(name="y")) obs = CPPGEN.visit(n) - assert_equal(exp, obs) + assert exp == obs def test_cppgen_binop(): exp = "x+y" n = BinOp(x=Var(name="x"), op="+", y=Var(name="y")) obs = CPPGEN.visit(n) - assert_equal(exp, obs) + assert exp == obs def test_cppgen_leftunaryop(): exp = "++x" n = LeftUnaryOp(op="++", name=Var(name="x")) obs = CPPGEN.visit(n) - assert_equal(exp, obs) + assert exp == obs def test_cppgen_rightunaryop(): exp = "x++" n = RightUnaryOp(name=Var(name="x"), op="++") obs = CPPGEN.visit(n) - assert_equal(exp, obs) + assert exp == obs def test_cppgen_if(): exp = """ @@ -93,7 +91,7 @@ def test_cppgen_if(): elifs=[(BinOp(x=Var(name="x"), op=">", y=Var(name="y")), [ExprStmt(child=Assign(target=Var(name="x"), value=Raw(code="2")))])],\ el=ExprStmt(child=Assign(target=Var(name="x"), value=Raw(code="3")))) obs = CPPGEN.visit(n) - assert_equal(exp, obs) + assert exp == obs def test_cppgen_for(): exp = """ @@ -110,7 +108,7 @@ def test_cppgen_for(): ExprStmt(child=Assign(target=RightUnaryOp(name=Var(name="c"), op="[i]"), value=BinOp(x=Var(name="a"), op="+", y=Var(name="b"))))]) obs = CPPGEN.visit(n) - assert_equal(exp, obs) + assert exp == obs def test_cppgen_funccall(): exp = """ @@ -119,7 +117,7 @@ def test_cppgen_funccall(): args=[Var(name="a"), Var(name="b")],\ targs=[Type(cpp="std::string"), Var(name="STRING")]) obs = CPPGEN.visit(n) - assert_equal(exp, obs) + assert exp == obs def test_cppgen_case(): exp = """case 3: { @@ -130,7 +128,7 @@ def test_cppgen_case(): body=[ExprStmt(child=RightUnaryOp(name=Var(name="b"), op="++")), Raw(code="break;")]) obs = CPPGEN.visit(n) - assert_equal(exp, obs) + assert exp == obs def test_cppgen_block(): exp = """int x=5; @@ -150,7 +148,7 @@ def test_cppgen_block(): op="+", y=Var(name="y"))))])]) obs = CPPGEN.visit(n) - assert_equal(exp, obs) + assert exp == obs def test_cppgen_funcdef(): exp = """template<> @@ -176,7 +174,7 @@ def test_cppgen_funcdef(): y=Var(name="y"))))], tspecial=True) obs = 
CPPGEN.visit(n) - assert_equal(exp, obs) + assert exp == obs #test various node structures @@ -196,9 +194,9 @@ def test_get_item_size(): db="VECTOR_DOUBLE", canon=("VECTOR","DOUBLE")), [0,1]) - assert_equal(exp1, obs1) - assert_equal(exp2, obs2) - assert_equal(exp3, obs3) + assert exp1 == obs1 + assert exp2 == obs2 + assert exp3 == obs3 diff --git a/tests/tools.py b/tests/tools.py index fe6417cb5b..f53852eaa1 100644 --- a/tests/tools.py +++ b/tests/tools.py @@ -11,9 +11,6 @@ from contextlib import contextmanager from functools import wraps -from nose.tools import assert_true, assert_equal -from nose.plugins.attrib import attr -from nose.plugins.skip import SkipTest from cyclus import lib as libcyclus @@ -55,7 +52,7 @@ def check_cmd(args, cwd, holdsrtn): print("STDOUT + STDERR:\n\n" + f.read().decode()) f.close() holdsrtn[0] = rtn - assert_equal(rtn, 0) + assert rtn == 0 def cyclus_has_coin(): From 71026ac7a98e94c0fc92bfe84decb69b58c73071 Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Fri, 17 Mar 2023 10:07:00 -0500 Subject: [PATCH 21/82] address more complex nose to pytest cases --- test_foo.py | 5 + tests/cycpp_tests.py | 184 +++++++++++++++++----------------- tests/hdf5_back_gen_tests.py | 2 +- tests/test_dynamic_modules.py | 24 ++--- tests/test_env.py | 22 ++-- tests/test_error.py | 8 +- tests/test_logger.py | 12 +-- tests/test_main.py | 2 +- tests/test_memback.py | 104 +++++++++---------- tests/test_minimal_cycle.py | 30 +++--- tests/test_null_sink.py | 6 +- tests/test_source_to_sink.py | 14 +-- tests/test_trivial_cycle.py | 14 +-- 13 files changed, 220 insertions(+), 207 deletions(-) create mode 100644 test_foo.py diff --git a/test_foo.py b/test_foo.py new file mode 100644 index 0000000000..2e22adac16 --- /dev/null +++ b/test_foo.py @@ -0,0 +1,5 @@ +def test_foo(): + print("got here A") + assert 1 == 2 + assert 3 == 3 + print("got here B") \ No newline at end of file diff --git a/tests/cycpp_tests.py b/tests/cycpp_tests.py index 6595daa670..4e8f873d30 100644 --- a/tests/cycpp_tests.py +++ b/tests/cycpp_tests.py @@ -3,6 +3,8 @@ import uuid import pprint import tempfile +import pytest + from collections import OrderedDict from subprocess import Popen, PIPE, STDOUT @@ -63,117 +65,118 @@ def test_tffilt(): """Test TypedefFilter""" m = MockMachine() f = TypedefFilter(m) - yield assert_false, f.isvalid("mis typedef kind") - yield assert_false, f.isvalid("typedef kind") + assert not f.isvalid("mis typedef kind") + assert not f.isvalid("typedef kind") statement, sep = "typedef double db", ";" - yield assert_true, f.isvalid(statement) + assert f.isvalid(statement) f.transform(statement, sep) - yield assert_equal, len(m.aliases), 1 - yield assert_equal, (0, "double", "db"), m.aliases.pop() + assert len(m.aliases) == 1 + assert (0, "double", "db") == m.aliases.pop() statement, sep = "typedef struct {int a; int b;} S, *pS", ";" - yield assert_true, f.isvalid(statement) + assert f.isvalid(statement) def test_uffilt(): """Test UsingFilter""" m = MockMachine() f = UsingFilter(m) - yield assert_false, f.isvalid("not using namespace") + assert not f.isvalid("not using namespace") statement, sep = "using std::cout", "" - yield assert_true, f.isvalid(statement) + assert f.isvalid(statement) f.transform(statement, sep) - yield assert_equal, len(m.aliases), 1 - yield assert_equal, (0, "std::cout", "cout"), m.aliases.pop() + assert len(m.aliases) == 1 + assert (0, "std::cout", "cout") == m.aliases.pop() def test_nsfilt(): """Test NamespaceFilter""" m = MockMachine() f = NamespaceFilter(m) - yield 
assert_false, f.isvalid("olzhas is not a namespace") + assert not f.isvalid("olzhas is not a namespace") # anonymous namespaces statement, sep = " namespace ", "{" - yield assert_true, f.isvalid(statement) + assert f.isvalid(statement) f.transform(statement, sep) - yield assert_equal, len(m.namespaces), 1 - yield assert_equal, m.namespaces[0], (0, '') + assert len(m.namespaces) == 1 + assert m.namespaces[0] == (0, '') f.revert(statement, sep) - yield assert_equal, len(m.namespaces), 0 + assert len(m.namespaces) == 0 # nymous namespace statement, sep = "namespace gorgus ", "{" - yield assert_true, f.isvalid(statement) + assert f.isvalid(statement) f.transform(statement, sep) - yield assert_equal, len(m.namespaces), 1 - yield assert_equal, m.namespaces[0], (0, "gorgus") + assert len(m.namespaces) == 1 + assert m.namespaces[0] == (0, "gorgus") f.revert(statement, sep) - yield assert_equal, len(m.namespaces), 0 + assert len(m.namespaces) == 0 def test_unfilt(): """Test UsingNamespaseFilter""" m = MockMachine() f = UsingNamespaceFilter(m) - yield assert_false, f.isvalid("using cycamore") + assert not f.isvalid("using cycamore") statement, sep = "using namespace std", "" - yield assert_true, f.isvalid(statement) + assert f.isvalid(statement) f.transform(statement, sep) - yield assert_equal, len(m.using_namespaces), 1 - yield assert_equal, (0, "std"), m.using_namespaces.pop() + assert len(m.using_namespaces) == 1 + assert (0, "std") == m.using_namespaces.pop() f.revert(statement, sep) - yield assert_equal, len(m.using_namespaces), 0 + assert len(m.using_namespaces) == 0 def test_nafilter(): """Test NamespaceAliasFilter""" m = MockMachine() f = NamespaceAliasFilter(m) - yield assert_false, f.isvalid("namespace cycamore") + assert not f.isvalid("namespace cycamore") statement, sep = "namespace cycamore = cm", "" - yield assert_true, f.isvalid(statement) + assert f.isvalid(statement) f.transform(statement, sep) - yield assert_equal, len(m.aliases), 1 - yield assert_equal, (0, "cm", "cycamore"), m.aliases.pop() + assert len(m.aliases) == 1 + assert (0, "cm", "cycamore") == m.aliases.pop() def test_cfilter(): """Test ClassFilter""" m = MockMachine() f = ClassFilter(m) - yield assert_false, f.isvalid("class ") + assert not f.isvalid("class ") statement, sep = "class Cyclus", "" - yield assert_true, f.isvalid(statement) + assert f.isvalid(statement) f.transform(statement, sep) - yield assert_equal, len(m.classes), 1 - yield assert_equal, m.classes[0], (0, "Cyclus") - yield assert_equal, m.access[tuple(m.classes)], "private" + assert len(m.classes) == 1 + assert m.classes[0] == (0, "Cyclus") + assert m.access[tuple(m.classes)] == "private" f.revert(statement, sep) - yield assert_equal, len(m.classes), 0 + assert len(m.classes) == 0 def test_afilter(): """Test AccessFilter""" m = MockMachine() f = AccessFilter(m) - yield assert_false, f.isvalid("new private") + assert not f.isvalid("new private") statement, sep = "private:", "" - yield assert_true, f.isvalid(statement) + assert f.isvalid(statement) f.transform(statement, sep) - yield assert_equal, m.access[tuple(m.classes)], "private" + assert m.access[tuple(m.classes)] == "private" def test_synerror(): """Test PragmaCyclusErrorFilter""" m = MockMachine() f = PragmaCyclusErrorFilter(m) - yield assert_false, f.isvalid("#pragma cyclus var {}") - yield assert_false, f.isvalid("#pragma cyclus") + assert not f.isvalid("#pragma cyclus var {}") + assert not f.isvalid("#pragma cyclus") - yield assert_true, f.isvalid('#pragma cyclus nooooo') + assert 
f.isvalid('#pragma cyclus nooooo') statement, sep = "#pragma cyclus var{}", "\n" - yield assert_true, f.isvalid(statement) - yield assert_raises, SyntaxError, f.transform, statement, sep + assert f.isvalid(statement) + with pytest.raises(SyntaxError): + f.transform(statement, sep) # # pass 2 Filters @@ -182,26 +185,26 @@ def test_vdecorfilter(): """Test VarDecorationFilter""" m = MockMachine() f = VarDecorationFilter(m) - yield assert_false, f.isvalid("#pragma cyclus") + assert not f.isvalid("#pragma cyclus") statement, sep = "#pragma cyclus var {'name': 'James Bond'} ", "\n" - yield assert_true, f.isvalid(statement) + assert f.isvalid(statement) f.transform(statement, sep) - yield assert_equal, m.var_annotations, {'name': 'James Bond'} + assert m.var_annotations == {'name': 'James Bond'} def test_vdeclarfilter(): """Test VarDeclarationFilter""" m = MockMachine() f = VarDeclarationFilter(m) - yield assert_false, f.isvalid("one ") + assert not f.isvalid("one ") statement, sep = "one two", "\n" - yield assert_true, f.isvalid(statement) + assert f.isvalid(statement) m.classes = [(0, "trader")] m.access = {"trader": "public"} # m.var_annotations = {'name': 'James Bond'} f.transform(statement, sep) - yield assert_equal, m.var_annotations, None + assert m.var_annotations == None def test_vdeclarfilter_canonize_alias(): m = MockMachine() @@ -229,7 +232,7 @@ def test_vdeclarfilter_canonize_alias(): ] for exp, t, name, alias in cases: obs = f.canonize_alias(t, name, alias=alias) - yield assert_equal, exp, obs + assert exp == obs def test_vdeclarfilter_canonize_ui(): m = MockMachine() @@ -254,33 +257,33 @@ def test_vdeclarfilter_canonize_ui(): ] for exp, t, name, x in cases: obs = f.canonize_uilabel(t, name, uilabel=x) - yield assert_equal, exp, obs + assert exp == obs obs = f.canonize_tooltip(t, name, tooltip=x) - yield assert_equal, exp, obs + assert exp == obs def test_execfilter(): """Test ExecFilter""" m = MockMachine() f = ExecFilter(m) - yield assert_false, f.isvalid("#pragma cyclus") + assert not f.isvalid("#pragma cyclus") statement, sep = "#pragma cyclus exec x = 42", "\n" - yield assert_true, f.isvalid(statement) + assert f.isvalid(statement) f.transform(statement, sep) # What are the other possible tests - yield assert_equal, m.execns["x"], 42 + assert m.execns["x"] == 42 def test_notefilter(): """Test NoteDecorationFilter""" m = MockMachine() f = NoteDecorationFilter(m) - yield assert_false, f.isvalid("#pragma cyclus") + assert not f.isvalid("#pragma cyclus") statement, sep = "#pragma cyclus note {'doc': 'string'} ", "\n" - yield assert_true, f.isvalid(statement) + assert f.isvalid(statement) f.transform(statement, sep) - yield assert_equal, m.context['']['doc'], 'string' + assert m.context['']['doc'] == 'string' class MockAliasCodeGenMachine(object): """Mock machine for testing aliasing on pass 3 filters""" @@ -326,7 +329,7 @@ def test_canon_type(): ] for t, exp in cases: obs = sa.canonize_type(t) - yield assert_equal, exp, obs + assert exp == obs # # pass 3 Filters @@ -381,7 +384,7 @@ def test_ifcfilter(): args = f.methodargs() exp_args = "MyFactory* m" - yield assert_equal, exp_args, args + assert exp_args == args impl = f.impl() exp_impl = (' int rawcycpp_shape_y[1] = {42};\n' @@ -389,7 +392,7 @@ def test_ifcfilter(): 'rawcycpp_shape_y + 1);\n' " x = m->x;\n" "y=m -> y;\n") - yield assert_equal, exp_impl, impl + assert exp_impl == impl def test_ifdbfilter(): """Test InitFromDbFilter""" @@ -399,7 +402,7 @@ def test_ifdbfilter(): args = f.methodargs() exp_args = 
"cyclus::QueryableBackend* b" - yield assert_equal, exp_args, args + assert exp_args == args impl = f.impl() exp_impl = (' int rawcycpp_shape_y[1] = {42};\n' @@ -408,7 +411,7 @@ def test_ifdbfilter(): ' cyclus::QueryResult qr = b->Query("Info", NULL);\n' ' x = qr.GetVal("x");\n' "WAKKA JAWAKA") - yield assert_equal, exp_impl, impl + assert exp_impl == impl def test_aliasing_schemafilter(): impl = setup_alias(SchemaFilter) @@ -448,14 +451,14 @@ def test_itdbfilter(): args = f.methodargs() exp_args = "cyclus::InfileTree* tree, cyclus::DbInit di" - yield assert_equal, exp_args, args + assert exp_args == args impl = f.impl() exp_impl = ( ' int rawcycpp_shape_y[1] = {42};\n cycpp_shape_y = std::vector(rawcycpp_shape_y, rawcycpp_shape_y + 1);\n cyclus::InfileTree* sub = tree->SubTree("config/*");\n int i;\n int n;\n {\n int x_val = cyclus::Query(sub, "x");\n x = x_val;\n }\nTHINGFISH\n di.NewDatum("Info")\n ->AddVal("x", x)\nABSOLUTELY FREE\n ->Record();\n' ) - yield assert_equal, exp_impl, impl + assert exp_impl == impl def test_itdbfilter_val(): """Test InfileToDbFilter._val() Defaults""" @@ -606,7 +609,7 @@ def test_itdbfilter_val(): for t, v, name, uitype, exp in cases: obs = f._val(t, val=v, name=name, uitype=uitype) - yield assert_equal, exp, obs + assert exp == obs def test_schemafilter(): """Test SchemaFilter""" @@ -616,7 +619,7 @@ def test_schemafilter(): args = f.methodargs() exp_args = "" - yield assert_equal, exp_args, args + assert exp_args == args impl = f.impl() exp_impl = (' return ""\n' @@ -626,12 +629,12 @@ def test_schemafilter(): ' " \\n"\n' ' " FREAK OUT\\n"\n' ' "\\n";\n') - yield assert_equal, exp_impl, impl + assert exp_impl == impl # schema type tests - yield assert_equal, 'string', f._type('std::string') - yield assert_equal, 'boolean', f._type('bool') - yield assert_equal, 'token', f._type('std::string', 'token') + assert 'string' == f._type('std::string') + assert 'boolean' == f._type('bool') + assert 'token' == f._type('std::string', 'token') m.context = {"MyFactory": OrderedDict([('vars', OrderedDict([ ('x', {'type': ('std::map', 'int', 'double')}), @@ -656,7 +659,7 @@ def test_schemafilter(): ' " \\n"\n' ' " \\n"\n' ' "\\n";\n') - yield assert_equal, exp_impl, impl + assert exp_impl == impl def test_annotationsfilter(): """Test SchemaFilter""" @@ -666,10 +669,10 @@ def test_annotationsfilter(): args = f.methodargs() exp_args = "" - yield assert_equal, exp_args, args + assert exp_args == args impl = f.impl() - yield assert_true, isinstance(impl, str) + assert isinstance(impl, str) def test_snapshotfilter(): """Test SnapshotFilter""" @@ -679,14 +682,14 @@ def test_snapshotfilter(): args = f.methodargs() exp_args = 'cyclus::DbInit di' - yield assert_equal, exp_args, args + assert exp_args == args impl = f.impl() exp_impl = (' di.NewDatum("Info")\n' ' ->AddVal("x", x)\n' 'JUST ANOTHER BAND FROM LA\n' ' ->Record();\n') - yield assert_equal, exp_impl, impl + assert exp_impl == impl def test_sshinvfilter(): """Test SnapshotInvFilter""" @@ -696,19 +699,19 @@ def test_sshinvfilter(): args = f.methodargs() exp_args = '' - yield assert_equal, exp_args, args + assert exp_args == args impl = f.impl() exp_impl = (" cyclus::Inventories invs;\n" " return invs;\n") - yield assert_equal, exp_impl, impl + assert exp_impl == impl f = SnapshotInvFilter(m) f.given_classname = 'MyFactory' f.mode = 'impl' impl = f.impl() exp_impl = (" cyclus::Inventories invs;\n") - yield assert_equal, exp_impl, impl + assert exp_impl == impl def test_intinvfilter(): """Test InitInvFilter""" @@ -718,11 
+721,11 @@ def test_intinvfilter(): args = f.methodargs() exp_args = "cyclus::Inventories& inv" - yield assert_equal, exp_args, args + assert exp_args == args impl = f.impl() exp_impl = '' - yield assert_equal, exp_impl, impl + assert exp_impl == impl def test_defpragmafilter(): """Test DefaultPragmaFilter""" @@ -749,7 +752,7 @@ def test_schemafilter_buildschema(): '' '') obs = f._buildschema(cpptype, schematype, uitype, names) - yield assert_equal, exp, obs + assert exp == obs cpptype = ['std::map', 'std::string', ['std::vector', 'double']] names = ['streams', 'name', ['efficiencies', 'val']] @@ -759,7 +762,7 @@ def test_schemafilter_buildschema(): '' '') obs = f._buildschema(cpptype, schematype, uitype, names) - yield assert_equal, exp, obs + assert exp == obs # test item aliasing cpptype = ['std::map', 'std::string', ['std::vector', 'double']] @@ -770,7 +773,7 @@ def test_schemafilter_buildschema(): '' '') obs = f._buildschema(cpptype, schematype, uitype, names) - yield assert_equal, exp, obs + assert exp == obs def test_escape_xml(): """Test escape_xml""" @@ -788,7 +791,7 @@ def test_escape_xml(): ' " \\n"\n' \ ' "\\n"' - yield assert_equal, s, got + assert s == got def test_infiletodb_read_member1(): m = MockCodeGenMachine() @@ -900,7 +903,7 @@ def test_infiletodb_read_member1(): #print(gen) #print(exp_gen) - yield assert_equal, exp_gen, gen + assert exp_gen == gen def test_infiletodb_read_member2(): m = MockCodeGenMachine() @@ -964,7 +967,7 @@ def test_infiletodb_read_member2(): # print() # print(gen) # print(exp_gen) - yield assert_equal, exp_gen, gen + assert exp_gen == gen def test_infiletodb_read_map(): m = MockCodeGenMachine() @@ -1001,7 +1004,7 @@ def test_infiletodb_read_map(): ' mymap = mymap_in;\n' ' }\n') - yield assert_equal, exp, obs + assert exp == obs def test_internal_schema(): cases = [ @@ -1157,7 +1160,7 @@ def test_nuclide_uitype(): ' " \\n"\n' ' " \\n"\n' ' "\\n";\n') - yield assert_equal, exp_impl, impl + assert exp_impl == impl # test infiletodb updates f = InfileToDbFilter(m) @@ -1165,7 +1168,7 @@ def test_nuclide_uitype(): impl = f.impl() exp_impl = ' cyclus::InfileTree* sub = tree->SubTree("config/*");\n int i;\n int n;\n {\n int x_val = pyne::nucname::id(cyclus::Query(sub, "x"));\n x = x_val;\n }\n di.NewDatum("Info")\n ->AddVal("x", x)\n ->Record();\n' - yield assert_equal, exp_impl, impl + assert exp_impl == impl # test bad uitypes values fail m.context = {"MyFactory": OrderedDict([('vars', OrderedDict([ @@ -1174,7 +1177,8 @@ def test_nuclide_uitype(): ])} f = SchemaFilter(m) f.given_classname = 'MyFactory' - yield assert_raises, TypeError, f.impl + with pytest.raises(TypeError): + f.impl() def test_integration(): inf = os.path.join(os.path.dirname(__file__), 'cycpp_tests.h') diff --git a/tests/hdf5_back_gen_tests.py b/tests/hdf5_back_gen_tests.py index 114ca93221..d805d89576 100644 --- a/tests/hdf5_back_gen_tests.py +++ b/tests/hdf5_back_gen_tests.py @@ -268,7 +268,7 @@ def generate_and_test(): rec.flush() obs = back.query("test0") print("observed: \n", obs) - yield assert_frame_equal, exp, obs + assert_frame_equal, exp, obs rec.close() os.remove(PATH) diff --git a/tests/test_dynamic_modules.py b/tests/test_dynamic_modules.py index 41dbac5c0f..2f3e91dce1 100644 --- a/tests/test_dynamic_modules.py +++ b/tests/test_dynamic_modules.py @@ -5,26 +5,26 @@ def test_agent_spec_empty(): spec = lib.AgentSpec() - yield assert_equal, spec.path, "" - yield assert_equal, spec.lib, "" - yield assert_equal, spec.agent, "" - yield assert_equal, spec.alias, "" + assert 
spec.path == "" + assert spec.lib == "" + assert spec.agent == "" + assert spec.alias == "" def test_agent_spec_spec(): spec = lib.AgentSpec(":wakka:Jawaka") - yield assert_equal, spec.path, "" - yield assert_equal, spec.lib, "wakka" - yield assert_equal, spec.agent, "Jawaka" - yield assert_equal, spec.alias, "Jawaka" + assert spec.path == "" + assert spec.lib == "wakka" + assert spec.agent == "Jawaka" + assert spec.alias == "Jawaka" def test_agent_spec_full(): spec = lib.AgentSpec("why", "not", "me", "?") - yield assert_equal, spec.path, "why" - yield assert_equal, spec.lib, "not" - yield assert_equal, spec.agent, "me" - yield assert_equal, spec.alias, "?" + assert spec.path == "why" + assert spec.lib == "not" + assert spec.agent == "me" + assert spec.alias == "?" def test_dm_exists(): diff --git a/tests/test_env.py b/tests/test_env.py index fdc4d78ac2..f1a316829a 100644 --- a/tests/test_env.py +++ b/tests/test_env.py @@ -7,26 +7,26 @@ ENV = lib.Env() def test_path_base(): - yield assert_equal, ENV.path_base("/home/cyclus"), "/home" + assert ENV.path_base("/home/cyclus") == "/home" def test_paths(): - yield assert_true, len(ENV.install_path) > 0 - yield assert_true, len(ENV.build_path) > 0 - yield assert_true, len(ENV.get('HOME')) > 0 - yield assert_true, len(ENV.rng_schema()) > 0 + assert len(ENV.install_path) > 0 + assert len(ENV.build_path) > 0 + assert len(ENV.get('HOME')) > 0 + assert len(ENV.rng_schema()) > 0 # cyclus_path cp = ENV.cyclus_path - yield assert_true, len(cp) > 0 + assert len(cp) > 0 for path in cp: - yield assert_true, isinstance(path, str) - yield assert_true, len(ENV.env_delimiter) > 0 - yield assert_true, len(ENV.path_delimiter) > 0 - yield assert_true, len(ENV.find_module('agents')) > 0 + assert isinstance(path, str) + assert len(ENV.env_delimiter) > 0 + assert len(ENV.path_delimiter) > 0 + assert len(ENV.find_module('agents')) > 0 def test_nuc_data(): - yield assert_true, len(ENV.nuc_data) > 0 + assert len(ENV.nuc_data) > 0 ENV.set_nuc_data_path(ENV.nuc_data) diff --git a/tests/test_error.py b/tests/test_error.py index c0023b1877..4c5e7c11ac 100644 --- a/tests/test_error.py +++ b/tests/test_error.py @@ -6,16 +6,16 @@ def test_warn_limit(): orig = lib.get_warn_limit() - yield assert_true, orig >= 0 + assert orig >= 0 lib.set_warn_limit(42) - yield assert_equal, 42, lib.get_warn_limit() + assert 42 == lib.get_warn_limit() lib.set_warn_limit(orig) def test_warn_as_error(): orig = lib.get_warn_as_error() - yield assert_true, isinstance(orig, bool) + assert isinstance(orig, bool) lib.set_warn_as_error(True) - yield assert_true, lib.get_warn_as_error() + assert lib.get_warn_as_error() lib.set_warn_as_error(orig) diff --git a/tests/test_logger.py b/tests/test_logger.py index d88b9257c1..b942d3d953 100644 --- a/tests/test_logger.py +++ b/tests/test_logger.py @@ -8,10 +8,10 @@ def test_report_level(): orig = LOGGER.report_level - yield assert_true, orig >= lib.LEV_ERROR - yield assert_true, orig <= lib.LEV_DEBUG5 + assert orig >= lib.LEV_ERROR + assert orig <= lib.LEV_DEBUG5 LOGGER.report_level = 4 - yield assert_true, LOGGER.report_level == 4 + assert LOGGER.report_level == 4 LOGGER.report_level = orig @@ -29,9 +29,9 @@ def test_no_mem(): def test_to_log_level_string(): s = LOGGER.to_string(lib.LEV_ERROR) - yield assert_true, isinstance(s, str) + assert isinstance(s, str) level = LOGGER.to_log_level(s) - yield assert_true, isinstance(level, int) - yield assert_equal, lib.LEV_ERROR, level + assert isinstance(level, int) + assert lib.LEV_ERROR == level diff --git 
a/tests/test_main.py b/tests/test_main.py index 10ee096e6f..af39f9ed61 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -34,6 +34,6 @@ def test_main(): except Exception: res = False raise - yield assert_true, res + assert res diff --git a/tests/test_memback.py b/tests/test_memback.py index adf63e6760..6c42cba0cd 100644 --- a/tests/test_memback.py +++ b/tests/test_memback.py @@ -30,11 +30,11 @@ def test_simple(): exp = pd.DataFrame({"col0": [1], "col1": [42.0], "col2": ["wakka"]}, columns=['col0', 'col1', 'col2']) obs = back.query("test") - yield assert_frame_equal, exp, obs + assert_frame_equal, exp, obs rec.close() # test covert to JSON - yield assert_is_instance, obs.to_json(), str + assert isinstance(obs.to_json(), str) def test_simple_with_sim_id(): @@ -50,7 +50,7 @@ def test_simple_with_sim_id(): rec.close() # test covert to JSON - yield assert_is_instance, obs.to_json(default_handler=str), str + assert isinstance(obs.to_json(default_handler=str), str) def test_many_rows_one_table(): @@ -95,7 +95,7 @@ def test_two_tables_interleaved(): "col2": ["wakka"*i for i in range(0, n, 2)]}, columns=['col0', 'col1', 'col2']) obs0 = back.query("test0") - yield assert_frame_equal, exp0, obs0 + assert_frame_equal, exp0, obs0 exp1 = pd.DataFrame({ "col0": list(range(1, n, 2)), @@ -103,7 +103,7 @@ def test_two_tables_interleaved(): "col2": ["wakka"*i for i in range(1, n, 2)]}, columns=['col0', 'col1', 'col2']) obs1 = back.query("test1") - yield assert_frame_equal, exp1, obs1 + assert_frame_equal, exp1, obs1 rec.close() @@ -130,7 +130,7 @@ def test_three_tables_grouped(): "col2": ["wakka"*i for i in range(n)]}, columns=['col1', 'col2']) obs0 = back.query("test0") - yield assert_frame_equal, exp0, obs0 + assert_frame_equal, exp0, obs0 j = 1 exp1 = pd.DataFrame({ @@ -138,7 +138,7 @@ def test_three_tables_grouped(): "col2": ["wakka"*i for i in range(n)]}, columns=['col0', 'col2']) obs1 = back.query("test1") - yield assert_frame_equal, exp1, obs1 + assert_frame_equal, exp1, obs1 j = 2 exp2 = pd.DataFrame({ @@ -146,7 +146,7 @@ def test_three_tables_grouped(): "col1": [42.0*i*j for i in range(n)]}, columns=['col0', 'col1']) obs2 = back.query("test2") - yield assert_frame_equal, exp2, obs2 + assert_frame_equal, exp2, obs2 rec.close() @@ -232,55 +232,55 @@ def test_many_cols_one_table(): def test_registry_operations(): n = 10 rec, back = make_rec_back() - yield assert_true, back.store_all_tables + assert back.store_all_tables rec.flush() # test empty datalist # test storing only one table back.registry = ["test0"] - yield assert_false, back.store_all_tables - yield assert_is_instance, back.registry, frozenset - yield assert_equal, 1, len(back.registry) - yield assert_equal, 0, len(back.cache) + assert not back.store_all_tables + assert isinstance(back.registry, frozenset) + assert 1 == len(back.registry) + assert 0 == len(back.cache) make_two_interleaved(rec, n) - yield assert_equal, 1, len(back.cache) - yield assert_in, "test0", back.cache + assert 1 == len(back.cache) + assert ("test0" in back.cache) # test removing registry with False back.registry = False - yield assert_false, back.store_all_tables - yield assert_is_instance, back.registry, frozenset - yield assert_equal, 0, len(back.cache) + assert not back.store_all_tables + assert isinstance(back.registry, frozenset) + assert 0 == len(back.cache) rec.flush() # test partial registry back.registry = ["test0", "test1"] - yield assert_false, back.store_all_tables - yield assert_is_instance, back.registry, frozenset - yield assert_equal, 2, 
len(back.registry) - yield assert_equal, 0, len(back.cache) + assert not back.store_all_tables + assert isinstance(back.registry, frozenset) + assert 2 == len(back.registry) + assert 0 == len(back.cache) make_two_interleaved(rec, n) - yield assert_equal, 2, len(back.cache) - yield assert_in, "test0", back.cache - yield assert_in, "test1", back.cache + assert 2 == len(back.cache) + assert ("test0" in back.cache) + assert ("test1" in back.cache) # stop following test1 back.registry = ["test0", "test42", "test43"] - yield assert_equal, 3, len(back.registry) - yield assert_equal, 1, len(back.cache) - yield assert_in, "test0", back.cache - yield assert_not_in, "test1", back.cache + assert 3 == len(back.registry) + assert 1 == len(back.cache) + assert ("test0" in back.cache) + assert ("test1" not in back.cache) # test removing registry with None back.registry = None - yield assert_false, back.store_all_tables - yield assert_is_instance, back.registry, frozenset - yield assert_equal, 0, len(back.cache) + assert not back.store_all_tables + assert isinstance(back.registry, frozenset) + assert 0 == len(back.cache) rec.close() def test_no_fallback(): back = memback.MemBack() - yield assert_is, back.fallback, None - yield assert_is, back.query("yo"), None + assert (back.fallback is None) + assert (back.query("yo") is None) class FallBackend(object): @@ -299,14 +299,14 @@ def query(self, table, conds=None): def test_fallback(): fallback = FallBackend() back = memback.MemBack(fallback=fallback) - yield assert_is_not, back.fallback, None + assert (back.fallback is not None) n = 10 x = pd.DataFrame({ "col0": list(range(n)), "col1": [42.0*i for i in range(n)], "col2": ["wakka"*i for i in range(n)]}, columns=['col0', 'col1', 'col2']) - yield assert_frame_equal, x, back.query("yo") + assert_frame_equal, x, back.query("yo") def test_query(): @@ -321,46 +321,46 @@ def test_query(): # test == obs = back.query('x', [('col0', '==', 4)]) - yield assert_equal, 1, len(obs) - yield assert_equal, 4, obs['col0'].loc[4] + assert 1 == len(obs) + assert 4 == obs['col0'].loc[4] # test != obs = back.query('x', [('col2', '!=', 'wakka')]) - yield assert_equal, n-1, len(obs) - yield assert_not_in, 1, obs['col0'] + assert n-1 == len(obs) + assert (1 not in obs['col0']) # test < obs = back.query('x', [('col1', '<', 42.0*6.0)]) - yield assert_equal, 6, len(obs) - yield assert_frame_equal, x[x.col1 < 42.0*6.0], obs + assert 6 == len(obs) + assert_frame_equal, x[x.col1 < 42.0*6.0], obs # test <= obs = back.query('x', [('col1', '<=', 42.0*3.1)]) - yield assert_equal, 4, len(obs) - yield assert_frame_equal, x[x.col1 <= 42.0*3.1], obs + assert 4 == len(obs) + assert_frame_equal, x[x.col1 <= 42.0*3.1], obs # test < obs = back.query('x', [('col1', '>', 42.0*6.0)]) - yield assert_equal, 3, len(obs) - yield assert_frame_equal, x[x.col1 > 42.0*6.0], obs + assert 3 == len(obs) + assert_frame_equal, x[x.col1 > 42.0*6.0], obs # test <= obs = back.query('x', [('col1', '>=', 42.0*3.1)]) - yield assert_equal, 6, len(obs) - yield assert_frame_equal, x[x.col1 >= 42.0*3.1], obs + assert 6 == len(obs) + assert_frame_equal, x[x.col1 >= 42.0*3.1], obs # Test two conds obs = back.query('x', [('col1', '<', 42.0*6.0), ('col1', '>=', 42.0*3.1)]) - yield assert_equal, 2, len(obs) - yield assert_frame_equal, x[(x.col1 < 42.0*6.0) & (x.col1 >= 42.0*3.1)], obs + assert 2 == len(obs) + assert_frame_equal, x[(x.col1 < 42.0*6.0) & (x.col1 >= 42.0*3.1)], obs # Test three conds obs = back.query('x', [('col1', '<', 42.0*6.0), ('col1', '>=', 42.0*3.1), ('col2', 
'!=', 'wakka')]) - yield assert_equal, 2, len(obs) - yield assert_frame_equal, x[(x.col1 < 42.0*6.0) & (x.col1 >= 42.0*3.1)], obs + assert 2 == len(obs) + assert_frame_equal, x[(x.col1 < 42.0*6.0) & (x.col1 >= 42.0*3.1)], obs # test convert to JSON obs.to_json() diff --git a/tests/test_minimal_cycle.py b/tests/test_minimal_cycle.py index 6b0bd64f7f..559a2716b6 100644 --- a/tests/test_minimal_cycle.py +++ b/tests/test_minimal_cycle.py @@ -7,6 +7,8 @@ import sqlite3 import tables import numpy as np +import pytest + from tools import check_cmd, cyclus_has_coin from helper import tables_exist, find_ids, exit_times, create_sim_input, \ h5out, sqliteout, clean_outs, sha1array, to_ary, which_outfile @@ -121,7 +123,7 @@ def test_minimal_cycle(): paths = ["/AgentEntry", "/Resources", "/Transactions", "/Info"] # Check if these tables exist - yield assert_true, tables_exist(outfile, paths) + assert tables_exist(outfile, paths) if not tables_exist(outfile, paths): outfile.close() clean_outs() @@ -156,18 +158,18 @@ def test_minimal_cycle(): facility_id = find_ids(":agents:KFacility", spec, agent_ids) # Test for two KFacility - yield assert_equal, len(facility_id), 2 + assert len(facility_id) == 2 # Test for one Facility A and Facility B facility_a = find_ids("FacilityA", agent_protos, agent_ids) facility_b = find_ids("FacilityB", agent_protos, agent_ids) - yield assert_equal, len(facility_a), 1 - yield assert_equal, len(facility_b), 1 + assert len(facility_a) == 1 + assert len(facility_b) == 1 # Test if both facilities are KFracilities # Assume FacilityA is deployed first according to the schema - yield assert_equal, facility_a[0], facility_id[0] - yield assert_equal, facility_b[0], facility_id[1] + assert facility_a[0] == facility_id[0] + assert facility_b[0] == facility_id[1] # Test if the transactions are strictly between Facility A and # Facility B. There are no Facility A to Facility A or vice versa. @@ -183,23 +185,23 @@ def test_minimal_cycle(): pattern_a = pattern_two pattern_b = pattern_one - yield assert_array_equal, \ + assert_array_equal, \ np.where(sender_ids == facility_a[0])[0], \ pattern_a, "Fac A Pattern A" - yield assert_array_equal, \ + assert_array_equal, \ np.where(receiver_ids == facility_a[0])[0], \ pattern_b, "Fac A Pattern B" # reverse pattern when acted as a receiver - yield assert_array_equal, \ + assert_array_equal, \ np.where(sender_ids == facility_b[0])[0], \ pattern_b, "Fac B Pattern A" - yield assert_array_equal, \ + assert_array_equal, \ np.where(receiver_ids == facility_b[0])[0], \ pattern_a, "Fac B Pattern B" # reverse pattern when acted as a receiver # Transaction ids must be equal range from 1 to the number of rows expected_trans_ids = np.arange(sender_ids.size) - yield assert_array_equal, \ + assert_array_equal, \ to_ary(transactions, "TransactionId"), \ expected_trans_ids @@ -210,7 +212,7 @@ def test_minimal_cycle(): # there must be (2 * duration) number of transactions. 
exp = 2 * duration obs = sender_ids.size - yield assert_equal, exp, obs, "number of transactions, {} != {}".format(exp, obs) + assert exp == obs, f"number of transactions, {exp} != {obs}" # Track transacted resources quantities = to_ary(resources, "Quantity") @@ -220,13 +222,13 @@ def test_minimal_cycle(): init_capacity_b = quantities[1] j = 0 for p in pattern_a: - yield assert_almost_equal, quantities[p], \ + assert pytest.approx(quantities[p], abs=1e-7) == \ init_capacity_a * k_factor_a ** j j += 1 j = 0 for p in pattern_b: - yield assert_almost_equal, quantities[p], \ + assert pytest.approx(quantities[p], abs=1e-7) == \ init_capacity_b * k_factor_b ** j j += 1 diff --git a/tests/test_null_sink.py b/tests/test_null_sink.py index ed51739b07..7b47c6eaab 100644 --- a/tests/test_null_sink.py +++ b/tests/test_null_sink.py @@ -38,7 +38,7 @@ def check_null_sink(fname, given_spec): legal_paths = ["/AgentEntry", "/Info"] illegal_paths = ["/Transactions"] # this must contain tables to test # Check if these tables exist - yield assert_true, tables_exist(outfile, legal_paths) + assert tables_exist(outfile, legal_paths) if not tables_exist(outfile, legal_paths): outfile.close() clean_outs() @@ -66,10 +66,10 @@ def check_null_sink(fname, given_spec): sink_id = find_ids(given_spec, spec, agent_ids) # Test if one SimpleSink is deployed - yield assert_equal, len(sink_id), 1 + assert len(sink_id) == 1 # No resource exchange is expected - yield assert_false, tables_exist(outfile, illegal_paths) + assert not tables_exist(outfile, illegal_paths) clean_outs() diff --git a/tests/test_source_to_sink.py b/tests/test_source_to_sink.py index e2e743a7be..7b1ffde580 100644 --- a/tests/test_source_to_sink.py +++ b/tests/test_source_to_sink.py @@ -35,7 +35,7 @@ def check_source_to_sink(fname, source_spec, sink_spec): # Tables of interest paths = ["/AgentEntry", "/Resources", "/Transactions", "/Info"] # Check if these tables exist - yield assert_true, tables_exist(outfile, paths) + assert tables_exist(outfile, paths) if not tables_exist(outfile, paths): clean_outs() return # don't execute further commands @@ -69,8 +69,8 @@ def check_source_to_sink(fname, source_spec, sink_spec): sink_id = find_ids(sink_spec, spec, agent_ids) # Test for only one source and one sink are deployed in the simulation - yield assert_equal, len(source_id), 1 - yield assert_equal, len(sink_id), 1 + assert len(source_id) == 1 + assert len(sink_id) == 1 # Check if transactions are only between source and sink sender_ids = to_ary(transactions, "SenderId") @@ -79,12 +79,12 @@ def check_source_to_sink(fname, source_spec, sink_spec): expected_sender_array.fill(source_id[0]) expected_receiver_array = np.empty(receiver_ids.size) expected_receiver_array.fill(sink_id[0]) - yield assert_array_equal, sender_ids, expected_sender_array - yield assert_array_equal, receiver_ids, expected_receiver_array + assert_array_equal, sender_ids, expected_sender_array + assert_array_equal, receiver_ids, expected_receiver_array # Transaction ids must be equal range from 1 to the number of rows expected_trans_ids = np.arange(0, sender_ids.size, 1) - yield assert_array_equal, \ + assert_array_equal, \ to_ary(transactions, "TransactionId"),\ expected_trans_ids @@ -95,7 +95,7 @@ def check_source_to_sink(fname, source_spec, sink_spec): # Expect that every transaction quantity is the same amount expected_quantities.fill(quantities[0]) - yield assert_array_equal, quantities, expected_quantities + assert_array_equal, quantities, expected_quantities clean_outs() diff --git 
a/tests/test_trivial_cycle.py b/tests/test_trivial_cycle.py index 8d32a60000..2cbbdde8d0 100644 --- a/tests/test_trivial_cycle.py +++ b/tests/test_trivial_cycle.py @@ -5,6 +5,8 @@ import sqlite3 import tables import numpy as np +import pytest + from tools import check_cmd, cyclus_has_coin from helper import tables_exist, find_ids, exit_times, create_sim_input, \ h5out, sqliteout, clean_outs, to_ary, which_outfile @@ -48,7 +50,7 @@ def test_source_to_sink(): paths = ["/AgentEntry", "/Resources", "/Transactions", "/Info"] # Check if these tables exist - yield assert_true, tables_exist(outfile, paths) + assert tables_exist(outfile, paths) if not tables_exist(outfile, paths): outfile.close() clean_outs() @@ -79,7 +81,7 @@ def test_source_to_sink(): facility_id = find_ids(":agents:KFacility", spec, agent_ids) # Test for only one KFacility - yield assert_equal, len(facility_id), 1 + assert len(facility_id) == 1 sender_ids = to_ary(transactions, "SenderId") receiver_ids = to_ary(transactions, "ReceiverId") @@ -87,12 +89,12 @@ def test_source_to_sink(): expected_sender_array.fill(facility_id[0]) expected_receiver_array = np.empty(receiver_ids.size) expected_receiver_array.fill(facility_id[0]) - yield assert_array_equal, sender_ids, expected_sender_array - yield assert_array_equal, receiver_ids, expected_receiver_array + assert_array_equal, sender_ids, expected_sender_array + assert_array_equal, receiver_ids, expected_receiver_array # Transaction ids must be equal range from 1 to the number of rows expected_trans_ids = np.arange(0, sender_ids.size, 1) - yield assert_array_equal, \ + assert_array_equal, \ to_ary(transactions, "TransactionId"), \ expected_trans_ids @@ -104,7 +106,7 @@ def test_source_to_sink(): i = 0 initial_capacity = quantities[0] for q in quantities: - yield assert_almost_equal, q, initial_capacity * k_factor ** i + assert pytest.approx(q, abs=1e-7) == initial_capacity * k_factor ** i i += 1 clean_outs() From 71a3367dd43cd675d803f351c978fc8bfd060350 Mon Sep 17 00:00:00 2001 From: "Paul P.H. 
Wilson" Date: Fri, 17 Mar 2023 11:01:10 -0500 Subject: [PATCH 22/82] fix collections in cython --- cyclus/agents.pyx | 2 +- cyclus/lib.pyx | 3 ++- src/pyinfile.pyx | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/cyclus/agents.pyx b/cyclus/agents.pyx index 221a4ce8fb..9154d35691 100644 --- a/cyclus/agents.pyx +++ b/cyclus/agents.pyx @@ -17,7 +17,7 @@ from cpython cimport (PyObject, PyDict_New, PyDict_Contains, import json from inspect import getmro, getdoc from copy import deepcopy -from collections import Mapping +from collections.abc import Mapping from cyclus cimport cpp_cyclus from cyclus.cpp_cyclus cimport shared_ptr, reinterpret_pointer_cast diff --git a/cyclus/lib.pyx b/cyclus/lib.pyx index fe79b8337d..258533e2f0 100644 --- a/cyclus/lib.pyx +++ b/cyclus/lib.pyx @@ -22,7 +22,8 @@ from cpython.pycapsule cimport PyCapsule_GetPointer from binascii import hexlify import uuid import os -from collections import Mapping, Sequence, Iterable, defaultdict +from collections import defaultdict +from collections.abc import Mapping, Sequence, Iterable from importlib import import_module cimport numpy as np diff --git a/src/pyinfile.pyx b/src/pyinfile.pyx index 993a5c7253..db50e5c92f 100644 --- a/src/pyinfile.pyx +++ b/src/pyinfile.pyx @@ -38,7 +38,7 @@ cdef public std_string py_to_json "CyclusPyToJson" (std_string cpp_infile) excep raise RuntimeError('simulation not found in python file.') if callable(sim): sim = sim() - from collections import Mapping + from collections.abc import Mapping if isinstance(sim, str): pass # assume in JSON format elif isinstance(sim, bytes): From f654029a8ed58599874d470bddcb27ddf42e02b3 Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Fri, 17 Mar 2023 11:02:14 -0500 Subject: [PATCH 23/82] advanced pytest conversion --- tests/cycpp_tests.py | 2 -- tests/hdf5_back_gen_tests.py | 5 ++--- tests/test_abi.py | 11 +++++------ tests/test_cycluslib.py | 5 +---- tests/test_include_recipe.py | 2 +- tests/test_inventories.py | 4 ++-- tests/test_lotka_volterra.py | 8 +++----- tests/test_memback.py | 5 ----- tests/test_minimal_cycle.py | 6 ++---- tests/test_null_sink.py | 9 +++++---- tests/test_smbchk.py | 2 -- tests/test_source_to_sink.py | 8 +++++--- tests/test_stubs.py | 6 +++--- tests/test_trivial_cycle.py | 4 ++-- tests/tools.py | 9 +++++---- 15 files changed, 36 insertions(+), 50 deletions(-) diff --git a/tests/cycpp_tests.py b/tests/cycpp_tests.py index 4e8f873d30..65083f86d3 100644 --- a/tests/cycpp_tests.py +++ b/tests/cycpp_tests.py @@ -1192,5 +1192,3 @@ def test_integration(): p = Popen(cmd, shell=True, stdin=PIPE, stdout=PIPE, stderr=STDOUT, close_fds=True) assert '' == p.stdout.read().decode() -if __name__ == "__main__": - nose.runmodule() diff --git a/tests/hdf5_back_gen_tests.py b/tests/hdf5_back_gen_tests.py index d805d89576..6b2850ed21 100644 --- a/tests/hdf5_back_gen_tests.py +++ b/tests/hdf5_back_gen_tests.py @@ -6,6 +6,7 @@ import uuid import pandas as pd from pandas.util.testing import assert_frame_equal +import pytest from cyclus.lib import Hdf5Back, Recorder import cyclus.typesystem as ts @@ -245,7 +246,7 @@ def generate_and_test(): """Generate and run tests for supported Hdf5 datatypes.""" if sys.version_info[0] == 2: msg = 'Hdf5 backend gen tests do not support Python 2.x' - raise SkipTest(msg) + pytest.skip(msg) if os.path.isfile(PATH): os.remove(PATH) for i in CANON_TYPES: @@ -272,6 +273,4 @@ def generate_and_test(): rec.close() os.remove(PATH) -if __name__ == "__main__": - nose.runmodule() diff --git a/tests/test_abi.py 
b/tests/test_abi.py index e80d703242..e772ef933c 100644 --- a/tests/test_abi.py +++ b/tests/test_abi.py @@ -1,6 +1,7 @@ import os import sys import subprocess +import pytest cycdir = os.path.dirname(os.path.dirname(__file__)) @@ -16,14 +17,14 @@ def test_abi_stability(): - raise SkipTest('manually remove this skip to test ABI stability') + pytest.skip('manually remove this skip to test ABI stability') if smbchk is None: - raise SkipTest('Could not import smbchk!') + pytest.skip('Could not import smbchk!') if os.name != 'posix': - raise SkipTest('can only check for ABI stability on posix systems.') + pytest.skip('can only check for ABI stability on posix systems.') libcyc = os.path.join(cycdir, 'build', 'lib', 'libcyclus.so') if not os.path.exists(libcyc): - raise SkipTest('libcyclus could not be found, ' + pytest.skip('libcyclus could not be found, ' 'cannot check for ABI stability') args = '--update -t HEAD --no-save --check'.split() with tools.indir(reldir): @@ -31,5 +32,3 @@ def test_abi_stability(): assert(obs) -if __name__ == "__main__": - nose.runmodule() diff --git a/tests/test_cycluslib.py b/tests/test_cycluslib.py index dfa853f194..54c6565088 100644 --- a/tests/test_cycluslib.py +++ b/tests/test_cycluslib.py @@ -3,13 +3,12 @@ import subprocess from functools import wraps - from cyclus import lib from tools import libcyclus_setup, dbtest -setup = libcyclus_setup +#setup = libcyclus_setup @dbtest def test_name(db, fname, backend): @@ -68,6 +67,4 @@ def test_position(): assert 0.0 == d -if __name__ == "__main__": - nose.runmodule() diff --git a/tests/test_include_recipe.py b/tests/test_include_recipe.py index f266732923..98da509dc5 100644 --- a/tests/test_include_recipe.py +++ b/tests/test_include_recipe.py @@ -20,7 +20,7 @@ def test_include_recipe(): holdsrtn = [1] # needed because nose does not send() to test generator outfile = which_outfile() cmd = ["cyclus", "-o", outfile, "--input-file", sim_input] - yield check_cmd, cmd, CWD, holdsrtn + check_cmd(cmd, CWD, holdsrtn) rtn = holdsrtn[0] if rtn != 0: return # don't execute further commands diff --git a/tests/test_inventories.py b/tests/test_inventories.py index 61c826abce..3342c59eb8 100644 --- a/tests/test_inventories.py +++ b/tests/test_inventories.py @@ -23,7 +23,7 @@ def test_inventories_false(): holdsrtn = [1] # needed because nose does not send() to test generator outfile = sqliteout cmd = ["cyclus", "-o", outfile, "--input-file", sim] - yield check_cmd, cmd, '.', holdsrtn + check_cmd(cmd, '.', holdsrtn) rtn = holdsrtn[0] if rtn != 0: return # don't execute further commands @@ -48,7 +48,7 @@ def test_inventories(): holdsrtn = [1] # needed because nose does not send() to test generator outfile = sqliteout cmd = ["cyclus", "-o", outfile, "--input-file", sim] - yield check_cmd, cmd, '.', holdsrtn + check_cmd(cmd, '.', holdsrtn) rtn = holdsrtn[0] if rtn != 0: return # don't execute further commands diff --git a/tests/test_lotka_volterra.py b/tests/test_lotka_volterra.py index 60d8589508..533ada1672 100644 --- a/tests/test_lotka_volterra.py +++ b/tests/test_lotka_volterra.py @@ -30,7 +30,7 @@ def test_predator_only(): outfile = which_outfile() cmd = ["cyclus", "-o", outfile, "--input-file", sim_input] - yield check_cmd, cmd, '.', holdsrtn + check_cmd(cmd, '.', holdsrtn) rtn = holdsrtn[0] print("Confirming valid Cyclus execution.") @@ -58,7 +58,7 @@ def test_prey_only(): outfile = which_outfile() cmd = ["cyclus", "-o", outfile, "--input-file", sim_input] - yield check_cmd, cmd, '.', holdsrtn + check_cmd(cmd, '.', holdsrtn) rtn = 
holdsrtn[0] print("Confirming valid Cyclus execution.") @@ -95,7 +95,7 @@ def test_lotka_volterra(): outfile = which_outfile() cmd = ["cyclus", "-o", outfile, "--input-file", sim_input] - yield check_cmd, cmd, '.', holdsrtn + check_cmd(cmd, '.', holdsrtn) rtn = holdsrtn[0] print("Confirming valid Cyclus execution.") @@ -112,5 +112,3 @@ def test_lotka_volterra(): clean_outs() -if __name__ == "__main__": - nose.runmodule() diff --git a/tests/test_memback.py b/tests/test_memback.py index 6c42cba0cd..d50fe28655 100644 --- a/tests/test_memback.py +++ b/tests/test_memback.py @@ -364,8 +364,3 @@ def test_query(): # test convert to JSON obs.to_json() - - - -if __name__ == "__main__": - nose.runmodule() \ No newline at end of file diff --git a/tests/test_minimal_cycle.py b/tests/test_minimal_cycle.py index 559a2716b6..536c31115f 100644 --- a/tests/test_minimal_cycle.py +++ b/tests/test_minimal_cycle.py @@ -99,7 +99,7 @@ def test_minimal_cycle(): This equation is used to test each transaction amount. """ if not cyclus_has_coin(): - raise SkipTest("Cyclus does not have COIN") + pytest.skip("Cyclus does not have COIN") # A reference simulation input for minimal cycle with different commodities ref_input = os.path.join(INPUT, "minimal_cycle.xml") @@ -114,7 +114,7 @@ def test_minimal_cycle(): holdsrtn = [1] # needed b/c nose does not send() to test generator outfile = which_outfile() cmd = ["cyclus", "-o", outfile, "--input-file", sim_input] - yield check_cmd, cmd, '.', holdsrtn + check_cmd(cmd, '.', holdsrtn) rtn = holdsrtn[0] if rtn != 0: return # don't execute further commands @@ -235,5 +235,3 @@ def test_minimal_cycle(): clean_outs() os.remove(sim_input) -if __name__ == "__main__": - nose.runmodule() diff --git a/tests/test_null_sink.py b/tests/test_null_sink.py index 7b47c6eaab..b1799156f5 100644 --- a/tests/test_null_sink.py +++ b/tests/test_null_sink.py @@ -2,6 +2,7 @@ import os import sqlite3 +import pytest @@ -23,14 +24,14 @@ def check_null_sink(fname, given_spec): """ clean_outs() if not cyclus_has_coin(): - raise SkipTest("Cyclus does not have COIN") + pytest.skip("Cyclus does not have COIN") # Cyclus simulation input for null sink testing sim_input = os.path.join(INPUT, fname) holdsrtn = [1] # needed because nose does not send() to test generator outfile = which_outfile() cmd = ["cyclus", "-o", outfile, "--input-file", sim_input] - yield check_cmd, cmd, '.', holdsrtn + check_cmd(cmd, '.', holdsrtn) rtn = holdsrtn[0] if rtn != 0: return # don't execute further commands @@ -70,7 +71,7 @@ def check_null_sink(fname, given_spec): # No resource exchange is expected assert not tables_exist(outfile, illegal_paths) - + assert 1 == 2 # FIX ME clean_outs() @@ -79,5 +80,5 @@ def test_null_sink(): ("null_sink.py", ":cyclus.pyagents:Sink")] for case in cases: for x in check_null_sink(*case): - yield x + pass diff --git a/tests/test_smbchk.py b/tests/test_smbchk.py index 25ecc8ef23..5229434acc 100644 --- a/tests/test_smbchk.py +++ b/tests/test_smbchk.py @@ -67,5 +67,3 @@ def test_check(): obs = smbchk.check(db) assert not(obs) -if __name__ == "__main__": - nose.runmodule() diff --git a/tests/test_source_to_sink.py b/tests/test_source_to_sink.py index 7b1ffde580..7750691ec0 100644 --- a/tests/test_source_to_sink.py +++ b/tests/test_source_to_sink.py @@ -6,6 +6,8 @@ import sqlite3 import tables import numpy as np +import pytest + from tools import check_cmd, cyclus_has_coin from helper import tables_exist, find_ids, exit_times, \ h5out, sqliteout, clean_outs, to_ary, which_outfile @@ -18,7 +20,7 @@ def 
check_source_to_sink(fname, source_spec, sink_spec): """ clean_outs() if not cyclus_has_coin(): - raise SkipTest("Cyclus does not have COIN") + pytest.skip("Cyclus does not have COIN") # Cyclus simulation input for Source and Sink sim_inputs = [os.path.join(INPUT, fname)] @@ -27,7 +29,7 @@ def check_source_to_sink(fname, source_spec, sink_spec): holdsrtn = [1] # needed because nose does not send() to test generator outfile = which_outfile() cmd = ["cyclus", "-o", outfile, "--input-file", sim_input] - yield check_cmd, cmd, '.', holdsrtn + check_cmd(cmd, '.', holdsrtn) rtn = holdsrtn[0] if rtn != 0: return # don't execute further commands @@ -106,4 +108,4 @@ def test_source_to_sink(): ] for case in cases: for x in check_source_to_sink(*case): - yield x + pass diff --git a/tests/test_stubs.py b/tests/test_stubs.py index d591194a48..c2d8008481 100644 --- a/tests/test_stubs.py +++ b/tests/test_stubs.py @@ -5,17 +5,17 @@ import tempfile import io from contextlib import contextmanager +import pytest @contextmanager def tmpdir(): d = tempfile.mkdtemp() - yield d shutil.rmtree(d) @contextmanager def tmplog(fname): - yield io.open(fname, mode='w') + io.open(fname, mode='w') os.remove(fname) def test_stubs(): @@ -54,7 +54,7 @@ def test_stubs(): cwd=src, stdout=f, stderr=f) except subprocess.CalledProcessError as e: print(msg) - raise SkipTest(msg) # skip if we can't install for some reason. + pytest.skip(msg) # skip if we can't install for some reason. # run unit tests for stub cmd = tst_cmd.format(pth) diff --git a/tests/test_trivial_cycle.py b/tests/test_trivial_cycle.py index 2cbbdde8d0..25ec5b494f 100644 --- a/tests/test_trivial_cycle.py +++ b/tests/test_trivial_cycle.py @@ -26,7 +26,7 @@ def test_source_to_sink(): This equation is used to test each transaction amount. """ if not cyclus_has_coin(): - raise SkipTest("Cyclus does not have COIN") + pytest.skip("Cyclus does not have COIN") # A reference simulation input for the trivial cycle simulation. ref_input = os.path.join(INPUT, "trivial_cycle.xml") @@ -41,7 +41,7 @@ def test_source_to_sink(): holdsrtn = [1] # needed because nose does not send() to test generator outfile = which_outfile() cmd = ["cyclus", "-o", outfile, "--input-file", sim_input] - yield check_cmd, cmd, '.', holdsrtn + check_cmd(cmd, '.', holdsrtn) rtn = holdsrtn[0] if rtn != 0: return # don't execute further commands diff --git a/tests/tools.py b/tests/tools.py index f53852eaa1..772a3e5d5d 100644 --- a/tests/tools.py +++ b/tests/tools.py @@ -10,6 +10,7 @@ import tempfile from contextlib import contextmanager from functools import wraps +import pytest from cyclus import lib as libcyclus @@ -17,8 +18,8 @@ if sys.version_info[0] >= 3: basestring = str -unit = attr('unit') -integration = attr('integration') +#unit = attr('unit') +#integration = attr('integration') INPUT = os.path.join(os.path.dirname(__file__), "input") @@ -118,7 +119,7 @@ def skip_then_continue(msg=""): and we may continue on our merry way. A message may be optionally passed to this function. """ - raise SkipTest(msg) + pytest.skip(msg) @contextmanager def indir(d): @@ -174,7 +175,7 @@ def wrapper(): os.remove(fname) shutil.copy(oname, fname) db = backend(fname) - yield f, db, fname, backend + f(db, fname, backend) return wrapper From 40675f52ae300b523e5921b93336021b80e34616 Mon Sep 17 00:00:00 2001 From: "Paul P.H. 
Wilson" Date: Fri, 17 Mar 2023 11:08:56 -0500 Subject: [PATCH 24/82] additional test in JsonPyRoundTrip --- tests/toolkit/infile_converters_tests.cc | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/toolkit/infile_converters_tests.cc b/tests/toolkit/infile_converters_tests.cc index 81f358eaf2..d0bb90caee 100644 --- a/tests/toolkit/infile_converters_tests.cc +++ b/tests/toolkit/infile_converters_tests.cc @@ -91,6 +91,7 @@ TEST(InfileConverters, JsonPyRoundTrip) { string j2 = cyclus::toolkit::PyToJson(p2); cyclus::PyStop(); + EXPECT_STREQ(inp.c_str(), j1.c_str()); EXPECT_STREQ(j1.c_str(), j2.c_str()); EXPECT_STREQ(p1.c_str(), p2.c_str()); } From d346adba6922e7527568bda857eea506bebaa318 Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Thu, 31 Aug 2023 11:23:04 -0500 Subject: [PATCH 25/82] constrain cython to avoid incompatibility --- docker/cyclus-deps/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/cyclus-deps/Dockerfile b/docker/cyclus-deps/Dockerfile index a36d5fca80..7c70a8c6a5 100644 --- a/docker/cyclus-deps/Dockerfile +++ b/docker/cyclus-deps/Dockerfile @@ -58,7 +58,7 @@ RUN conda update -y --all && \ pytables \ pandas \ jinja2 \ - cython \ + "cython<3" \ websockets \ pprintpp \ && \ From 37b2730d821a8524b6f853be2e564f641d4a27b5 Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Thu, 7 Sep 2023 17:45:10 -0500 Subject: [PATCH 26/82] handle different APIs for libxml++ --- src/infile_tree.cc | 16 +++++++++++++--- src/xml_parser.cc | 12 ++++++++++-- 2 files changed, 23 insertions(+), 5 deletions(-) diff --git a/src/infile_tree.cc b/src/infile_tree.cc index 164f9aa94a..61a60907c3 100644 --- a/src/infile_tree.cc +++ b/src/infile_tree.cc @@ -10,6 +10,14 @@ namespace cyclus { +#if LIBXMLXX_MAJOR_VERSION == 2 + using xmlpp::NodeSet; + typedef xmlpp::Node::NodeList const_NodeList; +#else + using xmlpp::Node::NodeSet; + using xmlpp::Node::const_NodeList; +#endif + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - InfileTree::InfileTree(XMLParser& parser) : current_node_(0) { current_node_ = parser.Document()->get_root_node(); @@ -55,7 +63,7 @@ std::string InfileTree::GetString(std::string query, int index) { using xmlpp::Node; using xmlpp::TextNode; using xmlpp::Element; - const Node::NodeSet nodeset = current_node_->find(query); + const NodeSet nodeset = current_node_->find(query); if (nodeset.empty()) { throw KeyError("Could not find a node by the name: " + query); } @@ -72,7 +80,7 @@ std::string InfileTree::GetString(std::string query, int index) { " is not an Element node."); } - const Node::const_NodeList nodelist = element->get_children(); + const const_NodeList nodelist = element->get_children(); if (nodelist.size() != 1) { throw ValueError("Element node " + element->get_name() + " has more content than expected."); @@ -91,6 +99,7 @@ std::string InfileTree::GetString(std::string query, int index) { // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - std::string InfileTree::GetElementName(int index) { using xmlpp::Node; + std::vector elements; const Node::NodeList nodelist = current_node_->get_children(); Node::NodeList::const_iterator it; @@ -110,7 +119,8 @@ std::string InfileTree::GetElementName(int index) { // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - InfileTree* InfileTree::GetEngineFromQuery(std::string query, int index) { using xmlpp::Node; - const Node::NodeSet nodeset = current_node_->find(query); + + const NodeSet nodeset = current_node_->find(query); if (nodeset.size() 
< index + 1) { throw ValueError("Index exceeds number of nodes in query: " + query); diff --git a/src/xml_parser.cc b/src/xml_parser.cc index a41706826c..0c768d431c 100644 --- a/src/xml_parser.cc +++ b/src/xml_parser.cc @@ -10,6 +10,14 @@ namespace cyclus { +#if LIBXMLXX_MAJOR_VERSION == 2 + using xmlpp::NodeSet; + typedef xmlpp::Node::NodeList const_NodeList; +#else + using xmlpp::Node::NodeSet; + using xmlpp::Node::const_NodeList; +#endif + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - XMLParser::XMLParser() : parser_(NULL) { parser_ = new xmlpp::DomParser(); @@ -58,8 +66,8 @@ xmlpp::Document* XMLParser::Document() { // but which is unvalidatable. The web is truly cobbled together // by a race of evil gnomes. xmlpp::Element* root = doc->get_root_node(); - xmlpp::Node::NodeSet have_base = root->find("//*[@xml:base]"); - xmlpp::Node::NodeSet::iterator it = have_base.begin(); + NodeSet have_base = root->find("//*[@xml:base]"); + NodeSet::iterator it = have_base.begin(); for (; it != have_base.end(); ++it) { reinterpret_cast(*it)->remove_attribute("base", "xml"); } From 8118a7e165758af94caf83ca80e338187699d03b Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Thu, 7 Sep 2023 17:45:29 -0500 Subject: [PATCH 27/82] multistage build for apt and conda --- docker/Dockerfile.apt | 73 ++++++++++++++++++++++++++++++ docker/Dockerfile.conda | 99 +++++++++++++++++++++++++++++++++++++++++ 2 files changed, 172 insertions(+) create mode 100644 docker/Dockerfile.apt create mode 100644 docker/Dockerfile.conda diff --git a/docker/Dockerfile.apt b/docker/Dockerfile.apt new file mode 100644 index 0000000000..5048e4ee22 --- /dev/null +++ b/docker/Dockerfile.apt @@ -0,0 +1,73 @@ +FROM ubuntu:22.04 as apt-base + +ENV TZ=America/Chicago +RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone + +RUN apt update --fix-missing && \ + apt install -y \ + wget \ + bzip2 \ + ca-certificates \ + libglib2.0-0 \ + libxext6 \ + libsm6 \ + libxrender1 \ + openssh-client \ + git \ + vim \ + nano \ + libssh-dev \ + g++ \ + gcc \ + cmake \ + make \ + libglib2.0-dev \ + libxml2-dev \ + libxml++2.6-dev \ + libblas-dev \ + liblapack-dev \ + pkg-config \ + coinor-cbc \ + libboost-dev \ + libhdf5-dev \ + libsqlite3-dev \ + libpcre2-dev \ + gettext-base \ + xz-utils \ + python3-setuptools \ + python3-pytest \ + python3-tables \ + python3-pandas \ + python3-jinja2 \ + cython3 \ + libwebsockets-dev \ + python3-pprintpp \ + && apt clean -y all + +RUN apt install -y libboost-all-dev + +# required for the nosetest +ENV PYTHONWARNINGS ignore +RUN mkdir -p /root/.local/lib/python3.10/site-packages/ +RUN update-alternatives --install /usr/bin/python python /usr/bin/python3 10 + +# + +FROM apt-base as cyclus-install + +COPY . /cyclus +WORKDIR /cyclus + +# Uncomment the following line to run cmake in verbose mode. +# This is sometimes useful for debugging. +#ENV VERBOSE=1 + +# You may add the option "--cmake-debug" to the following command +# for further CMake debugging. 
+RUN python install.py -j 2 --build-type=Release --core-version 999999.999999 + +FROM cyclus-install as cyclus-test + +RUN cyclus_unit_tests + + diff --git a/docker/Dockerfile.conda b/docker/Dockerfile.conda new file mode 100644 index 0000000000..9bbc55ce47 --- /dev/null +++ b/docker/Dockerfile.conda @@ -0,0 +1,99 @@ +FROM ubuntu:22.04 as conda-base + +RUN apt-get update --fix-missing && apt-get install -y wget bzip2 ca-certificates \ + libglib2.0-0 libxext6 libsm6 libxrender1 + + +RUN echo 'export PATH=/opt/conda/bin:$PATH' > /etc/profile.d/conda.sh && \ + wget --quiet https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O ~/miniconda.sh && \ + /bin/bash ~/miniconda.sh -b -p /opt/conda && \ + rm ~/miniconda.sh + +ENV PATH /root/.local/bin:/opt/conda/bin:$PATH + + +# +# apt packages +# +RUN apt-get update && \ + apt-get install -y openssh-client \ + git \ + vim nano && \ + apt-get clean + +# +# conda packages +# +RUN conda config --add channels conda-forge +RUN conda update -n base -c defaults conda +RUN conda install -y mamba +RUN conda update -y --all && \ + mamba install -y \ + openssh \ + gxx_linux-64 \ + gcc_linux-64 \ + cmake \ + make \ + docker-pycreds \ + git \ + xo \ + python-json-logger \ + glib \ + libxml2 \ + libxmlpp \ + libblas \ + libcblas \ + liblapack \ + pkg-config \ + coincbc \ + boost-cpp \ + hdf5 \ + sqlite \ + pcre \ + gettext \ + bzip2 \ + xz \ + setuptools \ + pytest \ + pytables \ + pandas \ + jinja2 \ + "cython<3" \ + websockets \ + pprintpp \ + && \ + mamba install -y --force-reinstall libsqlite && \ + conda clean -y --all +ENV CC /opt/conda/bin/x86_64-conda_cos6-linux-gnu-gcc +ENV CXX /opt/conda/bin/x86_64-conda_cos6-linux-gnu-g++ +ENV CPP /opt/conda/bin/x86_64-conda_cos6-linux-gnu-cpp +ENV PYTHONPATH "/root/.local/lib/python3.10/site-packages/" +# required for the nosetest +ENV PYTHONWARNINGS ignore +RUN mkdir -p /root/.local/lib/python3.10/site-packages/ +# +# pip packages to overide conda +# + +RUN pip install docker + +FROM conda-base as cyclus-install + +COPY . /cyclus +WORKDIR /cyclus + +# Uncomment the following line to run cmake in verbose mode. +# This is sometimes useful for debugging. +#ENV VERBOSE=1 + +# You may add the option "--cmake-debug" to the following command +# for further CMake debugging. +RUN python install.py -j 2 --build-type=Release --core-version 999999.999999 \ + -DBLAS_LIBRARIES="/opt/conda/lib/libblas.so" \ + -DLAPACK_LIBRARIES="/opt/conda/lib/liblapack.so" + +FROM cyclus-install as cyclus-test + +RUN cyclus_unit_tests + + From 15600879982cb86920178a026daa5b98982b7e9c Mon Sep 17 00:00:00 2001 From: "Paul P.H. 
Wilson" Date: Thu, 7 Sep 2023 18:07:40 -0500 Subject: [PATCH 28/82] improve libxml++ flexibility --- CMakeLists.txt | 5 ++++- src/infile_tree.cc | 6 +++--- src/xml_parser.cc | 6 +++--- 3 files changed, 10 insertions(+), 7 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 0214326431..add1b8913d 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -175,7 +175,10 @@ IF(NOT CYCLUS_DOC_ONLY) message("-- LibXML2 Include Dir: ${LIBXML2_INCLUDE_DIR}") # Then use pkg-config for locate specific package - pkg_check_modules(LIBXMLXX REQUIRED IMPORTED_TARGET libxml++-4.0) + pkg_check_modules(LIBXMLXX IMPORTED_TARGET libxml++-4.0) + IF ( NOT LIBXMLXX_LIBRARIES ) + pkg_check_modules(LIBXMLXX IMPORTED_TARGET libxml++-2.6) + ENDIF ( ${LIBXMLXX_LIBRARIES} STREQUAL "" ) SET(LIBS ${LIBS} ${LIBXMLXX_LIBRARIES}) message("-- LibXML++ Include Dir: ${LIBXMLXX_INCLUDE_DIRS}") message("-- LibXML++ Librarires: ${LIBXMLXX_LIBRARIES}") diff --git a/src/infile_tree.cc b/src/infile_tree.cc index 61a60907c3..1540b4c7ff 100644 --- a/src/infile_tree.cc +++ b/src/infile_tree.cc @@ -11,11 +11,11 @@ namespace cyclus { #if LIBXMLXX_MAJOR_VERSION == 2 - using xmlpp::NodeSet; + typedef xmlpp::NodeSet NodeSet; typedef xmlpp::Node::NodeList const_NodeList; #else - using xmlpp::Node::NodeSet; - using xmlpp::Node::const_NodeList; + typedef xmlpp::Node::NodeSet NodeSet; + typedef xmlpp::Node::const_NodeList const_NodeList; #endif // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/src/xml_parser.cc b/src/xml_parser.cc index 0c768d431c..512dbcd11d 100644 --- a/src/xml_parser.cc +++ b/src/xml_parser.cc @@ -11,11 +11,11 @@ namespace cyclus { #if LIBXMLXX_MAJOR_VERSION == 2 - using xmlpp::NodeSet; + typedef xmlpp::NodeSet NodeSet; typedef xmlpp::Node::NodeList const_NodeList; #else - using xmlpp::Node::NodeSet; - using xmlpp::Node::const_NodeList; + typedef xmlpp::Node::NodeSet NodeSet; + typedef xmlpp::Node::const_NodeList const_NodeList; #endif // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - From 69cecd64ad41aa7a2fd01370f7039b6b1ae0c779 Mon Sep 17 00:00:00 2001 From: "Paul P.H. 
Wilson" Date: Fri, 8 Sep 2023 08:05:21 -0500 Subject: [PATCH 29/82] single dockerfile with conda vs apt option --- docker/Dockerfile | 131 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 131 insertions(+) create mode 100644 docker/Dockerfile diff --git a/docker/Dockerfile b/docker/Dockerfile new file mode 100644 index 0000000000..e88e971606 --- /dev/null +++ b/docker/Dockerfile @@ -0,0 +1,131 @@ +ARG pkg_mgr=apt + +FROM ubuntu:22.04 as common-base + +ENV TZ=America/Chicago +RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone + +RUN apt update --fix-missing + + # libglib2.0-0-dev \ + # libxext6 \ + # libsm6 \ + # libxrender1 \ + # openssh-client \ + # git \ + + +FROM common-base as apt-deps + +RUN apt install -y \ + libssh-dev \ + g++ \ + gcc \ + cmake \ + make \ + libxml2-dev \ + libxml++2.6-dev \ + libblas-dev \ + liblapack-dev \ + pkg-config \ + coinor-cbc \ + libboost-all-dev \ + libhdf5-dev \ + libsqlite3-dev \ + libpcre2-dev \ + gettext-base \ + xz-utils \ + python3-setuptools \ + python3-pytest \ + python3-tables \ + python3-pandas \ + python3-jinja2 \ + cython3 \ + libwebsockets-dev \ + python3-pprintpp \ + && apt clean -y all + +RUN update-alternatives --install /usr/bin/python python /usr/bin/python3 10 + +FROM common-base as conda-deps + +RUN apt install -y \ + wget \ + bzip2 \ + ca-certificates \ + && apt clean -y all + +RUN echo 'export PATH=/opt/conda/bin:$PATH' > /etc/profile.d/conda.sh && \ + wget --quiet https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O ~/miniconda.sh && \ + /bin/bash ~/miniconda.sh -b -p /opt/conda && \ + rm ~/miniconda.sh + +ENV PATH /root/.local/bin:/opt/conda/bin:$PATH + +RUN conda config --add channels conda-forge +RUN conda update -n base -c defaults conda +RUN conda install -y mamba +RUN conda update -y --all && \ + mamba install -y \ + openssh \ + gxx_linux-64 \ + gcc_linux-64 \ + cmake \ + make \ + docker-pycreds \ + git \ + xo \ + python-json-logger \ + glib \ + libxml2 \ + libxmlpp \ + libblas \ + libcblas \ + liblapack \ + pkg-config \ + coincbc \ + boost-cpp \ + hdf5 \ + sqlite \ + pcre \ + gettext \ + bzip2 \ + xz \ + setuptools \ + pytest \ + pytables \ + pandas \ + jinja2 \ + "cython<3" \ + websockets \ + pprintpp \ + && \ + mamba install -y --force-reinstall libsqlite && \ + conda clean -y --all +ENV CC /opt/conda/bin/x86_64-conda_cos6-linux-gnu-gcc +ENV CXX /opt/conda/bin/x86_64-conda_cos6-linux-gnu-g++ +ENV CPP /opt/conda/bin/x86_64-conda_cos6-linux-gnu-cpp +ENV PYTHONPATH "/root/.local/lib/python3.10/site-packages/" + +FROM ${pkg_mgr}-deps as cyclus + +# required for the nosetest +ENV PYTHONWARNINGS ignore +RUN mkdir -p /root/.local/lib/python3.10/site-packages/ + +COPY . /cyclus +WORKDIR /cyclus + +# Uncomment the following line to run cmake in verbose mode. +# This is sometimes useful for debugging. +#ENV VERBOSE=1 + +# You may add the option "--cmake-debug" to the following command +# for further CMake debugging. +RUN python install.py -j 8 --build-type=Release --core-version 999999.999999 + +FROM cyclus as cyclus-test + +RUN cyclus_unit_tests + + From 6206b630d23434354b84e075bae97b8bbe139cbb Mon Sep 17 00:00:00 2001 From: "Paul P.H. 
Wilson" Date: Fri, 8 Sep 2023 08:05:34 -0500 Subject: [PATCH 30/82] cleanup libxml++ options --- CMakeLists.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index add1b8913d..09ac57986c 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -177,8 +177,8 @@ IF(NOT CYCLUS_DOC_ONLY) # Then use pkg-config for locate specific package pkg_check_modules(LIBXMLXX IMPORTED_TARGET libxml++-4.0) IF ( NOT LIBXMLXX_LIBRARIES ) - pkg_check_modules(LIBXMLXX IMPORTED_TARGET libxml++-2.6) - ENDIF ( ${LIBXMLXX_LIBRARIES} STREQUAL "" ) + pkg_check_modules(LIBXMLXX REQUIRED IMPORTED_TARGET libxml++-2.6) + ENDIF ( NOT LIBXMLXX_LIBRARIES ) SET(LIBS ${LIBS} ${LIBXMLXX_LIBRARIES}) message("-- LibXML++ Include Dir: ${LIBXMLXX_INCLUDE_DIRS}") message("-- LibXML++ Librarires: ${LIBXMLXX_LIBRARIES}") From ede3724169e20a254e0b499572959b20548286a3 Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Fri, 8 Sep 2023 08:05:59 -0500 Subject: [PATCH 31/82] first attempt at docker file prep for CI --- .github/workflows/docker_publish.yml | 100 +++++++++++++++++++++++++++ 1 file changed, 100 insertions(+) create mode 100644 .github/workflows/docker_publish.yml diff --git a/.github/workflows/docker_publish.yml b/.github/workflows/docker_publish.yml new file mode 100644 index 0000000000..dda70b5dfe --- /dev/null +++ b/.github/workflows/docker_publish.yml @@ -0,0 +1,100 @@ +name: Build & Publish docker image for CI + +on: + # allows us to run workflows manually + workflow_dispatch: + push: + paths: + - 'docker/Dockerfile' + - '.github/workflows/docker_publish.yml' + +jobs: + build-dependency-and-test-img: + runs-on: ubuntu-latest + + strategy: + matrix: + ubuntu_versions : [ + 22.04, + ] + pkg_mgr : [ + apt, + conda, + ] + + name: Installing Dependencies, Building cyclus and running tests + steps: + - name: default environment + run: | + echo "tag-latest-on-default=false" >> "$GITHUB_ENV" + + - name: condition on trigger parameters + if: ${{ github.repository_owner == 'svalinn' && github.ref == 'refs/heads/develop' }} + run: | + echo "tag-latest-on-default=true" >> "$GITHUB_ENV" + + - name: Log in to the Container registry + uses: docker/login-action@v2 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Checkout repository + uses: actions/checkout@v3 + + - name: Installing Dependencies in Docker image + uses: firehed/multistage-docker-build-action@v1 + with: + repository: ghcr.io/${{ github.repository_owner }}/cyclus_${{ matrix.ubuntu_versions }}_${{ matrix.pkg_mgr }} + stages: ${{ matrix.pkg_mgr }}-deps + server-stage: cyclus + quiet: false + parallel: true + tag-latest-on-default: ${{ env.tag-latest-on-default }} + dockerfile: docker/Dockerfile + build-args: pkg_mgr=${{ matrix.pkg_mgr }} + + + push_stable_ci_img: + needs: [build-dependency-and-test-img] + runs-on: ubuntu-latest + + strategy: + matrix: + ubuntu_versions : [ + 22.04, + ] + pkg_mgr : [ + apt, + conda, + ] + + name: Pushing final images + steps: + - name: Log in to the Container registry + if: ${{ github.repository_owner == 'cyclus' }} + uses: docker/login-action@v2 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Store image name + if: ${{ github.repository_owner == 'cyclus' }} + run: | + echo "image_base_tag=ghcr.io/${{ github.repository_owner }}/cyclus_${{ matrix.ubuntu_versions }}_${{ matrix.pkg_mgr }}" >> "$GITHUB_ENV" + + - name: Push Image as latest img + 
if: ${{ github.repository_owner == 'cyclus' && github.ref == 'refs/heads/develop' }} + uses: akhilerm/tag-push-action@v2.1.0 + with: + src: ${{ env.image_base_tag }}/cyclus:latest + dst: ${{ env.image_base_tag }}:latest + + - name: Push Image as latest img + if: ${{ github.repository_owner == 'cyclus' && github.ref == 'refs/heads/develop' }} + uses: akhilerm/tag-push-action@v2.1.0 + with: + src: ${{ env.image_base_tag }}:latest + dst: ${{ env.image_base_tag }}:stable From 969bb47ae5506db5160953f2c2008d36f6c6e153 Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Fri, 8 Sep 2023 08:12:30 -0500 Subject: [PATCH 32/82] add make core arg --- docker/Dockerfile | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index e88e971606..e58cc19532 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -1,4 +1,5 @@ ARG pkg_mgr=apt +ARG make_cores=2 FROM ubuntu:22.04 as common-base @@ -108,6 +109,7 @@ ENV CPP /opt/conda/bin/x86_64-conda_cos6-linux-gnu-cpp ENV PYTHONPATH "/root/.local/lib/python3.10/site-packages/" FROM ${pkg_mgr}-deps as cyclus +ARG make_cores=2 # required for the nosetest ENV PYTHONWARNINGS ignore @@ -122,7 +124,7 @@ WORKDIR /cyclus # You may add the option "--cmake-debug" to the following command # for further CMake debugging. -RUN python install.py -j 8 --build-type=Release --core-version 999999.999999 +RUN python install.py -j ${make_cores} --build-type=Release --core-version 999999.999999 FROM cyclus as cyclus-test From 20ef6eb15d0de6d8358c070557f601621d69bc60 Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Fri, 8 Sep 2023 08:14:11 -0500 Subject: [PATCH 33/82] indentation fix --- .github/workflows/docker_publish.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/docker_publish.yml b/.github/workflows/docker_publish.yml index dda70b5dfe..4c9eecf7ad 100644 --- a/.github/workflows/docker_publish.yml +++ b/.github/workflows/docker_publish.yml @@ -65,9 +65,9 @@ jobs: ubuntu_versions : [ 22.04, ] - pkg_mgr : [ - apt, - conda, + pkg_mgr : [ + apt, + conda, ] name: Pushing final images From dc51cc84016c3589fca779b3aa23abdfd8b6277e Mon Sep 17 00:00:00 2001 From: "Paul P.H. 
Wilson" Date: Fri, 8 Sep 2023 12:44:05 -0500 Subject: [PATCH 34/82] add muiltstage based build test --- .github/workflows/build_test.yml | 73 +++++++++++++++----------------- 1 file changed, 35 insertions(+), 38 deletions(-) diff --git a/.github/workflows/build_test.yml b/.github/workflows/build_test.yml index eba3f30d64..b08e7944ec 100644 --- a/.github/workflows/build_test.yml +++ b/.github/workflows/build_test.yml @@ -1,50 +1,47 @@ -name: Standard Build and Test +name: Build & Publish docker image for CI on: # allows us to run workflows manually workflow_dispatch: - pull_request: - branches: - - master + pull-request: paths-ignore: + - 'docker/Dockerfile' - '.github/workflows/docker_publish.yml' - push: - branches: - - master - paths-ignore: - - '.github/workflows/docker_publish.yml' - - jobs: - BuildTest: - runs-on: ubuntu-latest + build-dependency-and-test-img: + runs-on: ubuntu-latest - container: - image: cyclus/cyclus-deps - + strategy: + matrix: + ubuntu_versions : [ + 22.04, + ] + pkg_mgr : [ + apt, + conda, + ] + + name: Installing Dependencies, Building cyclus and running tests steps: - - name: Checkout repository - uses: actions/checkout@v2 + - name: Log in to the Container registry + uses: docker/login-action@v2 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} - - name: setup - run: | - echo "HOME=/github/home/" >> $GITHUB_ENV - echo "PATH=$PATH:${HOME}/.local/bin" >> $GITHUB_ENV - echo "LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${HOME}/.local/lib" >> $GITHUB_ENV - - name: Building Cyclus - run: | - mkdir -p ${HOME}/.local/lib/python3.7/site-packages/ - cd ${GITHUB_WORKSPACE} - python install.py -j 2 \ - --build-type=Release --core-version 999999.999999 \ - -DBLAS_LIBRARIES="/opt/conda/lib/libblas.so" \ - -DLAPACK_LIBRARIES="/opt/conda/lib/liblapack.so" + - name: Checkout repository + uses: actions/checkout@v3 - - name: Unit Test - run: | - cyclus_unit_tests; exit $? - - - name: Nosetest - run: | - nosetests -w ${GITHUB_WORKSPACE}/tests; exit $? + - name: Installing Dependencies in Docker image + uses: firehed/multistage-docker-build-action@v1 + with: + repository: ghcr.io/${{ github.repository_owner }}/cyclus_${{ matrix.ubuntu_versions }}_${{ matrix.pkg_mgr }} + stages: ${{ matrix.pkg_mgr }}-deps, cyclus + server-stage: cyclus-test + quiet: false + parallel: true + tag-latest-on-default: false + dockerfile: docker/Dockerfile + build-args: pkg_mgr=${{ matrix.pkg_mgr }} From cb8a022424208982eb36cb79f0e572e54c3e2cd6 Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Fri, 8 Sep 2023 12:45:04 -0500 Subject: [PATCH 35/82] dumb typo --- .github/workflows/build_test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build_test.yml b/.github/workflows/build_test.yml index b08e7944ec..ce3dcacf90 100644 --- a/.github/workflows/build_test.yml +++ b/.github/workflows/build_test.yml @@ -3,7 +3,7 @@ name: Build & Publish docker image for CI on: # allows us to run workflows manually workflow_dispatch: - pull-request: + pull_request: paths-ignore: - 'docker/Dockerfile' - '.github/workflows/docker_publish.yml' From 4aba0f8eea41ec78ae98bbb6a0eaefb73ad2bc0e Mon Sep 17 00:00:00 2001 From: "Paul P.H. 
Wilson" Date: Fri, 8 Sep 2023 12:48:42 -0500 Subject: [PATCH 36/82] change name of build test action --- .github/workflows/build_test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build_test.yml b/.github/workflows/build_test.yml index ce3dcacf90..3917ae7ca4 100644 --- a/.github/workflows/build_test.yml +++ b/.github/workflows/build_test.yml @@ -1,4 +1,4 @@ -name: Build & Publish docker image for CI +name: Build & Test PR on: # allows us to run workflows manually From 2f1145893a439b8d8a941ee62ffca8ac85415776 Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Fri, 8 Sep 2023 17:22:33 -0500 Subject: [PATCH 37/82] update Json CustomWriter to match contemporary python format --- src/toolkit/infile_converters.cc | 2 +- tests/toolkit/infile_converters_tests.cc | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/toolkit/infile_converters.cc b/src/toolkit/infile_converters.cc index c7f88e5f8f..ac49b780cd 100644 --- a/src/toolkit/infile_converters.cc +++ b/src/toolkit/infile_converters.cc @@ -157,7 +157,7 @@ std::string XmlToJson(std::string s) { jroot[rootname] = Value(Json::objectValue); AddXmlToJson(&xroot, jroot[rootname], rootname); Json::CustomWriter writer = Json::CustomWriter("{", "}", "[", "]", ": ", - ", ", " ", 80); + ",", " ", 1); return writer.write(jroot); } diff --git a/tests/toolkit/infile_converters_tests.cc b/tests/toolkit/infile_converters_tests.cc index d0bb90caee..b1ac647763 100644 --- a/tests/toolkit/infile_converters_tests.cc +++ b/tests/toolkit/infile_converters_tests.cc @@ -86,9 +86,9 @@ TEST(InfileConverters, JsonPyRoundTrip) { cyclus::PyStart(); string inp = cyclus::toolkit::XmlToJson(MakeInput()); string p1 = cyclus::toolkit::JsonToPy(inp); - string j1 = cyclus::toolkit::PyToJson(p1); + string j1 = cyclus::toolkit::PyToJson(p1) + "\n"; string p2 = cyclus::toolkit::JsonToPy(j1); - string j2 = cyclus::toolkit::PyToJson(p2); + string j2 = cyclus::toolkit::PyToJson(p2) + "\n"; cyclus::PyStop(); EXPECT_STREQ(inp.c_str(), j1.c_str()); From 5a65d5478bdca02c03f785bb4782763c9d8303eb Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Fri, 8 Sep 2023 17:25:56 -0500 Subject: [PATCH 38/82] add simplified changelog test --- .github/workflows/changelog_test.yml | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/.github/workflows/changelog_test.yml b/.github/workflows/changelog_test.yml index 87f3daba13..98e0b9360a 100644 --- a/.github/workflows/changelog_test.yml +++ b/.github/workflows/changelog_test.yml @@ -10,7 +10,6 @@ env: jobs: changelog_update: - if: ${{ github.event_name == 'pull_request' }} runs-on: ubuntu-latest container: image: alpine:3.14 @@ -23,10 +22,17 @@ jobs: git --version - name: Checkout repository - uses: actions/checkout@v2 - - - name: Housekeeping - run: | + uses: actions/checkout@v3 + + - run: | + git config --global --add safe.directory ${GITHUB_WORKSPACE} cd $GITHUB_WORKSPACE - housekeeping_script/changelog_test.sh + git remote add cyclus https://github.com/cyclus/cyclus.git + git fetch cyclus + change=`git diff cyclus/master -- CHANGELOG.rst | wc -l` + git remote remove cyclys + if [ $change -eq 0 ]; then + echo "CHANGELOG.rst has not been updated" + exit 1 + fi From 2e272cf171085bc4794ec58e00b5cb4ec1dee201 Mon Sep 17 00:00:00 2001 From: "Paul P.H. 
Wilson" Date: Fri, 8 Sep 2023 17:27:13 -0500 Subject: [PATCH 39/82] another dumb typo --- .github/workflows/changelog_test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/changelog_test.yml b/.github/workflows/changelog_test.yml index 98e0b9360a..cb327a45c6 100644 --- a/.github/workflows/changelog_test.yml +++ b/.github/workflows/changelog_test.yml @@ -30,7 +30,7 @@ jobs: git remote add cyclus https://github.com/cyclus/cyclus.git git fetch cyclus change=`git diff cyclus/master -- CHANGELOG.rst | wc -l` - git remote remove cyclys + git remote remove cyclus if [ $change -eq 0 ]; then echo "CHANGELOG.rst has not been updated" exit 1 From 82f660c4453834a2e294a38e1675bafb81804bbb Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Fri, 8 Sep 2023 17:28:52 -0500 Subject: [PATCH 40/82] updated changelog --- CHANGELOG.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 9dc54da862..0e6dcbe216 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -22,7 +22,7 @@ Since last release **Changed:** * Moved to unified CHANGELOG Entry and check them with GithubAction (#1571) - +* major update and modernization of build (#1587) **Removed:** From 2d443c27725fc70006cd388142039aefbee0c572 Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Fri, 8 Sep 2023 17:32:57 -0500 Subject: [PATCH 41/82] add trigger for testing --- .github/workflows/build_test.yml | 1 + CHANGELOG.rst | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/build_test.yml b/.github/workflows/build_test.yml index 3917ae7ca4..443da16e7d 100644 --- a/.github/workflows/build_test.yml +++ b/.github/workflows/build_test.yml @@ -3,6 +3,7 @@ name: Build & Test PR on: # allows us to run workflows manually workflow_dispatch: + push: pull_request: paths-ignore: - 'docker/Dockerfile' diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 0e6dcbe216..880d1831db 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -22,7 +22,8 @@ Since last release **Changed:** * Moved to unified CHANGELOG Entry and check them with GithubAction (#1571) -* major update and modernization of build (#1587) +* Major update and modernization of build (#1587) +* Changed Json formatting for compatibility with current python standards (#1587) **Removed:** From 6027a6448d40e2637bbf21260abd1c919aaaa6cf Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Fri, 8 Sep 2023 17:34:37 -0500 Subject: [PATCH 42/82] add testing to docker image build --- .github/workflows/docker_publish.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/docker_publish.yml b/.github/workflows/docker_publish.yml index 4c9eecf7ad..2e3035d00d 100644 --- a/.github/workflows/docker_publish.yml +++ b/.github/workflows/docker_publish.yml @@ -47,8 +47,8 @@ jobs: uses: firehed/multistage-docker-build-action@v1 with: repository: ghcr.io/${{ github.repository_owner }}/cyclus_${{ matrix.ubuntu_versions }}_${{ matrix.pkg_mgr }} - stages: ${{ matrix.pkg_mgr }}-deps - server-stage: cyclus + stages: ${{ matrix.pkg_mgr }}-deps, cyclus + server-stage: cyclus-test quiet: false parallel: true tag-latest-on-default: ${{ env.tag-latest-on-default }} From ec9218a9267fe7b03a64b87d0f2a3c73b2ac5bca Mon Sep 17 00:00:00 2001 From: "Paul P.H. 
Wilson" Date: Fri, 8 Sep 2023 18:47:43 -0500 Subject: [PATCH 43/82] update path in apt version --- docker/Dockerfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index e58cc19532..9814f15975 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -61,7 +61,7 @@ RUN echo 'export PATH=/opt/conda/bin:$PATH' > /etc/profile.d/conda.sh && \ /bin/bash ~/miniconda.sh -b -p /opt/conda && \ rm ~/miniconda.sh -ENV PATH /root/.local/bin:/opt/conda/bin:$PATH +ENV PATH /opt/conda/bin:$PATH RUN conda config --add channels conda-forge RUN conda update -n base -c defaults conda @@ -113,7 +113,7 @@ ARG make_cores=2 # required for the nosetest ENV PYTHONWARNINGS ignore -RUN mkdir -p /root/.local/lib/python3.10/site-packages/ +ENV PATH /root/.local/bin:$PATH COPY . /cyclus WORKDIR /cyclus From 3c274f5e769862f11ba7609f4a2f89fa363319d5 Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Thu, 14 Sep 2023 08:39:48 -0500 Subject: [PATCH 44/82] add correct coin libraries for APT and cleanup --- docker/Dockerfile | 15 +++++---------- 1 file changed, 5 insertions(+), 10 deletions(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index 9814f15975..ddea4b203d 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -6,15 +6,7 @@ FROM ubuntu:22.04 as common-base ENV TZ=America/Chicago RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone -RUN apt update --fix-missing - - # libglib2.0-0-dev \ - # libxext6 \ - # libsm6 \ - # libxrender1 \ - # openssh-client \ - # git \ - +RUN apt update --fix-missing FROM common-base as apt-deps @@ -29,7 +21,10 @@ RUN apt install -y \ libblas-dev \ liblapack-dev \ pkg-config \ - coinor-cbc \ + coinor-libCbc-dev \ + coinor-libClp-dev \ + coinor-libcoinutils-dev \ + coinor-libOsi-dev \ libboost-all-dev \ libhdf5-dev \ libsqlite3-dev \ From 6da6518c79d3e9c3e115a435553b4d8e541f7c20 Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Thu, 14 Sep 2023 08:58:25 -0500 Subject: [PATCH 45/82] fix capitalization --- docker/Dockerfile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index ddea4b203d..4e46c58990 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -21,10 +21,10 @@ RUN apt install -y \ libblas-dev \ liblapack-dev \ pkg-config \ - coinor-libCbc-dev \ - coinor-libClp-dev \ + coinor-libcbc-dev \ + coinor-libclp-dev \ coinor-libcoinutils-dev \ - coinor-libOsi-dev \ + coinor-libosi-dev \ libboost-all-dev \ libhdf5-dev \ libsqlite3-dev \ From 7690ea4e91155943634d9aba19eda7b219b09faf Mon Sep 17 00:00:00 2001 From: "Paul P.H. 
Wilson" Date: Thu, 14 Sep 2023 12:01:44 -0500 Subject: [PATCH 46/82] clean up extra ocker files --- docker/Dockerfile.apt | 73 -------------------------- docker/Dockerfile.conda | 99 ----------------------------------- docker/README.md | 52 ++++-------------- docker/cyclus-ci/Dockerfile | 15 ------ docker/cyclus-deps/Dockerfile | 78 --------------------------- 5 files changed, 9 insertions(+), 308 deletions(-) delete mode 100644 docker/Dockerfile.apt delete mode 100644 docker/Dockerfile.conda delete mode 100644 docker/cyclus-ci/Dockerfile delete mode 100644 docker/cyclus-deps/Dockerfile diff --git a/docker/Dockerfile.apt b/docker/Dockerfile.apt deleted file mode 100644 index 5048e4ee22..0000000000 --- a/docker/Dockerfile.apt +++ /dev/null @@ -1,73 +0,0 @@ -FROM ubuntu:22.04 as apt-base - -ENV TZ=America/Chicago -RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone - -RUN apt update --fix-missing && \ - apt install -y \ - wget \ - bzip2 \ - ca-certificates \ - libglib2.0-0 \ - libxext6 \ - libsm6 \ - libxrender1 \ - openssh-client \ - git \ - vim \ - nano \ - libssh-dev \ - g++ \ - gcc \ - cmake \ - make \ - libglib2.0-dev \ - libxml2-dev \ - libxml++2.6-dev \ - libblas-dev \ - liblapack-dev \ - pkg-config \ - coinor-cbc \ - libboost-dev \ - libhdf5-dev \ - libsqlite3-dev \ - libpcre2-dev \ - gettext-base \ - xz-utils \ - python3-setuptools \ - python3-pytest \ - python3-tables \ - python3-pandas \ - python3-jinja2 \ - cython3 \ - libwebsockets-dev \ - python3-pprintpp \ - && apt clean -y all - -RUN apt install -y libboost-all-dev - -# required for the nosetest -ENV PYTHONWARNINGS ignore -RUN mkdir -p /root/.local/lib/python3.10/site-packages/ -RUN update-alternatives --install /usr/bin/python python /usr/bin/python3 10 - -# - -FROM apt-base as cyclus-install - -COPY . /cyclus -WORKDIR /cyclus - -# Uncomment the following line to run cmake in verbose mode. -# This is sometimes useful for debugging. -#ENV VERBOSE=1 - -# You may add the option "--cmake-debug" to the following command -# for further CMake debugging. 
-RUN python install.py -j 2 --build-type=Release --core-version 999999.999999 - -FROM cyclus-install as cyclus-test - -RUN cyclus_unit_tests - - diff --git a/docker/Dockerfile.conda b/docker/Dockerfile.conda deleted file mode 100644 index 9bbc55ce47..0000000000 --- a/docker/Dockerfile.conda +++ /dev/null @@ -1,99 +0,0 @@ -FROM ubuntu:22.04 as conda-base - -RUN apt-get update --fix-missing && apt-get install -y wget bzip2 ca-certificates \ - libglib2.0-0 libxext6 libsm6 libxrender1 - - -RUN echo 'export PATH=/opt/conda/bin:$PATH' > /etc/profile.d/conda.sh && \ - wget --quiet https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O ~/miniconda.sh && \ - /bin/bash ~/miniconda.sh -b -p /opt/conda && \ - rm ~/miniconda.sh - -ENV PATH /root/.local/bin:/opt/conda/bin:$PATH - - -# -# apt packages -# -RUN apt-get update && \ - apt-get install -y openssh-client \ - git \ - vim nano && \ - apt-get clean - -# -# conda packages -# -RUN conda config --add channels conda-forge -RUN conda update -n base -c defaults conda -RUN conda install -y mamba -RUN conda update -y --all && \ - mamba install -y \ - openssh \ - gxx_linux-64 \ - gcc_linux-64 \ - cmake \ - make \ - docker-pycreds \ - git \ - xo \ - python-json-logger \ - glib \ - libxml2 \ - libxmlpp \ - libblas \ - libcblas \ - liblapack \ - pkg-config \ - coincbc \ - boost-cpp \ - hdf5 \ - sqlite \ - pcre \ - gettext \ - bzip2 \ - xz \ - setuptools \ - pytest \ - pytables \ - pandas \ - jinja2 \ - "cython<3" \ - websockets \ - pprintpp \ - && \ - mamba install -y --force-reinstall libsqlite && \ - conda clean -y --all -ENV CC /opt/conda/bin/x86_64-conda_cos6-linux-gnu-gcc -ENV CXX /opt/conda/bin/x86_64-conda_cos6-linux-gnu-g++ -ENV CPP /opt/conda/bin/x86_64-conda_cos6-linux-gnu-cpp -ENV PYTHONPATH "/root/.local/lib/python3.10/site-packages/" -# required for the nosetest -ENV PYTHONWARNINGS ignore -RUN mkdir -p /root/.local/lib/python3.10/site-packages/ -# -# pip packages to overide conda -# - -RUN pip install docker - -FROM conda-base as cyclus-install - -COPY . /cyclus -WORKDIR /cyclus - -# Uncomment the following line to run cmake in verbose mode. -# This is sometimes useful for debugging. -#ENV VERBOSE=1 - -# You may add the option "--cmake-debug" to the following command -# for further CMake debugging. -RUN python install.py -j 2 --build-type=Release --core-version 999999.999999 \ - -DBLAS_LIBRARIES="/opt/conda/lib/libblas.so" \ - -DLAPACK_LIBRARIES="/opt/conda/lib/liblapack.so" - -FROM cyclus-install as cyclus-test - -RUN cyclus_unit_tests - - diff --git a/docker/README.md b/docker/README.md index 7639ab3634..7c6f4917d6 100644 --- a/docker/README.md +++ b/docker/README.md @@ -1,46 +1,12 @@ +This Dockerfile supports two paths for building a docker image with Cyclus, one +that uses `conda` to install depenendencies and one that uses Ubuntu's `apt` to +install dependencies. -Each subdirectory contains a dockerfile that does something useful: +All of the docker images must be built from the top level directory of the +Cyclus code space. -* ``cyclus-deps`` builds all cyclus dependencies. This is used as the base - image for other dockerfile's that build cyclus and should be updated only - occasionally as needed and pushed up to the docker hub - ``cyclus/cyclus-deps`` repository: +To build the docker image in using `conda`: +`docker build --build-arg pkg_mgr=conda -f docker/Dockerfile .` - ``` - cd cyclus-deps - docker build -t cyclus/cyclus-deps:X.X . 
- docker tag cyclus/cyclus-deps:X.X cyclus/cyclus-deps:latest - docker push cyclus/cyclus-deps - ``` - -* ``cyclus-ci`` is the dockerfile used for running cyclus on a continuous - integration service. This dockerfile assumes that the current working - directory is a cyclus repository - and that version of cyclus is copied into - the docker container and used for the build. The dockerfile in the cyclus - repository root is a symbolic link to this dockerfile. - -* ``deb-ci`` is the dockerfile used to generate the Cyclus debian installation - package. It contains 2 files, one Dockerfile_template and a script which: - * replace the template variables to the appropriate values: the major ubuntu - version require (provided as an parameter) and the commit hash tag in the - Dockerfile_template (recover by the script), - * runs the Dockerfile, - * extract the debian package, - * upload it on dory.fuelcycle.org. - -The script ``dockercyclus.sh`` downloads (if not already downloaded before) -the cyclus/cycamore docker image and passes all given arguments to an cyclus -command run inside a docker container. The current working directory is also -mounted inside the docker container so files in it (recursively) can be seen -by cyclus, and all output files end up in the host working directory. This is -an example of an alternative distribution mechanism for cyclus. - -The ``dockerbuild.sh`` script assumes the current working directory contains -the cyclus core repository and mounts it inside a docker container and builds -and installs the cyclus kernel. The built docker image is saved as -cyclus/cyclus:local - which can be used to run tests, etc. This could become -an easy way to onboard new kernel developers - they no longer have to set up a -fancy environment - all they have to do is clone cyclus and install docker. - -The ``dockerinst.sh`` script is similar to ``dockerbuild.sh`` except that it -uses ``install.py`` to build and install cyclus. \ No newline at end of file +To build the docker image in using `apt`: +`docker build --build-arg pkg_mgr=apt -f docker/Dockerfile .` diff --git a/docker/cyclus-ci/Dockerfile b/docker/cyclus-ci/Dockerfile deleted file mode 100644 index 1cd95e966f..0000000000 --- a/docker/cyclus-ci/Dockerfile +++ /dev/null @@ -1,15 +0,0 @@ -FROM cyclus-local - -COPY . /cyclus -WORKDIR /cyclus - -# Uncomment the following line to run cmake in verbose mode. -# This is sometimes useful for debugging. -#ENV VERBOSE=1 - -# You may add the option "--cmake-debug" to the following command -# for further CMake debugging. 
-RUN python install.py -j 2 --build-type=Release --core-version 999999.999999 \ - -DBLAS_LIBRARIES="/opt/conda/lib/libblas.so" \ - -DLAPACK_LIBRARIES="/opt/conda/lib/liblapack.so" - diff --git a/docker/cyclus-deps/Dockerfile b/docker/cyclus-deps/Dockerfile deleted file mode 100644 index 7c70a8c6a5..0000000000 --- a/docker/cyclus-deps/Dockerfile +++ /dev/null @@ -1,78 +0,0 @@ -FROM ubuntu:22.04 as base - -RUN apt-get update --fix-missing && apt-get install -y wget bzip2 ca-certificates \ - libglib2.0-0 libxext6 libsm6 libxrender1 - - -RUN echo 'export PATH=/opt/conda/bin:$PATH' > /etc/profile.d/conda.sh && \ - wget --quiet https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O ~/miniconda.sh && \ - /bin/bash ~/miniconda.sh -b -p /opt/conda && \ - rm ~/miniconda.sh - -ENV PATH /root/.local/bin:/opt/conda/bin:$PATH - - -# -# apt packages -# -RUN apt-get update && \ - apt-get install -y openssh-client \ - git \ - vim nano && \ - apt-get clean - -# -# conda packages -# -RUN conda config --add channels conda-forge -RUN conda update -n base -c defaults conda -RUN conda install -y mamba -RUN conda update -y --all && \ - mamba install -y \ - openssh \ - gxx_linux-64 \ - gcc_linux-64 \ - cmake \ - make \ - docker-pycreds \ - git \ - xo \ - python-json-logger \ - glib \ - libxml2 \ - libxmlpp \ - libblas \ - libcblas \ - liblapack \ - pkg-config \ - coincbc \ - boost-cpp \ - hdf5 \ - sqlite \ - pcre \ - gettext \ - bzip2 \ - xz \ - setuptools \ - pytest \ - pytables \ - pandas \ - jinja2 \ - "cython<3" \ - websockets \ - pprintpp \ - && \ - mamba install -y --force-reinstall libsqlite && \ - conda clean -y --all -ENV CC /opt/conda/bin/x86_64-conda_cos6-linux-gnu-gcc -ENV CXX /opt/conda/bin/x86_64-conda_cos6-linux-gnu-g++ -ENV CPP /opt/conda/bin/x86_64-conda_cos6-linux-gnu-cpp -ENV PYTHONPATH "/root/.local/lib/python3.10/site-packages/" -# required for the nosetest -ENV PYTHONWARNINGS ignore -RUN mkdir -p /root/.local/lib/python3.10/site-packages/ -# -# pip packages to overide conda -# - -RUN pip install docker From 80d91ff8757e82bd58815e326d08dead1e1b090b Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Thu, 14 Sep 2023 12:22:07 -0500 Subject: [PATCH 47/82] remove files that were previously hidden --- xCmake/FindCOIN.cmake | 194 -------------------------------------- xCmake/FindGlib.cmake | 50 ---------- xCmake/FindGlibmm.cmake | 45 --------- xCmake/FindLibXML++.cmake | 57 ----------- xCmake/FindNumpy.cmake | 87 ----------------- xCmake/FindSigC++.cmake | 41 -------- xCmake/FindSqlite3.cmake | 58 ------------ xCmake/FindTcmalloc.cmake | 39 -------- 8 files changed, 571 deletions(-) delete mode 100644 xCmake/FindCOIN.cmake delete mode 100644 xCmake/FindGlib.cmake delete mode 100644 xCmake/FindGlibmm.cmake delete mode 100644 xCmake/FindLibXML++.cmake delete mode 100644 xCmake/FindNumpy.cmake delete mode 100644 xCmake/FindSigC++.cmake delete mode 100644 xCmake/FindSqlite3.cmake delete mode 100644 xCmake/FindTcmalloc.cmake diff --git a/xCmake/FindCOIN.cmake b/xCmake/FindCOIN.cmake deleted file mode 100644 index 5190103876..0000000000 --- a/xCmake/FindCOIN.cmake +++ /dev/null @@ -1,194 +0,0 @@ -# agented after FindCOIN.cmake in the lemon project - -# Written by: Matthew Gidden -# Last updated: 12/17/12 -# Last updated: 16/08/12 - -# This cmake file is designed to locate coin-related -# dependencies on a filesystem. -# -# If the coin dependencies were installed in a non-standard -# directory, e.g. 
installed from source perhaps, then -# the user can provide a prefix hint via the COIN_ROOT_DIR -# cmake variable: -# $> cmake ../src -DCOIN_ROOT_DIR=/path/to/coin/root - -# To date, this install requires the following dev versions -# of the respective coin libraries: -# * coinor-libCbc-dev -# * coinor-libClp-dev -# * coinor-libcoinutils-dev -# * coinor-libOsi-dev - -# -# Get the root directory hint if provided -# -IF(NOT DEFINED COIN_ROOT_DIR) - SET(COIN_ROOT_DIR "$ENV{COIN_ROOT_DIR}") - MESSAGE("\tCOIN Root Dir: ${COIN_INCLUDE_DIR}") -ENDIF(NOT DEFINED COIN_ROOT_DIR) -MESSAGE(STATUS "COIN_ROOT_DIR hint is : ${COIN_ROOT_DIR}") - -# -# Find the path based on a required header file -# -MESSAGE(STATUS "Coin multiple library dependency status:") -FIND_PATH(COIN_INCLUDE_DIR coin/CbcModel.hpp - HINTS "${COIN_INCLUDE_DIR}" - HINTS "${COIN_ROOT_DIR}/include" - ${DEPS_INCLUDE_HINTS} - HINTS /usr/ - HINTS /usr/include/ - HINTS /usr/local/ - HINTS /usr/local/include/ - HINTS /usr/coin/ - HINTS /usr/coin-Cbc/ - HINTS /usr/local/coin/ - HINTS /usr/local/coin-Cbc/ - ) -set(COIN_INCLUDE_DIR ${COIN_INCLUDE_DIR}/coin) -MESSAGE("\tCOIN Include Dir: ${COIN_INCLUDE_DIR}") - -# -# Find all coin library dependencies -# -FIND_LIBRARY(COIN_CBC_LIBRARY - NAMES Cbc libCbc #libCbc.so.0 - HINTS "${COIN_INCLUDE_DIR}/../../lib/" - HINTS "${COIN_ROOT_DIR}/lib" - ${DEPS_LIB_HINTS} - ) -MESSAGE("\tCOIN CBC: ${COIN_CBC_LIBRARY}") - -FIND_LIBRARY(COIN_CBC_SOLVER_LIBRARY - NAMES CbcSolver libCbcSolver libCbcSolver.so.0 - HINTS ${COIN_INCLUDE_DIR}/../../lib/ - HINTS "${COIN_ROOT_DIR}/lib" - ${DEPS_LIB_HINTS} - ) -MESSAGE("\tCOIN CBC solver: ${COIN_CBC_SOLVER_LIBRARY}") - -FIND_LIBRARY(COIN_CGL_LIBRARY - NAMES Cgl libCgl libCgl.so.0 - HINTS ${COIN_INCLUDE_DIR}/../../lib/ - HINTS "${COIN_ROOT_DIR}/lib" - ${DEPS_LIB_HINTS} - ) -MESSAGE("\tCOIN CGL: ${COIN_CGL_LIBRARY}") - -FIND_LIBRARY(COIN_CLP_SOLVER_LIBRARY - NAMES ClpSolver libClpSolver libClpSolver.so.0 - HINTS ${COIN_INCLUDE_DIR}/../../lib/ - HINTS "${COIN_ROOT_DIR}/lib" - ${DEPS_LIB_HINTS} - ) -MESSAGE("\tCOIN CLP SOLVER: ${COIN_CLP_SOLVER_LIBRARY}") - -FIND_LIBRARY(COIN_CLP_LIBRARY - NAMES Clp libClp libClp.so.0 - HINTS ${COIN_INCLUDE_DIR}/../../lib/ - HINTS "${COIN_ROOT_DIR}/lib" - ${DEPS_LIB_HINTS} - ) -MESSAGE("\tCOIN CLP: ${COIN_CLP_LIBRARY}") - -FIND_LIBRARY(COIN_COIN_UTILS_LIBRARY - NAMES CoinUtils libCoinUtils libCoinUtils.so.0 - HINTS ${COIN_INCLUDE_DIR}/../../lib/ - HINTS "${COIN_ROOT_DIR}/lib" - ${DEPS_LIB_HINTS} - ) -MESSAGE("\tCOIN UTILS: ${COIN_COIN_UTILS_LIBRARY}") - -FIND_LIBRARY(COIN_OSI_LIBRARY - NAMES Osi libOsi libOsi.so.0 - HINTS ${COIN_INCLUDE_DIR}/../../lib/ - HINTS "${COIN_ROOT_DIR}/lib" - ${DEPS_LIB_HINTS} - ) -MESSAGE("\tCOIN OSI: ${COIN_OSI_LIBRARY}") - -FIND_LIBRARY(COIN_OSI_CBC_LIBRARY - NAMES OsiCbc libOsiCbc #libOsiCbc.so.0 - HINTS ${COIN_INCLUDE_DIR}/../../lib/ - HINTS "${COIN_ROOT_DIR}/lib" - ${DEPS_LIB_HINTS} - ) -MESSAGE("\tCOIN OSI CBC: ${COIN_OSI_CBC_LIBRARY}") - -FIND_LIBRARY(COIN_OSI_CLP_LIBRARY - NAMES OsiClp libOsiClp libOsiClp.so.0 - HINTS ${COIN_INCLUDE_DIR}/../../lib/ - HINTS "${COIN_ROOT_DIR}/lib" - ${DEPS_LIB_HINTS} - ) -MESSAGE("\tCOIN OSI CLP: ${COIN_OSI_CLP_LIBRARY}") - -FIND_LIBRARY(COIN_ZLIB_LIBRARY - NAMES z libz libz.so.1 - HINTS ${COIN_ROOT_DIR}/lib - HINTS "${COIN_ROOT_DIR}/lib" - ${DEPS_LIB_HINTS} - ) -MESSAGE("\tCOIN ZLIB: ${COIN_ZLIB_LIBRARY}") - -FIND_LIBRARY(COIN_BZ2_LIBRARY - NAMES bz2 libz2 libz2.so.1 - HINTS ${COIN_ROOT_DIR}/lib - HINTS "${COIN_ROOT_DIR}/lib" - ${DEPS_LIB_HINTS} - ) -MESSAGE("\tCOIN BZ2: 
${COIN_BZ2_LIBRARY}") - -INCLUDE(FindPackageHandleStandardArgs) -FIND_PACKAGE_HANDLE_STANDARD_ARGS(COIN DEFAULT_MSG - COIN_INCLUDE_DIR - COIN_CBC_LIBRARY - COIN_CBC_SOLVER_LIBRARY - COIN_CGL_LIBRARY - COIN_CLP_LIBRARY - COIN_COIN_UTILS_LIBRARY - COIN_OSI_LIBRARY - # Not required by cbc v2.5, but required by later versions - COIN_OSI_CBC_LIBRARY - COIN_OSI_CLP_LIBRARY - COIN_ZLIB_LIBRARY - COIN_BZ2_LIBRARY - ) - -# -# Set all required cmake variables based on our findings -# -IF(COIN_FOUND) - SET(COIN_INCLUDE_DIRS ${COIN_INCLUDE_DIR}) - #SET(COIN_CLP_LIBRARIES "${COIN_CLP_LIBRARY};${COIN_COIN_UTILS_LIBRARY};${COIN_ZLIB_LIBRARY};${COIN_CLP_SOLVER_LIBRARY}") - SET(COIN_CLP_LIBRARIES "${COIN_CLP_LIBRARY};${COIN_COIN_UTILS_LIBRARY};${COIN_ZLIB_LIBRARY}") - IF (COIN_CLP_SOLVER_LIBRARY) - SET(COIN_CLP_LIBRARIES "${COIN_CLP_LIBRARIES};${COIN_CLP_SOLVER_LIBRARY}") - ENDIF (COIN_CLP_SOLVER_LIBRARY) - IF(COIN_ZLIB_LIBRARY) - SET(COIN_CLP_LIBRARIES "${COIN_CLP_LIBRARIES};${COIN_ZLIB_LIBRARY}") - ENDIF(COIN_ZLIB_LIBRARY) - IF(COIN_BZ2_LIBRARY) - SET(COIN_CLP_LIBRARIES "${COIN_CLP_LIBRARIES};${COIN_BZ2_LIBRARY}") - ENDIF(COIN_BZ2_LIBRARY) - # Not required by cbc v2.5, but required by later versions in which case, - # the lower line should be commented out and this line used - #SET(COIN_CBC_LIBRARIES "${COIN_CBC_LIBRARY};${COIN_CBC_SOLVER_LIBRARY};${COIN_CGL_LIBRARY};${COIN_OSI_LIBRARY};${COIN_OSI_CBC_LIBRARY};${COIN_OSI_CLP_LIBRARY};${COIN_CLP_LIBRARIES}") - SET(COIN_CBC_LIBRARIES "${COIN_OSI_LIBRARY};${COIN_CLP_LIBRARIES};${COIN_OSI_CLP_LIBRARY};${COIN_CGL_LIBRARY};${COIN_CBC_SOLVER_LIBRARY};${COIN_CBC_LIBRARY};${COIN_OSI_CBC_LIBRARY}") - #SET(COIN_CBC_LIBRARIES "${COIN_CBC_LIBRARY};${COIN_CBC_SOLVER_LIBRARY};${COIN_CGL_LIBRARY};${COIN_OSI_LIBRARY};${COIN_OSI_CLP_LIBRARY};${COIN_CLP_LIBRARIES}") - SET(COIN_LIBRARIES "${COIN_CBC_LIBRARIES}") - - FILE(STRINGS "${COIN_INCLUDE_DIR}/CbcConfig.h" COIN_VERSION REGEX "define CBC_VERSION .*") - STRING(REPLACE "#define CBC_VERSION " "" COIN_VERSION "${COIN_VERSION}") - STRING(REPLACE "\"" "" COIN_VERSION "${COIN_VERSION}") -ENDIF(COIN_FOUND) - -# -# Report a synopsis of our findings -# -IF(COIN_INCLUDE_DIRS) - MESSAGE(STATUS "Found COIN Include Dirs: ${COIN_INCLUDE_DIRS}") -ELSE(COIN_INCLUDE_DIRS) - MESSAGE(STATUS "COIN Include Dirs NOT FOUND") -ENDIF(COIN_INCLUDE_DIRS) diff --git a/xCmake/FindGlib.cmake b/xCmake/FindGlib.cmake deleted file mode 100644 index 623ccf4d6b..0000000000 --- a/xCmake/FindGlib.cmake +++ /dev/null @@ -1,50 +0,0 @@ -#pkg_check_modules(GLIB_PKG glib-2.0) -libfind_pkg_check_modules(GLIB_PKG glib-2.0) - -if(GLIB_PKG_FOUND) - find_path(GLIB_INCLUDE_DIR NAMES glib.h PATH_SUFFIXES glib-2.0 - ${DEPS_INCLUDE_HINTS} - PATHS - ${GLIB_PKG_INCLUDE_DIRS} - /usr/include/glib-2.0 - /usr/include - /usr/local/include - ) - find_path(GLIB_CONFIG_INCLUDE_DIR NAMES glibconfig.h - ${DEPS_INCLUDE_HINTS} - PATHS ${GLIB_PKG_LIBDIR} PATH_SUFFIXES glib-2.0/include) - - find_library(GLIB_LIBRARIES NAMES glib-2.0 - ${DEPS_LIB_HINTS} - PATHS - ${GLIB_PKG_LIBRARY_DIRS} - /usr/lib - /usr/local/lib - ) -else(GLIB_PKG_FOUND) - # Find Glib even if pkg-config is not working (eg. 
cross compiling to Windows) - find_library(GLIB_LIBRARIES NAMES glib-2.0 ${DEPS_LIB_HINTS}) - string(REGEX REPLACE "/[^/]*$" "" GLIB_LIBRARIES_DIR ${GLIB_LIBRARIES}) - - find_path(GLIB_INCLUDE_DIR NAMES glib.h - ${DEPS_INCLUDE_HINTS} - PATH_SUFFIXES glib-2.0 - ) - find_path(GLIB_CONFIG_INCLUDE_DIR NAMES glibconfig.h - ${DEPS_INCLUDE_HINTS} - PATHS ${GLIB_LIBRARIES_DIR} PATH_SUFFIXES glib-2.0/include) -endif(GLIB_PKG_FOUND) - -if(GLIB_INCLUDE_DIR AND GLIB_CONFIG_INCLUDE_DIR AND GLIB_LIBRARIES) - set(GLIB_INCLUDE_DIRS ${GLIB_INCLUDE_DIR} ${GLIB_CONFIG_INCLUDE_DIR}) -endif(GLIB_INCLUDE_DIR AND GLIB_CONFIG_INCLUDE_DIR AND GLIB_LIBRARIES) - -if(GLIB_INCLUDE_DIRS AND GLIB_LIBRARIES) - set(GLIB_FOUND TRUE CACHE INTERNAL "glib-2.0 found") - message(STATUS "Found glib-2.0: ${GLIB_INCLUDE_DIR}, ${GLIB_LIBRARIES}") -else(GLIB_INCLUDE_DIRS AND GLIB_LIBRARIES) - set(GLIB_FOUND FALSE CACHE INTERNAL "glib-2.0 found") - message(STATUS "glib-2.0 not found.") -endif(GLIB_INCLUDE_DIRS AND GLIB_LIBRARIES) - -mark_as_advanced(GLIB_INCLUDE_DIR GLIB_CONFIG_INCLUDE_DIR GLIB_INCLUDE_DIRS GLIB_LIBRARIES) diff --git a/xCmake/FindGlibmm.cmake b/xCmake/FindGlibmm.cmake deleted file mode 100644 index 3790e30164..0000000000 --- a/xCmake/FindGlibmm.cmake +++ /dev/null @@ -1,45 +0,0 @@ -# - Try to find Glibmm-2.4 -# Once done, this will define -# -# Glibmm_FOUND - system has Glibmm -# Glibmm_INCLUDE_DIRS - the Glibmm include directories -# Glibmm_LIBRARIES - link these to use Glibmm - -include(LibFindMacros) - -# Dependencies -libfind_package(Glibmm Glib) -libfind_package(Glibmm SigC++) - -# Use pkg-config to get hints about paths -libfind_pkg_check_modules(Glibmm_PKGCONF glibmm-2.4) - -# Main include dir -find_path(Glibmm_INCLUDE_DIR - NAMES glibmm/main.h - ${DEPS_INCLUDE_HINTS} - PATHS ${Glibmm_PKGCONF_INCLUDE_DIRS} - PATH_SUFFIXES glibmm-2.4 include/glibmm-2.4 - ) - -# Glib-related libraries also use a separate config header, which is in lib dir -find_path(GlibmmConfig_INCLUDE_DIR - NAMES glibmmconfig.h - ${DEPS_INCLUDE_HINTS} - PATHS ${Glibmm_PKGCONF_INCLUDE_DIRS} /usr - PATH_SUFFIXES lib/glibmm-2.4/include - ) - -# find lib -find_path(Glibmm_PKGCONF_LIBRARY_DIRS - NAMES libglib-2.0.so - ${DEPS_LIB_HINTS} - PATH_SUFFIXES lib - ) -libfind_library(Glibmm glibmm 2.4) - -# Set the include dir variables and the libraries and let libfind_process do the rest. -# NOTE: Singular variables for this library, plural for libraries this this lib depends on. 
-set(Glibmm_PROCESS_INCLUDES Glibmm_INCLUDE_DIR GlibmmConfig_INCLUDE_DIR GLIB_INCLUDE_DIRS SigC++_INCLUDE_DIRS) -set(Glibmm_PROCESS_LIBS Glibmm_LIBRARY GLIB_LIBRARIES SigC++_LIBRARIES) -libfind_process(Glibmm) diff --git a/xCmake/FindLibXML++.cmake b/xCmake/FindLibXML++.cmake deleted file mode 100644 index 51485f6d63..0000000000 --- a/xCmake/FindLibXML++.cmake +++ /dev/null @@ -1,57 +0,0 @@ -# - Try to find LibXML++ 2.6 -# Once done, this will define -# -# LibXML++_FOUND - system has LibXML++ -# LibXML++_INCLUDE_DIRS - the LibXML++ include directories -# LibXML++_LIBRARIES - link these to use LibXML++ - -include(LibFindMacros) - -# Dependencies -libfind_package(LibXML++ LibXml2 ${DEPS_HINTS}) -find_path(LIBXML2_INCLUDE_DIR - NAMES libxml - ${DEPS_INCLUDE_HINTS} - PATH_SUFFIXES libxml2 include/libxml2 - ) -libfind_package(LibXML++ Glibmm) - -# Use pkg-config to get hints about paths -libfind_pkg_check_modules(LibXML++_PKGCONF libxml++-2.6) - -# Main include dir -find_path(LibXML++_INCLUDE_DIR - NAMES libxml++/libxml++.h - ${DEPS_INCLUDE_HINTS} - PATHS ${LibXML++_PKGCONF_INCLUDE_DIRS} - PATH_SUFFIXES libxml++-2.6 include/libxml++-2.6 - ) - -# Glib-related libraries also use a separate config header, which is in lib dir -find_path(LibXML++Config_INCLUDE_DIR - NAMES libxml++config.h - ${DEPS_HINTS} - PATHS ${LibXML++_PKGCONF_INCLUDE_DIRS} /usr - PATH_SUFFIXES lib/libxml++-2.6/include - ) - -# find lib -find_path(LibXML++_PKGCONF_LIBRARY_DIRS - NAMES xml++-2.6 - ${DEPS_LIB_DIR} - PATH_SUFFIXES lib - ) -libfind_library(LibXML++ xml++ 2.6) - # Finally the library itself -#find_library(LibXML++_LIBRARY -# NAMES xml++-2.6 -# ${DEPS_LIB_DIR} -# PATHS ${LibXML++_PKGCONF_LIBRARY_DIRS} -# ) - -# Set the include dir variables and the libraries and let libfind_process do the rest. -# NOTE: Singular variables for this library, plural for libraries this this lib depends on. -set(LibXML++_PROCESS_INCLUDES LibXML++_INCLUDE_DIR LibXML++Config_INCLUDE_DIR LIBXML2_INCLUDE_DIR Glibmm_INCLUDE_DIRS) -set(LibXML++_PROCESS_LIBS LibXML++_LIBRARY LIBXML2_LIBRARIES Glibmm_LIBRARIES) - -libfind_process(LibXML++) diff --git a/xCmake/FindNumpy.cmake b/xCmake/FindNumpy.cmake deleted file mode 100644 index 136fce28cf..0000000000 --- a/xCmake/FindNumpy.cmake +++ /dev/null @@ -1,87 +0,0 @@ -# - Find Numpy -# NumPy is the fundamental package needed for scientific computing with Python -# www.numpy.scipy.org -# -# The module defines the following variables: -# NUMPY_FOUND - the system has numpy -# NUMPY_INCLUDE_DIR - where to find numpy/arrayobject.h -# NUMPY_INCLUDE_DIRS - numpy include directories -# NUMPY_VERSION_STRING - version (ex. 1.2.3) -# NUMPY_MAJOR_VERSION - major version (ex. 1) -# NUMPY_MINOR_VERSION - minor version (ex. 2) -# NUMPY_PATCH_VERSION - patch version (ex. 3) - -#============================================================================= -# Copyright 2005-2012 EDF-EADS-Phimeca -# -# Distributed under the OSI-approved BSD License (the "License"); -# see accompanying file Copyright.txt for details. -# -# This software is distributed WITHOUT ANY WARRANTY; without even the -# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. -# See the License for more information. -#============================================================================= -# (To distributed this file outside of CMake, substitute the full -# License text for the above reference.) 
- -# set NUMPY_INCLUDE_DIR -find_package ( PythonInterp ) - -if ( PYTHONINTERP_FOUND ) - execute_process ( COMMAND ${PYTHON_EXECUTABLE} -c "import numpy; print(numpy.get_include())" - OUTPUT_VARIABLE NUMPY_INCLUDE_DIR - ERROR_QUIET - OUTPUT_STRIP_TRAILING_WHITESPACE ) -endif () - -# set NUMPY_INCLUDE_DIRS -set ( NUMPY_INCLUDE_DIRS ${NUMPY_INCLUDE_DIR} ) - -# version -if ( PYTHONINTERP_FOUND ) - execute_process ( COMMAND ${PYTHON_EXECUTABLE} -c "import numpy; print(numpy.__version__)" - OUTPUT_VARIABLE NUMPY_VERSION_STRING - OUTPUT_STRIP_TRAILING_WHITESPACE ) - - if ( NUMPY_VERSION_STRING ) - string ( REGEX REPLACE "([0-9]+)\\..*" "\\1" NUMPY_MAJOR_VERSION ${NUMPY_VERSION_STRING} ) - string ( REGEX REPLACE "[0-9]+\\.([0-9]+).*" "\\1" NUMPY_MINOR_VERSION ${NUMPY_VERSION_STRING} ) - string ( REGEX REPLACE "[0-9]+\\.[0-9]+\\.([0-9]+).*" "\\1" NUMPY_PATCH_VERSION ${NUMPY_VERSION_STRING} ) - endif () - -endif () - -# check version -set ( _NUMPY_VERSION_MATCH TRUE ) -if ( Numpy_FIND_VERSION AND NUMPY_VERSION ) - if ( Numpy_FIND_VERSION_EXACT ) - if ( Numpy_FIND_VERSION VERSION_EQUAL NUMPY_VERSION_STRING ) - else() - set ( _NUMPY_VERSION_MATCH FALSE) - endif () - else () - if ( Numpy_FIND_VERSION VERSION_GREATER NUMPY_VERSION_STRING ) - set ( _NUMPY_VERSION_MATCH FALSE ) - endif () - endif () -endif () - -message("-- NUMPY_VERSION_STRING = ${NUMPY_VERSION_STRING}") - -# handle REQUIRED and QUIET options -include ( FindPackageHandleStandardArgs ) -find_package_handle_standard_args ( Numpy DEFAULT_MSG - NUMPY_VERSION_STRING - _NUMPY_VERSION_MATCH - NUMPY_INCLUDE_DIR - NUMPY_INCLUDE_DIRS -) - -mark_as_advanced ( - NUMPY_VERSION_STRING - NUMPY_MAJOR_VERSION - NUMPY_MINOR_VERSION - NUMPY_PATCH_VERSION - NUMPY_INCLUDE_DIR - NUMPY_INCLUDE_DIRS -) diff --git a/xCmake/FindSigC++.cmake b/xCmake/FindSigC++.cmake deleted file mode 100644 index 20133b11aa..0000000000 --- a/xCmake/FindSigC++.cmake +++ /dev/null @@ -1,41 +0,0 @@ -# - Try to find SigC++-2.0 -# Once done, this will define -# -# SigC++_FOUND - system has SigC++ -# SigC++_INCLUDE_DIRS - the SigC++ include directories -# SigC++_LIBRARIES - link these to use SigC++ - -include(LibFindMacros) - -# Use pkg-config to get hints about paths -libfind_pkg_check_modules(SigC++_PKGCONF sigc++-2.0) - -# Main include dir -find_path(SigC++_INCLUDE_DIR - NAMES sigc++/sigc++.h - ${DEPS_INCLUDE_HINTS} - PATHS ${SigC++_PKGCONF_INCLUDE_DIRS} ${SigC++_PKGCONF_INCLUDE_DIRS}/include - PATH_SUFFIXES include/sigc++-2.0 sigc++-2.0 - ) - -# Glib-related libraries also use a separate config header, which is in lib dir -find_path(SigC++Config_INCLUDE_DIR - NAMES sigc++config.h - ${DEPS_INCLUDE_HINTS} - PATHS ${SigC++_PKGCONF_INCLUDE_DIRS} /usr - PATH_SUFFIXES lib/sigc++-2.0/include - ) - -# find lib -find_path(SigC++_PKGCONF_LIBRARY_DIRS - NAMES libsigc-2.0.so - ${DEPS_LIB_HINTS} - PATH_SUFFIXES lib - ) -libfind_library(SigC++ sigc 2.0) - -# Set the include dir variables and the libraries and let libfind_process do the rest. -# NOTE: Singular variables for this library, plural for libraries this this lib depends on. 
-set(SigC++_PROCESS_INCLUDES SigC++_INCLUDE_DIR SigC++Config_INCLUDE_DIR) -set(SigC++_PROCESS_LIBS SigC++_LIBRARY) -libfind_process(SigC++) diff --git a/xCmake/FindSqlite3.cmake b/xCmake/FindSqlite3.cmake deleted file mode 100644 index 65064e7acc..0000000000 --- a/xCmake/FindSqlite3.cmake +++ /dev/null @@ -1,58 +0,0 @@ -# - find Sqlite 3 -# SQLITE3_INCLUDE_DIR - Where to find Sqlite 3 header files (directory) -# SQLITE3_LIBRARIES - Sqlite 3 libraries -# SQLITE3_LIBRARY_RELEASE - Where the release library is -# SQLITE3_LIBRARY_DEBUG - Where the debug library is -# SQLITE3_FOUND - Set to TRUE if we found everything (library, includes and executable) - -# Copyright (c) 2010 Pau Garcia i Quiles, -# -# Redistribution and use is allowed according to the terms of the BSD license. -# For details see the accompanying COPYING-CMAKE-SCRIPTS file. -# -# Generated by CModuler, a CMake Module Generator - http://gitorious.org/cmoduler - -IF(SQLITE3_INCLUDE_DIR AND SQLITE3_LIBRARY_RELEASE AND SQLITE3_LIBRARY_DEBUG) - SET(SQLITE3_FIND_QUIETLY TRUE) -ENDIF(SQLITE3_INCLUDE_DIR AND SQLITE3_LIBRARY_RELEASE AND SQLITE3_LIBRARY_DEBUG) - -FIND_LIBRARY(SQLITE3_LIBRARY_RELEASE NAMES sqlite3 ${DEPS_LIB_HINTS}) -GET_FILENAME_COMPONENT(SQLITE3_INCLUDE_DIR "${SQLITE3_LIBRARY_RELEASE}" DIRECTORY) -GET_FILENAME_COMPONENT(SQLITE3_INCLUDE_DIR "${SQLITE3_INCLUDE_DIR}" DIRECTORY) -SET(SQLITE3_INCLUDE_DIR "${SQLITE3_INCLUDE_DIR}/include") - -FIND_LIBRARY(SQLITE3_LIBRARY_DEBUG NAMES sqlite3 sqlite3d - ${DEPS_LIB_HINTS} HINTS /usr/lib/debug/usr/lib/) - -IF(SQLITE3_LIBRARY_RELEASE OR SQLITE3_LIBRARY_DEBUG AND SQLITE3_INCLUDE_DIR) - SET(SQLITE3_FOUND TRUE) -ENDIF(SQLITE3_LIBRARY_RELEASE OR SQLITE3_LIBRARY_DEBUG AND SQLITE3_INCLUDE_DIR) - -IF(SQLITE3_LIBRARY_DEBUG AND SQLITE3_LIBRARY_RELEASE) - # if the generator supports configuration types then set - # optimized and debug libraries, or if the CMAKE_BUILD_TYPE has a value - IF(CMAKE_CONFIGURATION_TYPES OR CMAKE_BUILD_TYPE) - SET(SQLITE3_LIBRARIES optimized ${SQLITE3_LIBRARY_RELEASE} debug ${SQLITE3_LIBRARY_DEBUG}) - ELSE(CMAKE_CONFIGURATION_TYPES OR CMAKE_BUILD_TYPE) - # if there are no configuration types and CMAKE_BUILD_TYPE has no value - # then just use the release libraries - SET(SQLITE3_LIBRARIES ${SQLITE3_LIBRARY_RELEASE}) - ENDIF(CMAKE_CONFIGURATION_TYPES OR CMAKE_BUILD_TYPE) -ELSEIF(SQLITE3_LIBRARY_RELEASE) - SET(SQLITE3_LIBRARIES ${SQLITE3_LIBRARY_RELEASE}) -ELSE(SQLITE3_LIBRARY_DEBUG AND SQLITE3_LIBRARY_RELEASE) - SET(SQLITE3_LIBRARIES ${SQLITE3_LIBRARY_DEBUG}) -ENDIF(SQLITE3_LIBRARY_DEBUG AND SQLITE3_LIBRARY_RELEASE) - -IF(SQLITE3_FOUND) - IF(NOT SQLITE3_FIND_QUIETLY) - MESSAGE(STATUS "Found Sqlite3 header file in ${SQLITE3_INCLUDE_DIR}") - MESSAGE(STATUS "Found Sqlite3 libraries: ${SQLITE3_LIBRARIES}") - ENDIF(NOT SQLITE3_FIND_QUIETLY) -ELSE(SQLITE3_FOUND) - IF(SQLITE3_FIND_REQUIRED) - MESSAGE(FATAL_ERROR "Could not find Sqlite3") - ELSE(SQLITE3_FIND_REQUIRED) - MESSAGE(STATUS "Optional package Sqlite3 was not found") - ENDIF(SQLITE3_FIND_REQUIRED) -ENDIF(SQLITE3_FOUND) diff --git a/xCmake/FindTcmalloc.cmake b/xCmake/FindTcmalloc.cmake deleted file mode 100644 index ec549b5f3a..0000000000 --- a/xCmake/FindTcmalloc.cmake +++ /dev/null @@ -1,39 +0,0 @@ -# - Find Tcmalloc -# Find the native Tcmalloc includes and library -# -# Tcmalloc_LIBRARIES - List of libraries when using Tcmalloc. -# Tcmalloc_FOUND - True if Tcmalloc found. 
- -if (USE_TCMALLOC) - set(Tcmalloc_NAMES tcmalloc) -else () - set(Tcmalloc_NAMES tcmalloc_minimal tcmalloc) -endif () - -find_library(Tcmalloc_LIBRARY NO_DEFAULT_PATH - NAMES ${Tcmalloc_NAMES} - ${DEPS_HINTS} - PATHS ${HT_DEPENDENCY_LIB_DIR} /lib /usr/lib /usr/local/lib /opt/local/lib -) - -if (Tcmalloc_LIBRARY) - set(Tcmalloc_FOUND TRUE) - set( Tcmalloc_LIBRARIES ${Tcmalloc_LIBRARY} ) -else () - set(Tcmalloc_FOUND FALSE) - set( Tcmalloc_LIBRARIES ) -endif () - -if (Tcmalloc_FOUND) - message(STATUS "Found Tcmalloc: ${Tcmalloc_LIBRARY}") -else () - message(STATUS "Not Found Tcmalloc: ${Tcmalloc_LIBRARY}") - if (Tcmalloc_FIND_REQUIRED) - message(STATUS "Looked for Tcmalloc libraries named ${Tcmalloc_NAMES}.") - message(FATAL_ERROR "Could NOT find Tcmalloc library") - endif () -endif () - -mark_as_advanced( - Tcmalloc_LIBRARY - ) From be844b048c0ba3abdbb9f50c8c6910a79fb57def Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Thu, 14 Sep 2023 12:24:00 -0500 Subject: [PATCH 48/82] c++14 corrections --- CMakeLists.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 09ac57986c..3138beb500 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -13,10 +13,10 @@ ENDIF() # This project name is cyclus. PROJECT(CYCLUS) -# check for and enable c++11 support (required for cyclus) +# check for and enable c++14 support (required for cyclus) INCLUDE(CheckCXXCompilerFlag) CHECK_CXX_COMPILER_FLAG("-std=c++14" COMPILER_SUPPORTS_CXX14) -IF(COMPILER_SUPPORTS_CXX11) +IF(COMPILER_SUPPORTS_CXX14) SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++14") ELSE() MESSAGE(STATUS "The compiler ${CMAKE_CXX_COMPILER} has no C++14 support. Please use a different C++ compiler.") From 978ab85dfb6423b65c7ffe8c85c580870279975e Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Thu, 14 Sep 2023 12:24:32 -0500 Subject: [PATCH 49/82] remove commented cmake path --- CMakeLists.txt | 1 - 1 file changed, 1 deletion(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 3138beb500..102f2c4ee4 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -106,7 +106,6 @@ IF(NOT CYCLUS_DOC_ONLY) # Tell CMake where the modules are MESSAGE("-- CMAKE_MODULE_PATH: ${CMAKE_MODULE_PATH}") LIST(APPEND CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} - # "/opt/conda/share/cmake-3.25/Modules" "${PROJECT_SOURCE_DIR}/cmake") # Include macros From 557a9ad3a55626af8193e3a66401d300f9168920 Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Thu, 14 Sep 2023 13:15:17 -0500 Subject: [PATCH 50/82] update all the way to C++ 17 --- CMakeLists.txt | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 102f2c4ee4..41728e1d8c 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -15,11 +15,11 @@ PROJECT(CYCLUS) # check for and enable c++14 support (required for cyclus) INCLUDE(CheckCXXCompilerFlag) -CHECK_CXX_COMPILER_FLAG("-std=c++14" COMPILER_SUPPORTS_CXX14) -IF(COMPILER_SUPPORTS_CXX14) - SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++14") +CHECK_CXX_COMPILER_FLAG("-std=c++17" COMPILER_SUPPORTS_CXX17) +IF(COMPILER_SUPPORTS_CXX17) + SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++17") ELSE() - MESSAGE(STATUS "The compiler ${CMAKE_CXX_COMPILER} has no C++14 support. Please use a different C++ compiler.") + MESSAGE(STATUS "The compiler ${CMAKE_CXX_COMPILER} has no C++17 support. Please use a different C++ compiler.") ENDIF() # enable assembly From 03737c665ff43cfde3002071f6124617031cfb06 Mon Sep 17 00:00:00 2001 From: "Paul P.H. 
Wilson" Date: Thu, 14 Sep 2023 13:18:40 -0500 Subject: [PATCH 51/82] remove unnecessary file --- test_foo.py | 5 ----- 1 file changed, 5 deletions(-) delete mode 100644 test_foo.py diff --git a/test_foo.py b/test_foo.py deleted file mode 100644 index 2e22adac16..0000000000 --- a/test_foo.py +++ /dev/null @@ -1,5 +0,0 @@ -def test_foo(): - print("got here A") - assert 1 == 2 - assert 3 == 3 - print("got here B") \ No newline at end of file From 1fb42e6ccdec2d2987809c125661a31974a8c370 Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Thu, 14 Sep 2023 13:30:59 -0500 Subject: [PATCH 52/82] remove nosetest line --- tests/cycpp_tests.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/cycpp_tests.py b/tests/cycpp_tests.py index 65083f86d3..78c1f6bb64 100644 --- a/tests/cycpp_tests.py +++ b/tests/cycpp_tests.py @@ -28,8 +28,6 @@ import cycpp -# assert_equal.__self__.maxDiff = None !! this may not be necessary for pytest??? - class MockMachine(object): def __init__(self): self.depth = 0 From ae84592c6b2f7533bc18f768d2c16b79a86a473d Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Thu, 14 Sep 2023 17:00:33 -0500 Subject: [PATCH 53/82] perform setup as before --- tests/test_cycluslib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_cycluslib.py b/tests/test_cycluslib.py index 54c6565088..12860b9a6f 100644 --- a/tests/test_cycluslib.py +++ b/tests/test_cycluslib.py @@ -8,7 +8,7 @@ from tools import libcyclus_setup, dbtest -#setup = libcyclus_setup +setup = libcyclus_setup @dbtest def test_name(db, fname, backend): From 039439f7a3e6a04434e7f0ff81fe1b24e6ba65f9 Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Thu, 14 Sep 2023 17:00:44 -0500 Subject: [PATCH 54/82] use built in decorators instead of functools --- tests/tools.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/tests/tools.py b/tests/tools.py index 772a3e5d5d..18bee0b71c 100644 --- a/tests/tools.py +++ b/tests/tools.py @@ -9,7 +9,6 @@ import subprocess import tempfile from contextlib import contextmanager -from functools import wraps import pytest @@ -18,9 +17,6 @@ if sys.version_info[0] >= 3: basestring = str -#unit = attr('unit') -#integration = attr('integration') - INPUT = os.path.join(os.path.dirname(__file__), "input") CYCLUS_HAS_COIN = None @@ -168,7 +164,6 @@ def libcyclus_setup(): def dbtest(f): - @wraps(f) def wrapper(): for fname, oname, backend in DBS: if os.path.exists(fname): @@ -180,7 +175,6 @@ def wrapper(): - # # Here there be Hackons! # From 97f81862636afe2205efe32a741d927d1e6257da Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Thu, 14 Sep 2023 17:01:08 -0500 Subject: [PATCH 55/82] search for the fully named module Not sure how this ever worked? --- tests/test_env.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_env.py b/tests/test_env.py index f1a316829a..2fba65ba5d 100644 --- a/tests/test_env.py +++ b/tests/test_env.py @@ -22,7 +22,7 @@ def test_paths(): assert isinstance(path, str) assert len(ENV.env_delimiter) > 0 assert len(ENV.path_delimiter) > 0 - assert len(ENV.find_module('agents')) > 0 + assert len(ENV.find_module('libagents.so')) > 0 def test_nuc_data(): From acd284d87d32b84af38a95088c81b6e62f6f2d53 Mon Sep 17 00:00:00 2001 From: "Paul P.H. 
Wilson" Date: Thu, 14 Sep 2023 17:01:38 -0500 Subject: [PATCH 56/82] update pandas testing module --- tests/test_memback.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_memback.py b/tests/test_memback.py index d50fe28655..aac617e3e6 100644 --- a/tests/test_memback.py +++ b/tests/test_memback.py @@ -7,7 +7,7 @@ import numpy as np import pandas as pd -from pandas.util.testing import assert_frame_equal +from pandas.testing import assert_frame_equal def make_rec_back(inject_sim_id=False): From ecfa70c4f08dcbee88ea59f8074a435bebd00b41 Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Thu, 14 Sep 2023 17:02:05 -0500 Subject: [PATCH 57/82] use pytest fixture to loop over cases --- tests/test_source_to_sink.py | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/tests/test_source_to_sink.py b/tests/test_source_to_sink.py index 7750691ec0..2e83ea4459 100644 --- a/tests/test_source_to_sink.py +++ b/tests/test_source_to_sink.py @@ -14,11 +14,20 @@ INPUT = os.path.join(os.path.dirname(__file__), "input") -def check_source_to_sink(fname, source_spec, sink_spec): +@pytest.fixture(params=[("source_to_sink.xml", ":agents:Source", ":agents:Sink"), + ("source_to_sink.py", ":cyclus.pyagents:Source", ":cyclus.pyagents:Sink"), + ]) +def source_to_sink_case(request): + yield request.param + +def test_source_to_sink(source_to_sink_case): """Tests linear growth of sink inventory by checking if the transactions were of equal quantities and only between sink and source facilities. """ clean_outs() + + fname, source_spec, sink_spec = source_to_sink_case + if not cyclus_has_coin(): pytest.skip("Cyclus does not have COIN") @@ -102,10 +111,4 @@ def check_source_to_sink(fname, source_spec, sink_spec): clean_outs() -def test_source_to_sink(): - cases = [("source_to_sink.xml", ":agents:Source", ":agents:Sink"), - ("source_to_sink.py", ":cyclus.pyagents:Source", ":cyclus.pyagents:Sink"), - ] - for case in cases: - for x in check_source_to_sink(*case): - pass + From 4eb898a801dd788ac63df4dc5c7dcb0085f64e78 Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Thu, 14 Sep 2023 17:04:25 -0500 Subject: [PATCH 58/82] switch to std open and yield appropriately --- tests/test_stubs.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/test_stubs.py b/tests/test_stubs.py index c2d8008481..a7675500ec 100644 --- a/tests/test_stubs.py +++ b/tests/test_stubs.py @@ -3,7 +3,6 @@ import shutil import sys import tempfile -import io from contextlib import contextmanager import pytest @@ -11,11 +10,13 @@ @contextmanager def tmpdir(): d = tempfile.mkdtemp() + yield d shutil.rmtree(d) @contextmanager def tmplog(fname): - io.open(fname, mode='w') + file_ptr = open(fname, mode='w') + yield file_ptr os.remove(fname) def test_stubs(): From 49e3744d49ed79fbcfb5abbf2ef5fb3ee8a4321b Mon Sep 17 00:00:00 2001 From: "Paul P.H. 
Wilson" Date: Thu, 14 Sep 2023 17:53:18 -0500 Subject: [PATCH 59/82] change syntax to skip --- tests/test_smbchk.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/tests/test_smbchk.py b/tests/test_smbchk.py index 5229434acc..5ac9301528 100644 --- a/tests/test_smbchk.py +++ b/tests/test_smbchk.py @@ -3,6 +3,7 @@ import platform import sys from argparse import Namespace +import pytest from tools import skip_then_continue @@ -16,16 +17,16 @@ except ImportError: smbchk = False +@pytest.mark.skip(reason="symbol test has been deprecated") def test_load(): - raise DeprecatedTest("symbol test has been deprecated") if not smbchk: return ns = Namespace(filename=os.path.join(reldir, 'symbols.json')) db = smbchk.load(ns) assert(isinstance(db, list)) +@pytest.mark.skip(reason="symbol test has been deprecated") def test_nm(): - raise DeprecatedTest("symbol test has been deprecated") if platform.system() == 'Darwin': skip_then_continue("Skipping for Mac") if not smbchk: @@ -36,8 +37,8 @@ def test_nm(): syms = smbchk.nm(ns) assert ("cyclus::Agent::Agent(cyclus::Context*)" in syms) +@pytest.mark.skip(reason="symbol test has been deprecated") def test_diff(): - raise DeprecatedTest("symbol test has been deprecated") if not smbchk: return db = [{'symbols': ["cyclus::Agent::Agent(cyclus::Context*)"], @@ -48,8 +49,8 @@ def test_diff(): obs = smbchk.diff(db, 0, 1) assert(len(obs) > 0) +@pytest.mark.skip(reason="symbol test has been deprecated") def test_check(): - raise DeprecatedTest("symbol test has been deprecated") if not smbchk: return # adds to API From 03df57ff0c2fb19ce80bff10e8a0007888702cdb Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Thu, 14 Sep 2023 17:53:56 -0500 Subject: [PATCH 60/82] convert to pytest fixture for more resolution --- tests/test_main.py | 66 ++++++++++++++++++++++++---------------------- 1 file changed, 35 insertions(+), 31 deletions(-) diff --git a/tests/test_main.py b/tests/test_main.py index af39f9ed61..e0e81d866b 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -1,39 +1,43 @@ """Tests Python main CLI for Cyclus.""" from __future__ import print_function, unicode_literals - +import pytest from cyclus.main import main +cases = [ + ['-V'], + ['--schema'], + ['--flat-schema', '--schema'], + ['--agent-schema', ':agents:KFacility'], + ['--agent-version', ':agents:KFacility'], + ['--schema-path', '/path/to/schema'], + ['--agent-annotations', ':agents:NullRegion'], + ['--agent-listing', ':agents'], + ['--no-agent'], + ['--no-mem'], + ['-v', '5'], + ['--warn-limit', '21'], + ['-p'], + ['--include'], + ['--install-path'], + ['--cmake-module-path'], + ['--build-path'], + ['--rng-schema'], + ['--nuc-data'], + ] + + +@pytest.fixture(params=cases) +def case(request): + yield request.param -def test_main(): - cases = [ - ['-V'], - ['--schema'], - ['--flat-schema', '--schema'], - ['--agent-schema', ':agents:KFacility'], - ['--agent-version', ':agents:KFacility'], - ['--schema-path', '/path/to/schema'], - ['--agent-annotations', ':agents:NullRegion'], - ['--agent-listing', ':agents'], - ['--no-agent'], - ['--no-mem'], - ['-v', '5'], - ['--warn-limit', '21'], - ['-p'], - ['--include'], - ['--install-path'], - ['--cmake-module-path'], - ['--build-path'], - ['--rng-schema'], - ['--nuc-data'], - ] - for case in cases: - try: - main(args=case) - res = True - except Exception: - res = False - raise - assert res +def test_main(case): + try: + main(args=case) + res = True + except Exception: + res = False + raise + assert res From 
208ac301f86656c28a1d8053ed62657ae39b2288 Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Thu, 14 Sep 2023 17:55:35 -0500 Subject: [PATCH 61/82] remove forced failure --- tests/test_null_sink.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/test_null_sink.py b/tests/test_null_sink.py index b1799156f5..7aedf0b71f 100644 --- a/tests/test_null_sink.py +++ b/tests/test_null_sink.py @@ -71,7 +71,6 @@ def check_null_sink(fname, given_spec): # No resource exchange is expected assert not tables_exist(outfile, illegal_paths) - assert 1 == 2 # FIX ME clean_outs() From e1a3bc83d2fae9d07f0ca55d9f1bb1f50f27b47f Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Thu, 14 Sep 2023 18:00:08 -0500 Subject: [PATCH 62/82] convert to pytest fixture --- tests/test_null_sink.py | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/tests/test_null_sink.py b/tests/test_null_sink.py index 7aedf0b71f..52d73991aa 100644 --- a/tests/test_null_sink.py +++ b/tests/test_null_sink.py @@ -3,9 +3,6 @@ import os import sqlite3 import pytest - - - import numpy as np import tables from helper import tables_exist, find_ids, exit_times, \ @@ -16,12 +13,21 @@ INPUT = os.path.join(os.path.dirname(__file__), "input") -def check_null_sink(fname, given_spec): +@pytest.fixture(params=[("null_sink.xml", ":agents:Sink"), + ("null_sink.py", ":cyclus.pyagents:Sink")]) +def null_sink_case(request): + yield request.param + + +def test_null_sink(null_sink_case): """Testing for null sink case without a source facility. No transactions are expected in this test; therefore, a table with transaction records must not exist in order to pass this test. """ + + fname, given_spec = null_sink_case + clean_outs() if not cyclus_has_coin(): pytest.skip("Cyclus does not have COIN") @@ -74,10 +80,3 @@ def check_null_sink(fname, given_spec): clean_outs() -def test_null_sink(): - cases = [("null_sink.xml", ":agents:Sink"), - ("null_sink.py", ":cyclus.pyagents:Sink")] - for case in cases: - for x in check_null_sink(*case): - pass - From 7cab1add5bafbfde9dba7f52b6ce3e44d3d2eb6d Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Thu, 14 Sep 2023 21:05:01 -0500 Subject: [PATCH 63/82] fix comment - again! --- CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 41728e1d8c..188ed73db3 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -13,7 +13,7 @@ ENDIF() # This project name is cyclus. PROJECT(CYCLUS) -# check for and enable c++14 support (required for cyclus) +# check for and enable c++17 support (required for cyclus) INCLUDE(CheckCXXCompilerFlag) CHECK_CXX_COMPILER_FLAG("-std=c++17" COMPILER_SUPPORTS_CXX17) IF(COMPILER_SUPPORTS_CXX17) From 64e316531b4ce3a34f073e89660ff3dbf16dc331 Mon Sep 17 00:00:00 2001 From: "Paul P.H. 
Wilson" Date: Thu, 14 Sep 2023 21:15:46 -0500 Subject: [PATCH 64/82] update memback to new pandas API --- cyclus/memback.pyx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cyclus/memback.pyx b/cyclus/memback.pyx index 9f59a7fa75..3c6106d61e 100644 --- a/cyclus/memback.pyx +++ b/cyclus/memback.pyx @@ -100,7 +100,7 @@ cdef cppclass CyclusMemBack "CyclusMemBack" (cpp_cyclus.RecBackend): if key_exists: pyobval = PyDict_GetItem( this.cache, pyname) pyval = pyobval - results = pyval.append(results, ignore_index=True) + results = pd.concat([pyval, results], ignore_index=True) PyDict_SetItem( this.cache, pyname, results) std_string Name(): From 480c84fa3a4555f9150e178529a51b3307fa5321 Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Thu, 14 Sep 2023 21:27:11 -0500 Subject: [PATCH 65/82] revert debian package dependency --- CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 188ed73db3..646aa72154 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -490,7 +490,7 @@ IF(NOT CYCLUS_DOC_ONLY) SET(CPACK_DEBIAN_PACKAGE_DEPENDS "${CPACK_DEBIAN_PACKAGE_DEPENDS}, libboost-program-options-dev (>= 1.54.0)") SET(CPACK_DEBIAN_PACKAGE_DEPENDS "${CPACK_DEBIAN_PACKAGE_DEPENDS}, libboost-serialization-dev (>= 1.54.0)") SET(CPACK_DEBIAN_PACKAGE_DEPENDS "${CPACK_DEBIAN_PACKAGE_DEPENDS}, libhdf5-dev (>= 1.8.11)") - SET(CPACK_DEBIAN_PACKAGE_DEPENDS "${CPACK_DEBIAN_PACKAGE_DEPENDS}, libxml++4.0-dev (>= 4.0.0)") + SET(CPACK_DEBIAN_PACKAGE_DEPENDS "${CPACK_DEBIAN_PACKAGE_DEPENDS}, libxml++2.6-dev (>= 2.6.0)") SET(CPACK_DEBIAN_PACKAGE_DEPENDS "${CPACK_DEBIAN_PACKAGE_DEPENDS}, coinor-libcbc-dev (>= 2.8.7)") MESSAGE("CPACK_DEBIAN_PACKAGE_DEPENDS ${CPACK_DEBIAN_PACKAGE_DEPENDS}") From dd3228717caaebdb1db45075044cf20cf0c00aa2 Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Fri, 15 Sep 2023 11:25:38 -0500 Subject: [PATCH 66/82] update for new XML++ interface --- src/xml_parser.cc | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/xml_parser.cc b/src/xml_parser.cc index 512dbcd11d..5969ba88e2 100644 --- a/src/xml_parser.cc +++ b/src/xml_parser.cc @@ -61,7 +61,11 @@ void XMLParser::Validate(const std::stringstream& xml_schema_snippet) { xmlpp::Document* XMLParser::Document() { xmlpp::Document* doc = parser_->get_document(); // This adds the capability to have nice include semantics - doc->process_xinclude(); + #if LIBXMLXX_MAJOR_VERSION == 2 + doc->process_xinclude(true); + #else + doc->process_xinclude(true, false); + #endif // This removes the stupid xml:base attribute that including adds, // but which is unvalidatable. The web is truly cobbled together // by a race of evil gnomes. From 53f0890a2eadfce066110babdb4bc90b54ab8376 Mon Sep 17 00:00:00 2001 From: "Paul P.H. 
Wilson" Date: Fri, 15 Sep 2023 11:27:41 -0500 Subject: [PATCH 67/82] remove magic number booleans --- src/xml_parser.cc | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/xml_parser.cc b/src/xml_parser.cc index 5969ba88e2..55975a6dbb 100644 --- a/src/xml_parser.cc +++ b/src/xml_parser.cc @@ -61,10 +61,12 @@ void XMLParser::Validate(const std::stringstream& xml_schema_snippet) { xmlpp::Document* XMLParser::Document() { xmlpp::Document* doc = parser_->get_document(); // This adds the capability to have nice include semantics + bool generate_xinclude_nodes = true; + bool fixup_base_uris = false; #if LIBXMLXX_MAJOR_VERSION == 2 - doc->process_xinclude(true); + doc->process_xinclude(generate_xinclude_nodes); #else - doc->process_xinclude(true, false); + doc->process_xinclude(generate_xinclude_nodes, fixup_base_uris); #endif // This removes the stupid xml:base attribute that including adds, // but which is unvalidatable. The web is truly cobbled together From d4cc14ed6024b77782b258548affa25609139e9b Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Fri, 15 Sep 2023 11:35:58 -0500 Subject: [PATCH 68/82] rename plurals for pytest --- tests/{cycpp_tests.py => cycpp_test.py} | 0 tests/{hdf5_back_gen_tests.py => hdf5_back_gen_test.py} | 0 tests/{tests_hdf5_back_gen.py => test_hdf5_back_gen.py} | 0 3 files changed, 0 insertions(+), 0 deletions(-) rename tests/{cycpp_tests.py => cycpp_test.py} (100%) rename tests/{hdf5_back_gen_tests.py => hdf5_back_gen_test.py} (100%) rename tests/{tests_hdf5_back_gen.py => test_hdf5_back_gen.py} (100%) diff --git a/tests/cycpp_tests.py b/tests/cycpp_test.py similarity index 100% rename from tests/cycpp_tests.py rename to tests/cycpp_test.py diff --git a/tests/hdf5_back_gen_tests.py b/tests/hdf5_back_gen_test.py similarity index 100% rename from tests/hdf5_back_gen_tests.py rename to tests/hdf5_back_gen_test.py diff --git a/tests/tests_hdf5_back_gen.py b/tests/test_hdf5_back_gen.py similarity index 100% rename from tests/tests_hdf5_back_gen.py rename to tests/test_hdf5_back_gen.py From 9f6878a4fa5afc09326a625db843d6e56de74348 Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Fri, 15 Sep 2023 11:45:33 -0500 Subject: [PATCH 69/82] rename test for pytest discovery --- tests/hdf5_back_gen_test.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/hdf5_back_gen_test.py b/tests/hdf5_back_gen_test.py index 6b2850ed21..a90eec7feb 100644 --- a/tests/hdf5_back_gen_test.py +++ b/tests/hdf5_back_gen_test.py @@ -242,7 +242,7 @@ def get_shape(meta): ROW_NUM = 3 PATH = 'gen_db.h5' -def generate_and_test(): +def test_generate(): """Generate and run tests for supported Hdf5 datatypes.""" if sys.version_info[0] == 2: msg = 'Hdf5 backend gen tests do not support Python 2.x' @@ -271,6 +271,6 @@ def generate_and_test(): print("observed: \n", obs) assert_frame_equal, exp, obs rec.close() - os.remove(PATH) + os.remove(PATH) From a1808cc27f589ed734215b41b909d203ad1cc98b Mon Sep 17 00:00:00 2001 From: "Paul P.H. 
Wilson" Date: Fri, 15 Sep 2023 11:45:55 -0500 Subject: [PATCH 70/82] correct reference to Maping --- cyclus/jsoncpp.pyx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cyclus/jsoncpp.pyx b/cyclus/jsoncpp.pyx index 7df8ae7ff6..5135802d52 100644 --- a/cyclus/jsoncpp.pyx +++ b/cyclus/jsoncpp.pyx @@ -35,7 +35,7 @@ cdef cpp_jsoncpp.Value * tocppval(object doc) except NULL: cdef cpp_jsoncpp.Value * cval = NULL if isinstance(doc, Value): cval = new cpp_jsoncpp.Value( ( doc)._inst[0]) - elif isinstance(doc, collections.Mapping): + elif isinstance(doc, collections.abc.Mapping): cval = new cpp_jsoncpp.Value( cpp_jsoncpp.objectValue) for k, v in doc.items(): if not isinstance(k, basestring): From d59f4772c175915ff4cc9e8474592556cc58fd6a Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Fri, 15 Sep 2023 11:46:35 -0500 Subject: [PATCH 71/82] correct reference to Mapping --- cyclus/gentypesystem.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cyclus/gentypesystem.py b/cyclus/gentypesystem.py index 506dca2a9d..f20e13b24b 100644 --- a/cyclus/gentypesystem.py +++ b/cyclus/gentypesystem.py @@ -700,7 +700,7 @@ def convert_to_cpp(self, x, t): '{valdecl}\n' 'cdef {type} cpp{var}\n', 'cpp{var} = {type}()\n' - 'if not isinstance({var}, collections.Mapping):\n' + 'if not isinstance({var}, collections.abc.Mapping):\n' ' {var} = dict({var})\n' 'for {keyname}, {valname} in {var}.items():\n' ' {keybody.indent4}\n' From cdd292df9c0eb0f828b2b7aa1b5d663904b38406 Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Fri, 15 Sep 2023 11:54:21 -0500 Subject: [PATCH 72/82] add some env variables & add stage for pytest (not yet underway) --- docker/Dockerfile | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index 4e46c58990..baa59ade8b 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -106,10 +106,6 @@ ENV PYTHONPATH "/root/.local/lib/python3.10/site-packages/" FROM ${pkg_mgr}-deps as cyclus ARG make_cores=2 -# required for the nosetest -ENV PYTHONWARNINGS ignore -ENV PATH /root/.local/bin:$PATH - COPY . /cyclus WORKDIR /cyclus @@ -120,9 +116,14 @@ WORKDIR /cyclus # You may add the option "--cmake-debug" to the following command # for further CMake debugging. RUN python install.py -j ${make_cores} --build-type=Release --core-version 999999.999999 +ENV PATH /root/.local/bin:$PATH +ENV LD_LIBRARY_PATH /root/.local/lib:/root/.local/lib/cyclus FROM cyclus as cyclus-test RUN cyclus_unit_tests +FROM cyclus-test as cyclus-pytest + +RUN cd tests && pytest From 10fdaa92edc14848f03e71e15a9f1d81bf98e0ec Mon Sep 17 00:00:00 2001 From: "Paul P.H. 
Wilson" Date: Fri, 15 Sep 2023 14:16:22 -0500 Subject: [PATCH 73/82] reimplement with fixture for test granularity --- tests/hdf5_back_gen_test.py | 56 ++++++++++++++++++++----------------- 1 file changed, 31 insertions(+), 25 deletions(-) diff --git a/tests/hdf5_back_gen_test.py b/tests/hdf5_back_gen_test.py index a90eec7feb..dde86939cd 100644 --- a/tests/hdf5_back_gen_test.py +++ b/tests/hdf5_back_gen_test.py @@ -5,7 +5,7 @@ from random import randint import uuid import pandas as pd -from pandas.util.testing import assert_frame_equal +from pandas.testing import assert_frame_equal import pytest from cyclus.lib import Hdf5Back, Recorder @@ -54,6 +54,8 @@ def setup(): CANON_TO_DB[canon] = db CANON_TO_VL[canon] = is_vl +setup() + def make_bytes(string): return string.encode() @@ -240,37 +242,41 @@ def get_shape(meta): shape.extend(get_shape(i)) return shape + +@pytest.fixture(params=CANON_TYPES) +def canon_type(request): + ret = request.param + yield ret + ROW_NUM = 3 PATH = 'gen_db.h5' -def test_generate(): +def test_generate(canon_type): """Generate and run tests for supported Hdf5 datatypes.""" if sys.version_info[0] == 2: msg = 'Hdf5 backend gen tests do not support Python 2.x' pytest.skip(msg) if os.path.isfile(PATH): os.remove(PATH) - for i in CANON_TYPES: - print(CANON_TO_DB[i],'\n') - rec = Recorder(inject_sim_id=False) - back = Hdf5Back(PATH) - rec.register_backend(back) - data_meta = generate_meta(i) - shape = get_shape(data_meta) - print("shape: ", shape) - data = [] - for j in range(ROW_NUM): - data.append(populate(data_meta)) - exp = pd.DataFrame({'col0': data}, columns=['col0']) - print("expected: \n", exp) - for j in data: - d = rec.new_datum("test0") - d.add_val("col0", j, shape=shape, type=ts.IDS[CANON_TO_DB[i]]) - d.record() - rec.flush() - obs = back.query("test0") - print("observed: \n", obs) - assert_frame_equal, exp, obs - rec.close() - os.remove(PATH) + print(CANON_TO_DB[canon_type],'\n') + rec = Recorder(inject_sim_id=False) + back = Hdf5Back(PATH) + rec.register_backend(back) + data_meta = generate_meta(canon_type) + shape = get_shape(data_meta) + print("shape: ", shape) + data = [] + for j in range(ROW_NUM): + data.append(populate(data_meta)) + exp = pd.DataFrame({'col0': data}, columns=['col0']) + print("expected: \n", exp) + for j in data: + d = rec.new_datum("test0") + d.add_val("col0", j, shape=shape, type=ts.IDS[CANON_TO_DB[canon_type]]) + d.record() + rec.flush() + obs = back.query("test0") + print("observed: \n", obs) + assert_frame_equal, exp, obs + rec.close() From 754316f97175212dac533f23fb860471b13c1152 Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Fri, 15 Sep 2023 16:16:06 -0500 Subject: [PATCH 74/82] transition from imp to importlib --- tests/tools.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/tests/tools.py b/tests/tools.py index 18bee0b71c..a3be48eefa 100644 --- a/tests/tools.py +++ b/tests/tools.py @@ -3,7 +3,7 @@ import os import re import sys -import imp +import importlib import shutil import unittest import subprocess @@ -73,7 +73,10 @@ def clean_import(name, paths=None): """ sys.path = paths + sys.path origmods = set(sys.modules.keys()) - mod = imp.load_module(name, *imp.find_module(name, paths)) + spec = importlib.machinery.PathFinder.find_spec(name, paths) + mod = importlib.util.module_from_spec(spec) + spec.loader.exec_module(mod) + sys.modules[name] = mod yield mod sys.path = sys.path[len(paths):] del mod From d66bd16def3f50689c00d67b999ac160b8cd48fa Mon Sep 17 00:00:00 2001 From: "Paul P.H. 
Wilson" Date: Fri, 15 Sep 2023 17:03:35 -0500 Subject: [PATCH 75/82] remove unnecessary imp module --- cyclus/gentypesystem.py | 1 - 1 file changed, 1 deletion(-) diff --git a/cyclus/gentypesystem.py b/cyclus/gentypesystem.py index f20e13b24b..f50a5e45b3 100644 --- a/cyclus/gentypesystem.py +++ b/cyclus/gentypesystem.py @@ -10,7 +10,6 @@ import io import os import sys -import imp import json import argparse import platform From d297cb3accbda8cc6d3fb129a67f6e01ff49db36 Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Sat, 16 Sep 2023 11:54:03 -0500 Subject: [PATCH 76/82] further simplify CI actions --- ...{build_test.yml => build_test_publish.yml} | 22 ++-- .github/workflows/docker_publish.yml | 100 ------------------ 2 files changed, 15 insertions(+), 107 deletions(-) rename .github/workflows/{build_test.yml => build_test_publish.yml} (66%) delete mode 100644 .github/workflows/docker_publish.yml diff --git a/.github/workflows/build_test.yml b/.github/workflows/build_test_publish.yml similarity index 66% rename from .github/workflows/build_test.yml rename to .github/workflows/build_test_publish.yml index 443da16e7d..1926384c1a 100644 --- a/.github/workflows/build_test.yml +++ b/.github/workflows/build_test_publish.yml @@ -1,13 +1,12 @@ -name: Build & Test PR +name: Build, Test & Publish docker images for future CI and other users on: # allows us to run workflows manually workflow_dispatch: - push: pull_request: - paths-ignore: - - 'docker/Dockerfile' - - '.github/workflows/docker_publish.yml' + push: + branches: + - master jobs: build-dependency-and-test-img: @@ -25,6 +24,15 @@ jobs: name: Installing Dependencies, Building cyclus and running tests steps: + - name: default environment + run: | + echo "tag-latest-on-default=false" >> "$GITHUB_ENV" + + - name: condition on trigger parameters + if: ${{ github.repository_owner == 'cyclus' && github.ref == 'refs/heads/master' }} + run: | + echo "tag-latest-on-default=true" >> "$GITHUB_ENV" + - name: Log in to the Container registry uses: docker/login-action@v2 with: @@ -43,6 +51,6 @@ jobs: server-stage: cyclus-test quiet: false parallel: true - tag-latest-on-default: false + tag-latest-on-default: ${{ env.tag-latest-on-default }} dockerfile: docker/Dockerfile - build-args: pkg_mgr=${{ matrix.pkg_mgr }} + build-args: pkg_mgr=${{ matrix.pkg_mgr }} \ No newline at end of file diff --git a/.github/workflows/docker_publish.yml b/.github/workflows/docker_publish.yml deleted file mode 100644 index 2e3035d00d..0000000000 --- a/.github/workflows/docker_publish.yml +++ /dev/null @@ -1,100 +0,0 @@ -name: Build & Publish docker image for CI - -on: - # allows us to run workflows manually - workflow_dispatch: - push: - paths: - - 'docker/Dockerfile' - - '.github/workflows/docker_publish.yml' - -jobs: - build-dependency-and-test-img: - runs-on: ubuntu-latest - - strategy: - matrix: - ubuntu_versions : [ - 22.04, - ] - pkg_mgr : [ - apt, - conda, - ] - - name: Installing Dependencies, Building cyclus and running tests - steps: - - name: default environment - run: | - echo "tag-latest-on-default=false" >> "$GITHUB_ENV" - - - name: condition on trigger parameters - if: ${{ github.repository_owner == 'svalinn' && github.ref == 'refs/heads/develop' }} - run: | - echo "tag-latest-on-default=true" >> "$GITHUB_ENV" - - - name: Log in to the Container registry - uses: docker/login-action@v2 - with: - registry: ghcr.io - username: ${{ github.repository_owner }} - password: ${{ secrets.GITHUB_TOKEN }} - - - name: Checkout repository - uses: actions/checkout@v3 - - 
- name: Installing Dependencies in Docker image - uses: firehed/multistage-docker-build-action@v1 - with: - repository: ghcr.io/${{ github.repository_owner }}/cyclus_${{ matrix.ubuntu_versions }}_${{ matrix.pkg_mgr }} - stages: ${{ matrix.pkg_mgr }}-deps, cyclus - server-stage: cyclus-test - quiet: false - parallel: true - tag-latest-on-default: ${{ env.tag-latest-on-default }} - dockerfile: docker/Dockerfile - build-args: pkg_mgr=${{ matrix.pkg_mgr }} - - - push_stable_ci_img: - needs: [build-dependency-and-test-img] - runs-on: ubuntu-latest - - strategy: - matrix: - ubuntu_versions : [ - 22.04, - ] - pkg_mgr : [ - apt, - conda, - ] - - name: Pushing final images - steps: - - name: Log in to the Container registry - if: ${{ github.repository_owner == 'cyclus' }} - uses: docker/login-action@v2 - with: - registry: ghcr.io - username: ${{ github.repository_owner }} - password: ${{ secrets.GITHUB_TOKEN }} - - - name: Store image name - if: ${{ github.repository_owner == 'cyclus' }} - run: | - echo "image_base_tag=ghcr.io/${{ github.repository_owner }}/cyclus_${{ matrix.ubuntu_versions }}_${{ matrix.pkg_mgr }}" >> "$GITHUB_ENV" - - - name: Push Image as latest img - if: ${{ github.repository_owner == 'cyclus' && github.ref == 'refs/heads/develop' }} - uses: akhilerm/tag-push-action@v2.1.0 - with: - src: ${{ env.image_base_tag }}/cyclus:latest - dst: ${{ env.image_base_tag }}:latest - - - name: Push Image as latest img - if: ${{ github.repository_owner == 'cyclus' && github.ref == 'refs/heads/develop' }} - uses: akhilerm/tag-push-action@v2.1.0 - with: - src: ${{ env.image_base_tag }}:latest - dst: ${{ env.image_base_tag }}:stable From aecc477026fc886c7e273b9c6ced1d635da8bd2f Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Tue, 19 Sep 2023 07:21:10 -0500 Subject: [PATCH 77/82] update primary branch name --- .github/workflows/build_test_publish.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build_test_publish.yml b/.github/workflows/build_test_publish.yml index 1926384c1a..d6702282ba 100644 --- a/.github/workflows/build_test_publish.yml +++ b/.github/workflows/build_test_publish.yml @@ -6,7 +6,7 @@ on: pull_request: push: branches: - - master + - main jobs: build-dependency-and-test-img: From eee3e0d5a4f955edafa944395ad18add8712de11 Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Tue, 19 Sep 2023 07:21:18 -0500 Subject: [PATCH 78/82] remove reference to CircleCI --- circle.yml | 179 ----------------------------------------------------- 1 file changed, 179 deletions(-) delete mode 100644 circle.yml diff --git a/circle.yml b/circle.yml deleted file mode 100644 index 0495b0a626..0000000000 --- a/circle.yml +++ /dev/null @@ -1,179 +0,0 @@ -version: 2 -jobs: - # Update docker container - deploy_latest: # Cyclus/dev -> Cyclus:latest - docker: - - image: circleci/ruby:2.4-node - working_directory: ~/cyclus - steps: - # Ensure your image has git (required by git to clone via SSH) so that CircleCI can clone your repo - - checkout - - run: - name: Place the proper Dockerfile - command: cp docker/cyclus-ci/Dockerfile . - - setup_remote_docker - - run: - name: log into Docker - command: | - docker login -u $DOCKER_USER -p $DOCKER_PASS - - run: - name: Build Docker container - command: docker build --rm=false -t cyclus/cyclus:latest . 
- - run: - name: Push on DockerHub - command: docker push cyclus/cyclus:latest # push to docker depot - - deploy_stable: - docker: # Cyclus/master -> Cyclus:stable - - image: circleci/ruby:2.4-node - working_directory: ~/cyclus - steps: - - checkout - - run: - name: Place the proper Dockerfile - command: cp docker/cyclus-ci/Dockerfile . - - setup_remote_docker - - run: - name: Log on DockerHub - command: | - docker login -u $DOCKER_USER -p $DOCKER_PASS - - run: - name: Tag and Push on DockerHub - command: | - docker tag cyclus/cyclus:latest cyclus/cyclus:stable # creation - docker push cyclus/cyclus:stable # push to docker depot - - - # Debian package generation (on master update) - deb_generation: - docker: - - image: circleci/ruby:2.4-node - working_directory: ~/cyclus - steps: - - checkout - - setup_remote_docker - - run: - name: Tag and Push on DockerHub - command: | - docker/deb-ci/build_upload_deb.sh 14 - docker/deb-ci/build_upload_deb.sh 16 - -# Checking Cycamore and Cymetric compatibilities with the changes - cycamore_master: ## Cycamore/master against Cyclus/dev - docker: - - image: cyclus/cyclus-deps - working_directory: /root - steps: - # Ensure your image has git (required by git to clone via SSH) so that CircleCI can clone your repo - - run: apt-get -qq update; apt-get -y install git openssh-client - - run: - name: save SHA to a file - command: echo $CIRCLE_SHA1 > .circle-sha - - restore_cache: - keys: - - v1-repo-{{ checksum ".circle-sha" }} - - run: - name: Checkout Cycamore Master - command: | - git clone https://github.com/cyclus/cycamore.git - cd cycamore - git fetch --all - git checkout master - - run: - name: Build Cycamore - command: | - cd cycamore - python install.py -j 2 --build-type=Release \ - -DBLAS_LIBRARIES="/opt/conda/lib/libblas.so" \ - -DLAPACK_LIBRARIES="/opt/conda/lib/liblapack.so" - - run: - name: Unit Test - command: /root/.local/bin/cycamore_unit_tests; exit $? - - run: - name: Nosetests - command: nosetests -w ~/cycamore/tests; exit $? - - cymetric_master: ## Cymetric/master against Cyclus/dev + Cycamore/dev - docker: - - image: cyclus/cyclus-deps - working_directory: /root - steps: - # Ensure your image has git (required by git to clone via SSH) so that CircleCI can clone your repo - - run: apt-get -qq update; apt-get -y install git openssh-client - - run: - name: save SHA to a file - command: echo $CIRCLE_SHA1 > .circle-sha - - restore_cache: - keys: - - v1-repo-{{ checksum ".circle-sha" }} - - run: - name: Checkout Cycamore Master - command: | - git clone https://github.com/cyclus/cycamore.git - cd cycamore - git fetch --all - git checkout master - - run: - name: Build Cycamore - command: | - cd cycamore - python install.py -j 2 --build-type=Release \ - -DBLAS_LIBRARIES="/opt/conda/lib/libblas.so" \ - -DLAPACK_LIBRARIES="/opt/conda/lib/liblapack.so" - - run: cd ~/ - - run: - name: Checkout Cymetric Master - command: | - git clone https://github.com/cyclus/cymetric.git - cd cymetric - git fetch --all - git checkout master - - run: - name: Build/Install Cymetric - command: | - cd cymetric - python setup.py install - - run: - name: Cymetric Nosetest - command: nosetests -w ~/cymetric/tests; exit $? 
- - # some external triggers - cyXX_trig: - machine: true - steps: - - run: - name: Cymetric/Cycamore Master Triggers - command: | - curl -X POST https://circleci.com/api/v1.1/project/github/cyclus/cycamore/tree/master?circle-token=$CYCAMORE_CIRCLE_TOKEN - curl -X POST https://circleci.com/api/v1.1/project/github/cyclus/cymetric/tree/master?circle-token=$CYMETRIC_CIRCLE_TOKEN - -workflows: - version: 2 #Needed ?? (already on the top of the file) - build_and_test: - jobs: - - # Merge on Master - - deploy_latest: - filters: - branches: - only: master - - cyXX_trig: - filters: - branches: - only: master - requires: - - deploy_latest - - # The following should now be done on version tag. - - deploy_stable: - filters: - branches: - ignore: /.*/ - tags: - only: /.*/ - - deb_generation: - filters: - branches: - ignore: /.*/ - tags: - only: /.*/ From 06d513207584e7eb5c6e50de292725f0de776c0d Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Tue, 19 Sep 2023 07:21:36 -0500 Subject: [PATCH 79/82] remove conda stuff from very old CI (?) - see conda-forge feedstock --- conda-recipe/build.sh | 55 --------------------------------------- conda-recipe/meta.yaml | 51 ------------------------------------ conda-recipe/post-link.sh | 50 ----------------------------------- 3 files changed, 156 deletions(-) delete mode 100644 conda-recipe/build.sh delete mode 100644 conda-recipe/meta.yaml delete mode 100755 conda-recipe/post-link.sh diff --git a/conda-recipe/build.sh b/conda-recipe/build.sh deleted file mode 100644 index db4d8f06fc..0000000000 --- a/conda-recipe/build.sh +++ /dev/null @@ -1,55 +0,0 @@ -#!/bin/bash - -mkdir build -cd build -export LD_LIBRARY_PATH=$PREFIX/lib/ -export CMAKE_LIBRARY_PATH=$PREFIX/lib/ -export PATH=$PREFIX/bin:$PATH - -export MACOSX_DEPLOYMENT_TARGET= - -# -# Previous iterations have ahd trouble with hdf build vs. link -# versions. Something like the following has helped in the past. -# -#### hack fix for hdf5 issues -### if [[ `uname` == 'Linux' ]]; then -### ln -s $PREFIX/lib/libhdf5.so.9 $PREFIX/lib/libhdf5.so.8 -### ln -s $PREFIX/lib/libhdf5_hl.so.9 $PREFIX/lib/libhdf5_hl.so.8 -### else -### ln -s $PREFIX/lib/libhdf5.9.dylib $PREFIX/lib/libhdf5.8.dylib -### ln -s $PREFIX/lib/libhdf5_hl.9.dylib $PREFIX/lib/libhdf5_hl.8.dylib -### fi - -if [[ `uname` == 'Linux' ]]; then - cmake .. \ - -DCMAKE_INSTALL_PREFIX=$PREFIX \ - -DHDF5_ROOT=$PREFIX \ - -DBOOST_ROOT=$PREFIX \ - -DBOOST_LIBRARYDIR=$PREFIX/lib \ - -DBoost_NO_SYSTEM_PATHS=ON \ - -DCMAKE_BUILD_TYPE=Release \ - -DLAPACK_LIBRARIES=$PREFIX/lib/liblapack.so \ - -DBLAS_LIBRARIES=$PREFIX/lib/libblas.so -else - echo $CFLAGS - echo $LDFLAGS - export MACOSX_DEPLOYMENT_TARGET= - export DYLD_LIBRARY_PATH=$PREFIX/lib - export LDFLAGS="-headerpad_max_install_names -headerpad" - export CFLAGS="-headerpad_max_install_names -headerpad" - export CXXFLAGS= - cmake .. \ - -DCMAKE_INSTALL_PREFIX=$PREFIX \ - -DHDF5_ROOT=$PREFIX \ - -DCOIN_ROOT_DIR=$PREFIX \ - -DBOOST_ROOT=$PREFIX \ - -DCMAKE_BUILD_TYPE=Release \ - -DLAPACK_LIBRARIES=$PREFIX/lib/liblapack.dylib \ - -DBLAS_LIBRARIES=$PREFIX/lib/libblas.dylib -fi - -make VERBOSE=1 -make install - -cd .. 
diff --git a/conda-recipe/meta.yaml b/conda-recipe/meta.yaml deleted file mode 100644 index 51ade4fceb..0000000000 --- a/conda-recipe/meta.yaml +++ /dev/null @@ -1,51 +0,0 @@ -package: - name: cyclus - version: 0.0 - -# Only use fn and url for polyphemus compatability -source: - fn: cyclus-src.tar.gz # ["TRAVIS" not in environ] - url: https://github.com/cyclus/cyclus/archive/develop.tar.gz # ["TRAVIS" not in environ] - path: .. # ["TRAVIS" in environ] - -requirements: - build: - - sigcpp - - glibmm - - libxmlpp - - coincbc - - boost - - hdf5 - - mylibxml2 # [osx] - - libxml2 # [linux] - - myglib # [osx] - - libffi # [osx] - - gettext # [osx] - - pkg-config-lite # [osx] - - cmake - - python - run: - - sigcpp - - glibmm - - libxmlpp - - coincbc - - boost - - hdf5 - - mylibxml2 # [osx] - - libxml2 # [linux] - - myglib # [osx] - - libffi # [osx] - - gettext # [osx] - - pkg-config-lite # [osx] - -build: - string: nightly - -test: - requires: - - nose - - pytables - -about: - home: Cyclus - license: BSD Clause 3 diff --git a/conda-recipe/post-link.sh b/conda-recipe/post-link.sh deleted file mode 100755 index e9d444048d..0000000000 --- a/conda-recipe/post-link.sh +++ /dev/null @@ -1,50 +0,0 @@ -# This script replaces the cyclus and cyclus_unit_tests commands with simple -# wrappers that will modify the user's environment as needed to point -# cyclus-sepcific envrionment variables to the conda install location $PREFIX. -# Conda packaging has three phases which come to a head here. -# -# 1. builing the package on a builder's computer -# 2. installing the package on the user's machine, where this script is run -# 3. runtime, when the wrapper script(s) execute. -# -# At install time (2), the conda post-link phase will define some extra -# environment variables, such as $PREFIX, that are not available elsewhere. -# These variables are descriped at http://conda.pydata.org/docs/building/build-scripts.html -# Otherwise envrionment variables in the wrapper script (eg $CYCLUS_PATH) -# must be escaped here so that they are evaluated at run time (3) rather -# than at build (1) or install (2). -echo "post-link.sh, PREFIX: $PREFIX" - -mv $PREFIX/bin/cyclus $PREFIX/bin/cyclus_base -echo "#!/bin/bash -export CYCLUS_PATH=\"\$CYCLUS_PATH:\$HOME/.local/lib/cyclus:$PREFIX/lib/cyclus\" -if [ -z \"\$CYCLUS_NUC_DATA\" ]; then - export CYCLUS_NUC_DATA=\"$PREFIX/share/cyclus/cyclus_nuc_data.h5\" -fi -if [ -z \"\$CYCLUS_RNG_SCHEMA\" ]; then - export CYCLUS_RNG_SCHEMA=\"$PREFIX/share/cyclus/cyclus.rng.in\" -fi - -$PREFIX/bin/cyclus_base \$* -" > $PREFIX/bin/cyclus -chmod 755 $PREFIX/bin/cyclus - -# The library path modifications are here because cyclus installs -# libgtest and libbaseagentunittests into the lib/cyclus directory. -# We make this directory the last possible location to be searched. 
-mv $PREFIX/bin/cyclus_unit_tests $PREFIX/bin/cyclus_unit_tests_base -echo "#!/bin/bash -export LD_LIBRARY_PATH=\"\$LD_LIBRARY_PATH:$PREFIX/lib/cyclus\" -export DYLD_FALLBACK_LIBRARY_PATH=\"\$DYLD_FALLBACK_LIBRARY_PATH:$PREFIX/lib/cyclus\" -export CYCLUS_PATH=\"\$CYCLUS_PATH:\$HOME/.local/lib/cyclus:$PREFIX/lib/cyclus\" -if [ -z \"\$CYCLUS_NUC_DATA\" ]; then - export CYCLUS_NUC_DATA=\"$PREFIX/share/cyclus/cyclus_nuc_data.h5\" -fi -if [ -z \"\$CYCLUS_RNG_SCHEMA\" ]; then - export CYCLUS_RNG_SCHEMA=\"$PREFIX/share/cyclus/cyclus.rng.in\" -fi - -$PREFIX/bin/cyclus_unit_tests_base \$* -" > $PREFIX/bin/cyclus_unit_tests -chmod 755 $PREFIX/bin/cyclus_unit_tests - From 0a757671a99174afebc0aa6812ac51a2f77825ba Mon Sep 17 00:00:00 2001 From: "Paul P.H. Wilson" Date: Tue, 19 Sep 2023 07:23:26 -0500 Subject: [PATCH 80/82] remove changelog test script b/c now in action --- housekeeping_script/changelog_test.sh | 29 --------------------------- 1 file changed, 29 deletions(-) delete mode 100755 housekeeping_script/changelog_test.sh diff --git a/housekeeping_script/changelog_test.sh b/housekeeping_script/changelog_test.sh deleted file mode 100755 index d69c6a4f28..0000000000 --- a/housekeeping_script/changelog_test.sh +++ /dev/null @@ -1,29 +0,0 @@ -#!/bin/sh - -OWNER=cyclus -REPO=cyclus -CHANGELOG_FILE=CHANGELOG.rst - -# default main repo setup -PR_BASE_BRANCH=master -echo "Testing changelog against $PR_BASE_BRANCH branch" - -master_repo="https://github.com/${OWNER}/${REPO}.git" -default_branch=$PR_BASE_BRANCH - -# setup temp remote -git_remote_name=ci_changelog_`git log --pretty=format:'%h' -n 1` -git remote add ${git_remote_name} ${master_repo} -git fetch ${git_remote_name} - -# diff against temp remote -added_changelog_entry=$((`git diff ${git_remote_name}/${default_branch} -- ${CHANGELOG_FILE} |wc -l`)) - -# cleaning temp remote -git remote remove ${git_remote_name} - -# analysing the diff and returning accordingly -if [ $added_changelog_entry -eq 0 ]; then - echo "No new changelog entry detected, please update the ${CHANGELOG_FILE} according to your submited changes!" - exit 1 -fi From 6e2bed1e61cd791a4f1f04f85b4a89f9716563bf Mon Sep 17 00:00:00 2001 From: "Paul P.H. 
Wilson" Date: Wed, 20 Sep 2023 08:53:41 -0500 Subject: [PATCH 81/82] replace all references to `master` except in specific lines of code --- .github/workflows/build_test_publish.yml | 2 +- .github/workflows/changelog_test.yml | 2 +- CONTRIBUTING.rst | 38 ++++++++++++------------ README.rst | 8 ++--- cli/cyclus.cc | 6 ++-- cyclus/lib.pyx | 4 +-- cyclus/main.py | 14 ++++----- cyclus/simstate.py | 4 +-- doc/release_notes/v0.3.rst | 6 ++-- 9 files changed, 42 insertions(+), 42 deletions(-) diff --git a/.github/workflows/build_test_publish.yml b/.github/workflows/build_test_publish.yml index d6702282ba..5311b3fcd3 100644 --- a/.github/workflows/build_test_publish.yml +++ b/.github/workflows/build_test_publish.yml @@ -29,7 +29,7 @@ jobs: echo "tag-latest-on-default=false" >> "$GITHUB_ENV" - name: condition on trigger parameters - if: ${{ github.repository_owner == 'cyclus' && github.ref == 'refs/heads/master' }} + if: ${{ github.repository_owner == 'cyclus' && github.ref == 'refs/heads/main' }} run: | echo "tag-latest-on-default=true" >> "$GITHUB_ENV" diff --git a/.github/workflows/changelog_test.yml b/.github/workflows/changelog_test.yml index cb327a45c6..b8cdd106d3 100644 --- a/.github/workflows/changelog_test.yml +++ b/.github/workflows/changelog_test.yml @@ -29,7 +29,7 @@ jobs: cd $GITHUB_WORKSPACE git remote add cyclus https://github.com/cyclus/cyclus.git git fetch cyclus - change=`git diff cyclus/master -- CHANGELOG.rst | wc -l` + change=`git diff cyclus/main -- CHANGELOG.rst | wc -l` git remote remove cyclus if [ $change -eq 0 ]; then echo "CHANGELOG.rst has not been updated" diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 0bcf6210f6..7be4d0df16 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -12,9 +12,9 @@ General Notes * Use a branching workflow similar to the one described at http://progit.org/book/ch3-4.html. -* Keep your own "master" branch in sync with the mainline - repository's "master" branch. Specifically, do not push your - own commits directly to your "master" branch. +* Keep your own "main" branch in sync with the mainline + repository's "main" branch. Specifically, do not push your + own commits directly to your "main" branch. * Any commit should *pass all tests* (see `Running Tests`_). @@ -30,11 +30,11 @@ Issuing a Pull Request ====================== * When you are ready to move changes from one of your topic branches into the - "master" branch, it must be reviewed and accepted by another developer. + "main" branch, it must be reviewed and accepted by another developer. * You may want to review this `tutorial `_ before you make a - pull request to the master branch. + pull request to the main branch. Reviewing a Pull Request ======================== @@ -51,7 +51,7 @@ Reviewing a Pull Request * Click the green "Merge Pull Request" button * Note: if the button is not available, the requester needs to merge or rebase - from the current HEAD of the mainline "master" branch + from the current HEAD of the mainline "main" branch Running Tests ============= @@ -75,7 +75,7 @@ Cautions * **DO NOT** rebase any commits that have been pulled/pushed anywhere else other than your own fork (especially if those commits have been integrated into the blessed repository). You should NEVER rebase commits that are a part of the - 'master' branch. *If you do, we will never, ever accept your pull request*. + 'main' branch. *If you do, we will never, ever accept your pull request*. 
An Example ========== @@ -96,7 +96,7 @@ Acquiring Cyclus and Workflow ----------------------------- We begin with a fork of the mainline Cyclus repository. After initially forking -the repo, we will have the master branch in your fork. +the repo, we will have the main branch in your fork. Acquiring a Fork of the Cyclus Repository ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -111,9 +111,9 @@ First, let's make our "work" branch: .../cyclus_dir/$ git branch work .../cyclus_dir/$ git push origin work -We now have the following situation: there exists the mainline copy of the master -branch, there exists your fork's copy of the master and working branches, -*AND* there exists your *local* copy of the master and working branches. It is +We now have the following situation: there exists the mainline copy of the main +branch, there exists your fork's copy of the main and working branches, +*AND* there exists your *local* copy of the main and working branches. It is important now to note that you may wish to work from home or the office. If you keep your fork's branches up to date (i.e., "push" your changes before you leave), only your *local* copies of your branches may be different when you next sit down at the other location. @@ -128,22 +128,22 @@ work, finished, and successfully pushed your changes to your *Origin* repository. You are now at home and want to continue working a bit. To begin, let's update our *home's local branches*:: - .../cyclus_dir/$ git checkout master - .../cyclus_dir/$ git pull upstream master - .../cyclus_dir/$ git push origin master + .../cyclus_dir/$ git checkout main + .../cyclus_dir/$ git pull upstream main + .../cyclus_dir/$ git push origin main .../cyclus_dir/$ git checkout work .../cyclus_dir/$ git pull origin work - .../cyclus_dir/$ git rebase master + .../cyclus_dir/$ git rebase main .../cyclus_dir/$ git push origin work Perhaps a little explanation is required. We first want to make sure that this new local copy of -the master branch is up-to-date with respect to the remote origin's branch and remote upstream's +the main branch is up-to-date with respect to the remote origin's branch and remote upstream's branch. If there was a change from the remote upstream's branch, we want to push that to origin. We then follow the same process to update the work branch, except: #. we don't need to worry about the *upstream* repo because it doesn't have a work branch, and -#. we want to incorporate any changes which may have been introduced in the master branch update. +#. we want to incorporate any changes which may have been introduced in the main branch update. Workflow: The End ^^^^^^^^^^^^^^^^^ @@ -152,7 +152,7 @@ As time passes, you make some changes to files, and you commit those changes (to branch*). Eventually (hopefully) you come to a stopping point where you have finished your project on your work branch *AND* it compiles *AND* it runs input files correctly *AND* it passes all tests! Perhaps you have found Nirvana. In any case, you've performed the final commit to your work branch, -so it's time to make a pull request online and wait for our masterr friends to +so it's time to make a pull request online and wait for our main friends to review and accept it. Sometimes, your pull request will be held by the reviewer until further changes @@ -176,5 +176,5 @@ Releases If you are going through a release of Cyclus and Cycamore, check out the release procedure notes `here -`_ and +`_ and on the `website `_. 
diff --git a/README.rst b/README.rst index e33233ae7d..64b02e3572 100644 --- a/README.rst +++ b/README.rst @@ -12,13 +12,13 @@ Cyclus Projects Status ----------------------------------------------------------------------------------- **Branch** **Cyclus** **Cycamore** **Cymetric** ================ ================= =================== =================== -master |cyclus_master| |cycamore_master| |cymetric_master| +main |cyclus_main| |cycamore_main| |cymetric_main| ================ ================= =================== =================== -.. |cyclus_master| image:: https://circleci.com/gh/cyclus/cyclus/tree/master.png?&circle-token= 35d82ba8661d4f32e0f084b9d8a2388fa62c0262 -.. |cycamore_master| image:: https://circleci.com/gh/cyclus/cycamore/tree/master.png?&circle-token= 333211090d5d5a15110eed1adbe079a6f3a4a704 -.. |cymetric_master| image:: https://circleci.com/gh/cyclus/cymetric/tree/master.png?&circle-token= 72639b59387f077973af98e7ce72996eac18b96c +.. |cyclus_main| image:: https://circleci.com/gh/cyclus/cyclus/tree/main.png?&circle-token= 35d82ba8661d4f32e0f084b9d8a2388fa62c0262 +.. |cycamore_main| image:: https://circleci.com/gh/cyclus/cycamore/tree/main.png?&circle-token= 333211090d5d5a15110eed1adbe079a6f3a4a704 +.. |cymetric_main| image:: https://circleci.com/gh/cyclus/cymetric/tree/main.png?&circle-token= 72639b59387f077973af98e7ce72996eac18b96c diff --git a/cli/cyclus.cc b/cli/cyclus.cc index ba0c62f001..9e198240f1 100644 --- a/cli/cyclus.cc +++ b/cli/cyclus.cc @@ -240,14 +240,14 @@ int ParseCliArgs(ArgInfo* ai, int argc, char* argv[]) { ("restart", po::value(), "restart from the specified simulation snapshot [db-file]:[sim-id]:[timestep]") ("schema", - "dump the cyclus master schema including all installed module schemas") + "dump the cyclus main schema including all installed module schemas") ("agent-schema", po::value(), "dump the schema for the named agent") ("agent-version", po::value(), "print the version of the specified agent") ("schema-path", po::value(), - "manually specify the path to the cyclus master schema") - ("flat-schema", "use the flat master simulation schema") + "manually specify the path to the cyclus main schema") + ("flat-schema", "use the flat main simulation schema") ("agent-annotations", po::value(), "dump the annotations for the named agent") ("agent-listing,l", po::value(), diff --git a/cyclus/lib.pyx b/cyclus/lib.pyx index 258533e2f0..1512f280e5 100644 --- a/cyclus/lib.pyx +++ b/cyclus/lib.pyx @@ -854,7 +854,7 @@ class XMLFileLoader(_XMLFileLoader): Create a new loader reading from the xml simulation input file and writing to and initializing the backends in the recorder. The recorder must - already have the backend registered. schema_file identifies the master + already have the backend registered. schema_file identifies the main xml rng schema used to validate the input file. The format specifies the input file format from one of: "none", "xml", "json", or "py". """ @@ -887,7 +887,7 @@ class XMLFlatLoader(_XMLFlatLoader): Create a new loader reading from the xml simulation input file and writing to and initializing the backends in the recorder. The recorder must - already have the backend registered. schema_file identifies the master + already have the backend registered. schema_file identifies the main xml rng schema used to validate the input file. The format specifies the input file format from one of: "none", "xml", "json", or "py". 
diff --git a/cyclus/main.py b/cyclus/main.py index 19af6e6091..21c17125c0 100644 --- a/cyclus/main.py +++ b/cyclus/main.py @@ -314,7 +314,7 @@ def make_parser(): help='restart from the specified simulation snapshot, ' 'not supported.') p.add_argument('--schema', action=Schema, - help='dump the cyclus master schema including all ' + help='dump the cyclus main schema including all ' 'installed module schemas') p.add_argument('--agent-schema', action=AgentSchema, dest='agent_schema', @@ -323,10 +323,10 @@ def make_parser(): dest='agent_version', help='dump the version for the named agent') p.add_argument('--schema-path', dest='schema_path', default=None, - help='manually specify the path to the cyclus master schema') + help='manually specify the path to the cyclus main schema') p.add_argument('--flat-schema', action='store_true', default=False, dest='flat_schema', - help='use the flat master simulation schema') + help='use the flat main simulation schema') p.add_argument('--agent-annotations', action=AgentAnnotations, dest='agent_annotations', help='dump the annotations for the named agent') @@ -364,7 +364,7 @@ def make_parser(): p.add_argument('--rng-schema', action=RngSchema, help='print the path to cyclus.rng.in') p.add_argument('--rng-print', action=RngPrint, - help='print the master schema for the input simulation') + help='print the main schema for the input simulation') p.add_argument('--nuc-data', action=NucData, help='print the path to cyclus_nuc_data.h5') p.add_argument('--json-to-xml', action=JsonToXml, @@ -407,8 +407,8 @@ def run_simulation(ns): state.si.context.sim_id) print(msg) -def print_master_schema(ns): - """Prints the master schema for the simulation""" +def print_main_schema(ns): + """Prints the main schema for the simulation""" state = SimState(input_file=ns.input_file, input_format=ns.format, output_path=ns.output_path, schema_path=ns.schema_path, flat_schema=ns.flat_schema, print_ms=True) @@ -422,7 +422,7 @@ def main(args=None): p = make_parser() ns = p.parse_args(args=args) if(ns.rng_print): - print_master_schema(ns) + print_main_schema(ns) elif ns.input_file is not None: run_simulation(ns) diff --git a/cyclus/simstate.py b/cyclus/simstate.py index 4ba3e9da5a..32923c0e81 100644 --- a/cyclus/simstate.py +++ b/cyclus/simstate.py @@ -58,9 +58,9 @@ class SimState(object): The initial registry to start the in-memory backend with. Defaults is True, which stores all of the tables. schema_path : str or None, optional: - The path to the cyclus master schema. + The path to the cyclus main schema. flat_schema : bool, optional - Whether or not to use the flat master simulation schema. + Whether or not to use the flat main simulation schema. frequency : int or float, optional The amount of time [sec] to sleep for in tight loops, default 1 ms. repeating_actions : list or None, optional diff --git a/doc/release_notes/v0.3.rst b/doc/release_notes/v0.3.rst index 8cc2454312..2f4c8f7c4a 100644 --- a/doc/release_notes/v0.3.rst +++ b/doc/release_notes/v0.3.rst @@ -85,7 +85,7 @@ New features - updated namespace name to reflect directory name - made mock fac/inst/region/market classes to be used with testing. added the stub facility in a new stub namespace and related tests. - added initial stub directory and adjusted cmake files to include their tests. -- moved all dynamic loading into xml_file_loader. Added a method for listing installed/discoverable dynamic modules to env class. added rng schema methods to test agents and removed rng files from them. 
removed rng installation from cmake module macro. added master and module schema dumping to cyclus binary. added schema agent test (that schema parses). moved heavy stuff out of xml-file-loader constructor. renamed LoadAll to LoadSim. +- moved all dynamic loading into xml_file_loader. Added a method for listing installed/discoverable dynamic modules to env class. added rng schema methods to test agents and removed rng files from them. removed rng installation from cmake module macro. added main and module schema dumping to cyclus binary. added schema agent test (that schema parses). moved heavy stuff out of xml-file-loader constructor. renamed LoadAll to LoadSim. - i think moving loglevel and the macros into the same namespace encapsulation is more promising... still unable to confirm. - finishes remaining doxygen warnings - I believe that this will fix the warning stemming from logger.h, but I do not see the warning on my machine, so I can't be sure @@ -201,7 +201,7 @@ New features - literal 0 -> 0.0 for fp compares - abs() -> fabs(), types they are a-changing. - fixes doc errors, should clear up @gonuke's cron job errors -- fixes master schema building +- fixes main schema building - updated test files so cycamore can build - ran all files in Core dir through astyle for style guide formatting - updated enrichment function names @@ -271,7 +271,7 @@ New features - first cyclus ns changes. - made buildSchema private. Used Agent class module type list instead of custom one. - removed cyclus.rng.in generation - now done dynamically in cyclus core -- modified XML loading to dynamically build the master schema by searching for installed modules +- modified XML loading to dynamically build the main schema by searching for installed modules - created csv backend. - fixed name erro - updated setup with localdir as default for some params From cfb66f5c53359f00d06527dcf7c7646e269d6f3d Mon Sep 17 00:00:00 2001 From: "Paul P.H. 
Wilson" Date: Wed, 20 Sep 2023 08:53:41 -0500 Subject: [PATCH 82/82] replace all references to `master` except in specific lines of code --- .github/workflows/build_test_publish.yml | 2 +- .github/workflows/changelog_test.yml | 2 +- CONTRIBUTING.rst | 38 ++++++++++++------------ README.rst | 8 ++--- cli/cyclus.cc | 6 ++-- cyclus/lib.pyx | 4 +-- cyclus/main.py | 14 ++++----- cyclus/simstate.py | 4 +-- doc/release_notes/v0.3.rst | 6 ++-- src/query_backend.h | 2 +- 10 files changed, 43 insertions(+), 43 deletions(-) diff --git a/.github/workflows/build_test_publish.yml b/.github/workflows/build_test_publish.yml index d6702282ba..5311b3fcd3 100644 --- a/.github/workflows/build_test_publish.yml +++ b/.github/workflows/build_test_publish.yml @@ -29,7 +29,7 @@ jobs: echo "tag-latest-on-default=false" >> "$GITHUB_ENV" - name: condition on trigger parameters - if: ${{ github.repository_owner == 'cyclus' && github.ref == 'refs/heads/master' }} + if: ${{ github.repository_owner == 'cyclus' && github.ref == 'refs/heads/main' }} run: | echo "tag-latest-on-default=true" >> "$GITHUB_ENV" diff --git a/.github/workflows/changelog_test.yml b/.github/workflows/changelog_test.yml index cb327a45c6..b8cdd106d3 100644 --- a/.github/workflows/changelog_test.yml +++ b/.github/workflows/changelog_test.yml @@ -29,7 +29,7 @@ jobs: cd $GITHUB_WORKSPACE git remote add cyclus https://github.com/cyclus/cyclus.git git fetch cyclus - change=`git diff cyclus/master -- CHANGELOG.rst | wc -l` + change=`git diff cyclus/main -- CHANGELOG.rst | wc -l` git remote remove cyclus if [ $change -eq 0 ]; then echo "CHANGELOG.rst has not been updated" diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 0bcf6210f6..7be4d0df16 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -12,9 +12,9 @@ General Notes * Use a branching workflow similar to the one described at http://progit.org/book/ch3-4.html. -* Keep your own "master" branch in sync with the mainline - repository's "master" branch. Specifically, do not push your - own commits directly to your "master" branch. +* Keep your own "main" branch in sync with the mainline + repository's "main" branch. Specifically, do not push your + own commits directly to your "main" branch. * Any commit should *pass all tests* (see `Running Tests`_). @@ -30,11 +30,11 @@ Issuing a Pull Request ====================== * When you are ready to move changes from one of your topic branches into the - "master" branch, it must be reviewed and accepted by another developer. + "main" branch, it must be reviewed and accepted by another developer. * You may want to review this `tutorial `_ before you make a - pull request to the master branch. + pull request to the main branch. Reviewing a Pull Request ======================== @@ -51,7 +51,7 @@ Reviewing a Pull Request * Click the green "Merge Pull Request" button * Note: if the button is not available, the requester needs to merge or rebase - from the current HEAD of the mainline "master" branch + from the current HEAD of the mainline "main" branch Running Tests ============= @@ -75,7 +75,7 @@ Cautions * **DO NOT** rebase any commits that have been pulled/pushed anywhere else other than your own fork (especially if those commits have been integrated into the blessed repository). You should NEVER rebase commits that are a part of the - 'master' branch. *If you do, we will never, ever accept your pull request*. + 'main' branch. *If you do, we will never, ever accept your pull request*. 
An Example ========== @@ -96,7 +96,7 @@ Acquiring Cyclus and Workflow ----------------------------- We begin with a fork of the mainline Cyclus repository. After initially forking -the repo, we will have the master branch in your fork. +the repo, we will have the main branch in your fork. Acquiring a Fork of the Cyclus Repository ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -111,9 +111,9 @@ First, let's make our "work" branch: .../cyclus_dir/$ git branch work .../cyclus_dir/$ git push origin work -We now have the following situation: there exists the mainline copy of the master -branch, there exists your fork's copy of the master and working branches, -*AND* there exists your *local* copy of the master and working branches. It is +We now have the following situation: there exists the mainline copy of the main +branch, there exists your fork's copy of the main and working branches, +*AND* there exists your *local* copy of the main and working branches. It is important now to note that you may wish to work from home or the office. If you keep your fork's branches up to date (i.e., "push" your changes before you leave), only your *local* copies of your branches may be different when you next sit down at the other location. @@ -128,22 +128,22 @@ work, finished, and successfully pushed your changes to your *Origin* repository. You are now at home and want to continue working a bit. To begin, let's update our *home's local branches*:: - .../cyclus_dir/$ git checkout master - .../cyclus_dir/$ git pull upstream master - .../cyclus_dir/$ git push origin master + .../cyclus_dir/$ git checkout main + .../cyclus_dir/$ git pull upstream main + .../cyclus_dir/$ git push origin main .../cyclus_dir/$ git checkout work .../cyclus_dir/$ git pull origin work - .../cyclus_dir/$ git rebase master + .../cyclus_dir/$ git rebase main .../cyclus_dir/$ git push origin work Perhaps a little explanation is required. We first want to make sure that this new local copy of -the master branch is up-to-date with respect to the remote origin's branch and remote upstream's +the main branch is up-to-date with respect to the remote origin's branch and remote upstream's branch. If there was a change from the remote upstream's branch, we want to push that to origin. We then follow the same process to update the work branch, except: #. we don't need to worry about the *upstream* repo because it doesn't have a work branch, and -#. we want to incorporate any changes which may have been introduced in the master branch update. +#. we want to incorporate any changes which may have been introduced in the main branch update. Workflow: The End ^^^^^^^^^^^^^^^^^ @@ -152,7 +152,7 @@ As time passes, you make some changes to files, and you commit those changes (to branch*). Eventually (hopefully) you come to a stopping point where you have finished your project on your work branch *AND* it compiles *AND* it runs input files correctly *AND* it passes all tests! Perhaps you have found Nirvana. In any case, you've performed the final commit to your work branch, -so it's time to make a pull request online and wait for our masterr friends to +so it's time to make a pull request online and wait for our main friends to review and accept it. Sometimes, your pull request will be held by the reviewer until further changes @@ -176,5 +176,5 @@ Releases If you are going through a release of Cyclus and Cycamore, check out the release procedure notes `here -`_ and +`_ and on the `website `_. 
diff --git a/README.rst b/README.rst index e33233ae7d..64b02e3572 100644 --- a/README.rst +++ b/README.rst @@ -12,13 +12,13 @@ Cyclus Projects Status ----------------------------------------------------------------------------------- **Branch** **Cyclus** **Cycamore** **Cymetric** ================ ================= =================== =================== -master |cyclus_master| |cycamore_master| |cymetric_master| +main |cyclus_main| |cycamore_main| |cymetric_main| ================ ================= =================== =================== -.. |cyclus_master| image:: https://circleci.com/gh/cyclus/cyclus/tree/master.png?&circle-token= 35d82ba8661d4f32e0f084b9d8a2388fa62c0262 -.. |cycamore_master| image:: https://circleci.com/gh/cyclus/cycamore/tree/master.png?&circle-token= 333211090d5d5a15110eed1adbe079a6f3a4a704 -.. |cymetric_master| image:: https://circleci.com/gh/cyclus/cymetric/tree/master.png?&circle-token= 72639b59387f077973af98e7ce72996eac18b96c +.. |cyclus_main| image:: https://circleci.com/gh/cyclus/cyclus/tree/main.png?&circle-token= 35d82ba8661d4f32e0f084b9d8a2388fa62c0262 +.. |cycamore_main| image:: https://circleci.com/gh/cyclus/cycamore/tree/main.png?&circle-token= 333211090d5d5a15110eed1adbe079a6f3a4a704 +.. |cymetric_main| image:: https://circleci.com/gh/cyclus/cymetric/tree/main.png?&circle-token= 72639b59387f077973af98e7ce72996eac18b96c diff --git a/cli/cyclus.cc b/cli/cyclus.cc index ba0c62f001..9e198240f1 100644 --- a/cli/cyclus.cc +++ b/cli/cyclus.cc @@ -240,14 +240,14 @@ int ParseCliArgs(ArgInfo* ai, int argc, char* argv[]) { ("restart", po::value(), "restart from the specified simulation snapshot [db-file]:[sim-id]:[timestep]") ("schema", - "dump the cyclus master schema including all installed module schemas") + "dump the cyclus main schema including all installed module schemas") ("agent-schema", po::value(), "dump the schema for the named agent") ("agent-version", po::value(), "print the version of the specified agent") ("schema-path", po::value(), - "manually specify the path to the cyclus master schema") - ("flat-schema", "use the flat master simulation schema") + "manually specify the path to the cyclus main schema") + ("flat-schema", "use the flat main simulation schema") ("agent-annotations", po::value(), "dump the annotations for the named agent") ("agent-listing,l", po::value(), diff --git a/cyclus/lib.pyx b/cyclus/lib.pyx index 258533e2f0..1512f280e5 100644 --- a/cyclus/lib.pyx +++ b/cyclus/lib.pyx @@ -854,7 +854,7 @@ class XMLFileLoader(_XMLFileLoader): Create a new loader reading from the xml simulation input file and writing to and initializing the backends in the recorder. The recorder must - already have the backend registered. schema_file identifies the master + already have the backend registered. schema_file identifies the main xml rng schema used to validate the input file. The format specifies the input file format from one of: "none", "xml", "json", or "py". """ @@ -887,7 +887,7 @@ class XMLFlatLoader(_XMLFlatLoader): Create a new loader reading from the xml simulation input file and writing to and initializing the backends in the recorder. The recorder must - already have the backend registered. schema_file identifies the master + already have the backend registered. schema_file identifies the main xml rng schema used to validate the input file. The format specifies the input file format from one of: "none", "xml", "json", or "py". 
diff --git a/cyclus/main.py b/cyclus/main.py index 19af6e6091..21c17125c0 100644 --- a/cyclus/main.py +++ b/cyclus/main.py @@ -314,7 +314,7 @@ def make_parser(): help='restart from the specified simulation snapshot, ' 'not supported.') p.add_argument('--schema', action=Schema, - help='dump the cyclus master schema including all ' + help='dump the cyclus main schema including all ' 'installed module schemas') p.add_argument('--agent-schema', action=AgentSchema, dest='agent_schema', @@ -323,10 +323,10 @@ def make_parser(): dest='agent_version', help='dump the version for the named agent') p.add_argument('--schema-path', dest='schema_path', default=None, - help='manually specify the path to the cyclus master schema') + help='manually specify the path to the cyclus main schema') p.add_argument('--flat-schema', action='store_true', default=False, dest='flat_schema', - help='use the flat master simulation schema') + help='use the flat main simulation schema') p.add_argument('--agent-annotations', action=AgentAnnotations, dest='agent_annotations', help='dump the annotations for the named agent') @@ -364,7 +364,7 @@ def make_parser(): p.add_argument('--rng-schema', action=RngSchema, help='print the path to cyclus.rng.in') p.add_argument('--rng-print', action=RngPrint, - help='print the master schema for the input simulation') + help='print the main schema for the input simulation') p.add_argument('--nuc-data', action=NucData, help='print the path to cyclus_nuc_data.h5') p.add_argument('--json-to-xml', action=JsonToXml, @@ -407,8 +407,8 @@ def run_simulation(ns): state.si.context.sim_id) print(msg) -def print_master_schema(ns): - """Prints the master schema for the simulation""" +def print_main_schema(ns): + """Prints the main schema for the simulation""" state = SimState(input_file=ns.input_file, input_format=ns.format, output_path=ns.output_path, schema_path=ns.schema_path, flat_schema=ns.flat_schema, print_ms=True) @@ -422,7 +422,7 @@ def main(args=None): p = make_parser() ns = p.parse_args(args=args) if(ns.rng_print): - print_master_schema(ns) + print_main_schema(ns) elif ns.input_file is not None: run_simulation(ns) diff --git a/cyclus/simstate.py b/cyclus/simstate.py index 4ba3e9da5a..32923c0e81 100644 --- a/cyclus/simstate.py +++ b/cyclus/simstate.py @@ -58,9 +58,9 @@ class SimState(object): The initial registry to start the in-memory backend with. Defaults is True, which stores all of the tables. schema_path : str or None, optional: - The path to the cyclus master schema. + The path to the cyclus main schema. flat_schema : bool, optional - Whether or not to use the flat master simulation schema. + Whether or not to use the flat main simulation schema. frequency : int or float, optional The amount of time [sec] to sleep for in tight loops, default 1 ms. repeating_actions : list or None, optional diff --git a/doc/release_notes/v0.3.rst b/doc/release_notes/v0.3.rst index 8cc2454312..2f4c8f7c4a 100644 --- a/doc/release_notes/v0.3.rst +++ b/doc/release_notes/v0.3.rst @@ -85,7 +85,7 @@ New features - updated namespace name to reflect directory name - made mock fac/inst/region/market classes to be used with testing. added the stub facility in a new stub namespace and related tests. - added initial stub directory and adjusted cmake files to include their tests. -- moved all dynamic loading into xml_file_loader. Added a method for listing installed/discoverable dynamic modules to env class. added rng schema methods to test agents and removed rng files from them. 
removed rng installation from cmake module macro. added master and module schema dumping to cyclus binary. added schema agent test (that schema parses). moved heavy stuff out of xml-file-loader constructor. renamed LoadAll to LoadSim. +- moved all dynamic loading into xml_file_loader. Added a method for listing installed/discoverable dynamic modules to env class. added rng schema methods to test agents and removed rng files from them. removed rng installation from cmake module macro. added main and module schema dumping to cyclus binary. added schema agent test (that schema parses). moved heavy stuff out of xml-file-loader constructor. renamed LoadAll to LoadSim. - i think moving loglevel and the macros into the same namespace encapsulation is more promising... still unable to confirm. - finishes remaining doxygen warnings - I believe that this will fix the warning stemming from logger.h, but I do not see the warning on my machine, so I can't be sure @@ -201,7 +201,7 @@ New features - literal 0 -> 0.0 for fp compares - abs() -> fabs(), types they are a-changing. - fixes doc errors, should clear up @gonuke's cron job errors -- fixes master schema building +- fixes main schema building - updated test files so cycamore can build - ran all files in Core dir through astyle for style guide formatting - updated enrichment function names @@ -271,7 +271,7 @@ New features - first cyclus ns changes. - made buildSchema private. Used Agent class module type list instead of custom one. - removed cyclus.rng.in generation - now done dynamically in cyclus core -- modified XML loading to dynamically build the master schema by searching for installed modules +- modified XML loading to dynamically build the main schema by searching for installed modules - created csv backend. - fixed name erro - updated setup with localdir as default for some params diff --git a/src/query_backend.h b/src/query_backend.h index e70783e25c..51725ea88b 100644 --- a/src/query_backend.h +++ b/src/query_backend.h @@ -23,7 +23,7 @@ namespace cyclus { -/// This is the master list of all supported database types. All types must +/// This is the primary list of all supported database types. All types must /// have a constant length unless they begin with the prefix VL_, which stands /// for "variable length" or are implicitly variable length, such as blob. /// Changing the order here may invalidate previously created databases.