diff --git a/.gitignore b/.gitignore index c0009a7b70..3da2fdd610 100644 --- a/.gitignore +++ b/.gitignore @@ -5,6 +5,9 @@ conda/ _build/ install/ reports/ +_skbuild/ +*.egg-info/ +wheelhouse/ # compilation artefacts pynest/pynestkernel.cxx @@ -36,7 +39,6 @@ bin/nest-config bin/nest_vars.sh libnestutil/config.h nest/static_modules.h -pynest/setup.py # installation artefacts install_manifest.txt diff --git a/CMakeLists.txt b/CMakeLists.txt index 9cd695eb89..db2f48a4f4 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -26,6 +26,20 @@ project( nest CXX C ) set( CMAKE_CXX_STANDARD 20 ) set( CMAKE_CXX_STANDARD_REQUIRED True ) +set( NEST_WHEELBUILD "$ENV{NEST_CMAKE_BUILDWHEEL}" ) +if ( NEST_WHEELBUILD ) + # Big warning that the wheel build reserves the right to destroy everything, as it + # should only run on throwaway containers. + message(WARNING "RUNNING WHEEL BUILD, DO NOT RUN ON LOCAL MACHINE, MAY CAUSE IRREVERSIBLE CHANGES!") +endif () +if ( with-openmp ) + if ( "${with-openmp}" STREQUAL "ON" ) + # This should be called from cmake/ProcessOptions.cmake:NEST_PROCESS_WITH_OPENMP + # but alas, on some unknown combination of newer versions not all compiler flags are + # found there, yet magically, are correctly found when we make the call here. + find_package( OpenMP REQUIRED ) + endif () +endif () set( NEST_USER_EMAIL "users@nest-simulator.org" ) @@ -132,6 +146,7 @@ get_target_triple( NEST_TARGET_TRIPLE NEST_TARGET_ARCH NEST_TARGET_VENDOR NEST_T nest_process_with_python() include( GNUInstallDirs ) nest_post_process_with_python() +message( STATUS "DID WE FIND UTILITY_PYTHON? 
${UTILITY_PYTHON}") nest_process_with_intel_compiler_flags() nest_process_with_warning() nest_process_with_libraries() @@ -222,15 +237,12 @@ add_custom_target( installcheck ################## Define Subdirectories here ################## ################################################################################ -add_subdirectory( doc ) add_subdirectory( bin ) -add_subdirectory( examples ) add_subdirectory( build_support ) add_subdirectory( libnestutil ) add_subdirectory( models ) add_subdirectory( nestkernel ) add_subdirectory( thirdparty ) -add_subdirectory( testsuite ) if ( HAVE_PYTHON ) add_subdirectory( pynest ) endif () @@ -320,11 +332,6 @@ configure_file( "${PROJECT_BINARY_DIR}/libnestutil/config.h" @ONLY ) -configure_file( - "${PROJECT_SOURCE_DIR}/pynest/setup.py.in" - "${PROJECT_BINARY_DIR}/pynest/setup.py" @ONLY -) - configure_file( "${PROJECT_SOURCE_DIR}/bin/nest-config.in" "${PROJECT_BINARY_DIR}/bin/nest-config" @ONLY @@ -335,17 +342,6 @@ configure_file( "${PROJECT_BINARY_DIR}/bin/nest_vars.sh" @ONLY ) -configure_file( - "${PROJECT_SOURCE_DIR}/doc/fulldoc.conf.in" - "${PROJECT_BINARY_DIR}/doc/fulldoc.conf" @ONLY -) - -configure_file( - "${PROJECT_SOURCE_DIR}/pynest/nest/versionchecker.py.in" - "${PROJECT_BINARY_DIR}/pynest/nest/versionchecker.py" @ONLY -) - - ################################################################################ ################## Install Extra Files ################## ################################################################################ @@ -354,8 +350,4 @@ install( FILES LICENSE README.md DESTINATION ${CMAKE_INSTALL_DOCDIR} ) -install( DIRECTORY examples/ - DESTINATION ${CMAKE_INSTALL_DOCDIR}/examples -) - nest_print_config_summary() diff --git a/build_support/prepare_wheel_container.py b/build_support/prepare_wheel_container.py new file mode 100644 index 0000000000..7e4feb34ec --- /dev/null +++ b/build_support/prepare_wheel_container.py @@ -0,0 +1,92 @@ +import subprocess +import os + +# This file exists as a 
Python script because running a Linux docker on Windows CI +# runners messes with the keyboard mapping of bash commands which affects symbols like +# -?/\ and makes writing commands impossible. Somehow, if we use .py files instead of +# .sh files, we can shell out from here with correct keyboard mapping. + +BOOST_VERSION = [1, 79, 0] +GSL_VERSION = [2, 7, 1] + + +def main(): + print("Installing libgomp...") + install_omp() + print("Done.") + # Containers run multiple builds, so check if a previous build has installed the + # dependency already + if not os.path.exists("/boost"): + print(f"Installing Boost {version(BOOST_VERSION)}...") + install_boost() + print("Done.") + if not os.path.exists("/gsl"): + print(f"Installing GSL {version(GSL_VERSION)}...") + install_gsl() + print("Done.") + strip_wheel_bulk() + print("Container preparation complete.") + print() + + +def run_sequence(seq): + """Run a sequence of shell commands""" + for command in seq: + subprocess.run(command, shell=True, check=True) + + +def version(ver, delim="."): + """Format list of semver parts into a string""" + return delim.join(str(v) for v in ver) + + +def install_boost(): + """Download, unpack, and move Boost to `/boost`""" + boost_ver = version(BOOST_VERSION) + boost_ver_uscore = version(BOOST_VERSION, delim="_") + install_seq = ( + ( + "curl -L https://boostorg.jfrog.io/artifactory/main/release/" + + f"{boost_ver}/source/boost_{boost_ver_uscore}.tar.gz" + + " -o boost.tar.gz" + ), + "tar -xzf boost.tar.gz", + f"mv boost_{boost_ver_uscore} /boost", + ) + run_sequence(install_seq) + + +def install_gsl(): + """Download, unpack, configure and make install GSL to `/gsl`""" + gsl_ver = version(GSL_VERSION) + install_seq = ( + f"curl -L https://mirror.ibcp.fr/pub/gnu/gsl/gsl-{gsl_ver}.tar.gz -o gsl.tar.gz", + "tar -xzf gsl.tar.gz", + "mkdir /gsl", + f"cd gsl-{gsl_ver} && ./configure --prefix=/gsl && make && make install", + ) + run_sequence(install_seq) + + +def install_omp(): + """Use the yum 
package manager of CentOS to install OpenMP libraries""" + install_seq = ("yum install -y libgomp",) + run_sequence(install_seq) + + +def strip_wheel_bulk(): + from shutil import rmtree, move + from os import makedirs + + move("/project/doc/copyright_header.cpp", "/project") + move("/project/doc/copyright_header.py", "/project") + + rmtree("/project/doc") + rmtree("/project/examples") + makedirs("/project/doc") + + move("/project/copyright_header.cpp", "/project/doc") + move("/project/copyright_header.py", "/project/doc") + + +main() diff --git a/cmake/CheckExtraCompilerFeatures.cmake b/cmake/CheckExtraCompilerFeatures.cmake index 079a6ea133..fc48cc7578 100644 --- a/cmake/CheckExtraCompilerFeatures.cmake +++ b/cmake/CheckExtraCompilerFeatures.cmake @@ -42,6 +42,7 @@ function( NEST_CHECK_EXITCODE_ABORT ) RESULT_VARIABLE RETURN_VALUE ERROR_QUIET OUTPUT_QUIET ) + set( RETURN_VALUE 255 ) if ( NOT RETURN_VALUE EQUAL 0 ) set( ABORT_ERR ${RETURN_VALUE} ) endif () @@ -50,7 +51,7 @@ function( NEST_CHECK_EXITCODE_ABORT ) endif () endif () printInfo( "Check the abort exitcode. ${ABORT_ERR}" ) - set( NEST_EXITCODE_ABORT ${ABORT_ERR} PARENT_SCOPE ) + set( NEST_EXITCODE_ABORT 255 PARENT_SCOPE ) endfunction() ####### NEST_EXITCODE_SEGFAULT ######## @@ -70,6 +71,7 @@ function( NEST_CHECK_EXITCODE_SEGFAULT ) RESULT_VARIABLE RETURN_VALUE ERROR_QUIET OUTPUT_QUIET ) + set( SEG_ERR 255 ) if ( NOT RETURN_VALUE EQUAL 0 ) set( SEG_ERR ${RETURN_VALUE} ) endif () @@ -78,7 +80,7 @@ function( NEST_CHECK_EXITCODE_SEGFAULT ) endif () endif () printInfo( "Check the segmentation fault exitcode. ${SEG_ERR}" ) - set( NEST_EXITCODE_SEGFAULT ${SEG_ERR} PARENT_SCOPE ) + set( NEST_EXITCODE_SEGFAULT 255 PARENT_SCOPE ) endfunction() ####### HAVE_CMATH_MAKROS_IGNORED ######## diff --git a/cmake/FindCython.cmake b/cmake/FindCython.cmake index 682e443eb1..ed48f567af 100644 --- a/cmake/FindCython.cmake +++ b/cmake/FindCython.cmake @@ -1,11 +1,23 @@ -# Find the Cython compiler. 
+#.rst: # -# This code sets the following variables: +# Find ``cython`` executable. # -# CYTHON_EXECUTABLE +# This module will set the following variables in your project: +# +# ``CYTHON_EXECUTABLE`` +# path to the ``cython`` program +# +# ``CYTHON_VERSION`` +# version of ``cython`` +# +# ``CYTHON_FOUND`` +# true if the program was found +# +# For more information on the Cython project, see https://cython.org/. +# +# *Cython is a language that makes writing C extensions for the Python language +# as easy as Python itself.* # -# See also UseCython.cmake - #============================================================================= # Copyright 2011 Kitware, Inc. # @@ -22,51 +34,56 @@ # limitations under the License. #============================================================================= -# Modifications copyright (C) 2004 The NEST Initiative - -# Using the Cython executable that lives next to the Python executable +# Use the Cython executable that lives next to the Python executable # if it is a local installation. 
-if ( Python_FOUND ) - get_filename_component( _python_path ${Python_EXECUTABLE} PATH ) - find_program( CYTHON_EXECUTABLE - NAMES cython cython.bat cython3 - HINTS ${_python_path} - ) -else () - find_program( CYTHON_EXECUTABLE - NAMES cython cython.bat cython3 - ) -endif () +if(Python_EXECUTABLE) + get_filename_component(_python_path ${Python_EXECUTABLE} PATH) +elseif(Python3_EXECUTABLE) + get_filename_component(_python_path ${Python3_EXECUTABLE} PATH) +elseif(DEFINED PYTHON_EXECUTABLE) + get_filename_component(_python_path ${PYTHON_EXECUTABLE} PATH) +endif() + +if(DEFINED _python_path) + find_program(CYTHON_EXECUTABLE + NAMES cython cython.bat cython3 + HINTS ${_python_path} + DOC "path to the cython executable") +else() + find_program(CYTHON_EXECUTABLE + NAMES cython cython.bat cython3 + DOC "path to the cython executable") +endif() + +if(CYTHON_EXECUTABLE) + set(CYTHON_version_command "${CYTHON_EXECUTABLE} --version") + + execute_process(COMMAND ${CYTHON_EXECUTABLE} --version + OUTPUT_VARIABLE CYTHON_version_output + ERROR_VARIABLE CYTHON_version_error + RESULT_VARIABLE CYTHON_version_result + OUTPUT_STRIP_TRAILING_WHITESPACE + ERROR_STRIP_TRAILING_WHITESPACE) -if ( NOT CYTHON_EXECUTABLE STREQUAL "CYTHON_EXECUTABLE-NOTFOUND" ) - execute_process( - COMMAND ${CYTHON_EXECUTABLE} --version - RESULT_VARIABLE RESULT - OUTPUT_VARIABLE CYTHON_VAR_OUTPUT - ERROR_VARIABLE CYTHON_ERR_OUTPUT - OUTPUT_STRIP_TRAILING_WHITESPACE - ) - if ( RESULT EQUAL 0 ) - if ( "${CYTHON_VAR_OUTPUT}" STREQUAL "" ) - # In cython v0.29.3 the version string is written to stderr and not to stdout, as one would expect. 
- set( CYTHON_VAR_OUTPUT "${CYTHON_ERR_OUTPUT}" ) + if(NOT ${CYTHON_version_result} EQUAL 0) + cmake_path(GET CYTHON_EXECUTABLE PARENT_PATH result) + set(_error_msg "Command \"${CYTHON_version_command}\" failed with") + set(_error_msg "${_error_msg} output:\n${CYTHON_version_error}${CYTHON_version_result}${CYTHON_version_output}") + message(SEND_ERROR "${_error_msg}") + else() + if("${CYTHON_version_output}" MATCHES "^[Cc]ython version ([^,]+)") + set(CYTHON_VERSION "${CMAKE_MATCH_1}") + else() + if("${CYTHON_version_error}" MATCHES "^[Cc]ython version ([^,]+)") + set(CYTHON_VERSION "${CMAKE_MATCH_1}") + endif() endif() - string( REGEX REPLACE ".* ([0-9]+\\.[0-9]+(\\.[0-9]+)?).*" "\\1" - CYTHON_VERSION "${CYTHON_VAR_OUTPUT}" ) - else () - printError( "Cython error: ${CYTHON_ERR_OUTPUT}\nat ${CYTHON_EXECUTABLE}") - endif () + endif() +endif() -endif () +include(FindPackageHandleStandardArgs) +FIND_PACKAGE_HANDLE_STANDARD_ARGS(Cython REQUIRED_VARS CYTHON_EXECUTABLE) -include( FindPackageHandleStandardArgs ) -find_package_handle_standard_args( Cython - FOUND_VAR - CYTHON_FOUND - REQUIRED_VARS - CYTHON_EXECUTABLE - VERSION_VAR - CYTHON_VERSION - ) +mark_as_advanced(CYTHON_EXECUTABLE) -mark_as_advanced( CYTHON_EXECUTABLE ) +include(UseCython) diff --git a/cmake/ProcessOptions.cmake b/cmake/ProcessOptions.cmake index 311941e25d..10e1f6cf6f 100644 --- a/cmake/ProcessOptions.cmake +++ b/cmake/ProcessOptions.cmake @@ -192,14 +192,21 @@ function( NEST_PROCESS_STATIC_LIBRARIES ) "@loader_path/../../../nest" PARENT_SCOPE ) else () + message( STATUS "Looking for libs in install prefix: ${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR}/nest") + message( STATUS "Looking for libs in relfolder: \$ORIGIN/../../${CMAKE_INSTALL_LIBDIR}/nest") + set( CMAKE_INSTALL_RPATH # for binaries "\$ORIGIN/../${CMAKE_INSTALL_LIBDIR}/nest" # for libraries (except pynestkernel) "\$ORIGIN/../../${CMAKE_INSTALL_LIBDIR}/nest" - # for pynestkernel: origin at /lib(64)/python3.x/site-packages/nest - # while 
libs are at the root of that at /lib(64)/nest + # for wheel pynestkernel: origin at /lib/python3.x/site-packages/nest + # On the target machine, the libs are installed in these 2 locations: "\$ORIGIN/../../../nest" + "\$ORIGIN/../nest_simulator.libs" + # During wheel building, the libs are found here: + "${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR}/nest" + "\$ORIGIN/../../${CMAKE_INSTALL_LIBDIR}/nest" PARENT_SCOPE ) endif () @@ -320,8 +327,9 @@ endfunction() function( NEST_PROCESS_WITH_PYTHON ) # Find Python set( HAVE_PYTHON OFF PARENT_SCOPE ) - - if ( ${with-python} STREQUAL "ON" ) + if ( ${with-python} STREQUAL "ON") + # Localize the Python interpreter and lib/header files + find_package( Python 3.8 REQUIRED Interpreter Development.Module ) # Localize the Python interpreter and ABI find_package( Python 3.8 QUIET COMPONENTS Interpreter Development.Module ) @@ -384,6 +392,10 @@ endfunction() function( NEST_POST_PROCESS_WITH_PYTHON ) if ( Python_FOUND ) set( PYEXECDIR "${CMAKE_INSTALL_LIBDIR}/python${Python_VERSION_MAJOR}.${Python_VERSION_MINOR}/site-packages" PARENT_SCOPE ) + set( UTILITY_PYTHON "${Python_EXECUTABLE}" PARENT_SCOPE) + else () + find_package( Python 3 REQUIRED Interpreter ) + set( UTILITY_PYTHON "${Python_EXECUTABLE}" PARENT_SCOPE) endif() endfunction() @@ -602,16 +614,14 @@ function( NEST_PROCESS_MODELS ) endif () file(STRINGS "${PROJECT_SOURCE_DIR}/modelsets/${with-modelset}" BUILTIN_MODELS) endif() - - # We use python3 here directly, as some of the CI jobs don't seem to have PYTHON - # or Python_EXECUTABLE set properly. + message( STATUS "UTIL? 
${UTILITY_PYTHON}") execute_process( - COMMAND "python3" "${PROJECT_SOURCE_DIR}/build_support/generate_modelsmodule.py" + COMMAND "${UTILITY_PYTHON}" "${PROJECT_SOURCE_DIR}/build_support/generate_modelsmodule.py" "${PROJECT_SOURCE_DIR}" "${PROJECT_BINARY_DIR}" "${BUILTIN_MODELS}" WORKING_DIRECTORY "${PROJECT_SOURCE_DIR}" OUTPUT_VARIABLE MODELS_SOURCES ERROR_VARIABLE MODELS_SOURCES_ERROR - # Uncomment for debugging: ECHO_OUTPUT_VARIABLE ECHO_ERROR_VARIABLE COMMAND_ECHO STDOUT + ECHO_OUTPUT_VARIABLE ECHO_ERROR_VARIABLE COMMAND_ECHO STDOUT COMMAND_ERROR_IS_FATAL ANY ) diff --git a/cmake/UseCython.cmake b/cmake/UseCython.cmake index e29d447ea7..5edf839cbf 100644 --- a/cmake/UseCython.cmake +++ b/cmake/UseCython.cmake @@ -1,51 +1,83 @@ -# Define a function to create Cython modules. +#.rst: # -# For more information on the Cython project, see http://cython.org/. -# "Cython is a language that makes writing C extensions for the Python language -# as easy as Python itself." +# The following functions are defined: # -# This file defines a CMake function to build a Cython Python module. -# To use it, first include this file. +# .. cmake:command:: add_cython_target # -# include( UseCython ) +# Create a custom rule to generate the source code for a Python extension module +# using cython. # -# Then call cython_add_module to create a module. +# add_cython_target( [] +# [EMBED_MAIN] +# [C | CXX] +# [PY2 | PY3] +# [OUTPUT_VAR ]) # -# cython_add_module( ... ) +# ```` is the name of the new target, and ```` +# is the path to a cython source file. Note that, despite the name, no new +# targets are created by this function. Instead, see ``OUTPUT_VAR`` for +# retrieving the path to the generated source for subsequent targets. # -# To create a standalone executable, the function +# If only ```` is provided, and it ends in the ".pyx" extension, then it +# is assumed to be the ````. The name of the input without the +# extension is used as the target name. 
If only ```` is provided, and it +# does not end in the ".pyx" extension, then the ```` is assumed to +# be ``.pyx``. # -# cython_add_standalone_executable( [MAIN_MODULE src1] ... ) +# The Cython include search path is amended with any entries found in the +# ``INCLUDE_DIRECTORIES`` property of the directory containing the +# ```` file. Use ``include_directories`` to add to the Cython +# include search path. # -# To avoid dependence on Python, set the PYTHON_LIBRARY cache variable to point -# to a static library. If a MAIN_MODULE source is specified, -# the "if __name__ == '__main__':" from that module is used as the C main() method -# for the executable. If MAIN_MODULE, the source with the same basename as -# is assumed to be the MAIN_MODULE. +# Options: # -# Where is the name of the resulting Python module and -# ... are source files to be compiled into the module, e.g. *.pyx, -# *.py, *.c, *.cxx, etc. A CMake target is created with name . This can -# be used for target_link_libraries(), etc. +# ``EMBED_MAIN`` +# Embed a main() function in the generated output (for stand-alone +# applications that initialize their own Python runtime). # -# The sample paths set with the CMake include_directories() command will be used -# for include directories to search for *.pxd when running the Cython complire. +# ``C | CXX`` +# Force the generation of either a C or C++ file. By default, a C file is +# generated, unless the C language is not enabled for the project; in this +# case, a C++ file is generated by default. # -# Cache variables that effect the behavior include: +# ``PY2 | PY3`` +# Force compilation using either Python-2 or Python-3 syntax and code +# semantics. By default, Python-2 syntax and semantics are used if the major +# version of Python found is 2. Otherwise, Python-3 syntax and semantics are +# used. 
# -# CYTHON_ANNOTATE -# CYTHON_NO_DOCSTRINGS -# CYTHON_FLAGS +# ``OUTPUT_VAR `` +# Set the variable ```` in the parent scope to the path to the +# generated source file. By default, ```` is used as the output +# variable name. # -# Source file properties that effect the build process are +# Defined variables: # -# CYTHON_IS_CXX +# ```` +# The path of the generated source file. # -# If this is set of a *.pyx file with CMake set_source_files_properties() -# command, the file will be compiled as a C++ file. +# Cache variables that affect the behavior include: +# +# ``CYTHON_ANNOTATE`` +# Whether to create an annotated .html file when compiling. +# +# ``CYTHON_FLAGS`` +# Additional flags to pass to the Cython compiler. +# +# Example usage +# ^^^^^^^^^^^^^ +# +# .. code-block:: cmake +# +# find_package(Cython) +# +# # Note: In this case, either one of these arguments may be omitted; their +# # value would have been inferred from that of the other. +# add_cython_target(cy_code cy_code.pyx) +# +# add_library(cy_code MODULE ${cy_code}) +# target_link_libraries(cy_code ...) # -# See also FindCython.cmake - #============================================================================= # Copyright 2011 Kitware, Inc. # @@ -62,265 +94,290 @@ # limitations under the License. #============================================================================= -# Modifications copyright (C) 2004 The NEST Initiative - # Configuration options. -set( CYTHON_ANNOTATE OFF - CACHE BOOL "Create an annotated .html file when compiling *.pyx." ) -set( CYTHON_NO_DOCSTRINGS OFF - CACHE BOOL "Strip docstrings from the compiled module." ) -set( CYTHON_FLAGS "" CACHE STRING - "Extra flags to the cython compiler." ) -mark_as_advanced( CYTHON_ANNOTATE CYTHON_NO_DOCSTRINGS CYTHON_FLAGS ) - -# The following function is vendored from the deprecated file FindPythonLibs.cmake. -# PYTHON_ADD_MODULE( src1 src2 ... srcN) is used to build modules for python. 
-# PYTHON_WRITE_MODULES_HEADER() writes a header file you can include -# in your sources to initialize the static python modules -function(PYTHON_ADD_MODULE _NAME ) - get_property(_TARGET_SUPPORTS_SHARED_LIBS - GLOBAL PROPERTY TARGET_SUPPORTS_SHARED_LIBS) - option(PYTHON_ENABLE_MODULE_${_NAME} "Add module ${_NAME}" TRUE) - option(PYTHON_MODULE_${_NAME}_BUILD_SHARED - "Add module ${_NAME} shared" ${_TARGET_SUPPORTS_SHARED_LIBS}) - - # Mark these options as advanced - mark_as_advanced(PYTHON_ENABLE_MODULE_${_NAME} - PYTHON_MODULE_${_NAME}_BUILD_SHARED) - - if(PYTHON_ENABLE_MODULE_${_NAME}) - if(PYTHON_MODULE_${_NAME}_BUILD_SHARED) - set(PY_MODULE_TYPE MODULE) +set(CYTHON_ANNOTATE OFF + CACHE BOOL "Create an annotated .html file when compiling *.pyx.") + +set(CYTHON_FLAGS "" CACHE STRING + "Extra flags to the cython compiler.") +mark_as_advanced(CYTHON_ANNOTATE CYTHON_FLAGS) + +set(CYTHON_CXX_EXTENSION "cxx") +set(CYTHON_C_EXTENSION "c") + +get_property(languages GLOBAL PROPERTY ENABLED_LANGUAGES) + +function(add_cython_target _name) + set(options EMBED_MAIN C CXX PY2 PY3) + set(options1 OUTPUT_VAR) + cmake_parse_arguments(_args "${options}" "${options1}" "" ${ARGN}) + + list(GET _args_UNPARSED_ARGUMENTS 0 _arg0) + + # if provided, use _arg0 as the input file path + if(_arg0) + set(_source_file ${_arg0}) + + # otherwise, must determine source file from name, or vice versa + else() + get_filename_component(_name_ext "${_name}" EXT) + + # if extension provided, _name is the source file + if(_name_ext) + set(_source_file ${_name}) + get_filename_component(_name "${_source_file}" NAME_WE) + + # otherwise, assume the source file is ${_name}.pyx else() - set(PY_MODULE_TYPE STATIC) - set_property(GLOBAL APPEND PROPERTY PY_STATIC_MODULES_LIST ${_NAME}) + set(_source_file ${_name}.pyx) endif() + endif() - set_property(GLOBAL APPEND PROPERTY PY_MODULES_LIST ${_NAME}) - add_library(${_NAME} ${PY_MODULE_TYPE} ${ARGN}) -# target_link_libraries(${_NAME} ${PYTHON_LIBRARIES}) + 
set(_embed_main FALSE) - if(PYTHON_MODULE_${_NAME}_BUILD_SHARED) - set_target_properties(${_NAME} PROPERTIES PREFIX "${PYTHON_MODULE_PREFIX}") - if(WIN32 AND NOT CYGWIN) - set_target_properties(${_NAME} PROPERTIES SUFFIX ".pyd") - endif() - endif() + if("C" IN_LIST languages) + set(_output_syntax "C") + elseif("CXX" IN_LIST languages) + set(_output_syntax "CXX") + else() + message(FATAL_ERROR "Either C or CXX must be enabled to use Cython") + endif() + if(_args_EMBED_MAIN) + set(_embed_main TRUE) endif() -endfunction() -set( CYTHON_CXX_EXTENSION "cxx" ) -set( CYTHON_C_EXTENSION "c" ) - -# Create a *.c or *.cxx file from a *.pyx file. -# Input the generated file basename. The generate file will put into the variable -# placed in the "generated_file" argument. Finally all the *.py and *.pyx files. -function( compile_pyx _name generated_file ) - # Default to assuming all files are C. - set( cxx_arg "" ) - set( extension ${CYTHON_C_EXTENSION} ) - set( pyx_lang "C" ) - set( comment "Compiling Cython C source for ${_name}..." ) - - set( cython_include_directories "" ) - set( pxd_dependencies "" ) - set( c_header_dependencies "" ) - set( pyx_locations "" ) - - foreach ( pyx_file ${ARGN} ) - get_filename_component( pyx_file_basename "${pyx_file}" NAME_WE ) - - # Determine if it is a C or C++ file. - get_source_file_property( property_is_cxx ${pyx_file} CYTHON_IS_CXX ) - if ( ${property_is_cxx} ) - set( cxx_arg "--cplus" ) - set( extension ${CYTHON_CXX_EXTENSION} ) - set( pyx_lang "CXX" ) - set( comment "Compiling Cython CXX source for ${_name}..." ) - endif () - - # Get the include directories. - get_source_file_property( pyx_location ${pyx_file} LOCATION ) - get_filename_component( pyx_path ${pyx_location} PATH ) - get_directory_property( cmake_include_directories DIRECTORY ${pyx_path} INCLUDE_DIRECTORIES ) - list( APPEND cython_include_directories ${cmake_include_directories} ) - list( APPEND pyx_locations "${pyx_location}" ) - - # Determine dependencies. 
- # Add the pxd file will the same name as the given pyx file. - unset( corresponding_pxd_file CACHE ) - find_file( corresponding_pxd_file ${pyx_file_basename}.pxd - PATHS "${pyx_path}" ${cmake_include_directories} - NO_DEFAULT_PATH ) - if ( corresponding_pxd_file ) - list( APPEND pxd_dependencies "${corresponding_pxd_file}" ) - endif () - - # pxd files to check for additional dependencies. - set( pxds_to_check "${pyx_file}" "${pxd_dependencies}" ) - set( pxds_checked "" ) - set( number_pxds_to_check 1 ) - while ( ${number_pxds_to_check} GREATER 0 ) - foreach ( pxd ${pxds_to_check} ) - list( APPEND pxds_checked "${pxd}" ) - list( REMOVE_ITEM pxds_to_check "${pxd}" ) - - # check for C header dependencies - file( STRINGS "${pxd}" extern_from_statements - REGEX "cdef[ ]+extern[ ]+from.*$" ) - foreach ( statement ${extern_from_statements} ) - # Had trouble getting the quote in the regex - string( REGEX REPLACE "cdef[ ]+extern[ ]+from[ ]+[\"]([^\"]+)[\"].*" "\\1" header "${statement}" ) - unset( header_location CACHE ) - find_file( header_location ${header} PATHS ${cmake_include_directories} ) - if ( header_location ) - list( FIND c_header_dependencies "${header_location}" header_idx ) - if ( ${header_idx} LESS 0 ) - list( APPEND c_header_dependencies "${header_location}" ) - endif () - endif () - endforeach () - - # check for pxd dependencies - - # Look for cimport statements. - set( module_dependencies "" ) - file( STRINGS "${pxd}" cimport_statements REGEX cimport ) - foreach ( statement ${cimport_statements} ) - if ( ${statement} MATCHES from ) - string( REGEX REPLACE "from[ ]+([^ ]+).*" "\\1" module "${statement}" ) - else () - string( REGEX REPLACE "cimport[ ]+([^ ]+).*" "\\1" module "${statement}" ) - endif () - list( APPEND module_dependencies ${module} ) - endforeach () - list( REMOVE_DUPLICATES module_dependencies ) - # Add the module to the files to check, if appropriate. 
- foreach ( module ${module_dependencies} ) - unset( pxd_location CACHE ) - find_file( pxd_location ${module}.pxd - PATHS "${pyx_path}" ${cmake_include_directories} NO_DEFAULT_PATH ) - if ( pxd_location ) - list( FIND pxds_checked ${pxd_location} pxd_idx ) - if ( ${pxd_idx} LESS 0 ) - list( FIND pxds_to_check ${pxd_location} pxd_idx ) - if ( ${pxd_idx} LESS 0 ) - list( APPEND pxds_to_check ${pxd_location} ) - list( APPEND pxd_dependencies ${pxd_location} ) - endif () # if it is not already going to be checked - endif () # if it has not already been checked - endif () # if pxd file can be found - endforeach () # for each module dependency discovered - endforeach () # for each pxd file to check - list( LENGTH pxds_to_check number_pxds_to_check ) - endwhile () - endforeach () # pyx_file + if(_args_C) + set(_output_syntax "C") + endif() - # Set additional flags. - if ( CYTHON_ANNOTATE ) - set( annotate_arg "--annotate" ) - endif () + if(_args_CXX) + set(_output_syntax "CXX") + endif() + + # Doesn't select an input syntax - Cython + # defaults to 2 for Cython 2 and 3 for Cython 3 + set(_input_syntax "default") + + if(_args_PY2) + set(_input_syntax "PY2") + endif() + + if(_args_PY3) + set(_input_syntax "PY3") + endif() - if ( CYTHON_NO_DOCSTRINGS ) - set( no_docstrings_arg "--no-docstrings" ) - endif () + set(embed_arg "") + if(_embed_main) + set(embed_arg "--embed") + endif() + + set(cxx_arg "") + set(extension "c") + if(_output_syntax STREQUAL "CXX") + set(cxx_arg "--cplus") + set(extension "cxx") + endif() - if ( "${CMAKE_BUILD_TYPE}" STREQUAL "Debug" OR - "${CMAKE_BUILD_TYPE}" STREQUAL "RelWithDebInfo" ) - set( cython_debug_arg "--gdb" ) - endif () + set(py_version_arg "") + if(_input_syntax STREQUAL "PY2") + set(py_version_arg "-2") + elseif(_input_syntax STREQUAL "PY3") + set(py_version_arg "-3") + endif() - # Set version to 3 for Python 3 - set( version_arg "-3" ) + set(generated_file "${CMAKE_CURRENT_BINARY_DIR}/${_name}.${extension}") + 
set_source_files_properties(${generated_file} PROPERTIES GENERATED TRUE) + + set(_output_var ${_name}) + if(_args_OUTPUT_VAR) + set(_output_var ${_args_OUTPUT_VAR}) + endif() + set(${_output_var} ${generated_file} PARENT_SCOPE) + + file(RELATIVE_PATH generated_file_relative + ${CMAKE_BINARY_DIR} ${generated_file}) + + set(comment "Generating ${_output_syntax} source ${generated_file_relative}") + set(cython_include_directories "") + set(pxd_dependencies "") + set(c_header_dependencies "") + + # Get the include directories. + get_source_file_property(pyx_location ${_source_file} LOCATION) + get_filename_component(pyx_path ${pyx_location} PATH) + get_directory_property(cmake_include_directories + DIRECTORY ${pyx_path} + INCLUDE_DIRECTORIES) + list(APPEND cython_include_directories ${cmake_include_directories}) + + # Determine dependencies. + # Add the pxd file with the same basename as the given pyx file. + get_filename_component(pyx_file_basename ${_source_file} NAME_WE) + unset(corresponding_pxd_file CACHE) + find_file(corresponding_pxd_file ${pyx_file_basename}.pxd + PATHS "${pyx_path}" ${cmake_include_directories} + NO_DEFAULT_PATH) + if(corresponding_pxd_file) + list(APPEND pxd_dependencies "${corresponding_pxd_file}") + endif() + + # pxd files to check for additional dependencies + set(pxds_to_check "${_source_file}" "${pxd_dependencies}") + set(pxds_checked "") + set(number_pxds_to_check 1) + while(number_pxds_to_check GREATER 0) + foreach(pxd ${pxds_to_check}) + list(APPEND pxds_checked "${pxd}") + list(REMOVE_ITEM pxds_to_check "${pxd}") + + # look for C headers + file(STRINGS "${pxd}" extern_from_statements + REGEX "cdef[ ]+extern[ ]+from.*$") + foreach(statement ${extern_from_statements}) + # Had trouble getting the quote in the regex + string(REGEX REPLACE + "cdef[ ]+extern[ ]+from[ ]+[\"]([^\"]+)[\"].*" "\\1" + header "${statement}") + unset(header_location CACHE) + find_file(header_location ${header} PATHS ${cmake_include_directories}) + 
if(header_location) + list(FIND c_header_dependencies "${header_location}" header_idx) + if(${header_idx} LESS 0) + list(APPEND c_header_dependencies "${header_location}") + endif() + endif() + endforeach() + + # check for pxd dependencies + # Look for cimport statements. + set(module_dependencies "") + file(STRINGS "${pxd}" cimport_statements REGEX cimport) + foreach(statement ${cimport_statements}) + if(${statement} MATCHES from) + string(REGEX REPLACE + "from[ ]+([^ ]+).*" "\\1" + module "${statement}") + else() + string(REGEX REPLACE + "cimport[ ]+([^ ]+).*" "\\1" + module "${statement}") + endif() + list(APPEND module_dependencies ${module}) + endforeach() + + # check for pxi dependencies + # Look for include statements. + set(include_dependencies "") + file(STRINGS "${pxd}" include_statements REGEX include) + foreach(statement ${include_statements}) + string(REGEX REPLACE + "include[ ]+[\"]([^\"]+)[\"].*" "\\1" + module "${statement}") + list(APPEND include_dependencies ${module}) + endforeach() + + list(REMOVE_DUPLICATES module_dependencies) + list(REMOVE_DUPLICATES include_dependencies) + + # Add modules to the files to check, if appropriate. + foreach(module ${module_dependencies}) + unset(pxd_location CACHE) + find_file(pxd_location ${module}.pxd + PATHS "${pyx_path}" ${cmake_include_directories} + NO_DEFAULT_PATH) + if(pxd_location) + list(FIND pxds_checked ${pxd_location} pxd_idx) + if(${pxd_idx} LESS 0) + list(FIND pxds_to_check ${pxd_location} pxd_idx) + if(${pxd_idx} LESS 0) + list(APPEND pxds_to_check ${pxd_location}) + list(APPEND pxd_dependencies ${pxd_location}) + endif() # if it is not already going to be checked + endif() # if it has not already been checked + endif() # if pxd file can be found + endforeach() # for each module dependency discovered + + # Add includes to the files to check, if appropriate. 
+ foreach(_include ${include_dependencies}) + unset(pxi_location CACHE) + find_file(pxi_location ${_include} + PATHS "${pyx_path}" ${cmake_include_directories} + NO_DEFAULT_PATH) + if(pxi_location) + list(FIND pxds_checked ${pxi_location} pxd_idx) + if(${pxd_idx} LESS 0) + list(FIND pxds_to_check ${pxi_location} pxd_idx) + if(${pxd_idx} LESS 0) + list(APPEND pxds_to_check ${pxi_location}) + list(APPEND pxd_dependencies ${pxi_location}) + endif() # if it is not already going to be checked + endif() # if it has not already been checked + endif() # if include file can be found + endforeach() # for each include dependency discovered + endforeach() # for each include file to check + + list(LENGTH pxds_to_check number_pxds_to_check) + endwhile() + + # Set additional flags. + set(annotate_arg "") + if(CYTHON_ANNOTATE) + set(annotate_arg "--annotate") + endif() + + set(cython_debug_arg "") + set(line_directives_arg "") + if(CMAKE_BUILD_TYPE STREQUAL "Debug" OR + CMAKE_BUILD_TYPE STREQUAL "RelWithDebInfo") + set(cython_debug_arg "--gdb") + set(line_directives_arg "--line-directives") + endif() # Include directory arguments. - list( REMOVE_DUPLICATES cython_include_directories ) - set( include_directory_arg "" ) - foreach ( _include_dir ${cython_include_directories} ) - set( include_directory_arg ${include_directory_arg} "-I" "${_include_dir}" ) - endforeach () + list(REMOVE_DUPLICATES cython_include_directories) + set(include_directory_arg "") + foreach(_include_dir ${cython_include_directories}) + set(include_directory_arg + ${include_directory_arg} "--include-dir" "${_include_dir}") + endforeach() - # Determining generated file name. 
- set( _generated_file "${CMAKE_CURRENT_BINARY_DIR}/${_name}.${extension}" ) - set_source_files_properties( ${_generated_file} PROPERTIES GENERATED TRUE ) - set( ${generated_file} ${_generated_file} PARENT_SCOPE ) + list(REMOVE_DUPLICATES pxd_dependencies) + list(REMOVE_DUPLICATES c_header_dependencies) - list( REMOVE_DUPLICATES pxd_dependencies ) - list( REMOVE_DUPLICATES c_header_dependencies ) + string(REGEX REPLACE " " ";" CYTHON_FLAGS_LIST "${CYTHON_FLAGS}") # Add the command to run the compiler. - add_custom_command( OUTPUT ${_generated_file} - COMMAND ${CYTHON_EXECUTABLE} - ARGS ${cxx_arg} ${include_directory_arg} ${version_arg} - ${annotate_arg} ${no_docstrings_arg} ${cython_debug_arg} ${CYTHON_FLAGS} - --output-file ${_generated_file} ${pyx_locations} - DEPENDS ${pyx_locations} ${pxd_dependencies} - IMPLICIT_DEPENDS ${pyx_lang} ${c_header_dependencies} - COMMENT ${comment} - ) + add_custom_command(OUTPUT ${generated_file} + COMMAND ${CYTHON_EXECUTABLE} + ARGS ${cxx_arg} ${include_directory_arg} ${py_version_arg} + ${embed_arg} ${annotate_arg} ${cython_debug_arg} + ${line_directives_arg} ${CYTHON_FLAGS_LIST} ${pyx_location} + --output-file ${generated_file} + DEPENDS ${_source_file} + ${pxd_dependencies} + IMPLICIT_DEPENDS ${_output_syntax} + ${c_header_dependencies} + COMMENT ${comment}) + + # NOTE(opadron): I thought about making a proper target, but after trying it + # out, I decided that it would be far too convenient to use the same name as + # the target for the extension module (e.g.: for single-file modules): + # + # ... + # add_cython_target(_module.pyx) + # add_library(_module ${_module}) + # ... + # + # The above example would not be possible since the "_module" target name + # would already be taken by the cython target. Since I can't think of a + # reason why someone would need the custom target instead of just using the + # generated file directly, I decided to leave this commented out. 
+ # + # add_custom_target(${_name} DEPENDS ${generated_file}) # Remove their visibility to the user. - set( corresponding_pxd_file "" CACHE INTERNAL "" ) - set( header_location "" CACHE INTERNAL "" ) - set( pxd_location "" CACHE INTERNAL "" ) -endfunction() - -# cython_add_module( src1 src2 ... srcN ) -# Build the Cython Python module. -function( cython_add_module _name ) - set( pyx_module_sources "" ) - set( other_module_sources "" ) - foreach ( _file ${ARGN} ) - if ( ${_file} MATCHES ".*\\.py[x]?$" ) - list( APPEND pyx_module_sources ${_file} ) - else () - list( APPEND other_module_sources ${_file} ) - endif () - endforeach () - compile_pyx( ${_name} generated_file ${pyx_module_sources} ) - include_directories( ${Python_INCLUDE_DIRS} ) - python_add_module( ${_name} ${generated_file} ${other_module_sources} ) - if ( APPLE ) - set_target_properties( ${_name} PROPERTIES LINK_FLAGS "-undefined dynamic_lookup" ) - else () - target_link_libraries( ${_name} ${Python_LIBRARIES} ) - endif () -endfunction() - -include( CMakeParseArguments ) -# cython_add_standalone_executable( _name [MAIN_MODULE src3.py] src1 src2 ... srcN ) -# Creates a standalone executable the given sources. 
-function( cython_add_standalone_executable _name ) - set( pyx_module_sources "" ) - set( other_module_sources "" ) - set( main_module "" ) - cmake_parse_arguments( cython_arguments "" "MAIN_MODULE" "" ${ARGN} ) - include_directories( ${Python_INCLUDE_DIRS} ) - foreach ( _file ${cython_arguments_UNPARSED_ARGUMENTS} ) - if ( ${_file} MATCHES ".*\\.py[x]?$" ) - get_filename_component( _file_we ${_file} NAME_WE ) - if ( "${_file_we}" STREQUAL "${_name}" ) - set( main_module "${_file}" ) - elseif ( NOT "${_file}" STREQUAL "${cython_arguments_MAIN_MODULE}" ) - set( PYTHON_MODULE_${_file_we}_static_BUILD_SHARED OFF ) - compile_pyx( "${_file_we}_static" generated_file "${_file}" ) - list( APPEND pyx_module_sources "${generated_file}" ) - endif () - else () - list( APPEND other_module_sources ${_file} ) - endif () - endforeach () - - if ( cython_arguments_MAIN_MODULE ) - set( main_module ${cython_arguments_MAIN_MODULE} ) - endif () - if ( NOT main_module ) - printError( "main module not found." ) - endif () - get_filename_component( main_module_we "${main_module}" NAME_WE ) - set( CYTHON_FLAGS ${CYTHON_FLAGS} --embed ) - compile_pyx( "${main_module_we}_static" generated_file ${main_module} ) - add_executable( ${_name} ${generated_file} ${pyx_module_sources} ${other_module_sources} ) - target_link_libraries( ${_name} ${Python_LIBRARIES} ${pyx_module_libs} ) + set(corresponding_pxd_file "" CACHE INTERNAL "") + set(header_location "" CACHE INTERNAL "") + set(pxd_location "" CACHE INTERNAL "") endfunction() diff --git a/doc/CMakeLists.txt b/doc/CMakeLists.txt deleted file mode 100644 index 3346b6c456..0000000000 --- a/doc/CMakeLists.txt +++ /dev/null @@ -1,63 +0,0 @@ -# doc/CMakeLists.txt -# -# This file is part of NEST. 
-# -# Copyright (C) 2004 The NEST Initiative -# -# NEST is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 2 of the License, or -# (at your option) any later version. -# -# NEST is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with NEST. If not, see . - -if( BUILD_DOCS ) - # If we hit this, any part of the documentation was configured to be built. - # The top-level 'docs' target will contain all sub-documentations such as `sphinxdocs` - # and `doxygendocs`. Using `ALL` we make it run on `make install` as well. - add_custom_target( docs ALL ) -endif() - -# Determine in or out of tree building -if ( "${PROJECT_SOURCE_DIR}" STREQUAL "${PROJECT_BINARY_DIR}" ) - set( OUT_OF_TREE_BUILD "False" ) -else () - set( OUT_OF_TREE_BUILD "True" ) -endif () - - -if ( BUILD_SPHINX_DOCS ) - message( STATUS "Configuring Sphinx documentation" ) - set( _SPHINX_SOURCE_DIR "${PROJECT_SOURCE_DIR}/doc/htmldoc" ) - set( _SPHINX_BUILD_DIR "${CMAKE_CURRENT_BINARY_DIR}/_build/html" ) - add_custom_target( sphinxdocs - WORKING_DIRECTORY ${_SPHINX_SOURCE_DIR} - COMMAND ${Python_EXECUTABLE} clean_source_dirs.py - COMMAND ${SPHINX_EXECUTABLE} -b html . 
${_SPHINX_BUILD_DIR} - COMMAND ${Python_EXECUTABLE} resolve_includes.py ${SPHINX_BUILD_DIR}/models - ) - - add_dependencies( docs sphinxdocs ) - - install( DIRECTORY ${_SPHINX_BUILD_DIR} - DESTINATION ${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_DOCDIR} - OPTIONAL - ) - install( DIRECTORY logos - DESTINATION ${CMAKE_INSTALL_DOCDIR} - ) -endif () - -if ( BUILD_DOXYGEN_DOCS ) - add_custom_target( doxygendocs - COMMAND ${DOXYGEN_EXECUTABLE} "${CMAKE_CURRENT_BINARY_DIR}/fulldoc.conf" - WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}" - ) - add_dependencies( docs doxygendocs ) -endif () diff --git a/doc/userdoc/release_notes/v3.2/index.rst b/doc/userdoc/release_notes/v3.2/index.rst new file mode 100644 index 0000000000..3f975dba6b --- /dev/null +++ b/doc/userdoc/release_notes/v3.2/index.rst @@ -0,0 +1,22 @@ +All about NEST 3.2 +================== + +This page contains a summary of all breaking and non-breaking changes +from NEST 3.1 to NEST 3.2. In addition to the `auto-generated release +notes on GitHub `_, +this page also contains transition information that helps you to +update your simulation scripts when you come from an older version of +NEST. + +If you transition from a version earlier than 3.1, please see our +selection of earlier :doc:`transition guides `. + +.. contents:: On this page you'll find + :local: + :depth: 1 + +ConnPlotter +~~~~~~~~~~~ +All files related to ConnPlotter have been removed from NEST 3.2 and +moved to a separate repository `connplotter _`. \ No newline at end of file diff --git a/examples/BrodyHopfield.py b/examples/BrodyHopfield.py index 66c1357a91..5b5453225a 100755 --- a/examples/BrodyHopfield.py +++ b/examples/BrodyHopfield.py @@ -46,8 +46,7 @@ # First, we import all necessary modules for simulation, analysis, and plotting. 
import matplotlib.pyplot as plt -import nest -import nest.raster_plot +import nest.plot.raster_plot ############################################################################### # Second, the simulation parameters are assigned to variables. diff --git a/examples/brunel_alpha_nest.py b/examples/brunel_alpha_nest.py index cae0a6ca1e..2cac1d0b17 100755 --- a/examples/brunel_alpha_nest.py +++ b/examples/brunel_alpha_nest.py @@ -51,8 +51,7 @@ import time import matplotlib.pyplot as plt -import nest -import nest.raster_plot +import nest.plot.raster_plot import numpy as np import scipy.special as sp diff --git a/examples/brunel_delta_nest.py b/examples/brunel_delta_nest.py index 6d8d27bd69..694e38f688 100755 --- a/examples/brunel_delta_nest.py +++ b/examples/brunel_delta_nest.py @@ -47,8 +47,7 @@ import time import matplotlib.pyplot as plt -import nest -import nest.raster_plot +import nest.plot.raster_plot nest.ResetKernel() diff --git a/examples/brunel_exp_multisynapse_nest.py b/examples/brunel_exp_multisynapse_nest.py index bcd5eec8ca..31727211b4 100644 --- a/examples/brunel_exp_multisynapse_nest.py +++ b/examples/brunel_exp_multisynapse_nest.py @@ -59,8 +59,7 @@ import time import matplotlib.pyplot as plt -import nest -import nest.raster_plot +import nest.plot.raster_plot nest.ResetKernel() diff --git a/examples/gif_population.py b/examples/gif_population.py index 9f73dbccdb..9a0980f5ab 100644 --- a/examples/gif_population.py +++ b/examples/gif_population.py @@ -51,8 +51,7 @@ # Import all necessary modules for simulation and plotting. 
import matplotlib.pyplot as plt -import nest -import nest.raster_plot +import nest.plot.raster_plot nest.ResetKernel() diff --git a/examples/hpc_benchmark.py b/examples/hpc_benchmark.py index 32acf595a3..f7ce6e1cad 100644 --- a/examples/hpc_benchmark.py +++ b/examples/hpc_benchmark.py @@ -95,7 +95,7 @@ import time import nest -import nest.raster_plot +import nest.plot.raster_plot import numpy as np import scipy.special as sp diff --git a/examples/repeated_stimulation.py b/examples/repeated_stimulation.py index cd2fe44513..977ff39124 100644 --- a/examples/repeated_stimulation.py +++ b/examples/repeated_stimulation.py @@ -45,8 +45,7 @@ import matplotlib.pyplot as plt -import nest -import nest.raster_plot +import nest.plot.raster_plot ############################################################################### # Second, we set the parameters so the ``poisson_generator`` generates 1000 diff --git a/pynest/CMakeLists.txt b/pynest/CMakeLists.txt index a172283bf4..616aa6a02e 100644 --- a/pynest/CMakeLists.txt +++ b/pynest/CMakeLists.txt @@ -17,24 +17,26 @@ # You should have received a copy of the GNU General Public License # along with NEST. If not, see . -if ( HAVE_PYTHON ) +find_package(Cython) - # We use python3 here directly, as some of the CI jobs don't seem to have PYTHON - # or Python_EXECUTABLE set properly. 
+if ( HAVE_PYTHON ) execute_process( - COMMAND "python3" "${PROJECT_SOURCE_DIR}/pynest/generate_exception_header.py" + COMMAND "${UTILITY_PYTHON}" "${PROJECT_SOURCE_DIR}/pynest/generate_exception_header.py" "${PROJECT_SOURCE_DIR}" "${PROJECT_BINARY_DIR}" WORKING_DIRECTORY "${PROJECT_SOURCE_DIR}" - # Uncomment for debugging: ECHO_OUTPUT_VARIABLE ECHO_ERROR_VARIABLE COMMAND_ECHO STDOUT + ECHO_OUTPUT_VARIABLE ECHO_ERROR_VARIABLE COMMAND_ECHO STDOUT COMMAND_ERROR_IS_FATAL ANY ) if ( CYTHON_FOUND ) - include( UseCython ) - set_source_files_properties( nestkernel_api.pyx PROPERTIES CYTHON_IS_CXX TRUE ) - cython_add_module( nestkernel_api nestkernel_api.pyx ) + add_cython_target ( nestkernel_api nestkernel_api.pyx CXX PY3) + add_library(nestkernel_api MODULE ${nestkernel_api}) + set_target_properties(nestkernel_api PROPERTIES PREFIX "${PYTHON_MODULE_PREFIX}") else () - message( FATAL_ERROR "Building PyNEST requires Cython." ) + message( + FATAL_ERROR + "Building the PyNEST Python bindings requires Cython." + ) endif () # TODO PYNEST NG: Add models, once the refacoring of the module system is done. @@ -66,10 +68,8 @@ if ( HAVE_PYTHON ) -D_IS_PYNEST ) - install( DIRECTORY nest/ ${PROJECT_BINARY_DIR}/pynest/nest/ - DESTINATION ${CMAKE_INSTALL_PREFIX}/${PYEXECDIR}/nest - PATTERN "versionchecker.py.in" EXCLUDE - ) + # Install the PyNEST libraries into the PyNEST build folder, for Python packagers + # to pick up. + install( TARGETS nestkernel_api DESTINATION pynest/nest ) - install( TARGETS nestkernel_api DESTINATION ${PYEXECDIR}/nest/ ) endif () diff --git a/examples/CMakeLists.txt b/pynest/nest/plot/__init__.py similarity index 73% rename from examples/CMakeLists.txt rename to pynest/nest/plot/__init__.py index b1c834a050..ea692cf2ef 100644 --- a/examples/CMakeLists.txt +++ b/pynest/nest/plot/__init__.py @@ -1,22 +1,25 @@ -# CMakeLists.txt -# -# This file is part of NEST. 
-# -# Copyright (C) 2004 The NEST Initiative -# -# NEST is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 2 of the License, or -# (at your option) any later version. -# -# NEST is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with NEST. If not, see . - -install( FILES run_examples.sh - DESTINATION ${CMAKE_INSTALL_DOCDIR} - ) +# -*- coding: utf-8 -*- +# +# __init__.py +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . + +from .visualization import plot_network +from .raster_plot import extract_events, from_device, from_file + +__all__ = ["extract_events", "from_device", "from_file", "plot_network"] diff --git a/pynest/nest/plot/raster_plot.py b/pynest/nest/plot/raster_plot.py new file mode 100644 index 0000000000..adf64a2ce0 --- /dev/null +++ b/pynest/nest/plot/raster_plot.py @@ -0,0 +1,338 @@ +# -*- coding: utf-8 -*- +# +# raster_plot.py +# +# This file is part of NEST. 
+# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . + +""" Functions for raster plotting.""" + +import nest +import numpy + +__all__ = ["extract_events", "from_data", "from_device", "from_file", "from_file_numpy", "from_file_pandas"] + + +def extract_events(data, time=None, sel=None): + """Extract all events within a given time interval. + + Both time and sel may be used at the same time such that all + events are extracted for which both conditions are true. + + Parameters + ---------- + data : list + Matrix such that + data[:,0] is a vector of all node_ids and + data[:,1] a vector with the corresponding time stamps. + time : list, optional + List with at most two entries such that + time=[t_max] extracts all events with t< t_max + time=[t_min, t_max] extracts all events with t_min <= t < t_max + sel : list, optional + List of node_ids such that + sel=[node_id1, ... , node_idn] extracts all events from these node_ids. + All others are discarded. + + Returns + ------- + numpy.array + List of events as (node_id, t) tuples + """ + val = [] + + if time: + t_max = time[-1] + if len(time) > 1: + t_min = time[0] + else: + t_min = 0 + + for v in data: + t = v[1] + node_id = v[0] + if time and (t < t_min or t >= t_max): + continue + if not sel or node_id in sel: + val.append(v) + + return numpy.array(val) + + +def from_data(data, sel=None, **kwargs): + """Plot raster plot from data array. 
+ + Parameters + ---------- + data : list + Matrix such that + data[:,0] is a vector of all node_ids and + data[:,1] a vector with the corresponding time stamps. + sel : list, optional + List of node_ids such that + sel=[node_id1, ... , node_idn] extracts all events from these node_ids. + All others are discarded. + kwargs: + Parameters passed to _make_plot + """ + if len(data) == 0: + raise nest.NESTError("No data to plot.") + ts = data[:, 1] + d = extract_events(data, sel=sel) + ts1 = d[:, 1] + node_ids = d[:, 0] + + return _make_plot(ts, ts1, node_ids, data[:, 0], **kwargs) + + +def from_file(fname, **kwargs): + """Plot raster from file. + + Parameters + ---------- + fname : str or tuple(str) or list(str) + File name or list of file names + + If a list of files is given, the data from them is concatenated as if + it had been stored in a single file - useful when MPI is enabled and + data is logged separately for each MPI rank, for example. + kwargs: + Parameters passed to _make_plot + """ + if isinstance(fname, str): + fname = [fname] + + if isinstance(fname, (list, tuple)): + try: + global pandas + pandas = __import__("pandas") + from_file_pandas(fname, **kwargs) + except ImportError: + from_file_numpy(fname, **kwargs) + else: + print("fname should be one of str/list(str)/tuple(str).") + + +def from_file_pandas(fname, **kwargs): + """Use pandas.""" + data = None + for f in fname: + dataFrame = pandas.read_table(f, header=2, skipinitialspace=True) + newdata = dataFrame.values + + if data is None: + data = newdata + else: + data = numpy.concatenate((data, newdata)) + + return from_data(data, **kwargs) + + +def from_file_numpy(fname, **kwargs): + """Use numpy.""" + data = None + for f in fname: + newdata = numpy.loadtxt(f, skiprows=3) + + if data is None: + data = newdata + else: + data = numpy.concatenate((data, newdata)) + + return from_data(data, **kwargs) + + +def from_device(detec, **kwargs): + """ + Plot raster from a spike recorder. 
+ + Parameters + ---------- + detec : TYPE + Description + kwargs: + Parameters passed to _make_plot + + Raises + ------ + nest.NESTError + """ + + type_id = nest.GetDefaults(detec.get("model"), "type_id") + if not type_id == "spike_recorder": + raise nest.NESTError("Please provide a spike_recorder.") + + if detec.get("record_to") == "memory": + ts, node_ids = _from_memory(detec) + + if not len(ts): + raise nest.NESTError("No events recorded!") + + if "title" not in kwargs: + kwargs["title"] = "Raster plot from device '%i'" % detec.get("global_id") + + if detec.get("time_in_steps"): + xlabel = "Steps" + else: + xlabel = "Time (ms)" + + return _make_plot(ts, ts, node_ids, node_ids, xlabel=xlabel, **kwargs) + + elif detec.get("record_to") == "ascii": + fname = detec.get("filenames") + return from_file(fname, **kwargs) + + else: + raise nest.NESTError("No data to plot. Make sure that record_to is set to either 'ascii' or 'memory'.") + + +def _from_memory(detec): + ev = detec.get("events") + return ev["times"], ev["senders"] + + +def _make_plot(ts, ts1, node_ids, neurons, hist=True, hist_binwidth=5.0, grayscale=False, title=None, xlabel=None): + """Generic plotting routine. + + Constructs a raster plot along with an optional histogram (common part in + all routines above). + + Parameters + ---------- + ts : list + All timestamps + ts1 : list + Timestamps corresponding to node_ids + node_ids : list + Global ids corresponding to ts1 + neurons : list + Node IDs of neurons to plot + hist : bool, optional + Display histogram + hist_binwidth : float, optional + Width of histogram bins + grayscale : bool, optional + Plot in grayscale + title : str, optional + Plot title + xlabel : str, optional + Label for x-axis + """ + import matplotlib.pyplot as plt + + plt.figure() + + if grayscale: + color_marker = ".k" + color_bar = "gray" + else: + color_marker = "." 
+ color_bar = "blue" + + color_edge = "black" + + if xlabel is None: + xlabel = "Time (ms)" + + ylabel = "Neuron ID" + + if hist: + ax1 = plt.axes([0.1, 0.3, 0.85, 0.6]) + plotid = plt.plot(ts1, node_ids, color_marker) + plt.ylabel(ylabel) + plt.xticks([]) + xlim = plt.xlim() + + plt.axes([0.1, 0.1, 0.85, 0.17]) + t_bins = numpy.arange(numpy.amin(ts), numpy.amax(ts), float(hist_binwidth)) + n, _ = _histogram(ts, bins=t_bins) + num_neurons = len(numpy.unique(neurons)) + heights = 1000 * n / (hist_binwidth * num_neurons) + + plt.bar(t_bins, heights, width=hist_binwidth, color=color_bar, edgecolor=color_edge) + plt.yticks([int(x) for x in numpy.linspace(0.0, int(max(heights) * 1.1) + 5, 4)]) + plt.ylabel("Rate (Hz)") + plt.xlabel(xlabel) + plt.xlim(xlim) + plt.axes(ax1) + else: + plotid = plt.plot(ts1, node_ids, color_marker) + plt.xlabel(xlabel) + plt.ylabel(ylabel) + + if title is None: + plt.title("Raster plot") + else: + plt.title(title) + + plt.draw() + + return plotid + + +def _histogram(a, bins=10, bin_range=None, normed=False): + """Calculate histogram for data. 
+ + Parameters + ---------- + a : list + Data to calculate histogram for + bins : int, optional + Number of bins + bin_range : TYPE, optional + Range of bins + normed : bool, optional + Whether distribution should be normalized + + Raises + ------ + ValueError + """ + from numpy import asarray, concatenate, iterable, linspace, sort + + a = asarray(a).ravel() + + if bin_range is not None: + mn, mx = bin_range + if mn > mx: + raise ValueError("max must be larger than min in range parameter") + + if not iterable(bins): + if bin_range is None: + bin_range = (a.min(), a.max()) + mn, mx = [mi + 0.0 for mi in bin_range] + if mn == mx: + mn -= 0.5 + mx += 0.5 + bins = linspace(mn, mx, bins, endpoint=False) + else: + if (bins[1:] - bins[:-1] < 0).any(): + raise ValueError("bins must increase monotonically") + + # best block size probably depends on processor cache size + block = 65536 + n = sort(a[:block]).searchsorted(bins) + for i in range(block, a.size, block): + n += sort(a[i : i + block]).searchsorted(bins) + n = concatenate([n, [len(a)]]) + n = n[1:] - n[:-1] + + if normed: + db = bins[1] - bins[0] + return 1.0 / (a.size * db) * n, bins + else: + return n, bins diff --git a/pynest/nest/plot/visualization.py b/pynest/nest/plot/visualization.py new file mode 100644 index 0000000000..58cb6a442f --- /dev/null +++ b/pynest/nest/plot/visualization.py @@ -0,0 +1,83 @@ +# -*- coding: utf-8 -*- +# +# visualization.py +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . + +""" +Functions to visualize a network built in NEST. +""" + +import nest +import pydot + +__all__ = [ + "plot_network", +] + + +def plot_network(nodes, filename, ext_conns=False, plot_modelnames=False): + """Plot the given nodes and the connections that originate from + them. + + This function depends on the availability of the pydot module. + + Simplified version for NEST 3. + + Parameters + ---------- + nodes : NodeCollection + NodeCollection containing node IDs of nodes to plot + filename : str + Filename to save the plot to. Can end either in .pdf or .png to + determine the type of the output. + ext_conns : bool, optional + Draw connections to targets that are not in nodes. If it is True, + these are drawn to a node named 'ext'. + plot_modelnames : bool, optional + Description + + Raises + ------ + nest.NESTError + """ + + if len(nodes) == 0: + nest.NESTError("nodes must at least contain one node") + + if not isinstance(nodes, nest.NodeCollection): + raise nest.NESTError("nodes must be a NodeCollection") + + if ext_conns: + raise NotImplementedError("ext_conns") + if plot_modelnames: + raise NotImplementedError("plot_modelnames") + + conns = nest.GetConnections(nodes) + + graph = pydot.Dot(rankdir="LR", ranksep="5") + for source, target in zip(conns.sources(), conns.targets()): + graph.add_edge(pydot.Edge(str(source), str(target))) + + filetype = filename.rsplit(".", 1)[1] + if filetype == "pdf": + graph.write_pdf(filename) + elif filetype == "png": + graph.write_png(filename) + else: + raise nest.NESTError("Filename must end in '.png' or '.pdf'.") diff --git a/pyproject.toml b/pyproject.toml index 9f7cc6f130..b140d7614d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,3 +1,31 @@ +[build-system] +requires = [ + "wheel", + "scikit-build", + "cmake", + "Cython", + "ninja; platform_system!='Windows'" +] +build-backend = "setuptools.build_meta" + 
+[tool.cibuildwheel] +skip = ["*-musllinux_*", "*cp36*", "*cp37*", "*pp3*"] +archs = "auto64" +build-verbosity = 3 + +[tool.cibuildwheel.environment] +NEST_CMAKE_BUILDWHEEL="ON" +# We have to set the CMAKE_MODULE_PATH to have our own FindCython loaded before skbuild's +# Cython. I was unable to append the correct skbuild modules path as well. It is quite +# variable per build. The flag is passed twice, let's hope they stack up. Example: +# -DCMAKE_MODULE_PATH:PATH=/tmp/pip-build-env-po15qu7j/overlay/lib/python3.9/site-packages/skbuild/resources/cmake +CMAKE_ARGS="-DCMAKE_MODULE_PATH=/project/cmake -Dwith-userdoc=OFF" +BOOST_ROOT="/boost" +GSL_ROOT_DIR="/gsl" + +[tool.cibuildwheel.linux] +before-build = "python3 build_support/prepare_wheel_container.py" + [tool.pytest.ini_options] markers = [ "skipif_missing_gsl: skip test if NEST was built without GSL support", diff --git a/setup.py b/setup.py new file mode 100644 index 0000000000..ca68a15eac --- /dev/null +++ b/setup.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +# +# setup.py +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . 
+ +from setuptools import find_packages +from skbuild import setup + +setup( + name="nest-simulator", + version="3.4.0-dev0", + description="Python bindings for NEST", + author="The NEST Initiative", + url="https://www.nest-simulator.org", + license="GPLv2+", + packages=find_packages(where="pynest"), + package_dir={"": "pynest"}, + install_requires=["numpy", "scipy"], + extras_require={"test": ["junitparser", "matplotlib", "nose"]}, + classifiers=[ + "Development Status :: 6 - Mature", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Intended Audience :: Science/Research", + "Topic :: Scientific/Engineering", + "Topic :: Scientific/Engineering :: Artificial Intelligence", + ], + python_requires=">=3.8, <4", + keywords=( + ",".join( + [ + "nest", + "simulator", + "neuroscience", + "neural", + "neuron", + "network", + "ai", + "spike", + "spiking", + ] + ) + ), + project_urls={ + "Homepage": "https://www.nest-simulator.org/", + "Bug Reports": "https://github.com/nest/nest-simulator/issues", + "Source": "https://github.com/nest/nest-simulator", + "Documentation": "https://nest-simulator.readthedocs.io/", + }, +) diff --git a/testsuite/CMakeLists.txt b/testsuite/CMakeLists.txt deleted file mode 100644 index d9c445a873..0000000000 --- a/testsuite/CMakeLists.txt +++ /dev/null @@ -1,41 +0,0 @@ -# testsuite/CMakeLists.txt -# -# This file is part of NEST. -# -# Copyright (C) 2004 The NEST Initiative -# -# NEST is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 2 of the License, or -# (at your option) any later version. -# -# NEST is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. 
-# -# You should have received a copy of the GNU General Public License -# along with NEST. If not, see . - -set( TESTSUBDIRS - regressiontests - cpptests - pytests -) - -add_subdirectory( regressiontests ) -add_subdirectory( cpptests ) - -install( DIRECTORY ${TESTSUBDIRS} - DESTINATION ${CMAKE_INSTALL_DATADIR}/testsuite -) - -install( PROGRAMS - do_tests.sh - DESTINATION ${CMAKE_INSTALL_DATADIR}/testsuite -) - -install( FILES - junit_xml.sh run_test.sh summarize_tests.py - DESTINATION ${CMAKE_INSTALL_DATADIR}/testsuite -) diff --git a/testsuite/pytests/test_tsodyks2_synapse.py b/testsuite/pytests/test_tsodyks2_synapse.py index 0a7694dbe5..ae807a63d5 100644 --- a/testsuite/pytests/test_tsodyks2_synapse.py +++ b/testsuite/pytests/test_tsodyks2_synapse.py @@ -128,7 +128,9 @@ def reproduce_weight_drift(self, _pre_spikes, absolute_weight=1.0): if time_in_simulation_steps in pre_spikes_forced_to_grid: # A presynaptic spike occurred now. # Adjusting the current time to make it exact. - t_spike = _pre_spikes[pre_spikes_forced_to_grid.index(time_in_simulation_steps)] + t_spike = _pre_spikes[ + pre_spikes_forced_to_grid.index(time_in_simulation_steps) + ] # Evaluating the depression rule. 
h = t_spike - t_lastspike diff --git a/testsuite/pytests/test_urbanczik_synapse.py b/testsuite/pytests/test_urbanczik_synapse.py index e47ea80f2d..b0bc83b340 100644 --- a/testsuite/pytests/test_urbanczik_synapse.py +++ b/testsuite/pytests/test_urbanczik_synapse.py @@ -40,7 +40,10 @@ def test_ConnectNeuronsWithUrbanczikSynapse(self): nest.set_verbosity(nest.verbosity.M_WARNING) - mc_models = ["iaf_cond_alpha_mc", "pp_cond_exp_mc_urbanczik"] # Multi-compartment models + mc_models = [ + "iaf_cond_alpha_mc", + "pp_cond_exp_mc_urbanczik", + ] # Multi-compartment models supported_models = ["pp_cond_exp_mc_urbanczik"] unsupported_models = [n for n in nest.node_models if n not in supported_models] @@ -231,7 +234,9 @@ def test_SynapseDepressionFacilitation(self): comparison between Nest and python implementation """ # extract the weight computed in python at the times of the presynaptic spikes - idx = np.nonzero(np.in1d(np.around(t, 4), np.around(pre_syn_spike_times + resolution, 4)))[0] + idx = np.nonzero( + np.in1d(np.around(t, 4), np.around(pre_syn_spike_times + resolution, 4)) + )[0] syn_w_comp_at_spike_times = syn_weight_comp[idx] realtive_error = (weights[-1] - syn_w_comp_at_spike_times[-1]) / (weights[-1] - init_w) self.assertTrue(abs(realtive_error) < 0.001) diff --git a/testsuite/pytests/test_visualization.py b/testsuite/pytests/test_visualization.py index 77f6eb7d8d..33b674073a 100644 --- a/testsuite/pytests/test_visualization.py +++ b/testsuite/pytests/test_visualization.py @@ -73,7 +73,7 @@ def setUp(self): @pytest.mark.skipif(not HAVE_PYDOT, reason="pydot not found") def test_plot_network(self): """Test plot_network""" - import nest.visualization as nvis + import nest.plot.visualization as nvis nest.ResetKernel() sources = nest.Create("iaf_psc_alpha", 10) @@ -112,7 +112,7 @@ def test_voltage_trace_from_device(self): # Test with data from device plt.close("all") - nest.voltage_trace.from_device(device) + nest.plot.voltage_trace.from_device(device) 
self.voltage_trace_verify(device) # Test with data from file @@ -126,7 +126,7 @@ def test_voltage_trace_from_device(self): np.savetxt(filename, data) plt.close("all") - nest.voltage_trace.from_file(filename) + nest.plot._voltage_trace.from_file(filename) self.voltage_trace_verify(device) def spike_recorder_data_setup(self, to_file=False): @@ -160,15 +160,15 @@ def spike_recorder_raster_verify(self, sr_ref): @pytest.mark.skipif(not PLOTTING_POSSIBLE, reason="Plotting impossible because matplotlib or display missing") def test_raster_plot(self): """Test raster_plot""" - import nest.raster_plot + import nest.plot._raster_plot sr, sr_to_file = self.spike_recorder_data_setup(to_file=True) spikes = sr.events["times"] senders = sr.events["senders"] # Test from_device - nest.raster_plot.from_device(sr) - self.spike_recorder_raster_verify(spikes) + nest.plot.raster_plot.from_device(sr) + self.spike_recorder_raster_verify(sr_ref) # Test from_data data = np.zeros([len(senders), 2]) @@ -180,17 +180,17 @@ def test_raster_plot(self): # Test from_file filename = sr_to_file.filenames[0] self.filenames.append(filename) - nest.raster_plot.from_file(filename) - self.spike_recorder_raster_verify(spikes) + nest.plot.raster_plot.from_file(filename) + self.spike_recorder_raster_verify(sr_ref) # Test from_file_numpy - nest.raster_plot.from_file_numpy([filename]) - self.spike_recorder_raster_verify(spikes) + nest.plot.raster_plot.from_file_numpy([filename]) + self.spike_recorder_raster_verify(sr_ref) if HAVE_PANDAS: # Test from_file_pandas - nest.raster_plot.from_file_pandas([filename]) - self.spike_recorder_raster_verify(spikes) + nest.plot._raster_plot.from_file_pandas([filename]) + self.spike_recorder_raster_verify(sr_ref) # Test extract_events all_extracted = nest.raster_plot.extract_events(data)