diff --git a/.clang-tidy b/.clang-tidy new file mode 100644 index 00000000..03368ffa --- /dev/null +++ b/.clang-tidy @@ -0,0 +1,2 @@ +Checks: '-*,modernize-*,cppcoreguidelines-*,-modernize-use-trailing-return-type' +HeaderFilterRegex: 'triqs_dft_tools' diff --git a/.dockerignore b/.dockerignore index 6e7a76a5..b627e92a 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,2 +1,4 @@ +.travis.yml Dockerfile Jenkinsfile +.git/objects/pack diff --git a/.github/ISSUE_TEMPLATE/bug.md b/.github/ISSUE_TEMPLATE/bug.md index 79dfa304..db102d9e 100644 --- a/.github/ISSUE_TEMPLATE/bug.md +++ b/.github/ISSUE_TEMPLATE/bug.md @@ -32,7 +32,7 @@ Please provide the application version that you used. You can get this information from copy and pasting the output of ```bash -python -c "from app4triqs.version import *; show_version(); show_git_hash();" +python -c "from triqs_dft_tools.version import *; show_version(); show_git_hash();" ``` from the command line. Also, please include the OS you are running and its version. 
diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..226118b4 --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +compile_commands.json +doc/cpp2rst_generated diff --git a/.travis.yml b/.travis.yml index 1b2df662..7ffe737b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,47 +1,34 @@ language: cpp sudo: required -dist: trusty +dist: bionic compiler: - gcc - # - clang + - clang before_install: - - sudo add-apt-repository 'deb http://apt.llvm.org/trusty/ llvm-toolchain-trusty-5.0 main' -y - - wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key|sudo apt-key add - - - sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y - sudo apt-get update - - sudo apt-get install -y --allow-unauthenticated g++-7 clang-5.0 - - export LIBRARY_PATH=/usr/lib/llvm-5.0/lib:$LIBRARY_PATH - - sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-7 60 --slave /usr/bin/g++ g++ /usr/bin/g++-7 - - sudo update-alternatives --install /usr/bin/clang clang /usr/bin/clang-5.0 60 --slave /usr/bin/clang++ clang++ /usr/bin/clang++-5.0 - - sudo apt-get install -y --allow-unauthenticated libboost-all-dev cmake git libgfortran3 gfortran openmpi-bin openmpi-common openmpi-doc libopenmpi-dev libblas-dev liblapack-dev libfftw3-dev libgmp-dev hdf5-tools libhdf5-serial-dev python-h5py python-dev python-numpy python-scipy python-jinja2 python-virtualenv python-matplotlib python-tornado python-zmq python-mpi4py python-mako clang-format-5.0 libclang-5.0-dev python-clang-5.0 python-sphinx libjs-mathjax valgrind libnfft3-dev + - sudo apt-get install -y --allow-unauthenticated libblas-dev libboost-all-dev libfftw3-dev libgfortran3 libhdf5-serial-dev libgmp-dev liblapack-dev libopenmpi-dev libclang-dev python-clang-6.0 python-dev python-h5py python-mako python-matplotlib python-mpi4py python-numpy python-scipy python-sphinx libjs-mathjax libnfft3-dev install: true script: - # ===== Set up Cpp2Py - - git clone https://github.com/triqs/cpp2py - - mkdir cpp2py/build && cd cpp2py/build - - 
git checkout master - - cmake .. -DCMAKE_CXX_COMPILER=/usr/bin/${CXX} -DPYTHON_INTERPRETER=/usr/bin/python -DCMAKE_INSTALL_PREFIX=$TRAVIS_BUILD_DIR/root_install - - make -j8 install - - cd $TRAVIS_BUILD_DIR - - source root_install/share/cpp2pyvars.sh + - export INSTALL_DIR=$HOME/root_install # We install outside the repository # ===== Set up TRIQS - - git clone https://github.com/TRIQS/triqs --branch $TRAVIS_BRANCH - - mkdir triqs/build && cd triqs/build - - cmake .. -DCMAKE_CXX_COMPILER=/usr/bin/${CXX} -DBuild_Tests=OFF -DCMAKE_INSTALL_PREFIX=$TRAVIS_BUILD_DIR/root_install -DCMAKE_BUILD_TYPE=Debug - - make -j8 install - cd $TRAVIS_BUILD_DIR - - source root_install/share/triqsvars.sh - # ===== Set up dft_tools and Test using fsanitize=address + - git clone https://github.com/TRIQS/triqs --branch unstable + - mkdir triqs/build && cd triqs/build + - cmake .. -DBuild_Tests=OFF -DCMAKE_INSTALL_PREFIX=$INSTALL_DIR + - make -j2 install + - source $INSTALL_DIR/share/triqsvars.sh + # ===== Set up triqs_dft_tools and test + - cd $TRAVIS_BUILD_DIR - mkdir build && cd build - - cmake .. -DCMAKE_BUILD_TYPE=Debug -DCMAKE_CXX_COMPILER=/usr/bin/${CXX} -DCMAKE_CXX_FLAGS='-fsanitize=address -fno-omit-frame-pointer -fuse-ld=gold' - - make -j8 - - export ASAN_SYMBOLIZER_PATH=/usr/lib/llvm-5.0/bin/llvm-symbolizer + - cmake .. 
-DASAN=ON -DUBSAN=ON + - export UBSAN_SYMBOLIZER_PATH=$(which llvm-symbolizer) + - export ASAN_SYMBOLIZER_PATH=$(which llvm-symbolizer) + - export UBSAN_OPTIONS=symbolize=1:print_stacktrace=1 - export ASAN_OPTIONS=symbolize=1:detect_leaks=0 - export CTEST_OUTPUT_ON_FAILURE=1 - - if [ "$CXX" = g++ ]; then export LD_PRELOAD=/usr/lib/gcc/x86_64-linux-gnu/7/libasan.so; elif [ "$CXX" = clang++ ]; then export LD_PRELOAD=/usr/lib/llvm-5.0/lib/clang/5.0.1/lib/linux/libclang_rt.asan-x86_64.so; fi - - cd test && ctest + - make -j2 && make test diff --git a/CMakeLists.txt b/CMakeLists.txt index 669c61a2..ceeeaf6f 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,105 +1,164 @@ -# Start configuration -cmake_minimum_required(VERSION 3.0.2 FATAL_ERROR) -project(triqs_dft_tools C CXX Fortran) +# ############################################################################## +# +# triqs_dft_tools - An example application using triqs and cpp2py +# +# Copyright (C) ... +# +# triqs_dft_tools is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# triqs_dft_tools is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# triqs_dft_tools (in the file COPYING.txt in this directory). If not, see +# . 
+# +# ############################################################################## + +cmake_minimum_required(VERSION 3.3.2 FATAL_ERROR) +cmake_policy(VERSION 3.3.2) if(POLICY CMP0074) cmake_policy(SET CMP0074 NEW) endif() +if(POLICY CMP0077) + cmake_policy(SET CMP0077 NEW) +endif() + +# ############ +# Define Project +project(triqs_dft_tools VERSION 3.0.0 LANGUAGES C CXX Fortran) +get_directory_property(IS_SUBPROJECT PARENT_DIRECTORY) + +# ############ +# Load TRIQS and CPP2PY +find_package(TRIQS 3.0 REQUIRED) + +# Get the git hash & print status +triqs_get_git_hash_of_source_dir(PROJECT_GIT_HASH) +message(STATUS "${PROJECT_NAME} version : ${PROJECT_VERSION}") +message(STATUS "${PROJECT_NAME} Git hash: ${PROJECT_GIT_HASH}") + +# Enforce Consistent Versioning +if(NOT ${PROJECT_VERSION_MAJOR}.${PROJECT_VERSION_MINOR} VERSION_EQUAL ${TRIQS_VERSION_MAJOR}.${TRIQS_VERSION_MINOR}) + message(FATAL_ERROR "The ${PROJECT_NAME} version ${PROJECT_VERSION} is not compatible with TRIQS version ${TRIQS_VERSION}.") +endif() + +# Default Install directory to TRIQS_ROOT if not given or invalid. +if(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT OR (NOT IS_ABSOLUTE ${CMAKE_INSTALL_PREFIX})) + message(STATUS "No install prefix given (or invalid). 
Defaulting to TRIQS_ROOT") + set(CMAKE_INSTALL_PREFIX ${TRIQS_ROOT} CACHE PATH "default install path" FORCE) + set(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT FALSE) +endif() +if(NOT IS_SUBPROJECT) + message(STATUS "-------- CMAKE_INSTALL_PREFIX: ${CMAKE_INSTALL_PREFIX} --------") +endif() +set(${PROJECT_NAME}_BINARY_DIR ${PROJECT_BINARY_DIR} CACHE STRING "Binary directory of the ${PROJECT_NAME} Project") + + +# ############ +# Options + +# Make additional Find Modules available +list(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/share/cmake/Modules) # Default to Release build type if(NOT CMAKE_BUILD_TYPE) set(CMAKE_BUILD_TYPE Release CACHE STRING "Type of build" FORCE) endif() -message( STATUS "-------- BUILD-TYPE: ${CMAKE_BUILD_TYPE} --------") - -# Use shared libraries -set(BUILD_SHARED_LIBS ON) - -# Load TRIQS and Cpp2Py -find_package(TRIQS 2.2 REQUIRED) -find_package(Cpp2Py 1.6 REQUIRED) - -if (NOT ${TRIQS_WITH_PYTHON_SUPPORT}) - MESSAGE(FATAL_ERROR "dft_tools require Python support in TRIQS") +if(NOT IS_SUBPROJECT) + message(STATUS "-------- BUILD-TYPE: ${CMAKE_BUILD_TYPE} --------") endif() -# Default Install directory to TRIQS_ROOT if not given. Checks an absolute name is given. -if(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT OR (NOT IS_ABSOLUTE ${CMAKE_INSTALL_PREFIX})) - message(STATUS " No install prefix given (or invalid). Defaulting to TRIQS_ROOT") - set(CMAKE_INSTALL_PREFIX ${TRIQS_ROOT} CACHE PATH "default install path" FORCE) +# Python Support +option(PythonSupport "Build with Python support" ON) +if(PythonSupport AND NOT TRIQS_WITH_PYTHON_SUPPORT) + message(FATAL_ERROR "TRIQS was installed without Python support. Cannot build the Python Interface. 
Disable the build with -DPythonSupport=OFF") endif() -message(STATUS "-------- CMAKE_INSTALL_PREFIX: ${CMAKE_INSTALL_PREFIX} -------------") -# Define the dft_tools version numbers and get the git hash -set(DFT_TOOLS_VERSION_MAJOR 2) -set(DFT_TOOLS_VERSION_MINOR 2) -set(DFT_TOOLS_VERSION_PATCH 0) -set(DFT_TOOLS_VERSION ${DFT_TOOLS_VERSION_MAJOR}.${DFT_TOOLS_VERSION_MINOR}.${DFT_TOOLS_VERSION_PATCH}) -triqs_get_git_hash_of_source_dir(DFT_TOOLS_GIT_HASH) -message(STATUS "Dft_tools version : ${DFT_TOOLS_VERSION}") -message(STATUS "Git hash: ${DFT_TOOLS_GIT_HASH}") +# Documentation +option(Build_Documentation "Build documentation" OFF) +# Testing +option(Build_Tests "Build tests" ON) +if(Build_Tests) + enable_testing() +endif() + +# Export the list of compile-commands into compile_commands.json +set(CMAKE_EXPORT_COMPILE_COMMANDS ON) + +# Global compiler options +option(BUILD_SHARED_LIBS "Enable compilation of shared libraries" OFF) +add_compile_options($<$:-ggdb3>) + +# Create an Interface target for compiler warnings +add_library(${PROJECT_NAME}_warnings INTERFACE) +target_compile_options(${PROJECT_NAME}_warnings + INTERFACE + -Wall + -Wextra + -Wpedantic + -Wno-sign-compare + $<$:-Wshadow=local> + $<$:-Wno-attributes> + $<$:-Wshadow> + $<$:-Wno-gcc-compat> + $<$:-Wshadow> + $<$:-Wno-gcc-compat> +) + +# ############# +# Build Project + +# Find / Build dependencies +add_subdirectory(deps) + +# Build and install the library +add_subdirectory(c++/${PROJECT_NAME}) + +# add here stuff for the Fortran part in DFTTools add_subdirectory(fortran/dmftproj) -# Add the compiling options (-D... ) for C++ -message(STATUS "TRIQS : Adding compilation flags detected by the library (C++11/14, libc++, etc...) 
") - -add_subdirectory(c++) -add_subdirectory(python python/triqs_dft_tools) -add_subdirectory(shells) - -#------------------------ -# tests -#------------------------ - -option(TEST_COVERAGE "Analyze the coverage of tests" OFF) - -# perform tests with coverage info -if (${TEST_COVERAGE}) - # we try to locate the coverage program - find_program(PYTHON_COVERAGE python-coverage) - find_program(PYTHON_COVERAGE coverage) - if(NOT PYTHON_COVERAGE) - message(FATAL_ERROR "Program coverage (or python-coverage) not found.\nEither set PYTHON_COVERAGE explicitly or disable TEST_COVERAGE!\nYou need to install the python package coverage, e.g. with\n pip install coverage\nor with\n apt install python-coverage") - endif() - - message(STATUS "Setting up test coverage") - add_custom_target(coverage ${PYTHON_COVERAGE} combine --append .coverage plovasp/.coverage || true COMMAND ${PYTHON_COVERAGE} html COMMAND echo "Open ${CMAKE_BINARY_DIR}/test/htmlcov/index.html in browser!" WORKING_DIRECTORY ${CMAKE_BINARY_DIR}/test) +# Tests +if(Build_Tests) + add_subdirectory(test) endif() -enable_testing() - -option(Build_Tests "Build the tests of the library " ON) -if (Build_Tests) - message(STATUS "-------- Preparing tests -------------") - add_subdirectory(test) +# Python +if(PythonSupport) + add_subdirectory(python/${PROJECT_NAME}) endif() -#------------------------ -# Documentation -#------------------------ -option(Build_Documentation "Build documentation" OFF) -if(${Build_Documentation}) - if(NOT ${TRIQS_WITH_DOCUMENTATION}) - message("Error: TRIQS library has not been compiled with its documentation") - endif() +# Docs +if(Build_Documentation) add_subdirectory(doc) endif() -#-------------------------------------------------------- -# Packaging -#-------------------------------------------------------- +# dfttols vasp interface bash scripts +add_subdirectory(bin) + +# Additional configuration files +add_subdirectory(share) + +# ############# +# Debian Package + 
option(BUILD_DEBIAN_PACKAGE "Build a deb package" OFF) -if(BUILD_DEBIAN_PACKAGE) +if(BUILD_DEBIAN_PACKAGE AND NOT IS_SUBPROJECT) if(NOT CMAKE_INSTALL_PREFIX STREQUAL "/usr") message(FATAL_ERROR "CMAKE_INSTALL_PREFIX must be /usr for packaging") endif() - SET(CPACK_GENERATOR "DEB") - SET(CPACK_PACKAGE_VERSION ${DFT_TOOLS_VERSION}) - SET(CPACK_PACKAGE_CONTACT "https://github.com/TRIQS/dft_tools") - EXECUTE_PROCESS(COMMAND dpkg --print-architecture OUTPUT_VARIABLE CMAKE_DEBIAN_PACKAGE_ARCHITECTURE OUTPUT_STRIP_TRAILING_WHITESPACE) - SET(CPACK_DEBIAN_PACKAGE_DEPENDS "triqs (>= 2.2)") - SET(CPACK_DEBIAN_PACKAGE_CONFLICTS "dft_tools") - SET(CPACK_DEBIAN_PACKAGE_SHLIBDEPS ON) - SET(CPACK_DEBIAN_PACKAGE_GENERATE_SHLIBS ON) - INCLUDE(CPack) + set(CPACK_PACKAGE_NAME ${PROJECT_NAME}) + set(CPACK_GENERATOR "DEB") + set(CPACK_PACKAGE_VERSION ${PROJECT_VERSION}) + set(CPACK_PACKAGE_CONTACT "https://github.com/TRIQS/${PROJECT_NAME}") + execute_process(COMMAND dpkg --print-architecture OUTPUT_VARIABLE CMAKE_DEBIAN_PACKAGE_ARCHITECTURE OUTPUT_STRIP_TRAILING_WHITESPACE) + set(CPACK_DEBIAN_PACKAGE_DEPENDS "triqs (>= 3.0)") + set(CPACK_DEBIAN_PACKAGE_SHLIBDEPS ON) + set(CPACK_DEBIAN_PACKAGE_GENERATE_SHLIBS ON) + include(CPack) endif() diff --git a/ChangeLog.md b/ChangeLog.md new file mode 120000 index 00000000..9e080e26 --- /dev/null +++ b/ChangeLog.md @@ -0,0 +1 @@ +doc/ChangeLog.md \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index 1d7149fb..e05e9a12 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,12 +1,16 @@ # See ../triqs/packaging for other options -FROM flatironinstitute/triqs:master-ubuntu-clang +FROM flatironinstitute/triqs:unstable-ubuntu-clang +ARG APPNAME=triqs_dft_tools -ARG APPNAME -COPY . $SRC/$APPNAME +COPY requirements.txt /src/$APPNAME/requirements.txt +RUN pip3 install -r /src/$APPNAME/requirements.txt + +COPY --chown=build . $SRC/$APPNAME WORKDIR $BUILD/$APPNAME RUN chown build . 
USER build ARG BUILD_DOC=0 -RUN cmake $SRC/$APPNAME -DTRIQS_ROOT=${INSTALL} -DBuild_Documentation=${BUILD_DOC} && make -j2 && make test CTEST_OUTPUT_ON_FAILURE=1 +ARG BUILD_ID +RUN cmake $SRC/$APPNAME -DTRIQS_ROOT=${INSTALL} -DBuild_Documentation=${BUILD_DOC} -DBuild_Deps=Always && make -j2 || make -j1 VERBOSE=1 USER root RUN make install diff --git a/Jenkinsfile b/Jenkinsfile index 0cf7bdea..9338c5d1 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -1,5 +1,6 @@ def projectName = "dft_tools" /* set to app/repo name */ +def dockerName = projectName.toLowerCase(); /* which platform to build documentation on */ def documentationPlatform = "ubuntu-clang" /* depend on triqs upstream branch/project */ @@ -37,7 +38,12 @@ for (int i = 0; i < dockerPlatforms.size(); i++) { mv -f Dockerfile.jenkins Dockerfile """ /* build and tag */ - def img = docker.build("flatironinstitute/${projectName}:${env.BRANCH_NAME}-${env.STAGE_NAME}", "--build-arg APPNAME=${projectName} --build-arg BUILD_DOC=${platform==documentationPlatform} .") + def img = docker.build("flatironinstitute/${dockerName}:${env.BRANCH_NAME}-${env.STAGE_NAME}", "--build-arg APPNAME=${projectName} --build-arg BUILD_DOC=${platform==documentationPlatform} --build-arg BUILD_ID=${env.BUILD_TAG} .") + catchError(buildResult: 'UNSTABLE', stageResult: 'UNSTABLE') { + img.inside() { + sh "make -C \$BUILD/${projectName} test CTEST_OUTPUT_ON_FAILURE=1" + } + } if (!keepInstall) { sh "docker rmi --no-prune ${img.imageName()}" } @@ -48,7 +54,7 @@ for (int i = 0; i < dockerPlatforms.size(); i++) { /****************** osx builds (on host) */ def osxPlatforms = [ ["gcc", ['CC=gcc-9', 'CXX=g++-9', 'FC=gfortran-9']], - ["clang", ['CC=$BREW/opt/llvm/bin/clang', 'CXX=$BREW/opt/llvm/bin/clang++', 'CXXFLAGS=-I$BREW/opt/llvm/include', 'LDFLAGS=-L$BREW/opt/llvm/lib', 'FC=gfortran-9']] + ["clang", ['CC=$BREW/opt/llvm/bin/clang', 'CXX=$BREW/opt/llvm/bin/clang++', 'FC=gfortran-9', 'CXXFLAGS=-I$BREW/opt/llvm/include', 
'LDFLAGS=-L$BREW/opt/llvm/lib']] ] for (int i = 0; i < osxPlatforms.size(); i++) { def platformEnv = osxPlatforms[i] @@ -58,29 +64,37 @@ for (int i = 0; i < osxPlatforms.size(); i++) { def srcDir = pwd() def tmpDir = pwd(tmp:true) def buildDir = "$tmpDir/build" + /* install real branches in a fixed predictable place so apps can find them */ def installDir = keepInstall ? "${env.HOME}/install/${projectName}/${env.BRANCH_NAME}/${platform}" : "$tmpDir/install" def triqsDir = "${env.HOME}/install/triqs/${triqsBranch}/${platform}" + def venv = triqsDir dir(installDir) { deleteDir() } checkout scm + + def hdf5 = "${env.BREW}/opt/hdf5@1.10" dir(buildDir) { withEnv(platformEnv[1].collect { it.replace('\$BREW', env.BREW) } + [ - "PATH=$triqsDir/bin:${env.BREW}/bin:/usr/bin:/bin:/usr/sbin", - "CPLUS_INCLUDE_PATH=$triqsDir/include:${env.BREW}/include", - "LIBRARY_PATH=$triqsDir/lib:${env.BREW}/lib", - "CMAKE_PREFIX_PATH=$triqsDir/lib/cmake/triqs"]) { + "PATH=$venv/bin:${env.BREW}/bin:/usr/bin:/bin:/usr/sbin", + "HDF5_ROOT=$hdf5", + "C_INCLUDE_PATH=$hdf5/include:${env.BREW}/include", + "CPLUS_INCLUDE_PATH=$venv/include:$hdf5/include:${env.BREW}/include", + "LIBRARY_PATH=$venv/lib:$hdf5/lib:${env.BREW}/lib", + "LD_LIBRARY_PATH=$hdf5/lib", + "PYTHONPATH=$installDir/lib/python3.7/site-packages", + "CMAKE_PREFIX_PATH=$venv/lib/cmake/triqs"]) { deleteDir() /* note: this is installing into the parent (triqs) venv (install dir), which is thus shared among apps and so not be completely safe */ - sh "pip install -r $srcDir/requirements.txt" - sh "cmake $srcDir -DCMAKE_INSTALL_PREFIX=$installDir -DTRIQS_ROOT=$triqsDir" - sh "make -j3" - try { + sh "pip3 install -U -r $srcDir/requirements.txt" + sh "cmake $srcDir -DCMAKE_INSTALL_PREFIX=$installDir -DTRIQS_ROOT=$triqsDir -DBuild_Deps=Always" + sh "make -j2" + catchError(buildResult: 'UNSTABLE', stageResult: 'UNSTABLE') { try { sh "make test CTEST_OUTPUT_ON_FAILURE=1" } catch (exc) { archiveArtifacts(artifacts: 
'Testing/Temporary/LastTest.log') throw exc - } + } } sh "make install" } } } } @@ -95,15 +109,20 @@ try { stage("publish") { timeout(time: 5, unit: 'MINUTES') { def commit = sh(returnStdout: true, script: "git rev-parse HEAD").trim() def release = env.BRANCH_NAME == "master" || env.BRANCH_NAME == "unstable" || sh(returnStdout: true, script: "git describe --exact-match HEAD || true").trim() - def workDir = pwd() + def workDir = pwd(tmp:true) lock('triqs_publish') { /* Update documention on gh-pages branch */ dir("$workDir/gh-pages") { def subdir = "${projectName}/${env.BRANCH_NAME}" git(url: "ssh://git@github.com/TRIQS/TRIQS.github.io.git", branch: "master", credentialsId: "ssh", changelog: false) sh "rm -rf ${subdir}" - docker.image("flatironinstitute/${projectName}:${env.BRANCH_NAME}-${documentationPlatform}").inside() { - sh "cp -rp \$INSTALL/share/doc/triqs_${projectName} ${subdir}" + docker.image("flatironinstitute/${dockerName}:${env.BRANCH_NAME}-${documentationPlatform}").inside() { + sh """#!/bin/bash -ex + base=\$INSTALL/share/doc + dir="${projectName}" + [[ -d \$base/triqs_\$dir ]] && dir=triqs_\$dir || [[ -d \$base/\$dir ]] + cp -rp \$base/\$dir ${subdir} + """ } sh "git add -A ${subdir}" sh """ diff --git a/README.txt b/README.md similarity index 100% rename from README.txt rename to README.md diff --git a/shells/CMakeLists.txt b/bin/CMakeLists.txt similarity index 80% rename from shells/CMakeLists.txt rename to bin/CMakeLists.txt index 42e6bc38..0d5af60b 100644 --- a/shells/CMakeLists.txt +++ b/bin/CMakeLists.txt @@ -1,9 +1,9 @@ -configure_file(plovasp.bash.in plovasp) +configure_file(plovasp.in plovasp) install(FILES ${CMAKE_CURRENT_BINARY_DIR}/plovasp DESTINATION bin PERMISSIONS OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE) -configure_file(vasp_dmft.bash.in vasp_dmft) +configure_file(vasp_dmft.in vasp_dmft) install(FILES ${CMAKE_CURRENT_BINARY_DIR}/vasp_dmft DESTINATION bin PERMISSIONS OWNER_READ OWNER_WRITE 
OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE) diff --git a/shells/plovasp.bash.in b/bin/plovasp.in similarity index 100% rename from shells/plovasp.bash.in rename to bin/plovasp.in diff --git a/shells/vasp_dmft.bash.in b/bin/vasp_dmft.in similarity index 100% rename from shells/vasp_dmft.bash.in rename to bin/vasp_dmft.in diff --git a/c++/plovasp/atm/.gitignore b/c++/plovasp/atm/.gitignore deleted file mode 100644 index 675673c3..00000000 --- a/c++/plovasp/atm/.gitignore +++ /dev/null @@ -1,5 +0,0 @@ -makefile -Makefile -*.so -*.o -*.pyc diff --git a/c++/plovasp/atm/CMakeLists.txt b/c++/plovasp/atm/CMakeLists.txt deleted file mode 100644 index afcb0176..00000000 --- a/c++/plovasp/atm/CMakeLists.txt +++ /dev/null @@ -1,7 +0,0 @@ -add_library(atm_c dos_tetra3d.hpp dos_tetra3d.cpp argsort.hpp argsort.cpp) -target_link_libraries(atm_c triqs) -target_compile_options(atm_c PRIVATE -std=c++17) - -install(TARGETS atm_c DESTINATION lib) - -add_subdirectory(test) diff --git a/c++/plovasp/atm/test/CMakeLists.txt b/c++/plovasp/atm/test/CMakeLists.txt deleted file mode 100644 index d592f23c..00000000 --- a/c++/plovasp/atm/test/CMakeLists.txt +++ /dev/null @@ -1,13 +0,0 @@ -enable_testing() - -FILE(GLOB TestList RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} *.cpp) -FOREACH( TestName1 ${TestList} ) - STRING(REPLACE ".cpp" "" TestName ${TestName1}) - add_executable( ${TestName} ${TestName}.cpp ) - target_link_libraries( ${TestName} atm_c triqs) - triqs_set_rpath_for_target( ${TestName} ) - add_test(NAME ${TestName} COMMAND ${CMAKE_CURRENT_BINARY_DIR}/${t}) -ENDFOREACH( TestName1 ${TestList} ) - - - diff --git a/c++/triqs_dft_tools/CMakeLists.txt b/c++/triqs_dft_tools/CMakeLists.txt new file mode 100644 index 00000000..e847bf2f --- /dev/null +++ b/c++/triqs_dft_tools/CMakeLists.txt @@ -0,0 +1,81 @@ +file(GLOB_RECURSE sources *.cpp) +add_library(${PROJECT_NAME}_c ${sources}) +add_library(${PROJECT_NAME}::${PROJECT_NAME}_c ALIAS ${PROJECT_NAME}_c) + +# Link against triqs 
and enable warnings +target_link_libraries(${PROJECT_NAME}_c PUBLIC triqs PRIVATE $) + +# Configure target and compilation +set_property(TARGET ${PROJECT_NAME}_c PROPERTY POSITION_INDEPENDENT_CODE ON) +target_include_directories(${PROJECT_NAME}_c PUBLIC $) +target_include_directories(${PROJECT_NAME}_c SYSTEM INTERFACE $) +target_compile_definitions(${PROJECT_NAME}_c PUBLIC + TRIQS_DFT_TOOLS_GIT_HASH=${PROJECT_GIT_HASH} + TRIQS_GIT_HASH=${TRIQS_GIT_HASH} + $<$:TRIQS_DFT_TOOLS_DEBUG> + $<$:TRIQS_DEBUG> + $<$:TRIQS_ARRAYS_ENFORCE_BOUNDCHECK> + ) + +# Install library and headers +install(TARGETS ${PROJECT_NAME}_c EXPORT ${PROJECT_NAME}-targets DESTINATION lib) +install(DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} DESTINATION include FILES_MATCHING PATTERN "*.hpp" PATTERN "*.h") + + +# ========= Static Analyzer Checks ========== + +option(ANALYZE_SOURCES OFF "Run static analyzer checks if found (clang-tidy, cppcheck)") +if(ANALYZE_SOURCES) + + # Locate static analyzer tools + find_program(CPPCHECK_EXECUTABLE NAMES "cppcheck" PATHS ENV PATH) + find_program(CLANG_TIDY_EXECUTABLE NAMES "clang-tidy" PATHS ENV PATH) + + # Run clang-tidy if found + if(CLANG_TIDY_EXECUTABLE) + message(STATUS "clang-tidy found: ${CLANG_TIDY_EXECUTABLE}") + set_target_properties(${PROJECT_NAME}_c PROPERTIES CXX_CLANG_TIDY "${CLANG_TIDY_EXECUTABLE}") + else() + message(STATUS "clang-tidy not found in $PATH. Please consider installing clang-tidy for additional checks!") + endif() + + # Run cppcheck if found + if(CPPCHECK_EXECUTABLE) + message(STATUS "cppcheck found: ${CPPCHECK_EXECUTABLE}") + add_custom_command( + TARGET ${PROJECT_NAME}_c + COMMAND ${CPPCHECK_EXECUTABLE} + --enable=warning,style,performance,portability + --std=c++17 + --template=gcc + --verbose + --force + --quiet + ${sources} + WORKING_DIRECTORY + ${CMAKE_CURRENT_SOURCE_DIR} + ) + else() + message(STATUS "cppcheck not found in $PATH. 
Please consider installing cppcheck for additional checks!") + endif() + +endif() + + +# ========= Dynamic Analyzer Checks ========== + +option(ASAN OFF "Compile library and executables with LLVM Address Sanitizer") +option(UBSAN OFF "Compile library and executables with LLVM Undefined Behavior Sanitizer") + +if(ASAN) + if(NOT TARGET asan) + find_package(sanitizer REQUIRED "asan") + endif() + target_link_libraries(${PROJECT_NAME}_c PUBLIC $) +endif() +if(UBSAN) + if(NOT TARGET ubsan) + find_package(sanitizer REQUIRED "ubsan") + endif() + target_link_libraries(${PROJECT_NAME}_c PUBLIC $) +endif() diff --git a/c++/triqs_dft_tools/converters/vasp.hpp b/c++/triqs_dft_tools/converters/vasp.hpp new file mode 100644 index 00000000..0aa018d7 --- /dev/null +++ b/c++/triqs_dft_tools/converters/vasp.hpp @@ -0,0 +1,3 @@ +#pragma once +#include "./vasp/argsort.hpp" +#include "./vasp/dos_tetra3d.hpp" diff --git a/c++/plovasp/atm/__init__.py b/c++/triqs_dft_tools/converters/vasp/__init__.py similarity index 100% rename from c++/plovasp/atm/__init__.py rename to c++/triqs_dft_tools/converters/vasp/__init__.py diff --git a/c++/plovasp/atm/argsort.cpp b/c++/triqs_dft_tools/converters/vasp/argsort.cpp similarity index 100% rename from c++/plovasp/atm/argsort.cpp rename to c++/triqs_dft_tools/converters/vasp/argsort.cpp diff --git a/c++/plovasp/atm/argsort.hpp b/c++/triqs_dft_tools/converters/vasp/argsort.hpp similarity index 100% rename from c++/plovasp/atm/argsort.hpp rename to c++/triqs_dft_tools/converters/vasp/argsort.hpp diff --git a/c++/plovasp/atm/dos_tetra3d.cpp b/c++/triqs_dft_tools/converters/vasp/dos_tetra3d.cpp similarity index 100% rename from c++/plovasp/atm/dos_tetra3d.cpp rename to c++/triqs_dft_tools/converters/vasp/dos_tetra3d.cpp diff --git a/c++/plovasp/atm/dos_tetra3d.hpp b/c++/triqs_dft_tools/converters/vasp/dos_tetra3d.hpp similarity index 86% rename from c++/plovasp/atm/dos_tetra3d.hpp rename to c++/triqs_dft_tools/converters/vasp/dos_tetra3d.hpp index 
abdda06b..5420f9f2 100644 --- a/c++/plovasp/atm/dos_tetra3d.hpp +++ b/c++/triqs_dft_tools/converters/vasp/dos_tetra3d.hpp @@ -22,16 +22,14 @@ #include -using triqs::arrays::array; -using triqs::arrays::array_view; /// DOS of a band by analytical tetrahedron method /// /// Returns corner weights for all tetrahedra for a given band and real energy. -array -dos_tetra_weights_3d(array_view eigk, /// Band energies for each k-point +triqs::arrays::array +dos_tetra_weights_3d(triqs::arrays::array_view eigk, /// Band energies for each k-point double en, /// Energy at which DOS weights are to be calculated - array_view itt /// Tetrahedra defined by k-point indices + triqs::arrays::array_view itt /// Tetrahedra defined by k-point indices ); //array //dos_tetra_weights_3d(array eigk, /// Band energies for each k-point diff --git a/c++/plovasp/atm/makefile.linux b/c++/triqs_dft_tools/converters/vasp/makefile.linux similarity index 100% rename from c++/plovasp/atm/makefile.linux rename to c++/triqs_dft_tools/converters/vasp/makefile.linux diff --git a/c++/plovasp/atm/setup.py b/c++/triqs_dft_tools/converters/vasp/setup.py similarity index 100% rename from c++/plovasp/atm/setup.py rename to c++/triqs_dft_tools/converters/vasp/setup.py diff --git a/c++/triqs_dft_tools/triqs_dft_tools.hpp b/c++/triqs_dft_tools/triqs_dft_tools.hpp new file mode 100644 index 00000000..ee70a5dc --- /dev/null +++ b/c++/triqs_dft_tools/triqs_dft_tools.hpp @@ -0,0 +1,3 @@ +#pragma once +#include "./converters/vasp.hpp" + diff --git a/cmake/sitecustomize.py b/cmake/sitecustomize.py deleted file mode 100644 index 0f31ba93..00000000 --- a/cmake/sitecustomize.py +++ /dev/null @@ -1,8 +0,0 @@ -def application_pytriqs_import(name,*args,**kwargs): - if name.startswith('@package_name@'): - name = name[len('@package_name@')+1:] - return builtin_import(name,*args,**kwargs) - -import __builtin__ -__builtin__.__import__, builtin_import = application_pytriqs_import, __builtin__.__import__ - diff --git 
a/deps/.gitignore b/deps/.gitignore new file mode 100644 index 00000000..72e8ffc0 --- /dev/null +++ b/deps/.gitignore @@ -0,0 +1 @@ +* diff --git a/deps/CMakeLists.txt b/deps/CMakeLists.txt new file mode 100644 index 00000000..25c49960 --- /dev/null +++ b/deps/CMakeLists.txt @@ -0,0 +1,66 @@ +include(external_dependency.cmake) + +# Add your dependencies with the function +# +# external_dependency(name +# [VERSION ] +# [GIT_REPO ] +# [GIT_TAG ] +# [BUILD_ALWAYS] +# [EXCLUDE_FROM_ALL] +# ) +# +# Resolve the dependency using the following steps in order. +# If a step was successful, skip the remaining ones. +# +# 1. Use find_package(name []) +# to locate the package in the system. +# Skip this step if Build_Deps option is set. +# 2. Try to find a directory containing the sources +# at ${PROJECT_SOURCE_DIR}/deps/name. If found +# build it as a cmake sub-project. +# 3. If GIT_REPO is provided, git clone the sources, +# and build them as a cmake sub-project. +# +# Addtional options: +# +# GIT_TAG - Use this keyword to specify the git-tag, branch or commit hash +# +# BUILD_ALWAYS - If set, this dependency will always be built from source +# and will never be searched in the system. +# +# EXCLUDE_FROM_ALL - If set, targets of the dependency cmake subproject +# will not be included in the ALL target of the project. +# In particular the dependency will not be installed. + +if(NOT DEFINED Build_Deps) + set(Build_Deps "Never" CACHE STRING "Do we build dependencies from source? [Never/Always/IfNotFound]") +else() + set(Build_Deps_Opts "Never" "Always" "IfNotFound") + if(NOT ${Build_Deps} IN_LIST Build_Deps_Opts) + message(FATAL_ERROR "Build_Deps option should be either 'Never', 'Always' or 'IfNotFound'") + endif() + set(Build_Deps ${Build_Deps} CACHE STRING "Do we build dependencies from source? 
[Never/Always/IfNotFound]") + if(NOT IS_SUBPROJECT AND NOT Build_Deps STREQUAL "Always" AND (ASAN OR UBSAN)) + message(WARNING "For builds with llvm sanitizers (ASAN/UBSAN) it is recommended to use -DBuild_Deps=Always to avoid false positives.") + endif() +endif() + +# -- Cpp2Py -- +if(PythonSupport OR Build_Documentation) + external_dependency(Cpp2Py + GIT_REPO https://github.com/TRIQS/cpp2py + VERSION 2.0 + GIT_TAG master + BUILD_ALWAYS + EXCLUDE_FROM_ALL + ) +endif() + +# -- GTest -- +external_dependency(GTest + GIT_REPO https://github.com/google/googletest + GIT_TAG release-1.10.0 + BUILD_ALWAYS + EXCLUDE_FROM_ALL +) diff --git a/deps/external_dependency.cmake b/deps/external_dependency.cmake new file mode 100644 index 00000000..d29225eb --- /dev/null +++ b/deps/external_dependency.cmake @@ -0,0 +1,70 @@ +################################################################################### +# +# TRIQS_DFT_TOOLS: a Toolbox for Research in Interacting Quantum Systems +# +# Copyright (C) 2020 Simons Foundation +# authors: N. Wentzell +# +# TRIQS_DFT_TOOLS is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# TRIQS_DFT_TOOLS is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more +# details. +# +# You should have received a copy of the GNU General Public License along with +# TRIQS_DFT_TOOLS. If not, see . +# +################################################################################### + +function(external_dependency) + cmake_parse_arguments(ARG "EXCLUDE_FROM_ALL;BUILD_ALWAYS" "VERSION;GIT_REPO;GIT_TAG" "" ${ARGN}) + + # -- Was dependency already found? 
+ get_property(${ARGV0}_FOUND GLOBAL PROPERTY ${ARGV0}_FOUND) + if(${ARGV0}_FOUND) + message(STATUS "Dependency ${ARGV0} was already resolved.") + return() + endif() + + # -- Try to find package in system. + if(NOT ARG_BUILD_ALWAYS AND NOT Build_Deps STREQUAL "Always") + find_package(${ARGV0} ${ARG_VERSION} QUIET HINTS ${CMAKE_INSTALL_PREFIX}) + if(${ARGV0}_FOUND) + message(STATUS "Found dependency ${ARGV0} in system ${${ARGV0}_ROOT}") + return() + elseif(Build_Deps STREQUAL "Never") + message(FATAL_ERROR "Could not find dependency ${ARGV0} in system. Please install the dependency manually or use -DBuild_Deps=IfNotFound during cmake configuration to automatically build all dependencies that are not found.") + endif() + endif() + + # -- Build package from source + message(STATUS " =============== Configuring Dependency ${ARGV0} =============== ") + if(ARG_EXCLUDE_FROM_ALL) + set(subdir_opts EXCLUDE_FROM_ALL) + set(Build_Tests OFF) + set(Build_Documentation OFF) + endif() + if(IS_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/${ARGV0}) + message(STATUS "Found sources for dependency ${ARGV0} at ${CMAKE_CURRENT_SOURCE_DIR}/${ARGV0}") + add_subdirectory(${ARGV0} ${subdir_opts}) + elseif(ARG_GIT_REPO) + set(bin_dir ${CMAKE_CURRENT_BINARY_DIR}/${ARGV0}) + set(src_dir ${bin_dir}_src) + if(NOT IS_DIRECTORY ${src_dir}) + if(ARG_GIT_TAG) + set(clone_opts --branch ${ARG_GIT_TAG} -c advice.detachedHead=false) + endif() + execute_process(COMMAND git clone ${ARG_GIT_REPO} --depth 1 ${clone_opts} ${src_dir}) + endif() + add_subdirectory(${src_dir} ${bin_dir} ${subdir_opts}) + else() + message(FATAL_ERROR "Could not find or build dependency ${ARGV0}") + endif() + message(STATUS " =============== End ${ARGV0} Configuration =============== ") + set_property(GLOBAL PROPERTY ${ARGV0}_FOUND TRUE) + +endfunction() diff --git a/doc/CMakeLists.txt b/doc/CMakeLists.txt index 0e137f0e..bde5a81c 100644 --- a/doc/CMakeLists.txt +++ b/doc/CMakeLists.txt @@ -1,23 +1,81 @@ -# generate the conf.py +# 
Generate the sphinx config file configure_file(${CMAKE_CURRENT_SOURCE_DIR}/conf.py.in ${CMAKE_CURRENT_BINARY_DIR}/conf.py @ONLY) +# ----------------------------------------------------------------------------- +# Create an optional target that allows us to regenerate the C++ doc with c++2rst +# ----------------------------------------------------------------------------- +add_custom_target(${PROJECT_NAME}_docs_cpp2rst) +include(${PROJECT_SOURCE_DIR}/share/cmake/extract_flags.cmake) +extract_flags(${PROJECT_NAME}_c BUILD_INTERFACE) +separate_arguments(${PROJECT_NAME}_c_CXXFLAGS) +macro(generate_docs header_file) + add_custom_command( + TARGET ${PROJECT_NAME}_docs_cpp2rst + COMMAND rm -rf ${CMAKE_CURRENT_SOURCE_DIR}/cpp2rst_generated + COMMAND + PYTHONPATH=${CPP2PY_BINARY_DIR}:$ENV{PYTHONPATH} + PATH=${CPP2PY_BINARY_DIR}/bin:${CPP2PY_ROOT}/bin:$ENV{PATH} + c++2rst + ${header_file} + -N ${PROJECT_NAME} + --output_directory ${CMAKE_CURRENT_SOURCE_DIR}/cpp2rst_generated + -I${PROJECT_SOURCE_DIR}/c++ + --cxxflags="${${PROJECT_NAME}_c_CXXFLAGS}" + ) +endmacro(generate_docs) + +generate_docs(${PROJECT_SOURCE_DIR}/c++/${PROJECT_NAME}/${PROJECT_NAME}.hpp) + +# -------------------------------------------------------- +# Build & Run the C++ doc examples and capture the output +# -------------------------------------------------------- + +add_custom_target(${PROJECT_NAME}_docs_example_output) +file(GLOB_RECURSE ExampleList RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} *.cpp) +foreach(example ${ExampleList}) + get_filename_component(f ${example} NAME_WE) + get_filename_component(d ${example} DIRECTORY) + add_executable(${PROJECT_NAME}_doc_${f} EXCLUDE_FROM_ALL ${example}) + set_property(TARGET ${PROJECT_NAME}_doc_${f} PROPERTY RUNTIME_OUTPUT_DIRECTORY ${d}) + target_link_libraries(${PROJECT_NAME}_doc_${f} triqs) + add_custom_command(TARGET ${PROJECT_NAME}_doc_${f} + COMMAND ${PROJECT_NAME}_doc_${f} > ${CMAKE_CURRENT_SOURCE_DIR}/${d}/${f}.output 2>/dev/null + WORKING_DIRECTORY ${d} + ) + 
add_dependencies(${PROJECT_NAME}_docs_example_output ${PROJECT_NAME}_doc_${f}) +endforeach() + # --------------------------------- # Top Sphinx target # --------------------------------- -# Sources -file(GLOB_RECURSE sources *.rst) +if(NOT DEFINED SPHINXBUILD_EXECUTABLE) + find_package(Sphinx) +endif() -# create documentation target -set(sphinx_top ${CMAKE_CURRENT_BINARY_DIR}/html/index.html) -add_custom_command(OUTPUT ${sphinx_top} DEPENDS ${sources} - COMMAND ${TRIQS_SPHINXBUILD_EXECUTABLE} -c . -j8 -b html ${CMAKE_CURRENT_SOURCE_DIR} html) -add_custom_target(doc_sphinx ALL DEPENDS ${sphinx_top} ${CMAKE_CURRENT_BINARY_DIR}) +# Sphinx has internal caching, always run it +add_custom_target(${PROJECT_NAME}_docs_sphinx ALL) +add_custom_command( + TARGET ${PROJECT_NAME}_docs_sphinx + COMMAND PYTHONPATH=${PROJECT_BINARY_DIR}/python:$ENV{PYTHONPATH} ${SPHINXBUILD_EXECUTABLE} -c . -j8 -b html ${CMAKE_CURRENT_SOURCE_DIR} html +) + +option(Sphinx_Only "When building the documentation, skip the Python Modules and the generation of C++ Api and example outputs" OFF) +if(NOT Sphinx_Only) + # Autodoc usage requires the python modules to be built first + get_property(CPP2PY_MODULES_LIST GLOBAL PROPERTY CPP2PY_MODULES_LIST) + if(CPP2PY_MODULES_LIST) + add_dependencies(${PROJECT_NAME}_docs_sphinx ${CPP2PY_MODULES_LIST}) + endif() + + # Generation of C++ Api and Example Outputs + add_dependencies(${PROJECT_NAME}_docs_sphinx ${PROJECT_NAME}_docs_cpp2rst ${PROJECT_NAME}_docs_example_output) +endif() # --------------------------------- # Install # --------------------------------- -install(DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/html/ COMPONENT documentation DESTINATION share/doc/triqs_dft_tools - FILES_MATCHING - REGEX "\\.(html|pdf|png|gif|jpg|js|xsl|css|py|txt|inv|bib|cfg)$" - PATTERN "_*" - ) +install(DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/html/ COMPONENT documentation DESTINATION share/doc/${PROJECT_NAME} + FILES_MATCHING + REGEX 
"\\.(html|pdf|png|gif|jpg|svg|js|xsl|css|py|txt|inv|bib)$" + PATTERN "_*" +) diff --git a/doc/_templates/sideb.html b/doc/_templates/sideb.html index 24d2ffc2..563bba82 100644 --- a/doc/_templates/sideb.html +++ b/doc/_templates/sideb.html @@ -10,5 +10,5 @@


- Visit the project on GitHub + Visit the project on GitHub

diff --git a/doc/changelog.rst b/doc/changelog.rst index b1e75038..7465fef9 100644 --- a/doc/changelog.rst +++ b/doc/changelog.rst @@ -3,6 +3,6 @@ Changelog ========= -This document describes the main changes in DFTTools. +This document describes the main changes in TRIQS_DFT_TOOLS. .. include:: ChangeLog.md diff --git a/doc/conf.py.in b/doc/conf.py.in index 90497936..4a931db2 100644 --- a/doc/conf.py.in +++ b/doc/conf.py.in @@ -3,34 +3,36 @@ # TRIQS documentation build configuration file import sys -sys.path.insert(0, "@TRIQS_SPHINXEXT_PATH@/numpydoc") +sys.path.insert(0, "@CMAKE_CURRENT_SOURCE_DIR@/sphinxext") +sys.path.insert(0, "@CMAKE_CURRENT_SOURCE_DIR@/sphinxext/numpydoc") sys.path.insert(0, "@CMAKE_BINARY_DIR@/python") extensions = ['sphinx.ext.autodoc', 'sphinx.ext.mathjax', 'sphinx.ext.intersphinx', - 'matplotlib.sphinxext.plot_directive', 'sphinx.ext.doctest', 'sphinx.ext.todo', 'sphinx.ext.viewcode', 'sphinx.ext.autosummary', - 'nbsphinx', - 'numpydoc', 'sphinx.ext.githubpages', - 'IPython.sphinxext.ipython_console_highlighting' - ] + 'sphinx_autorun', + 'matplotlib.sphinxext.plot_directive', + 'nbsphinx', + 'IPython.sphinxext.ipython_console_highlighting', + 'numpydoc'] source_suffix = '.rst' -project = u'TRIQS DFTTools' -copyright = u'2011-2019' -version = '@DFT_TOOLS_VERSION@' +project = '@PROJECT_NAME@' +version = '@PROJECT_VERSION@' -mathjax_path = "@TRIQS_MATHJAX_PATH@/MathJax.js?config=default" -templates_path = ['@CMAKE_SOURCE_DIR@/doc/_templates'] +copyright = '2011-2020' + +mathjax_path = "https://raw.githubusercontent.com/mathjax/MathJax/2.7.8/MathJax.js" +templates_path = ['@CMAKE_CURRENT_SOURCE_DIR@/_templates'] html_theme = 'triqs' -html_theme_path = ['@TRIQS_THEMES_PATH@'] +html_theme_path = ['@CMAKE_CURRENT_SOURCE_DIR@/themes'] html_show_sphinx = False html_context = {'header_title': 'dft tools', 'header_subtitle': 'connecting TRIQS to DFT packages', @@ -39,9 +41,9 @@ html_context = {'header_title': 'dft tools', ['Tutorials', 
'tutorials'], ['Issues', 'issues'], ['About DFTTools', 'about']]} -html_static_path = ['@CMAKE_SOURCE_DIR@/doc/_static'] +html_static_path = ['@CMAKE_CURRENT_SOURCE_DIR@/_static'] html_sidebars = {'index': ['sideb.html', 'searchbox.html']} -htmlhelp_basename = 'TRIQSDFTToolsdoc' +htmlhelp_basename = '@PROJECT_NAME@doc' -intersphinx_mapping = {'python': ('http://docs.python.org/2.7', None), 'triqslibs': ('http://triqs.github.io/triqs/latest', None), 'triqscthyb': ('https://triqs.github.io/cthyb/latest', None)} +intersphinx_mapping = {'python': ('https://docs.python.org/3.8', None), 'triqslibs': ('https://triqs.github.io/triqs/latest', None), 'triqscthyb': ('https://triqs.github.io/cthyb/latest', None)} diff --git a/doc/contents.rst b/doc/contents.rst index 4f990b6a..7b55d920 100644 --- a/doc/contents.rst +++ b/doc/contents.rst @@ -7,6 +7,7 @@ Table of contents index install documentation + tutorials issues changelog about diff --git a/doc/guide/Sr2RuO4/calculate_dos_wannier_basis.py b/doc/guide/Sr2RuO4/calculate_dos_wannier_basis.py index 7cb8462a..98c395d1 100644 --- a/doc/guide/Sr2RuO4/calculate_dos_wannier_basis.py +++ b/doc/guide/Sr2RuO4/calculate_dos_wannier_basis.py @@ -1,4 +1,4 @@ -from triqs_dft_tools.converters.wien2k_converter import Wien2kConverter +from triqs_dft_tools.converters.wien2k import Wien2kConverter from triqs_dft_tools import SumkDFTTools filename = 'Sr2RuO4' diff --git a/doc/guide/analysis.rst b/doc/guide/analysis.rst index 90aca2bd..b511be78 100644 --- a/doc/guide/analysis.rst +++ b/doc/guide/analysis.rst @@ -37,7 +37,7 @@ class:: Note that all routines available in :class:`SumkDFT ` are also available here. If required, we have to load and initialise the real-frequency self energy. 
Most conveniently, -you have your self energy already stored as a real-frequency :class:`BlockGf ` object +you have your self energy already stored as a real-frequency :class:`BlockGf ` object in a hdf5 file:: with HDFArchive('case.h5', 'r') as ar: @@ -45,10 +45,10 @@ in a hdf5 file:: You may also have your self energy stored in text files. For this case the :ref:`TRIQS ` library offers the function :meth:`read_gf_from_txt`, which is able to load the data from text files of one Green function block -into a real-frequency :class:`ReFreqGf ` object. Loading each block separately and -building up a :class:´BlockGf ´ is done with:: +into a real-frequency :class:`ReFreqGf ` object. Loading each block separately and +building up a :class:´BlockGf ´ is done with:: - from pytriqs.gf.tools import * + from triqs.gf.tools import * # get block names n_list = [n for n,nl in SK.gf_struct_solver[0].iteritems()] # load sigma for each block - in this example sigma is composed of 1x1 blocks @@ -128,7 +128,7 @@ Momentum resolved spectral function (with real-frequency self energy) Another quantity of interest is the momentum-resolved spectral function, which can directly be compared to ARPES experiments. First we have to execute `lapw1`, `lapw2 -almd` and :program:`dmftproj` with the `-band` -option and use the :meth:`convert_bands_input ` +option and use the :meth:`convert_bands_input ` routine, which converts the required files (for a more detailed description see :ref:`conversion`). 
The spectral function is then calculated by typing:: SK.spaghettis(broadening=0.01,plot_shift=0.0,plot_range=None,ishell=None,save_to_file='Akw_') diff --git a/doc/guide/blockstructure.rst b/doc/guide/blockstructure.rst index 7d99ba24..2f51d937 100644 --- a/doc/guide/blockstructure.rst +++ b/doc/guide/blockstructure.rst @@ -21,7 +21,7 @@ We can create a simple :class:`BlockStructure ` in the same fashion as with the other `DFTTools` converters:: - from triqs_dft_tools.converters.vasp_converter import * + from triqs_dft_tools.converters.vasp import * Converter = VaspConverter(filename = 'vasp') Converter.convert_dft_input() diff --git a/doc/guide/conv_wien2k.rst b/doc/guide/conv_wien2k.rst index 61d387ab..342726b5 100644 --- a/doc/guide/conv_wien2k.rst +++ b/doc/guide/conv_wien2k.rst @@ -94,9 +94,9 @@ directory name): Now we convert these files into an hdf5 file that can be used for the DMFT calculations. For this purpose we -use the python module :class:`Wien2kConverter `. It is initialized as:: +use the python module :class:`Wien2kConverter `. It is initialized as:: - from triqs_dft_tools.converters.wien2k_converter import * + from triqs_dft_tools.converters.wien2k import * Converter = Wien2kConverter(filename = case) The only necessary parameter to this construction is the parameter `filename`. 
diff --git a/doc/guide/dftdmft_singleshot.rst b/doc/guide/dftdmft_singleshot.rst index 1ec43e0a..57610af9 100644 --- a/doc/guide/dftdmft_singleshot.rst +++ b/doc/guide/dftdmft_singleshot.rst @@ -80,8 +80,8 @@ for :emphasis:`use_dc_formula` are: At the end of the calculation, we can save the Green function and self energy into a file:: - from pytriqs.archive import HDFArchive - import pytriqs.utility.mpi as mpi + from h5 import HDFArchive + import triqs.utility.mpi as mpi if mpi.is_master_node(): ar = HDFArchive("YourDFTDMFTcalculation.h5",'w') ar["G"] = S.G_iw diff --git a/doc/guide/transport.rst b/doc/guide/transport.rst index 2aa1d1e5..69db4753 100644 --- a/doc/guide/transport.rst +++ b/doc/guide/transport.rst @@ -52,13 +52,13 @@ real-frequency self energy. it is crucial to perform the analytic continuation in such a way that the real-frequency self energy is accurate around the Fermi energy as low-energy features strongly influence the final results. -Besides the self energy the Wien2k files read by the transport converter (:meth:`convert_transport_input `) are: +Besides the self energy the Wien2k files read by the transport converter (:meth:`convert_transport_input `) are: * :file:`.struct`: The lattice constants specified in the struct file are used to calculate the unit cell volume. * :file:`.outputs`: In this file the k-point symmetries are given. * :file:`.oubwin`: Contains the indices of the bands within the projected subspace (written by :program:`dmftproj`) for each k-point. * :file:`.pmat`: This file is the output of the Wien2k optics package and contains the velocity (momentum) matrix elements between all bands in the desired energy window for each k-point. How to use the optics package is described below. - * :file:`.h5`: The hdf5 archive has to be present and should contain the dft_input subgroup. Otherwise :meth:`convert_dft_input ` needs to be called before :meth:`convert_transport_input `. 
+ * :file:`.h5`: The hdf5 archive has to be present and should contain the dft_input subgroup. Otherwise :meth:`convert_dft_input ` needs to be called before :meth:`convert_transport_input `. Wien2k optics package @@ -84,7 +84,7 @@ Using the transport code First we have to read the Wien2k files and store the relevant information in the hdf5 archive:: - from triqs_dft_tools.converters.wien2k_converter import * + from triqs_dft_tools.converters.wien2k import * from triqs_dft_tools.sumk_dft_tools import * Converter = Wien2kConverter(filename='case', repacking=True) @@ -92,7 +92,7 @@ First we have to read the Wien2k files and store the relevant information in the SK = SumkDFTTools(hdf_file='case.h5', use_dft_blocks=True) -The converter :meth:`convert_transport_input ` +The converter :meth:`convert_transport_input ` reads the required data of the Wien2k output and stores it in the `dft_transp_input` subgroup of your hdf file. Additionally we need to read and set the self energy, the chemical potential and the double counting:: diff --git a/doc/install.rst b/doc/install.rst index 8d2a1bc0..0def24bd 100644 --- a/doc/install.rst +++ b/doc/install.rst @@ -37,34 +37,41 @@ Compiling DFTTools from source Prerequisites ------------- -#. The :ref:`TRIQS ` toolbox. - +#. The :ref:`TRIQS ` library, see :ref:`TRIQS installation instruction `. + In the following, we assume that TRIQS is installed in the directory ``path_to_triqs``. #. Likely, you will also need at least one impurity solver, e.g. the :ref:`CTHYB solver `. -Installation steps +Installation steps ------------------ -#. Download the source code by cloning the ``TRIQS/dft_tools`` repository from GitHub:: - - $ git clone https://github.com/TRIQS/dft_tools.git dft_tools.src - +#. Download the source code of the latest stable version by cloning the ``TRIQS/dft_tools`` repository from GitHub:: + + $ git clone https://github.com/TRIQS/dft_tools dft_tools.src + +#. 
Make sure that all additional dependencies are installed on your system and available in your environment. + Alternatively build the dependencies from source instead with:: + + $ (cd deps && ./download.sh) + + In this case they will be installed together with your application. + #. Create and move to a new directory where you will compile the code:: - + $ mkdir dft_tools.build && cd dft_tools.build - + #. Ensure that your shell contains the TRIQS environment variables by sourcing the ``triqsvars.sh`` file from your TRIQS installation:: $ source path_to_triqs/share/triqsvarsh.sh - + #. In the build directory call cmake, including any additional custom CMake options, see below:: $ cmake ../dft_tools.src - -#. Compile the code, run the tests and install the application:: - - $ make - $ make test - $ make install + +#. Compile the code, run the tests and install the application:: + + $ make + $ make test + $ make install Installation steps for the use with WIEN2K version 14.2 and older @@ -114,37 +121,41 @@ Finally, you will have to change the calls to :program:`python_with_DMFT` to your :program:`python` installation in the Wien2k :file:`path_to_Wien2k/run*` files. -Version compatibility +Version compatibility --------------------- - -Be careful that the version of the TRIQS library and of the :program:`DFTTools` must be -compatible (more information on the :ref:`TRIQS website `. -If you want to use a version of the :program:`DFTTools` that is not the latest one, go -into the directory with the sources and look at all available versions:: - - $ cd src && git tag - -Checkout the version of the code that you want, for instance:: - - $ git co 2.1 - -Then follow the steps 2 to 5 described above to compile the code. + +Keep in mind that the version of ``dft_tools`` must be compatible with your TRIQS library version, +see :ref:`TRIQS website `. +In particular the Major and Minor Version numbers have to be the same. 
+To use a particular version, go into the directory with the sources, and look at all available versions:: + + $ cd dft_tools.src && git tag + +Checkout the version of the code that you want:: + + $ git checkout 2.1.0 + +and follow steps 2 to 4 above to compile the code. Custom CMake options -------------------- -Functionality of ``dft_tools`` can be tweaked using extra compile-time options passed to CMake:: +The compilation of ``dft_tools`` can be configured using CMake-options:: - cmake -DOPTION1=value1 -DOPTION2=value2 ... ../dft_tools.src + cmake ../dft_tools.src -DOPTION1=value1 -DOPTION2=value2 ... -+---------------------------------------------------------------+-----------------------------------------------+ -| Options | Syntax | -+===============================================================+===============================================+ -| Disable testing (not recommended) | -DBuild_Tests=OFF | -+---------------------------------------------------------------+-----------------------------------------------+ -| Build the documentation locally | -DBuild_Documentation=ON | -+---------------------------------------------------------------+-----------------------------------------------+ -| Check test coverage when testing | -DTEST_COVERAGE=ON | -| (run ``make coverage`` to show the results; requires the | | -| python ``coverage`` package) | | -+---------------------------------------------------------------+-----------------------------------------------+ ++-----------------------------------------------------------------+-----------------------------------------------+ +| Options | Syntax | ++=================================================================+===============================================+ +| Specify an installation path other than path_to_triqs | -DCMAKE_INSTALL_PREFIX=path_to_dft_tools| ++-----------------------------------------------------------------+-----------------------------------------------+ +| Build in Debugging Mode | 
-DCMAKE_BUILD_TYPE=Debug | ++-----------------------------------------------------------------+-----------------------------------------------+ +| Disable testing (not recommended) | -DBuild_Tests=OFF | ++-----------------------------------------------------------------+-----------------------------------------------+ +| Build the documentation | -DBuild_Documentation=ON | ++-----------------------------------------------------------------+-----------------------------------------------+ +| Check test coverage when testing | -DTEST_COVERAGE=ON | +| (run ``make coverage`` to show the results; requires the | | +| python ``coverage`` package) | | ++-----------------------------------------------------------------+-----------------------------------------------+ diff --git a/doc/issues.rst b/doc/issues.rst index efe3e9ca..d89de6d2 100644 --- a/doc/issues.rst +++ b/doc/issues.rst @@ -1,13 +1,14 @@ +.. _issues: Reporting issues ================ Please report all problems and bugs directly at the github issue page -``_. In order to make it easier -for us to solve the issue please follow these guidelines: +``_. In order to make it easier for us +to solve the issue please follow these guidelines: #. In all cases specify which version of the application you are using. You can - find the version number in the file :file:`README.txt` at the root of the + find the version number in the file :file:`CMakeLists.txt` at the root of the application sources. #. If you have a problem during the installation, give us information about diff --git a/doc/reference/converters.rst b/doc/reference/converters.rst index 122dbd9c..ba448810 100644 --- a/doc/reference/converters.rst +++ b/doc/reference/converters.rst @@ -5,20 +5,20 @@ Converters Wien2k Converter ---------------- -.. autoclass:: triqs_dft_tools.converters.wien2k_converter.Wien2kConverter +.. 
autoclass:: triqs_dft_tools.converters.wien2k.Wien2kConverter :members: :special-members: :show-inheritance: H(k) Converter -------------- -.. autoclass:: triqs_dft_tools.converters.hk_converter.HkConverter +.. autoclass:: triqs_dft_tools.converters.hk.HkConverter :members: :special-members: Wannier90 Converter ------------------- -.. autoclass:: triqs_dft_tools.converters.wannier90_converter.Wannier90Converter +.. autoclass:: triqs_dft_tools.converters.wannier90.Wannier90Converter :members: :special-members: @@ -53,7 +53,7 @@ PLOVASP reference, the classes / functions are sorted the way the converter uses VASP Converter ------------------- .. _refVASPconverter: -.. autoclass:: triqs_dft_tools.converters.vasp_converter.VaspConverter +.. autoclass:: triqs_dft_tools.converters.vasp.VaspConverter :members: :special-members: diff --git a/doc/sphinxext/numpydoc/apigen.py b/doc/sphinxext/numpydoc/apigen.py new file mode 100644 index 00000000..2619fbbd --- /dev/null +++ b/doc/sphinxext/numpydoc/apigen.py @@ -0,0 +1,427 @@ +"""Attempt to generate templates for module reference with Sphinx + +XXX - we exclude extension modules + +To include extension modules, first identify them as valid in the +``_uri2path`` method, then handle them in the ``_parse_module`` script. + +We get functions and classes by parsing the text of .py files. +Alternatively we could import the modules for discovery, and we'd have +to do that for extension modules. This would involve changing the +``_parse_module`` method to work via import and introspection, and +might involve changing ``discover_modules`` (which determines which +files are modules, and therefore which module URIs will be passed to +``_parse_module``). + +NOTE: this is a modified version of a script originally shipped with the +PyMVPA project, which we've adapted for NIPY use. 
PyMVPA is an MIT-licensed +project.""" + +# Stdlib imports +import os +import re + +# Functions and classes +class ApiDocWriter: + ''' Class for automatic detection and parsing of API docs + to Sphinx-parsable reST format''' + + # only separating first two levels + rst_section_levels = ['*', '=', '-', '~', '^'] + + def __init__(self, + package_name, + rst_extension='.rst', + package_skip_patterns=None, + module_skip_patterns=None, + ): + ''' Initialize package for parsing + + Parameters + ---------- + package_name : string + Name of the top-level package. *package_name* must be the + name of an importable package + rst_extension : string, optional + Extension for reST files, default '.rst' + package_skip_patterns : None or sequence of {strings, regexps} + Sequence of strings giving URIs of packages to be excluded + Operates on the package path, starting at (including) the + first dot in the package path, after *package_name* - so, + if *package_name* is ``sphinx``, then ``sphinx.util`` will + result in ``.util`` being passed for searching by these + regexps. If is None, gives default. Default is: + ['\.tests$'] + module_skip_patterns : None or sequence + Sequence of strings giving URIs of modules to be excluded + Operates on the module name including preceding URI path, + back to the first dot after *package_name*. For example + ``sphinx.util.console`` results in the string to search of + ``.util.console`` + If is None, gives default. 
Default is: + ['\.setup$', '\._'] + ''' + if package_skip_patterns is None: + package_skip_patterns = ['\\.tests$'] + if module_skip_patterns is None: + module_skip_patterns = ['\\.setup$', '\\._'] + self.package_name = package_name + self.rst_extension = rst_extension + self.package_skip_patterns = package_skip_patterns + self.module_skip_patterns = module_skip_patterns + + def get_package_name(self): + return self._package_name + + def set_package_name(self, package_name): + ''' Set package_name + + >>> docwriter = ApiDocWriter('sphinx') + >>> import sphinx + >>> docwriter.root_path == sphinx.__path__[0] + True + >>> docwriter.package_name = 'docutils' + >>> import docutils + >>> docwriter.root_path == docutils.__path__[0] + True + ''' + # It's also possible to imagine caching the module parsing here + self._package_name = package_name + self.root_module = __import__(package_name) + self.root_path = self.root_module.__path__[0] + self.written_modules = None + + package_name = property(get_package_name, set_package_name, None, + 'get/set package_name') + + def _get_object_name(self, line): + ''' Get second token in line + >>> docwriter = ApiDocWriter('sphinx') + >>> docwriter._get_object_name(" def func(): ") + 'func' + >>> docwriter._get_object_name(" class Klass: ") + 'Klass' + >>> docwriter._get_object_name(" class Klass: ") + 'Klass' + ''' + name = line.split()[1].split('(')[0].strip() + # in case we have classes which are not derived from object + # ie. 
old style classes + return name.rstrip(':') + + def _uri2path(self, uri): + ''' Convert uri to absolute filepath + + Parameters + ---------- + uri : string + URI of python module to return path for + + Returns + ------- + path : None or string + Returns None if there is no valid path for this URI + Otherwise returns absolute file system path for URI + + Examples + -------- + >>> docwriter = ApiDocWriter('sphinx') + >>> import sphinx + >>> modpath = sphinx.__path__[0] + >>> res = docwriter._uri2path('sphinx.builder') + >>> res == os.path.join(modpath, 'builder.py') + True + >>> res = docwriter._uri2path('sphinx') + >>> res == os.path.join(modpath, '__init__.py') + True + >>> docwriter._uri2path('sphinx.does_not_exist') + + ''' + if uri == self.package_name: + return os.path.join(self.root_path, '__init__.py') + path = uri.replace('.', os.path.sep) + path = path.replace(self.package_name + os.path.sep, '') + path = os.path.join(self.root_path, path) + # XXX maybe check for extensions as well? + if os.path.exists(path + '.py'): # file + path += '.py' + elif os.path.exists(os.path.join(path, '__init__.py')): + path = os.path.join(path, '__init__.py') + else: + return None + return path + + def _path2uri(self, dirpath): + ''' Convert directory path to uri ''' + relpath = dirpath.replace(self.root_path, self.package_name) + if relpath.startswith(os.path.sep): + relpath = relpath[1:] + return relpath.replace(os.path.sep, '.') + + def _parse_module(self, uri): + ''' Parse module defined in *uri* ''' + filename = self._uri2path(uri) + if filename is None: + # nothing that we could handle here. 
+ return ([],[]) + f = open(filename, 'rt') + functions, classes = self._parse_lines(f) + f.close() + return functions, classes + + def _parse_lines(self, linesource): + ''' Parse lines of text for functions and classes ''' + functions = [] + classes = [] + for line in linesource: + if line.startswith('def ') and line.count('('): + # exclude private stuff + name = self._get_object_name(line) + if not name.startswith('_'): + functions.append(name) + elif line.startswith('class '): + # exclude private stuff + name = self._get_object_name(line) + if not name.startswith('_'): + classes.append(name) + else: + pass + functions.sort() + classes.sort() + return functions, classes + + def generate_api_doc(self, uri): + '''Make autodoc documentation template string for a module + + Parameters + ---------- + uri : string + python location of module - e.g 'sphinx.builder' + + Returns + ------- + S : string + Contents of API doc + ''' + # get the names of all classes and functions + functions, classes = self._parse_module(uri) + if not len(functions) and not len(classes): + print('WARNING: Empty -',uri) # dbg + return '' + + # Make a shorter version of the uri that omits the package name for + # titles + uri_short = re.sub(r'^%s\.' % self.package_name,'',uri) + + ad = '.. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n' + + chap_title = uri_short + ad += (chap_title+'\n'+ self.rst_section_levels[1] * len(chap_title) + + '\n\n') + + # Set the chapter title to read 'module' for all modules except for the + # main packages + if '.' in uri: + title = 'Module: :mod:`' + uri_short + '`' + else: + title = ':mod:`' + uri_short + '`' + ad += title + '\n' + self.rst_section_levels[2] * len(title) + + if len(classes): + ad += '\nInheritance diagram for ``%s``:\n\n' % uri + ad += '.. inheritance-diagram:: %s \n' % uri + ad += ' :parts: 3\n' + + ad += '\n.. automodule:: ' + uri + '\n' + ad += '\n.. 
currentmodule:: ' + uri + '\n' + multi_class = len(classes) > 1 + multi_fx = len(functions) > 1 + if multi_class: + ad += '\n' + 'Classes' + '\n' + \ + self.rst_section_levels[2] * 7 + '\n' + elif len(classes) and multi_fx: + ad += '\n' + 'Class' + '\n' + \ + self.rst_section_levels[2] * 5 + '\n' + for c in classes: + ad += '\n:class:`' + c + '`\n' \ + + self.rst_section_levels[multi_class + 2 ] * \ + (len(c)+9) + '\n\n' + ad += '\n.. autoclass:: ' + c + '\n' + # must NOT exclude from index to keep cross-refs working + ad += ' :members:\n' \ + ' :undoc-members:\n' \ + ' :show-inheritance:\n' \ + ' :inherited-members:\n' \ + '\n' \ + ' .. automethod:: __init__\n' + if multi_fx: + ad += '\n' + 'Functions' + '\n' + \ + self.rst_section_levels[2] * 9 + '\n\n' + elif len(functions) and multi_class: + ad += '\n' + 'Function' + '\n' + \ + self.rst_section_levels[2] * 8 + '\n\n' + for f in functions: + # must NOT exclude from index to keep cross-refs working + ad += '\n.. autofunction:: ' + uri + '.' 
+ f + '\n\n' + return ad + + def _survives_exclude(self, matchstr, match_type): + ''' Returns True if *matchstr* does not match patterns + + ``self.package_name`` removed from front of string if present + + Examples + -------- + >>> dw = ApiDocWriter('sphinx') + >>> dw._survives_exclude('sphinx.okpkg', 'package') + True + >>> dw.package_skip_patterns.append('^\\.badpkg$') + >>> dw._survives_exclude('sphinx.badpkg', 'package') + False + >>> dw._survives_exclude('sphinx.badpkg', 'module') + True + >>> dw._survives_exclude('sphinx.badmod', 'module') + True + >>> dw.module_skip_patterns.append('^\\.badmod$') + >>> dw._survives_exclude('sphinx.badmod', 'module') + False + ''' + if match_type == 'module': + patterns = self.module_skip_patterns + elif match_type == 'package': + patterns = self.package_skip_patterns + else: + raise ValueError('Cannot interpret match type "%s"' + % match_type) + # Match to URI without package name + L = len(self.package_name) + if matchstr[:L] == self.package_name: + matchstr = matchstr[L:] + for pat in patterns: + try: + pat.search + except AttributeError: + pat = re.compile(pat) + if pat.search(matchstr): + return False + return True + + def discover_modules(self): + ''' Return module sequence discovered from ``self.package_name`` + + + Parameters + ---------- + None + + Returns + ------- + mods : sequence + Sequence of module names within ``self.package_name`` + + Examples + -------- + >>> dw = ApiDocWriter('sphinx') + >>> mods = dw.discover_modules() + >>> 'sphinx.util' in mods + True + >>> dw.package_skip_patterns.append('\.util$') + >>> 'sphinx.util' in dw.discover_modules() + False + >>> + ''' + modules = [self.package_name] + # raw directory parsing + for dirpath, dirnames, filenames in os.walk(self.root_path): + # Check directory names for packages + root_uri = self._path2uri(os.path.join(self.root_path, + dirpath)) + for dirname in dirnames[:]: # copy list - we modify inplace + package_uri = '.'.join((root_uri, dirname)) + if 
(self._uri2path(package_uri) and + self._survives_exclude(package_uri, 'package')): + modules.append(package_uri) + else: + dirnames.remove(dirname) + # Check filenames for modules + for filename in filenames: + module_name = filename[:-3] + module_uri = '.'.join((root_uri, module_name)) + if (self._uri2path(module_uri) and + self._survives_exclude(module_uri, 'module')): + modules.append(module_uri) + return sorted(modules) + + def write_modules_api(self, modules,outdir): + # write the list + written_modules = [] + for m in modules: + api_str = self.generate_api_doc(m) + if not api_str: + continue + # write out to file + outfile = os.path.join(outdir, + m + self.rst_extension) + fileobj = open(outfile, 'wt') + fileobj.write(api_str) + fileobj.close() + written_modules.append(m) + self.written_modules = written_modules + + def write_api_docs(self, outdir): + """Generate API reST files. + + Parameters + ---------- + outdir : string + Directory name in which to store files + We create automatic filenames for each module + + Returns + ------- + None + + Notes + ----- + Sets self.written_modules to list of written modules + """ + if not os.path.exists(outdir): + os.mkdir(outdir) + # compose list of modules + modules = self.discover_modules() + self.write_modules_api(modules,outdir) + + def write_index(self, outdir, froot='gen', relative_to=None): + """Make a reST API index file from written files + + Parameters + ---------- + path : string + Filename to write index to + outdir : string + Directory to which to write generated index file + froot : string, optional + root (filename without extension) of filename to write to + Defaults to 'gen'. We add ``self.rst_extension``. + relative_to : string + path to which written filenames are relative. This + component of the written file path will be removed from + outdir, in the generated index. Default is None, meaning, + leave path as it is. 
+ """ + if self.written_modules is None: + raise ValueError('No modules written') + # Get full filename path + path = os.path.join(outdir, froot+self.rst_extension) + # Path written into index is relative to rootpath + if relative_to is not None: + relpath = outdir.replace(relative_to + os.path.sep, '') + else: + relpath = outdir + idx = open(path,'wt') + w = idx.write + w('.. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n') + w('.. toctree::\n\n') + for f in self.written_modules: + w(' %s\n' % os.path.join(relpath,f)) + idx.close() diff --git a/doc/sphinxext/numpydoc/docscrape.py b/doc/sphinxext/numpydoc/docscrape.py new file mode 100644 index 00000000..25489154 --- /dev/null +++ b/doc/sphinxext/numpydoc/docscrape.py @@ -0,0 +1,497 @@ +"""Extract reference documentation from the NumPy source tree. + +""" + +import inspect +import textwrap +import re +import pydoc +from io import StringIO +from warnings import warn +4 +class Reader: + """A line-based string reader. + + """ + def __init__(self, data): + """ + Parameters + ---------- + data : str + String with lines separated by '\n'. 
+ + """ + if isinstance(data,list): + self._str = data + else: + self._str = data.split('\n') # store string as list of lines + + self.reset() + + def __getitem__(self, n): + return self._str[n] + + def reset(self): + self._l = 0 # current line nr + + def read(self): + if not self.eof(): + out = self[self._l] + self._l += 1 + return out + else: + return '' + + def seek_next_non_empty_line(self): + for l in self[self._l:]: + if l.strip(): + break + else: + self._l += 1 + + def eof(self): + return self._l >= len(self._str) + + def read_to_condition(self, condition_func): + start = self._l + for line in self[start:]: + if condition_func(line): + return self[start:self._l] + self._l += 1 + if self.eof(): + return self[start:self._l+1] + return [] + + def read_to_next_empty_line(self): + self.seek_next_non_empty_line() + def is_empty(line): + return not line.strip() + return self.read_to_condition(is_empty) + + def read_to_next_unindented_line(self): + def is_unindented(line): + return (line.strip() and (len(line.lstrip()) == len(line))) + return self.read_to_condition(is_unindented) + + def peek(self,n=0): + if self._l + n < len(self._str): + return self[self._l + n] + else: + return '' + + def is_empty(self): + return not ''.join(self._str).strip() + + +class NumpyDocString: + def __init__(self,docstring): + docstring = textwrap.dedent(docstring).split('\n') + + self._doc = Reader(docstring) + self._parsed_data = { + 'Signature': '', + 'Summary': [''], + 'Extended Summary': [], + 'Parameters': [], + 'Returns': [], + 'Raises': [], + 'Warns': [], + 'Other Parameters': [], + 'Attributes': [], + 'Methods': [], + 'See Also': [], + 'Notes': [], + 'Warnings': [], + 'References': '', + 'Examples': '', + 'index': {} + } + + self._parse() + + def __getitem__(self,key): + return self._parsed_data[key] + + def __setitem__(self,key,val): + if key not in self._parsed_data: + warn("Unknown section %s" % key) + else: + self._parsed_data[key] = val + + def _is_at_section(self): + 
self._doc.seek_next_non_empty_line() + + if self._doc.eof(): + return False + + l1 = self._doc.peek().strip() # e.g. Parameters + + if l1.startswith('.. index::'): + return True + + l2 = self._doc.peek(1).strip() # ---------- or ========== + return l2.startswith('-'*len(l1)) or l2.startswith('='*len(l1)) + + def _strip(self,doc): + i = 0 + j = 0 + for i,line in enumerate(doc): + if line.strip(): break + + for j,line in enumerate(doc[::-1]): + if line.strip(): break + + return doc[i:len(doc)-j] + + def _read_to_next_section(self): + section = self._doc.read_to_next_empty_line() + + while not self._is_at_section() and not self._doc.eof(): + if not self._doc.peek(-1).strip(): # previous line was empty + section += [''] + + section += self._doc.read_to_next_empty_line() + + return section + + def _read_sections(self): + while not self._doc.eof(): + data = self._read_to_next_section() + name = data[0].strip() + + if name.startswith('..'): # index section + yield name, data[1:] + elif len(data) < 2: + yield StopIteration + else: + yield name, self._strip(data[2:]) + + def _parse_param_list(self,content): + r = Reader(content) + params = [] + while not r.eof(): + header = r.read().strip() + if ' : ' in header: + arg_name, arg_type = header.split(' : ')[:2] + else: + arg_name, arg_type = header, '' + + desc = r.read_to_next_unindented_line() + desc = dedent_lines(desc) + + params.append((arg_name,arg_type,desc)) + + return params + + + _name_rgx = re.compile(r"^\s*(:(?P\w+):`(?P[a-zA-Z0-9_.-]+)`|" + r" (?P[a-zA-Z0-9_.-]+))\s*", re.X) + def _parse_see_also(self, content): + """ + func_name : Descriptive text + continued text + another_func_name : Descriptive text + func_name1, func_name2, :meth:`func_name`, func_name3 + + """ + items = [] + + def parse_item_name(text): + """Match ':role:`name`' or 'name'""" + m = self._name_rgx.match(text) + if m: + g = m.groups() + if g[1] is None: + return g[3], None + else: + return g[2], g[1] + raise ValueError("%s is not a item name" % 
text) + + def push_item(name, rest): + if not name: + return + name, role = parse_item_name(name) + items.append((name, list(rest), role)) + del rest[:] + + current_func = None + rest = [] + + for line in content: + if not line.strip(): continue + + m = self._name_rgx.match(line) + if m and line[m.end():].strip().startswith(':'): + push_item(current_func, rest) + current_func, line = line[:m.end()], line[m.end():] + rest = [line.split(':', 1)[1].strip()] + if not rest[0]: + rest = [] + elif not line.startswith(' '): + push_item(current_func, rest) + current_func = None + if ',' in line: + for func in line.split(','): + push_item(func, []) + elif line.strip(): + current_func = line + elif current_func is not None: + rest.append(line.strip()) + push_item(current_func, rest) + return items + + def _parse_index(self, section, content): + """ + .. index: default + :refguide: something, else, and more + + """ + def strip_each_in(lst): + return [s.strip() for s in lst] + + out = {} + section = section.split('::') + if len(section) > 1: + out['default'] = strip_each_in(section[1].split(','))[0] + for line in content: + line = line.split(':') + if len(line) > 2: + out[line[1]] = strip_each_in(line[2].split(',')) + return out + + def _parse_summary(self): + """Grab signature (if given) and summary""" + if self._is_at_section(): + return + + summary = self._doc.read_to_next_empty_line() + summary_str = " ".join([s.strip() for s in summary]).strip() + if re.compile('^([\w., ]+=)?\s*[\w\.]+\(.*\)$').match(summary_str): + self['Signature'] = summary_str + if not self._is_at_section(): + self['Summary'] = self._doc.read_to_next_empty_line() + else: + self['Summary'] = summary + + if not self._is_at_section(): + self['Extended Summary'] = self._read_to_next_section() + + def _parse(self): + self._doc.reset() + self._parse_summary() + + for (section,content) in self._read_sections(): + if not section.startswith('..'): + section = ' '.join([s.capitalize() for s in section.split(' 
')]) + if section in ('Parameters', 'Attributes', 'Methods', + 'Returns', 'Raises', 'Warns'): + self[section] = self._parse_param_list(content) + elif section.startswith('.. index::'): + self['index'] = self._parse_index(section, content) + elif section == 'See Also': + self['See Also'] = self._parse_see_also(content) + else: + self[section] = content + + # string conversion routines + + def _str_header(self, name, symbol='-'): + return [name, len(name)*symbol] + + def _str_indent(self, doc, indent=4): + out = [] + for line in doc: + out += [' '*indent + line] + return out + + def _str_signature(self): + if self['Signature']: + return [self['Signature'].replace('*','\*')] + [''] + else: + return [''] + + def _str_summary(self): + if self['Summary']: + return self['Summary'] + [''] + else: + return [] + + def _str_extended_summary(self): + if self['Extended Summary']: + return self['Extended Summary'] + [''] + else: + return [] + + def _str_param_list(self, name): + out = [] + if self[name]: + out += self._str_header(name) + for param,param_type,desc in self[name]: + out += ['%s : %s' % (param, param_type)] + out += self._str_indent(desc) + out += [''] + return out + + def _str_section(self, name): + out = [] + if self[name]: + out += self._str_header(name) + out += self[name] + out += [''] + return out + + def _str_see_also(self, func_role): + if not self['See Also']: return [] + out = [] + out += self._str_header("See Also") + last_had_desc = True + for func, desc, role in self['See Also']: + if role: + link = ':%s:`%s`' % (role, func) + elif func_role: + link = ':%s:`%s`' % (func_role, func) + else: + link = "`%s`_" % func + if desc or last_had_desc: + out += [''] + out += [link] + else: + out[-1] += ", %s" % link + if desc: + out += self._str_indent([' '.join(desc)]) + last_had_desc = True + else: + last_had_desc = False + out += [''] + return out + + def _str_index(self): + idx = self['index'] + out = [] + out += ['.. 
index:: %s' % idx.get('default','')] + for section, references in idx.items(): + if section == 'default': + continue + out += [' :%s: %s' % (section, ', '.join(references))] + return out + + def __str__(self, func_role=''): + out = [] + out += self._str_signature() + out += self._str_summary() + out += self._str_extended_summary() + for param_list in ('Parameters','Returns','Raises'): + out += self._str_param_list(param_list) + out += self._str_section('Warnings') + out += self._str_see_also(func_role) + for s in ('Notes','References','Examples'): + out += self._str_section(s) + out += self._str_index() + return '\n'.join(out) + + +def indent(str,indent=4): + indent_str = ' '*indent + if str is None: + return indent_str + lines = str.split('\n') + return '\n'.join(indent_str + l for l in lines) + +def dedent_lines(lines): + """Deindent a list of lines maximally""" + return textwrap.dedent("\n".join(lines)).split("\n") + +def header(text, style='-'): + return text + '\n' + style*len(text) + '\n' + + +class FunctionDoc(NumpyDocString): + def __init__(self, func, role='func', doc=None): + self._f = func + self._role = role # e.g. 
"func" or "meth" + if doc is None: + doc = inspect.getdoc(func) or '' + try: + NumpyDocString.__init__(self, doc) + except ValueError as e: + print('*'*78) + print("ERROR: '%s' while parsing `%s`" % (e, self._f)) + print('*'*78) + #print "Docstring follows:" + #print doclines + #print '='*78 + + if not self['Signature']: + func, func_name = self.get_func() + try: + # try to read signature + argspec = inspect.getargspec(func) + argspec = inspect.formatargspec(*argspec) + argspec = argspec.replace('*','\*') + signature = '%s%s' % (func_name, argspec) + except TypeError as e: + signature = '%s()' % func_name + self['Signature'] = signature + + def get_func(self): + func_name = getattr(self._f, '__name__', self.__class__.__name__) + if inspect.isclass(self._f): + func = getattr(self._f, '__call__', self._f.__init__) + else: + func = self._f + return func, func_name + + def __str__(self): + out = '' + + func, func_name = self.get_func() + signature = self['Signature'].replace('*', '\*') + + roles = {'func': 'function', + 'meth': 'method'} + + if self._role: + if self._role not in roles: + print("Warning: invalid role %s" % self._role) + out += '.. %s:: %s\n \n\n' % (roles.get(self._role,''), + func_name) + + out += super(FunctionDoc, self).__str__(func_role=self._role) + return out + + +class ClassDoc(NumpyDocString): + def __init__(self,cls,modulename='',func_doc=FunctionDoc,doc=None): + if not inspect.isclass(cls): + raise ValueError("Initialise using a class. Got %r" % cls) + self._cls = cls + + if modulename and not modulename.endswith('.'): + modulename += '.' 
+ self._mod = modulename + self._name = cls.__name__ + self._func_doc = func_doc + + if doc is None: + doc = pydoc.getdoc(cls) + + NumpyDocString.__init__(self, doc) + + @property + def methods(self): + return [name for name,func in inspect.getmembers(self._cls) + if not name.startswith('_') and callable(func)] + + def __str__(self): + out = '' + out += super(ClassDoc, self).__str__() + out += "\n\n" + + #for m in self.methods: + # print "Parsing `%s`" % m + # out += str(self._func_doc(getattr(self._cls,m), 'meth')) + '\n\n' + # out += '.. index::\n single: %s; %s\n\n' % (self._name, m) + + return out + + diff --git a/doc/sphinxext/numpydoc/docscrape_sphinx.py b/doc/sphinxext/numpydoc/docscrape_sphinx.py new file mode 100644 index 00000000..2660f14f --- /dev/null +++ b/doc/sphinxext/numpydoc/docscrape_sphinx.py @@ -0,0 +1,136 @@ +import re, inspect, textwrap, pydoc +from docscrape import NumpyDocString, FunctionDoc, ClassDoc + +class SphinxDocString(NumpyDocString): + # string conversion routines + def _str_header(self, name, symbol='`'): + return ['.. 
rubric:: ' + name, ''] + + def _str_field_list(self, name): + return [':' + name + ':'] + + def _str_indent(self, doc, indent=4): + out = [] + for line in doc: + out += [' '*indent + line] + return out + + def _str_signature(self): + return [''] + if self['Signature']: + return ['``%s``' % self['Signature']] + [''] + else: + return [''] + + def _str_summary(self): + return self['Summary'] + [''] + + def _str_extended_summary(self): + return self['Extended Summary'] + [''] + + def _str_param_list(self, name): + out = [] + if self[name]: + out += self._str_field_list(name) + out += [''] + for param,param_type,desc in self[name]: + out += self._str_indent(['**%s** : %s' % (param.strip(), + param_type)]) + out += [''] + out += self._str_indent(desc,8) + out += [''] + return out + + def _str_section(self, name): + out = [] + if self[name]: + out += self._str_header(name) + out += [''] + content = textwrap.dedent("\n".join(self[name])).split("\n") + out += content + out += [''] + return out + + def _str_see_also(self, func_role): + out = [] + if self['See Also']: + see_also = super(SphinxDocString, self)._str_see_also(func_role) + out = ['.. seealso::', ''] + out += self._str_indent(see_also[2:]) + return out + + def _str_warnings(self): + out = [] + if self['Warnings']: + out = ['.. warning::', ''] + out += self._str_indent(self['Warnings']) + return out + + def _str_index(self): + idx = self['index'] + out = [] + if len(idx) == 0: + return out + + out += ['.. 
index:: %s' % idx.get('default','')] + for section, references in idx.items(): + if section == 'default': + continue + elif section == 'refguide': + out += [' single: %s' % (', '.join(references))] + else: + out += [' %s: %s' % (section, ','.join(references))] + return out + + def _str_references(self): + out = [] + if self['References']: + out += self._str_header('References') + if isinstance(self['References'], str): + self['References'] = [self['References']] + out.extend(self['References']) + out += [''] + return out + + def __str__(self, indent=0, func_role="obj"): + out = [] + out += self._str_signature() + out += self._str_index() + [''] + out += self._str_summary() + out += self._str_extended_summary() + for param_list in ('Parameters', 'Attributes', 'Methods', + 'Returns','Raises'): + out += self._str_param_list(param_list) + out += self._str_warnings() + out += self._str_see_also(func_role) + out += self._str_section('Notes') + out += self._str_references() + out += self._str_section('Examples') + out = self._str_indent(out,indent) + return '\n'.join(out) + +class SphinxFunctionDoc(SphinxDocString, FunctionDoc): + pass + +class SphinxClassDoc(SphinxDocString, ClassDoc): + pass + +def get_doc_object(obj, what=None, doc=None): + if what is None: + if inspect.isclass(obj): + what = 'class' + elif inspect.ismodule(obj): + what = 'module' + elif callable(obj): + what = 'function' + else: + what = 'object' + if what == 'class': + return SphinxClassDoc(obj, '', func_doc=SphinxFunctionDoc, doc=doc) + elif what in ('function', 'method'): + return SphinxFunctionDoc(obj, '', doc=doc) + else: + if doc is None: + doc = pydoc.getdoc(obj) + return SphinxDocString(doc) + diff --git a/doc/sphinxext/numpydoc/inheritance_diagram.py b/doc/sphinxext/numpydoc/inheritance_diagram.py new file mode 100644 index 00000000..a1ba7e7e --- /dev/null +++ b/doc/sphinxext/numpydoc/inheritance_diagram.py @@ -0,0 +1,407 @@ +""" +Defines a docutils directive for inserting inheritance 
diagrams. + +Provide the directive with one or more classes or modules (separated +by whitespace). For modules, all of the classes in that module will +be used. + +Example:: + + Given the following classes: + + class A: pass + class B(A): pass + class C(A): pass + class D(B, C): pass + class E(B): pass + + .. inheritance-diagram: D E + + Produces a graph like the following: + + A + / \ + B C + / \ / + E D + +The graph is inserted as a PNG+image map into HTML and a PDF in +LaTeX. +""" + +import inspect +import os +import re +import subprocess +try: + from hashlib import md5 +except ImportError: + from md5 import md5 + +from docutils.nodes import Body, Element +from docutils.parsers.rst import directives +from sphinx.roles import xfileref_role + +def my_import(name): + """Module importer - taken from the python documentation. + + This function allows importing names with dots in them.""" + + mod = __import__(name) + components = name.split('.') + for comp in components[1:]: + mod = getattr(mod, comp) + return mod + +class DotException(Exception): + pass + +class InheritanceGraph: + """ + Given a list of classes, determines the set of classes that + they inherit from all the way to the root "object", and then + is able to generate a graphviz dot graph from them. + """ + def __init__(self, class_names, show_builtins=False): + """ + *class_names* is a list of child classes to show bases from. + + If *show_builtins* is True, then Python builtins will be shown + in the graph. + """ + self.class_names = class_names + self.classes = self._import_classes(class_names) + self.all_classes = self._all_classes(self.classes) + if len(self.all_classes) == 0: + raise ValueError("No classes found for inheritance diagram") + self.show_builtins = show_builtins + + py_sig_re = re.compile(r'''^([\w.]*\.)? # class names + (\w+) \s* $ # optionally arguments + ''', re.VERBOSE) + + def _import_class_or_module(self, name): + """ + Import a class using its fully-qualified *name*. 
+ """ + try: + path, base = self.py_sig_re.match(name).groups() + except: + raise ValueError( + "Invalid class or module '%s' specified for inheritance diagram" % name) + fullname = (path or '') + base + path = (path and path.rstrip('.')) + if not path: + path = base + try: + module = __import__(path, None, None, []) + # We must do an import of the fully qualified name. Otherwise if a + # subpackage 'a.b' is requested where 'import a' does NOT provide + # 'a.b' automatically, then 'a.b' will not be found below. This + # second call will force the equivalent of 'import a.b' to happen + # after the top-level import above. + my_import(fullname) + + except ImportError: + raise ValueError( + "Could not import class or module '%s' specified for inheritance diagram" % name) + + try: + todoc = module + for comp in fullname.split('.')[1:]: + todoc = getattr(todoc, comp) + except AttributeError: + raise ValueError( + "Could not find class or module '%s' specified for inheritance diagram" % name) + + # If a class, just return it + if inspect.isclass(todoc): + return [todoc] + elif inspect.ismodule(todoc): + classes = [] + for cls in list(todoc.__dict__.values()): + if inspect.isclass(cls) and cls.__module__ == todoc.__name__: + classes.append(cls) + return classes + raise ValueError( + "'%s' does not resolve to a class or module" % name) + + def _import_classes(self, class_names): + """ + Import a list of classes. + """ + classes = [] + for name in class_names: + classes.extend(self._import_class_or_module(name)) + return classes + + def _all_classes(self, classes): + """ + Return a list of all classes that are ancestors of *classes*. + """ + all_classes = {} + + def recurse(cls): + all_classes[cls] = None + for c in cls.__bases__: + if c not in all_classes: + recurse(c) + + for cls in classes: + recurse(cls) + + return list(all_classes.keys()) + + def class_name(self, cls, parts=0): + """ + Given a class object, return a fully-qualified name. 
This + works for things I've tested in matplotlib so far, but may not + be completely general. + """ + module = cls.__module__ + if module == '__builtin__': + fullname = cls.__name__ + else: + fullname = "%s.%s" % (module, cls.__name__) + if parts == 0: + return fullname + name_parts = fullname.split('.') + return '.'.join(name_parts[-parts:]) + + def get_all_class_names(self): + """ + Get all of the class names involved in the graph. + """ + return [self.class_name(x) for x in self.all_classes] + + # These are the default options for graphviz + default_graph_options = { + "rankdir": "LR", + "size": '"8.0, 12.0"' + } + default_node_options = { + "shape": "box", + "fontsize": 10, + "height": 0.25, + "fontname": "Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans", + "style": '"setlinewidth(0.5)"' + } + default_edge_options = { + "arrowsize": 0.5, + "style": '"setlinewidth(0.5)"' + } + + def _format_node_options(self, options): + return ','.join(["%s=%s" % x for x in list(options.items())]) + def _format_graph_options(self, options): + return ''.join(["%s=%s;\n" % x for x in list(options.items())]) + + def generate_dot(self, fd, name, parts=0, urls={}, + graph_options={}, node_options={}, + edge_options={}): + """ + Generate a graphviz dot graph from the classes that + were passed in to __init__. + + *fd* is a Python file-like object to write to. + + *name* is the name of the graph + + *urls* is a dictionary mapping class names to http urls + + *graph_options*, *node_options*, *edge_options* are + dictionaries containing key/value pairs to pass on as graphviz + properties. 
+ """ + g_options = self.default_graph_options.copy() + g_options.update(graph_options) + n_options = self.default_node_options.copy() + n_options.update(node_options) + e_options = self.default_edge_options.copy() + e_options.update(edge_options) + + fd.write('digraph %s {\n' % name) + fd.write(self._format_graph_options(g_options)) + + for cls in self.all_classes: + if not self.show_builtins and cls in list(__builtins__.values()): + continue + + name = self.class_name(cls, parts) + + # Write the node + this_node_options = n_options.copy() + url = urls.get(self.class_name(cls)) + if url is not None: + this_node_options['URL'] = '"%s"' % url + fd.write(' "%s" [%s];\n' % + (name, self._format_node_options(this_node_options))) + + # Write the edges + for base in cls.__bases__: + if not self.show_builtins and base in list(__builtins__.values()): + continue + + base_name = self.class_name(base, parts) + fd.write(' "%s" -> "%s" [%s];\n' % + (base_name, name, + self._format_node_options(e_options))) + fd.write('}\n') + + def run_dot(self, args, name, parts=0, urls={}, + graph_options={}, node_options={}, edge_options={}): + """ + Run graphviz 'dot' over this graph, returning whatever 'dot' + writes to stdout. + + *args* will be passed along as commandline arguments. + + *name* is the name of the graph + + *urls* is a dictionary mapping class names to http urls + + Raises DotException for any of the many os and + installation-related errors that may occur. + """ + try: + dot = subprocess.Popen(['dot'] + list(args), + stdin=subprocess.PIPE, stdout=subprocess.PIPE, + close_fds=True) + except OSError: + raise DotException("Could not execute 'dot'. 
Are you sure you have 'graphviz' installed?") + except ValueError: + raise DotException("'dot' called with invalid arguments") + except: + raise DotException("Unexpected error calling 'dot'") + + self.generate_dot(dot.stdin, name, parts, urls, graph_options, + node_options, edge_options) + dot.stdin.close() + result = dot.stdout.read() + returncode = dot.wait() + if returncode != 0: + raise DotException("'dot' returned the errorcode %d" % returncode) + return result + +class inheritance_diagram(Body, Element): + """ + A docutils node to use as a placeholder for the inheritance + diagram. + """ + pass + +def inheritance_diagram_directive(name, arguments, options, content, lineno, + content_offset, block_text, state, + state_machine): + """ + Run when the inheritance_diagram directive is first encountered. + """ + node = inheritance_diagram() + + class_names = arguments + + # Create a graph starting with the list of classes + graph = InheritanceGraph(class_names) + + # Create xref nodes for each target of the graph's image map and + # add them to the doc tree so that Sphinx can resolve the + # references to real URLs later. These nodes will eventually be + # removed from the doctree after we're done with them. + for name in graph.get_all_class_names(): + refnodes, x = xfileref_role( + 'class', ':class:`%s`' % name, name, 0, state) + node.extend(refnodes) + # Store the graph object so we can use it to generate the + # dot file later + node['graph'] = graph + # Store the original content for use as a hash + node['parts'] = options.get('parts', 0) + node['content'] = " ".join(class_names) + return [node] + +def get_graph_hash(node): + return md5(node['content'] + str(node['parts'])).hexdigest()[-10:] + +def html_output_graph(self, node): + """ + Output the graph for HTML. This will insert a PNG with clickable + image map. 
+ """ + graph = node['graph'] + parts = node['parts'] + + graph_hash = get_graph_hash(node) + name = "inheritance%s" % graph_hash + path = '_images' + dest_path = os.path.join(setup.app.builder.outdir, path) + if not os.path.exists(dest_path): + os.makedirs(dest_path) + png_path = os.path.join(dest_path, name + ".png") + path = setup.app.builder.imgpath + + # Create a mapping from fully-qualified class names to URLs. + urls = {} + for child in node: + if child.get('refuri') is not None: + urls[child['reftitle']] = child.get('refuri') + elif child.get('refid') is not None: + urls[child['reftitle']] = '#' + child.get('refid') + + # These arguments to dot will save a PNG file to disk and write + # an HTML image map to stdout. + image_map = graph.run_dot(['-Tpng', '-o%s' % png_path, '-Tcmapx'], + name, parts, urls) + return ('%s' % + (path, name, name, image_map)) + +def latex_output_graph(self, node): + """ + Output the graph for LaTeX. This will insert a PDF. + """ + graph = node['graph'] + parts = node['parts'] + + graph_hash = get_graph_hash(node) + name = "inheritance%s" % graph_hash + dest_path = os.path.abspath(os.path.join(setup.app.builder.outdir, '_images')) + if not os.path.exists(dest_path): + os.makedirs(dest_path) + pdf_path = os.path.abspath(os.path.join(dest_path, name + ".pdf")) + + graph.run_dot(['-Tpdf', '-o%s' % pdf_path], + name, parts, graph_options={'size': '"6.0,6.0"'}) + return '\n\\includegraphics{%s}\n\n' % pdf_path + +def visit_inheritance_diagram(inner_func): + """ + This is just a wrapper around html/latex_output_graph to make it + easier to handle errors and insert warnings. 
+ """ + def visitor(self, node): + try: + content = inner_func(self, node) + except DotException as e: + # Insert the exception as a warning in the document + warning = self.document.reporter.warning(str(e), line=node.line) + warning.parent = node + node.children = [warning] + else: + source = self.document.attributes['source'] + self.body.append(content) + node.children = [] + return visitor + +def do_nothing(self, node): + pass + +def setup(app): + setup.app = app + setup.confdir = app.confdir + + app.add_node( + inheritance_diagram, + latex=(visit_inheritance_diagram(latex_output_graph), do_nothing), + html=(visit_inheritance_diagram(html_output_graph), do_nothing)) + app.add_directive( + 'inheritance-diagram', inheritance_diagram_directive, + False, (1, 100, 0), parts = directives.nonnegative_int) diff --git a/doc/sphinxext/numpydoc/ipython_console_highlighting.py b/doc/sphinxext/numpydoc/ipython_console_highlighting.py new file mode 100644 index 00000000..217b779d --- /dev/null +++ b/doc/sphinxext/numpydoc/ipython_console_highlighting.py @@ -0,0 +1,114 @@ +"""reST directive for syntax-highlighting ipython interactive sessions. + +XXX - See what improvements can be made based on the new (as of Sept 2009) +'pycon' lexer for the python console. At the very least it will give better +highlighted tracebacks. 
+""" + +#----------------------------------------------------------------------------- +# Needed modules + +# Standard library +import re + +# Third party +from pygments.lexer import Lexer, do_insertions +from pygments.lexers.agile import (PythonConsoleLexer, PythonLexer, + PythonTracebackLexer) +from pygments.token import Comment, Generic + +from sphinx import highlighting + +#----------------------------------------------------------------------------- +# Global constants +line_re = re.compile('.*?\n') + +#----------------------------------------------------------------------------- +# Code begins - classes and functions + +class IPythonConsoleLexer(Lexer): + """ + For IPython console output or doctests, such as: + + .. sourcecode:: ipython + + In [1]: a = 'foo' + + In [2]: a + Out[2]: 'foo' + + In [3]: print a + foo + + In [4]: 1 / 0 + + Notes: + + - Tracebacks are not currently supported. + + - It assumes the default IPython prompts, not customized ones. + """ + + name = 'IPython console session' + aliases = ['ipython'] + mimetypes = ['text/x-ipython-console'] + input_prompt = re.compile("(In \[[0-9]+\]: )|( \.\.\.+:)") + output_prompt = re.compile("(Out\[[0-9]+\]: )|( \.\.\.+:)") + continue_prompt = re.compile(" \.\.\.+:") + tb_start = re.compile("\-+") + + def get_tokens_unprocessed(self, text): + pylexer = PythonLexer(**self.options) + tblexer = PythonTracebackLexer(**self.options) + + curcode = '' + insertions = [] + for match in line_re.finditer(text): + line = match.group() + input_prompt = self.input_prompt.match(line) + continue_prompt = self.continue_prompt.match(line.rstrip()) + output_prompt = self.output_prompt.match(line) + if line.startswith("#"): + insertions.append((len(curcode), + [(0, Comment, line)])) + elif input_prompt is not None: + insertions.append((len(curcode), + [(0, Generic.Prompt, input_prompt.group())])) + curcode += line[input_prompt.end():] + elif continue_prompt is not None: + insertions.append((len(curcode), + [(0, 
our own token, but error is typically in a bright color like
[1] http://projects.scipy.org/scipy/numpy/wiki/CodingStyleGuidelines#docstring-standard + +""" + +import os, re, pydoc +from docscrape_sphinx import get_doc_object, SphinxDocString +import inspect + +def mangle_docstrings(app, what, name, obj, options, lines, + reference_offset=[0]): + if what == 'module': + # Strip top title + title_re = re.compile(r'^\s*[#*=]{4,}\n[a-z0-9 -]+\n[#*=]{4,}\s*', + re.I|re.S) + lines[:] = title_re.sub('', "\n".join(lines)).split("\n") + else: + doc = get_doc_object(obj, what, "\n".join(lines)) + lines[:] = str(doc).split("\n") + + if app.config.numpydoc_edit_link and hasattr(obj, '__name__') and \ + obj.__name__: + if hasattr(obj, '__module__'): + v = dict(full_name="%s.%s" % (obj.__module__, obj.__name__)) + else: + v = dict(full_name=obj.__name__) + lines += ['', '.. htmlonly::', ''] + lines += [' %s' % x for x in + (app.config.numpydoc_edit_link % v).split("\n")] + + # replace reference numbers so that there are no duplicates + references = [] + for l in lines: + l = l.strip() + if l.startswith('.. ['): + try: + references.append(int(l[len('.. ['):l.index(']')])) + except ValueError: + print("WARNING: invalid reference in %s docstring" % name) + + # Start renaming from the biggest number, otherwise we may + # overwrite references. + references.sort() + if references: + for i, line in enumerate(lines): + for r in references: + new_r = reference_offset[0] + r + lines[i] = lines[i].replace('[%d]_' % r, + '[%d]_' % new_r) + lines[i] = lines[i].replace('.. [%d]' % r, + '.. 
[%d]' % new_r) + + reference_offset[0] += len(references) + +def mangle_signature(app, what, name, obj, options, sig, retann): + # Do not try to inspect classes that don't define `__init__` + if (inspect.isclass(obj) and + 'initializes x; see ' in pydoc.getdoc(obj.__init__)): + return '', '' + + if not (callable(obj) or hasattr(obj, '__argspec_is_invalid_')): return + if not hasattr(obj, '__doc__'): return + + doc = SphinxDocString(pydoc.getdoc(obj)) + if doc['Signature']: + sig = re.sub("^[^(]*", "", doc['Signature']) + return sig, '' + +def initialize(app): + try: + app.connect('autodoc-process-signature', mangle_signature) + except: + monkeypatch_sphinx_ext_autodoc() + +def setup(app, get_doc_object_=get_doc_object): + global get_doc_object + get_doc_object = get_doc_object_ + + app.connect('autodoc-process-docstring', mangle_docstrings) + app.connect('builder-inited', initialize) + app.add_config_value('numpydoc_edit_link', None, True) + +#------------------------------------------------------------------------------ +# Monkeypatch sphinx.ext.autodoc to accept argspecless autodocs (Sphinx < 0.5) +#------------------------------------------------------------------------------ + +def monkeypatch_sphinx_ext_autodoc(): + global _original_format_signature + import sphinx.ext.autodoc + + if sphinx.ext.autodoc.format_signature is our_format_signature: + return + + print("[numpydoc] Monkeypatching sphinx.ext.autodoc ...") + _original_format_signature = sphinx.ext.autodoc.format_signature + sphinx.ext.autodoc.format_signature = our_format_signature + +def our_format_signature(what, obj): + r = mangle_signature(None, what, None, obj, None, None, None) + if r is not None: + return r[0] + else: + return _original_format_signature(what, obj) diff --git a/doc/sphinxext/numpydoc/plot_directive.py b/doc/sphinxext/numpydoc/plot_directive.py new file mode 100644 index 00000000..be03c357 --- /dev/null +++ b/doc/sphinxext/numpydoc/plot_directive.py @@ -0,0 +1,773 @@ +""" +A 
Additionally, one may specify the name of a function to call (with
directory where the file containing the directive is.)
+ +""" + +import sys, os, glob, shutil, imp, warnings, io, re, textwrap, \ + traceback, exceptions + +from docutils.parsers.rst import directives +from docutils import nodes +from docutils.parsers.rst.directives.images import Image +align = Image.align +import sphinx + +sphinx_version = sphinx.__version__.split(".") +# The split is necessary for sphinx beta versions where the string is +# '6b1' +sphinx_version = tuple([int(re.split('[a-z]', x)[0]) + for x in sphinx_version[:2]]) + +try: + # Sphinx depends on either Jinja or Jinja2 + import jinja2 + def format_template(template, **kw): + return jinja2.Template(template).render(**kw) +except ImportError: + import jinja + def format_template(template, **kw): + return jinja.from_string(template, **kw) + +import matplotlib +import matplotlib.cbook as cbook +matplotlib.use('Agg') +import matplotlib.pyplot as plt +from matplotlib import _pylab_helpers + +__version__ = 2 + +#------------------------------------------------------------------------------ +# Relative pathnames +#------------------------------------------------------------------------------ + +# os.path.relpath is new in Python 2.6 +try: + from os.path import relpath +except ImportError: + # Copied from Python 2.7 + if 'posix' in sys.builtin_module_names: + def relpath(path, start=os.path.curdir): + """Return a relative version of a path""" + from os.path import sep, curdir, join, abspath, commonprefix, \ + pardir + + if not path: + raise ValueError("no path specified") + + start_list = abspath(start).split(sep) + path_list = abspath(path).split(sep) + + # Work out how much of the filepath is shared by start and path. 
+ i = len(commonprefix([start_list, path_list])) + + rel_list = [pardir] * (len(start_list)-i) + path_list[i:] + if not rel_list: + return curdir + return join(*rel_list) + elif 'nt' in sys.builtin_module_names: + def relpath(path, start=os.path.curdir): + """Return a relative version of a path""" + from os.path import sep, curdir, join, abspath, commonprefix, \ + pardir, splitunc + + if not path: + raise ValueError("no path specified") + start_list = abspath(start).split(sep) + path_list = abspath(path).split(sep) + if start_list[0].lower() != path_list[0].lower(): + unc_path, rest = splitunc(path) + unc_start, rest = splitunc(start) + if bool(unc_path) ^ bool(unc_start): + raise ValueError("Cannot mix UNC and non-UNC paths (%s and %s)" + % (path, start)) + else: + raise ValueError("path is on drive %s, start on drive %s" + % (path_list[0], start_list[0])) + # Work out how much of the filepath is shared by start and path. + for i in range(min(len(start_list), len(path_list))): + if start_list[i].lower() != path_list[i].lower(): + break + else: + i += 1 + + rel_list = [pardir] * (len(start_list)-i) + path_list[i:] + if not rel_list: + return curdir + return join(*rel_list) + else: + raise RuntimeError("Unsupported platform (no relpath available!)") + +#------------------------------------------------------------------------------ +# Registration hook +#------------------------------------------------------------------------------ + +def plot_directive(name, arguments, options, content, lineno, + content_offset, block_text, state, state_machine): + return run(arguments, content, options, state_machine, state, lineno) +plot_directive.__doc__ = __doc__ + +def _option_boolean(arg): + if not arg or not arg.strip(): + # no argument given, assume used as a flag + return True + elif arg.strip().lower() in ('no', '0', 'false'): + return False + elif arg.strip().lower() in ('yes', '1', 'true'): + return True + else: + raise ValueError('"%s" unknown boolean' % arg) + +def 
_option_format(arg): + return directives.choice(arg, ('python', 'doctest')) + +def _option_align(arg): + return directives.choice(arg, ("top", "middle", "bottom", "left", "center", + "right")) + +def mark_plot_labels(app, document): + """ + To make plots referenceable, we need to move the reference from + the "htmlonly" (or "latexonly") node to the actual figure node + itself. + """ + for name, explicit in document.nametypes.items(): + if not explicit: + continue + labelid = document.nameids[name] + if labelid is None: + continue + node = document.ids[labelid] + if node.tagname in ('html_only', 'latex_only'): + for n in node: + if n.tagname == 'figure': + sectname = name + for c in n: + if c.tagname == 'caption': + sectname = c.astext() + break + + node['ids'].remove(labelid) + node['names'].remove(name) + n['ids'].append(labelid) + n['names'].append(name) + document.settings.env.labels[name] = \ + document.settings.env.docname, labelid, sectname + break + +def setup(app): + setup.app = app + setup.config = app.config + setup.confdir = app.confdir + + options = {'alt': directives.unchanged, + 'height': directives.length_or_unitless, + 'width': directives.length_or_percentage_or_unitless, + 'scale': directives.nonnegative_int, + 'align': _option_align, + 'class': directives.class_option, + 'include-source': _option_boolean, + 'format': _option_format, + 'context': directives.flag, + 'nofigs': directives.flag, + 'encoding': directives.encoding + } + + app.add_directive('plot', plot_directive, True, (0, 2, False), **options) + app.add_config_value('plot_pre_code', None, True) + app.add_config_value('plot_include_source', False, True) + app.add_config_value('plot_formats', ['png', 'hires.png', 'pdf'], True) + app.add_config_value('plot_basedir', None, True) + app.add_config_value('plot_html_show_formats', True, True) + app.add_config_value('plot_rcparams', {}, True) + + app.connect('doctree-read', mark_plot_labels) + 
+#------------------------------------------------------------------------------ +# Doctest handling +#------------------------------------------------------------------------------ + +def contains_doctest(text): + try: + # check if it's valid Python as-is + compile(text, '', 'exec') + return False + except SyntaxError: + pass + r = re.compile(r'^\s*>>>', re.M) + m = r.search(text) + return bool(m) + +def unescape_doctest(text): + """ + Extract code from a piece of text, which contains either Python code + or doctests. + + """ + if not contains_doctest(text): + return text + + code = "" + for line in text.split("\n"): + m = re.match(r'^\s*(>>>|\.\.\.) (.*)$', line) + if m: + code += m.group(2) + "\n" + elif line.strip(): + code += "# " + line.strip() + "\n" + else: + code += "\n" + return code + +def split_code_at_show(text): + """ + Split code at plt.show() + + """ + + parts = [] + is_doctest = contains_doctest(text) + + part = [] + for line in text.split("\n"): + if (not is_doctest and line.strip() == 'plt.show()') or \ + (is_doctest and line.strip() == '>>> plt.show()'): + part.append(line) + parts.append("\n".join(part)) + part = [] + else: + part.append(line) + if "\n".join(part).strip(): + parts.append("\n".join(part)) + return parts + +#------------------------------------------------------------------------------ +# Template +#------------------------------------------------------------------------------ + + +TEMPLATE = """ +{{ source_code }} + +{{ only_html }} + + {% if source_link or (html_show_formats and not multi_image) %} + ( + {%- if source_link -%} + `Source code <{{ source_link }}>`__ + {%- endif -%} + {%- if html_show_formats and not multi_image -%} + {%- for img in images -%} + {%- for fmt in img.formats -%} + {%- if source_link or not loop.first -%}, {% endif -%} + `{{ fmt }} <{{ dest_dir }}/{{ img.basename }}.{{ fmt }}>`__ + {%- endfor -%} + {%- endfor -%} + {%- endif -%} + ) + {% endif %} + + {% for img in images %} + .. 
figure:: {{ build_dir }}/{{ img.basename }}.png + {%- for option in options %} + {{ option }} + {% endfor %} + + {% if html_show_formats and multi_image -%} + ( + {%- for fmt in img.formats -%} + {%- if not loop.first -%}, {% endif -%} + `{{ fmt }} <{{ dest_dir }}/{{ img.basename }}.{{ fmt }}>`__ + {%- endfor -%} + ) + {%- endif -%} + + {{ caption }} + {% endfor %} + +{{ only_latex }} + + {% for img in images %} + .. image:: {{ build_dir }}/{{ img.basename }}.pdf + {% endfor %} + +""" + +exception_template = """ +.. htmlonly:: + + [`source code <%(linkdir)s/%(basename)s.py>`__] + +Exception occurred rendering plot. + +""" + +# the context of the plot for all directives specified with the +# :context: option +plot_context = dict() + +class ImageFile: + def __init__(self, basename, dirname): + self.basename = basename + self.dirname = dirname + self.formats = [] + + def filename(self, format): + return os.path.join(self.dirname, "%s.%s" % (self.basename, format)) + + def filenames(self): + return [self.filename(fmt) for fmt in self.formats] + +def out_of_date(original, derived): + """ + Returns True if derivative is out-of-date wrt original, + both of which are full file paths. + """ + return (not os.path.exists(derived) or + (os.path.exists(original) and + os.stat(derived).st_mtime < os.stat(original).st_mtime)) + +class PlotError(RuntimeError): + pass + +def run_code(code, code_path, ns=None, function_name=None): + """ + Import a Python module from a path, and run the function given by + name, if function_name is not None. + """ + + # Change the working directory to the directory of the example, so + # it can get at its data files, if any. Add its path to sys.path + # so it can import any helper modules sitting beside it. 
+ + pwd = os.getcwd() + old_sys_path = list(sys.path) + if code_path is not None: + dirname = os.path.abspath(os.path.dirname(code_path)) + os.chdir(dirname) + sys.path.insert(0, dirname) + + # Redirect stdout + stdout = sys.stdout + sys.stdout = io.StringIO() + + # Reset sys.argv + old_sys_argv = sys.argv + sys.argv = [code_path] + + try: + try: + code = unescape_doctest(code) + if ns is None: + ns = {} + if not ns: + if setup.config.plot_pre_code is None: + exec("import numpy as np\nfrom matplotlib import pyplot as plt\n", ns) + else: + exec(setup.config.plot_pre_code, ns) + if "__main__" in code: + exec("__name__ = '__main__'", ns) + exec(code, ns) + if function_name is not None: + exec(function_name + "()", ns) + except (Exception, SystemExit) as err: + raise PlotError(traceback.format_exc()) + finally: + os.chdir(pwd) + sys.argv = old_sys_argv + sys.path[:] = old_sys_path + sys.stdout = stdout + return ns + +def clear_state(plot_rcparams): + plt.close('all') + matplotlib.rc_file_defaults() + matplotlib.rcParams.update(plot_rcparams) + +def render_figures(code, code_path, output_dir, output_base, context, + function_name, config): + """ + Run a pyplot script and save the low and high res PNGs and a PDF + in outdir. 
+ + Save the images under *output_dir* with file names derived from + *output_base* + """ + # -- Parse format list + default_dpi = {'png': 80, 'hires.png': 200, 'pdf': 200} + formats = [] + plot_formats = config.plot_formats + if isinstance(plot_formats, str): + plot_formats = eval(plot_formats) + for fmt in plot_formats: + if isinstance(fmt, str): + formats.append((fmt, default_dpi.get(fmt, 80))) + elif type(fmt) in (tuple, list) and len(fmt)==2: + formats.append((str(fmt[0]), int(fmt[1]))) + else: + raise PlotError('invalid image format "%r" in plot_formats' % fmt) + + # -- Try to determine if all images already exist + + code_pieces = split_code_at_show(code) + + # Look for single-figure output files first + # Look for single-figure output files first + all_exists = True + img = ImageFile(output_base, output_dir) + for format, dpi in formats: + if out_of_date(code_path, img.filename(format)): + all_exists = False + break + img.formats.append(format) + + if all_exists: + return [(code, [img])] + + # Then look for multi-figure output files + results = [] + all_exists = True + for i, code_piece in enumerate(code_pieces): + images = [] + for j in range(1000): + if len(code_pieces) > 1: + img = ImageFile('%s_%02d_%02d' % (output_base, i, j), output_dir) + else: + img = ImageFile('%s_%02d' % (output_base, j), output_dir) + for format, dpi in formats: + if out_of_date(code_path, img.filename(format)): + all_exists = False + break + img.formats.append(format) + + # assume that if we have one, we have them all + if not all_exists: + all_exists = (j > 0) + break + images.append(img) + if not all_exists: + break + results.append((code_piece, images)) + + if all_exists: + return results + + # We didn't find the files, so build them + + results = [] + if context: + ns = plot_context + else: + ns = {} + + for i, code_piece in enumerate(code_pieces): + if not context: + clear_state(config.plot_rcparams) + run_code(code_piece, code_path, ns, function_name) + + images = [] + 
fig_managers = _pylab_helpers.Gcf.get_all_fig_managers() + for j, figman in enumerate(fig_managers): + if len(fig_managers) == 1 and len(code_pieces) == 1: + img = ImageFile(output_base, output_dir) + elif len(code_pieces) == 1: + img = ImageFile("%s_%02d" % (output_base, j), output_dir) + else: + img = ImageFile("%s_%02d_%02d" % (output_base, i, j), + output_dir) + images.append(img) + for format, dpi in formats: + try: + figman.canvas.figure.savefig(img.filename(format), dpi=dpi) + except Exception as err: + raise PlotError(traceback.format_exc()) + img.formats.append(format) + + results.append((code_piece, images)) + + if not context: + clear_state(config.plot_rcparams) + + return results + +def run(arguments, content, options, state_machine, state, lineno): + # The user may provide a filename *or* Python code content, but not both + if arguments and content: + raise RuntimeError("plot:: directive can't have both args and content") + + document = state_machine.document + config = document.settings.env.config + nofigs = 'nofigs' in options + + options.setdefault('include-source', config.plot_include_source) + context = 'context' in options + + rst_file = document.attributes['source'] + rst_dir = os.path.dirname(rst_file) + + if len(arguments): + if not config.plot_basedir: + source_file_name = os.path.join(setup.app.builder.srcdir, + directives.uri(arguments[0])) + else: + source_file_name = os.path.join(setup.app.builder.srcdir, config.plot_basedir, + directives.uri(arguments[0])) + + # If there is content, it will be passed as a caption. 
+ caption = '\n'.join(content) + + # If the optional function name is provided, use it + if len(arguments) == 2: + function_name = arguments[1] + else: + function_name = None + + fd = open(source_file_name, 'r') + code = fd.read() + fd.close() + output_base = os.path.basename(source_file_name) + else: + source_file_name = rst_file + code = textwrap.dedent("\n".join(map(str, content))) + counter = document.attributes.get('_plot_counter', 0) + 1 + document.attributes['_plot_counter'] = counter + base, ext = os.path.splitext(os.path.basename(source_file_name)) + output_base = '%s-%d.py' % (base, counter) + function_name = None + caption = '' + + base, source_ext = os.path.splitext(output_base) + if source_ext in ('.py', '.rst', '.txt'): + output_base = base + else: + source_ext = '' + + # ensure that LaTeX includegraphics doesn't choke in foo.bar.pdf filenames + output_base = output_base.replace('.', '-') + + # is it in doctest format? + is_doctest = contains_doctest(code) + if 'format' in options: + if options['format'] == 'python': + is_doctest = False + else: + is_doctest = True + + # determine output directory name fragment + source_rel_name = relpath(source_file_name, setup.app.srcdir) + source_rel_dir = os.path.dirname(source_rel_name) + while source_rel_dir.startswith(os.path.sep): + source_rel_dir = source_rel_dir[1:] + + # build_dir: where to place output files (temporarily) + build_dir = os.path.join(os.path.dirname(setup.app.doctreedir), + 'plot_directive', + source_rel_dir) + # get rid of .. in paths, also changes pathsep + # see note in Python docs for warning about symbolic links on Windows. 
+ # need to compare source and dest paths at end + build_dir = os.path.normpath(build_dir) + + if not os.path.exists(build_dir): + os.makedirs(build_dir) + + # output_dir: final location in the builder's directory + dest_dir = os.path.abspath(os.path.join(setup.app.builder.outdir, + source_rel_dir)) + if not os.path.exists(dest_dir): + os.makedirs(dest_dir) # no problem here for me, but just use built-ins + + # how to link to files from the RST file + dest_dir_link = os.path.join(relpath(setup.app.srcdir, rst_dir), + source_rel_dir).replace(os.path.sep, '/') + build_dir_link = relpath(build_dir, rst_dir).replace(os.path.sep, '/') + source_link = dest_dir_link + '/' + output_base + source_ext + + # make figures + try: + results = render_figures(code, source_file_name, build_dir, output_base, + context, function_name, config) + errors = [] + except PlotError as err: + reporter = state.memo.reporter + sm = reporter.system_message( + 2, "Exception occurred in plotting %s\n from %s:\n%s" % (output_base, + source_file_name, err), + line=lineno) + results = [(code, [])] + errors = [sm] + + # Properly indent the caption + caption = '\n'.join(' ' + line.strip() + for line in caption.split('\n')) + + # generate output restructuredtext + total_lines = [] + for j, (code_piece, images) in enumerate(results): + if options['include-source']: + if is_doctest: + lines = [''] + lines += [row.rstrip() for row in code_piece.split('\n')] + else: + lines = ['.. code-block:: python', ''] + lines += [' %s' % row.rstrip() + for row in code_piece.split('\n')] + source_code = "\n".join(lines) + else: + source_code = "" + + if nofigs: + images = [] + + opts = [':%s: %s' % (key, val) for key, val in list(options.items()) + if key in ('alt', 'height', 'width', 'scale', 'align', 'class')] + + only_html = ".. only:: html" + only_latex = ".. 
sphinxcontrib.autorun
+""" +import os +from subprocess import PIPE, Popen + +from docutils import nodes +from docutils.parsers.rst import Directive, directives +from sphinx.errors import SphinxError + +from sphinx_autorun import version + +__version__ = version.version + + +class RunBlockError(SphinxError): + category = 'runblock error' + + +class AutoRun(object): + here = os.path.abspath(__file__) + pycon = os.path.join(os.path.dirname(here), 'pycon.py') + config = { + 'pycon': 'python ' + pycon, + 'pycon_prefix_chars': 4, + 'pycon_show_source': False, + 'console': 'bash', + 'console_prefix_chars': 1, + } + + @classmethod + def builder_init(cls, app): + cls.config.update(app.builder.config.autorun_languages) + + +class RunBlock(Directive): + has_content = True + required_arguments = 1 + optional_arguments = 0 + final_argument_whitespace = False + option_spec = { + 'linenos': directives.flag, + } + + def run(self): + config = AutoRun.config + language = self.arguments[0] + + if language not in config: + raise RunBlockError('Unknown language %s' % language) + + # Get configuration values for the language + args = config[language].split() + input_encoding = config.get(language+'_input_encoding', 'utf8') + output_encoding = config.get(language+'_output_encoding', 'utf8') + prefix_chars = config.get(language+'_prefix_chars', 0) + show_source = config.get(language+'_show_source', True) + + # Build the code text + proc = Popen(args, bufsize=1, stdin=PIPE, stdout=PIPE, stderr=PIPE) + codelines = (line[prefix_chars:] for line in self.content) + code = u'\n'.join(codelines).encode(input_encoding) + + # Run the code + stdout, stderr = proc.communicate(code) + + # Process output + if stdout: + out = stdout.decode(output_encoding) + if stderr: + out = stderr.decode(output_encoding) + + # Get the original code with prefixes + if show_source: + code = u'\n'.join(self.content) + code_out = u'\n'.join((code, out)) + else: + code_out = out + + literal = nodes.literal_block(code_out, code_out) + 
literal['language'] = language + literal['linenos'] = 'linenos' in self.options + return [literal] + + +def setup(app): + app.add_directive('runblock', RunBlock) + app.connect('builder-inited', AutoRun.builder_init) + app.add_config_value('autorun_languages', AutoRun.config, 'env') diff --git a/doc/sphinxext/sphinx_autorun/pycon.py b/doc/sphinxext/sphinx_autorun/pycon.py new file mode 100644 index 00000000..c0edf861 --- /dev/null +++ b/doc/sphinxext/sphinx_autorun/pycon.py @@ -0,0 +1,31 @@ +import sys +from code import InteractiveInterpreter + + +def main(): + """ + Print lines of input along with output. + """ + source_lines = (line.rstrip() for line in sys.stdin) + console = InteractiveInterpreter() + source = '' + try: + while True: + source = next(source_lines) + # Allow the user to ignore specific lines of output. + if not source.endswith('# ignore'): + print('>>>', source) + more = console.runsource(source) + while more: + next_line = next(source_lines) + print('...', next_line) + source += '\n' + next_line + more = console.runsource(source) + except StopIteration: + if more: + print('... ') + more = console.runsource(source + '\n') + + +if __name__ == '__main__': + main() diff --git a/doc/sphinxext/sphinx_autorun/version.py b/doc/sphinxext/sphinx_autorun/version.py new file mode 100644 index 00000000..433d173a --- /dev/null +++ b/doc/sphinxext/sphinx_autorun/version.py @@ -0,0 +1,4 @@ +# coding: utf-8 +# file generated by setuptools_scm +# don't change, don't track in version control +version = '1.1.1' diff --git a/doc/sphinxext/triqs_example/triqs_example.py b/doc/sphinxext/triqs_example/triqs_example.py new file mode 100644 index 00000000..2c90ac4c --- /dev/null +++ b/doc/sphinxext/triqs_example/triqs_example.py @@ -0,0 +1,123 @@ +import tempfile +# -*- coding: utf-8 -*- +# seems to be executed at the level of the conf.py +# so we need to link the lib at that place... 
+""" +""" +import os +import codecs +from os import path +from subprocess import Popen,PIPE +from docutils import nodes +from docutils.parsers.rst import Directive +from docutils.parsers.rst import directives +from sphinx.errors import SphinxError + +class TriqsExampleError(SphinxError): + category = 'triqs_example error' + +class TriqsExampleRun: + #here = os.path.abspath(__file__) + #pycon = os.path.join(os.path.dirname(here),'pycon.py') + config = dict( + ) + @classmethod + def builder_init(cls,app): + #cls.config.update(app.builder.config.autorun_languages) + #cls.config.update(app.builder.config.autocompile_opts) + pass + +class TriqsExample(Directive): + has_content = True + required_arguments = 1 + optional_arguments = 0 + final_argument_whitespace = False + option_spec = { + 'linenos': directives.flag, + } + + def run(self): + document = self.state.document + filename = self.arguments[0] + if not document.settings.file_insertion_enabled: + return [document.reporter.warning('File insertion disabled', + line=self.lineno)] + env = document.settings.env + if filename.startswith('/') or filename.startswith(os.sep): + rel_fn = filename[1:] + else: + docdir = path.dirname(env.doc2path(env.docname, base=None)) + rel_fn = path.normpath(path.join(docdir, filename)) + try: + fn = path.join(env.srcdir, rel_fn) + except UnicodeDecodeError: + # the source directory is a bytestring with non-ASCII characters; + # let's try to encode the rel_fn in the file system encoding + rel_fn = rel_fn.encode(sys.getfilesystemencoding()) + fn = path.join(env.srcdir, rel_fn) + + encoding = self.options.get('encoding', env.config.source_encoding) + try: + f = codecs.open(fn, 'rU', encoding) + lines = f.readlines() + f.close() + except (IOError, OSError): + return [document.reporter.warning( + 'Include file %r not found or reading it failed' % filename, + line=self.lineno)] + except UnicodeError: + return [document.reporter.warning( + 'Encoding %r used for reading included file %r seems to 
' + 'be wrong, try giving an :encoding: option' % + (encoding, filename))] + + config = TriqsExampleRun.config + + # Get configuration values for the language + input_encoding = 'utf8' #config.get(language+'_input_encoding','ascii') + output_encoding = 'utf8' #config.get(language+'_output_encoding','ascii') + show_source = True + + # Build the code text + code = ''.join(lines).strip() + filename_clean = filename.rsplit('.',1)[0] + if filename_clean.startswith('./') : filename_clean = filename_clean[2:] + #print "Running the example ....",filename_clean + #print "Root ?", env.doc2path(env.docname, base=None) + + import subprocess as S + error = True + try : + stdout ='' + #resout = S.check_output("./example_bin/doc_%s"%(filename_clean) ,stderr=S.STDOUT,shell=True) + resout = S.check_output("./%s/doc_%s"%(docdir,filename_clean) ,stderr=S.STDOUT,shell=True) + if resout : + stdout = '---------- Result is -------\n' + resout.strip() + error = False + except S.CalledProcessError as E : + stdout ='---------- RunTime error -------\n' + stdout += E.output + + # Process output + if stdout: + stdout = stdout.decode(output_encoding,'ignore') + out = ''.join(stdout).decode(output_encoding) + else: + out = '' #.join(stderr).decode(output_encoding) + + # Get the original code with prefixes + code_out = '\n'.join((code,out)) + + if error : # report on console + print(" Error in processing ") + print(code_out) + + literal = nodes.literal_block(code_out,code_out) + literal['language'] = 'c' + literal['linenos'] = 'linenos' in self.options + return [literal] + +def setup(app): + app.add_directive('triqs_example', TriqsExample) + app.connect('builder-inited',TriqsExampleRun.builder_init) + diff --git a/doc/themes/agogo/layout.html b/doc/themes/agogo/layout.html new file mode 100644 index 00000000..869d76ad --- /dev/null +++ b/doc/themes/agogo/layout.html @@ -0,0 +1,92 @@ +{# + agogo/layout.html + ~~~~~~~~~~~~~~~~~ + + Sphinx layout template for the agogo theme, originally written + by 
Andi Albrecht. + + :copyright: Copyright 2007-2010 by the Sphinx team, see AUTHORS. + :license: BSD, see LICENSE for details. +#} +{% extends "basic/layout.html" %} + +{% block header %} +
+
+ {%- if logo %} + + {%- endif %} + {%- block headertitle %} +

{{ shorttitle|e }}

+ {%- endblock %} +
+ HOME | + {%- for rellink in rellinks %} + {{ rellink[3] }} + {%- if not loop.last %}{{ reldelim2 }}{% endif %} + {%- endfor %} +
+
+
+{% endblock %} + +{% block content %} +
+
+
+ {%- block document %} + {{ super() }} + {%- endblock %} +
+ +
+
+
+{% endblock %} + +{% block footer %} + +{% endblock %} + +{% block relbar1 %}{% endblock %} +{% block relbar2 %}{% endblock %} diff --git a/doc/themes/agogo/static/agogo.css_t b/doc/themes/agogo/static/agogo.css_t new file mode 100644 index 00000000..5e4b0c66 --- /dev/null +++ b/doc/themes/agogo/static/agogo.css_t @@ -0,0 +1,519 @@ +/* + * agogo.css_t + * ~~~~~~~~~~~ + * + * Sphinx stylesheet -- agogo theme. + * + * :copyright: Copyright 2007-2010 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +* { + margin: 0px; + padding: 0px; +} + +body { + font-family: {{ theme_bodyfont }}; + font-size: 100%; // TRIQS + line-height: 1.0em; // TRIQS + color: black; + background-color: {{ theme_bgcolor }}; +} + + +/* Page layout */ + +div.header, div.content, div.footer { + width: {{ theme_pagewidth }}; + margin-left: auto; + margin-right: auto; +} + +div.header-wrapper { + background: {{ theme_headerbg }}; + border-bottom: 3px solid #2e3436; +} + + +/* Default body styles */ +a { + color: {{ theme_linkcolor }}; +} + +div.bodywrapper a, div.footer a { + text-decoration: none; // TRIQS +} + +div.bodywrapper a:hover, div.footer a:hover { + text-decoration: underline; // TRIQS +} + +.clearer { + clear: both; +} + +.left { + float: left; +} + +.right { + float: right; +} + +.line-block { + display: block; + margin-top: 1em; + margin-bottom: 1em; +} + +.line-block .line-block { + margin-top: 0; + margin-bottom: 0; + margin-left: 1.5em; +} + +h1, h2, h3, h4 { + font-family: {{ theme_headerfont }}; + font-weight: normal; + color: {{ theme_headercolor2 }}; + margin-bottom: .8em; +} + +h1 { + color: {{ theme_headercolor1 }}; +} + +h2 { + padding-bottom: .5em; + border-bottom: 1px solid {{ theme_headercolor2 }}; +} + +a.headerlink { + visibility: hidden; + color: #dddddd; + padding-left: .3em; +} + +h1:hover > a.headerlink, +h2:hover > a.headerlink, +h3:hover > a.headerlink, +h4:hover > a.headerlink, +h5:hover > a.headerlink, +h6:hover > 
a.headerlink, +dt:hover > a.headerlink { + visibility: visible; +} + +img { + border: 0; +} + +div.admonition { + margin-top: 10px; + margin-bottom: 10px; + padding: 2px 7px 1px 7px; + border-left: 0.2em solid black; +} + +p.admonition-title { + margin: 0px 10px 5px 0px; + font-weight: bold; + color: red; // TRIQS +} + +dt:target, .highlighted { + background-color: #fbe54e; +} + +/* Header */ + +div.header { + padding-top: 10px; + padding-bottom: 10px; +} + +div.header h1 { + font-family: {{ theme_headerfont }}; + font-weight: normal; + font-size: 180%; + letter-spacing: .08em; +} + +div.header h1 a { + color: white; +} + +div.header div.rel { + margin-top: -1em; // TRIQS +} + +div.header div.rel a { + color: {{ theme_headerlinkcolor }}; + letter-spacing: .1em; + text-transform: uppercase; +} + +p.logo { + float: right; +} + +img.logo { + border: 0; +} + + +/* Content */ +div.content-wrapper { + background-color: white; + padding-top: 20px; + padding-bottom: 20px; +} + +div.document { + width: {{ theme_documentwidth }}; + float: left; +} + +div.body { + padding-right: 2em; + text-align: {{ theme_textalign }}; +} + +div.document ul { + margin: 1.5em; + list-style-type: square; +} + +div.document dd { + margin-left: 1.2em; + margin-top: .4em; + margin-bottom: 1em; +} + +div.document .section { + margin-top: 1.7em; +} +div.document .section:first-child { + margin-top: 0px; +} + +div.document div.highlight { + line-height: 1.0em; // TRIQS + padding: 3px; + background-color: #eeeeec; + border-top: 2px solid #dddddd; + border-bottom: 2px solid #dddddd; + margin-top: .8em; + margin-bottom: .8em; +} + +div.document h2 { + margin-top: .7em; +} + +div.document p { + margin-bottom: .5em; +} + +div.document li.toctree-l1 { + margin-bottom: 0em; // TRIQS +} +div.document li.toctree-l2 { + margin-bottom: 0em; // TRIQS +} + +div.document .descname { + font-weight: bold; + color: blue; // TRIQS +} + +div.document .docutils.literal { + background-color: #eeeeec; + padding: 1px; +} 
+ +div.document .docutils.xref.literal { + background-color: transparent; + padding: 0px; +} + +div.document blockquote { + margin: 1em; +} + +div.document ol { + margin: 1.5em; +} + + +/* Sidebar */ + +div.sidebar { + width: {{ theme_sidebarwidth }}; + float: right; + font-size: .9em; +} + +div.sidebar a, div.header a { + text-decoration: none; +} + +div.sidebar a:hover, div.header a:hover { + text-decoration: underline; +} + +div.sidebar h3 { + color: #2e3436; + text-transform: uppercase; + font-size: 130%; + letter-spacing: .1em; +} + +div.sidebar ul { + list-style-type: none; +} + +div.sidebar li.toctree-l1 a { + display: block; + padding: 1px; + border: 1px solid #dddddd; + background-color: #eeeeec; + margin-bottom: .4em; + padding-left: 3px; + color: #2e3436; +} + +div.sidebar li.toctree-l2 a { + background-color: transparent; + border: none; + margin-left: 1em; + border-bottom: 1px solid #dddddd; +} + +div.sidebar li.toctree-l3 a { + background-color: transparent; + border: none; + margin-left: 2em; + border-bottom: 1px solid #dddddd; +} + +div.sidebar li.toctree-l2:last-child a { + border-bottom: none; +} + +div.sidebar li.toctree-l1.current a { + border-right: 5px solid {{ theme_headerlinkcolor }}; +} + +div.sidebar li.toctree-l1.current li.toctree-l2 a { + border-right: none; +} + + +/* Footer */ + +div.footer-wrapper { + background: {{ theme_footerbg }}; + border-top: 4px solid #babdb6; + padding-top: 10px; + padding-bottom: 10px; + min-height: 80px; +} + +div.footer, div.footer a { + color: #888a85; +} + +div.footer .right { + text-align: right; +} + +div.footer .left { + text-transform: uppercase; +} + + +/* Styles copied from basic theme */ + +/* -- search page ----------------------------------------------------------- */ + +ul.search { + margin: 10px 0 0 20px; + padding: 0; +} + +ul.search li { + padding: 5px 0 5px 20px; + background-image: url(file.png); + background-repeat: no-repeat; + background-position: 0 7px; +} + +ul.search li a { + 
font-weight: bold; +} + +ul.search li div.context { + color: #888; + margin: 2px 0 0 30px; + text-align: left; +} + +ul.keywordmatches li.goodmatch a { + font-weight: bold; +} + +/* -- index page ------------------------------------------------------------ */ + +table.contentstable { + width: 100%; +} + +table.contentstable td { + padding: 10px 10px 10px 0; +} + +table.contentstable p.biglink { + line-height: 150%; + text-align: left; +} + +a.biglink { + font-size: 1.3em; +} + +span.linkdescr { + font-style: italic; + padding-top: 5px; + font-size: 90%; +} + +/* -- general index --------------------------------------------------------- */ + +table.indextable td { + text-align: left; + vertical-align: top; +} + +table.indextable dl, table.indextable dd { + margin-top: 0; + margin-bottom: 0; +} + +table.indextable tr.pcap { + height: 10px; +} + +table.indextable tr.cap { + margin-top: 10px; + background-color: #f2f2f2; +} + +img.toggler { + margin-right: 3px; + margin-top: 3px; + cursor: pointer; +} + +/* -- viewcode extension ---------------------------------------------------- */ + +.viewcode-link { + float: right; +} + +.viewcode-back { + float: right; + font-family:: {{ theme_bodyfont }}; +} + +div.viewcode-block:target { + margin: -1px -3px; + padding: 0 3px; + background-color: #f4debf; + border-top: 1px solid #ac9; + border-bottom: 1px solid #ac9; +} + +/* -- added for TRIQS ------------------------------------------------------- */ + +div.warning { + margin: 1em 0 1em 0; + border: 1px solid #86989B; + background-color: lightpink; //#f7f7f7; +} + +div.note { + margin: 1em 0 1em 0; + border: 1px solid #86989B; + background-color: lightcyan; //#f7f7f7; +} + +div.note p.admonition-title { + margin: 0px 10px 5px 0px; + font-weight: bold; + color : blue; +} + +div.highlight-python { + margin-top : 5px; + margin-bottom : 7px; + background-color: lightcyan; //#f4debf; +} + +/* -- math display ---------------------------------------------------------- */ + +img.math { 
+ vertical-align: middle; +} + +div.body div.math p { + text-align: center; +} + +span.eqno { + float: right; +} + +/* -- tables ---------------------------------------------------------------- */ + +table.docutils { + border: 0; + border-collapse: collapse; +} + +table.docutils head { + color : blue; +} + +table.docutils td, table.docutils th { + padding: 1px 8px 1px 5px; + border-top: 0; + border-left: 0; + border-right: 0; + border-bottom: 1px solid #aaa; +} + +table.field-list td, table.field-list th { + border: 0 !important; +} + +table.footnote td, table.footnote th { + border: 0 !important; +} + +th { + text-align: left; + padding-right: 5px; +} + +table.citation { + border-left: solid 1px gray; + margin-left: 1px; +} + +table.citation td { + border-bottom: none; +} + diff --git a/doc/themes/agogo/theme.conf b/doc/themes/agogo/theme.conf new file mode 100644 index 00000000..e2108d96 --- /dev/null +++ b/doc/themes/agogo/theme.conf @@ -0,0 +1,20 @@ +[theme] +inherit = basic +stylesheet = agogo.css +pygments_style = tango + +[options] +bodyfont = "Verdana", Arial, sans-serif +headerfont = "Georgia", "Times New Roman", serif +pagewidth = 80% +documentwidth = 75% +sidebarwidth = 22.5% +bgcolor = #eeeeec +headerbg = url(bgtop.png) top left repeat-x +footerbg = url(bgfooter.png) top left repeat-x +linkcolor = #ce5c00 +headercolor1 = #204a87 +headercolor2 = #3465a4 +headerlinkcolor = #fcaf3e +textalign = justify +index = index diff --git a/doc/themes/triqs/layout.html b/doc/themes/triqs/layout.html new file mode 100644 index 00000000..0275e11a --- /dev/null +++ b/doc/themes/triqs/layout.html @@ -0,0 +1,52 @@ +{# + layout.html + ~~~~~~~~~~~ + + TRIQS layout template heavily based on the sphinxdoc theme. + + :copyright: Copyright 2013 by the TRIQS team. + :copyright: Copyright 2007-2013 by the Sphinx team. + :license: BSD, see LICENSE for details. 
+#} +{%- extends "basic/layout.html" %} + +{# put the sidebar before the body #} +{% block sidebar1 %}{{ sidebar() }}{% endblock %} +{% block sidebar2 %}{% endblock %} + +{% block extrahead %} + + + + +{{ super() }} +{%- if not embedded %} + +{%- endif %} +{% endblock %} + +{% block rootrellink %} +
  • Home »
  • +{% endblock %} + +{% block header %} + +{% endblock %} diff --git a/doc/themes/triqs/static/cufon-yui.js b/doc/themes/triqs/static/cufon-yui.js new file mode 100644 index 00000000..935614e1 --- /dev/null +++ b/doc/themes/triqs/static/cufon-yui.js @@ -0,0 +1,7 @@ +/* + * Copyright (c) 2009 Simo Kinnunen. + * Licensed under the MIT license. + * + * @version 1.09i + */ +var Cufon=(function(){var m=function(){return m.replace.apply(null,arguments)};var x=m.DOM={ready:(function(){var C=false,E={loaded:1,complete:1};var B=[],D=function(){if(C){return}C=true;for(var F;F=B.shift();F()){}};if(document.addEventListener){document.addEventListener("DOMContentLoaded",D,false);window.addEventListener("pageshow",D,false)}if(!window.opera&&document.readyState){(function(){E[document.readyState]?D():setTimeout(arguments.callee,10)})()}if(document.readyState&&document.createStyleSheet){(function(){try{document.body.doScroll("left");D()}catch(F){setTimeout(arguments.callee,1)}})()}q(window,"load",D);return function(F){if(!arguments.length){D()}else{C?F():B.push(F)}}})(),root:function(){return document.documentElement||document.body}};var n=m.CSS={Size:function(C,B){this.value=parseFloat(C);this.unit=String(C).match(/[a-z%]*$/)[0]||"px";this.convert=function(D){return D/B*this.value};this.convertFrom=function(D){return D/this.value*B};this.toString=function(){return this.value+this.unit}},addClass:function(C,B){var D=C.className;C.className=D+(D&&" ")+B;return C},color:j(function(C){var B={};B.color=C.replace(/^rgba\((.*?),\s*([\d.]+)\)/,function(E,D,F){B.opacity=parseFloat(F);return"rgb("+D+")"});return B}),fontStretch:j(function(B){if(typeof B=="number"){return B}if(/%$/.test(B)){return parseFloat(B)/100}return{"ultra-condensed":0.5,"extra-condensed":0.625,condensed:0.75,"semi-condensed":0.875,"semi-expanded":1.125,expanded:1.25,"extra-expanded":1.5,"ultra-expanded":2}[B]||1}),getStyle:function(C){var B=document.defaultView;if(B&&B.getComputedStyle){return new 
a(B.getComputedStyle(C,null))}if(C.currentStyle){return new a(C.currentStyle)}return new a(C.style)},gradient:j(function(F){var G={id:F,type:F.match(/^-([a-z]+)-gradient\(/)[1],stops:[]},C=F.substr(F.indexOf("(")).match(/([\d.]+=)?(#[a-f0-9]+|[a-z]+\(.*?\)|[a-z]+)/ig);for(var E=0,B=C.length,D;E0){E=" "+E}}else{if(B400}if(I==500){I=400}for(var J in G){if(!k(G,J)){continue}J=parseInt(J,10);if(!F||JD){D=J}K.push(J)}if(ID){I=D}K.sort(function(M,L){return(E?(M>=I&&L>=I)?ML:(M<=I&&L<=I)?M>L:Mcufoncanvas{text-indent:0;}@media screen{cvml\\:shape,cvml\\:rect,cvml\\:fill,cvml\\:shadow{behavior:url(#default#VML);display:block;antialias:true;position:absolute;}cufoncanvas{position:absolute;text-align:left;}cufon{display:inline-block;position:relative;vertical-align:'+(h?"middle":"text-bottom")+";}cufon cufontext{position:absolute;left:-10000in;font-size:1px;}a cufon{cursor:pointer}}@media print{cufon cufoncanvas{display:none;}}").replace(/;/g,"!important;"));function c(i,j){return a(i,/(?:em|ex|%)$|^[a-z-]+$/i.test(j)?"1em":j)}function a(l,m){if(m==="0"){return 0}if(/px$/i.test(m)){return parseFloat(m)}var k=l.style.left,j=l.runtimeStyle.left;l.runtimeStyle.left=l.currentStyle.left;l.style.left=m.replace("%","em");var i=l.style.pixelLeft;l.style.left=k;l.runtimeStyle.left=j;return i}function f(l,k,j,n){var i="computed"+n,m=k[i];if(isNaN(m)){m=k.get(n);k[i]=m=(m=="normal")?0:~~j.convertFrom(a(l,m))}return m}var g={};function d(p){var q=p.id;if(!g[q]){var n=p.stops,o=document.createElement("cvml:fill"),i=[];o.type="gradient";o.angle=180;o.focus="0";o.method="sigma";o.color=n[0][1];for(var 
m=1,l=n.length-1;mO){O=K}if(I>N){N=I}if(K":{"w":232},"?":{"d":"413,-133r0,133r-192,0r0,-133r192,0xm221,-188v-13,-173,78,-281,237,-282r341,0v53,0,80,-24,80,-71v0,-47,-27,-71,-80,-71r-799,0r0,-166r817,-1v141,-1,227,93,227,236v0,149,-87,234,-227,234r-315,0v-67,-1,-99,45,-89,121r-192,0","w":1218},"@":{"d":"305,-388v0,55,25,70,85,70r436,0r0,-146r-439,0v-55,0,-82,25,-82,76xm339,-62v-218,3,-334,-116,-339,-327v-4,-168,93,-282,227,-315v106,-26,574,-16,605,-3v56,23,105,70,105,151r0,316r-579,0v-98,2,-164,-54,-164,-148v0,-106,69,-154,188,-154r444,0v-2,-97,-62,-97,-185,-100v-135,-3,-342,-4,-427,22v-86,26,-136,115,-136,231v0,166,96,250,261,250r483,0r0,77r-483,0","w":1060},"A":{"d":"754,-341v0,-61,-24,-84,-89,-84r-626,0r0,-131r667,0v135,5,215,66,215,217r0,339r-745,0v-108,1,-176,-63,-176,-171v0,-107,69,-171,176,-170r578,0xm754,-131r0,-85r-534,0v-35,0,-53,14,-53,43v0,28,18,42,53,42r534,0","w":1096},"B":{"d":"969,-278v0,176,-93,278,-267,278r-702,0r0,-778r167,0r0,222r535,0v170,-2,267,101,267,278xm794,-278v0,-81,-38,-139,-114,-139r-513,0r0,278r513,0v76,0,114,-58,114,-139","w":1097},"C":{"d":"0,-278v0,-173,94,-278,267,-278r595,0r0,139r-573,0v-76,0,-114,58,-114,139v0,81,38,139,114,139r573,0r0,139r-595,0v-173,1,-267,-105,-267,-278","w":1022},"D":{"d":"0,-278v-1,-176,93,-278,267,-278r521,0r0,-222r167,0r0,778r-688,0v-171,2,-266,-102,-267,-278xm175,-278v0,81,38,139,114,139r499,0r0,-278r-499,0v-76,0,-114,58,-114,139","w":1130},"E":{"d":"176,-216v24,58,48,85,113,85r581,0r0,131r-603,0v-173,1,-267,-105,-267,-278v0,-173,94,-278,267,-278r603,0r0,131r-581,0v-65,4,-87,27,-113,84r694,0r0,125r-694,0","w":1022},"F":{"d":"105,-341v-10,-142,29,-222,167,-222r501,1r0,130r-423,0v-60,-4,-81,31,-78,91r501,-2r0,131r-501,2r0,425r-167,0r0,-425r-105,0r0,-131r105,0","w":906},"G":{"d":"0,-278v0,-173,94,-278,267,-278r770,1r0,571v3,126,-82,208,-203,208r-773,-2r0,-130r743,1v55,0,68,-33,66,-93r-603,0v-173,1,-267,-105,-267,-278xm175,-278v0,81,37,139,114,139r581,-2r0,-275r-581,-1v-76,0,-114,58,-114,139","w":1204},"H":{"d
":"735,-359v1,-35,-25,-58,-58,-58r-510,0r0,417r-167,0r0,-778r167,0r0,222r537,0v116,-4,198,68,198,170r0,386r-167,0r0,-359"},"I":{"d":"167,0r-167,0r0,-556r167,0r0,556xm167,-612r-167,0r0,-133r167,0r0,133","w":334},"J":{"d":"743,-612r-167,0r0,-133r167,0r0,133xm743,40v-5,114,-100,182,-221,182r-522,0r0,-139r510,0v44,0,66,-24,66,-73r0,-566r167,0r0,596","w":916},"K":{"d":"767,-481r146,0r0,-77r-146,0r0,77xm1040,-2v25,-199,-69,-334,-263,-334r-610,1r0,-442r-167,0r0,775r167,0r0,-195r588,0v95,-4,128,100,107,195r178,0xm914,-481v0,97,-49,145,-146,145r0,-145r146,0xm767,-481v4,77,-98,149,-175,146r175,0r0,-146","w":1195},"L":{"d":"183,-299v0,80,47,132,132,132r523,0r0,167r-569,0v-174,4,-268,-107,-268,-282r0,-276r182,0r0,259","w":996},"M":{"d":"961,-556v116,-4,197,66,197,170r0,386r-167,0r0,-359v0,-39,-19,-58,-58,-58r-273,0r0,417r-167,0r0,-417r-326,0r0,417r-167,0r0,-556r961,0","w":1312},"N":{"d":"688,-556v116,-4,198,68,198,170r0,386r-167,0r0,-359v0,-39,-19,-58,-58,-58r-494,0r0,417r-167,0r0,-556r688,0","w":1057},"O":{"d":"7,-368v-3,-111,96,-207,207,-207r536,0v112,-3,207,95,207,207r0,162v3,111,-96,207,-207,207r-536,0v-110,3,-207,-97,-207,-207r0,-162xm264,-445v-45,-1,-85,40,-85,85r0,138v-1,45,40,85,85,85r428,0v45,1,85,-40,85,-85r0,-138v1,-45,-40,-85,-85,-85r-428,0","w":1104},"P":{"d":"986,-278v1,176,-93,278,-267,278r-552,0r0,222r-167,0r0,-778r719,0v171,-2,266,102,267,278xm811,-278v0,-81,-38,-139,-114,-139r-530,0r0,278r530,0v76,0,114,-58,114,-139","w":1141},"Q":{"d":"0,-278v-1,-176,93,-278,267,-278r777,2r0,776r-167,0r0,-223r-610,1v-171,2,-266,-102,-267,-278xm175,-278v0,81,38,139,114,139r588,-1r0,-275r-588,-2v-77,0,-114,58,-114,139","w":1211},"R":{"d":"610,-558v134,0,213,83,197,228r-167,0v4,-51,-8,-86,-58,-86r-415,-1r0,417r-167,0r0,-556","w":950},"S":{"d":"913,-170v0,103,-65,170,-175,170r-738,0r0,-131r705,1v37,0,55,-14,55,-43v0,-28,-18,-42,-55,-42r-530,-1v-102,1,-175,-66,-175,-169v0,-101,66,-171,175,-171r708,0r0,131r-675,0v-37,0,-55,14,-55,42v0,28,18,42,55,42r530,1v102,-1,175,67,175,170","w"
:1039},"T":{"d":"267,-208v1,58,20,77,78,77r425,0r0,131r-504,0v-116,-3,-166,-47,-166,-167r0,-258r-100,0r0,-131r100,0r0,-222r167,0r0,222r503,0r0,131r-503,0r0,217","w":917},"U":{"d":"198,0v-119,6,-198,-71,-198,-170r0,-386r167,0r0,358v0,39,19,59,58,59r506,0r0,-417r167,0r0,556r-700,0"},"V":{"d":"167,-139r564,0r0,-417r167,0r0,556r-898,0r0,-556r167,0r0,417"},"W":{"d":"197,0v-115,4,-197,-68,-197,-170r0,-386r167,0r0,358v0,39,19,59,58,59r273,0r0,-417r167,0r0,417r326,0r0,-417r167,0r0,556r-961,0","w":1323},"X":{"d":"132,-208r646,0r0,-131r-646,0r0,131xm1,-138v-2,-70,61,-133,131,-131r0,131r-131,0xm0,-138r132,0r0,138r-132,0r0,-138xm1,-396v-2,71,61,133,131,131r0,-131r-131,0xm0,-396r132,0r0,-158r-132,0r0,158xm909,-138v2,-70,-61,-133,-131,-131r0,131r131,0xm910,-138r-132,0r0,138r132,0r0,-138xm909,-396v2,71,-61,133,-131,131r0,-131r131,0xm910,-396r-132,0r0,-158r132,0r0,158xm133,-454v-2,62,53,117,115,115r-115,0r0,-115xm133,-94v-2,-62,53,-117,115,-115r-115,0r0,115xm777,-454v2,62,-53,117,-115,115r115,0r0,-115xm778,-93v2,-62,-54,-117,-115,-115r115,0r0,115","w":1017},"Y":{"d":"754,92v53,-1,68,-31,66,-92r-622,0v-119,6,-198,-71,-198,-170r0,-386r167,0r0,336v-1,49,32,82,81,81r572,0r0,-417r167,0r0,576v3,124,-85,207,-204,207r-769,0r0,-135r740,0","w":1169},"Z":{"d":"0,-170v0,101,66,170,175,170r738,0r0,-131r-704,1v-37,0,-56,-14,-56,-43v0,-28,19,-42,56,-42r529,-1v102,1,175,-66,175,-169v0,-102,-66,-171,-175,-171r-707,-2r0,131r674,2v37,0,55,14,55,42v0,28,-18,42,-55,42r-530,1v-102,-1,-175,67,-175,170","w":1039},"[":{"d":"0,-931r306,0r0,153r-153,0r0,778r153,0r0,153r-306,0r0,-1084","w":361},"\\":{"d":"877,0r-692,-778r-185,0r681,778r196,0","w":942},"]":{"w":366},"^":{"w":406},"_":{"d":"0,61r1001,0r0,161r-1001,0r0,-161","w":1172},"`":{"d":"0,-806r94,0r112,111r-95,0","w":261},"a":{"d":"754,-341v0,-61,-24,-84,-89,-84r-626,0r0,-131r667,0v135,5,215,66,215,217r0,339r-745,0v-108,1,-176,-63,-176,-171v0,-107,69,-171,176,-170r578,0xm754,-131r0,-85r-534,0v-35,0,-53,14,-53,43v0,28,18,42,53,42r534,0","w":1096},"b":{"d"
:"969,-278v0,176,-93,278,-267,278r-702,0r0,-778r167,0r0,222r535,0v170,-2,267,101,267,278xm794,-278v0,-81,-38,-139,-114,-139r-513,0r0,278r513,0v76,0,114,-58,114,-139","w":1097},"c":{"d":"0,-278v0,-173,94,-278,267,-278r595,0r0,139r-573,0v-76,0,-114,58,-114,139v0,81,38,139,114,139r573,0r0,139r-595,0v-173,1,-267,-105,-267,-278","w":1022},"d":{"d":"0,-278v-1,-176,93,-278,267,-278r521,0r0,-222r167,0r0,778r-688,0v-171,2,-266,-102,-267,-278xm175,-278v0,81,38,139,114,139r499,0r0,-278r-499,0v-76,0,-114,58,-114,139","w":1130},"e":{"d":"176,-216v24,58,48,85,113,85r581,0r0,131r-603,0v-173,1,-267,-105,-267,-278v0,-173,94,-278,267,-278r603,0r0,131r-581,0v-65,4,-87,27,-113,84r694,0r0,125r-694,0","w":1022},"f":{"d":"105,-341v-10,-142,29,-222,167,-222r501,1r0,130r-423,0v-60,-4,-81,31,-78,91r501,-2r0,131r-501,2r0,425r-167,0r0,-425r-105,0r0,-131r105,0","w":906},"g":{"d":"0,-278v0,-173,94,-278,267,-278r770,1r0,571v3,126,-82,208,-203,208r-773,-2r0,-130r743,1v55,0,68,-33,66,-93r-603,0v-173,1,-267,-105,-267,-278xm175,-278v0,81,37,139,114,139r581,-2r0,-275r-581,-1v-76,0,-114,58,-114,139","w":1204},"h":{"d":"735,-359v1,-35,-25,-58,-58,-58r-510,0r0,417r-167,0r0,-778r167,0r0,222r537,0v116,-4,198,68,198,170r0,386r-167,0r0,-359"},"i":{"d":"167,0r-167,0r0,-556r167,0r0,556xm167,-612r-167,0r0,-133r167,0r0,133","w":334},"j":{"d":"743,-612r-167,0r0,-133r167,0r0,133xm743,40v-5,114,-100,182,-221,182r-522,0r0,-139r510,0v44,0,66,-24,66,-73r0,-566r167,0r0,596","w":916},"k":{"d":"767,-481r146,0r0,-77r-146,0r0,77xm1040,-2v25,-199,-69,-334,-263,-334r-610,1r0,-442r-167,0r0,775r167,0r0,-195r588,0v95,-4,128,100,107,195r178,0xm914,-481v0,97,-49,145,-146,145r0,-145r146,0xm767,-481v4,77,-98,149,-175,146r175,0r0,-146","w":1195},"l":{"d":"183,-299v0,80,47,132,132,132r523,0r0,167r-569,0v-174,4,-268,-107,-268,-282r0,-276r182,0r0,259","w":996},"m":{"d":"961,-556v116,-4,197,66,197,170r0,386r-167,0r0,-359v0,-39,-19,-58,-58,-58r-273,0r0,417r-167,0r0,-417r-326,0r0,417r-167,0r0,-556r961,0","w":1312},"n":{"d":"688,-556v116,-
4,198,68,198,170r0,386r-167,0r0,-359v0,-39,-19,-58,-58,-58r-494,0r0,417r-167,0r0,-556r688,0","w":1057},"o":{"d":"7,-368v-3,-111,96,-207,207,-207r536,0v112,-3,207,95,207,207r0,162v3,111,-96,207,-207,207r-536,0v-110,3,-207,-97,-207,-207r0,-162xm264,-445v-45,-1,-85,40,-85,85r0,138v-1,45,40,85,85,85r428,0v45,1,85,-40,85,-85r0,-138v1,-45,-40,-85,-85,-85r-428,0","w":1104},"p":{"d":"986,-278v1,176,-93,278,-267,278r-552,0r0,222r-167,0r0,-778r719,0v171,-2,266,102,267,278xm811,-278v0,-81,-38,-139,-114,-139r-530,0r0,278r530,0v76,0,114,-58,114,-139","w":1141},"q":{"d":"0,-278v-1,-176,93,-278,267,-278r777,2r0,776r-167,0r0,-223r-610,1v-171,2,-266,-102,-267,-278xm175,-278v0,81,38,139,114,139r588,-1r0,-275r-588,-2v-77,0,-114,58,-114,139","w":1211},"r":{"d":"610,-558v134,0,213,83,197,228r-167,0v4,-51,-8,-86,-58,-86r-415,-1r0,417r-167,0r0,-556","w":950},"s":{"d":"913,-170v0,103,-65,170,-175,170r-738,0r0,-131r705,1v37,0,55,-14,55,-43v0,-28,-18,-42,-55,-42r-530,-1v-102,1,-175,-66,-175,-169v0,-101,66,-171,175,-171r708,0r0,131r-675,0v-37,0,-55,14,-55,42v0,28,18,42,55,42r530,1v102,-1,175,67,175,170","w":1039},"t":{"d":"267,-208v1,58,20,77,78,77r425,0r0,131r-504,0v-116,-3,-166,-47,-166,-167r0,-258r-100,0r0,-131r100,0r0,-222r167,0r0,222r503,0r0,131r-503,0r0,217","w":917},"u":{"d":"198,0v-119,6,-198,-71,-198,-170r0,-386r167,0r0,358v0,39,19,59,58,59r506,0r0,-417r167,0r0,556r-700,0"},"v":{"d":"167,-139r564,0r0,-417r167,0r0,556r-898,0r0,-556r167,0r0,417"},"w":{"d":"197,0v-115,4,-197,-68,-197,-170r0,-386r167,0r0,358v0,39,19,59,58,59r273,0r0,-417r167,0r0,417r326,0r0,-417r167,0r0,556r-961,0","w":1323},"x":{"d":"132,-208r646,0r0,-131r-646,0r0,131xm1,-138v-2,-70,61,-133,131,-131r0,131r-131,0xm0,-138r132,0r0,138r-132,0r0,-138xm1,-396v-2,71,61,133,131,131r0,-131r-131,0xm0,-396r132,0r0,-158r-132,0r0,158xm909,-138v2,-70,-61,-133,-131,-131r0,131r131,0xm910,-138r-132,0r0,138r132,0r0,-138xm909,-396v2,71,-61,133,-131,131r0,-131r131,0xm910,-396r-132,0r0,-158r132,0r0,158xm133,-454v-2,62,53,117,115,115r-115,0r
0,-115xm133,-94v-2,-62,53,-117,115,-115r-115,0r0,115xm777,-454v2,62,-53,117,-115,115r115,0r0,-115xm778,-93v2,-62,-54,-117,-115,-115r115,0r0,115","w":1017},"y":{"d":"754,92v53,-1,68,-31,66,-92r-622,0v-119,6,-198,-71,-198,-170r0,-386r167,0r0,336v-1,49,32,82,81,81r572,0r0,-417r167,0r0,576v3,124,-85,207,-204,207r-769,0r0,-135r740,0","w":1169},"z":{"d":"0,-170v0,101,66,170,175,170r738,0r0,-131r-704,1v-37,0,-56,-14,-56,-43v0,-28,19,-42,56,-42r529,-1v102,1,175,-66,175,-169v0,-102,-66,-171,-175,-171r-707,-2r0,131r674,2v37,0,55,14,55,42v0,28,-18,42,-55,42r-530,1v-102,-1,-175,67,-175,170","w":1039},"{":{"d":"0,-466v58,7,98,-17,100,-66v3,-102,-10,-222,10,-308v23,-50,68,-91,143,-91r196,0r0,153r-163,0v-22,0,-33,11,-33,33r0,289v1,35,-29,68,-64,67v36,0,65,30,64,66r0,290v0,22,11,33,33,33r163,0r0,153r-196,0v-87,1,-153,-65,-153,-153r0,-246v-1,-49,-42,-75,-100,-67r0,-153","w":515},"|":{"w":211},"}":{"d":"349,-778v0,-93,-61,-152,-153,-153r-196,0r0,153r163,0v22,0,33,11,33,33r0,289v-1,35,29,68,64,67v-36,0,-65,30,-64,66r0,290v0,22,-11,33,-33,33r-163,0r0,153r196,0v91,0,153,-64,153,-153r0,-246v1,-49,42,-75,100,-67r0,-153v-58,7,-99,-17,-100,-66r0,-246","w":515},"~":{"w":342},"\u0131":{"w":256},"\u00c7":{"w":729},"\u00d6":{"w":1084},"\u00dc":{"w":761},"\u00e7":{"w":578},"\u00f6":{"d":"7,-368v-3,-111,96,-207,207,-207r536,0v112,-3,207,95,207,207r0,162v3,111,-96,207,-207,207r-536,0v-110,3,-207,-97,-207,-207r0,-162xm264,-445v-45,-1,-85,40,-85,85r0,138v-1,45,40,85,85,85r428,0v45,1,85,-40,85,-85r0,-138v1,-45,-40,-85,-85,-85r-428,0xm289,-650r0,-128r379,0r0,128r-379,0","w":1084},"\u00fc":{"d":"198,0v-119,6,-198,-71,-198,-170r0,-386r167,0r0,358v0,39,19,59,58,59r506,0r0,-417r167,0r0,556r-700,0xm284,-650r0,-128r379,0r0,128r-379,0","w":1032},"\u00a0":{"w":668}}}); diff --git a/doc/themes/triqs/static/triqs.css b/doc/themes/triqs/static/triqs.css new file mode 100644 index 00000000..e183cb21 --- /dev/null +++ b/doc/themes/triqs/static/triqs.css @@ -0,0 +1,449 @@ +/* + * sphinx13.css + * ~~~~~~~~~~~~ + * 
+ * Sphinx stylesheet -- sphinx13 theme. + * + * :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +@import url("basic.css"); + +/* -- page layout ----------------------------------------------------------- */ + +body { + font-family: 'Open Sans', 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva', + 'Verdana', sans-serif; + font-size: 14px; + text-align: center; + background-image: url(bodybg.png); + color: black; + padding: 0; + border-right: 1px solid #0a507a; + border-left: 1px solid #0a507a; + + margin: 0 auto; + min-width: 780px; + max-width: 1080px; +} + + +.red{ + color: red +} +.blue{ + color: blue +} +.green{ + color: green +} + +.param{ + color: blue +} + +a.triqs { + color: #073958; + text-decoration: none; +} + +a.triqs:hover { + color: #0a527f; + text-decoration: none; +} + +.pageheader { + background-color: #dcdcdc; + text-align: left; + padding: 10px 15px; + color: #073958; + border: none; +} + +.pageheader ul { + float: right; + color: #073958; + list-style-type: none; + padding-left: 0; + margin-top: 22px; + margin-right: 10px; +} + +.pageheader li { + float: left; + margin: 0 0 0 10px; +} + +.pageheader li a { + padding: 8px 12px; + color: #073958; + text-shadow: none; +} + +.pageheader li a:hover { + background-color: #f9f9f0; + color: #0a507a; + text-shadow: none; +} + +div.document { + background-color: white; + text-align: left; +} + +div.bodywrapper { + margin: 0 240px 0 0; + border-right: 1px solid #0a507a; +} + +div.body { + margin: 0; + padding: 0.5em 20px 20px 20px; +} + +div.related { + font-size: 1em; + color: white; +} + +div.related ul { + background-image: url(relbg.png); + height: 1.9em; + border-top: 1px solid #002e50; + border-bottom: 1px solid #002e50; +} + +div.related ul li { + margin: 0 5px 0 0; + padding: 0; + float: left; +} + +div.related ul li.right { + float: right; + margin-right: 5px; +} + +div.related ul li a { + margin: 0; + padding: 0 5px 0 5px; + 
line-height: 1.75em; + color: #f9f9f0; + text-shadow: 0px 0px 1px rgba(0, 0, 0, 0.5); +} + +div.related ul li a:hover { + color: white; + /*text-decoration: underline;*/ + text-shadow: 0px 0px 1px rgba(255, 255, 255, 0.5); +} + +div.sphinxsidebarwrapper { + position: relative; + top: 0px; + padding: 0; +} + +div.sphinxsidebar { + margin: 0; + padding: 0 15px 15px 0; + width: 210px; + float: right; + font-size: 1em; + text-align: left; +} + +div.sphinxsidebar .logo { + font-size: 1.8em; + color: #0A507A; + font-weight: 300; + text-align: center; +} + +div.sphinxsidebar .logo img { + vertical-align: middle; +} + +div.sphinxsidebar input { + border: 1px solid #aaa; + font-family: 'Open Sans', 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva', + 'Verdana', sans-serif; + font-size: 1em; +} + +div.sphinxsidebar h3 { + font-size: 1.5em; + border-top: 1px solid #0a507a; + margin-top: 1em; + margin-bottom: 0.5em; + padding-top: 0.5em; +} + +div.sphinxsidebar h4 { + font-size: 1.2em; + margin-bottom: 0; +} + +div.sphinxsidebar h3, div.sphinxsidebar h4 { + margin-right: -15px; + margin-left: -15px; + padding-right: 14px; + padding-left: 14px; + color: #333; + font-weight: 300; + /*text-shadow: 0px 0px 0.5px rgba(0, 0, 0, 0.4);*/ +} + +div.sphinxsidebarwrapper > h3:first-child { + margin-top: 0.5em; + border: none; +} + +div.sphinxsidebar h3 a { + color: #333; +} + +div.sphinxsidebar ul { + color: #444; + margin-top: 7px; + padding: 0; + line-height: 130%; +} + +div.sphinxsidebar ul ul { + margin-left: 20px; + list-style-image: url(listitem.png); +} + +div.footer { + background-image: url(footerbg.png); + color: #ccc; + text-shadow: 0 0 .2px rgba(255, 255, 255, 0.8); + padding: 3px 8px 3px 0; + clear: both; + font-size: 0.8em; + text-align: right; +} + +/* no need to make a visible link to Sphinx on the Sphinx page */ +div.footer a { + color: #ccc; +} + +/* -- body styles ----------------------------------------------------------- */ + +p { + margin: 0.8em 0 0.5em 0; +} + +a { 
+ color: #A2881D; + text-decoration: none; +} + +a:hover { + color: #E1C13F; +} + +div.body a { + text-decoration: underline; +} + +h1 { + margin: 10px 0 0 0; + font-size: 2.4em; + color: #0A507A; + font-weight: 300; +} + +h2 { + margin: 1em 0 0.2em 0; + font-size: 1.5em; + font-weight: 300; + padding: 0; + color: #174967; +} + +h3 { + margin: 1em 0 -0.3em 0; + font-size: 1.3em; + font-weight: 300; +} + +div.body h1 a, div.body h2 a, div.body h3 a, div.body h4 a, div.body h5 a, div.body h6 a { + text-decoration: none; +} + +div.body h1 a tt, div.body h2 a tt, div.body h3 a tt, div.body h4 a tt, div.body h5 a tt, div.body h6 a tt { + color: #0A507A !important; + font-size: inherit !important; +} + +a.headerlink { + color: #0A507A !important; + font-size: 12px; + margin-left: 6px; + padding: 0 4px 0 4px; + text-decoration: none !important; + float: right; +} + +a.headerlink:hover { + background-color: #ccc; + color: white!important; +} + +cite, code, tt { + font-family: 'Consolas', 'DejaVu Sans Mono', + 'Bitstream Vera Sans Mono', monospace; + font-size: 14px; + letter-spacing: -0.02em; +} + +tt { + background-color: #f2f2f2; + border: 1px solid #ddd; + border-radius: 2px; + color: #333; + padding: 1px; +} + +tt.descname, tt.descclassname, tt.xref { + border: 0; +} + +hr { + border: 1px solid #abc; + margin: 2em; +} + +a tt { + border: 0; + color: #a2881d; +} + +a tt:hover { + color: #e1c13f; +} + +pre { + font-family: 'Consolas', 'DejaVu Sans Mono', + 'Bitstream Vera Sans Mono', monospace; + font-size: 13px; + letter-spacing: 0.015em; + line-height: 120%; + padding: 0.5em; + border: 1px solid #ccc; + border-radius: 2px; + background-color: #f8f8f8; +} + +pre a { + color: inherit; + text-decoration: underline; +} + +td.linenos pre { + padding: 0.5em 0; +} + +div.quotebar { + background-color: #f8f8f8; + max-width: 250px; + float: right; + padding: 0px 7px; + border: 1px solid #ccc; + margin-left: 1em; +} + +div.topic { + background-color: #f8f8f8; +} + +table { + 
border-collapse: collapse; + margin: 0 -0.5em 0 -0.5em; +} + +table td, table th { + padding: 0.2em 0.5em 0.2em 0.5em; +} + +div.admonition, div.warning { + font-size: 0.9em; + margin: 1em 0 1em 0; + border: 1px solid #86989B; + border-radius: 2px; + background-color: #f7f7f7; + padding: 0; +} + +div.admonition p, div.warning p { + margin: 0.5em 1em 0.5em 1em; + padding: 0; +} + +div.admonition pre, div.warning pre { + margin: 0.4em 1em 0.4em 1em; +} + +div.admonition p.admonition-title, +div.warning p.admonition-title { + margin-top: 1em; + padding-top: 0.5em; + font-weight: bold; +} + +div.warning { + border: 1px solid #940000; +/* background-color: #FFCCCF;*/ +} + +div.warning p.admonition-title { +} + +div.admonition ul, div.admonition ol, +div.warning ul, div.warning ol { + margin: 0.1em 0.5em 0.5em 3em; + padding: 0; +} + +div.admonition .highlight, div.warning .highlight { + background-color: #f7f7f7; +} + +.viewcode-back { + font-family: 'Open Sans', 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva', + 'Verdana', sans-serif; +} + +div.viewcode-block:target { + background-color: #f4debf; + border-top: 1px solid #ac9; + border-bottom: 1px solid #ac9; +} + + +.my-code-block.std-ref { + color : red; +} + +.cppbrief { + color: #C6792C; + font-style: oblique; +} + +.cppsynopsis { + background-color: #E7EDF9; + /*font-family: 'Open Sans', 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva', 'Verdana', sans-serif;*/ + /*font-family: monospace; */ + font-family: Verdana, Arial, Lucida Console; + font-size: 80%; + /*font-style: oblique;*/ + /* white-space: pre;*/ +} + + + diff --git a/doc/themes/triqs/theme.conf b/doc/themes/triqs/theme.conf new file mode 100644 index 00000000..96ca439a --- /dev/null +++ b/doc/themes/triqs/theme.conf @@ -0,0 +1,4 @@ +[theme] +inherit = basic +stylesheet = triqs.css +pygments_style = sphinx diff --git a/doc/tutorials/ce-gamma-fscs_wien2k.rst b/doc/tutorials/ce-gamma-fscs_wien2k.rst index 5ee73992..cd21ecb3 100644 --- 
a/doc/tutorials/ce-gamma-fscs_wien2k.rst +++ b/doc/tutorials/ce-gamma-fscs_wien2k.rst @@ -96,7 +96,7 @@ In order to run DFT+DMFT calculations within Hubbard-I we need the corresponding with the CT-QMC solver (see :ref:`singleshot`), however there are also some differences. First difference is that we import the Hubbard-I solver by:: - from pytriqs.applications.impurity_solvers.hubbard_I.hubbard_solver import Solver + from triqs.applications.impurity_solvers.hubbard_I.hubbard_solver import Solver The Hubbard-I solver is very fast and we do not need to take into account the DFT block structure or use any approximation for the *U*-matrix. We load and convert the :program:`dmftproj` output diff --git a/doc/tutorials/images_scripts/Ce-gamma.py b/doc/tutorials/images_scripts/Ce-gamma.py index 258ca52a..599be898 100644 --- a/doc/tutorials/images_scripts/Ce-gamma.py +++ b/doc/tutorials/images_scripts/Ce-gamma.py @@ -1,6 +1,6 @@ from triqs_dft_tools.sumk_dft import * -from triqs_dft_tools.converters.wien2k_converter import * -from pytriqs.applications.impurity_solvers.hubbard_I.hubbard_solver import Solver +from triqs_dft_tools.converters.wien2k import * +from triqs.applications.impurity_solvers.hubbard_I.hubbard_solver import Solver import os dft_filename = os.getcwd().rpartition('/')[2] @@ -111,7 +111,7 @@ for iteration_number in range(1,Loops+1): #Save essential SumkDFT data: SK.save(['chemical_potential','dc_imp','dc_energ','correnerg']) if (mpi.is_master_node()): - print 'DC after solver: ',SK.dc_imp[0] + print('DC after solver: ',SK.dc_imp[0]) # print out occupancy matrix of Ce 4f mpi.report("Orbital densities of impurity Green function:") diff --git a/doc/tutorials/images_scripts/Ce-gamma_DOS.py b/doc/tutorials/images_scripts/Ce-gamma_DOS.py index c96d756f..b3285dcf 100644 --- a/doc/tutorials/images_scripts/Ce-gamma_DOS.py +++ b/doc/tutorials/images_scripts/Ce-gamma_DOS.py @@ -1,6 +1,6 @@ from triqs_dft_tools.sumk_dft_tools import * -from 
triqs_dft_tools.converters.wien2k_converter import * -from pytriqs.applications.impurity_solvers.hubbard_I.hubbard_solver import Solver +from triqs_dft_tools.converters.wien2k import * +from triqs.applications.impurity_solvers.hubbard_I.hubbard_solver import Solver # Creates the data directory, cd into it: #Prepare_Run_Directory(DirectoryName = "Ce-Gamma") @@ -31,7 +31,7 @@ SK.dc_imp = mpi.bcast(SK.dc_imp) SK.dc_energ = mpi.bcast(SK.dc_energ) if (mpi.is_master_node()): - print 'DC after reading SK: ',SK.dc_imp[0] + print('DC after reading SK: ',SK.dc_imp[0]) N = SK.corr_shells[0]['dim'] l = SK.corr_shells[0]['l'] diff --git a/doc/tutorials/images_scripts/NiO_local_lattice_GF.py b/doc/tutorials/images_scripts/NiO_local_lattice_GF.py index 21cf2eb6..b48c9b98 100644 --- a/doc/tutorials/images_scripts/NiO_local_lattice_GF.py +++ b/doc/tutorials/images_scripts/NiO_local_lattice_GF.py @@ -1,12 +1,12 @@ from itertools import * import numpy as np -import pytriqs.utility.mpi as mpi -from pytriqs.archive import * -from pytriqs.gf import * +import triqs.utility.mpi as mpi +from h5 import * +from triqs.gf import * from triqs_dft_tools.sumk_dft import * from triqs_dft_tools.sumk_dft_tools import * -from pytriqs.operators.util.hamiltonians import * -from pytriqs.operators.util.U_matrix import * +from triqs.operators.util.hamiltonians import * +from triqs.operators.util.U_matrix import * from triqs_cthyb import * import warnings warnings.filterwarnings("ignore", category=FutureWarning) @@ -39,7 +39,7 @@ if mpi.is_master_node(): if not 'Iterations' in ar['DMFT_results']: ar['DMFT_results'].create_group('Iterations') if 'iteration_count' in ar['DMFT_results']: iteration_offset = ar['DMFT_results']['iteration_count']+1 - print('offset',iteration_offset) + print(('offset',iteration_offset)) Sigma_iw = ar['DMFT_results']['Iterations']['Sigma_it'+str(iteration_offset-1)] SK.dc_imp = ar['DMFT_results']['Iterations']['dc_imp'+str(iteration_offset-1)] SK.dc_energ = 
ar['DMFT_results']['Iterations']['dc_energ'+str(iteration_offset-1)] @@ -54,13 +54,13 @@ SK.chemical_potential = mpi.bcast(SK.chemical_potential) SK.put_Sigma(Sigma_imp = [Sigma_iw]) -ikarray = numpy.array(range(SK.n_k)) +ikarray = numpy.array(list(range(SK.n_k))) # set up the orbitally resolved local lattice greens function: n_orbs = SK.proj_mat_csc.shape[2] spn = SK.spin_block_names[SK.SO] mesh = Sigma_iw.mesh -block_structure = [range(n_orbs) for sp in spn] +block_structure = [list(range(n_orbs)) for sp in spn] gf_struct = [(spn[isp], block_structure[isp]) for isp in range(SK.n_spin_blocks[SK.SO])] block_ind_list = [block for block, inner in gf_struct] diff --git a/doc/tutorials/images_scripts/Sr2MgOsO6_SOC.py b/doc/tutorials/images_scripts/Sr2MgOsO6_SOC.py index fe07a9a7..136107ea 100644 --- a/doc/tutorials/images_scripts/Sr2MgOsO6_SOC.py +++ b/doc/tutorials/images_scripts/Sr2MgOsO6_SOC.py @@ -1,11 +1,11 @@ # Import the modules: from triqs_dft_tools.sumk_dft import * -from pytriqs.gf import * -from pytriqs.archive import HDFArchive -from pytriqs.operators.util import * -from pytriqs.operators.util.U_matrix import * +from triqs.gf import * +from h5 import HDFArchive +from triqs.operators.util import * +from triqs.operators.util.U_matrix import * from triqs_cthyb import * -import pytriqs.utility.mpi as mpi +import triqs.utility.mpi as mpi # Init the SumK class: filename = 'Sr2MgOsO6_SOC.h5' diff --git a/doc/tutorials/images_scripts/Sr2MgOsO6_noSOC.py b/doc/tutorials/images_scripts/Sr2MgOsO6_noSOC.py index 8740b061..ef2fd5df 100644 --- a/doc/tutorials/images_scripts/Sr2MgOsO6_noSOC.py +++ b/doc/tutorials/images_scripts/Sr2MgOsO6_noSOC.py @@ -1,14 +1,14 @@ # Import the modules: from triqs_dft_tools.sumk_dft import * -from pytriqs.gf import * -from pytriqs.archive import HDFArchive -from pytriqs.operators.util import * -from pytriqs.operators.util.U_matrix import * +from triqs.gf import * +from h5 import HDFArchive +from triqs.operators.util import * +from 
triqs.operators.util.U_matrix import * from triqs_cthyb import * -import pytriqs.utility.mpi as mpi +import triqs.utility.mpi as mpi # Convert the input -from triqs_dft_tools.converters.wien2k_converter import * +from triqs_dft_tools.converters.wien2k import * Converter = Wien2kConverter(filename = "Sr2MgOsO6_noSOC") Converter.convert_dft_input() diff --git a/doc/tutorials/images_scripts/converter.py b/doc/tutorials/images_scripts/converter.py index 8bdabb9b..bef29d99 100644 --- a/doc/tutorials/images_scripts/converter.py +++ b/doc/tutorials/images_scripts/converter.py @@ -1,3 +1,3 @@ -from triqs_dft_tools.converters.vasp_converter import * +from triqs_dft_tools.converters.vasp import * Converter = VaspConverter(filename = 'nio') Converter.convert_dft_input() diff --git a/doc/tutorials/images_scripts/dft_dmft_cthyb.py b/doc/tutorials/images_scripts/dft_dmft_cthyb.py index 4d6d9dc9..9d8ae67b 100644 --- a/doc/tutorials/images_scripts/dft_dmft_cthyb.py +++ b/doc/tutorials/images_scripts/dft_dmft_cthyb.py @@ -1,10 +1,10 @@ -import pytriqs.utility.mpi as mpi -from pytriqs.operators.util import * -from pytriqs.archive import HDFArchive +import triqs.utility.mpi as mpi +from triqs.operators.util import * +from h5 import HDFArchive from triqs_cthyb import * -from pytriqs.gf import * +from triqs.gf import * from triqs_dft_tools.sumk_dft import * -from triqs_dft_tools.converters.wien2k_converter import * +from triqs_dft_tools.converters.wien2k import * dft_filename='SrVO3' beta = 40 @@ -49,7 +49,7 @@ p["fit_min_n"] = 30 p["fit_max_n"] = 60 # If conversion step was not done, we could do it here. Uncomment the lines it you want to do this. 
-#from triqs_dft_tools.converters.wien2k_converter import * +#from triqs_dft_tools.converters.wien2k import * #Converter = Wien2kConverter(filename=dft_filename, repacking=True) #Converter.convert_dft_input() #mpi.barrier() @@ -76,7 +76,7 @@ spin_names = ["up","down"] orb_names = [i for i in range(n_orb)] # Use GF structure determined by DFT blocks -gf_struct = [(block, indices) for block, indices in SK.gf_struct_solver[0].iteritems()] +gf_struct = [(block, indices) for block, indices in SK.gf_struct_solver[0].items()] # Construct Solver S = Solver(beta=beta, gf_struct=gf_struct) @@ -97,7 +97,7 @@ if previous_present: SK.set_dc(dc_imp,dc_energ) for iteration_number in range(1,loops+1): - if mpi.is_master_node(): print "Iteration = ", iteration_number + if mpi.is_master_node(): print("Iteration = ", iteration_number) SK.symm_deg_gf(S.Sigma_iw,orb=0) # symmetrise Sigma SK.set_Sigma([ S.Sigma_iw ]) # set Sigma into the SumK class diff --git a/doc/tutorials/images_scripts/maxent.py b/doc/tutorials/images_scripts/maxent.py index 841d8067..00f2ac49 100644 --- a/doc/tutorials/images_scripts/maxent.py +++ b/doc/tutorials/images_scripts/maxent.py @@ -1,5 +1,5 @@ -from pytriqs.gf import * -from pytriqs.archive import * +from triqs.gf import * +from h5 import * from triqs_maxent import * filename = 'nio' @@ -12,7 +12,7 @@ if 'iteration_count' in ar['DMFT_results']: tm = TauMaxEnt(cost_function='bryan', probability='normal') -print(G_latt['up'][0,0]) +print((G_latt['up'][0,0])) t2g_orbs = [0,1,3] eg_orbs = [2,4] op_orbs = [5,6,7] @@ -22,7 +22,7 @@ orbs = [t2g_orbs, eg_orbs, op_orbs] for orb in orbs: - print '\n'+str(orb[0])+'\n' + print('\n'+str(orb[0])+'\n') gf = 0*G_latt['up'][0,0] for iO in orb: @@ -43,7 +43,7 @@ for orb in orbs: # you may be interested in the details of the line analyzer: - # from pytriqs.plot.mpl_interface import oplot + # from triqs.plot.mpl_interface import oplot #plt.figure(2) #result.analyzer_results['LineFitAnalyzer'].plot_linefit() 
#plt.savefig('ana'+str(orb[0])+'.pdf',fmt='pdf') diff --git a/doc/tutorials/images_scripts/nio.py b/doc/tutorials/images_scripts/nio.py index 0195e797..c202825d 100644 --- a/doc/tutorials/images_scripts/nio.py +++ b/doc/tutorials/images_scripts/nio.py @@ -1,13 +1,13 @@ from itertools import * import numpy as np -import pytriqs.utility.mpi as mpi -from pytriqs.archive import * -from pytriqs.gf import * -import sys, pytriqs.version as triqs_version +import triqs.utility.mpi as mpi +from h5 import * +from triqs.gf import * +import sys, triqs.version as triqs_version from triqs_dft_tools.sumk_dft import * from triqs_dft_tools.sumk_dft_tools import * -from pytriqs.operators.util.hamiltonians import * -from pytriqs.operators.util.U_matrix import * +from triqs.operators.util.hamiltonians import * +from triqs.operators.util.U_matrix import * from triqs_cthyb import * import triqs_cthyb.version as cthyb_version import triqs_dft_tools.version as dft_tools_version @@ -30,7 +30,7 @@ for i_sh in range(len(SK.deg_shells)): mpi.report('found {0:d} blocks of degenerate orbitals in shell {1:d}'.format(num_block_deg_orbs, i_sh)) for iblock in range(num_block_deg_orbs): mpi.report('block {0:d} consists of orbitals:'.format(iblock)) - for keys in SK.deg_shells[i_sh][iblock].keys(): + for keys in list(SK.deg_shells[i_sh][iblock].keys()): mpi.report(' '+keys) # Setup CTQMC Solver diff --git a/doc/tutorials/images_scripts/nio_csc.py b/doc/tutorials/images_scripts/nio_csc.py index 75f2c2fa..849b8b30 100644 --- a/doc/tutorials/images_scripts/nio_csc.py +++ b/doc/tutorials/images_scripts/nio_csc.py @@ -1,17 +1,17 @@ from itertools import * import numpy as np -import pytriqs.utility.mpi as mpi -from pytriqs.archive import * -from pytriqs.gf import * -import sys, pytriqs.version as triqs_version +import triqs.utility.mpi as mpi +from h5 import * +from triqs.gf import * +import sys, triqs.version as triqs_version from triqs_dft_tools.sumk_dft import * from triqs_dft_tools.sumk_dft_tools import 
* -from pytriqs.operators.util.hamiltonians import * -from pytriqs.operators.util.U_matrix import * +from triqs.operators.util.hamiltonians import * +from triqs.operators.util.U_matrix import * from triqs_cthyb import * import triqs_cthyb.version as cthyb_version import triqs_dft_tools.version as dft_tools_version -from triqs_dft_tools.converters.vasp_converter import * +from triqs_dft_tools.converters.vasp import * import warnings @@ -37,7 +37,7 @@ def dmft_cycle(): mpi.report('found {0:d} blocks of degenerate orbitals in shell {1:d}'.format(num_block_deg_orbs, i_sh)) for iblock in range(num_block_deg_orbs): mpi.report('block {0:d} consists of orbitals:'.format(iblock)) - for keys in SK.deg_shells[i_sh][iblock].keys(): + for keys in list(SK.deg_shells[i_sh][iblock].keys()): mpi.report(' '+keys) # Setup CTQMC Solver @@ -176,15 +176,15 @@ def dmft_cycle(): if mpi.is_master_node(): - print 'calculating mu...' + print('calculating mu...') SK.chemical_potential = SK.calc_mu( precision = 0.000001 ) if mpi.is_master_node(): - print 'calculating GAMMA' + print('calculating GAMMA') SK.calc_density_correction(dm_type='vasp') if mpi.is_master_node(): - print 'calculating energy corrections' + print('calculating energy corrections') correnerg = 0.5 * (S.G_iw * S.Sigma_iw).total_density() diff --git a/doc/tutorials/sr2mgoso6_nosoc.rst b/doc/tutorials/sr2mgoso6_nosoc.rst index 05ff1d08..4bbd3684 100644 --- a/doc/tutorials/sr2mgoso6_nosoc.rst +++ b/doc/tutorials/sr2mgoso6_nosoc.rst @@ -57,9 +57,9 @@ At the end of the run you see the density matrix in Wannier space: As you can see, there are off-diagonal elements between the :math:`d_{x^2-y^2}` and the :math:`d_{xy}` orbital. We convert the output to the hdf5 archive, using -the python module :class:`Wien2kConverter `. A simple python script doing this is:: +the python module :class:`Wien2kConverter `. 
A simple python script doing this is:: - from triqs_dft_tools.converters.wien2k_converter import * + from triqs_dft_tools.converters.wien2k import * Converter = Wien2kConverter(filename = "Sr2MgOsO6_noSOC") Converter.convert_dft_input() @@ -123,8 +123,8 @@ The interaction Hamiltonian We now set up the interaction Hamiltonian. Since we want to rotate the interaction matrix into the local basis, we are using the Slater convention for it:: - from pytriqs.operators.util import * - from pytriqs.operators.util.U_matrix import * + from triqs.operators.util import * + from triqs.operators.util.U_matrix import * U = 2.0 J = 0.2 @@ -141,7 +141,7 @@ Note that we needed to set up the interaction Hamiltonian for the full set of fi Now we have the interaction Hamiltonian for the solver, which we set up next:: from triqs_cthyb import * - import pytriqs.utility.mpi as mpi + import triqs.utility.mpi as mpi beta = 40.0 S = Solver(beta=beta, gf_struct=SK.block_structure.gf_struct_solver_list[0]) diff --git a/doc/tutorials/sr2mgoso6_soc.rst b/doc/tutorials/sr2mgoso6_soc.rst index 23f2aa82..847a375c 100644 --- a/doc/tutorials/sr2mgoso6_soc.rst +++ b/doc/tutorials/sr2mgoso6_soc.rst @@ -58,9 +58,9 @@ At the end of the run you see the density matrix in Wannier space: As you can see, there are a lot of off-diagonal elements now, in particular also off-diagonal in spin space. This is just telling us that spin is not a good quantum number any more in the presence of SOC. We convert the output to the hdf5 archive, using -the python module :class:`Wien2kConverter `. A simple python script doing this is:: +the python module :class:`Wien2kConverter `. A simple python script doing this is:: - from triqs_dft_tools.converters.wien2k_converter import * + from triqs_dft_tools.converters.wien2k import * Converter = Wien2kConverter(filename = "Sr2MgOsO6_SOC") Converter.convert_dft_input() @@ -119,8 +119,8 @@ The interaction Hamiltonian We now set up the interaction Hamiltonian. 
Since we want to rotate the interaction matrix into the local basis, we are using the Slater convention for it. We use *l=2* for *d* orbitals. Also, for SOC calculations, we need to inflate the resulting matrix to size 10x10:: - from pytriqs.operators.util import * - from pytriqs.operators.util.U_matrix import * + from triqs.operators.util import * + from triqs.operators.util.U_matrix import * U = 2.0 J = 0.2 @@ -139,7 +139,7 @@ Note that we needed to set up the interaction Hamiltonian first for the full set Now we have the interaction Hamiltonian for the solver, which we set up next:: from triqs_cthyb import * - import pytriqs.utility.mpi as mpi + import triqs.utility.mpi as mpi beta = 40.0 S = Solver(beta=beta, gf_struct=SK.block_structure.gf_struct_solver_list[0]) diff --git a/doc/tutorials/srvo3.rst b/doc/tutorials/srvo3.rst index 16558fd3..49ebb0c4 100644 --- a/doc/tutorials/srvo3.rst +++ b/doc/tutorials/srvo3.rst @@ -51,10 +51,10 @@ Then :program:`dmftproj` is executed in its default mode (i.e. without spin-pola dmftproj This program produces the necessary files for the conversion to the hdf5 file structure. This is done using -the python module :class:`Wien2kConverter `. +the python module :class:`Wien2kConverter `. 
A simple python script that initialises the converter is:: - from triqs_dft_tools.converters.wien2k_converter import * + from triqs_dft_tools.converters.wien2k import * Converter = Wien2kConverter(filename = "SrVO3") After initializing the interface module, we can now convert the input @@ -77,11 +77,11 @@ Loading modules First, we load the necessary modules:: from triqs_dft_tools.sumk_dft import * - from pytriqs.gf import * - from pytriqs.archive import HDFArchive - from pytriqs.operators.util import * + from triqs.gf import * + from h5 import HDFArchive + from triqs.operators.util import * from triqs_cthyb import * - import pytriqs.utility.mpi as mpi + import triqs.utility.mpi as mpi The last two lines load the modules for the construction of the :ref:`CTHYB solver `. @@ -271,7 +271,7 @@ and perform only one DMFT iteration. The resulting self energy can be tail fitte Sigma_iw_fit << tail_fit(S.Sigma_iw, fit_max_moment = 4, fit_min_n = 40, fit_max_n = 160)[0] Plot the self energy and adjust the tail fit parameters such that you obtain a -proper fit. The :meth:`fit_tail function ` is part +proper fit. The :meth:`fit_tail function ` is part of the :ref:`TRIQS ` library. 
For a self energy which is going to zero for :math:`i\omega \rightarrow 0` our suggestion is diff --git a/doc/tutorials/svo_vasp/svo_notebook.ipynb b/doc/tutorials/svo_vasp/svo_notebook.ipynb index bf544ed8..87c60afb 100644 --- a/doc/tutorials/svo_vasp/svo_notebook.ipynb +++ b/doc/tutorials/svo_vasp/svo_notebook.ipynb @@ -247,7 +247,7 @@ ], "source": [ "# import VASPconverter\n", - "from triqs_dft_tools.converters.vasp_converter import *\n", + "from triqs_dft_tools.converters.vasp import *\n", "\n", "\n", "# create Converter\n", @@ -312,7 +312,7 @@ " mpi.report('found {0:d} blocks of degenerate orbitals in shell {1:d}'.format(num_block_deg_orbs, i_sh))\n", " for iblock in range(num_block_deg_orbs):\n", " mpi.report('block {0:d} consists of orbitals:'.format(iblock))\n", - " for keys in SK.deg_shells[i_sh][iblock].keys():\n", + " for keys in list(SK.deg_shells[i_sh][iblock].keys()):\n", " mpi.report(' '+keys)" ] }, @@ -335,21 +335,20 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 2", + "display_name": "Python 3", "language": "python", - "name": "python2" + "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", - "version": 2 + "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", - "pygments_lexer": "ipython2", - "version": "2.7.15+" + "pygments_lexer": "ipython3" } }, "nbformat": 4, diff --git a/fortran/dmftproj/CMakeLists.txt b/fortran/dmftproj/CMakeLists.txt index 7e646998..a3d627ef 100644 --- a/fortran/dmftproj/CMakeLists.txt +++ b/fortran/dmftproj/CMakeLists.txt @@ -6,8 +6,7 @@ set(SOURCES modules.f dmftproj.f readcomline.f set_ang_trans.f setsym.f # The main target and what to link with... 
add_executable(dmftproj ${SOURCES}) -find_package(LAPACK) -target_link_libraries(dmftproj ${LAPACK_LIBRARIES}) +target_link_libraries(dmftproj triqs::blas_lapack) # where to install install (TARGETS dmftproj DESTINATION bin) @@ -17,7 +16,7 @@ SET(D ${CMAKE_CURRENT_SOURCE_DIR}/SRC_templates/) SET(WIEN_SRC_TEMPL_FILES ${D}/case.cf_f_mm2 ${D}/case.cf_p_cubic ${D}/case.indmftpr ${D}/run_triqs ${D}/runsp_triqs) message(STATUS "-----------------------------------------------------------------------------") message(STATUS " ******** WARNING ******** ") -message(STATUS " Wien2k 14.2 and older : after installation of TRIQS, copy the files from ") +message(STATUS " Wien2k 14.2 and older : after installation of DFTTools, copy the files from ") message(STATUS " ${CMAKE_INSTALL_PREFIX}/share/triqs/Wien2k_SRC_files/SRC_templates ") message(STATUS " to your Wien2k installation WIENROOT/SRC_templates (Cf documentation). ") message(STATUS " For newer versions these files are already shipped with Wien2k. ") diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt deleted file mode 100644 index 2ad35566..00000000 --- a/python/CMakeLists.txt +++ /dev/null @@ -1,19 +0,0 @@ -# where will the python end up in triqs? 
-set(PYTHON_LIB_DEST ${CPP2PY_PYTHON_LIB_DEST_ROOT}/triqs_dft_tools) - -# site_customize for build -set(package_name "triqs_dft_tools") - -# Create a temporary copy of the python modules so that we can run before installation with the test -FILE(GLOB PYTHON_SOURCES RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} *.py ) -foreach(f ${PYTHON_SOURCES}) - configure_file(${f} ${f} COPYONLY) -endforeach() - -# add version file -configure_file(version.py.in version.py @ONLY) - -# install files -install(FILES ${PYTHON_SOURCES} ${CMAKE_CURRENT_BINARY_DIR}/version.py DESTINATION ${PYTHON_LIB_DEST}) - -add_subdirectory(converters) diff --git a/python/converters/.gitignore b/python/converters/.gitignore deleted file mode 100644 index 0d20b648..00000000 --- a/python/converters/.gitignore +++ /dev/null @@ -1 +0,0 @@ -*.pyc diff --git a/python/converters/CMakeLists.txt b/python/converters/CMakeLists.txt deleted file mode 100644 index 3c719f26..00000000 --- a/python/converters/CMakeLists.txt +++ /dev/null @@ -1,10 +0,0 @@ -# Create a temporary copy of the python modules so that we can run before installation with the test -FILE(GLOB PYTHON_SOURCES RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} *.py) -foreach(f ${PYTHON_SOURCES}) - configure_file(${f} ${f} COPYONLY) -endforeach() - -# install files -install(FILES ${PYTHON_SOURCES} DESTINATION ${PYTHON_LIB_DEST}/converters) - -add_subdirectory(plovasp) diff --git a/python/converters/plovasp/CMakeLists.txt b/python/converters/plovasp/CMakeLists.txt deleted file mode 100644 index 9f2fbdaa..00000000 --- a/python/converters/plovasp/CMakeLists.txt +++ /dev/null @@ -1,19 +0,0 @@ -# === Build and install atm module -add_cpp2py_module(atm) -target_link_libraries(atm atm_c triqs) -target_compile_options(atm PRIVATE -std=c++17) -target_include_directories(atm PRIVATE ${CMAKE_SOURCE_DIR}/c++) - -install(TARGETS atm DESTINATION ${PYTHON_LIB_DEST}/converters/plovasp) - -# === Copy Python files to current build directory and register for install -set(PYTHON_SOURCES 
__init__.py converter.py elstruct.py inpconf.py plotools.py proj_group.py proj_shell.py sc_dmft.py vaspio.py) -foreach(f ${PYTHON_SOURCES}) - configure_file(${f} ${f} COPYONLY) -endforeach() - -# install files -install(FILES ${PYTHON_SOURCES} DESTINATION ${PYTHON_LIB_DEST}/converters/plovasp) - -# This we need in order for tests to work -#add_custom_command(TARGET atm POST_BUILD COMMAND ln -fs ${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_PROJECT_NAME}/atm.so ${CMAKE_BINARY_DIR}/python/dft/converters/plovasp) diff --git a/python/triqs_dft_tools/CMakeLists.txt b/python/triqs_dft_tools/CMakeLists.txt new file mode 100644 index 00000000..df7b79c3 --- /dev/null +++ b/python/triqs_dft_tools/CMakeLists.txt @@ -0,0 +1,25 @@ +# Configure the version +configure_file(version.py.in version.py) + +# All Python files. Copy them in the build dir to have a complete package for the tests. +file(GLOB_RECURSE python_sources RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} *.py) +file(GLOB_RECURSE wrap_generators RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} *_desc.py) +list(REMOVE_ITEM python_sources "${wrap_generators}") +foreach(file ${python_sources}) + configure_file(${file} ${file} COPYONLY) +endforeach() + +# Install python files to proper location +set(PYTHON_LIB_DEST ${TRIQS_PYTHON_LIB_DEST_ROOT}/${PROJECT_NAME}) +install(FILES ${CMAKE_CURRENT_BINARY_DIR}/version.py DESTINATION ${PYTHON_LIB_DEST}) +install(DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} DESTINATION ${TRIQS_PYTHON_LIB_DEST_ROOT} FILES_MATCHING PATTERN "*.py" PATTERN "*_desc.py" EXCLUDE) + +# Build and install any python modules +foreach(gen ${wrap_generators}) + string(REPLACE "_desc.py" "" gen ${gen}) + get_filename_component(module_name ${gen} NAME_WE) + get_filename_component(module_dir ${gen} DIRECTORY) + add_cpp2py_module(NAME ${module_name} DIRECTORY ${module_dir}) + target_link_libraries(${module_name} ${PROJECT_NAME}_c triqs_py) + install(TARGETS ${module_name} DESTINATION ${PYTHON_LIB_DEST}/${module_dir}) +endforeach() diff --git 
a/python/__init__.py b/python/triqs_dft_tools/__init__.py similarity index 85% rename from python/__init__.py rename to python/triqs_dft_tools/__init__.py index 137355ae..380c1ed9 100644 --- a/python/__init__.py +++ b/python/triqs_dft_tools/__init__.py @@ -20,11 +20,11 @@ # ########################################################################## -from sumk_dft import SumkDFT -from symmetry import Symmetry -from block_structure import BlockStructure -from sumk_dft_tools import SumkDFTTools -from converters import * +from .sumk_dft import SumkDFT +from .symmetry import Symmetry +from .block_structure import BlockStructure +from .sumk_dft_tools import SumkDFTTools +from .converters import * __all__ = ['SumkDFT', 'Symmetry', 'SumkDFTTools', 'Wien2kConverter', 'HkConverter','BlockStructure'] diff --git a/python/block_structure.py b/python/triqs_dft_tools/block_structure.py similarity index 95% rename from python/block_structure.py rename to python/triqs_dft_tools/block_structure.py index 86147109..fb9e3786 100644 --- a/python/block_structure.py +++ b/python/triqs_dft_tools/block_structure.py @@ -25,9 +25,9 @@ import copy import numpy as np -from pytriqs.gf import GfImFreq, BlockGf +from triqs.gf import GfImFreq, BlockGf from ast import literal_eval -import pytriqs.utility.mpi as mpi +import triqs.utility.mpi as mpi from warnings import warn from collections import defaultdict @@ -142,7 +142,7 @@ class BlockStructure(object): if self.gf_struct_solver is None: return None # we sort by block name in order to get a reproducible result - return [sorted([(k, v) for k, v in gfs.iteritems()], key=lambda x: x[0]) + return [sorted([(k, v) for k, v in list(gfs.items())], key=lambda x: x[0]) for gfs in self.gf_struct_solver] @property @@ -203,10 +203,10 @@ class BlockStructure(object): N_solver = len(np.unique(self.corr_to_inequiv)) if self.gf_struct_solver is not None: assert N_solver == len(self.gf_struct_solver) - assert sorted(np.unique(self.corr_to_inequiv)) == 
range(N_solver),\ + assert sorted(np.unique(self.corr_to_inequiv)) == list(range(N_solver)),\ "an inequivalent shell is missing in corr_to_inequiv" return [self.corr_to_inequiv.index(icrsh) - for icrsh in range(N_solver)] + for icrsh in list(range(N_solver))] @inequiv_to_corr.setter def inequiv_to_corr(self, value): @@ -222,7 +222,7 @@ class BlockStructure(object): ret = [] for ish, icrsh in enumerate(self.inequiv_to_corr): d = defaultdict(list) - for block_solver, block_sumk in self.solver_to_sumk_block[ish].iteritems(): + for block_solver, block_sumk in list(self.solver_to_sumk_block[ish].items()): d[block_sumk].append(block_solver) ret.append(d) return ret @@ -251,7 +251,7 @@ class BlockStructure(object): assert len(trans) == len(self.gf_struct_sumk),\ "give one transformation per correlated shell" - for icrsh in range(len(trans)): + for icrsh in list(range(len(trans))): ish = self.corr_to_inequiv[icrsh] if trans[icrsh] is None: trans[icrsh] = {block: np.eye(len(indices)) @@ -261,7 +261,7 @@ class BlockStructure(object): trans[icrsh] = {block: copy.deepcopy(trans[icrsh]) for block, indices in self.gf_struct_sumk[icrsh]} - assert trans[icrsh].keys() == self.gf_struct_sumk_dict[icrsh].keys(),\ + assert list(trans[icrsh].keys()) == list(self.gf_struct_sumk_dict[icrsh].keys()),\ "wrong block names used in transformation (icrsh = {})".format(icrsh) for block in trans[icrsh]: @@ -422,15 +422,15 @@ class BlockStructure(object): # create new solver_to_sumk so2su = {} so2su_block = {} - for blk, idxs in gf_struct.items(): + for blk,idxs in list(gf_struct.items()): for i in range(len(idxs)): so2su[(blk, i)] = self.solver_to_sumk[ish][(blk, idxs[i])] so2su_block[blk] = so2su[(blk, i)][0] self.solver_to_sumk[ish] = so2su self.solver_to_sumk_block[ish] = so2su_block # create new sumk_to_solver - for k, v in self.sumk_to_solver[ish].items(): - blk, ind = v + for k,v in list(self.sumk_to_solver[ish].items()): + blk,ind=v if blk in gf_struct and ind in gf_struct[blk]: new_ind = 
gf_struct[blk].index(ind) self.sumk_to_solver[ish][k] = (blk, new_ind) @@ -443,8 +443,8 @@ class BlockStructure(object): # reindexing gf_struct so that it starts with 0 for k in gf_struct: - gf_struct[k] = range(len(gf_struct[k])) - self.gf_struct_solver[ish] = gf_struct + gf_struct[k]=list(range(len(gf_struct[k]))) + self.gf_struct_solver[ish]=gf_struct def adapt_deg_shells(self, gf_struct, ish=0): """ Adapts the deg_shells to a new gf_struct @@ -453,7 +453,7 @@ class BlockStructure(object): if self.deg_shells is not None: for degsh in self.deg_shells[ish]: if isinstance(degsh, dict): - for key in degsh.keys(): + for key in list(degsh.keys()): if not key in gf_struct: del degsh[key] continue @@ -539,11 +539,11 @@ class BlockStructure(object): for icrsh in range(len(new_gf_struct_transformed)): ish = self.corr_to_inequiv[icrsh] gfs.append({}) - for block in new_gf_struct_transformed[icrsh].keys(): + for block in list(new_gf_struct_transformed[icrsh].keys()): for ind in new_gf_struct_transformed[icrsh][block]: - ind_sol = self.sumk_to_solver[ish][(block, ind)] + ind_sol = self.sumk_to_solver[ish][(block,ind)] if not ind_sol[0] in gfs[icrsh]: - gfs[icrsh][ind_sol[0]] = [] + gfs[icrsh][ind_sol[0]]=[] gfs[icrsh][ind_sol[0]].append(ind_sol[1]) self.pick_gf_struct_solver(gfs) @@ -579,7 +579,7 @@ class BlockStructure(object): so2su = {} su2so = {} so2su_block = {} - for frm, to in mapping[ish].iteritems(): + for frm,to in list(mapping[ish].items()): if not to[0] in gf_struct: gf_struct[to[0]] = [] gf_struct[to[0]].append(to[1]) @@ -594,7 +594,7 @@ class BlockStructure(object): else: so2su_block[to[0]] =\ self.solver_to_sumk_block[ish][frm[0]] - for k in self.sumk_to_solver[ish].keys(): + for k in list(self.sumk_to_solver[ish].keys()): if not k in su2so: su2so[k] = (None, None) @@ -669,7 +669,7 @@ class BlockStructure(object): raise Exception( "Argument space has to be either 'solver' or 'sumk'.") - names = gf_struct[ish].keys() + names = list(gf_struct[ish].keys()) blocks 
= [] for n in names: G = gf_function(indices=gf_struct[ish][n], **kwargs) @@ -738,7 +738,7 @@ class BlockStructure(object): assert len(G) == len(gf_struct),\ "list of G does not have the correct length" if ish is None: - ishs = range(len(gf_struct)) + ishs = list(range(len(gf_struct))) else: ishs = [ish] for ish in ishs: @@ -755,17 +755,17 @@ class BlockStructure(object): for block, gf in G: assert block in gf_struct[ish],\ "block " + block + " not in struct (shell {})".format(ish) - assert list(gf.indices) == 2 * [map(str, gf_struct[ish][block])],\ + assert list(gf.indices) == 2 * [list(map(str, gf_struct[ish][block]))],\ "block " + block + \ " has wrong indices (shell {})".format(ish) else: for block in gf_struct[ish]: assert block in G,\ "block " + block + " not in G (shell {})".format(ish) - for block, gf in G.iteritems(): + for block, gf in list(G.items()): assert block in gf_struct[ish],\ "block " + block + " not in struct (shell {})".format(ish) - assert range(len(gf)) == 2 * [map(str, gf_struct[ish][block])],\ + assert list(range(len(gf))) == 2 * [list(map(str, gf_struct[ish][block]))],\ "block " + block + \ " has wrong indices (shell {})".format(ish) @@ -775,20 +775,20 @@ class BlockStructure(object): Parameters ---------- - O : pytriqs.operators.Operator + O : triqs.operators.Operator Operator in sumk structure ish : int shell index on which the operator acts """ - from pytriqs.operators import Operator, c, c_dag - + from triqs.operators import Operator, c, c_dag + T = self.transformation[ish] sk2s = self.sumk_to_solver[ish] - + O_out = Operator(0) - + for monomial in O: coefficient = monomial[-1] new_monomial = Operator(1) @@ -796,7 +796,7 @@ class BlockStructure(object): for single_operator in monomial[0]: new_single_operator = Operator(0) daggered = single_operator[0] - + blockname = single_operator[1][0] i = single_operator[1][1] for j in range(len(T[blockname])): @@ -961,7 +961,7 @@ class BlockStructure(object): else: raise Exception('G is neither 
BlockGf nor dict.') - for block_to in gf_struct_to.keys(): + for block_to in list(gf_struct_to.keys()): if isinstance(G, BlockGf): G_out[block_to].zero() else: @@ -989,7 +989,7 @@ class BlockStructure(object): ish_to=ish_from, show_warnings=False, # else we get an endless loop space_from=space_to, space_to=space_from, **kwargs) - for name, gf in (G_back if isinstance(G, BlockGf) else G_back.iteritems()): + for name, gf in (G_back if isinstance(G, BlockGf) else list(G_back.items())): if isinstance(G, BlockGf): maxdiff = np.max(np.abs(G_back[name].data - G[name].data), axis=0) @@ -1033,7 +1033,7 @@ class BlockStructure(object): self.gf_struct_solver.append({}) self.solver_to_sumk.append({}) self.solver_to_sumk_block.append({}) - for frm,to in self.sumk_to_solver[ish].iteritems(): + for frm,to in list(self.sumk_to_solver[ish].items()): if to[0] is not None: self.gf_struct_solver[ish][frm[0]+'_'+str(frm[1])]=[0] self.sumk_to_solver[ish][frm]=(frm[0]+'_'+str(frm[1]),0) @@ -1061,7 +1061,7 @@ class BlockStructure(object): elif isinstance(one,dict): if set(one.keys()) != set(two.keys()): return False - for k in set(one.keys()).intersection(two.keys()): + for k in set(one.keys()).intersection(list(two.keys())): if not compare(one[k],two[k]): return False return True @@ -1096,7 +1096,7 @@ class BlockStructure(object): d = [] for ish in range(len(mapping)): d.append({}) - for k,v in mapping[ish].iteritems(): + for k,v in list(mapping[ish].items()): d[ish][repr(k)] = repr(v) return d @@ -1112,7 +1112,7 @@ class BlockStructure(object): d = [] for ish in range(len(mapping)): d.append({}) - for k,v in mapping[ish].iteritems(): + for k,v in list(mapping[ish].items()): # literal_eval is a saje alternative to eval d[ish][literal_eval(k)] = literal_eval(v) return d @@ -1138,7 +1138,7 @@ class BlockStructure(object): s+=' shell '+str(ish)+'\n' def keyfun(el): return '{}_{:05d}'.format(el[0],el[1]) - keys = sorted(element[ish].keys(),key=keyfun) + keys = 
sorted(list(element[ish].keys()),key=keyfun) for k in keys: s+=' '+str(k)+str(element[ish][k])+'\n' s += "deg_shells\n" @@ -1147,7 +1147,7 @@ class BlockStructure(object): for l in range(len(self.deg_shells[ish])): s+=' equivalent group '+str(l)+'\n' if isinstance(self.deg_shells[ish][l],dict): - for key, val in self.deg_shells[ish][l].iteritems(): + for key, val in list(self.deg_shells[ish][l].items()): s+=' '+key+('*' if val[1] else '')+':\n' s+=' '+str(val[0]).replace('\n','\n ')+'\n' else: @@ -1157,5 +1157,5 @@ class BlockStructure(object): s += str(self.transformation) return s -from pytriqs.archive.hdf_archive_schemes import register_class +from h5.formats import register_class register_class(BlockStructure) diff --git a/python/clear_h5_output.py b/python/triqs_dft_tools/clear_h5_output.py similarity index 84% rename from python/clear_h5_output.py rename to python/triqs_dft_tools/clear_h5_output.py index a9135771..c6bb4621 100644 --- a/python/clear_h5_output.py +++ b/python/triqs_dft_tools/clear_h5_output.py @@ -3,13 +3,13 @@ import sys import subprocess if len(sys.argv) < 2: - print "Usage: python clear_h5_output.py archive" + print("Usage: python clear_h5_output.py archive") sys.exit() -print """ +print(""" This script is to remove any SumkDFT generated output from the h5 archive and to restore it to the original post-converter state. -""" +""") filename = sys.argv[1] A = h5py.File(filename) @@ -21,6 +21,6 @@ A.close() # Repack to reclaim disk space retcode = subprocess.call(["h5repack", "-i%s" % filename, "-otemphgfrt.h5"]) if retcode != 0: - print "h5repack failed!" 
+ print("h5repack failed!") else: subprocess.call(["mv", "-f", "temphgfrt.h5", "%s" % filename]) diff --git a/python/converters/__init__.py b/python/triqs_dft_tools/converters/__init__.py similarity index 84% rename from python/converters/__init__.py rename to python/triqs_dft_tools/converters/__init__.py index b835323b..8900eeec 100644 --- a/python/converters/__init__.py +++ b/python/triqs_dft_tools/converters/__init__.py @@ -20,10 +20,10 @@ # ########################################################################## -from wien2k_converter import Wien2kConverter -from hk_converter import HkConverter -from vasp_converter import VaspConverter -from wannier90_converter import Wannier90Converter +from .wien2k import Wien2kConverter +from .hk import HkConverter +from .vasp import VaspConverter +from .wannier90 import Wannier90Converter __all__ =['Wien2kConverter','HkConverter','Wannier90Converter','VaspConverter'] diff --git a/python/converters/converter_tools.py b/python/triqs_dft_tools/converters/converter_tools.py similarity index 95% rename from python/converters/converter_tools.py rename to python/triqs_dft_tools/converters/converter_tools.py index b971518d..2d393c75 100644 --- a/python/converters/converter_tools.py +++ b/python/triqs_dft_tools/converters/converter_tools.py @@ -19,7 +19,7 @@ # TRIQS. If not, see . # ########################################################################## -import pytriqs.utility.mpi as mpi +import triqs.utility.mpi as mpi class ConverterTools: @@ -46,12 +46,12 @@ class ConverterTools: import os.path import string if not(os.path.exists(filename)): - raise IOError, "File %s does not exist." % filename + raise IOError("File %s does not exist." 
% filename) for line in open(filename, 'r'): - for old, new in to_replace.iteritems(): + for old, new in to_replace.items(): line = line.replace(old, new) for x in line.split(): - yield string.atof(x) + yield float(x) def repack(self): """ diff --git a/python/converters/hk_converter.py b/python/triqs_dft_tools/converters/hk.py similarity index 93% rename from python/converters/hk_converter.py rename to python/triqs_dft_tools/converters/hk.py index 510a7cca..0984a7b4 100644 --- a/python/converters/hk_converter.py +++ b/python/triqs_dft_tools/converters/hk.py @@ -22,10 +22,10 @@ from types import * import numpy -from pytriqs.archive import * -import pytriqs.utility.mpi as mpi +from h5 import * +import triqs.utility.mpi as mpi from math import sqrt -from converter_tools import * +from .converter_tools import * class HkConverter(ConverterTools): @@ -53,8 +53,7 @@ class HkConverter(ConverterTools): """ - assert type( - filename) == StringType, "HkConverter: filename must be a filename." + assert isinstance(filename, str), "HkConverter: filename must be a filename." 
if hdf_filename is None: hdf_filename = filename + '.h5' self.hdf_file = hdf_filename @@ -96,20 +95,20 @@ class HkConverter(ConverterTools): # the energy conversion factor is 1.0, we assume eV in files energy_unit = 1.0 # read the number of k points - n_k = int(R.next()) + n_k = int(next(R)) k_dep_projection = 0 SP = 0 # no spin-polarision SO = 0 # no spin-orbit # total charge below energy window is set to 0 charge_below = 0.0 # density required, for setting the chemical potential - density_required = R.next() + density_required = next(R) symm_op = 0 # No symmetry groups for the k-sum # the information on the non-correlated shells is needed for # defining dimension of matrices: # number of shells considered in the Wanniers - n_shells = int(R.next()) + n_shells = int(next(R)) # corresponds to index R in formulas # now read the information about the shells (atom, sort, l, dim): shell_entries = ['atom', 'sort', 'l', 'dim'] @@ -117,7 +116,7 @@ class HkConverter(ConverterTools): shell_entries, R)} for ish in range(n_shells)] # number of corr. shells (e.g. Fe d, Ce f) in the unit cell, - n_corr_shells = int(R.next()) + n_corr_shells = int(next(R)) # corresponds to index R in formulas # now read the information about the shells (atom, sort, l, dim, SO # flag, irep): @@ -141,8 +140,8 @@ class HkConverter(ConverterTools): T = [] for ish in range(n_inequiv_shells): # number of representatives ("subsets"), e.g. t2g and eg - n_reps[ish] = int(R.next()) - dim_reps[ish] = [int(R.next()) for i in range( + n_reps[ish] = int(next(R)) + dim_reps[ish] = [int(next(R)) for i in range( n_reps[ish])] # dimensions of the subsets # The transformation matrix: @@ -201,7 +200,7 @@ class HkConverter(ConverterTools): if (weights_in_file): # weights in the file for ik in range(n_k): - bz_weights[ik] = R.next() + bz_weights[ik] = next(R) # if the sum over spins is in the weights, take it out again!! 
sm = sum(bz_weights) @@ -222,7 +221,7 @@ class HkConverter(ConverterTools): else: istart = 0 for j in range(istart, n_orb): - hopping[ik, isp, i, j] = R.next() + hopping[ik, isp, i, j] = next(R) for i in range(n_orb): if (only_upper_triangle): @@ -230,7 +229,7 @@ class HkConverter(ConverterTools): else: istart = 0 for j in range(istart, n_orb): - hopping[ik, isp, i, j] += R.next() * 1j + hopping[ik, isp, i, j] += next(R) * 1j if ((only_upper_triangle)and(i != j)): hopping[ik, isp, j, i] = hopping[ ik, isp, i, j].conjugate() @@ -243,8 +242,8 @@ class HkConverter(ConverterTools): else: istart = 0 for j in range(istart, n_orb): - hopping[ik, isp, i, j] = R.next() - hopping[ik, isp, i, j] += R.next() * 1j + hopping[ik, isp, i, j] = next(R) + hopping[ik, isp, i, j] += next(R) * 1j if ((only_upper_triangle)and(i != j)): hopping[ik, isp, j, i] = hopping[ diff --git a/python/converters/plovasp/.gitignore b/python/triqs_dft_tools/converters/plovasp/.gitignore similarity index 100% rename from python/converters/plovasp/.gitignore rename to python/triqs_dft_tools/converters/plovasp/.gitignore diff --git a/python/converters/plovasp/__init__.py b/python/triqs_dft_tools/converters/plovasp/__init__.py similarity index 99% rename from python/converters/plovasp/__init__.py rename to python/triqs_dft_tools/converters/plovasp/__init__.py index db31c97b..d6a0c54a 100644 --- a/python/converters/plovasp/__init__.py +++ b/python/triqs_dft_tools/converters/plovasp/__init__.py @@ -1,4 +1,3 @@ - ################################################################################ # # TRIQS: a Toolbox for Research in Interacting Quantum Systems diff --git a/python/converters/plovasp/atm_desc.py b/python/triqs_dft_tools/converters/plovasp/atm_desc.py similarity index 82% rename from python/converters/plovasp/atm_desc.py rename to python/triqs_dft_tools/converters/plovasp/atm_desc.py index 55ff109a..383a98d9 100644 --- a/python/converters/plovasp/atm_desc.py +++ 
b/python/triqs_dft_tools/converters/plovasp/atm_desc.py @@ -1,5 +1,5 @@ # Generated automatically using the command : -# c++2py.py -m atm -o atm --moduledoc "Analytical Tetrahedron Method for DOS" ../../../c++/plovasp/atm/dos_tetra3d.hpp +# c++2py.py -m atm -o atm --moduledoc "Analytical Tetrahedron Method for DOS" ../../../../c++/triqs_dft_tools/converters/vasp/dos_tetra3d.hpp from cpp2py.wrap_generator import * # The module @@ -8,11 +8,12 @@ module = module_(full_name = "atm", doc = "Analytical Tetrahedron Method for cal # All the triqs C++/Python modules # Add here all includes beyond what is automatically included by the triqs modules -module.add_include("plovasp/atm/dos_tetra3d.hpp") +module.add_include("triqs_dft_tools/converters/vasp/dos_tetra3d.hpp") # Add here anything to add in the C++ code at the start, e.g. namespace using module.add_preamble(""" #include +using namespace triqs::arrays; """) module.add_function ("array dos_tetra_weights_3d (array_view eigk, double en, array_view itt)", doc = """DOS of a band by analytical tetrahedron method\n\n Returns corner weights for all tetrahedra for a given band and real energy.""") diff --git a/python/converters/plovasp/converter.py b/python/triqs_dft_tools/converters/plovasp/converter.py similarity index 94% rename from python/converters/plovasp/converter.py rename to python/triqs_dft_tools/converters/plovasp/converter.py index 53fbdb14..b2c97548 100644 --- a/python/converters/plovasp/converter.py +++ b/python/triqs_dft_tools/converters/plovasp/converter.py @@ -36,10 +36,10 @@ r""" Usage: python converter.py [] """ import sys -import vaspio -from inpconf import ConfigParameters -from elstruct import ElectronicStructure -from plotools import generate_plo, output_as_text +from . 
import vaspio +from .inpconf import ConfigParameters +from .elstruct import ElectronicStructure +from .plotools import generate_plo, output_as_text def generate_and_output_as_text(conf_filename, vasp_dir): """ diff --git a/python/converters/plovasp/elstruct.py b/python/triqs_dft_tools/converters/plovasp/elstruct.py similarity index 93% rename from python/converters/plovasp/elstruct.py rename to python/triqs_dft_tools/converters/plovasp/elstruct.py index 1ad1a4ca..b0a9ba58 100644 --- a/python/converters/plovasp/elstruct.py +++ b/python/triqs_dft_tools/converters/plovasp/elstruct.py @@ -92,7 +92,7 @@ class ElectronicStructure: # removed completely. # if not vasp_data.eigenval.eigs is None: if False: - print "eigvals from EIGENVAL" + print("eigvals from EIGENVAL") self.eigvals = vasp_data.eigenval.eigs self.ferw = vasp_data.eigenval.ferw.transpose((2, 0, 1)) @@ -102,7 +102,7 @@ class ElectronicStructure: # Check that the number of band is the same in PROJCAR and EIGENVAL assert nb_plo == self.nband, "PLOCAR is inconsistent with EIGENVAL (number of bands)" else: - print "eigvals from LOCPROJ" + print("eigvals from LOCPROJ") self.eigvals = vasp_data.plocar.eigs self.ferw = vasp_data.plocar.ferw.transpose((2, 0, 1)) self.efermi = vasp_data.doscar.efermi @@ -142,7 +142,7 @@ class ElectronicStructure: ## Construct a map to access coordinates by index # self.structure['ion_index'] = [] # for isort, nq in enumerate(self.structure['nq_types']): -# for iq in xrange(nq): +# for iq in range(nq): # self.structure['ion_index'].append((isort, iq)) @@ -154,7 +154,7 @@ class ElectronicStructure: nproj, ns, nk, nb = plo.shape ions = sorted(list(set([param['isite'] for param in self.proj_params]))) nions = len(ions) - norb = nproj / nions + norb = nproj // nions # Spin factor sp_fac = 2.0 if ns == 1 and not self.nc_flag else 1.0 @@ -163,8 +163,8 @@ class ElectronicStructure: overlap = np.zeros((ns, nproj, nproj), dtype=np.float64) # ov_min = np.ones((ns, nproj, nproj), 
dtype=np.float64) * 100.0 # ov_max = np.zeros((ns, nproj, nproj), dtype=np.float64) - for ispin in xrange(ns): - for ik in xrange(nk): + for ispin in range(ns): + for ik in range(nk): kweight = self.kmesh['kweights'][ik] occ = self.ferw[ispin, ik, :] den_mat[ispin, :, :] += np.dot(plo[:, ispin, ik, :] * occ, plo[:, ispin, ik, :].T.conj()).real * kweight * sp_fac @@ -174,12 +174,12 @@ class ElectronicStructure: # ov_min = np.minimum(ov, ov_min) # Output only the site-diagonal parts of the matrices - print - print " Unorthonormalized density matrices and overlaps:" - for ispin in xrange(ns): - print " Spin:", ispin + 1 + print() + print(" Unorthonormalized density matrices and overlaps:") + for ispin in range(ns): + print(" Spin:", ispin + 1) for io, ion in enumerate(ions): - print " Site:", ion + print(" Site:", ion) iorb_inds = [(ip, param['m']) for ip, param in enumerate(self.proj_params) if param['isite'] == ion] norb = len(iorb_inds) dm = np.zeros((norb, norb)) @@ -189,9 +189,9 @@ class ElectronicStructure: dm[iorb, iorb2] = den_mat[ispin, ind, ind2] ov[iorb, iorb2] = overlap[ispin, ind, ind2] - print " Density matrix" + (12*norb - 12 + 2)*" " + "Overlap" + print(" Density matrix" + (12*norb - 12 + 2)*" " + "Overlap") for drow, dov in zip(dm, ov): out = ''.join(map("{0:12.7f}".format, drow)) out += " " out += ''.join(map("{0:12.7f}".format, dov)) - print out + print(out) diff --git a/python/converters/plovasp/inpconf.py b/python/triqs_dft_tools/converters/plovasp/inpconf.py similarity index 92% rename from python/converters/plovasp/inpconf.py rename to python/triqs_dft_tools/converters/plovasp/inpconf.py index 861974a9..0c2b3a4e 100644 --- a/python/converters/plovasp/inpconf.py +++ b/python/triqs_dft_tools/converters/plovasp/inpconf.py @@ -29,20 +29,20 @@ r""" Module for parsing and checking an input config-file. """ -import ConfigParser +import configparser import numpy as np import re import sys import itertools as it -import vaspio +from . 
import vaspio def issue_warning(message): """ Issues a warning. """ - print - print " !!! WARNING !!!: " + message - print + print() + print(" !!! WARNING !!!: " + message) + print() ################################################################################ ################################################################################ @@ -73,7 +73,7 @@ class ConfigParameters: ################################################################################ def __init__(self, input_filename, verbosity=1): self.verbosity = verbosity - self.cp = ConfigParser.SafeConfigParser() + self.cp = configparser.SafeConfigParser() self.cp.readfp(open(input_filename, 'r')) self.parameters = {} @@ -89,7 +89,7 @@ class ConfigParameters: 'corr': ('corr', self.parse_string_logical, True)} self.gr_required = { - 'shells': ('shells', lambda s: map(int, s.split())), + 'shells': ('shells', lambda s: list(map(int, s.split()))), 'ewindow': ('ewindow', self.parse_energy_window)} self.gr_optional = { @@ -142,7 +142,7 @@ class ConfigParameters: else: # Check if a set of indices is given try: - l_tmp = map(int, par_str.split()) + l_tmp = list(map(int, par_str.split())) l_tmp.sort() # Subtract 1 so that VASP indices (starting with 1) are converted # to Python indices (starting with 0) @@ -160,7 +160,7 @@ class ConfigParameters: ion_list = [] nion = 0 for cl in classes: - ions = map(int, re.findall(patt2, cl)) + ions = list(map(int, re.findall(patt2, cl))) ion_list.append([ion - 1 for ion in ions]) nion += len(ions) @@ -218,7 +218,7 @@ class ConfigParameters: Energy window is given by two floats, with the first one being smaller than the second one. 
""" - ftmp = map(float, par_str.split()) + ftmp = list(map(float, par_str.split())) assert len(ftmp) == 2, "EWINDOW must be specified by exactly two floats" assert ftmp[0] < ftmp[1], "The first float in EWINDOW must be smaller than the second one" return tuple(ftmp) @@ -233,7 +233,7 @@ class ConfigParameters: Band window is given by two ints, with the first one being smaller than the second one. """ - ftmp = map(int, par_str.split()) + ftmp = list(map(int, par_str.split())) assert len(ftmp) == 2, "BANDS must be specified by exactly two ints" assert ftmp[0] < ftmp[1], "The first int in BANDS must be smaller than the second one" return tuple(ftmp) @@ -250,7 +250,7 @@ class ConfigParameters: """ str_rows = par_str.split('\n') try: - rows = [map(float, s.split()) for s in str_rows] + rows = [list(map(float, s.split())) for s in str_rows] except ValueError: err_mess = "Cannot parse a matrix string:\n%s"%(par_str) raise ValueError(err_mess) @@ -266,7 +266,7 @@ class ConfigParameters: mat = np.array(rows) else: err_mess = "Complex matrix must contain 2*M values:\n%s"%(par_str) - assert 2 * (nm / 2) == nm, err_mess + assert 2 * (nm // 2) == nm, err_mess tmp = np.array(rows, dtype=np.complex128) mat = tmp[:, 0::2] + 1.0j * tmp[:, 1::2] @@ -339,11 +339,11 @@ class ConfigParameters: For required parameters `exception=True` must be set. 
""" parsed = {} - for par in param_set.keys(): + for par in list(param_set.keys()): key = param_set[par][0] try: par_str = self.cp.get(section, par) - except (ConfigParser.NoOptionError, ConfigParser.NoSectionError): + except (configparser.NoOptionError, configparser.NoSectionError): if exception: message = "Required parameter '%s' not found in section [%s]"%(par, section) raise Exception(message) @@ -354,7 +354,7 @@ class ConfigParameters: continue if self.verbosity > 0: - print " %s = %s"%(par, par_str) + print(" %s = %s"%(par, par_str)) parse_fun = param_set[par][1] parsed[key] = parse_fun(par_str) @@ -376,27 +376,27 @@ class ConfigParameters: sections = self.cp.sections() sh_patt1 = re.compile('shell +.*', re.IGNORECASE) - sec_shells = filter(sh_patt1.match, sections) + sec_shells = list(filter(sh_patt1.match, sections)) self.nshells = len(sec_shells) assert self.nshells > 0, "No projected shells found in the input file" if self.verbosity > 0: - print + print() if self.nshells > 1: - print " Found %i projected shells"%(self.nshells) + print(" Found %i projected shells"%(self.nshells)) else: - print " Found 1 projected shell" + print(" Found 1 projected shell") # Get shell indices sh_patt2 = re.compile('shell +([0-9]*)$', re.IGNORECASE) try: get_ind = lambda s: int(sh_patt2.match(s).groups()[0]) - sh_inds = map(get_ind, sec_shells) + sh_inds = list(map(get_ind, sec_shells)) except (ValueError, AttributeError): raise ValueError("Failed to extract shell indices from a list: %s"%(sec_shells)) - self.sh_sections = {ind: sec for ind, sec in it.izip(sh_inds, sec_shells)} + self.sh_sections = {ind: sec for ind, sec in zip(sh_inds, sec_shells)} # Check that all indices are unique # In principle redundant because the list of sections will contain only unique names @@ -405,7 +405,7 @@ class ConfigParameters: # Ideally, indices should run from 1 to # If it's not the case, issue a warning sh_inds.sort() - if sh_inds != range(1, len(sh_inds) + 1): + if sh_inds != 
list(range(1, len(sh_inds) + 1)): issue_warning("Shell indices are not uniform or not starting from 1. " "This might be an indication of a incorrect setup.") @@ -418,8 +418,8 @@ class ConfigParameters: section = self.sh_sections[ind] if self.verbosity > 0: - print - print " Shell parameters:" + print() + print(" Shell parameters:") # Shell required parameters parsed = self.parse_parameter_set(section, self.sh_required, exception=True) shell.update(parsed) @@ -453,7 +453,7 @@ class ConfigParameters: sections = self.cp.sections() gr_patt = re.compile('group +(.*)', re.IGNORECASE) - sec_groups = filter(gr_patt.match, sections) + sec_groups = list(filter(gr_patt.match, sections)) self.ngroups = len(sec_groups) @@ -471,8 +471,8 @@ class ConfigParameters: group['index'] = gr_ind if self.verbosity > 0: - print - print " Group parameters:" + print() + print(" Group parameters:") # Group required parameters parsed = self.parse_parameter_set(section, self.gr_required, exception=True) group.update(parsed) @@ -514,18 +514,18 @@ class ConfigParameters: sh_gr_required = dict(self.gr_required) sh_gr_required.pop('shells') try: - for par in sh_gr_required.keys(): + for par in list(sh_gr_required.keys()): key = sh_gr_required[par][0] value = self.shells[0].pop(key) self.groups[0][key] = value except KeyError: message = "One [Shell] section is specified but no explicit [Group] section is provided." message += " In this case the [Shell] section must contain all required group information.\n" - message += " Required parameters are: %s"%(sh_gr_required.keys()) + message += " Required parameters are: %s"%(list(sh_gr_required.keys())) raise KeyError(message) # Do the same for optional group parameters, but do not raise an exception - for par in self.gr_optional.keys(): + for par in list(self.gr_optional.keys()): try: key = self.gr_optional[par][0] value = self.shells[0].pop(key) @@ -562,7 +562,7 @@ class ConfigParameters: # remove them and issue a warning. 
# # First, required group parameters - for par in self.gr_required.keys(): + for par in list(self.gr_required.keys()): try: key = self.gr_required[par][0] value = shell.pop(key) @@ -573,7 +573,7 @@ class ConfigParameters: continue # Second, optional group parameters - for par in self.gr_optional.keys(): + for par in list(self.gr_optional.keys()): try: key = self.gr_optional[par][0] value = shell.pop(key) @@ -591,7 +591,7 @@ class ConfigParameters: sh_refs_used.sort() # Check that all shells are referenced in the groups - assert sh_refs_used == range(self.nshells), "Some shells are not inside any of the groups" + assert sh_refs_used == list(range(self.nshells)), "Some shells are not inside any of the groups" ################################################################################ @@ -605,7 +605,7 @@ class ConfigParameters: """ self.general = {} sections = self.cp.sections() - gen_section = filter(lambda s: s.lower() == 'general', sections) + gen_section = [s for s in sections if s.lower() == 'general'] # If no [General] section is found parse a dummy section name to the parser # to reset parameters to their default values if len(gen_section) > 1: diff --git a/python/converters/plovasp/plotools.py b/python/triqs_dft_tools/converters/plovasp/plotools.py similarity index 86% rename from python/converters/plovasp/plotools.py rename to python/triqs_dft_tools/converters/plovasp/plotools.py index 458af36b..af500ff8 100644 --- a/python/converters/plovasp/plotools.py +++ b/python/triqs_dft_tools/converters/plovasp/plotools.py @@ -55,9 +55,9 @@ r""" """ import itertools as it import numpy as np -from proj_group import ProjectorGroup -from proj_shell import ProjectorShell -from proj_shell import ComplementShell +from .proj_group import ProjectorGroup +from .proj_shell import ProjectorShell +from .proj_shell import ComplementShell np.set_printoptions(suppress=True) @@ -71,9 +71,9 @@ def issue_warning(message): """ Issues a warning. """ - print - print " !!! 
WARNING !!!: " + message - print + print() + print(" !!! WARNING !!!: " + message) + print() ################################################################################ # check_data_consistency() @@ -129,18 +129,18 @@ def generate_plo(conf_pars, el_struct): # check if at least one shell is correlated assert np.any([shell['corr'] for shell in conf_pars.shells]), 'at least one shell has be CORR = True' nshell = len(conf_pars.shells) - print - print " Generating %i shell%s..."%(nshell, '' if nshell == 1 else 's') + print() + print(" Generating %i shell%s..."%(nshell, '' if nshell == 1 else 's')) pshells = [] for sh_par in conf_pars.shells: pshell = ProjectorShell(sh_par, proj_raw, el_struct.proj_params, el_struct.kmesh, el_struct.structure, el_struct.nc_flag) - print - print " Shell : %s"%(pshell.user_index) - print " Orbital l : %i"%(pshell.lorb) - print " Number of ions: %i"%(pshell.nion) - print " Dimension : %i"%(pshell.ndim) - print " Correlated : %r"%(pshell.corr) - print " Ion sort : %r"%(pshell.ion_sort) + print() + print(" Shell : %s"%(pshell.user_index)) + print(" Orbital l : %i"%(pshell.lorb)) + print(" Number of ions: %i"%(pshell.nion)) + print(" Dimension : %i"%(pshell.ndim)) + print(" Correlated : %r"%(pshell.corr)) + print(" Ion sort : %r"%(pshell.ion_sort)) pshells.append(pshell) @@ -153,49 +153,49 @@ def generate_plo(conf_pars, el_struct): if conf_pars.general['hk']: pgroup.calc_hk(eigvals) #testout = 'hk.out.h5' - #from pytriqs.archive import HDFArchive + #from h5 import HDFArchive #with HDFArchive(testout, 'w') as h5test: # h5test['hk'] = pgroup.hk # DEBUG output - print "Density matrix:" + print("Density matrix:") nimp = 0.0 ov_all = [] for ish in pgroup.ishells: if not isinstance(pshells[pgroup.ishells[ish]],ComplementShell): - print " Shell %i"%(ish + 1) + print(" Shell %i"%(ish + 1)) dm_all, ov_all_ = pshells[ish].density_matrix(el_struct) ov_all.append(ov_all_[0]) spin_fac = 2 if dm_all.shape[0] == 1 else 1 - for io in 
xrange(dm_all.shape[1]): - print " Site %i"%(io + 1) + for io in range(dm_all.shape[1]): + print(" Site %i"%(io + 1)) dm = spin_fac * dm_all[:, io, : ,:].sum(0) for row in dm: - print ''.join(map("{0:14.7f}".format, row)) + print(''.join(map("{0:14.7f}".format, row))) ndm = dm.trace() if pshells[ish].corr: nimp += ndm - print " trace: ", ndm - print - print " Impurity density:", nimp - print - print "Overlap:" + print(" trace: ", ndm) + print() + print(" Impurity density:", nimp) + print() + print("Overlap:") for io, ov in enumerate(ov_all): - print " Site %i"%(io + 1) - print ov[0,...] - print - print "Local Hamiltonian:" + print(" Site %i"%(io + 1)) + print(ov[0,...]) + print() + print("Local Hamiltonian:") for ish in pgroup.ishells: if not isinstance(pshells[pgroup.ishells[ish]],ComplementShell): - print " Shell %i"%(ish + 1) + print(" Shell %i"%(ish + 1)) loc_ham = pshells[pgroup.ishells[ish]].local_hamiltonian(el_struct) - for io in xrange(loc_ham.shape[1]): - print " Site %i (real | complex part)"%(io + 1) + for io in range(loc_ham.shape[1]): + print(" Site %i (real | complex part)"%(io + 1)) for row in loc_ham[:, io, :, :].sum(0): - print ''.join(map("{0:14.7f}".format, row.real))+' |'+''.join(map("{0:14.7f}".format, row.imag)) + print(''.join(map("{0:14.7f}".format, row.real))+' |'+''.join(map("{0:14.7f}".format, row.imag))) # END DEBUG output if 'dosmesh' in conf_pars.general: - print - print "Evaluating DOS..." + print() + print("Evaluating DOS...") mesh_pars = conf_pars.general['dosmesh'] if np.isnan(mesh_pars['emin']): dos_emin = pgroup.emin @@ -208,12 +208,12 @@ def generate_plo(conf_pars, el_struct): emesh = np.linspace(dos_emin, dos_emax, n_points) for ish in pgroup.ishells: if not isinstance(pshells[pgroup.ishells[ish]],ComplementShell) or True: - print " Shell %i"%(ish + 1) + print(" Shell %i"%(ish + 1)) dos = pshells[pgroup.ishells[ish]].density_of_states(el_struct, emesh) de = emesh[1] - emesh[0] ntot = (dos[1:,...] 
+ dos[:-1,...]).sum(0) / 2 * de - print " Total number of states:", ntot - for io in xrange(dos.shape[2]): + print(" Total number of states:", ntot) + for io in range(dos.shape[2]): np.savetxt('pdos_%i_%i.dat'%(ish,io), np.vstack((emesh.T, dos[:, 0, io, :].T)).T) pgroups.append(pgroup) @@ -254,7 +254,7 @@ def kpoints_output(basename, el_struct): f.write("%i\n"%(nktot)) # TODO: add the output of reciprocal lattice vectors f.write("# List of k-points with weights\n") - for ik in xrange(nktot): + for ik in range(nktot): kx, ky, kz = kmesh['kpoints'][ik, :] kwght = kmesh['kweights'][ik] f.write("%15.10f%15.10f%15.10f%20.10f\n"%(kx, ky, kz, kwght)) @@ -266,7 +266,7 @@ def kpoints_output(basename, el_struct): f.write("\n# Number of tetrahedra and volume: ntet, volt\n") f.write("%i %s\n"%(ntet, volt)) f.write("# List of tetrahedra: imult, ik1, ..., ik4\n") - for it in xrange(ntet): + for it in range(ntet): f.write(' '.join(map("{0:d}".format, *kmesh['itet'][it, :])) + '\n') except KeyError: pass @@ -315,14 +315,14 @@ def ctrl_output(conf_pars, el_struct, ng): header = json.dumps(head_dict, indent=4, separators=(',', ': ')) - print " Storing ctrl-file..." 
+ print(" Storing ctrl-file...") with open(ctrl_fname, 'wt') as f: f.write(header + "\n") f.write("#END OF HEADER\n") f.write("# k-points and weights\n") labels = ['kx', 'ky', 'kz', 'kweight'] - out = "".join(map(lambda s: s.center(15), labels)) + out = "".join([s.center(15) for s in labels]) f.write("#" + out + "\n") for ik, kp in enumerate(el_struct.kmesh['kpoints']): tmp1 = "".join(map("{0:15.10f}".format, kp)) @@ -330,7 +330,7 @@ def ctrl_output(conf_pars, el_struct, ng): f.write(out + "\n") f.write("# k-points and weights cartesian\n") labels = ['kx', 'ky', 'kz'] - out = "".join(map(lambda s: s.center(15), labels)) + out = "".join([s.center(15) for s in labels]) f.write("#" + out + "\n") for ik, kp in enumerate(el_struct.kmesh['kpoints_cart']): out = "".join(map("{0:15.10f}".format, kp)) @@ -381,7 +381,7 @@ def plo_output(conf_pars, el_struct, pshells, pgroups): """ for ig, pgroup in enumerate(pgroups): plo_fname = conf_pars.general['basename'] + '.pg%i'%(ig + 1) - print " Storing PLO-group file '%s'..."%(plo_fname) + print(" Storing PLO-group file '%s'..."%(plo_fname)) head_dict = {} @@ -394,7 +394,7 @@ def plo_output(conf_pars, el_struct, pshells, pgroups): # Number of electrons within the window head_dict['nelect'] = pgroup.nelect_window(el_struct) - print " Density within window:", head_dict['nelect'] + print(" Density within window:", head_dict['nelect']) head_shells = [] for ish in pgroup.ishells: @@ -430,13 +430,13 @@ def plo_output(conf_pars, el_struct, pshells, pgroups): f.write("# Eigenvalues within the energy window: %s, %s\n"%(pgroup.emin, pgroup.emax)) nk, nband, ns_band = el_struct.eigvals.shape - for isp in xrange(ns_band): + for isp in range(ns_band): f.write("# is = %i\n"%(isp + 1)) - for ik in xrange(nk): + for ik in range(nk): ib1, ib2 = pgroup.ib_win[ik, isp, 0], pgroup.ib_win[ik, isp, 1] # Output band indices in Fortran convention! 
f.write(" %i %i\n"%(ib1 + 1, ib2 + 1)) - for ib in xrange(ib1, ib2 + 1): + for ib in range(ib1, ib2 + 1): eigv_ef = el_struct.eigvals[ik, ib, isp] - el_struct.efermi f_weight = el_struct.ferw[isp, ik, ib] f.write("%13.8f %12.7f\n"%(eigv_ef, f_weight)) @@ -449,15 +449,15 @@ def plo_output(conf_pars, el_struct, pshells, pgroups): f.write("# Shell %i\n"%(ish)) nion, ns, nk, nlm, nb = shell.proj_win.shape - for isp in xrange(ns): + for isp in range(ns): f.write("# is = %i\n"%(isp + 1)) - for ik in xrange(nk): + for ik in range(nk): f.write("# ik = %i\n"%(ik + 1)) - for ion in xrange(nion): - for ilm in xrange(nlm): + for ion in range(nion): + for ilm in range(nlm): ib1, ib2 = pgroup.ib_win[ik, isp, 0], pgroup.ib_win[ik, isp, 1] ib_win = ib2 - ib1 + 1 - for ib in xrange(ib_win): + for ib in range(ib_win): p = shell.proj_win[ion, isp, ik, ilm, ib] f.write("{0:16.10f}{1:16.10f}\n".format(p.real, p.imag)) f.write("\n") @@ -494,7 +494,7 @@ def hk_output(conf_pars, el_struct, pgroups): for ig, pgroup in enumerate(pgroups): hk_fname = conf_pars.general['basename'] + '.hk%i'%(ig + 1) - print " Storing HK-group file '%s'..."%(hk_fname) + print(" Storing HK-group file '%s'..."%(hk_fname)) head_shells = [] for ish in pgroup.ishells: @@ -528,13 +528,13 @@ def hk_output(conf_pars, el_struct, pgroups): f.write('%i %i %i %i # atom sort l dim\n'%(head['ion_list'][0],head['ion_sort'][0],head['lorb'],head['ndim'])) norbs = pgroup.hk.shape[2] - for isp in xrange(ns_band): - for ik in xrange(nk): - for io in xrange(norbs): - for iop in xrange(norbs): + for isp in range(ns_band): + for ik in range(nk): + for io in range(norbs): + for iop in range(norbs): f.write(" {0:14.10f}".format(pgroup.hk[isp,ik,io,iop].real)) f.write("\n") - for io in xrange(norbs): - for iop in xrange(norbs): + for io in range(norbs): + for iop in range(norbs): f.write(" {0:14.10f}".format(pgroup.hk[isp,ik,io,iop].imag)) f.write("\n") diff --git a/python/converters/plovasp/proj_group.py 
b/python/triqs_dft_tools/converters/plovasp/proj_group.py similarity index 95% rename from python/converters/plovasp/proj_group.py rename to python/triqs_dft_tools/converters/plovasp/proj_group.py index 9fdadee8..0237ae6e 100644 --- a/python/converters/plovasp/proj_group.py +++ b/python/triqs_dft_tools/converters/plovasp/proj_group.py @@ -30,7 +30,7 @@ r""" Storage and manipulation of projector groups. """ import numpy as np -from proj_shell import ComplementShell +from .proj_shell import ComplementShell np.set_printoptions(suppress=True) ################################################################################ @@ -89,8 +89,8 @@ class ProjectorGroup: assert np.all( n_bands == n_bands[0,0] ), "At each band the same number of bands has to be selected for calculating the complement (to end up with an equal number of orbitals at each k-point)." if n_orbs == n_bands[0,0]: self.complement = False - print "\nWARNING: The total number of orbitals in this group is " - print "equal to the number of bands. Setting COMPLEMENT to FALSE!\n" + print("\nWARNING: The total number of orbitals in this group is ") + print("equal to the number of bands. 
Setting COMPLEMENT to FALSE!\n") # Select projectors within the energy window @@ -112,8 +112,8 @@ class ProjectorGroup: self.nelect = 0 nk, ns_band, _ = self.ib_win.shape rspin = 2.0 if ns_band == 1 else 1.0 - for isp in xrange(ns_band): - for ik in xrange(nk): + for isp in range(ns_band): + for ik in range(nk): ib1 = self.ib_win[ik, isp, 0] ib2 = self.ib_win[ik, isp, 1]+1 occ = el_struct.ferw[isp, ik, ib1:ib2] @@ -154,8 +154,8 @@ class ProjectorGroup: _, ns, nk, _, _ = self.shells[0].proj_win.shape p_mat = np.zeros((ndim, self.nb_max), dtype=np.complex128) # Note that 'ns' and 'nk' are the same for all shells - for isp in xrange(ns): - for ik in xrange(nk): + for isp in range(ns): + for ik in range(nk): nb = self.ib_win[ik, isp, 1] - self.ib_win[ik, isp, 0] + 1 # Combine all projectors of the group to one block projector for bl_map in block_maps: @@ -203,8 +203,8 @@ class ProjectorGroup: self.hk = np.zeros((ns,nk,ndim,ndim), dtype=np.complex128) # Note that 'ns' and 'nk' are the same for all shells - for isp in xrange(ns): - for ik in xrange(nk): + for isp in range(ns): + for ik in range(nk): bmin = self.ib_win[ik, isp, 0] bmax = self.ib_win[ik, isp, 1]+1 @@ -247,7 +247,7 @@ class ProjectorGroup: """ - print '\nCalculating complement\n' + print('\nCalculating complement\n') block_maps, ndim = self.get_block_matrix_map() _, ns, nk, _, _ = self.shells[0].proj_win.shape @@ -257,8 +257,8 @@ class ProjectorGroup: # Note that 'ns' and 'nk' are the same for all shells - for isp in xrange(ns): - for ik in xrange(nk): + for isp in range(ns): + for ik in range(nk): bmin = self.ib_win[ik, isp, 0] bmax = self.ib_win[ik, isp, 1]+1 @@ -335,7 +335,7 @@ class ProjectorGroup: 1. Orthogonality is ensured on each site (NORMION = True). 
For each site 'ion' we have the following mapping: - block_maps = [bl_map[ion] for ion in xrange(shell.nion) + block_maps = [bl_map[ion] for ion in range(shell.nion) for shell in shells] bl_map = [((i1_start, i1_end), (i1_shell, ion)), @@ -362,7 +362,7 @@ class ProjectorGroup: _shell = self.shells[ish] nion, ns, nk, nlm, nb_max = _shell.proj_win.shape ndim = max(ndim, nlm) - for ion in xrange(nion): + for ion in range(nion): i1_bl = 0 i2_bl = nlm block = {'bmat_range': (i1_bl, i2_bl)} @@ -378,7 +378,7 @@ class ProjectorGroup: for ish in self.ishells: _shell = self.shells[ish] nion, ns, nk, nlm, nb_max = _shell.proj_win.shape - for ion in xrange(nion): + for ion in range(nion): i2_bl = i1_bl + nlm block = {'bmat_range': (i1_bl, i2_bl)} block['shell_ion'] = (ish, ion) @@ -456,14 +456,14 @@ class ProjectorGroup: ib_min = 10000000 ib_max = 0 - for isp in xrange(ns_band): - for ik in xrange(nk): - for ib in xrange(nband): + for isp in range(ns_band): + for ik in range(nk): + for ib in range(nband): en = eigvals[ik, ib, isp] if en >= self.emin: break ib1 = ib - for ib in xrange(ib1, nband): + for ib in range(ib1, nband): en = eigvals[ik, ib, isp] if en > self.emax: break diff --git a/python/converters/plovasp/proj_shell.py b/python/triqs_dft_tools/converters/plovasp/proj_shell.py similarity index 91% rename from python/converters/plovasp/proj_shell.py rename to python/triqs_dft_tools/converters/plovasp/proj_shell.py index 32035670..628357b0 100644 --- a/python/converters/plovasp/proj_shell.py +++ b/python/triqs_dft_tools/converters/plovasp/proj_shell.py @@ -33,18 +33,13 @@ def issue_warning(message): """ Issues a warning. """ - print - print " !!! WARNING !!!: " + message - print + print() + print(" !!! WARNING !!!: " + message) + print() import itertools as it import numpy as np -try: - import atm - atmlib_present = True -except ImportError: - issue_warning("Error importing ATM libray, DOS calculation will fail!") - atmlib_present = False +from . 
import atm np.set_printoptions(suppress=True) @@ -135,8 +130,8 @@ class ProjectorShell: assert nrow%nion == 0, "Number of rows in TRANSFILE must be divisible by the number of ions" assert ncol%nm == 0, "Number of columns in TRANSFILE must be divisible by the number of orbitals 2*l + 1" - nr = nrow / nion - nsize = ncol / nm + nr = nrow // nion + nsize = ncol // nm assert nsize in (1, 2, 4), "Number of columns in TRANSFILE must be divisible by either 1, 2, or 4" # # Determine the spin-dimension and whether the matrices are real or complex @@ -154,18 +149,18 @@ class ProjectorShell: # is_complex = True # is_complex = nsize > 1 - ns_dim = max(1, nsize / 2) + ns_dim = max(1, nsize // 2) # Dimension of the orbital subspace assert nr%ns_dim == 0, "Number of rows in TRANSFILE is not compatible with the spin dimension" - ndim = nr / ns_dim + ndim = nr // ns_dim self.tmatrices = np.zeros((nion, nr, nm * ns_dim), dtype=np.complex128) if is_complex: raw_matrices = raw_matrices[:, ::2] + raw_matrices[:, 1::2] * 1j - for io in xrange(nion): + for io in range(nion): i1 = io * nr i2 = (io + 1) * nr self.tmatrices[io, :, :] = raw_matrices[i1:i2, :] @@ -181,7 +176,7 @@ class ProjectorShell: assert ncol%nm == 0, "Number of columns in TRANSFORM must be divisible by the number of orbitals 2*l + 1" # Only spin-independent matrices are expected here - nsize = ncol / nm + nsize = ncol // nm assert nsize in (1, 2), "Number of columns in TRANSFORM must be divisible by either 1 or 2" is_complex = nsize > 1 @@ -193,7 +188,7 @@ class ProjectorShell: ndim = nrow self.tmatrices = np.zeros((nion, nrow, nm), dtype=np.complex128) - for io in xrange(nion): + for io in range(nion): self.tmatrices[io, :, :] = raw_matrix return ndim @@ -206,7 +201,7 @@ class ProjectorShell: # We still need the matrices for the output self.tmatrices = np.zeros((nion, ndim, ndim), dtype=np.complex128) - for io in xrange(nion): + for io in range(nion): self.tmatrices[io, :, :] = np.identity(ndim, dtype=np.complex128) 
return ndim @@ -236,20 +231,20 @@ class ProjectorShell: # for a non-collinear case 'ndim' is 'ns * nm' ndim = self.tmatrices.shape[1] self.proj_arr = np.zeros((nion, ns, nk, ndim, nb), dtype=np.complex128) - for ik in xrange(nk): + for ik in range(nk): kp = kmesh['kpoints'][ik] for io, ion in enumerate(self.ion_list): proj_k = np.zeros((ns, nlm, nb), dtype=np.complex128) qcoord = structure['qcoords'][ion] # kphase = np.exp(-2.0j * np.pi * np.dot(kp, qcoord)) # kphase = 1.0 - for m in xrange(nlm): + for m in range(nlm): # Here we search for the index of the projector with the given isite/l/m indices for ip, par in enumerate(proj_params): if par['isite'] - 1 == ion and par['l'] == self.lorb and par['m'] == m: proj_k[:, m, :] = proj_raw[ip, :, ik, :] #* kphase break - for isp in xrange(ns): + for isp in range(ns): self.proj_arr[io, isp, ik, :, :] = np.dot(self.tmatrices[io, :, :], proj_k[isp, :, :]) else: @@ -257,12 +252,12 @@ class ProjectorShell: self.proj_arr = np.zeros((nion, ns, nk, nlm, nb), dtype=np.complex128) for io, ion in enumerate(self.ion_list): qcoord = structure['qcoords'][ion] - for m in xrange(nlm): + for m in range(nlm): # Here we search for the index of the projector with the given isite/l/m indices for ip, par in enumerate(proj_params): if par['isite'] - 1 == ion and par['l'] == self.lorb and par['m'] == m: self.proj_arr[io, :, :, m, :] = proj_raw[ip, :, :, :] -# for ik in xrange(nk): +# for ik in range(nk): # kp = kmesh['kpoints'][ik] ## kphase = np.exp(-2.0j * np.pi * np.dot(kp, qcoord)) # kphase = 1.0 @@ -291,8 +286,8 @@ class ProjectorShell: # Select projectors for a given energy window ns_band = self.ib_win.shape[1] - for isp in xrange(ns): - for ik in xrange(nk): + for isp in range(ns): + for ik in range(nk): # TODO: for non-collinear case something else should be done here is_b = min(isp, ns_band) ib1 = self.ib_win[ik, is_b, 0] @@ -328,9 +323,9 @@ class ProjectorShell: ib1 = self.ib_min ib2 = self.ib_max + 1 if site_diag: - for isp in 
xrange(ns): - for ik, weight, occ in it.izip(it.count(), kweights, occnums[isp, :, :]): - for io in xrange(nion): + for isp in range(ns): + for ik, weight, occ in zip(it.count(), kweights, occnums[isp, :, :]): + for io in range(nion): proj_k = self.proj_win[io, isp, ik, ...] occ_mats[isp, io, :, :] += np.dot(proj_k * occ[ib1:ib2], proj_k.conj().T).real * weight @@ -338,9 +333,9 @@ class ProjectorShell: proj_k.conj().T).real * weight else: proj_k = np.zeros((ndim, nbtot), dtype=np.complex128) - for isp in xrange(ns): - for ik, weight, occ in it.izip(it.count(), kweights, occnums[isp, :, :]): - for io in xrange(nion): + for isp in range(ns): + for ik, weight, occ in zip(it.count(), kweights, occnums[isp, :, :]): + for io in range(nion): i1 = io * nlm i2 = (io + 1) * nlm proj_k[i1:i2, :] = self.proj_win[io, isp, ik, ...] @@ -375,10 +370,10 @@ class ProjectorShell: occnums = el_struct.ferw ib1 = self.ib_min ib2 = self.ib_max + 1 - for isp in xrange(ns): - for ik, weight, occ, eigk in it.izip(it.count(), kweights, occnums[isp, :, :], + for isp in range(ns): + for ik, weight, occ, eigk in zip(it.count(), kweights, occnums[isp, :, :], el_struct.eigvals[:, ib1:ib2, isp]): - for io in xrange(nion): + for io in range(nion): proj_k = self.proj_win[io, isp, ik, ...] loc_ham[isp, io, :, :] += np.dot(proj_k * (eigk - el_struct.efermi), proj_k.conj().T) * weight @@ -399,7 +394,6 @@ class ProjectorShell: """ nion, ns, nk, nlm, nbtot = self.proj_win.shape - assert atmlib_present, "ATM library was not imported; cannot calculate DOS" # There is a problem with data storage structure of projectors that will # make life more complicated. 
The problem is that band-indices of projectors # for different k-points do not match because we store 'nb_max' values starting @@ -410,13 +404,13 @@ class ProjectorShell: ne = len(emesh) dos = np.zeros((ne, ns, nion, nlm)) w_k = np.zeros((nk, nb_max, ns, nion, nlm), dtype=np.complex128) - for isp in xrange(ns): - for ik in xrange(nk): + for isp in range(ns): + for ik in range(nk): is_b = min(isp, ns_band) ib1 = self.ib_win[ik, is_b, 0] ib2 = self.ib_win[ik, is_b, 1] + 1 - for ib_g in xrange(ib1, ib2): - for io in xrange(nion): + for ib_g in range(ib1, ib2): + for io in range(nion): # Note the difference between 'ib' and 'ibn': # 'ib' counts from 0 to 'nb_k - 1' # 'ibn' counts from 'ib1 - ib_min' to 'ib2 - ib_min' @@ -429,19 +423,19 @@ class ProjectorShell: itt = el_struct.kmesh['itet'].T # k-indices are starting from 0 in Python itt[1:, :] -= 1 - for isp in xrange(ns): + for isp in range(ns): for ib, eigk in enumerate(el_struct.eigvals[:, self.ib_min:self.ib_max+1, isp].T): for ie, e in enumerate(emesh): eigk_ef = eigk - el_struct.efermi cti = atm.dos_tetra_weights_3d(eigk_ef, e, itt) - for im in xrange(nlm): - for io in xrange(nion): + for im in range(nlm): + for io in range(nion): dos[ie, isp, io, im] += np.sum((cti * w_k[itt[1:, :], ib, isp, io, im].real).sum(0) * itt[0, :]) dos *= 2 * el_struct.kmesh['volt'] -# for isp in xrange(ns): -# for ik, weight, occ in it.izip(it.count(), kweights, occnums[isp, :, :]): -# for io in xrange(nion): +# for isp in range(ns): +# for ik, weight, occ in zip(it.count(), kweights, occnums[isp, :, :]): +# for io in range(nion): # proj_k = self.proj_win[isp, io, ik, ...] 
# occ_mats[isp, io, :, :] += np.dot(proj_k * occ[ib1:ib2], # proj_k.conj().T).real * weight diff --git a/python/converters/plovasp/sc_dmft.py b/python/triqs_dft_tools/converters/plovasp/sc_dmft.py similarity index 84% rename from python/converters/plovasp/sc_dmft.py rename to python/triqs_dft_tools/converters/plovasp/sc_dmft.py index b0f32aae..067094d0 100644 --- a/python/converters/plovasp/sc_dmft.py +++ b/python/triqs_dft_tools/converters/plovasp/sc_dmft.py @@ -30,8 +30,8 @@ import re import time import signal import sys -import pytriqs.utility.mpi as mpi -import converter +import triqs.utility.mpi as mpi +from . import converter from shutil import copyfile xch = sys.excepthook @@ -63,7 +63,7 @@ def is_vasp_running(vasp_pid): if mpi.is_master_node(): try: os.kill(vasp_pid, 0) - except OSError, e: + except OSError as e: pid_exists = e.errno == errno.EPERM else: pid_exists = True @@ -85,7 +85,7 @@ def get_dft_energy(): try: dft_energy = float(line.split()[2]) except ValueError: - print "Cannot read energy from OSZICAR, setting it to zero" + print("Cannot read energy from OSZICAR, setting it to zero") dft_energy = 0.0 return dft_energy @@ -111,7 +111,7 @@ def run_all(vasp_pid, dmft_cycle, cfg_file, n_iter, n_iter_dft, vasp_version): iter = 0 while vasp_running: - if debug: print bcolors.RED + "rank %s"%(mpi.rank) + bcolors.ENDC + if debug: print(bcolors.RED + "rank %s"%(mpi.rank) + bcolors.ENDC) mpi.report(" Waiting for VASP lock to disappear...") mpi.barrier() while is_vasp_lock_present(): @@ -125,30 +125,30 @@ def run_all(vasp_pid, dmft_cycle, cfg_file, n_iter, n_iter_dft, vasp_version): # Tell VASP to stop if the maximum number of iterations is reached - if debug: print bcolors.MAGENTA + "rank %s"%(mpi.rank) + bcolors.ENDC + if debug: print(bcolors.MAGENTA + "rank %s"%(mpi.rank) + bcolors.ENDC) err = 0 exc = None - if debug: print bcolors.BLUE + "plovasp: rank %s"%(mpi.rank) + bcolors.ENDC + if debug: print(bcolors.BLUE + "plovasp: rank %s"%(mpi.rank) + 
bcolors.ENDC) if mpi.is_master_node(): converter.generate_and_output_as_text(cfg_file, vasp_dir='./') # Read energy from OSZICAR dft_energy = get_dft_energy() mpi.barrier() - if debug: print bcolors.GREEN + "rank %s"%(mpi.rank) + bcolors.ENDC + if debug: print(bcolors.GREEN + "rank %s"%(mpi.rank) + bcolors.ENDC) corr_energy, dft_dc = dmft_cycle() mpi.barrier() if mpi.is_master_node(): total_energy = dft_energy + corr_energy - dft_dc - print - print "="*80 - print " Total energy: ", total_energy - print " DFT energy: ", dft_energy - print " Corr. energy: ", corr_energy - print " DFT DC: ", dft_dc - print "="*80 - print + print() + print("="*80) + print(" Total energy: ", total_energy) + print(" DFT energy: ", dft_energy) + print(" Corr. energy: ", corr_energy) + print(" DFT DC: ", dft_dc) + print("="*80) + print() # check if we should do additional VASP calculations # in the standard VASP version, VASP writes out GAMMA itself @@ -176,8 +176,8 @@ def run_all(vasp_pid, dmft_cycle, cfg_file, n_iter, n_iter_dft, vasp_version): copyfile(src='GAMMA_recent',dst='GAMMA') iter += 1 if iter == n_iter: - print "\n Maximum number of iterations reached." 
- print " Aborting VASP iterations...\n" + print("\n Maximum number of iterations reached.") + print(" Aborting VASP iterations...\n") f_stop = open('STOPCAR', 'wt') f_stop.write("LABORT = .TRUE.\n") f_stop.close() @@ -200,28 +200,28 @@ def main(): vasp_pid = int(sys.argv[1]) except (ValueError, KeyError): if mpi.is_master_node(): - print "ERROR: VASP process pid must be provided as the first argument" + print("ERROR: VASP process pid must be provided as the first argument") raise try: n_iter = int(sys.argv[2]) except (ValueError, KeyError): if mpi.is_master_node(): - print "ERROR: Number of iterations must be provided as the second argument" + print("ERROR: Number of iterations must be provided as the second argument") raise try: n_iter_dft = int(sys.argv[3]) except (ValueError, KeyError): if mpi.is_master_node(): - print "ERROR: Number of VASP iterations with fixed charge density must be provided as the third argument" + print("ERROR: Number of VASP iterations with fixed charge density must be provided as the third argument") raise try: dmft_script = re.sub("\.py$", "", sys.argv[4]) except: if mpi.is_master_node(): - print "ERROR: User-defined DMFT script must be provided as the fourth argument" + print("ERROR: User-defined DMFT script must be provided as the fourth argument") raise # Optional parameter: config-file name diff --git a/python/converters/plovasp/vaspio.py b/python/triqs_dft_tools/converters/plovasp/vaspio.py similarity index 82% rename from python/converters/plovasp/vaspio.py rename to python/triqs_dft_tools/converters/plovasp/vaspio.py index f5c3cb31..c917cfed 100644 --- a/python/converters/plovasp/vaspio.py +++ b/python/triqs_dft_tools/converters/plovasp/vaspio.py @@ -83,12 +83,12 @@ class VaspData: except (IOError, StopIteration): self.eigenval.eigs = None self.eigenval.ferw = None - print "!!! WARNING !!!: Error reading from EIGENVAL, trying LOCPROJ" + print("!!! 
WARNING !!!: Error reading from EIGENVAL, trying LOCPROJ") try: self.doscar.from_file(vasp_dir) except (IOError, StopIteration): if efermi_required: - print "!!! WARNING !!!: Error reading from Efermi from DOSCAR, trying LOCPROJ" + print("!!! WARNING !!!: Error reading from Efermi from DOSCAR, trying LOCPROJ") try: self.plocar.efermi self.doscar.efermi = self.plocar.efermi @@ -96,7 +96,7 @@ class VaspData: raise Exception("Efermi cannot be read from DOSCAR or LOCPROJ") else: # TODO: This a hack. Find out a way to determine ncdij without DOSCAR - print "!!! WARNING !!!: Error reading from DOSCAR, taking Efermi from config" + print("!!! WARNING !!!: Error reading from DOSCAR, taking Efermi from config") self.doscar.ncdij = self.plocar.nspin ################################################################################ @@ -161,10 +161,10 @@ class Plocar: # Read the first line of LOCPROJ to get the dimensions with open(locproj_filename, 'rt') as f: line = f.readline() - nproj, nspin, nk, nband = map(int, line.split()) + nproj, nspin, nk, nband = list(map(int, line.split())) plo = np.zeros((nproj, nspin, nk, nband), dtype=np.complex128) - proj_params = [{} for i in xrange(nproj)] + proj_params = [{} for i in range(nproj)] iproj_site = 0 is_first_read = True @@ -173,7 +173,7 @@ class Plocar: while line: isite = int(line.split()[1]) if not is_first_read: - for il in xrange(norb): + for il in range(norb): ip_new = iproj_site * norb + il ip_prev = (iproj_site - 1) * norb + il proj_params[ip_new]['label'] = proj_params[ip_prev]['label'] @@ -181,8 +181,8 @@ class Plocar: proj_params[ip_new]['l'] = proj_params[ip_prev]['l'] proj_params[ip_new]['m'] = proj_params[ip_prev]['m'] - for ispin in xrange(nspin): - for ik in xrange(nk): + for ispin in range(nspin): + for ik in range(nk): # Parse the orbital labels and convert them to l,m-indices line = self.search_for(f, "^ *band") if is_first_read: @@ -202,10 +202,10 @@ class Plocar: is_first_read = False # Read the block of nk * 
ns * nband complex numbers - for ib in xrange(nband): + for ib in range(nband): line = f.readline() - rtmp = map(float, line.split()[1:]) - for il in xrange(norb): + rtmp = list(map(float, line.split()[1:])) + for il in range(norb): ctmp = complex(rtmp[2 * il], rtmp[2 * il + 1]) plo[iproj_site * norb + il, ispin, ik, ib] = ctmp @@ -213,9 +213,9 @@ class Plocar: iproj_site += 1 line = self.search_for(f, "^ *ISITE") - print "Read parameters:" + print("Read parameters:") for il, par in enumerate(proj_params): - print il, " -> ", par + print(il, " -> ", par) return proj_params, plo @@ -242,17 +242,17 @@ class Plocar: line = f.readline() line = line.split("#")[0] sline = line.split() - self.ncdij, nk, self.nband, nproj = map(int, sline[:4]) + self.ncdij, nk, self.nband, nproj = list(map(int, sline[:4])) self.nspin = 1 if self.ncdij == 1 else 2 self.nspin_band = 2 if self.ncdij == 2 else 1 try: self.efermi = float(sline[4]) except: - print "!!! WARNING !!!: Error reading E-Fermi from LOCPROJ, trying DOSCAR" + print("!!! 
WARNING !!!: Error reading E-Fermi from LOCPROJ, trying DOSCAR") plo = np.zeros((nproj, self.nspin, nk, self.nband), dtype=np.complex128) - proj_params = [{} for i in xrange(nproj)] + proj_params = [{} for i in range(nproj)] iproj_site = 0 is_first_read = True @@ -284,26 +284,26 @@ class Plocar: patt = re.compile("^orbital") # FIXME: fix spin indices for NCDIJ = 4 (non-collinear) assert self.ncdij < 4, "Non-collinear case is not implemented" - for ispin in xrange(self.nspin): - for ik in xrange(nk): - for ib in xrange(self.nband): + for ispin in range(self.nspin): + for ik in range(nk): + for ib in range(self.nband): line = "" while not line: line = f.readline().strip() sline = line.split() - isp_, ik_, ib_ = map(int, sline[1:4]) + isp_, ik_, ib_ = list(map(int, sline[1:4])) assert isp_ == ispin + 1 and ik_ == ik + 1 and ib_ == ib + 1, "Inconsistency in reading LOCPROJ" self.eigs[ik, ib, ispin] = float(sline[4]) self.ferw[ik, ib, ispin] = float(sline[5]) - for ip in xrange(nproj): + for ip in range(nproj): line = f.readline() sline = line.split() ctmp = complex(float(sline[1]), float(sline[2])) plo[ip, ispin, ik, ib] = ctmp - print "Read parameters:" + print("Read parameters:") for il, par in enumerate(proj_params): - print il, " -> ", par + print(il, " -> ", par) return proj_params, plo @@ -357,7 +357,7 @@ class Poscar: """ # Convenince local function def readline_remove_comments(): - return f.next().split('!')[0].split('#')[0].strip() + return next(f).split('!')[0].split('#')[0].strip() # Add a slash to the path name if necessary if vasp_dir[-1] != '/': @@ -365,17 +365,17 @@ class Poscar: f = read_lines(vasp_dir + poscar_filename) # Comment line - comment = f.next().rstrip() - print " Found POSCAR, title line: %s"%(comment) + comment = next(f).rstrip() + print(" Found POSCAR, title line: %s"%(comment)) # Read scale sline = readline_remove_comments() ascale = float(sline) # Read lattice vectors self.a_brav = np.zeros((3, 3)) - for ia in xrange(3): + for ia in 
range(3): sline = readline_remove_comments() - self.a_brav[ia, :] = map(float, sline.split()) + self.a_brav[ia, :] = list(map(float, sline.split())) # Negative scale means that it is a volume scale if ascale < 0: vscale = -ascale @@ -389,13 +389,13 @@ class Poscar: sline = readline_remove_comments() try: # Old v4.6 format: no element names - self.nions = map(int, sline.split()) - self.el_names = ['El%i'%(i) for i in xrange(len(self.nions))] + self.nions = list(map(int, sline.split())) + self.el_names = ['El%i'%(i) for i in range(len(self.nions))] except ValueError: # New v5.x format: read element names first self.el_names = sline.split() sline = readline_remove_comments() - self.nions = map(int, sline.split()) + self.nions = list(map(int, sline.split())) # Set the number of atom sorts (types) and the total # number of atoms in the unit cell @@ -415,27 +415,27 @@ class Poscar: # Read atomic positions self.q_types = [] self.type_of_ion = [] - for it in xrange(self.ntypes): + for it in range(self.ntypes): # Array mapping ion index to type self.type_of_ion += self.nions[it] * [it] q_at_it = np.zeros((self.nions[it], 3)) - for iq in xrange(self.nions[it]): + for iq in range(self.nions[it]): sline = readline_remove_comments() - qcoord = map(float, sline.split()[:3]) + qcoord = list(map(float, sline.split()[:3])) if cartesian: qcoord = np.dot(brec, qcoord) q_at_it[iq, :] = qcoord self.q_types.append(q_at_it) - print " Total number of ions:", self.nq - print " Number of types:", self.ntypes - print " Number of ions for each type:", self.nions + print(" Total number of ions:", self.nq) + print(" Number of types:", self.ntypes) + print(" Number of ions for each type:", self.nions) # print # print " Coords:" -# for it in xrange(ntypes): +# for it in range(ntypes): # print " Element:", el_names[it] # print q_at[it] @@ -485,23 +485,23 @@ class Kpoints: ibz_file = read_lines(vasp_dir + ibz_filename) # Skip comment line - line = ibz_file.next() + line = next(ibz_file) # Number of 
k-points - line = ibz_file.next() + line = next(ibz_file) self.nktot = int(line.strip().split()[0]) - print - print " {0:>26} {1:d}".format("Total number of k-points:", self.nktot) + print() + print(" {0:>26} {1:d}".format("Total number of k-points:", self.nktot)) self.kpts = np.zeros((self.nktot, 3)) self.kwghts = np.zeros((self.nktot)) # Skip comment line - line = ibz_file.next() - for ik in xrange(self.nktot): - line = ibz_file.next() + line = next(ibz_file) + for ik in range(self.nktot): + line = next(ibz_file) sline = line.strip().split() - self.kpts[ik, :] = map(float, sline[:3]) + self.kpts[ik, :] = list(map(float, sline[:3])) self.kwghts[ik] = float(sline[3]) self.kwghts /= self.nktot @@ -509,23 +509,23 @@ class Kpoints: # Attempt to read tetrahedra # Skip comment line ("Tetrahedra") try: - line = ibz_file.next() + line = next(ibz_file) # Number of tetrahedra and volume = 1/(6*nkx*nky*nkz) - line = ibz_file.next() + line = next(ibz_file) sline = line.split() self.ntet = int(sline[0]) self.volt = float(sline[1]) - print " {0:>26} {1:d}".format("Total number of tetrahedra:", self.ntet) + print(" {0:>26} {1:d}".format("Total number of tetrahedra:", self.ntet)) # Traditionally, itet[it, 0] contains multiplicity self.itet = np.zeros((self.ntet, 5), dtype=int) - for it in xrange(self.ntet): - line = ibz_file.next() - self.itet[it, :] = map(int, line.split()[:5]) - except StopIteration, ValueError: - print " No tetrahedron data found in %s. Skipping..."%(ibz_filename) + for it in range(self.ntet): + line = next(ibz_file) + self.itet[it, :] = list(map(int, line.split()[:5])) + except StopIteration as ValueError: + print(" No tetrahedron data found in %s. 
Skipping..."%(ibz_filename)) self.ntet = 0 # data = { 'nktot': nktot, @@ -567,22 +567,22 @@ class Eigenval: # First line: only the first and the last number out of four # are used; these are 'nions' and 'ispin' - sline = f.next().split() + sline = next(f).split() self.nq = int(sline[0]) self.ispin = int(sline[3]) # Second line: cell volume and lengths of lattice vectors (skip) - sline = f.next() + sline = next(f) # Third line: temperature (skip) - sline = f.next() + sline = next(f) # Fourth and fifth line: useless - sline = f.next() - sline = f.next() + sline = next(f) + sline = next(f) # Sixth line: NELECT, NKTOT, NBTOT - sline = f.next().split() + sline = next(f).split() self.nelect = int(sline[0]) self.nktot = int(sline[1]) self.nband = int(sline[2]) @@ -593,16 +593,16 @@ class Eigenval: self.eigs = np.zeros((self.nktot, self.nband, self.ispin)) self.ferw = np.zeros((self.nktot, self.nband, self.ispin)) - for ik in xrange(self.nktot): - sline = f.next() # Empty line - sline = f.next() # k-point info - tmp = map(float, sline.split()) + for ik in range(self.nktot): + sline = next(f) # Empty line + sline = next(f) # k-point info + tmp = list(map(float, sline.split())) self.kpts[ik, :] = tmp[:3] self.kwghts[ik] = tmp[3] - for ib in xrange(self.nband): - sline = f.next().split() - tmp = map(float, sline) + for ib in range(self.nband): + sline = next(f).split() + tmp = list(map(float, sline)) assert len(tmp) == 2 * self.ispin + 1, "EIGENVAL file is incorrect (probably from old versions of VASP)" self.eigs[ik, ib, :] = tmp[1:self.ispin+1] self.ferw[ik, ib, :] = tmp[self.ispin+1:] @@ -635,15 +635,15 @@ class Doscar: f = read_lines(vasp_dir + dos_filename) # First line: NION, NION, JOBPAR, NCDIJ - sline = f.next().split() + sline = next(f).split() self.ncdij = int(sline[3]) # Skip next 4 lines - for _ in xrange(4): - sline = f.next() + for _ in range(4): + sline = next(f) # Sixth line: EMAX, EMIN, NEDOS, EFERMI, 1.0 - sline = f.next().split() + sline = next(f).split() 
self.efermi = float(sline[3]) # TODO: implement output of SYMMCAR in VASP and read it here @@ -666,54 +666,54 @@ def read_symmcar(vasp_dir, symm_filename='SYMMCAR'): symmcar_exist = False sym_file = read_lines(vasp_dir + symm_filename) - line = sym_file.next() + line = next(sym_file) nrot = extract_int_par('NROT') - line = sym_file.next() + line = next(sym_file) ntrans = extract_int_par('NPCELL') # Lmax - line = sym_file.next() + line = next(sym_file) lmax = extract_int_par('LMAX') mmax = 2 * lmax + 1 # Nion - line = sym_file.next() + line = next(sym_file) nion = extract_int_par('NION') - print " {0:>26} {1:d}".format("Number of rotations:", nrot) - print " {0:>26} {1:d}".format("Number of translations:", ntrans) - print " {0:>26} {1:d}".format("Number of ions:", nion) - print " {0:>26} {1:d}".format("L_max:", lmax) + print(" {0:>26} {1:d}".format("Number of rotations:", nrot)) + print(" {0:>26} {1:d}".format("Number of translations:", ntrans)) + print(" {0:>26} {1:d}".format("Number of ions:", nion)) + print(" {0:>26} {1:d}".format("L_max:", lmax)) rot_mats = np.zeros((nrot, lmax+1, mmax, mmax)) rot_map = np.zeros((nrot, ntrans, nion), dtype=np.int32) - for irot in xrange(nrot): + for irot in range(nrot): # Empty line - line = sym_file.next() + line = next(sym_file) # IROT index (skip it) - line = sym_file.next() + line = next(sym_file) # ISYMOP matrix (can be also skipped) - line = sym_file.next() - line = sym_file.next() - line = sym_file.next() + line = next(sym_file) + line = next(sym_file) + line = next(sym_file) # Skip comment " Permutation map..." 
- line = sym_file.next() + line = next(sym_file) # Permutations (in chunks of 20 indices per line) - for it in xrange(ntrans): - for ibl in xrange((nion - 1) / 20 + 1): + for it in range(ntrans): + for ibl in range((nion - 1) // 20 + 1): i1 = ibl * 20 i2 = (ibl + 1) * 20 - line = sym_file.next() - rot_map[irot, it, i1:i2] = map(int, line.split()) + line = next(sym_file) + rot_map[irot, it, i1:i2] = list(map(int, line.split())) - for l in xrange(lmax + 1): + for l in range(lmax + 1): mmax = 2 * l + 1 # Comment: "L = ..." - line = sym_file.next() - for m in xrange(mmax): - line = sym_file.next() - rot_mats[irot, l, m, :mmax] = map(float, line.split()[:mmax]) + line = next(sym_file) + for m in range(mmax): + line = next(sym_file) + rot_mats[irot, l, m, :mmax] = list(map(float, line.split()[:mmax])) data.update({ 'nrot': nrot, 'ntrans': ntrans, 'lmax': lmax, 'nion': nion, diff --git a/python/converters/vasp_converter.py b/python/triqs_dft_tools/converters/vasp.py similarity index 87% rename from python/converters/vasp_converter.py rename to python/triqs_dft_tools/converters/vasp.py index 9ab3bdc0..62512579 100644 --- a/python/converters/vasp_converter.py +++ b/python/triqs_dft_tools/converters/vasp.py @@ -26,8 +26,8 @@ from types import * import numpy -from pytriqs.archive import * -from converter_tools import * +from h5 import * +from .converter_tools import * import os.path try: import simplejson as json @@ -75,7 +75,7 @@ class VaspConverter(ConverterTools): """ - assert type(filename)==StringType, "Please provide the DFT files' base name as a string." + assert isinstance(filename, str), "Please provide the DFT files' base name as a string." 
if hdf_filename is None: hdf_filename = filename+'.h5' self.hdf_file = hdf_filename self.basename = filename @@ -150,7 +150,7 @@ class VaspConverter(ConverterTools): # R is a generator : each R.Next() will return the next number in the file jheader, rf = self.read_header_and_data(self.ctrl_file) - print jheader + print(jheader) ctrl_head = json.loads(jheader) ng = ctrl_head['ngroups'] @@ -163,12 +163,12 @@ class VaspConverter(ConverterTools): kpts_cart = numpy.zeros((n_k, 3)) bz_weights = numpy.zeros(n_k) try: - for ik in xrange(n_k): - kx, ky, kz = rf.next(), rf.next(), rf.next() + for ik in range(n_k): + kx, ky, kz = next(rf), next(rf), next(rf) kpts[ik, :] = kx, ky, kz - bz_weights[ik] = rf.next() - for ik in xrange(n_k): - kx, ky, kz = rf.next(), rf.next(), rf.next() + bz_weights[ik] = next(rf) + for ik in range(n_k): + kx, ky, kz = next(rf), next(rf), next(rf) kpts_cart[ik, :] = kx, ky, kz except StopIteration: raise "VaspConverter: error reading %s"%self.ctrl_file @@ -186,7 +186,7 @@ class VaspConverter(ConverterTools): assert ng == 1, "Only one group is allowed at the moment" try: - for ig in xrange(ng): + for ig in range(ng): gr_file = self.basename + '.pg%i'%(ig + 1) jheader, rf = self.read_header_and_data(gr_file) gr_head = json.loads(jheader) @@ -203,9 +203,9 @@ class VaspConverter(ConverterTools): shells = [] corr_shells = [] - shion_to_shell = [[] for ish in xrange(len(p_shells))] - cr_shion_to_shell = [[] for ish in xrange(len(p_shells))] - shorbs_to_globalorbs = [[] for ish in xrange(len(p_shells))] + shion_to_shell = [[] for ish in range(len(p_shells))] + cr_shion_to_shell = [[] for ish in range(len(p_shells))] + shorbs_to_globalorbs = [[] for ish in range(len(p_shells))] last_dimension = 0 crshorbs_to_globalorbs = [] icsh = 0 @@ -243,7 +243,7 @@ class VaspConverter(ConverterTools): n_inequiv_shells, corr_to_inequiv, inequiv_to_corr = ConverterTools.det_shell_equivalence(self, corr_shells) if mpi.is_master_node(): - print " No. 
of inequivalent shells:", n_inequiv_shells + print(" No. of inequivalent shells:", n_inequiv_shells) # NB!: these rotation matrices are specific to Wien2K! Set to identity in VASP use_rotations = 1 @@ -272,19 +272,19 @@ class VaspConverter(ConverterTools): # else: hopping = numpy.zeros([n_k, n_spin_blocs, nb_max, nb_max], numpy.complex_) f_weights = numpy.zeros([n_k, n_spin_blocs, nb_max], numpy.complex_) - band_window = [numpy.zeros((n_k, 2), dtype=int) for isp in xrange(n_spin_blocs)] + band_window = [numpy.zeros((n_k, 2), dtype=int) for isp in range(n_spin_blocs)] n_orbitals = numpy.zeros([n_k, n_spin_blocs], numpy.int) - for isp in xrange(n_spin_blocs): - for ik in xrange(n_k): - ib1, ib2 = int(rf.next()), int(rf.next()) + for isp in range(n_spin_blocs): + for ik in range(n_k): + ib1, ib2 = int(next(rf)), int(next(rf)) band_window[isp][ik, :2] = ib1, ib2 nb = ib2 - ib1 + 1 n_orbitals[ik, isp] = nb - for ib in xrange(nb): - hopping[ik, isp, ib, ib] = rf.next() - f_weights[ik, isp, ib] = rf.next() + for ib in range(nb): + hopping[ik, isp, ib, ib] = next(rf) + f_weights[ik, isp, ib] = next(rf) if self.proj_or_hk == 'hk': hopping = numpy.zeros([n_k, n_spin_blocs, n_orbs, n_orbs], numpy.complex_) @@ -298,15 +298,15 @@ class VaspConverter(ConverterTools): f_hk.readline() count += 1 rf_hk = self.read_data(f_hk) - for isp in xrange(n_spin_blocs): - for ik in xrange(n_k): + for isp in range(n_spin_blocs): + for ik in range(n_k): n_orbitals[ik, isp] = n_orbs - for ib in xrange(n_orbs): - for jb in xrange(n_orbs): - hopping[ik, isp, ib, jb] = rf_hk.next() - for ib in xrange(n_orbs): - for jb in xrange(n_orbs): - hopping[ik, isp, ib, jb] += 1j*rf_hk.next() + for ib in range(n_orbs): + for jb in range(n_orbs): + hopping[ik, isp, ib, jb] = next(rf_hk) + for ib in range(n_orbs): + for jb in range(n_orbs): + hopping[ik, isp, ib, jb] += 1j*next(rf_hk) rf_hk.close() # Projectors @@ -328,14 +328,14 @@ class VaspConverter(ConverterTools): # use cases and decide which solution is 
to be made permanent. # for ish, sh in enumerate(p_shells): - for isp in xrange(n_spin_blocs): - for ik in xrange(n_k): - for ion in xrange(len(sh['ion_list'])): - for ilm in xrange(shorbs_to_globalorbs[ish][ion][0],shorbs_to_globalorbs[ish][ion][1]): - for ib in xrange(n_orbitals[ik, isp]): + for isp in range(n_spin_blocs): + for ik in range(n_k): + for ion in range(len(sh['ion_list'])): + for ilm in range(shorbs_to_globalorbs[ish][ion][0],shorbs_to_globalorbs[ish][ion][1]): + for ib in range(n_orbitals[ik, isp]): # This is to avoid confusion with the order of arguments - pr = rf.next() - pi = rf.next() + pr = next(rf) + pi = next(rf) proj_mat_csc[ik, isp, ilm, ib] = complex(pr, pi) # now save only projectors with flag 'corr' to proj_mat @@ -343,22 +343,22 @@ class VaspConverter(ConverterTools): if self.proj_or_hk == 'proj': for ish, sh in enumerate(p_shells): if sh['corr']: - for isp in xrange(n_spin_blocs): - for ik in xrange(n_k): - for ion in xrange(len(sh['ion_list'])): + for isp in range(n_spin_blocs): + for ik in range(n_k): + for ion in range(len(sh['ion_list'])): icsh = shion_to_shell[ish][ion] - for iclm,ilm in enumerate(xrange(shorbs_to_globalorbs[ish][ion][0],shorbs_to_globalorbs[ish][ion][1])): - for ib in xrange(n_orbitals[ik, isp]): + for iclm,ilm in enumerate(range(shorbs_to_globalorbs[ish][ion][0],shorbs_to_globalorbs[ish][ion][1])): + for ib in range(n_orbitals[ik, isp]): proj_mat[ik,isp,icsh,iclm,ib] = proj_mat_csc[ik,isp,ilm,ib] elif self.proj_or_hk == 'hk': for ish, sh in enumerate(p_shells): if sh['corr']: - for ion in xrange(len(sh['ion_list'])): + for ion in range(len(sh['ion_list'])): icsh = shion_to_shell[ish][ion] - for isp in xrange(n_spin_blocs): - for ik in xrange(n_k): - for iclm,ilm in enumerate(xrange(shorbs_to_globalorbs[ish][ion][0],shorbs_to_globalorbs[ish][ion][1])): + for isp in range(n_spin_blocs): + for ik in range(n_k): + for iclm,ilm in enumerate(range(shorbs_to_globalorbs[ish][ion][0],shorbs_to_globalorbs[ish][ion][1])): 
proj_mat[ik,isp,icsh,iclm,ilm] = 1.0 #corr_shell.pop('ion_list') @@ -445,13 +445,13 @@ class VaspConverter(ConverterTools): if os.path.exists(f): mpi.report("Reading input from %s..."%f) R = ConverterTools.read_fortran_file(self, f, self.fortran_to_replace) - assert int(R.next()) == n_k, "convert_misc_input: Number of k-points is inconsistent in oubwin file!" - assert int(R.next()) == SO, "convert_misc_input: SO is inconsistent in oubwin file!" - for ik in xrange(n_k): - R.next() - band_window[isp][ik,0] = R.next() # lowest band - band_window[isp][ik,1] = R.next() # highest band - R.next() + assert int(next(R)) == n_k, "convert_misc_input: Number of k-points is inconsistent in oubwin file!" + assert int(next(R)) == SO, "convert_misc_input: SO is inconsistent in oubwin file!" + for ik in range(n_k): + next(R) + band_window[isp][ik,0] = next(R) # lowest band + band_window[isp][ik,1] = next(R) # highest band + next(R) things_to_save.append('band_window') R.close() # Reading done! diff --git a/python/converters/wannier90_converter.py b/python/triqs_dft_tools/converters/wannier90.py similarity index 97% rename from python/converters/wannier90_converter.py rename to python/triqs_dft_tools/converters/wannier90.py index f7f60960..167114aa 100644 --- a/python/converters/wannier90_converter.py +++ b/python/triqs_dft_tools/converters/wannier90.py @@ -47,8 +47,8 @@ from types import * import numpy import math -from pytriqs.archive import * -from converter_tools import * +from h5 import * +from .converter_tools import * from itertools import product import os.path @@ -79,7 +79,7 @@ class Wannier90Converter(ConverterTools): """ self._name = "Wannier90Converter" - assert type(seedname) == StringType, self._name + \ + assert isinstance(seedname, str), self._name + \ ": Please provide the DFT files' base name as a string." 
if hdf_filename is None: hdf_filename = seedname + '.h5' @@ -125,19 +125,19 @@ class Wannier90Converter(ConverterTools): # conversion try: # read k - point mesh generation option - kmesh_mode = int(R.next()) + kmesh_mode = int(next(R)) if kmesh_mode >= 0: # read k-point mesh size from input - nki = [int(R.next()) for idir in range(3)] + nki = [int(next(R)) for idir in range(3)] else: # some default grid, if everything else fails... nki = [8, 8, 8] # read the total number of electrons per cell - density_required = float(R.next()) + density_required = float(next(R)) # we do not read shells, because we have no additional shells beyond correlated ones, # and the data will be copied from corr_shells into shells (see below) # number of corr. shells (e.g. Fe d, Ce f) in the unit cell, - n_corr_shells = int(R.next()) + n_corr_shells = int(next(R)) # now read the information about the correlated shells (atom, sort, # l, dim, SO flag, irep): corr_shells = [{name: int(val) for name, val in zip( @@ -427,7 +427,7 @@ class Wannier90Converter(ConverterTools): ir += 1 # for each direct lattice vector R read the block of the # Hamiltonian H(R) - for ir, jj, ii in product(range(nrpt), range(num_wf), range(num_wf)): + for ir, jj, ii in product(list(range(nrpt)), list(range(num_wf)), list(range(num_wf))): # advance one line, split the line into tokens currpos += 1 cline = hr_data[currpos].split() @@ -576,7 +576,7 @@ class Wannier90Converter(ConverterTools): nkpt = msize[0] * msize[1] * msize[2] kmesh = numpy.zeros((nkpt, 3), dtype=float) ii = 0 - for ix, iy, iz in product(range(msize[0]), range(msize[1]), range(msize[2])): + for ix, iy, iz in product(list(range(msize[0])), list(range(msize[1])), list(range(msize[2]))): kmesh[ii, :] = [float(ix) / msize[0], float(iy) / msize[1], float(iz) / msize[2]] ii += 1 @@ -608,8 +608,8 @@ class Wannier90Converter(ConverterTools): twopi = 2 * numpy.pi h_of_k = [numpy.zeros((norb, norb), dtype=numpy.complex_) for ik in range(self.n_k)] - ridx = 
numpy.array(range(self.nrpt)) - for ik, ir in product(range(self.n_k), ridx): + ridx = numpy.array(list(range(self.nrpt))) + for ik, ir in product(list(range(self.n_k)), ridx): rdotk = twopi * numpy.dot(self.k_mesh[ik], self.rvec[ir]) factor = (math.cos(rdotk) + 1j * math.sin(rdotk)) / \ float(self.rdeg[ir]) diff --git a/python/converters/wien2k_converter.py b/python/triqs_dft_tools/converters/wien2k.py similarity index 88% rename from python/converters/wien2k_converter.py rename to python/triqs_dft_tools/converters/wien2k.py index 1fcb267b..b45ffa72 100644 --- a/python/converters/wien2k_converter.py +++ b/python/triqs_dft_tools/converters/wien2k.py @@ -22,8 +22,8 @@ from types import * import numpy -from pytriqs.archive import * -from converter_tools import * +from h5 import * +from .converter_tools import * import os.path @@ -65,8 +65,7 @@ class Wien2kConverter(ConverterTools): """ - assert type( - filename) == StringType, "Wien2kConverter: Please provide the DFT files' base name as a string." + assert isinstance(filename, str), "Wien2kConverter: Please provide the DFT files' base name as a string." 
if hdf_filename is None: hdf_filename = filename + '.h5' self.hdf_file = hdf_filename @@ -114,23 +113,23 @@ class Wien2kConverter(ConverterTools): R = ConverterTools.read_fortran_file( self, self.dft_file, self.fortran_to_replace) try: - energy_unit = R.next() # read the energy convertion factor + energy_unit = next(R) # read the energy convertion factor # read the number of k points - n_k = int(R.next()) + n_k = int(next(R)) k_dep_projection = 1 # flag for spin-polarised calculation - SP = int(R.next()) + SP = int(next(R)) # flag for spin-orbit calculation - SO = int(R.next()) - charge_below = R.next() # total charge below energy window + SO = int(next(R)) + charge_below = next(R) # total charge below energy window # total density required, for setting the chemical potential - density_required = R.next() + density_required = next(R) symm_op = 1 # Use symmetry groups for the k-sum # the information on the non-correlated shells is not important # here, maybe skip: # number of shells (e.g. Fe d, As p, O p) in the unit cell, - n_shells = int(R.next()) + n_shells = int(next(R)) # corresponds to index R in formulas # now read the information about the shells (atom, sort, l, dim): shell_entries = ['atom', 'sort', 'l', 'dim'] @@ -138,7 +137,7 @@ class Wien2kConverter(ConverterTools): shell_entries, R)} for ish in range(n_shells)] # number of corr. shells (e.g. 
Fe d, Ce f) in the unit cell, - n_corr_shells = int(R.next()) + n_corr_shells = int(next(R)) # corresponds to index R in formulas # now read the information about the shells (atom, sort, l, dim, SO # flag, irep): @@ -161,14 +160,14 @@ class Wien2kConverter(ConverterTools): for icrsh in range(n_corr_shells): for i in range(corr_shells[icrsh]['dim']): # read real part: for j in range(corr_shells[icrsh]['dim']): - rot_mat[icrsh][i, j] = R.next() + rot_mat[icrsh][i, j] = next(R) # read imaginary part: for i in range(corr_shells[icrsh]['dim']): for j in range(corr_shells[icrsh]['dim']): - rot_mat[icrsh][i, j] += 1j * R.next() + rot_mat[icrsh][i, j] += 1j * next(R) if (SP == 1): # read time inversion flag: - rot_mat_time_inv[icrsh] = int(R.next()) + rot_mat_time_inv[icrsh] = int(next(R)) # Read here the info for the transformation of the basis: n_reps = [1 for i in range(n_inequiv_shells)] @@ -176,8 +175,8 @@ class Wien2kConverter(ConverterTools): T = [] for ish in range(n_inequiv_shells): # number of representatives ("subsets"), e.g. 
t2g and eg - n_reps[ish] = int(R.next()) - dim_reps[ish] = [int(R.next()) for i in range( + n_reps[ish] = int(next(R)) + dim_reps[ish] = [int(next(R)) for i in range( n_reps[ish])] # dimensions of the subsets # The transformation matrix: @@ -189,10 +188,10 @@ class Wien2kConverter(ConverterTools): # now read it from file: for i in range(lmax): for j in range(lmax): - T[ish][i, j] = R.next() + T[ish][i, j] = next(R) for i in range(lmax): for j in range(lmax): - T[ish][i, j] += 1j * R.next() + T[ish][i, j] += 1j * next(R) # Spin blocks to be read: n_spin_blocs = SP + 1 - SO @@ -201,7 +200,7 @@ class Wien2kConverter(ConverterTools): n_orbitals = numpy.zeros([n_k, n_spin_blocs], numpy.int) for isp in range(n_spin_blocs): for ik in range(n_k): - n_orbitals[ik, isp] = int(R.next()) + n_orbitals[ik, isp] = int(next(R)) # Initialise the projectors: proj_mat = numpy.zeros([n_k, n_spin_blocs, n_corr_shells, max( @@ -216,12 +215,12 @@ class Wien2kConverter(ConverterTools): for isp in range(n_spin_blocs): for i in range(n_orb): for j in range(n_orbitals[ik][isp]): - proj_mat[ik, isp, icrsh, i, j] = R.next() + proj_mat[ik, isp, icrsh, i, j] = next(R) # now Imag part: for isp in range(n_spin_blocs): for i in range(n_orb): for j in range(n_orbitals[ik][isp]): - proj_mat[ik, isp, icrsh, i, j] += 1j * R.next() + proj_mat[ik, isp, icrsh, i, j] += 1j * next(R) # now define the arrays for weights and hopping ... # w(k_index), default normalisation @@ -231,7 +230,7 @@ class Wien2kConverter(ConverterTools): # weights in the file for ik in range(n_k): - bz_weights[ik] = R.next() + bz_weights[ik] = next(R) # if the sum over spins is in the weights, take it out again!! 
sm = sum(bz_weights) @@ -244,7 +243,7 @@ class Wien2kConverter(ConverterTools): for ik in range(n_k): n_orb = n_orbitals[ik, isp] for i in range(n_orb): - hopping[ik, isp, i, i] = R.next() * energy_unit + hopping[ik, isp, i, i] = next(R) * energy_unit # keep some things that we need for reading parproj: things_to_set = ['n_shells', 'shells', 'n_corr_shells', 'corr_shells', @@ -252,7 +251,7 @@ class Wien2kConverter(ConverterTools): for it in things_to_set: setattr(self, it, locals()[it]) except StopIteration: # a more explicit error if the file is corrupted. - raise IOError, "Wien2k_converter : reading file %s failed!" % self.dft_file + raise IOError("wien2k : reading file %s failed!" % self.dft_file) R.close() # Reading done! @@ -308,7 +307,7 @@ class Wien2kConverter(ConverterTools): R = ConverterTools.read_fortran_file( self, self.parproj_file, self.fortran_to_replace) - n_parproj = [int(R.next()) for i in range(self.n_shells)] + n_parproj = [int(next(R)) for i in range(self.n_shells)] n_parproj = numpy.array(n_parproj) # Initialise P, here a double list of matrices: @@ -328,39 +327,39 @@ class Wien2kConverter(ConverterTools): # read real part: for i in range(self.shells[ish]['dim']): for j in range(self.n_orbitals[ik][isp]): - proj_mat_all[ik, isp, ish, ir, i, j] = R.next() + proj_mat_all[ik, isp, ish, ir, i, j] = next(R) for isp in range(self.n_spin_blocs): # read imaginary part: for i in range(self.shells[ish]['dim']): for j in range(self.n_orbitals[ik][isp]): proj_mat_all[ik, isp, ish, - ir, i, j] += 1j * R.next() + ir, i, j] += 1j * next(R) # now read the Density Matrix for this orbital below the energy # window: for isp in range(self.n_spin_blocs): for i in range(self.shells[ish]['dim']): # read real part: for j in range(self.shells[ish]['dim']): - dens_mat_below[isp][ish][i, j] = R.next() + dens_mat_below[isp][ish][i, j] = next(R) for isp in range(self.n_spin_blocs): # read imaginary part: for i in range(self.shells[ish]['dim']): for j in 
range(self.shells[ish]['dim']): - dens_mat_below[isp][ish][i, j] += 1j * R.next() + dens_mat_below[isp][ish][i, j] += 1j * next(R) if (self.SP == 0): dens_mat_below[isp][ish] /= 2.0 # Global -> local rotation matrix for this shell: for i in range(self.shells[ish]['dim']): # read real part: for j in range(self.shells[ish]['dim']): - rot_mat_all[ish][i, j] = R.next() + rot_mat_all[ish][i, j] = next(R) for i in range(self.shells[ish]['dim']): # read imaginary part: for j in range(self.shells[ish]['dim']): - rot_mat_all[ish][i, j] += 1j * R.next() + rot_mat_all[ish][i, j] += 1j * next(R) if (self.SP): - rot_mat_all_time_inv[ish] = int(R.next()) + rot_mat_all_time_inv[ish] = int(next(R)) R.close() # Reading done! @@ -404,13 +403,13 @@ class Wien2kConverter(ConverterTools): mpi.report("Reading input from %s..." % self.band_file) R = ConverterTools.read_fortran_file( self, self.band_file, self.fortran_to_replace) - n_k = int(R.next()) + n_k = int(next(R)) # read the list of n_orbitals for all k points n_orbitals = numpy.zeros([n_k, self.n_spin_blocs], numpy.int) for isp in range(self.n_spin_blocs): for ik in range(n_k): - n_orbitals[ik, isp] = int(R.next()) + n_orbitals[ik, isp] = int(next(R)) # Initialise the projectors: proj_mat = numpy.zeros([n_k, self.n_spin_blocs, self.n_corr_shells, max( @@ -425,12 +424,12 @@ class Wien2kConverter(ConverterTools): for isp in range(self.n_spin_blocs): for i in range(n_orb): for j in range(n_orbitals[ik, isp]): - proj_mat[ik, isp, icrsh, i, j] = R.next() + proj_mat[ik, isp, icrsh, i, j] = next(R) # now Imag part: for isp in range(self.n_spin_blocs): for i in range(n_orb): for j in range(n_orbitals[ik, isp]): - proj_mat[ik, isp, icrsh, i, j] += 1j * R.next() + proj_mat[ik, isp, icrsh, i, j] += 1j * next(R) hopping = numpy.zeros([n_k, self.n_spin_blocs, numpy.max( n_orbitals), numpy.max(n_orbitals)], numpy.complex_) @@ -441,10 +440,10 @@ class Wien2kConverter(ConverterTools): for ik in range(n_k): n_orb = n_orbitals[ik, isp] for i in 
range(n_orb): - hopping[ik, isp, i, i] = R.next() * self.energy_unit + hopping[ik, isp, i, i] = next(R) * self.energy_unit # now read the partial projectors: - n_parproj = [int(R.next()) for i in range(self.n_shells)] + n_parproj = [int(next(R)) for i in range(self.n_shells)] n_parproj = numpy.array(n_parproj) # Initialise P, here a double list of matrices: @@ -460,20 +459,20 @@ class Wien2kConverter(ConverterTools): for i in range(self.shells[ish]['dim']): for j in range(n_orbitals[ik, isp]): proj_mat_all[ik, isp, ish, - ir, i, j] = R.next() + ir, i, j] = next(R) # read imaginary part: for i in range(self.shells[ish]['dim']): for j in range(n_orbitals[ik, isp]): proj_mat_all[ik, isp, ish, - ir, i, j] += 1j * R.next() + ir, i, j] += 1j * next(R) R.close() except KeyError: - raise IOError, "convert_bands_input : Needed data not found in hdf file. Consider calling convert_dft_input first!" + raise IOError("convert_bands_input : Needed data not found in hdf file. Consider calling convert_dft_input first!") except StopIteration: # a more explicit error if the file is corrupted. - raise IOError, "Wien2k_converter : reading file %s failed!" % self.band_file + raise IOError("wien2k : reading file %s failed!" % self.band_file) # Reading done! @@ -497,7 +496,7 @@ class Wien2kConverter(ConverterTools): - symmetries from :file:`case.outputs`, if those Wien2k files are present and stores the data in the hdf5 archive. - This function is automatically called by :meth:`convert_dft_input `. + This function is automatically called by :meth:`convert_dft_input `. """ @@ -507,7 +506,7 @@ class Wien2kConverter(ConverterTools): # Check if SP, SO and n_k are already in h5 with HDFArchive(self.hdf_file, 'r') as ar: if not (self.dft_subgrp in ar): - raise IOError, "convert_misc_input: No %s subgroup in hdf file found! Call convert_dft_input first." % self.dft_subgrp + raise IOError("convert_misc_input: No %s subgroup in hdf file found! Call convert_dft_input first." 
% self.dft_subgrp) SP = ar[self.dft_subgrp]['SP'] SO = ar[self.dft_subgrp]['SO'] n_k = ar[self.dft_subgrp]['n_k'] @@ -539,19 +538,19 @@ class Wien2kConverter(ConverterTools): mpi.report("Reading input from %s..." % f) R = ConverterTools.read_fortran_file( self, f, self.fortran_to_replace) - n_k_oubwin = int(R.next()) + n_k_oubwin = int(next(R)) if (n_k_oubwin != n_k): mpi.report( "convert_misc_input : WARNING : n_k in case.oubwin is different from n_k in case.klist") assert int( - R.next()) == SO, "convert_misc_input: SO is inconsistent in oubwin file!" + next(R)) == SO, "convert_misc_input: SO is inconsistent in oubwin file!" band_window[isp] = numpy.zeros((n_k_oubwin, 2), dtype=int) - for ik in xrange(n_k_oubwin): - R.next() - band_window[isp][ik, 0] = R.next() # lowest band - band_window[isp][ik, 1] = R.next() # highest band - R.next() + for ik in range(n_k_oubwin): + next(R) + band_window[isp][ik, 0] = next(R) # lowest band + band_window[isp][ik, 1] = next(R) # highest band + next(R) things_to_save.append('band_window') R.close() # Reading done! 
@@ -578,7 +577,7 @@ class Wien2kConverter(ConverterTools): things_to_save.extend( ['lattice_type', 'lattice_constants', 'lattice_angles']) except IOError: - raise IOError, "convert_misc_input: reading file %s failed" % self.struct_file + raise IOError("convert_misc_input: reading file %s failed" % self.struct_file) # Read relevant data from .outputs file ####################################### @@ -610,7 +609,7 @@ class Wien2kConverter(ConverterTools): things_to_save.extend(['n_symmetries', 'rot_symmetries']) things_to_save.append('rot_symmetries') except IOError: - raise IOError, "convert_misc_input: reading file %s failed" % self.outputs_file + raise IOError("convert_misc_input: reading file %s failed" % self.outputs_file) # Save it to the HDF: with HDFArchive(self.hdf_file, 'a') as ar: @@ -635,7 +634,7 @@ class Wien2kConverter(ConverterTools): # Check if SP, SO and n_k are already in h5 with HDFArchive(self.hdf_file, 'r') as ar: if not (self.dft_subgrp in ar): - raise IOError, "convert_transport_input: No %s subgroup in hdf file found! Call convert_dft_input first." % self.dft_subgrp + raise IOError("convert_transport_input: No %s subgroup in hdf file found! Call convert_dft_input first." % self.dft_subgrp) SP = ar[self.dft_subgrp]['SP'] SO = ar[self.dft_subgrp]['SO'] n_k = ar[self.dft_subgrp]['n_k'] @@ -665,20 +664,20 @@ class Wien2kConverter(ConverterTools): band_window_optics = [] for isp, f in enumerate(files): if not os.path.exists(f): - raise IOError, "convert_transport_input: File %s does not exist" % f + raise IOError("convert_transport_input: File %s does not exist" % f) mpi.report("Reading input from %s..." 
% f) R = ConverterTools.read_fortran_file( self, f, {'D': 'E', '(': '', ')': '', ',': ' '}) band_window_optics_isp = [] - for ik in xrange(n_k): - R.next() - nu1 = int(R.next()) - nu2 = int(R.next()) + for ik in range(n_k): + next(R) + nu1 = int(next(R)) + nu2 = int(next(R)) band_window_optics_isp.append((nu1, nu2)) n_bands = nu2 - nu1 + 1 for _ in range(4): - R.next() + next(R) if n_bands <= 0: velocity_xyz = numpy.zeros((1, 1, 3), dtype=complex) else: @@ -688,7 +687,7 @@ class Wien2kConverter(ConverterTools): for nu_j in range(nu_i, n_bands): for i in range(3): velocity_xyz[nu_i][nu_j][ - i] = R.next() + R.next() * 1j + i] = next(R) + next(R) * 1j if (nu_i != nu_j): velocity_xyz[nu_j][nu_i][i] = velocity_xyz[ nu_i][nu_j][i].conjugate() @@ -737,13 +736,13 @@ class Wien2kConverter(ConverterTools): self, symm_file, self.fortran_to_replace) try: - n_symm = int(R.next()) # Number of symmetry operations - n_atoms = int(R.next()) # number of atoms involved - perm = [[int(R.next()) for i in range(n_atoms)] + n_symm = int(next(R)) # Number of symmetry operations + n_atoms = int(next(R)) # number of atoms involved + perm = [[int(next(R)) for i in range(n_atoms)] for j in range(n_symm)] # list of permutations of the atoms if SP: # time inversion for SO coupling - time_inv = [int(R.next()) for j in range(n_symm)] + time_inv = [int(next(R)) for j in range(n_symm)] else: time_inv = [0 for j in range(n_symm)] @@ -757,11 +756,11 @@ class Wien2kConverter(ConverterTools): for i in range(orbits[orb]['dim']): for j in range(orbits[orb]['dim']): # real part - mat[i_symm][orb][i, j] = R.next() + mat[i_symm][orb][i, j] = next(R) for i in range(orbits[orb]['dim']): for j in range(orbits[orb]['dim']): mat[i_symm][orb][i, j] += 1j * \ - R.next() # imaginary part + next(R) # imaginary part mat_tinv = [numpy.identity(orbits[orb]['dim'], numpy.complex_) for orb in range(n_orbits)] @@ -773,14 +772,14 @@ class Wien2kConverter(ConverterTools): for i in range(orbits[orb]['dim']): for j in 
range(orbits[orb]['dim']): # real part - mat_tinv[orb][i, j] = R.next() + mat_tinv[orb][i, j] = next(R) for i in range(orbits[orb]['dim']): for j in range(orbits[orb]['dim']): mat_tinv[orb][i, j] += 1j * \ - R.next() # imaginary part + next(R) # imaginary part except StopIteration: # a more explicit error if the file is corrupted. - raise IOError, "Wien2k_converter : reading file %s failed!" %symm_file + raise IOError("wien2k : reading file %s failed!" %symm_file) R.close() # Reading done! diff --git a/python/sumk_dft.py b/python/triqs_dft_tools/sumk_dft.py similarity index 96% rename from python/sumk_dft.py rename to python/triqs_dft_tools/sumk_dft.py index 288b2225..7d05b24c 100644 --- a/python/sumk_dft.py +++ b/python/triqs_dft_tools/sumk_dft.py @@ -23,14 +23,13 @@ from types import * import numpy -import pytriqs.utility.dichotomy as dichotomy -from pytriqs.gf import * -import pytriqs.utility.mpi as mpi -from pytriqs.utility.comparison_tests import assert_arrays_are_close -from pytriqs.archive import * -from symmetry import * -from block_structure import BlockStructure -from sets import Set +import triqs.utility.dichotomy as dichotomy +from triqs.gf import * +import triqs.utility.mpi as mpi +from triqs.utility.comparison_tests import assert_arrays_are_close +from h5 import * +from .symmetry import * +from .block_structure import BlockStructure from itertools import product from warnings import warn from scipy import compress @@ -38,7 +37,7 @@ from scipy.optimize import minimize class SumkDFT(object): - """This class provides a general SumK method for combining ab-initio code and pytriqs.""" + """This class provides a general SumK method for combining ab-initio code and triqs.""" def __init__(self, hdf_file, h_field=0.0, use_dft_blocks=False, dft_data='dft_input', symmcorr_data='dft_symmcorr_input', parproj_data='dft_parproj_input', @@ -52,11 +51,11 @@ class SumkDFT(object): hdf_file : string Name of hdf5 containing the data. 
h_field : scalar, optional - The value of magnetic field to add to the DFT Hamiltonian. + The value of magnetic field to add to the DFT Hamiltonian. The contribution -h_field*sigma is added to diagonal elements of the Hamiltonian. It cannot be used with the spin-orbit coupling on; namely h_field is set to 0 if self.SO=True. use_dft_blocks : boolean, optional - If True, the local Green's function matrix for each spin is divided into smaller blocks + If True, the local Green's function matrix for each spin is divided into smaller blocks with the block structure determined from the DFT density matrix of the corresponding correlated shell. Alternatively and additionally, the block structure can be analysed using :meth:`analyse_block_structure ` @@ -64,7 +63,7 @@ class SumkDFT(object): dft_data : string, optional Name of hdf5 subgroup in which DFT data for projector and lattice Green's function construction are stored. symmcorr_data : string, optional - Name of hdf5 subgroup in which DFT data on symmetries of correlated shells + Name of hdf5 subgroup in which DFT data on symmetries of correlated shells (symmetry operations, permutaion matrices etc.) are stored. parproj_data : string, optional Name of hdf5 subgroup in which DFT data on non-normalized projectors for non-correlated @@ -82,7 +81,7 @@ class SumkDFT(object): Name of hdf5 subgroup in which miscellaneous DFT data are stored. """ - if not type(hdf_file) == StringType: + if not isinstance(hdf_file, str): mpi.report("Give a string for the hdf5 filename to read the input!") else: self.hdf_file = hdf_file @@ -128,10 +127,10 @@ class SumkDFT(object): # GF structure used for the local things in the k sums # Most general form allowing for all hybridisation, i.e. 
largest # blocks possible - self.gf_struct_sumk = [[(sp, range(self.corr_shells[icrsh]['dim'])) for sp in self.spin_block_names[self.corr_shells[icrsh]['SO']]] + self.gf_struct_sumk = [[(sp, list(range(self.corr_shells[icrsh]['dim']))) for sp in self.spin_block_names[self.corr_shells[icrsh]['SO']]] for icrsh in range(self.n_corr_shells)] # First set a standard gf_struct solver: - self.gf_struct_solver = [dict([(sp, range(self.corr_shells[self.inequiv_to_corr[ish]]['dim'])) + self.gf_struct_solver = [dict([(sp, list(range(self.corr_shells[self.inequiv_to_corr[ish]]['dim']))) for sp in self.spin_block_names[self.corr_shells[self.inequiv_to_corr[ish]]['SO']]]) for ish in range(self.n_inequiv_shells)] # Set standard (identity) maps from gf_struct_sumk <-> @@ -153,14 +152,14 @@ class SumkDFT(object): self.chemical_potential = 0.0 # initialise mu self.init_dc() # initialise the double counting - + # charge mixing parameters self.charge_mixing = False # defaults from PRB 90 235103 ("... slow but stable convergence ...") self.charge_mixing_alpha = 0.1 self.charge_mixing_gamma = 1.0 self.deltaNOld = None - + # Analyse the block structure and determine the smallest gf_struct # blocks and maps, if desired if use_dft_blocks: @@ -277,7 +276,7 @@ class SumkDFT(object): try: list_to_return.append(ar[subgrp][it]) except: - raise ValueError, "load: %s not found, and so not loaded." % it + raise ValueError("load: %s not found, and so not loaded." % it) return list_to_return ################ @@ -300,10 +299,10 @@ class SumkDFT(object): bname : string Block name of the target block of the lattice Green's function. - gf_to_downfold : Gf + gf_to_downfold : Gf Block of the Green's function that is to be downfolded. - gf_inp : Gf - FIXME + gf_inp : Gf + FIXME shells : string, optional - if shells='corr': orthonormalized projectors for correlated shells are used for the downfolding. 
@@ -328,7 +327,7 @@ class SumkDFT(object): projmat = self.proj_mat[ik, isp, ish, 0:dim, 0:n_orb] elif shells == 'all': if ir is None: - raise ValueError, "downfold: provide ir if treating all shells." + raise ValueError("downfold: provide ir if treating all shells.") dim = self.shells[ish]['dim'] projmat = self.proj_mat_all[ik, isp, ish, ir, 0:dim, 0:n_orb] elif shells == 'csc': @@ -355,10 +354,10 @@ class SumkDFT(object): bname : string Block name of the target block of the lattice Green's function. - gf_to_upfold : Gf + gf_to_upfold : Gf Block of the Green's function that is to be upfolded. - gf_inp : Gf - FIXME + gf_inp : Gf + FIXME shells : string, optional - if shells='corr': orthonormalized projectors for correlated shells are used for the upfolding. @@ -383,7 +382,7 @@ class SumkDFT(object): projmat = self.proj_mat[ik, isp, ish, 0:dim, 0:n_orb] elif shells == 'all': if ir is None: - raise ValueError, "upfold: provide ir if treating all shells." + raise ValueError("upfold: provide ir if treating all shells.") dim = self.shells[ish]['dim'] projmat = self.proj_mat_all[ik, isp, ish, ir, 0:dim, 0:n_orb] elif shells == 'csc': @@ -406,10 +405,10 @@ class SumkDFT(object): - if shells='corr': ish labels all correlated shells (equivalent or not) - if shells='all': ish labels only representative (inequivalent) non-correlated shells - gf_to_rotate : Gf + gf_to_rotate : Gf Block of the Green's function that is to be rotated. direction : string - The direction of rotation can be either + The direction of rotation can be either - 'toLocal' : global -> local transformation, - 'toGlobal' : local -> global transformation. @@ -459,7 +458,7 @@ class SumkDFT(object): def lattice_gf(self, ik, mu=None, iw_or_w="iw", beta=40, broadening=None, mesh=None, with_Sigma=True, with_dc=True): r""" - Calculates the lattice Green function for a given k-point from the DFT Hamiltonian and the self energy. 
+ Calculates the lattice Green function for a given k-point from the DFT Hamiltonian and the self energy. Parameters ---------- @@ -482,7 +481,7 @@ class SumkDFT(object): Data defining mesh on which the real-axis GF will be calculated, given in the form (om_min,om_max,n_points), where om_min is the minimum omega, om_max is the maximum omega and n_points is the number of points. with_Sigma : boolean, optional - If True the GF will be calculated with the self-energy stored in self.Sigmaimp_(w/iw), for real/Matsubara GF, respectively. + If True the GF will be calculated with the self-energy stored in self.Sigmaimp_(w/iw), for real/Matsubara GF, respectively. In this case the mesh is taken from the self.Sigma_imp object. If with_Sigma=True but self.Sigmaimp_(w/iw) is not present, with_Sigma is reset to False. with_dc : boolean, optional @@ -499,7 +498,7 @@ class SumkDFT(object): ntoi = self.spin_names_to_ind[self.SO] spn = self.spin_block_names[self.SO] if (iw_or_w != "iw") and (iw_or_w != "w"): - raise ValueError, "lattice_gf: Implemented only for Re/Im frequency functions." + raise ValueError("lattice_gf: Implemented only for Re/Im frequency functions.") if not hasattr(self, "Sigma_imp_" + iw_or_w): with_Sigma = False if broadening is None: @@ -525,12 +524,12 @@ class SumkDFT(object): else: if iw_or_w == "iw": if beta is None: - raise ValueError, "lattice_gf: Give the beta for the lattice GfReFreq." + raise ValueError("lattice_gf: Give the beta for the lattice GfReFreq.") # Default number of Matsubara frequencies mesh = MeshImFreq(beta=beta, S='Fermion', n_max=1025) elif iw_or_w == "w": if mesh is None: - raise ValueError, "lattice_gf: Give the mesh=(om_min,om_max,n_points) for the lattice GfReFreq." 
+ raise ValueError("lattice_gf: Give the mesh=(om_min,om_max,n_points) for the lattice GfReFreq.") mesh = MeshReFreq(mesh[0], mesh[1], mesh[2]) # Check if G_latt is present @@ -551,7 +550,7 @@ class SumkDFT(object): # Set up G_latt if set_up_G_latt: block_structure = [ - range(self.n_orbitals[ik, ntoi[sp]]) for sp in spn] + list(range(self.n_orbitals[ik, ntoi[sp]])) for sp in spn] gf_struct = [(spn[isp], block_structure[isp]) for isp in range(self.n_spin_blocks[self.SO])] block_ind_list = [block for block, inner in gf_struct] @@ -573,9 +572,9 @@ class SumkDFT(object): idmat = [numpy.identity( self.n_orbitals[ik, ntoi[sp]], numpy.complex_) for sp in spn] M = copy.deepcopy(idmat) - + for ibl in range(self.n_spin_blocks[self.SO]): - + ind = ntoi[spn[ibl]] n_orb = self.n_orbitals[ik, ind] M[ibl] = self.hopping[ik, ind, 0:n_orb, 0:n_orb] - \ @@ -629,9 +628,9 @@ class SumkDFT(object): self.Sigma_imp_w = [self.block_structure.create_gf(ish=icrsh, mesh=Sigma_imp[icrsh].mesh, gf_function=GfReFreq, space='sumk') for icrsh in range(self.n_corr_shells)] SK_Sigma_imp = self.Sigma_imp_w - + else: - raise ValueError, "put_Sigma: This type of Sigma is not handled, give either BlockGf of GfReFreq or GfImFreq." + raise ValueError("put_Sigma: This type of Sigma is not handled, give either BlockGf of GfReFreq or GfImFreq.") # rotation from local to global coordinate system: for icrsh in range(self.n_corr_shells): @@ -715,7 +714,7 @@ class SumkDFT(object): else it will be in ``gf_struct_sumk``. 
show_warnings : bool, optional Displays warning messages during transformation - (Only effective if transform_to_solver_blocks = True + (Only effective if transform_to_solver_blocks = True Returns ------- @@ -733,19 +732,19 @@ class SumkDFT(object): G_loc = [self.Sigma_imp_iw[icrsh].copy() for icrsh in range( self.n_corr_shells)] # this list will be returned beta = G_loc[0].mesh.beta - G_loc_inequiv = [BlockGf(name_block_generator=[(block, GfImFreq(indices=inner, mesh=G_loc[0].mesh)) for block, inner in self.gf_struct_solver[ish].iteritems()], + G_loc_inequiv = [BlockGf(name_block_generator=[(block, GfImFreq(indices=inner, mesh=G_loc[0].mesh)) for block, inner in self.gf_struct_solver[ish].items()], make_copies=False) for ish in range(self.n_inequiv_shells)] elif iw_or_w == "w": G_loc = [self.Sigma_imp_w[icrsh].copy() for icrsh in range( self.n_corr_shells)] # this list will be returned mesh = G_loc[0].mesh - G_loc_inequiv = [BlockGf(name_block_generator=[(block, GfReFreq(indices=inner, mesh=mesh)) for block, inner in self.gf_struct_solver[ish].iteritems()], + G_loc_inequiv = [BlockGf(name_block_generator=[(block, GfReFreq(indices=inner, mesh=mesh)) for block, inner in self.gf_struct_solver[ish].items()], make_copies=False) for ish in range(self.n_inequiv_shells)] for icrsh in range(self.n_corr_shells): G_loc[icrsh].zero() # initialize to zero - ikarray = numpy.array(range(self.n_k)) + ikarray = numpy.array(list(range(self.n_k))) for ik in mpi.slice_array(ikarray): if iw_or_w == 'iw': G_latt = self.lattice_gf( @@ -832,8 +831,8 @@ class SumkDFT(object): def analyse_block_structure(self, threshold=0.00001, include_shells=None, dm=None, hloc=None): r""" - Determines the block structure of local Green's functions by analysing the structure of - the corresponding density matrices and the local Hamiltonian. 
The resulting block structures + Determines the block structure of local Green's functions by analysing the structure of + the corresponding density matrices and the local Hamiltonian. The resulting block structures for correlated shells are stored in the :class:`SumkDFT.block_structure ` attribute. Parameters @@ -870,7 +869,7 @@ class SumkDFT(object): for ish in range(self.n_corr_shells)] if include_shells is None: - include_shells = range(self.n_inequiv_shells) + include_shells = list(range(self.n_inequiv_shells)) for ish in include_shells: for sp in self.spin_block_names[self.corr_shells[self.inequiv_to_corr[ish]]['SO']]: @@ -881,7 +880,7 @@ class SumkDFT(object): # Determine off-diagonal entries in upper triangular part of # density matrix - offdiag = Set([]) + offdiag = set([]) for i in range(n_orb): for j in range(i + 1, n_orb): if dmbool[i, j] or hlocbool[i, j]: @@ -903,7 +902,7 @@ class SumkDFT(object): for i in range(num_blocs): blocs[i].sort() self.gf_struct_solver[ish].update( - [('%s_%s' % (sp, i), range(len(blocs[i])))]) + [('%s_%s' % (sp, i), list(range(len(blocs[i]))))]) # Construct sumk_to_solver taking (sumk_block, sumk_index) --> (solver_block, solver_inner) # and solver_to_sumk taking (solver_block, solver_inner) --> @@ -922,7 +921,7 @@ class SumkDFT(object): # Now calculate degeneracies of orbitals dm = {} - for block, inner in self.gf_struct_solver[ish].iteritems(): + for block, inner in self.gf_struct_solver[ish].items(): # get dm for the blocks: dm[block] = numpy.zeros( [len(inner), len(inner)], numpy.complex_) @@ -935,8 +934,8 @@ class SumkDFT(object): dm[block][ind1, ind2] = dens_mat[ish][ block_sumk][ind1_sumk, ind2_sumk] - for block1 in self.gf_struct_solver[ish].iterkeys(): - for block2 in self.gf_struct_solver[ish].iterkeys(): + for block1 in self.gf_struct_solver[ish].keys(): + for block2 in self.gf_struct_solver[ish].keys(): if dm[block1].shape == dm[block2].shape: if ((abs(dm[block1] - dm[block2]) < threshold).all()) and (block1 != 
block2): ind1 = -1 @@ -977,7 +976,7 @@ class SumkDFT(object): make_copies=False) for g_sh in G] for ish in range(len(gf)): for name, g in gf[ish]: - g.set_from_inverse_fourier(G[ish][name]) + g.set_from_fourier(G[ish][name]) # keep a GfImTime from the supplied GfImTime elif all(isinstance(g_sh.mesh, MeshImTime) for g_sh in G): gf = G @@ -996,7 +995,7 @@ class SumkDFT(object): else: return w-w0 gf = [BlockGf(name_block_generator = [(name, GfReFreq( - window=(-numpy.pi*(len(block.mesh)-1) / (len(block.mesh)*get_delta_from_mesh(block.mesh)), + window=(-numpy.pi*(len(block.mesh)-1) / (len(block.mesh)*get_delta_from_mesh(block.mesh)), numpy.pi*(len(block.mesh)-1) / (len(block.mesh)*get_delta_from_mesh(block.mesh))), n_points=len(block.mesh), indices=block.indices)) for name, block in g_sh], make_copies=False) for g_sh in G] @@ -1059,7 +1058,7 @@ class SumkDFT(object): if include_shells is None: # include all shells - include_shells = range(self.n_inequiv_shells) + include_shells = list(range(self.n_inequiv_shells)) for ish in include_shells: for sp in self.spin_block_names[self.corr_shells[self.inequiv_to_corr[ish]]['SO']]: @@ -1070,7 +1069,7 @@ class SumkDFT(object): # Determine off-diagonal entries in upper triangular part of the # Green's function - offdiag = Set([]) + offdiag = set([]) for i in range(n_orb): for j in range(i + 1, n_orb): if maxgf_bool[i, j]: @@ -1092,7 +1091,7 @@ class SumkDFT(object): for i in range(num_blocs): blocs[i].sort() self.gf_struct_solver[ish].update( - [('%s_%s' % (sp, i), range(len(blocs[i])))]) + [('%s_%s' % (sp, i), list(range(len(blocs[i]))))]) # Construct sumk_to_solver taking (sumk_block, sumk_index) --> (solver_block, solver_inner) # and solver_to_sumk taking (solver_block, solver_inner) --> @@ -1111,7 +1110,7 @@ class SumkDFT(object): # transform G to the new structure full_structure = BlockStructure.full_structure( - [{sp:range(self.corr_shells[self.inequiv_to_corr[ish]]['dim']) + 
[{sp:list(range(self.corr_shells[self.inequiv_to_corr[ish]]['dim'])) for sp in self.spin_block_names[self.corr_shells[self.inequiv_to_corr[ish]]['SO']]} for ish in range(self.n_inequiv_shells)],self.corr_to_inequiv) G_transformed = [ @@ -1163,7 +1162,7 @@ class SumkDFT(object): if include_shells is None: # include all shells - include_shells = range(self.n_inequiv_shells) + include_shells = list(range(self.n_inequiv_shells)) # We consider two blocks equal, if their Green's functions obey # maybe_conjugate1( v1^dagger G1 v1 ) = maybe_conjugate2( v2^dagger G2 v2 ) @@ -1176,8 +1175,8 @@ class SumkDFT(object): # where our goal is to find T # we just try whether there is such a T with and without conjugation for ish in include_shells: - for block1 in self.gf_struct_solver[ish].iterkeys(): - for block2 in self.gf_struct_solver[ish].iterkeys(): + for block1 in self.gf_struct_solver[ish].keys(): + for block2 in self.gf_struct_solver[ish].keys(): if block1==block2: continue # check if the blocks are already present in the deg_shells @@ -1360,7 +1359,7 @@ class SumkDFT(object): # a block was found, break out of the loop break - + def calculate_diagonalization_matrix(self, prop_to_be_diagonal='eal', calc_in_solver_blocks=True, write_to_blockstructure = True, shells=None): """ Calculates the diagonalisation matrix, and (optionally) stores it in the BlockStructure. @@ -1451,7 +1450,7 @@ class SumkDFT(object): - if 'using_point_integration': Only works for diagonal hopping matrix (true in wien2k). beta : float, optional - Inverse temperature. + Inverse temperature. 
Returns ------- @@ -1464,7 +1463,7 @@ class SumkDFT(object): dens_mat[icrsh][sp] = numpy.zeros( [self.corr_shells[icrsh]['dim'], self.corr_shells[icrsh]['dim']], numpy.complex_) - ikarray = numpy.array(range(self.n_k)) + ikarray = numpy.array(list(range(self.n_k))) for ik in mpi.slice_array(ikarray): if method == "using_gf": @@ -1493,7 +1492,7 @@ class SumkDFT(object): MMat[isp][inu, inu] = 0.0 else: - raise ValueError, "density_matrix: the method '%s' is not supported." % method + raise ValueError("density_matrix: the method '%s' is not supported." % method) for icrsh in range(self.n_corr_shells): for isp, sp in enumerate(self.spin_block_names[self.corr_shells[icrsh]['SO']]): @@ -1696,10 +1695,10 @@ class SumkDFT(object): spn = self.spin_block_names[self.corr_shells[icrsh]['SO']] Ncr = {sp: 0.0 for sp in spn} - for block, inner in self.gf_struct_solver[ish].iteritems(): + for block, inner in self.gf_struct_solver[ish].items(): bl = self.solver_to_sumk_block[ish][block] Ncr[bl] += dens_mat[block].real.trace() - Ncrtot = sum(Ncr.itervalues()) + Ncrtot = sum(Ncr.values()) for sp in spn: self.dc_imp[icrsh][sp] = numpy.identity(dim, numpy.float_) if self.SP == 0: # average the densities if there is no SP: @@ -1712,7 +1711,7 @@ class SumkDFT(object): if use_dc_value is None: if U_interact is None and J_hund is None: - raise ValueError, "set_dc: either provide U_interact and J_hund or set use_dc_value to dc value." + raise ValueError("set_dc: either provide U_interact and J_hund or set use_dc_value to dc value.") if use_dc_formula == 0: # FLL @@ -1859,7 +1858,7 @@ class SumkDFT(object): def total_density(self, mu=None, iw_or_w="iw", with_Sigma=True, with_dc=True, broadening=None): r""" - Calculates the total charge within the energy window for a given chemical potential. + Calculates the total charge within the energy window for a given chemical potential. 
The chemical potential is either given by parameter `mu` or, if it is not specified, taken from `self.chemical_potential`. @@ -1876,7 +1875,7 @@ class SumkDFT(object): with - .. math:: n(k) = Tr G_{\nu\nu'}(k, i\omega_{n}). + .. math:: n(k) = Tr G_{\nu\nu'}(k, i\omega_{n}). The calculation is done in the global coordinate system, if distinction is made between local/global. @@ -1907,7 +1906,7 @@ class SumkDFT(object): if mu is None: mu = self.chemical_potential dens = 0.0 - ikarray = numpy.array(range(self.n_k)) + ikarray = numpy.array(list(range(self.n_k))) for ik in mpi.slice_array(ikarray): G_latt = self.lattice_gf( ik=ik, mu=mu, iw_or_w=iw_or_w, with_Sigma=with_Sigma, with_dc=with_dc, broadening=broadening) @@ -2000,7 +1999,7 @@ class SumkDFT(object): elif dm_type == 'vasp': filename = 'GAMMA' - assert type(filename) == StringType, ("calc_density_correction: " + assert isinstance(filename, str), ("calc_density_correction: " "filename has to be a string!") ntoi = self.spin_names_to_ind[self.SO] @@ -2022,7 +2021,7 @@ class SumkDFT(object): # Convert Fermi weights to a density matrix dens_mat_dft = {} for sp in spn: - dens_mat_dft[sp] = [fermi_weights[ik, ntoi[sp], :].astype(numpy.complex_) for ik in xrange(self.n_k)] + dens_mat_dft[sp] = [fermi_weights[ik, ntoi[sp], :].astype(numpy.complex_) for ik in range(self.n_k)] # Set up deltaN: @@ -2031,7 +2030,7 @@ class SumkDFT(object): deltaN[sp] = [numpy.zeros([self.n_orbitals[ik, ntoi[sp]], self.n_orbitals[ ik, ntoi[sp]]], numpy.complex_) for ik in range(self.n_k)] - ikarray = numpy.array(range(self.n_k)) + ikarray = numpy.array(list(range(self.n_k))) for ik in mpi.slice_array(ikarray): G_latt_iw = self.lattice_gf( ik=ik, mu=self.chemical_potential, iw_or_w="iw") @@ -2043,7 +2042,7 @@ class SumkDFT(object): ik, 0, bname, G_latt_iw[bname], gf,shells='csc') G_latt_iw[bname] = G_latt_rot_iw.copy() - + for bname, gf in G_latt_iw: deltaN[bname][ik] = G_latt_iw[bname].density() @@ -2053,7 +2052,7 @@ class SumkDFT(object): 
nb = self.n_orbitals[ik, ntoi[bname]] diag_inds = numpy.diag_indices(nb) deltaN[bname][ik][diag_inds] -= dens_mat_dft[bname][ik][:nb] - + if self.charge_mixing and self.deltaNOld is not None: G2 = numpy.sum(self.kpts_cart[ik,:]**2) # Kerker mixing @@ -2075,9 +2074,9 @@ class SumkDFT(object): mpi.world, dens[bname], lambda x, y: x + y) self.deltaNOld = copy.copy(deltaN) mpi.barrier() - - - + + + band_en_correction = mpi.all_reduce(mpi.world, band_en_correction, lambda x,y : x+y) # now save to file: @@ -2120,7 +2119,7 @@ class SumkDFT(object): to_write = {f: (0, 'up'), f1: (1, 'down')} if self.SO == 1: to_write = {f: (0, 'ud'), f1: (0, 'ud')} - for fout in to_write.iterkeys(): + for fout in to_write.keys(): isp, sp = to_write[fout] for ik in range(self.n_k): fout.write("%s\n" % self.n_orbitals[ik, isp]) @@ -2137,12 +2136,12 @@ class SumkDFT(object): if mpi.is_master_node(): with open(filename, 'w') as f: f.write(" %i -1 ! Number of k-points, default number of bands\n"%(self.n_k)) - for ik in xrange(self.n_k): + for ik in range(self.n_k): ib1 = band_window[0][ik, 0] ib2 = band_window[0][ik, 1] f.write(" %i %i %i\n"%(ik + 1, ib1, ib2)) - for inu in xrange(self.n_orbitals[ik, 0]): - for imu in xrange(self.n_orbitals[ik, 0]): + for inu in range(self.n_orbitals[ik, 0]): + for imu in range(self.n_orbitals[ik, 0]): valre = (deltaN['up'][ik][inu, imu].real + deltaN['down'][ik][inu, imu].real) / 2.0 valim = (deltaN['up'][ik][inu, imu].imag + deltaN['down'][ik][inu, imu].imag) / 2.0 f.write(" %.14f %.14f"%(valre, valim)) @@ -2199,7 +2198,7 @@ class SumkDFT(object): return dc def check_projectors(self): - """Calculated the density matrix from projectors (DM = P Pdagger) to check that it is correct and + """Calculated the density matrix from projectors (DM = P Pdagger) to check that it is correct and specifically that it matches DFT.""" dens_mat = [numpy.zeros([self.corr_shells[icrsh]['dim'], self.corr_shells[icrsh]['dim']], numpy.complex_) for icrsh in 
range(self.n_corr_shells)] diff --git a/python/sumk_dft_tools.py b/python/triqs_dft_tools/sumk_dft_tools.py similarity index 94% rename from python/sumk_dft_tools.py rename to python/triqs_dft_tools/sumk_dft_tools.py index 589e800f..e11f42d7 100644 --- a/python/sumk_dft_tools.py +++ b/python/triqs_dft_tools/sumk_dft_tools.py @@ -21,10 +21,10 @@ import sys from types import * import numpy -from pytriqs.gf import * -import pytriqs.utility.mpi as mpi -from symmetry import * -from sumk_dft import SumkDFT +from triqs.gf import * +import triqs.utility.mpi as mpi +from .symmetry import * +from .sumk_dft import SumkDFT from scipy.integrate import * from scipy.interpolate import * @@ -79,7 +79,7 @@ class SumkDFTTools(SumkDFT): DOS projected to atoms and resolved into orbital contributions. """ if (mesh is None) and (not with_Sigma): - raise ValueError, "lattice_gf: Give the mesh=(om_min,om_max,n_points) for the lattice GfReFreq." + raise ValueError("lattice_gf: Give the mesh=(om_min,om_max,n_points) for the lattice GfReFreq.") if mesh is None: om_mesh = [x.real for x in self.Sigma_imp_w[0].mesh] om_min = om_mesh[0] @@ -111,7 +111,7 @@ class SumkDFTTools(SumkDFT): DOSproj_orb[ish][sp] = numpy.zeros( [n_om, dim, dim], numpy.complex_) - ikarray = numpy.array(range(self.n_k)) + ikarray = numpy.array(list(range(self.n_k))) for ik in mpi.slice_array(ikarray): G_latt_w = self.lattice_gf( @@ -217,7 +217,7 @@ class SumkDFTTools(SumkDFT): DOS projected to atoms and resolved into orbital contributions. """ if (mesh is None) and (not with_Sigma): - raise ValueError, "lattice_gf: Give the mesh=(om_min,om_max,n_points) for the lattice GfReFreq." 
+ raise ValueError("lattice_gf: Give the mesh=(om_min,om_max,n_points) for the lattice GfReFreq.") if mesh is None: om_mesh = [x.real for x in self.Sigma_imp_w[0].mesh] om_min = om_mesh[0] @@ -229,12 +229,12 @@ class SumkDFTTools(SumkDFT): om_mesh = numpy.linspace(om_min, om_max, n_om) spn = self.spin_block_names[self.SO] - gf_struct_parproj = [[(sp, range(self.shells[ish]['dim'])) for sp in spn] + gf_struct_parproj = [[(sp, list(range(self.shells[ish]['dim']))) for sp in spn] for ish in range(self.n_shells)] #print(self.proj_mat_csc.shape[2]) #print(spn) n_local_orbs = self.proj_mat_csc.shape[2] - gf_struct_parproj_all = [[(sp, range(n_local_orbs)) for sp in spn]] + gf_struct_parproj_all = [[(sp, list(range(n_local_orbs))) for sp in spn]] glist_all = [GfReFreq(indices=inner, window=(om_min, om_max), n_points=n_om) for block, inner in gf_struct_parproj_all[0]] @@ -251,7 +251,7 @@ class SumkDFTTools(SumkDFT): DOSproj_orb[sp] = numpy.zeros( [n_om, dim, dim], numpy.complex_) - ikarray = numpy.array(range(self.n_k)) + ikarray = numpy.array(list(range(self.n_k))) for ik in mpi.slice_array(ikarray): G_latt_w = self.lattice_gf( @@ -352,7 +352,7 @@ class SumkDFTTools(SumkDFT): self.symmpar = Symmetry(self.hdf_file, subgroup=self.symmpar_data) if (mesh is None) and (not with_Sigma): - raise ValueError, "lattice_gf: Give the mesh=(om_min,om_max,n_points) for the lattice GfReFreq." 
+ raise ValueError("lattice_gf: Give the mesh=(om_min,om_max,n_points) for the lattice GfReFreq.") if mesh is None: om_mesh = [x.real for x in self.Sigma_imp_w[0].mesh] om_min = om_mesh[0] @@ -365,7 +365,7 @@ class SumkDFTTools(SumkDFT): G_loc = [] spn = self.spin_block_names[self.SO] - gf_struct_parproj = [[(sp, range(self.shells[ish]['dim'])) for sp in spn] + gf_struct_parproj = [[(sp, list(range(self.shells[ish]['dim']))) for sp in spn] for ish in range(self.n_shells)] for ish in range(self.n_shells): glist = [GfReFreq(indices=inner, window=(om_min, om_max), n_points=n_om) @@ -386,7 +386,7 @@ class SumkDFTTools(SumkDFT): DOSproj_orb[ish][sp] = numpy.zeros( [n_om, dim, dim], numpy.complex_) - ikarray = numpy.array(range(self.n_k)) + ikarray = numpy.array(list(range(self.n_k))) for ik in mpi.slice_array(ikarray): G_latt_w = self.lattice_gf( @@ -526,12 +526,12 @@ class SumkDFTTools(SumkDFT): if not ishell is None: gf_struct_parproj = [ - (sp, range(self.shells[ishell]['dim'])) for sp in spn] + (sp, list(range(self.shells[ishell]['dim']))) for sp in spn] G_loc = BlockGf(name_block_generator=[(block, GfReFreq(indices=inner, mesh=self.Sigma_imp_w[0].mesh)) for block, inner in gf_struct_parproj], make_copies=False) G_loc.zero() - ikarray = numpy.array(range(self.n_k)) + ikarray = numpy.array(list(range(self.n_k))) for ik in mpi.slice_array(ikarray): G_latt_w = self.lattice_gf( @@ -653,7 +653,7 @@ class SumkDFTTools(SumkDFT): for ish in range(self.n_shells)] for isp in range(len(spn))] # Set up G_loc - gf_struct_parproj = [[(sp, range(self.shells[ish]['dim'])) for sp in spn] + gf_struct_parproj = [[(sp, list(range(self.shells[ish]['dim']))) for sp in spn] for ish in range(self.n_shells)] if with_Sigma: G_loc = [BlockGf(name_block_generator=[(block, GfImFreq(indices=inner, mesh=self.Sigma_imp_iw[0].mesh)) @@ -667,7 +667,7 @@ class SumkDFTTools(SumkDFT): for ish in range(self.n_shells): G_loc[ish].zero() - ikarray = numpy.array(range(self.n_k)) + ikarray = 
numpy.array(list(range(self.n_k))) for ik in mpi.slice_array(ikarray): G_latt_iw = self.lattice_gf( @@ -828,10 +828,10 @@ class SumkDFTTools(SumkDFT): if mpi.is_master_node(): ar = HDFArchive(self.hdf_file, 'r') if not (self.transp_data in ar): - raise IOError, "transport_distribution: No %s subgroup in hdf file found! Call convert_transp_input first." % self.transp_data + raise IOError("transport_distribution: No %s subgroup in hdf file found! Call convert_transp_input first." % self.transp_data) # check if outputs file was converted if not ('n_symmetries' in ar['dft_misc_input']): - raise IOError, "transport_distribution: n_symmetries missing. Check if case.outputs file is present and call convert_misc_input() or convert_dft_input()." + raise IOError("transport_distribution: n_symmetries missing. Check if case.outputs file is present and call convert_misc_input() or convert_dft_input().") self.read_transport_input_from_hdf() @@ -894,7 +894,7 @@ class SumkDFTTools(SumkDFT): for i, g in self.Sigma_imp_w[icrsh]: for iL in g.indices[0]: for iR in g.indices[0]: - for iom in xrange(n_om): + for iom in range(n_om): g.data[iom, int(iL), int(iR)] = Sigma_save[ i].data[ioffset + iom, int(iL), int(iR)] else: @@ -913,18 +913,18 @@ class SumkDFTTools(SumkDFT): self.Om_mesh = iOm_mesh * d_omega if mpi.is_master_node(): - print "Chemical potential: ", mu - print "Using n_om = %s points in the energy_window [%s,%s]" % (n_om, self.omega[0], self.omega[-1]), - print "where the omega vector is:" - print self.omega - print "Calculation requested for Omega mesh: ", numpy.array(Om_mesh) - print "Omega mesh automatically repined to: ", self.Om_mesh + print("Chemical potential: ", mu) + print("Using n_om = %s points in the energy_window [%s,%s]" % (n_om, self.omega[0], self.omega[-1]), end=' ') + print("where the omega vector is:") + print(self.omega) + print("Calculation requested for Omega mesh: ", numpy.array(Om_mesh)) + print("Omega mesh automatically repined to: ", self.Om_mesh) 
self.Gamma_w = {direction: numpy.zeros( (len(self.Om_mesh), n_om), dtype=numpy.float_) for direction in self.directions} # Sum over all k-points - ikarray = numpy.array(range(self.n_k)) + ikarray = numpy.array(list(range(self.n_k))) for ik in mpi.slice_array(ikarray): # Calculate G_w for ik and initialize A_kw G_w = self.lattice_gf(ik, mu, iw_or_w="w", beta=beta, @@ -938,7 +938,7 @@ class SumkDFTTools(SumkDFT): A_kw[isp] = copy.deepcopy(G_w[self.spin_block_names[self.SO][ isp]].data.swapaxes(0, 1).swapaxes(1, 2)) # calculate A(k,w) for each frequency - for iw in xrange(n_om): + for iw in range(n_om): A_kw[isp][:, :, iw] = -1.0 / (2.0 * numpy.pi * 1j) * ( A_kw[isp][:, :, iw] - numpy.conjugate(numpy.transpose(A_kw[isp][:, :, iw]))) @@ -963,7 +963,7 @@ class SumkDFTTools(SumkDFT): # calculate Gamma_w for each direction from the velocities # vel_R and the spectral function A_kw for direction in self.directions: - for iw in xrange(n_om): + for iw in range(n_om): for iq in range(len(self.Om_mesh)): if(iw + iOm_mesh[iq] >= n_om or self.omega[iw] < -self.Om_mesh[iq] + energy_window[0] or self.omega[iw] > self.Om_mesh[iq] + energy_window[1]): continue @@ -1033,7 +1033,7 @@ class SumkDFTTools(SumkDFT): else: # rectangular integration for w-grid (orignal implementation) d_w = self.omega[1] - self.omega[0] - for iw in xrange(self.Gamma_w[direction].shape[1]): + for iw in range(self.Gamma_w[direction].shape[1]): A += A_int[iw] * d_w A = A * numpy.pi * (2.0 - self.SP) else: @@ -1083,16 +1083,16 @@ class SumkDFTTools(SumkDFT): (n_q,), numpy.nan) for direction in self.directions} for direction in self.directions: - for iq in xrange(n_q): + for iq in range(n_q): A0[direction][iq] = self.transport_coefficient( direction, iq=iq, n=0, beta=beta, method=method) A1[direction][iq] = self.transport_coefficient( direction, iq=iq, n=1, beta=beta, method=method) A2[direction][iq] = self.transport_coefficient( direction, iq=iq, n=2, beta=beta, method=method) - print "A_0 in direction %s for 
Omega = %.2f %e a.u." % (direction, self.Om_mesh[iq], A0[direction][iq]) - print "A_1 in direction %s for Omega = %.2f %e a.u." % (direction, self.Om_mesh[iq], A1[direction][iq]) - print "A_2 in direction %s for Omega = %.2f %e a.u." % (direction, self.Om_mesh[iq], A2[direction][iq]) + print("A_0 in direction %s for Omega = %.2f %e a.u." % (direction, self.Om_mesh[iq], A0[direction][iq])) + print("A_1 in direction %s for Omega = %.2f %e a.u." % (direction, self.Om_mesh[iq], A1[direction][iq])) + print("A_2 in direction %s for Omega = %.2f %e a.u." % (direction, self.Om_mesh[iq], A2[direction][iq])) if ~numpy.isnan(A1[direction][iq]): # Seebeck and kappa are overwritten if there is more than one Omega = # 0 in Om_mesh @@ -1102,11 +1102,11 @@ class SumkDFTTools(SumkDFT): self.kappa[direction] *= 293178.0 self.optic_cond[direction] = beta * \ A0[direction] * 10700.0 / numpy.pi - for iq in xrange(n_q): - print "Conductivity in direction %s for Omega = %.2f %f x 10^4 Ohm^-1 cm^-1" % (direction, self.Om_mesh[iq], self.optic_cond[direction][iq]) + for iq in range(n_q): + print("Conductivity in direction %s for Omega = %.2f %f x 10^4 Ohm^-1 cm^-1" % (direction, self.Om_mesh[iq], self.optic_cond[direction][iq])) if not (numpy.isnan(A1[direction][iq])): - print "Seebeck in direction %s for Omega = 0.00 %f x 10^(-6) V/K" % (direction, self.seebeck[direction]) - print "kappa in direction %s for Omega = 0.00 %f W/(m * K)" % (direction, self.kappa[direction]) + print("Seebeck in direction %s for Omega = 0.00 %f x 10^(-6) V/K" % (direction, self.seebeck[direction])) + print("kappa in direction %s for Omega = 0.00 %f W/(m * K)" % (direction, self.kappa[direction])) return self.optic_cond, self.seebeck, self.kappa diff --git a/python/symmetry.py b/python/triqs_dft_tools/symmetry.py similarity index 95% rename from python/symmetry.py rename to python/triqs_dft_tools/symmetry.py index 6e1ee825..56df919d 100644 --- a/python/symmetry.py +++ b/python/triqs_dft_tools/symmetry.py @@ -23,9 
+23,9 @@ import copy import numpy from types import * -from pytriqs.gf import * -from pytriqs.archive import * -import pytriqs.utility.mpi as mpi +from triqs.gf import * +from h5 import * +import triqs.utility.mpi as mpi class Symmetry: @@ -48,8 +48,7 @@ class Symmetry: the data is stored at the root of the hdf5 archive. """ - assert type( - hdf_file) == StringType, "Symmetry: hdf_file must be a filename." + assert isinstance(hdf_file, str), "Symmetry: hdf_file must be a filename." self.hdf_file = hdf_file things_to_read = ['n_symm', 'n_atoms', 'perm', 'orbits', 'SO', 'SP', 'time_inv', 'mat', 'mat_tinv'] @@ -118,7 +117,7 @@ class Symmetry: # to be complex since self.mat is complex! symm_obj = [copy.deepcopy(obj[i]) for i in range(len(obj))] for iorb in range(self.n_orbits): - if type(symm_obj[iorb]) == DictType: + if isinstance(symm_obj[iorb], dict): for ii in symm_obj[iorb]: symm_obj[iorb][ii] *= 0.0 else: @@ -143,7 +142,7 @@ class Symmetry: else: - if type(obj[iorb]) == DictType: + if isinstance(obj[iorb], dict): for ii in obj[iorb]: if self.time_inv[i_symm] == 0: symm_obj[jorb][ii] += numpy.dot(numpy.dot(self.mat[i_symm][iorb], obj[iorb][ii]), @@ -172,7 +171,7 @@ class Symmetry: # symm_obj[iorb] /= 2.0 # # else: -# if type(symm_obj[iorb]) == DictType: +# if isinstance(symm_obj[iorb], dict): # for ii in symm_obj[iorb]: # symm_obj[iorb][ii] += numpy.dot(numpy.dot(self.mat_tinv[iorb],symm_obj[iorb][ii].conjugate()), # self.mat_tinv[iorb].transpose().conjugate()) diff --git a/python/trans_basis.py b/python/triqs_dft_tools/trans_basis.py similarity index 97% rename from python/trans_basis.py rename to python/triqs_dft_tools/trans_basis.py index 7bd2ee4e..baaa5857 100644 --- a/python/trans_basis.py +++ b/python/triqs_dft_tools/trans_basis.py @@ -1,8 +1,8 @@ from triqs_dft_tools.sumk_dft import * from triqs_dft_tools.converters import Wien2kConverter -from pytriqs.gf import * -from pytriqs.archive import * -import pytriqs.utility.mpi as mpi +from triqs.gf import * 
+from h5 import * +import triqs.utility.mpi as mpi import numpy import copy @@ -130,7 +130,7 @@ class TransBasis: # transform the CTQMC blocks to the full matrix: # ish is the index of the inequivalent shell corresponding to icrsh ish = self.SK.corr_to_inequiv[0] - for block, inner in self.gf_struct_solver[ish].iteritems(): + for block, inner in self.gf_struct_solver[ish].items(): for ind1 in inner: for ind2 in inner: gfrotated[self.SK.solver_to_sumk_block[ish][block]][ @@ -143,7 +143,7 @@ class TransBasis: gfreturn = gf_to_rot.copy() # Put back into CTQMC basis: - for block, inner in self.gf_struct_solver[ish].iteritems(): + for block, inner in self.gf_struct_solver[ish].items(): for ind1 in inner: for ind2 in inner: gfreturn[block][ind1, ind2] << gfrotated[ diff --git a/python/update_archive.py b/python/triqs_dft_tools/update_archive.py similarity index 88% rename from python/update_archive.py rename to python/triqs_dft_tools/update_archive.py index c2af8c69..cc9db916 100644 --- a/python/update_archive.py +++ b/python/triqs_dft_tools/update_archive.py @@ -1,19 +1,19 @@ -from pytriqs.archive import HDFArchive +from h5 import HDFArchive import h5py import sys import numpy import subprocess if len(sys.argv) < 2: - print "Usage: python update_archive.py old_archive [v1.0|v1.2]" + print("Usage: python update_archive.py old_archive [v1.0|v1.2]") sys.exit() -print """ +print(""" This script is an attempt to update your archive to TRIQS 1.2. Please keep a copy of your old archive as this script is ** not guaranteed ** to work for your archive. If you encounter any problem please report it on github! 
-""" +""") def convert_shells(shells): @@ -63,26 +63,26 @@ A = h5py.File(filename) old_to_new = {'SumK_LDA': 'dft_input', 'SumK_LDA_ParProj': 'dft_parproj_input', 'SymmCorr': 'dft_symmcorr_input', 'SymmPar': 'dft_symmpar_input', 'SumK_LDA_Bands': 'dft_bands_input'} -for old, new in old_to_new.iteritems(): - if old not in A.keys(): +for old, new in old_to_new.items(): + if old not in list(A.keys()): continue - print "Changing %s to %s ..." % (old, new) + print("Changing %s to %s ..." % (old, new)) A.copy(old, new) del(A[old]) # Move output items from dft_input to user_data move_to_output = ['chemical_potential', 'dc_imp', 'dc_energ'] for obj in move_to_output: - if obj in A['dft_input'].keys(): + if obj in list(A['dft_input'].keys()): if 'user_data' not in A: A.create_group('user_data') - print "Moving %s to user_data ..." % obj + print("Moving %s to user_data ..." % obj) A.copy('dft_input/' + obj, 'user_data/' + obj) del(A['dft_input'][obj]) # Delete obsolete quantities to_delete = ['gf_struct_solver', 'map_inv', 'map', 'deg_shells', 'h_field'] for obj in to_delete: - if obj in A['dft_input'].keys(): + if obj in list(A['dft_input'].keys()): del(A['dft_input'][obj]) if from_v == 'v1.0': @@ -109,11 +109,11 @@ if 'n_inequiv_shells' not in A['dft_input']: # Rename variables groups = ['dft_symmcorr_input', 'dft_symmpar_input'] for group in groups: - if group not in A.keys(): + if group not in list(A.keys()): continue if 'n_s' not in A[group]: continue - print "Changing n_s to n_symm ..." + print("Changing n_s to n_symm ...") A[group].move('n_s', 'n_symm') # Convert orbits to list of dicts orbits_old = HDFArchive(filename, 'r')[group]['orbits'] @@ -125,11 +125,11 @@ for group in groups: groups = ['dft_parproj_input'] for group in groups: - if group not in A.keys(): + if group not in list(A.keys()): continue if 'proj_mat_pc' not in A[group]: continue - print "Changing proj_mat_pc to proj_mat_all ..." 
+ print("Changing proj_mat_pc to proj_mat_all ...") A[group].move('proj_mat_pc', 'proj_mat_all') A.close() @@ -137,6 +137,6 @@ A.close() # Repack to reclaim disk space retcode = subprocess.call(["h5repack", "-i%s" % filename, "-otemphgfrt.h5"]) if retcode != 0: - print "h5repack failed!" + print("h5repack failed!") else: subprocess.call(["mv", "-f", "temphgfrt.h5", "%s" % filename]) diff --git a/python/version.py.in b/python/triqs_dft_tools/version.py.in similarity index 73% rename from python/version.py.in rename to python/triqs_dft_tools/version.py.in index ea412282..06a9e558 100644 --- a/python/version.py.in +++ b/python/triqs_dft_tools/version.py.in @@ -3,6 +3,9 @@ # TRIQS: a Toolbox for Research in Interacting Quantum Systems # # Copyright (C) 2011 by M. Aichhorn, L. Pourovskii, V. Vildosola +# Copyright (C) 2016-2018, N. Wentzell +# Copyright (C) 2018-2019, Simons Foundation +# author: N. Wentzell # # TRIQS is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software @@ -19,12 +22,12 @@ # ################################################################################ -version = "@DFT_TOOLS_VERSION@" +version = "@PROJECT_VERSION@" triqs_hash = "@TRIQS_GIT_HASH@" -dft_tools_hash = "@DFT_TOOLS_GIT_HASH@" +@PROJECT_NAME@_hash = "@PROJECT_GIT_HASH@" def show_version(): - print "\nYou are using the dft_tools version %s\n"%version + print("\nYou are using @PROJECT_NAME@ version %s\n"%version) def show_git_hash(): - print "\nYou are using the dft_tools git hash %s based on triqs git hash %s\n"%(dft_tools_hash, triqs_hash) + print("\nYou are using @PROJECT_NAME@ git hash %s based on triqs git hash %s\n"%("@PROJECT_GIT_HASH@", triqs_hash)) diff --git a/share/CMakeLists.txt b/share/CMakeLists.txt new file mode 100644 index 00000000..f6da5199 --- /dev/null +++ b/share/CMakeLists.txt @@ -0,0 +1,28 @@ +add_subdirectory(cmake) + +if(NOT CMAKE_INSTALL_PREFIX STREQUAL TRIQS_ROOT AND NOT 
IS_SUBPROJECT) + + if(PythonSupport) + set(EXPORT_PYTHON_PATH "export PYTHONPATH=${CMAKE_INSTALL_PREFIX}/${CPP2PY_PYTHON_LIB_DEST_ROOT}:$PYTHONPATH") + set(MODFILE_PYTHON_PATH "prepend-path PYTHONPATH $root/${CPP2PY_PYTHON_LIB_DEST_ROOT}") + endif() + + configure_file(${PROJECT_NAME}.modulefile.in ${PROJECT_NAME}.modulefile @ONLY) + configure_file(${PROJECT_NAME}vars.sh.in ${PROJECT_NAME}vars.sh @ONLY) + + install( + FILES + ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}.modulefile + ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}vars.sh + DESTINATION share + ) + + message(STATUS "***************************************************************") + message(STATUS "* Custom install Location. Use: ") + message(STATUS "* ") + message(STATUS "* source ${CMAKE_INSTALL_PREFIX}/share/${PROJECT_NAME}vars.sh ") + message(STATUS "* ") + message(STATUS "* to set up the environment variables ") + message(STATUS "***************************************************************") + +endif() diff --git a/share/cmake/CMakeLists.txt b/share/cmake/CMakeLists.txt new file mode 100644 index 00000000..dad532b3 --- /dev/null +++ b/share/cmake/CMakeLists.txt @@ -0,0 +1,10 @@ +configure_file(${PROJECT_NAME}-config.cmake.in ${PROJECT_NAME}-config.cmake @ONLY) +configure_file(${PROJECT_NAME}-config-version.cmake.in ${PROJECT_NAME}-config-version.cmake @ONLY) +install( + FILES + ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}-config.cmake + ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}-config-version.cmake + DESTINATION lib/cmake/${PROJECT_NAME} +) + +install(EXPORT ${PROJECT_NAME}-targets NAMESPACE ${PROJECT_NAME}:: DESTINATION lib/cmake/${PROJECT_NAME}) diff --git a/share/cmake/Modules/FindSphinx.cmake b/share/cmake/Modules/FindSphinx.cmake new file mode 100644 index 00000000..b8cc28a8 --- /dev/null +++ b/share/cmake/Modules/FindSphinx.cmake @@ -0,0 +1,37 @@ +# Copyright Olivier Parcollet 2017. +# Distributed under the Boost Software License, Version 1.0. 
+# (See accompanying file LICENSE_1_0.txt or copy at +# http://www.boost.org/LICENSE_1_0.txt) + +# This module looks for sphinx documentation tool +# and define a function that prepares the Makefile for sphinx-build + +find_program(SPHINXBUILD_EXECUTABLE + NAMES sphinx-build + PATHS /usr/bin /opt/local/bin /usr/local/bin #opt/sphinx-doc/bin + PATH_SUFFIXES bin + ) + +if (NOT SPHINXBUILD_EXECUTABLE) + message(FATAL_ERROR "I cannot find sphinx to build the triqs documentation") +endif() + +execute_process( + COMMAND "${SPHINXBUILD_EXECUTABLE}" --version + OUTPUT_VARIABLE SPHINXBUILD_VERSION + ERROR_VARIABLE SPHINXBUILD_VERSION + ) +if (SPHINXBUILD_VERSION MATCHES "[Ss]phinx.* ([0-9]+\\.[0-9]+(\\.|b)[0-9]+)") + set (SPHINXBUILD_VERSION "${CMAKE_MATCH_1}") +endif() + +if (SPHINXBUILD_VERSION VERSION_EQUAL 1.6.3) + message(FATAL_ERROR "sphinx-build found at ${SPHINXBUILD_EXECUTABLE} but version 1.6.3 has a bug. Upgrade sphinx.") +else() + message(STATUS "sphinx-build program found at ${SPHINXBUILD_EXECUTABLE} with version ${SPHINXBUILD_VERSION}") +endif () + +include(FindPackageHandleStandardArgs) +FIND_PACKAGE_HANDLE_STANDARD_ARGS(SPHINX DEFAULT_MSG SPHINXBUILD_EXECUTABLE) + +mark_as_advanced( SPHINXBUILD_EXECUTABLE ) diff --git a/share/cmake/extract_flags.cmake b/share/cmake/extract_flags.cmake new file mode 100644 index 00000000..dfcf67b0 --- /dev/null +++ b/share/cmake/extract_flags.cmake @@ -0,0 +1,102 @@ +################################################################################### +# +# TRIQS: a Toolbox for Research in Interacting Quantum Systems +# +# Copyright (C) 2019-2020 Simons Foundation +# author: N. Wentzell +# +# TRIQS is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. 
+# +# TRIQS is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more +# details. +# +# You should have received a copy of the GNU General Public License along with +# TRIQS. If not, see . +# +################################################################################### + +# Recursively fetch all targets that the interface of a target depends upon +macro(get_all_interface_targets name target) + get_property(TARGET_LINK_LIBRARIES TARGET ${target} PROPERTY INTERFACE_LINK_LIBRARIES) + foreach(lib IN LISTS TARGET_LINK_LIBRARIES) + if(TARGET ${lib}) + # Append to list + list(APPEND ${name}_INTERFACE_TARGETS ${lib}) + # Recure into target dependencies + get_all_interface_targets(${name} ${lib}) + endif() + endforeach() +endmacro() + +# Extract the property from the target and recursively from all targets it depends upon +macro(get_property_recursive) + cmake_parse_arguments(get_property_recursive "" "TARGET" "PROPERTY" ${ARGN}) + set(target ${get_property_recursive_TARGET}) + set(property ${get_property_recursive_PROPERTY}) + get_all_interface_targets(${target} ${target}) + foreach(t IN LISTS ${target}_INTERFACE_TARGETS ITEMS ${target}) + get_property(p TARGET ${t} PROPERTY ${property}) + list(APPEND ${ARGV0} ${p}) + endforeach() + # Clean duplicates and any occurance of '/usr/include' dirs + if(${ARGV0}) + list(REMOVE_DUPLICATES ${ARGV0}) + list(REMOVE_ITEM ${ARGV0} /usr/include) + endif() +endmacro() + +# Recursively fetch all compiler flags attached to the interface of a target +macro(extract_flags) + + cmake_parse_arguments(ARG "BUILD_INTERFACE" "" "" ${ARGN}) + + set(target ${ARGV0}) + unset(${target}_CXXFLAGS) + unset(${target}_LDFLAGS) + + get_property_recursive(opts TARGET ${target} PROPERTY INTERFACE_COMPILE_OPTIONS) + foreach(opt ${opts}) + set(${target}_LDFLAGS "${${target}_LDFLAGS} ${opt}") + 
set(${target}_CXXFLAGS "${${target}_CXXFLAGS} ${opt}") + endforeach() + + get_property_recursive(defs TARGET ${target} PROPERTY INTERFACE_COMPILE_DEFINITIONS) + foreach(def ${defs}) + set(${target}_CXXFLAGS "${${target}_CXXFLAGS} -D${def}") + endforeach() + + get_property_recursive(inc_dirs TARGET ${target} PROPERTY INTERFACE_INCLUDE_DIRECTORIES) + get_property_recursive(sys_inc_dirs TARGET ${target} PROPERTY INTERFACE_SYSTEM_INCLUDE_DIRECTORIES) + if(inc_dirs) + list(REMOVE_ITEM sys_inc_dirs ${inc_dirs}) + endif() + foreach(dir ${inc_dirs}) + set(${target}_CXXFLAGS "${${target}_CXXFLAGS} -I${dir}") + endforeach() + foreach(dir ${sys_inc_dirs}) + set(${target}_CXXFLAGS "${${target}_CXXFLAGS} -isystem${dir}") + endforeach() + + get_property_recursive(libs TARGET ${target} PROPERTY INTERFACE_LINK_LIBRARIES) + foreach(lib ${libs}) + if(NOT TARGET ${lib} AND NOT IS_DIRECTORY ${lib}) + set(${target}_LDFLAGS "${${target}_LDFLAGS} ${lib}") + endif() + endforeach() + + # We have to replace generator expressions explicitly + if(ARG_BUILD_INTERFACE) + string(REGEX REPLACE "\\$" "\\1" ${target}_LDFLAGS "${${target}_LDFLAGS}") + string(REGEX REPLACE "\\$" "\\1" ${target}_CXXFLAGS "${${target}_CXXFLAGS}") + else() + string(REGEX REPLACE "\\$" "\\1" ${target}_LDFLAGS "${${target}_LDFLAGS}") + string(REGEX REPLACE "\\$" "\\1" ${target}_CXXFLAGS "${${target}_CXXFLAGS}") + endif() + string(REGEX REPLACE " [^ ]*\\$<[^ ]*:[^>]*>" "" ${target}_LDFLAGS "${${target}_LDFLAGS}") + string(REGEX REPLACE " [^ ]*\\$<[^ ]*:[^>]*>" "" ${target}_CXXFLAGS "${${target}_CXXFLAGS}") +endmacro() diff --git a/share/cmake/triqs_dft_tools-config-version.cmake.in b/share/cmake/triqs_dft_tools-config-version.cmake.in new file mode 100644 index 00000000..c4c43a4b --- /dev/null +++ b/share/cmake/triqs_dft_tools-config-version.cmake.in @@ -0,0 +1,13 @@ +set(PACKAGE_VERSION @PROJECT_VERSION@) + +if (PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION) + set(PACKAGE_VERSION_EXACT TRUE) +endif() + +if 
(PACKAGE_FIND_VERSION VERSION_LESS PACKAGE_VERSION) + set(PACKAGE_VERSION_COMPATIBLE TRUE) +endif() + +if (PACKAGE_FIND_VERSION VERSION_GREATER PACKAGE_VERSION) + set(PACKAGE_VERSION_UNSUITABLE TRUE) +endif() diff --git a/share/cmake/triqs_dft_tools-config.cmake.in b/share/cmake/triqs_dft_tools-config.cmake.in new file mode 100644 index 00000000..e9101328 --- /dev/null +++ b/share/cmake/triqs_dft_tools-config.cmake.in @@ -0,0 +1,32 @@ +# This file allows other CMake Projects to find us +# We provide general project information +# and reestablish the exported CMake Targets + +# Multiple inclusion guard +if(NOT @PROJECT_NAME@_FOUND) +set(@PROJECT_NAME@_FOUND TRUE) +set_property(GLOBAL PROPERTY @PROJECT_NAME@_FOUND TRUE) + +# version +set(@PROJECT_NAME@_VERSION @PROJECT_VERSION@ CACHE STRING "@PROJECT_NAME@ version") +set(@PROJECT_NAME@_GIT_HASH @PROJECT_GIT_HASH@ CACHE STRING "@PROJECT_NAME@ git hash") + +# Root of the installation +set(@PROJECT_NAME@_ROOT @CMAKE_INSTALL_PREFIX@ CACHE STRING "@PROJECT_NAME@ root directory") + +## Find the target dependencies +#include(CMakeFindDependencyMacro) +#find_dependency(... HINTS @CMAKE_INSTALL_PREFIX@) + +# Include the exported targets of this project +include(@CMAKE_INSTALL_PREFIX@/lib/cmake/@PROJECT_NAME@/@PROJECT_NAME@-targets.cmake) + +message(STATUS "Found @PROJECT_NAME@-config.cmake with version @PROJECT_VERSION@, hash = @PROJECT_GIT_HASH@") + +# Was the Project built with Documentation? +set(@PROJECT_NAME@_WITH_DOCUMENTATION @Build_Documentation@ CACHE BOOL "Was @PROJECT_NAME@ build with documentation?") + +# Was the Project built with PythonSupport? 
+set(@PROJECT_NAME@_WITH_PYTHON_SUPPORT @PythonSupport@ CACHE BOOL "Was @PROJECT_NAME@ build with python support?") + +endif() diff --git a/share/replace_and_rename.py b/share/replace_and_rename.py new file mode 100755 index 00000000..b183fb40 --- /dev/null +++ b/share/replace_and_rename.py @@ -0,0 +1,45 @@ +#!/usr/bin/env python2 + +import sys +import os +import glob + +if len(sys.argv) != 2: + print("Please pass the application name") + sys.exit() + +app_name = str(sys.argv[1]).lower() +capital_name = app_name.upper() + +# Move app4triqs directories if necessary +if os.path.isdir("c++/app4triqs"): os.rename("c++/app4triqs", "c++/" + app_name) +if os.path.isdir("python/app4triqs"): os.rename("python/app4triqs", "python/" + app_name) + +# Ignore these files +ignore_lst = [".git/", "replace_and_rename.py", "squash_history.sh"] + +# Find the root directory of app4triqs +app4triqs_root = os.path.abspath(os.path.dirname(__file__) + "/..") + +# Recurse over all subdirectories and files +for root, dirs, files in os.walk(app4triqs_root): + + for fname in files: + fpath = os.path.join(root, fname) + + # Ignore certain files / directories + if any(it in fpath for it in ignore_lst): continue + + if os.path.isfile(fpath): + # Rename files containing app4triqs in their filename + if "app4triqs" in fname: + new_fpath = os.path.join(root, fname.replace("app4triqs", app_name)) + os.rename(fpath, new_fpath) + fpath = new_fpath + + # Replace app4triqs and APP4TRIQS in all files + with open(fpath, 'r') as f: + s = f.read() + if "app4triqs" in s or "APP4TRIQS" in s: + with open(fpath, 'w') as f: + f.write(s.replace("app4triqs", app_name).replace("APP4TRIQS", capital_name)) diff --git a/share/sitecustomize.py b/share/sitecustomize.py new file mode 100644 index 00000000..ebd15431 --- /dev/null +++ b/share/sitecustomize.py @@ -0,0 +1,8 @@ +def application_triqs_import(name,*args,**kwargs): + if name.startswith('@package_name@'): + name = name[len('@package_name@')+1:] + return 
builtin_import(name,*args,**kwargs) + +import builtins +builtins.__import__, builtin_import = application_triqs_import, builtins.__import__ + diff --git a/share/squash_history.sh b/share/squash_history.sh new file mode 100755 index 00000000..8c03c08f --- /dev/null +++ b/share/squash_history.sh @@ -0,0 +1,6 @@ +#!/usr/bin/env bash + +git reset $(git commit-tree HEAD\^{tree} -m "Initialize project from github.com/triqs/app4triqs@$(git rev-parse --short HEAD)") +git merge --allow-unrelated-histories -s ours HEAD@{1} -m "Track app4triqs skeleton" +git remote rm origin +git remote add app4triqs_remote https://github.com/triqs/app4triqs diff --git a/share/triqs_dft_tools.modulefile.in b/share/triqs_dft_tools.modulefile.in new file mode 100644 index 00000000..2c9befe8 --- /dev/null +++ b/share/triqs_dft_tools.modulefile.in @@ -0,0 +1,39 @@ +#%Module1.0 +# +# To be installed as ${MODULEPATH}/@PROJECT_NAME@/@PROJECT_VERSION@ + +set name @PROJECT_NAME@ +set version @PROJECT_VERSION@ +set root @CMAKE_INSTALL_PREFIX@ +set git_hash @PROJECT_GIT_HASH@ + +set url "https://github.com/TRIQS/@PROJECT_NAME@" +set description "An example application using cpp2py and triqs." + +module-whatis "$description" + +proc ModulesHelp { } { + global description url version git_hash + puts stderr "Description: $description" + puts stderr "URL: $url" + puts stderr "Version: $version" + puts stderr "Git hash: $git_hash" +} + +# You may need to edit the next line if the triqs module +# is installed under a different name in your setup. 
+prereq triqs/@TRIQS_VERSION@ + +# Only one version of @PROJECT_NAME@ can be loaded at a time +conflict $name + +setenv @PROJECT_NAME@_ROOT $root +setenv @PROJECT_NAME@_VERSION $version +setenv @PROJECT_NAME@_GIT_HASH $git_hash + +prepend-path PATH $root/bin +prepend-path CPLUS_INCLUDE_PATH $root/include +prepend-path LIBRARY_PATH $root/lib +prepend-path LD_LIBRARY_PATH $root/lib +prepend-path CMAKE_PREFIX_PATH $root +@MODFILE_PYTHON_PATH@ diff --git a/share/triqs_dft_toolsvars.sh.in b/share/triqs_dft_toolsvars.sh.in new file mode 100644 index 00000000..492a7a26 --- /dev/null +++ b/share/triqs_dft_toolsvars.sh.in @@ -0,0 +1,10 @@ +# Source this in your environment. + +export @PROJECT_NAME@_ROOT=@CMAKE_INSTALL_PREFIX@ + +export CPLUS_INCLUDE_PATH=@CMAKE_INSTALL_PREFIX@/include:$CPLUS_INCLUDE_PATH +export PATH=@CMAKE_INSTALL_PREFIX@/bin:$PATH +export LIBRARY_PATH=@CMAKE_INSTALL_PREFIX@/lib:$LIBRARY_PATH +export LD_LIBRARY_PATH=@CMAKE_INSTALL_PREFIX@/lib:$LD_LIBRARY_PATH +export CMAKE_PREFIX_PATH=@CMAKE_INSTALL_PREFIX@:$CMAKE_PREFIX_PATH +@EXPORT_PYTHON_PATH@ diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt index fd3de192..9eaefe3d 100644 --- a/test/CMakeLists.txt +++ b/test/CMakeLists.txt @@ -1,25 +1,6 @@ -# Copy h5 files to binary dir -FILE(GLOB all_h5_files RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} *.h5) -file(COPY ${CMAKE_CURRENT_SOURCE_DIR}/${all_h5_files} DESTINATION ${CMAKE_CURRENT_BINARY_DIR}) -# Copy other files -FILE(COPY SrVO3.pmat SrVO3.struct SrVO3.outputs SrVO3.oubwin SrVO3.ctqmcout SrVO3.symqmc SrVO3.sympar SrVO3.parproj SrIrO3_rot.h5 hk_convert_hamiltonian.hk LaVO3-Pnma_hr.dat LaVO3-Pnma.inp DESTINATION ${CMAKE_CURRENT_BINARY_DIR}) +add_subdirectory(c++) -# List all tests -set(all_tests wien2k_convert hk_convert w90_convert sumkdft_basic srvo3_Gloc srvo3_transp sigma_from_file blockstructure analyse_block_structure_from_gf analyse_block_structure_from_gf2 basis_transformation) - -set(python_executable python) - -if(${TEST_COVERAGE}) - 
set(python_executable ${PYTHON_COVERAGE} run --append --source "${CMAKE_BINARY_DIR}/python" ) +if(PythonSupport) + add_subdirectory(python) + add_subdirectory(python/plovasp) endif() - -foreach(t ${all_tests}) - add_test(NAME ${t} COMMAND ${python_executable} ${CMAKE_CURRENT_SOURCE_DIR}/${t}.py) -endforeach() - -# Set the PythonPath : put the build dir first (in case there is an installed version). -set_property(TEST ${all_tests} PROPERTY ENVIRONMENT PYTHONPATH=${CMAKE_BINARY_DIR}/python:$ENV{PYTHONPATH} ) - - -# VASP converter tests -add_subdirectory(plovasp) diff --git a/test/blockstructure.ref.h5 b/test/blockstructure.ref.h5 deleted file mode 100644 index 4b2ab404..00000000 Binary files a/test/blockstructure.ref.h5 and /dev/null differ diff --git a/test/c++/CMakeLists.txt b/test/c++/CMakeLists.txt new file mode 100644 index 00000000..36466c39 --- /dev/null +++ b/test/c++/CMakeLists.txt @@ -0,0 +1,35 @@ +# Copy h5 files to binary dir +file(GLOB_RECURSE all_h5_ref_files RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} *.ref.h5) +foreach(file ${all_h5_ref_files}) + configure_file(${file} ${file} COPYONLY) +endforeach() + +# List of all tests +file(GLOB_RECURSE all_tests RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} *.cpp) + +foreach(test ${all_tests}) + get_filename_component(test_name ${test} NAME_WE) + get_filename_component(test_dir ${test} DIRECTORY) + add_executable(${test_name} ${test}) + target_link_libraries(${test_name} ${PROJECT_NAME}::${PROJECT_NAME}_c ${PROJECT_NAME}_warnings gtest_main) + set_property(TARGET ${test_name} PROPERTY RUNTIME_OUTPUT_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/${test_dir}) + add_test(NAME ${test_name} COMMAND ${test_name} WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/${test_dir}) + # Run clang-tidy if found + if(CLANG_TIDY_EXECUTABLE) + set_target_properties(${test_name} PROPERTIES CXX_CLANG_TIDY "${CLANG_TIDY_EXECUTABLE}") + endif() + # Run cppcheck if found + if(CPPCHECK_EXECUTABLE) + add_custom_command( + TARGET ${test_name} + COMMAND 
${CPPCHECK_EXECUTABLE} + --enable=warning,style,performance,portability + --std=c++17 + --template=gcc + --verbose + --force + --quiet + ${CMAKE_CURRENT_SOURCE_DIR}/${test} + ) + endif() +endforeach() diff --git a/c++/plovasp/atm/test/reorder_flag.cpp b/test/c++/converters/vasp/reorder_flag.cpp similarity index 100% rename from c++/plovasp/atm/test/reorder_flag.cpp rename to test/c++/converters/vasp/reorder_flag.cpp diff --git a/c++/plovasp/atm/test/reorder_inds.cpp b/test/c++/converters/vasp/reorder_inds.cpp similarity index 100% rename from c++/plovasp/atm/test/reorder_inds.cpp rename to test/c++/converters/vasp/reorder_inds.cpp diff --git a/c++/plovasp/atm/test/testing.hpp b/test/c++/converters/vasp/testing.hpp similarity index 100% rename from c++/plovasp/atm/test/testing.hpp rename to test/c++/converters/vasp/testing.hpp diff --git a/c++/plovasp/atm/test/tet_weights.cpp b/test/c++/converters/vasp/tet_weights.cpp similarity index 100% rename from c++/plovasp/atm/test/tet_weights.cpp rename to test/c++/converters/vasp/tet_weights.cpp diff --git a/c++/plovasp/atm/test/weights1.cpp b/test/c++/converters/vasp/weights1.cpp similarity index 100% rename from c++/plovasp/atm/test/weights1.cpp rename to test/c++/converters/vasp/weights1.cpp diff --git a/test/plovasp/.gitignore b/test/plovasp/.gitignore deleted file mode 100644 index 0d20b648..00000000 --- a/test/plovasp/.gitignore +++ /dev/null @@ -1 +0,0 @@ -*.pyc diff --git a/test/plovasp/converter/lunio3.out.h5_old b/test/plovasp/converter/lunio3.out.h5_old deleted file mode 100644 index fb77f35c..00000000 Binary files a/test/plovasp/converter/lunio3.out.h5_old and /dev/null differ diff --git a/test/plovasp/converter/runtest.sh b/test/plovasp/converter/runtest.sh deleted file mode 100755 index 4ceb1ca0..00000000 --- a/test/plovasp/converter/runtest.sh +++ /dev/null @@ -1 +0,0 @@ -PYTHONPATH=../../python:../../c:$PYTHONPATH ../../../build_pytriqs $1 diff --git a/test/plovasp/inpconf/.gitignore 
b/test/plovasp/inpconf/.gitignore deleted file mode 100644 index 0d20b648..00000000 --- a/test/plovasp/inpconf/.gitignore +++ /dev/null @@ -1 +0,0 @@ -*.pyc diff --git a/test/plovasp/inpconf/parse_shells_5.cfg~ b/test/plovasp/inpconf/parse_shells_5.cfg~ deleted file mode 100644 index cd3a9b6e..00000000 --- a/test/plovasp/inpconf/parse_shells_5.cfg~ +++ /dev/null @@ -1,18 +0,0 @@ -[General] - -[Group 1] -SHELLS = 1 2 - -[Shell 1] -LSHELL = 2 -IONS = 5..8 - -[Shell 2] -LSHELL = 1 -IONS = 1..4 - -TRANSFORM = 0.0 1.0 0.0 - 1.0 0.0 0.0 - 0.0 0.0 1.0 - -CORR = False \ No newline at end of file diff --git a/test/plovasp/inpconf/runtest.sh b/test/plovasp/inpconf/runtest.sh deleted file mode 100755 index 10b8ee04..00000000 --- a/test/plovasp/inpconf/runtest.sh +++ /dev/null @@ -1 +0,0 @@ -PYTHONPATH=../..:../../../../../c:$PYTHONPATH python $1 diff --git a/test/plovasp/plocar_io/runtest.sh b/test/plovasp/plocar_io/runtest.sh deleted file mode 100755 index 9b7bd8ca..00000000 --- a/test/plovasp/plocar_io/runtest.sh +++ /dev/null @@ -1 +0,0 @@ -PYTHONPATH=../../python:../../c:$PYTHONPATH python $1 diff --git a/test/plovasp/plotools/runtest.sh b/test/plovasp/plotools/runtest.sh deleted file mode 100755 index 4ceb1ca0..00000000 --- a/test/plovasp/plotools/runtest.sh +++ /dev/null @@ -1 +0,0 @@ -PYTHONPATH=../../python:../../c:$PYTHONPATH ../../../build_pytriqs $1 diff --git a/test/plovasp/proj_group/projgroups.out b/test/plovasp/proj_group/projgroups.out deleted file mode 100644 index fbdc2e9d..00000000 --- a/test/plovasp/proj_group/projgroups.out +++ /dev/null @@ -1,1788 +0,0 @@ -pars: {'normalize': True, 'index': '1', 'ewindow': (-10.0, 4.0), 'shells': [0], 'normion': False} -10 25 - 1 -0j - 2 0j - 3 -0j - 4 (-0+0j) - 5 (-0-0j) - 1 (-0+0j) - 2 (-0-0j) - 3 (-0+0j) - 4 (-0-0j) - 5 0j - 1 -0j - 2 0j - 3 -0j - 4 0j - 5 (-0-0j) - 1 (-0+0j) - 2 (-0-0j) - 3 (-0+0j) - 4 -0j - 5 (-0+0j) - 1 (-0+0j) - 2 (-0-0j) - 3 (-0+0j) - 4 0j - 5 (-0+0j) - 1 -0j - 2 0j - 3 -0j - 4 0j - 5 (-0-0j) - 1 
(-0+0j) - 2 -0j - 3 (-0+0j) - 4 0j - 5 0j - 1 (-0+0j) - 2 -0j - 3 (-0+0j) - 4 0j - 5 0j - 1 (-0+0j) - 2 -0j - 3 (-0+0j) - 4 0j - 5 (-0+0j) - 1 (-0.0004508+0.0028852j) - 2 (-0.1497561+0.9581279j) - 3 (-0+0j) - 4 (-0.0001217+0.0008514j) - 5 (-0+0j) - 1 (-0.0023141+0.0018742j) - 2 (0.0006823-0.0005381j) - 3 (-0+0j) - 4 (-0.7536259+0.6103144j) - 5 -0j - 1 (-0.9588806+0.1448303j) - 2 (0.0028849-0.0004356j) - 3 (-0+0j) - 4 (0.0029471-0.0004451j) - 5 0j - 1 0j - 2 0j - 3 (-0.0222997-0.0193299j) - 4 (-0-0j) - 5 (-0.5552623-0.4813134j) - 1 -0j - 2 (-0+0j) - 3 (0.5488983-0.4885585j) - 4 (-0+0j) - 5 (-0.0220441+0.0196209j) - 1 -0j - 2 (-0+0j) - 3 (0.0018298-0.0011222j) - 4 (-0-0j) - 5 (0.1926196-0.1180961j) - 1 (-0-0j) - 2 -0j - 3 (-0.225939-0.0007474j) - 4 0j - 5 (0.0021464+7.4e-06j) -10 23 - 1 -0j - 2 (-0-0j) - 3 (0.1798377+0.043276j) - 4 -0j - 5 (-0.30033-0.0722711j) - 1 (0.0913626+0.1364412j) - 2 0j - 3 (-0-0j) - 4 (0.0001106+0.0001663j) - 5 0j - 1 (3.9e-06-0.0001996j) - 2 (-0-0j) - 3 0j - 4 (-0.0037065+0.1641633j) - 5 (-0-0j) - 1 (-0.0307734-0.0241725j) - 2 0j - 3 (-0-0j) - 4 (-0.4380031-0.343931j) - 5 0j - 1 (0.5509428-0.0812239j) - 2 (-0-0j) - 3 0j - 4 (-0.0387125+0.005714j) - 5 (-0-0j) - 1 (-0+0j) - 2 0j - 3 (0.0303415+0.0734015j) - 4 (-0+0j) - 5 (-0.0506704-0.1225809j) - 1 (-0+0j) - 2 -0j - 3 (-0.0189898-0.0160965j) - 4 (-0-0j) - 5 (-0.010571-0.0089604j) - 1 (-0.1052749-0.0335264j) - 2 (-0-0j) - 3 (-0+0j) - 4 (-0.0030429-0.0009691j) - 5 (-0+0j) - 1 (-0.0021753+0.002338j) - 2 (-0+0j) - 3 -0j - 4 (0.075262-0.0808856j) - 5 -0j - 1 (-0-0j) - 2 (-0.9029934-0.3629887j) - 3 (-0-0j) - 4 (-0-0j) - 5 (-0-0j) - 1 (-0.0022701+0.0017722j) - 2 -0j - 3 (-0+0j) - 4 (-0.5988639+0.4674931j) - 5 -0j - 1 (0.4737477+0.5939284j) - 2 (-0-0j) - 3 0j - 4 (-0.0017958-0.0022515j) - 5 0j - 1 (-0-0j) - 2 (-0-0j) - 3 (0.3675154+0.536373j) - 4 (-0-0j) - 5 (0.2045843+0.2985821j) - 1 (-0-0j) - 2 0j - 3 (0.0256802+0.3419576j) - 4 (-0-0j) - 5 (-0.0428861-0.5710709j) -10 23 - 1 (-0-0j) - 2 (-0+0j) - 3 
(-0.1733519+0.0645254j) - 4 (-0+0j) - 5 (0.2894987-0.1077578j) - 1 (-0.1377034+0.0894482j) - 2 (-0+0j) - 3 (-0-0j) - 4 (-0.0002898+0.000267j) - 5 (-0-0j) - 1 (0.0002132-0.0003313j) - 2 -0j - 3 0j - 4 (-0.06447+0.1510193j) - 5 0j - 1 (-0.0386479-0.000869j) - 2 (-0+0j) - 3 (-0-0j) - 4 (-0.5566305-0.0182938j) - 5 (-0-0j) - 1 (-0.2892412-0.4759324j) - 2 (-0+0j) - 3 (-0-0j) - 4 (0.0197331+0.0332419j) - 5 (-0-0j) - 1 (-1e-07-0j) - 2 (-0+1e-07j) - 3 (0.0683882-0.040391j) - 4 (-0+0j) - 5 (-0.1142089+0.067453j) - 1 0j - 2 0j - 3 (-0.0245703-0.0040012j) - 4 (-0-0j) - 5 (-0.0136775-0.0022274j) - 1 (-0.099971+0.0470387j) - 2 (-0+0j) - 3 -0j - 4 (-0.0028895+0.0013595j) - 5 (-0+0j) - 1 (0.0006444+0.0031277j) - 2 0j - 3 0j - 4 (-0.0222906-0.1082126j) - 5 (-0-0j) - 1 (-0+0j) - 2 (-0.1072096+0.9672973j) - 3 (-0+0j) - 4 (-0+0j) - 5 (-0+0j) - 1 (0.0020646-0.0020078j) - 2 (-0+0j) - 3 -0j - 4 (0.5446383-0.5296762j) - 5 (-0+0j) - 1 (-0.4361445-0.6220658j) - 2 0j - 3 (-0-0j) - 4 (0.0016533+0.002358j) - 5 (-0-0j) - 1 (-0+0j) - 2 (-0+0j) - 3 (0.0994177-0.6425572j) - 4 (-0+0j) - 5 (0.0553427-0.3576915j) - 1 (-0+0j) - 2 -0j - 3 (0.3262859-0.105508j) - 4 (-0+0j) - 5 (-0.5448991+0.176199j) -10 23 - 1 (-1e-07+0j) - 2 (-0-0j) - 3 (0.1276138-0.1338997j) - 4 0j - 5 (0.2131158-0.2236133j) - 1 (-0.1592148-0.0401744j) - 2 (-0.0001382+1.34e-05j) - 3 -0j - 4 0j - 5 0j - 1 (9.12e-05-0.0001048j) - 2 (-0.0596493+0.1529879j) - 3 (-0+0j) - 4 (-0-0j) - 5 (-0-0j) - 1 (-0.0337956+0.0219034j) - 2 (-0.4778652+0.2858139j) - 3 -0j - 4 0j - 5 0j - 1 (0.1085553+0.5461322j) - 2 (-0.0064237-0.0397572j) - 3 (-0+0j) - 4 (-0-0j) - 5 (-0-0j) - 1 (-4e-07+1e-07j) - 2 (-2e-07-0j) - 3 (-0.027945+0.0743468j) - 4 (2e-07+2e-07j) - 5 (-0.0466688+0.1241597j) - 1 0j - 2 (-0+0j) - 3 (-0.0180286+0.0171662j) - 4 (-0-0j) - 5 (0.010036-0.0095559j) - 1 (0.108703-0.0199224j) - 2 (0.0019145-0.000351j) - 3 (-0+0j) - 4 -0j - 5 -0j - 1 (0.0005201-0.0018756j) - 2 (-0.0295246+0.1064967j) - 3 -0j - 4 -0j - 5 -0j - 1 0j - 2 (-0+0j) - 3 0j - 4 
(0.3917601+0.8908882j) - 5 (-0-0j) - 1 (-0.0028103-0.0001946j) - 2 (-0.7579135-0.0524913j) - 3 (-0-0j) - 4 -0j - 5 (-0-0j) - 1 (0.6214492+0.4370229j) - 2 (-0.0023043-0.0016205j) - 3 0j - 4 (-0-0j) - 5 (-0-0j) - 1 (-0+0j) - 2 (-0+0j) - 3 (0.2052216-0.6169665j) - 4 (-0+0j) - 5 (-0.1142404+0.343446j) - 1 0j - 2 -0j - 3 (-0.0254931+0.3419716j) - 4 (-0+0j) - 5 (-0.0425736+0.5710942j) -10 22 - 1 (-0.0649311-0.3442876j) - 2 (-0-0j) - 3 (0.0378287+0.2005811j) - 4 (-0-0j) - 5 0j - 1 (0.3106791+0.4493758j) - 2 (-0-0j) - 3 (0.0875018+0.1265652j) - 4 (-0+0j) - 5 (-3e-07-5e-07j) - 1 (-4e-07+3e-07j) - 2 0j - 3 (-1e-07+1e-07j) - 4 (-0+0j) - 5 (-0.471188+0.3354972j) - 1 (-0-0j) - 2 (0.2581334+0.1910264j) - 3 (-0+0j) - 4 (0.2581334+0.1910264j) - 5 0j - 1 (-0+0j) - 2 (0.4113635-0.070039j) - 3 -0j - 4 (-0.4113635+0.070039j) - 5 (-0-0j) - 1 (0.0016106-0.0027085j) - 2 -0j - 3 (0.0379126-0.0637563j) - 4 0j - 5 1e-07j - 1 0j - 2 (-0.236061-0.1082325j) - 3 0j - 4 (-0.236061-0.1082325j) - 5 (-0+0j) - 1 0j - 2 0j - 3 0j - 4 (-0-0j) - 5 (0.1545555+0.0883244j) - 1 0j - 2 0j - 3 0j - 4 (-0-0j) - 5 (0.0464772+0.0437532j) - 1 0j - 2 (0.5477333+0.0430706j) - 3 0j - 4 (0.5477332+0.0430706j) - 5 (-0+0j) - 1 (-0-0j) - 2 (-0.1179476-0.53596j) - 3 -0j - 4 (0.1179476+0.5359601j) - 5 (-0-0j) - 1 (-0.3912362-0.4448266j) - 2 0j - 3 (0.0271323+0.0308488j) - 4 0j - 5 0j - 1 (-0.0007282-0.0085436j) - 2 0j - 3 (-0.0618833-0.7260608j) - 4 0j - 5 (-0-0j) -10 22 - 1 (-0.3251665-0.1304481j) - 2 (-1e-07-0j) - 3 (-0.1894412-0.0759987j) - 4 0j - 5 (1e-07+0j) - 1 (0.5384525+0.0923521j) - 2 (2e-07+0j) - 3 (-0.1516534-0.0260108j) - 4 (-0-1e-07j) - 5 (5e-07+1e-07j) - 1 (5e-07+0j) - 2 (-1e-07-0j) - 3 (-2e-07+0j) - 4 (-0+0j) - 5 (-0.5766721-0.0450084j) - 1 0j - 2 (0.297352-0.1212671j) - 3 (-0+0j) - 4 (-0.297352+0.1212672j) - 5 -0j - 1 0j - 2 (0.3819936-0.1679472j) - 3 (-0+0j) - 4 (0.3819937-0.1679472j) - 5 -0j - 1 (0.0031359+0.0003096j) - 2 (1e-07-0j) - 3 (-0.0738181-0.0072888j) - 4 (-0-0j) - 5 (-0+0j) - 1 (-0-0j) - 2 
(-0.1050147-0.23751j) - 3 (-0-0j) - 4 (0.1050147+0.23751j) - 5 (-0-0j) - 1 0j - 2 0j - 3 (-0-0j) - 4 (-0-0j) - 5 (-0.1725222-0.0438714j) - 1 -0j - 2 -0j - 3 (-0+0j) - 4 (-0+0j) - 5 (-0.0121359+0.0626673j) - 1 0j - 2 (0.0639723-0.545687j) - 3 0j - 4 (-0.0639723+0.545687j) - 5 -0j - 1 -0j - 2 (0.0330027-0.5477915j) - 3 -0j - 4 (0.0330027-0.5477916j) - 5 0j - 1 (0.5575486+0.2001901j) - 2 (-0-0j) - 3 (0.0386661+0.0138832j) - 4 (-0-0j) - 5 0j - 1 (0.0068083+0.0052126j) - 2 0j - 3 (-0.5785859-0.4429811j) - 4 0j - 5 (-0-0j) -10 23 - 1 (-0+0j) - 2 0j - 3 (-0.1733157+0.0646225j) - 4 0j - 5 (-0.2894383+0.10792j) - 1 (0.0913591-0.1364436j) - 2 (7.13e-05-0.0001072j) - 3 0j - 4 0j - 5 0j - 1 (-0.0001234+3.7e-05j) - 2 (0.1571436-0.0476365j) - 3 0j - 4 0j - 5 0j - 1 (-0.0384934+0.0095109j) - 2 (-0.5406479+0.133395j) - 3 0j - 4 0j - 5 0j - 1 (-0.4575424-0.3174104j) - 2 (0.0325716+0.0226117j) - 3 0j - 4 0j - 5 0j - 1 -0j - 2 (-0-0j) - 3 (-0.0663351+0.043681j) - 4 (-0-0j) - 5 (-0.1107801+0.0729476j) - 1 -0j - 2 (-0+0j) - 3 (-0.0039723+0.024575j) - 4 (-0-0j) - 5 (0.0022112-0.0136801j) - 1 (-0.0243186+0.1078047j) - 2 (-0.0004283+0.0018986j) - 3 0j - 4 (-0+0j) - 5 (-0-0j) - 1 (0.0015793-0.0011376j) - 2 (-0.089671+0.0645937j) - 3 -0j - 4 (-0+0j) - 5 (-0-0j) - 1 (-0-0j) - 2 0j - 3 (-0-0j) - 4 (-0.9558028-0.1833002j) - 5 0j - 1 (-0.0025628-0.0011694j) - 2 (-0.6911981-0.3153306j) - 3 (-0-0j) - 4 (-0+0j) - 5 (-0-0j) - 1 (-0.3231788+0.6875636j) - 2 (0.0011982-0.0025495j) - 3 (-0+0j) - 4 -0j - 5 -0j - 1 -0j - 2 (-0-0j) - 3 (0.4772363+0.4415984j) - 4 (-0-0j) - 5 (-0.2656625-0.245824j) - 1 (-0-0j) - 2 (-0+0j) - 3 (0.0837075-0.332547j) - 4 (-0-0j) - 5 (0.1397919-0.5553552j) -10 22 - 1 (-0.1598438+0.3117691j) - 2 (-0-0j) - 3 (-0.0931246+0.1816359j) - 4 (-0-0j) - 5 -0j - 1 (0.316545-0.4452632j) - 2 0j - 3 (-0.0891539+0.125407j) - 4 0j - 5 (3e-07-4e-07j) - 1 (-4e-07+4e-07j) - 2 (-0-0j) - 3 (1e-07-1e-07j) - 4 (-0-0j) - 5 (0.4209675-0.3966899j) - 1 0j - 2 (-0.1548728+0.2813154j) - 3 0j - 4 
(0.1548728-0.2813154j) - 5 (-0+0j) - 1 (-0-0j) - 2 (-0.0725145-0.4109344j) - 3 (-0-0j) - 4 (-0.0725145-0.4109344j) - 5 0j - 1 (0.0021496-0.0023042j) - 2 0j - 3 (-0.050601+0.0542381j) - 4 0j - 5 0j - 1 -0j - 2 (0.2181373-0.1409084j) - 3 (-0+0j) - 4 (-0.2181373+0.1409084j) - 5 -0j - 1 -0j - 2 -0j - 3 (-0+0j) - 4 (-0+0j) - 5 (-0.075198+0.1613501j) - 1 -0j - 2 -0j - 3 (-0+0j) - 4 (-0+0j) - 5 (-0.0138021+0.0623215j) - 1 -0j - 2 (0.3074963+0.4553162j) - 3 0j - 4 (-0.3074963-0.4553161j) - 5 0j - 1 (-0-0j) - 2 (-0.5313799-0.1371137j) - 3 (-0-0j) - 4 (-0.5313799-0.1371138j) - 5 -0j - 1 (0.3010873-0.5101793j) - 2 (-0+0j) - 3 (0.0208805-0.035381j) - 4 (-0+0j) - 5 -0j - 1 (-0.0085158+0.0010024j) - 2 (-0+0j) - 3 (0.7236963-0.085191j) - 4 (-0+0j) - 5 -0j -10 22 - 1 (0.3461913+0.053866j) - 2 (-0+0j) - 3 (-0.2016902-0.0313822j) - 4 (-0+0j) - 5 -0j - 1 (-0.5398321-0.0839132j) - 2 -0j - 3 (-0.152042-0.0236339j) - 4 -0j - 5 (6e-07+1e-07j) - 1 (5e-07+2e-07j) - 2 (-0+0j) - 3 (2e-07+0j) - 4 (-0+0j) - 5 (0.5506471+0.1770995j) - 1 -0j - 2 (0.3072619+0.0933492j) - 3 (-0-0j) - 4 (0.3072619+0.0933492j) - 5 (-0-0j) - 1 (-0+0j) - 2 (0.4143474+0.0494131j) - 3 (-0+0j) - 4 (-0.4143474-0.0494131j) - 5 0j - 1 (0.0028697+0.0013018j) - 2 0j - 3 (0.0675512+0.0306442j) - 4 (-0+0j) - 5 (-0-0j) - 1 0j - 2 (-0.222548-0.1338337j) - 3 0j - 4 (-0.222548-0.1338337j) - 5 0j - 1 -0j - 2 -0j - 3 -0j - 4 (-0+0j) - 5 (0.0068112-0.1778826j) - 1 -0j - 2 -0j - 3 -0j - 4 (-0+0j) - 5 (0.031429-0.0555579j) - 1 0j - 2 (0.2680391+0.4796059j) - 3 0j - 4 (0.2680391+0.4796058j) - 5 -0j - 1 0j - 2 (-0.3931449-0.3828862j) - 3 (-0-0j) - 4 (0.3931449+0.3828862j) - 5 (-0-0j) - 1 (-0.1714822+0.5670365j) - 2 -0j - 3 (0.0118923-0.0393241j) - 4 -0j - 5 -0j - 1 (0.0059642+0.0061605j) - 2 (-0-0j) - 3 (0.5068579+0.5235351j) - 4 (-0-0j) - 5 0j -10 23 - 1 0j - 2 0j - 3 (-0.2381631-0.2572602j) - 4 (-0-0j) - 5 -0j - 1 (-0-0j) - 2 (7.83e-05+1.53e-05j) - 3 (-0-0j) - 4 (0.1310127+0.0989901j) - 5 (-0+0j) - 1 0j - 2 (0.161646+0.0288775j) - 3 0j 
- 4 (-6.44e-05-4.7e-05j) - 5 -0j - 1 0j - 2 (-0.0272174+0.0096473j) - 3 0j - 4 (-0.427908-0.3573898j) - 5 -0j - 1 (-0-0j) - 2 (-0.502332+0.241858j) - 3 (-0-0j) - 4 (0.0240306+0.0160121j) - 5 (-0+0j) - 1 0j - 2 0j - 3 (0.1476598+0.0292827j) - 4 (-1e-07+0j) - 5 -1e-07j - 1 0j - 2 (-0+0j) - 3 -0j - 4 -0j - 5 (0.0206397-0.0208035j) - 1 0j - 2 (2.77e-05+2.68e-05j) - 3 0j - 4 (0.0794091+0.0768845j) - 5 (-0-0j) - 1 (-0-0j) - 2 (-0.1099844-0.010976j) - 3 (-0-0j) - 4 (3.83e-05+3.8e-06j) - 5 (-0-0j) - 1 (0.5611965+0.7951203j) - 2 0j - 3 0j - 4 0j - 5 0j - 1 -0j - 2 (-0.4667031+0.5994866j) - 3 (-0+0j) - 4 (-0.0001685+0.0002131j) - 5 (-0+0j) - 1 -0j - 2 (2.83e-05-0.0002701j) - 3 (-0+0j) - 4 (-0.0848183+0.7549847j) - 5 -0j - 1 0j - 2 -0j - 3 (-0+0j) - 4 (-0+0j) - 5 (-0.7603683+0.0877228j) - 1 (-0-0j) - 2 -0j - 3 (-0.2680395+0.5920943j) - 4 -0j - 5 -0j -10 22 - 1 (-0-1e-07j) - 2 0j - 3 (-0.0747045+0.0786119j) - 4 (-0.2413475+0.2539712j) - 5 (-0.1241594+0.1306537j) - 1 (-0+1e-07j) - 2 (-0+0j) - 3 (-0.049552+0.065018j) - 4 (0.3311519-0.4345094j) - 5 (-0.0823574+0.1080624j) - 1 0j - 2 (-0-0j) - 3 (0.466613-0.1496885j) - 4 (-1.2e-06+4e-07j) - 5 (-0.2585042+0.0829276j) - 1 (-0.0391107-0.3187385j) - 2 (-0.0391107-0.3187386j) - 3 (-0+0j) - 4 0j - 5 (-0-0j) - 1 (-0.2658556+0.3216306j) - 2 (0.2658556-0.3216306j) - 3 (-0+0j) - 4 0j - 5 -0j - 1 (-1e-07+0j) - 2 (-0-0j) - 3 (-0.0017948-0.039369j) - 4 (0.0001435+0.003148j) - 5 (-0.002983-0.0654316j) - 1 (-0.256565-0.0401683j) - 2 (-0.256565-0.0401683j) - 3 (-0-0j) - 4 0j - 5 0j - 1 -0j - 2 -0j - 3 (-0.1421434+0.0503882j) - 4 -0j - 5 (0.0787478-0.0279151j) - 1 0j - 2 0j - 3 (0.0515882+0.0162174j) - 4 0j - 5 (-0.02858-0.0089845j) - 1 (-0.1868235+0.5166836j) - 2 (-0.1868246+0.5166867j) - 3 (-0+0j) - 4 (-0+0j) - 5 (-0+0j) - 1 (0.5038491-0.2174918j) - 2 (-0.503846+0.2174904j) - 3 (-0+0j) - 4 -0j - 5 (-0+0j) - 1 -0j - 2 (-0+0j) - 3 (-0.021825+0.0003071j) - 4 (-0.5923403+0.0083342j) - 5 (-0.0362733+0.0005104j) - 1 -0j - 2 (-0+0j) - 3 
(0.3860531-0.0291359j) - 4 (-0.0085503+0.0006453j) - 5 (0.6416229-0.0484242j) -10 22 - 1 (-0+0j) - 2 0j - 3 (-0.0901265-0.0603143j) - 4 (0.2911712+0.1948572j) - 5 (-0.1497909-0.1002429j) - 1 (-0+0j) - 2 -0j - 3 (0.0789973+0.0210278j) - 4 (0.527932+0.1405269j) - 5 (0.1312966+0.034949j) - 1 (-0+0j) - 2 0j - 3 (0.3391403-0.3537205j) - 4 (9e-07-1e-06j) - 5 (-0.1878842+0.1959616j) - 1 (0.239632-0.2137766j) - 2 (-0.2396321+0.2137766j) - 3 0j - 4 0j - 5 (-0+0j) - 1 (-0.289175+0.3008375j) - 2 (-0.289175+0.3008376j) - 3 0j - 4 0j - 5 (-0+0j) - 1 -0j - 2 (-0-1e-07j) - 3 (0.0132586-0.0371126j) - 4 (0.0010602-0.0029675j) - 5 (0.0220359-0.0616815j) - 1 (-0.2321363+0.1164122j) - 2 (0.2321364-0.1164122j) - 3 -0j - 4 -0j - 5 (-0+0j) - 1 (-0-0j) - 2 0j - 3 (-0.1251119-0.0842065j) - 4 (-0-0j) - 5 (0.0693123+0.0466506j) - 1 (-0+0j) - 2 -0j - 3 (0.0461768-0.0281434j) - 4 (-0+0j) - 5 (-0.0255821+0.0155915j) - 1 (0.3410524+0.430753j) - 2 (-0.3410545-0.4307557j) - 3 (-0-0j) - 4 0j - 5 (-0-0j) - 1 (0.2715043-0.4769193j) - 2 (0.2715026-0.4769164j) - 3 -0j - 4 -0j - 5 -0j - 1 0j - 2 0j - 3 (0.0075332+0.020486j) - 4 (-0.2044533-0.5559994j) - 5 (0.0125202+0.0340479j) - 1 0j - 2 0j - 3 (-0.1912074-0.3366387j) - 4 (-0.0042348-0.0074559j) - 5 (-0.317788-0.5594959j) -10 22 - 1 (-0+0j) - 2 (-0.1234766+0.3278774j) - 3 (-0.0382198+0.1014882j) - 4 0j - 5 (0.0635216-0.1686742j) - 1 -0j - 2 (0.0521713-0.5438182j) - 3 (-0.0078067+0.0813749j) - 4 (-0-0j) - 5 (0.012975-0.1352472j) - 1 (-0+0j) - 2 (8e-07+2e-07j) - 3 (-0.4793712-0.1016736j) - 4 -0j - 5 (-0.2655725-0.0563274j) - 1 (0.108191-0.3023552j) - 2 (-0-0j) - 3 -0j - 4 (0.108191-0.3023552j) - 5 0j - 1 (0.1992612+0.3666338j) - 2 (-0+0j) - 3 -0j - 4 (-0.1992612-0.3666339j) - 5 0j - 1 -0j - 2 (0.0008334-0.003039j) - 3 (-0.0104232+0.0380065j) - 4 (-0+0j) - 5 (0.0173235-0.0631671j) - 1 (-0.1048285-0.2375922j) - 2 0j - 3 (-0-0j) - 4 (-0.1048285-0.2375922j) - 5 (-0-0j) - 1 (-0-0j) - 2 (-0-0j) - 3 (0.0612592+0.1378079j) - 4 (-0-0j) - 5 (0.0339378+0.0763459j) 
- 1 (-0+0j) - 2 (-0+0j) - 3 (-0.0299063+0.0450551j) - 4 (-0+0j) - 5 (-0.0165682+0.0249606j) - 1 (0.4041486+0.3721946j) - 2 0j - 3 0j - 4 (0.4041511+0.3721968j) - 5 (-0-0j) - 1 (0.216841+0.5041295j) - 2 0j - 3 (-0-0j) - 4 (-0.2168397-0.5041265j) - 5 0j - 1 (-0-0j) - 2 (0.0577942+0.589573j) - 3 (0.0021294+0.0217231j) - 4 0j - 5 (-0.0035392-0.0361039j) - 1 (-0-0j) - 2 (0.0073427+0.0044281j) - 3 (-0.3315318-0.1999314j) - 4 0j - 5 (0.5510081+0.3322874j) -10 21 - 1 (0.0266804-0.191265j) - 2 (0.0266804-0.1912649j) - 3 (-0+1e-07j) - 4 (0.0266804-0.1912649j) - 5 -0j - 1 (0.0150184+0.0124662j) - 2 (-0.0516523-0.0429122j) - 3 (-0.1293059-0.1073315j) - 4 (0.0366337+0.0304459j) - 5 (0.4388582+0.3646539j) - 1 (0.0170772-0.0640336j) - 2 (-0.0041746+0.0156867j) - 3 (-0.1470318+0.5513171j) - 4 (-0.0129026+0.0483472j) - 5 (-0.0433857+0.1623507j) - 1 (-0.1253193-0.3210191j) - 2 (-0.1253193-0.321019j) - 3 1e-07j - 4 (-0.1253193-0.321019j) - 5 (-0-0j) - 1 (0.033639+0.5021813j) - 2 (-0.0174786-0.2520752j) - 3 (0.0071199+0.1062898j) - 4 (-0.0161604-0.2501061j) - 5 (-0.0001611-0.0002406j) - 1 (-0.0009347-0.0009989j) - 2 (-0.416504-0.1264776j) - 3 (-0.0001978-0.0002114j) - 4 (0.4174386+0.1274766j) - 5 (-0.1019076-0.0310332j) - 1 (0.0008405-0.0016868j) - 2 (-0.0089923+0.0180457j) - 3 (0.0041109-0.0082498j) - 4 (0.0081518-0.0163589j) - 5 (-0.0484096+0.0971479j) - 1 (0.0206005+0.0082553j) - 2 (-0.0087852-0.0035205j) - 3 (0.1007525+0.0403748j) - 4 (-0.0118153-0.0047348j) - 5 (0.0085559+0.0034287j) - 1 0j - 2 0j - 3 0j - 4 (-0-0j) - 5 (-0-0j) - 1 (-0.2167601-0.2946513j) - 2 (-0.216763-0.2946553j) - 3 (-1e-07-2e-07j) - 4 (-0.2167629-0.2946552j) - 5 0j - 1 (0.0044653-0.0025254j) - 2 (-0.4003201+0.2264033j) - 3 (-0.0003468+0.0001961j) - 4 (0.3958549-0.223878j) - 5 (0.0356986-0.0201895j) - 1 (0.337287+0.4063527j) - 2 (-0.1658039-0.1997553j) - 3 (-0.0261939-0.0315576j) - 4 (-0.1714787-0.2065922j) - 5 (-0.0002544-0.0003066j) -10 21 - 1 (0.180996+0.0673392j) - 2 (-0.180996-0.0673392j) - 3 (1e-07+0j) - 
4 (0.180996+0.0673392j) - 5 (-0-0j) - 1 (0.0169397+0.0092093j) - 2 (0.0590636+0.0318285j) - 3 (0.1458481+0.0792908j) - 4 (0.0421237+0.0226191j) - 5 (-0.5029889-0.270652j) - 1 (-0.0661595-0.004904j) - 2 (-0.0164223-0.0012894j) - 3 (-0.5696205-0.0422228j) - 4 (0.0497374+0.0036146j) - 5 (-0.1656052-0.0115586j) - 1 (0.2963674+0.1758534j) - 2 (-0.2963674-0.1758534j) - 3 1e-07j - 4 (0.2963676+0.1758536j) - 5 (-1e-07-1e-07j) - 1 (-0.1245317-0.4785835j) - 2 (-0.0834806-0.3175658j) - 3 (0.0263579+0.1012952j) - 4 (0.0410511+0.1610178j) - 5 (-0.0051849-0.0191301j) - 1 (-0.0934036+0.0067043j) - 2 (0.3801347-0.0316259j) - 3 (0.0197694-0.001419j) - 4 (0.4735382-0.0383301j) - 5 (0.1043187-0.0085486j) - 1 (0.0106362+0.0126787j) - 2 (0.013615+0.0162295j) - 3 (-0.0520194-0.0620088j) - 4 (0.0029787+0.0035508j) - 5 (0.0468555+0.0558535j) - 1 (0.0098175-0.0112169j) - 2 (-0.0045305+0.0051762j) - 3 (-0.0480153+0.0548596j) - 4 (-0.014348+0.0163932j) - 5 (-0.053307+0.0609054j) - 1 (-0+0j) - 2 (-0+0j) - 3 -0j - 4 (-0+0j) - 5 -0j - 1 (-0.2245956-0.2887233j) - 2 (0.2245987+0.2887271j) - 3 (2e-07+2e-07j) - 4 (-0.2245986-0.2887271j) - 5 (-0-0j) - 1 (0.0034709+0.0069413j) - 2 (0.206264+0.4124994j) - 3 (0.0002695+0.0005391j) - 4 (0.2027932+0.4055583j) - 5 (-0.0183411-0.0366797j) - 1 (0.479977+0.2201664j) - 2 (0.2338765+0.1072796j) - 3 (0.0372753+0.0170983j) - 4 (-0.2460943-0.1128839j) - 5 (0.0005478+0.0002513j) -10 22 - 1 (-0+0j) - 2 (0.2113309-0.2794445j) - 3 (-0.0654134+0.0864967j) - 4 (-0+0j) - 5 (0.1087176-0.1437582j) - 1 -0j - 2 (-0.5330864-0.1194945j) - 3 (-0.079769-0.0178807j) - 4 -0j - 5 (0.1325783+0.0297182j) - 1 (-0+1e-07j) - 2 (8e-07+1e-07j) - 3 (0.4896792+0.0186693j) - 4 0j - 5 (0.2712831+0.0103429j) - 1 (-0.2409274-0.2123156j) - 2 (-0-1e-07j) - 3 (-0-0j) - 4 (0.2409274+0.2123156j) - 5 -0j - 1 (-0.2496325-0.3343785j) - 2 0j - 3 -0j - 4 (-0.2496326-0.3343785j) - 5 0j - 1 (-0+0j) - 2 (0.0031147-0.0004779j) - 3 (0.0389541-0.0059762j) - 4 -0j - 5 (-0.064742+0.0099324j) - 1 
(-0.2196089+0.1386038j) - 2 -0j - 3 -0j - 4 (0.2196089-0.1386038j) - 5 -0j - 1 -0j - 2 -0j - 3 (0.0897408-0.1212035j) - 4 (-0+0j) - 5 (0.0497166-0.067147j) - 1 (-0+0j) - 2 (-0+0j) - 3 (0.0527718-0.0118105j) - 4 -0j - 5 (0.0292357-0.0065431j) - 1 (0.3397561+0.4317763j) - 2 0j - 3 (-0-0j) - 4 (-0.3397581-0.4317789j) - 5 0j - 1 (0.460635+0.2982986j) - 2 0j - 3 0j - 4 (0.4606322+0.2982967j) - 5 (-0-0j) - 1 -0j - 2 (-0.3393617+0.4855616j) - 3 (0.0125039-0.0178907j) - 4 -0j - 5 (-0.0207816+0.0297345j) - 1 -0j - 2 (-0.0085536+0.000599j) - 3 (-0.386205+0.0270476j) - 4 -0j - 5 (0.6418754-0.0449534j) -10 21 - 1 (-0.1922488+0.0182892j) - 2 (-0.1922489+0.0182892j) - 3 (-1e-07-0j) - 4 (0.1922488-0.0182892j) - 5 -0j - 1 (0.0186791+0.0058831j) - 2 (-0.0640597-0.020197j) - 3 (0.1608241+0.0506519j) - 4 (-0.0453804-0.0143139j) - 5 (-0.5440128-0.1715492j) - 1 (-0.0638085-0.0178284j) - 2 (0.0155716+0.0043445j) - 3 (-0.5493789-0.1534987j) - 4 (-0.0482371-0.0134839j) - 5 (-0.1623764-0.0454304j) - 1 (0.3444106+0.0118098j) - 2 (0.3444106+0.0118097j) - 3 (1e-07+0j) - 4 (-0.3444105-0.0118097j) - 5 (-1e-07-0j) - 1 (-0.4931486+0.0397544j) - 2 (0.1668077-0.0131739j) - 3 (0.104378-0.0084143j) - 4 (-0.326341+0.0265805j) - 5 (0.0194949-0.0016383j) - 1 (-0.0653873-0.0653302j) - 2 (0.336824+0.3344709j) - 3 (0.0138396+0.0138276j) - 4 (0.2714368+0.2691406j) - 5 (-0.0743294-0.0737612j) - 1 (0.0017617+0.0166422j) - 2 (0.0004591+0.0043365j) - 3 (-0.0086163-0.0813934j) - 4 (0.0022208+0.0209788j) - 5 (-0.0075672-0.0714826j) - 1 (0.0142405-0.0036364j) - 2 (-0.0211628+0.005404j) - 3 (-0.0696472+0.0177846j) - 4 (-0.0069223+0.0017676j) - 5 (0.0793035-0.0202505j) - 1 -0j - 2 -0j - 3 (-0+0j) - 4 -0j - 5 -0j - 1 (0.3116739-0.1914777j) - 2 (0.3116781-0.1914803j) - 3 (-2e-07+1e-07j) - 4 (-0.311678+0.1914803j) - 5 -0j - 1 (-0.0023688+0.0009237j) - 2 (0.4272969-0.1666091j) - 3 (-0.000184+7.17e-05j) - 4 (0.4249283-0.1656855j) - 5 (0.0382117-0.0148993j) - 1 (-0.286342+0.443749j) - 2 (0.1419753-0.2200216j) - 3 
(-0.0222375+0.0344618j) - 4 (-0.1443631+0.2237217j) - 5 (-0.0001071+0.0001659j) -10 21 - 1 (-0.0379938+0.1893425j) - 2 (0.0379938-0.1893425j) - 3 -1e-07j - 4 (0.0379938-0.1893425j) - 5 (-0+0j) - 1 (0.0085893-0.0173659j) - 2 (0.0297315-0.0601722j) - 3 (-0.0739527+0.1495175j) - 4 (-0.0211421+0.0428061j) - 5 (0.252886-0.5118919j) - 1 (-0.0184688+0.0636903j) - 2 (-0.0045691+0.0157284j) - 3 (0.1590129-0.5483607j) - 4 (-0.0138997+0.047962j) - 5 (0.046381-0.1602288j) - 1 (-0.0569709+0.3398713j) - 2 (0.056971-0.3398712j) - 3 -1e-07j - 4 (0.0569709-0.3398712j) - 5 (-0+0j) - 1 (-0.0984473-0.4935861j) - 2 (-0.0492445-0.2473405j) - 3 (-0.020837-0.1044706j) - 4 (-0.0492027-0.2462457j) - 5 (5.1e-06+0.0001338j) - 1 (-0.0001074+0.0006234j) - 2 (0.1408778-0.4121537j) - 3 (-2.27e-05+0.0001319j) - 4 (-0.1409851+0.4127771j) - 5 (-0.0344436+0.1008064j) - 1 (-0.006793-0.0211665j) - 2 (-0.003763-0.011725j) - 3 (-0.033223-0.1035207j) - 4 (-0.00303-0.0094415j) - 5 (0.0020695+0.006448j) - 1 (0.0009931-0.0009649j) - 2 (-0.0133111+0.012933j) - 3 (0.0048571-0.0047189j) - 4 (0.0143042-0.0138978j) - 5 (0.0779772-0.0757618j) - 1 0j - 2 -0j - 3 (-0+0j) - 4 (-0+0j) - 5 -0j - 1 (-0.0051332-0.3657567j) - 2 (0.0051332+0.3657617j) - 3 (-0-3e-07j) - 4 (0.0051332+0.3657616j) - 5 0j - 1 (0.0051749+1.66e-05j) - 2 (0.459927+0.0014718j) - 3 (-0.0004019-1.3e-06j) - 4 (-0.4547522-0.0014552j) - 5 (0.041012+0.0001312j) - 1 (0.0847781-0.5212459j) - 2 (0.0416691-0.2561961j) - 3 (-0.0065839+0.0404803j) - 4 (0.043108-0.265043j) - 5 (-6.45e-05+0.0003967j) -10 23 - 1 0j - 2 (-0+0j) - 3 (0.169971+0.3066176j) - 4 (-0+0j) - 5 -0j - 1 (-0-0j) - 2 (1.1e-05+1.68e-05j) - 3 (-0+0j) - 4 (-0.0903043-0.137144j) - 5 0j - 1 0j - 2 (0.1323352+0.0972149j) - 3 -0j - 4 (1.62e-05+1.19e-05j) - 5 (-0-0j) - 1 (-0-0j) - 2 (-0.0025818-0.002754j) - 3 (-0+0j) - 4 (-0.5061166-0.2355808j) - 5 0j - 1 (-0-0j) - 2 (0.0702434+0.5538215j) - 3 (-0+0j) - 4 (-0.0018368-0.0032979j) - 5 0j - 1 (-0+0j) - 2 (-0+0j) - 3 (0.0478756-0.1427193j) - 4 (-0+0j) - 
5 -0j - 1 0j - 2 -0j - 3 (-0+0j) - 4 (-0+0j) - 5 (-0.016255+0.0243835j) - 1 (-0-0j) - 2 (-3.48e-05-1.69e-05j) - 3 (-0-0j) - 4 (-0.0993714-0.0483979j) - 5 0j - 1 0j - 2 (0.1104208+0.0049274j) - 3 -0j - 4 (-3.86e-05-1.8e-06j) - 5 0j - 1 (0.6598727-0.7153501j) - 2 -0j - 3 -0j - 4 -0j - 5 (-0+0j) - 1 (-0-0j) - 2 (0.7007046+0.2936138j) - 3 0j - 4 (0.0002504+9.88e-05j) - 5 0j - 1 (-0+0j) - 2 (-0.0002683+2.16e-05j) - 3 -0j - 4 (0.7558501-0.076725j) - 5 (-0+0j) - 1 (-0+0j) - 2 -0j - 3 (-0+0j) - 4 (-0+0j) - 5 (-0.3593545+0.6758103j) - 1 0j - 2 (-0+0j) - 3 (0.1930428-0.6206088j) - 4 (-0+0j) - 5 (-0+0j) -10 22 - 1 (-0+0j) - 2 -0j - 3 (-0.0770352+0.0763294j) - 4 (0.2488772-0.2465972j) - 5 (-0.128033+0.1268601j) - 1 (-0-0j) - 2 (-0+0j) - 3 (-0.0318723+0.0752788j) - 4 (-0.213+0.5030816j) - 5 (-0.0529731+0.1251163j) - 1 (-0+0j) - 2 -0j - 3 (0.0383429+0.4885327j) - 4 (1e-07+1.3e-06j) - 5 (-0.021242-0.2706477j) - 1 (0.1210361-0.2974461j) - 2 (-0.1210361+0.2974461j) - 3 (-0-0j) - 4 -0j - 5 (-0+0j) - 1 (-0.0308891-0.4161385j) - 2 (-0.0308891-0.4161386j) - 3 0j - 4 (-0+0j) - 5 -0j - 1 -0j - 2 (-0-0j) - 3 (-0.0387729+0.0070567j) - 4 (-0.0031003+0.0005643j) - 5 (-0.0644409+0.0117283j) - 1 (0.1224696-0.2289984j) - 2 (-0.1224696+0.2289984j) - 3 (-0+0j) - 4 (-0+0j) - 5 -0j - 1 (-0-0j) - 2 0j - 3 (-0.1460966-0.0374101j) - 4 (-0-0j) - 5 (0.0809378+0.0207253j) - 1 0j - 2 (-0-0j) - 3 (-0.0540033-0.0028269j) - 4 0j - 5 (0.029918+0.0015661j) - 1 (0.536582-0.1180876j) - 2 (-0.5365853+0.1180883j) - 3 (-0+0j) - 4 -0j - 5 (-0+0j) - 1 (0.5474006-0.0389775j) - 2 (0.5473972-0.0389773j) - 3 -0j - 4 -0j - 5 -0j - 1 0j - 2 0j - 3 (0.0105429+0.0191121j) - 4 (-0.2861388-0.518711j) - 5 (0.0175224+0.0317645j) - 1 (-0+0j) - 2 (-0+0j) - 3 (0.0068018-0.3870912j) - 4 (0.0001506-0.0085733j) - 5 (0.0113046-0.6433483j) -10 22 - 1 0j - 2 (-0-0j) - 3 (0.0736874+0.0795661j) - 4 (0.2380616+0.2570538j) - 5 (0.1224691+0.1322395j) - 1 (-0-0j) - 2 (-0+0j) - 3 (0.032624+0.0749561j) - 4 (-0.2180232-0.5009251j) - 5 
(0.0542223+0.12458j) - 1 0j - 2 0j - 3 (-0.1419583-0.4690226j) - 4 (4e-07+1.2e-06j) - 5 (0.0786451+0.2598391j) - 1 (0.2574503-0.191946j) - 2 (0.2574503-0.191946j) - 3 -0j - 4 0j - 5 (-0+0j) - 1 (0.243026-0.3392105j) - 2 (-0.243026+0.3392105j) - 3 (-0+0j) - 4 (-0+0j) - 5 -0j - 1 (-0-0j) - 2 0j - 3 (0.018083+0.0350164j) - 4 (-0.0014459-0.0027999j) - 5 (0.030054+0.0581974j) - 1 (0.0108474+0.2594637j) - 2 (0.0108474+0.2594637j) - 3 0j - 4 (-0-0j) - 5 (-0-0j) - 1 (-0+0j) - 2 (-0+0j) - 3 (0.1101443-0.1030143j) - 4 (-0+0j) - 5 (-0.0610202+0.0570701j) - 1 0j - 2 0j - 3 (0.0419385+0.0341396j) - 4 0j - 5 (-0.023234-0.0189134j) - 1 (0.2075556-0.5087097j) - 2 (0.2075569-0.5087128j) - 3 -0j - 4 -0j - 5 -0j - 1 (-0.0488447-0.5466085j) - 2 (0.0488444+0.5466051j) - 3 0j - 4 (-0-0j) - 5 0j - 1 -0j - 2 (-0+0j) - 3 (-0.0016891+0.0217617j) - 4 (-0.0458433+0.5906224j) - 5 (-0.0028073+0.0361681j) - 1 (-0+0j) - 2 -0j - 3 (-0.0986893+0.3743612j) - 4 (0.0021858-0.0082913j) - 5 (-0.1640223+0.6221909j) -10 22 - 1 -0j - 2 (0.3101743-0.162917j) - 3 (-0.0960085+0.0504279j) - 4 (-0+0j) - 5 (0.1595669-0.0838115j) - 1 (-0+0j) - 2 (0.4128783-0.357759j) - 3 (0.0617815-0.0535337j) - 4 (1e-07-1e-07j) - 5 (-0.1026826+0.0889745j) - 1 -0j - 2 (2e-07+8e-07j) - 3 (0.0910885+0.4814947j) - 4 (-0-0j) - 5 (0.0504632+0.2667488j) - 1 (-0.2398135+0.2135729j) - 2 0j - 3 0j - 4 (0.2398136-0.213573j) - 5 (-0+0j) - 1 (0.417256-0.0047748j) - 2 (-0+1e-07j) - 3 (-0-0j) - 4 (0.4172561-0.0047748j) - 5 (-0-0j) - 1 -0j - 2 (0.0030097-0.000934j) - 3 (0.0376389-0.0116813j) - 4 (-0-0j) - 5 (-0.062556+0.0194147j) - 1 (-0.2453607-0.0850719j) - 2 0j - 3 0j - 4 (0.2453607+0.0850719j) - 5 0j - 1 -0j - 2 -0j - 3 (0.1179537-0.0939715j) - 4 (-0+0j) - 5 (0.0653466-0.0520604j) - 1 -0j - 2 -0j - 3 (-0.0003925+0.0540758j) - 4 0j - 5 (-0.0002174+0.0299581j) - 1 (0.4719832-0.2812416j) - 2 -0j - 3 (-0+0j) - 4 (-0.471986+0.2812433j) - 5 -0j - 1 (-0.5215511-0.1707368j) - 2 (-0-0j) - 3 (-0-0j) - 4 (-0.5215479-0.1707357j) - 5 0j - 1 (-0+0j) - 2 
(0.5892905-0.0606063j) - 3 (-0.0217126+0.0022331j) - 4 (-0+0j) - 5 (0.0360866-0.0037114j) - 1 (-0+0j) - 2 (0.0069465-0.0050269j) - 3 (0.3136418-0.2269685j) - 4 (-0+0j) - 5 (-0.5212749+0.3772232j) -10 21 - 1 (0.0858148-0.1730027j) - 2 (-0.0858148+0.1730027j) - 3 (-1e-07+0j) - 4 (-0.0858148+0.1730027j) - 5 -0j - 1 (-0.0191865-0.0028874j) - 2 (-0.0663769-0.0099853j) - 3 (0.1651927+0.0248601j) - 4 (0.0471902+0.0070978j) - 5 (-0.5645275-0.084918j) - 1 (-0.0653869-0.0109997j) - 2 (-0.016123-0.0027134j) - 3 (0.5629683+0.0947052j) - 4 (-0.0492641-0.0082863j) - 5 (0.1647399+0.0277019j) - 1 (-0.186063+0.2900667j) - 2 (0.1860631-0.2900667j) - 3 (1e-07-0j) - 4 (0.186063-0.2900667j) - 5 (-0+0j) - 1 (-0.2799825+0.4182455j) - 2 (-0.1399322+0.2092216j) - 3 (-0.0592601+0.0885243j) - 4 (-0.1400503+0.209024j) - 5 (-1.44e-05-2.41e-05j) - 1 (0.0001241+4.75e-05j) - 2 (-0.3148297+0.3014086j) - 3 (2.63e-05+1e-05j) - 4 (0.3149538-0.3013612j) - 5 (0.0769594-0.0736584j) - 1 (0.0168564-0.0144923j) - 2 (0.0093375-0.0080279j) - 3 (0.0824409-0.0708788j) - 4 (0.0075189-0.0064645j) - 5 (-0.005135+0.0044145j) - 1 (0.0013192-0.0004206j) - 2 (-0.0176823+0.0056377j) - 3 (0.0064517-0.0020573j) - 4 (0.0190014-0.0060584j) - 5 (0.1035836-0.0330263j) - 1 0j - 2 -0j - 3 (-0+0j) - 4 (-0+0j) - 5 -0j - 1 (0.3619824+0.0526599j) - 2 (-0.3619873-0.0526607j) - 3 (2e-07+0j) - 4 (-0.3619872-0.0526606j) - 5 (-0-0j) - 1 (0.0047868+0.0019665j) - 2 (0.4254299+0.1747695j) - 3 (-0.0003717-0.0001527j) - 4 (-0.4206433-0.1728031j) - 5 (0.0379359+0.0155843j) - 1 (-0.4923445+0.1910013j) - 2 (-0.2419909+0.0938785j) - 3 (0.0382358-0.0148333j) - 4 (-0.2503472+0.0971203j) - 5 (0.0003747-0.0001454j) -10 21 - 1 (0.1776099-0.075821j) - 2 (0.1776099-0.0758211j) - 3 (1e-07-0j) - 4 (-0.1776099+0.0758211j) - 5 (-0+0j) - 1 (-0.0011601-0.0195589j) - 2 (0.0038281+0.0670614j) - 3 (-0.0099881-0.1683987j) - 4 (0.0026681+0.0475023j) - 5 (0.0322919+0.5694808j) - 1 (-0.0559756-0.0354363j) - 2 (0.0136747+0.0086048j) - 3 (-0.4819394-0.3050993j) - 4 
(-0.0423012-0.0268316j) - 5 (-0.1422986-0.0906033j) - 1 (-0.3083103+0.1539574j) - 2 (-0.3083103+0.1539574j) - 3 (-1e-07+1e-07j) - 4 (0.3083102-0.1539574j) - 5 (1e-07+0j) - 1 (0.0946255-0.4856717j) - 2 (-0.0319254+0.1645438j) - 3 (-0.0200281+0.1027955j) - 4 (0.0627002-0.321128j) - 5 (-0.0037607+0.0191346j) - 1 (-0.0018957-0.0921139j) - 2 (0.0090616+0.474493j) - 3 (0.0004012+0.0194965j) - 4 (0.0071658+0.3823791j) - 5 (-0.001983-0.1047096j) - 1 (-0.0162873+0.0038455j) - 2 (-0.0042442+0.0010021j) - 3 (0.0796578-0.0188076j) - 4 (-0.0205315+0.0048476j) - 5 (0.0699588-0.0165177j) - 1 (0.0146504+0.0011755j) - 2 (-0.0217719-0.0017468j) - 3 (-0.0716521-0.0057489j) - 4 (-0.0071214-0.0005714j) - 5 (0.0815858+0.0065458j) - 1 -0j - 2 -0j - 3 (-0+0j) - 4 -0j - 5 -0j - 1 (0.3162043+0.1838998j) - 2 (0.3162086+0.1839023j) - 3 (-2e-07-1e-07j) - 4 (-0.3162085-0.1839023j) - 5 0j - 1 (-0.002198-0.0012783j) - 2 (0.3964525+0.230579j) - 3 (-0.0001707-9.93e-05j) - 4 (0.3942547+0.2293009j) - 5 (0.0354534+0.0206199j) - 1 (0.3660974+0.3806279j) - 2 (-0.18152-0.1887244j) - 3 (0.0284314+0.0295598j) - 4 (0.1845727+0.1918985j) - 5 (0.0001369+0.0001423j) -10 22 - 1 (-0-1e-07j) - 2 (-0.3442878-0.0649297j) - 3 (-0.1065677-0.0200978j) - 4 0j - 5 (0.1771163+0.0334026j) - 1 (1e-07+1e-07j) - 2 (0.5446573-0.0425236j) - 3 (-0.0815007+0.0063631j) - 4 (-1e-07-1e-07j) - 5 (0.1354561-0.0105757j) - 1 1e-07j - 2 (-7e-07-5e-07j) - 3 (0.3857673+0.3021884j) - 4 (-0-0j) - 5 (0.2137158+0.1674128j) - 1 (-0.0788943-0.3112871j) - 2 0j - 3 (1e-07-0j) - 4 (-0.0788943-0.311287j) - 5 (-0+0j) - 1 (-0.3575089+0.215204j) - 2 (1e-07-0j) - 3 (1e-07-0j) - 4 (0.3575089-0.2152041j) - 5 (-1e-07+0j) - 1 1e-07j - 2 (0.0017235+0.0026381j) - 3 (-0.0215556-0.0329924j) - 4 (-0-1e-07j) - 5 (0.0358254+0.0548336j) - 1 (0.0313262-0.257794j) - 2 (-0+0j) - 3 -0j - 4 (0.0313262-0.257794j) - 5 -0j - 1 0j - 2 0j - 3 (-0.1493548-0.0209011j) - 4 0j - 5 (-0.0827429-0.0115793j) - 1 -0j - 2 -0j - 3 (0.0183594-0.0508653j) - 4 -0j - 5 
(0.0101712-0.0281795j) - 1 (0.3541611-0.4200415j) - 2 -0j - 3 -0j - 4 (0.3541633-0.420044j) - 5 (-0+0j) - 1 (-0.5009882+0.2240031j) - 2 (-0+0j) - 3 -0j - 4 (0.5009852-0.2240017j) - 5 (-0+0j) - 1 -0j - 2 (-0.5820188+0.1104112j) - 3 (-0.0214447+0.0040681j) - 4 (-0+0j) - 5 (0.0356413-0.0067613j) - 1 (-0+0j) - 2 (0.0060497-0.0060765j) - 3 (-0.2731518+0.274361j) - 4 -0j - 5 (0.4539802-0.4559899j) -10 21 - 1 (-0.1286532+0.1440226j) - 2 (0.1286532-0.1440225j) - 3 (-1e-07+0j) - 4 (-0.1286532+0.1440226j) - 5 0j - 1 (0.0188409-0.0045178j) - 2 (0.0652665-0.0156517j) - 3 (0.1622172-0.0388976j) - 4 (0.0464254-0.0111339j) - 5 (-0.5552056+0.1331479j) - 1 (0.0448326-0.0488625j) - 2 (0.0110728-0.0120674j) - 3 (0.3859999-0.4206967j) - 4 (-0.03376+0.0367953j) - 5 (0.1127752-0.1229195j) - 1 (0.2112891-0.2722409j) - 2 (-0.2112891+0.2722408j) - 3 (1e-07-1e-07j) - 4 (0.2112891-0.2722409j) - 5 (-1e-07-0j) - 1 (0.2468755-0.4286337j) - 2 (0.163549-0.2841377j) - 3 (-0.0522528+0.090723j) - 4 (-0.0833265+0.144496j) - 5 (0.0098032-0.0170642j) - 1 (0.0758111-0.0538314j) - 2 (-0.3110985+0.2214831j) - 3 (-0.0160459+0.0113938j) - 4 (-0.3869096+0.2753146j) - 5 (-0.0852965+0.0607086j) - 1 (-0.0154151+0.0060212j) - 2 (-0.0197322+0.0077074j) - 3 (0.075392-0.0294484j) - 4 (-0.004317+0.0016862j) - 5 (-0.0679076+0.0265245j) - 1 (-0.0093844+0.0115817j) - 2 (0.0043307-0.0053446j) - 3 (0.0458969-0.0566433j) - 4 (0.0137151-0.0169263j) - 5 (0.0509559-0.0628862j) - 1 -0j - 2 -0j - 3 (-0+0j) - 4 -0j - 5 (-0+0j) - 1 (-0.1521792+0.3326347j) - 2 (0.1521812-0.3326391j) - 3 (1e-07-2e-07j) - 4 (-0.1521812+0.3326391j) - 5 (-0+0j) - 1 (0.0029886-0.0071622j) - 2 (0.1776027-0.4256265j) - 3 (0.0002321-0.0005562j) - 4 (0.1746142-0.4184644j) - 5 (-0.0157926+0.037847j) - 1 (0.4906235-0.195294j) - 2 (0.2390641-0.0951602j) - 3 (0.0381021-0.0151667j) - 4 (-0.251553+0.1001313j) - 5 (0.00056-0.0002229j) -10 21 - 1 (-0.191392-0.025753j) - 2 (-0.191392-0.025753j) - 3 (1e-07+0j) - 4 (-0.191392-0.025753j) - 5 (-0-0j) - 1 
(0.0133511+0.0142481j) - 2 (-0.0459177-0.0490024j) - 3 (-0.1149507-0.1226732j) - 4 (0.0325665+0.0347542j) - 5 (0.390135+0.4163429j) - 1 (-0.0645688-0.0149168j) - 2 (0.0158085+0.0036521j) - 3 (0.5559247+0.1284306j) - 4 (0.0487605+0.0112647j) - 5 (0.1638+0.0378409j) - 1 (-0.3319918-0.09241j) - 2 (-0.3319918-0.09241j) - 3 (1e-07+0j) - 4 (-0.3319918-0.09241j) - 5 (-0-0j) - 1 (-0.4264657+0.267294j) - 2 (0.2137561-0.1339152j) - 3 (-0.0902642+0.0565745j) - 4 (0.2127096-0.1333788j) - 5 (0.0001279-6.56e-05j) - 1 (-0.0006788-1.4e-05j) - 2 (-0.4330527-0.046475j) - 3 (-0.0001437-3e-06j) - 4 (0.4337315+0.0464891j) - 5 (-0.1059209-0.0113602j) - 1 (0.0002977+0.0018608j) - 2 (-0.0031864-0.0199086j) - 3 (0.001456+0.009101j) - 4 (0.0028887+0.0180478j) - 5 (-0.0171543-0.1071772j) - 1 (0.0219872+0.0030155j) - 2 (-0.0093767-0.0012861j) - 3 (0.1075346+0.0147484j) - 4 (-0.0126105-0.0017294j) - 5 (0.0091313+0.0012517j) - 1 (-0+0j) - 2 (-0+0j) - 3 -0j - 4 (-0-0j) - 5 -0j - 1 (-0.3644743+0.0310289j) - 2 (-0.3644792+0.0310293j) - 3 (-3e-07+0j) - 4 (-0.3644791+0.0310293j) - 5 -0j - 1 (0.0046487+0.0021692j) - 2 (-0.4167665-0.1944743j) - 3 (-0.000361-0.0001685j) - 4 (0.412118+0.1923052j) - 5 (0.0371652+0.0173423j) - 1 (0.4189292-0.3215328j) - 2 (-0.2059377+0.1580595j) - 3 (-0.0325343+0.0249704j) - 4 (-0.2129861+0.1634692j) - 5 (-0.000316+0.0002426j) diff --git a/test/plovasp/proj_group/runtest.sh b/test/plovasp/proj_group/runtest.sh deleted file mode 100755 index 10b8ee04..00000000 --- a/test/plovasp/proj_group/runtest.sh +++ /dev/null @@ -1 +0,0 @@ -PYTHONPATH=../..:../../../../../c:$PYTHONPATH python $1 diff --git a/test/plovasp/proj_shell/runtest.sh b/test/plovasp/proj_shell/runtest.sh deleted file mode 100755 index 10b8ee04..00000000 --- a/test/plovasp/proj_shell/runtest.sh +++ /dev/null @@ -1 +0,0 @@ -PYTHONPATH=../..:../../../../../c:$PYTHONPATH python $1 diff --git a/test/plovasp/run_all.sh b/test/plovasp/run_all.sh deleted file mode 100755 index cb9a28eb..00000000 --- 
a/test/plovasp/run_all.sh +++ /dev/null @@ -1 +0,0 @@ -PYTHONPATH=../:../../../../c:$PYTHONPATH python test_all.py diff --git a/test/plovasp/vaspio/__init__.py b/test/plovasp/vaspio/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/test/plovasp/vaspio/runtest.sh b/test/plovasp/vaspio/runtest.sh deleted file mode 100755 index 4537817b..00000000 --- a/test/plovasp/vaspio/runtest.sh +++ /dev/null @@ -1 +0,0 @@ -PYTHONPATH=../..:../../../../c:$PYTHONPATH python $1 diff --git a/test/python/CMakeLists.txt b/test/python/CMakeLists.txt new file mode 100644 index 00000000..2097f00a --- /dev/null +++ b/test/python/CMakeLists.txt @@ -0,0 +1,19 @@ +# Copy h5 files to binary dir +file(GLOB_RECURSE all_h5_ref_files RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} *.h5) +foreach(file ${all_h5_ref_files}) + configure_file(${file} ${file} COPYONLY) +endforeach() + +# Copy other files +FILE(COPY SrVO3.pmat SrVO3.struct SrVO3.outputs SrVO3.oubwin SrVO3.ctqmcout SrVO3.symqmc SrVO3.sympar SrVO3.parproj hk_convert_hamiltonian.hk LaVO3-Pnma_hr.dat LaVO3-Pnma.inp LaVO3-Pnma_ef_hr.dat LaVO3-Pnma_ef.inp DESTINATION ${CMAKE_CURRENT_BINARY_DIR}) + + +# List of all tests +file(GLOB all_tests RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} *.py) + +foreach(test ${all_tests}) + get_filename_component(test_name ${test} NAME_WE) + get_filename_component(test_dir ${test} DIRECTORY) + add_test(NAME Py_${test_name} COMMAND ${TRIQS_PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/${test_dir}/${test_name}.py WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/${test_dir}) + set_property(TEST Py_${test_name} APPEND PROPERTY ENVIRONMENT PYTHONPATH=${PROJECT_BINARY_DIR}/python:$ENV{PYTHONPATH} ${SANITIZER_RT_PRELOAD}) +endforeach() diff --git a/test/LaVO3-Pnma.inp b/test/python/LaVO3-Pnma.inp similarity index 100% rename from test/LaVO3-Pnma.inp rename to test/python/LaVO3-Pnma.inp diff --git a/test/LaVO3-Pnma_ef.inp b/test/python/LaVO3-Pnma_ef.inp similarity index 100% rename from test/LaVO3-Pnma_ef.inp rename 
to test/python/LaVO3-Pnma_ef.inp diff --git a/test/LaVO3-Pnma_ef_hr.dat b/test/python/LaVO3-Pnma_ef_hr.dat similarity index 100% rename from test/LaVO3-Pnma_ef_hr.dat rename to test/python/LaVO3-Pnma_ef_hr.dat diff --git a/test/LaVO3-Pnma_hr.dat b/test/python/LaVO3-Pnma_hr.dat similarity index 100% rename from test/LaVO3-Pnma_hr.dat rename to test/python/LaVO3-Pnma_hr.dat diff --git a/test/SrIrO3_rot.h5 b/test/python/SrIrO3_rot.h5 similarity index 100% rename from test/SrIrO3_rot.h5 rename to test/python/SrIrO3_rot.h5 diff --git a/test/SrVO3.ctqmcout b/test/python/SrVO3.ctqmcout similarity index 100% rename from test/SrVO3.ctqmcout rename to test/python/SrVO3.ctqmcout diff --git a/test/SrVO3.oubwin b/test/python/SrVO3.oubwin similarity index 100% rename from test/SrVO3.oubwin rename to test/python/SrVO3.oubwin diff --git a/test/SrVO3.outputs b/test/python/SrVO3.outputs similarity index 100% rename from test/SrVO3.outputs rename to test/python/SrVO3.outputs diff --git a/test/SrVO3.parproj b/test/python/SrVO3.parproj similarity index 100% rename from test/SrVO3.parproj rename to test/python/SrVO3.parproj diff --git a/test/SrVO3.pmat b/test/python/SrVO3.pmat similarity index 100% rename from test/SrVO3.pmat rename to test/python/SrVO3.pmat diff --git a/test/SrVO3.h5 b/test/python/SrVO3.ref.h5 similarity index 100% rename from test/SrVO3.h5 rename to test/python/SrVO3.ref.h5 diff --git a/test/SrVO3.struct b/test/python/SrVO3.struct similarity index 100% rename from test/SrVO3.struct rename to test/python/SrVO3.struct diff --git a/test/SrVO3.sympar b/test/python/SrVO3.sympar similarity index 100% rename from test/SrVO3.sympar rename to test/python/SrVO3.sympar diff --git a/test/SrVO3.symqmc b/test/python/SrVO3.symqmc similarity index 100% rename from test/SrVO3.symqmc rename to test/python/SrVO3.symqmc diff --git a/test/SrVO3_Sigma.h5 b/test/python/SrVO3_Sigma.h5 similarity index 100% rename from test/SrVO3_Sigma.h5 rename to test/python/SrVO3_Sigma.h5 diff --git 
a/test/analyse_block_structure_from_gf.py b/test/python/analyse_block_structure_from_gf.py similarity index 97% rename from test/analyse_block_structure_from_gf.py rename to test/python/analyse_block_structure_from_gf.py index 442a7fff..328425c4 100644 --- a/test/analyse_block_structure_from_gf.py +++ b/test/python/analyse_block_structure_from_gf.py @@ -1,9 +1,9 @@ -from pytriqs.gf import * +from triqs.gf import * from triqs_dft_tools.sumk_dft import SumkDFT from scipy.linalg import expm import numpy as np -from pytriqs.utility.comparison_tests import assert_gfs_are_close, assert_arrays_are_close, assert_block_gfs_are_close -from pytriqs.archive import * +from triqs.utility.comparison_tests import assert_gfs_are_close, assert_arrays_are_close, assert_block_gfs_are_close +from h5 import * import itertools # The full test checks all different possible combinations of conjugated @@ -212,7 +212,7 @@ for conjugate in conjugate_values: # first, construct the old format of the deg shells for ish in range(len(SK.deg_shells)): for gr in range(len(SK.deg_shells[ish])): - SK.deg_shells[ish][gr] = SK.deg_shells[ish][gr].keys() + SK.deg_shells[ish][gr] = list(SK.deg_shells[ish][gr].keys()) # symmetrizing the GF as is has to leave it unchanged G_new_symm << G_pre_transform diff --git a/test/analyse_block_structure_from_gf.ref.h5 b/test/python/analyse_block_structure_from_gf.ref.h5 similarity index 100% rename from test/analyse_block_structure_from_gf.ref.h5 rename to test/python/analyse_block_structure_from_gf.ref.h5 diff --git a/test/analyse_block_structure_from_gf2.py b/test/python/analyse_block_structure_from_gf2.py similarity index 87% rename from test/analyse_block_structure_from_gf2.py rename to test/python/analyse_block_structure_from_gf2.py index 19553af8..d08dad30 100644 --- a/test/analyse_block_structure_from_gf2.py +++ b/test/python/analyse_block_structure_from_gf2.py @@ -1,7 +1,7 @@ -from pytriqs.gf import * +from triqs.gf import * from triqs_dft_tools.sumk_dft 
import SumkDFT import numpy as np -from pytriqs.utility.comparison_tests import assert_block_gfs_are_close +from triqs.utility.comparison_tests import assert_block_gfs_are_close # here we test the SK.analyse_block_structure_from_gf function # with GfReFreq, GfReTime @@ -35,13 +35,13 @@ Hloc[8:,8:] = Hloc1 V = get_random_hermitian(2) # the hopping elements from impurity to bath b1 = np.random.rand() # the bath energy of the first bath level b2 = np.random.rand() # the bath energy of the second bath level -delta = GfReFreq(window=(-10,10), indices=range(2), n_points=1001) +delta = GfReFreq(window=(-10,10), indices=list(range(2)), n_points=1001) delta[0,0] << (V[0,0]*V[0,0].conjugate()*inverse(Omega-b1)+V[0,1]*V[0,1].conjugate()*inverse(Omega-b2+0.02j))/2.0 delta[0,1] << (V[0,0]*V[1,0].conjugate()*inverse(Omega-b1)+V[0,1]*V[1,1].conjugate()*inverse(Omega-b2+0.02j))/2.0 delta[1,0] << (V[1,0]*V[0,0].conjugate()*inverse(Omega-b1)+V[1,1]*V[0,1].conjugate()*inverse(Omega-b2+0.02j))/2.0 delta[1,1] << (V[1,0]*V[1,0].conjugate()*inverse(Omega-b1)+V[1,1]*V[1,1].conjugate()*inverse(Omega-b2+0.02j))/2.0 # construct G -G = BlockGf(name_block_generator=[('ud',GfReFreq(window=(-10,10), indices=range(10), n_points=1001))], make_copies=False) +G = BlockGf(name_block_generator=[('ud',GfReFreq(window=(-10,10), indices=list(range(10)), n_points=1001))], make_copies=False) for i in range(0,10,2): G['ud'][i:i+2,i:i+2] << inverse(Omega-delta+0.02j) G['ud'] << inverse(inverse(G['ud']) - Hloc) @@ -58,9 +58,9 @@ assert SK.gf_struct_sumk == [[('ud', [0, 1, 2, 3, 4, 5, 6, 7, 8, 9])], [('ud', [ "wrong gf_struct_sumk" for i in range(5): assert 'ud_{}'.format(i) in SK.gf_struct_solver[0], "missing block" - assert SK.gf_struct_solver[0]['ud_{}'.format(i)] == range(2), "wrong block size" + assert SK.gf_struct_solver[0]['ud_{}'.format(i)] == list(range(2)), "wrong block size" for i in range(10): - assert SK.sumk_to_solver[0]['ud',i] == ('ud_{}'.format(i/2), i%2), "wrong mapping" + assert 
SK.sumk_to_solver[0]['ud',i] == ('ud_{}'.format(i//2), i%2), "wrong mapping" assert len(SK.deg_shells[0]) == 2, "wrong number of equivalent groups found" assert sorted([len(d) for d in SK.deg_shells[0]]) == [2,3], "wrong number of members in the equivalent groups found" @@ -90,7 +90,7 @@ Gt = BlockGf(name_block_generator = [(name, known_moments = np.zeros((2,10,10), dtype=np.complex) known_moments[1,:] = np.eye(10) -Gt['ud'].set_from_inverse_fourier(G['ud'], known_moments) +Gt['ud'].set_from_fourier(G['ud'], known_moments) G_new = SK.analyse_block_structure_from_gf([Gt]) G_new_symm = G_new[0].copy() @@ -101,9 +101,9 @@ assert SK.gf_struct_sumk == [[('ud', [0, 1, 2, 3, 4, 5, 6, 7, 8, 9])], [('ud', [ "wrong gf_struct_sumk" for i in range(5): assert 'ud_{}'.format(i) in SK.gf_struct_solver[0], "missing block" - assert SK.gf_struct_solver[0]['ud_{}'.format(i)] == range(2), "wrong block size" + assert SK.gf_struct_solver[0]['ud_{}'.format(i)] == list(range(2)), "wrong block size" for i in range(10): - assert SK.sumk_to_solver[0]['ud',i] == ('ud_{}'.format(i/2), i%2), "wrong mapping" + assert SK.sumk_to_solver[0]['ud',i] == ('ud_{}'.format(i//2), i%2), "wrong mapping" assert len(SK.deg_shells[0]) == 2, "wrong number of equivalent groups found" assert sorted([len(d) for d in SK.deg_shells[0]]) == [2,3], "wrong number of members in the equivalent groups found" diff --git a/test/basis_transformation.py b/test/python/basis_transformation.py similarity index 94% rename from test/basis_transformation.py rename to test/python/basis_transformation.py index 05586a2d..8f0abcea 100644 --- a/test/basis_transformation.py +++ b/test/python/basis_transformation.py @@ -1,4 +1,4 @@ -from pytriqs.utility.comparison_tests import * +from triqs.utility.comparison_tests import * from triqs_dft_tools.sumk_dft import * import numpy as np @@ -17,7 +17,7 @@ def call_diagonalize(SK): SK.block_structure.transformation = None return t_sumk_eal, t_solver_eal, t_sumk_dm, t_solver_dm -SK = 
SumkDFT(hdf_file = 'SrVO3.h5', use_dft_blocks=True) +SK = SumkDFT(hdf_file = 'SrVO3.ref.h5', use_dft_blocks=True) # only eal and dm are allowed SK.block_structure.transformation = None @@ -71,9 +71,9 @@ for dmi in dm: # Test convert_operator -SK = SumkDFT(hdf_file = 'SrVO3.h5', use_dft_blocks=True) +SK = SumkDFT(hdf_file = 'SrVO3.ref.h5', use_dft_blocks=True) BS = SK.block_structure -from pytriqs.operators.util import h_int_slater, U_matrix, t2g_submatrix, transform_U_matrix +from triqs.operators.util import h_int_slater, U_matrix, t2g_submatrix, transform_U_matrix U3x3 = t2g_submatrix(U_matrix(2, U_int=2, J_hund=0.2, basis='spheric')) @@ -93,7 +93,7 @@ H3 = BS.convert_operator(h_int_slater(spin_names=['up','down'], orb_names=[0,1,2 for op in H3: for c_op in op[0]: assert(BS.gf_struct_solver_dict[0][c_op[1][0]][c_op[1][1]] is not None) # This crashes with a key error if the operator structure is not the solver structure - + U_trafod = transform_U_matrix(U3x3, BS.transformation[0]['up'].conjugate()) # The notorious .conjugate() H4 = h_int_slater(spin_names=['up','down'], orb_names=range(3), U_matrix=U_trafod, map_operator_structure=BS.sumk_to_solver[0]) assert( H4 == H3 ) # check that convert_operator does the same as transform_U_matrix diff --git a/test/blockstructure.in.h5 b/test/python/blockstructure.in.h5 similarity index 100% rename from test/blockstructure.in.h5 rename to test/python/blockstructure.in.h5 diff --git a/test/blockstructure.py b/test/python/blockstructure.py similarity index 87% rename from test/blockstructure.py rename to test/python/blockstructure.py index 30aec6f6..34f2a808 100644 --- a/test/blockstructure.py +++ b/test/python/blockstructure.py @@ -1,11 +1,10 @@ from triqs_dft_tools.sumk_dft import * -from pytriqs.utility.h5diff import h5diff -from pytriqs.gf import * -from pytriqs.utility.comparison_tests import assert_block_gfs_are_close +from triqs.utility.h5diff import h5diff, compare, failures +from triqs.gf import * +from 
triqs.utility.comparison_tests import assert_block_gfs_are_close from scipy.linalg import expm from triqs_dft_tools.block_structure import BlockStructure import numpy as np -from pytriqs.utility.h5diff import compare, failures def cmp(a, b, precision=1.e-15): @@ -17,28 +16,28 @@ SK = SumkDFT('blockstructure.in.h5', use_dft_blocks=True) original_bs = SK.block_structure cmp(original_bs.effective_transformation_sumk, - [{'down': np.array([[1., 0., 0.], - [0., 1., 0.], - [0., 0., 1.]]), - 'up': np.array([[1., 0., 0.], + [{'up': np.array([[1., 0., 0.], [0., 1., 0.], - [0., 0., 1.]])}]) + [0., 0., 1.]]), + 'down': np.array([[1., 0., 0.], + [0., 1., 0.], + [0., 0., 1.]])}]) cmp(original_bs.effective_transformation_solver, [{'up_0': np.array([[1., 0., 0.], [0., 1., 0.]]), 'up_1': np.array([[0., 0., 1.]]), - 'down_1': np.array([[0., 0., 1.]]), 'down_0': np.array([[1., 0., 0.], - [0., 1., 0.]])}]) + [0., 1., 0.]]), + 'down_1': np.array([[0., 0., 1.]])}]) created_matrix = original_bs.create_matrix() cmp(created_matrix, {'up_0': np.array([[0. + 0.j, 0. + 0.j], [0. + 0.j, 0. + 0.j]]), 'up_1': np.array([[0. + 0.j]]), - 'down_1': np.array([[0. + 0.j]]), 'down_0': np.array([[0. + 0.j, 0. + 0.j], - [0. + 0.j, 0. + 0.j]])}) + [0. + 0.j, 0. + 0.j]]), + 'down_1': np.array([[0. 
+ 0.j]])}) # check pick_gf_struct_solver @@ -46,12 +45,13 @@ pick1 = original_bs.copy() pick1.pick_gf_struct_solver([{'up_0': [1], 'up_1': [0], 'down_1': [0]}]) cmp(pick1.effective_transformation_sumk, - [{'down': np.array([[0., 0., 0.], - [0., 0., 0.], - [0., 0., 1.]]), - 'up': np.array([[0., 0., 0.], + [{'up': np.array([[0., 0., 0.], [0., 1., 0.], - [0., 0., 1.]])}]) + [0., 0., 1.]]), + 'down': np.array([[0., 0., 0.], + [0., 0., 0.], + [0., 0., 1.]])}]) + cmp(pick1.effective_transformation_solver, [{'up_0': np.array([[0., 1., 0.]]), 'up_1': np.array([[0., 0., 1.]]), @@ -72,12 +72,13 @@ sk_pick1 = BlockStructure(gf_struct_sumk=SK.gf_struct_sumk, assert sk_pick1 == pick1, 'constructing block structure from SumkDFT properties failed' cmp(pick1.effective_transformation_sumk, - [{'down': np.array([[0., 0., 0.], - [0., 0., 0.], - [0., 0., 1.]]), - 'up': np.array([[0., 0., 0.], + [{'up': np.array([[0., 0., 0.], [0., 1., 0.], - [0., 0., 1.]])}]) + [0., 0., 1.]]), + 'down': np.array([[0., 0., 0.], + [0., 0., 0.], + [0., 0., 1.]])}]) + cmp(pick1.effective_transformation_solver, [{'up_0': np.array([[0., 1., 0.]]), 'up_1': np.array([[0., 0., 1.]]), @@ -88,12 +89,13 @@ pick2 = original_bs.copy() pick2.pick_gf_struct_sumk([{'up': [1, 2], 'down': [0, 1]}]) cmp(pick2.effective_transformation_sumk, - [{'down': np.array([[1., 0., 0.], - [0., 1., 0.], - [0., 0., 0.]]), - 'up': np.array([[0., 0., 0.], + [{'up': np.array([[0., 0., 0.], [0., 1., 0.], - [0., 0., 1.]])}]) + [0., 0., 1.]]), + 'down': np.array([[1., 0., 0.], + [0., 1., 0.], + [0., 0., 0.]])}]) + cmp(pick2.effective_transformation_solver, [{'up_0': np.array([[0., 1., 0.]]), 'up_1': np.array([[0., 0., 1.]]), @@ -103,12 +105,13 @@ cmp(pick2.effective_transformation_solver, pick3 = pick2.copy() pick3.transformation = [np.reshape(range(9), (3, 3))] cmp(pick3.effective_transformation_sumk, - [{'down': np.array([[0, 1, 2], - [3, 4, 5], - [0, 0, 0]]), - 'up': np.array([[0, 0, 0], + [{'up': np.array([[0, 0, 0], [3, 4, 5], - [6, 7, 
8]])}]) + [6, 7, 8]]), + 'down': np.array([[0, 1, 2], + [3, 4, 5], + [0, 0, 0]])}]) + cmp(pick3.effective_transformation_solver, [{'up_0': np.array([[3, 4, 5]]), 'up_1': np.array([[6, 7, 8]]), @@ -166,8 +169,8 @@ cmp(m2, {'down': np.array([[0. + 0.j, 0. + 0.j, 0. + 0.j], [0. + 0.j, 0. + 0.j, 0. + 0.j], [0. + 0.j, 0. + 0.j, 0. + 0.j]]), - 'up_0': np.array([[0. + 0.j]]), - 'down_1': np.array([[0. + 0.j]])}) + 'down_1': np.array([[0. + 0.j]]), + 'up_0': np.array([[0. + 0.j]])}) # check full_structure full = BlockStructure.full_structure( @@ -230,6 +233,7 @@ if mpi.is_master_node(): with HDFArchive('blockstructure.out.h5', 'r') as ar,\ HDFArchive('blockstructure.ref.h5', 'r') as ar2: for k in ar2: + print(k) if isinstance(ar[k], BlockGf): assert_block_gfs_are_close(ar[k], ar2[k], 1.e-6) else: diff --git a/test/python/blockstructure.ref.h5 b/test/python/blockstructure.ref.h5 new file mode 100644 index 00000000..b0df2c0e Binary files /dev/null and b/test/python/blockstructure.ref.h5 differ diff --git a/test/hk_convert.py b/test/python/hk_convert.py similarity index 91% rename from test/hk_convert.py rename to test/python/hk_convert.py index f4c048c7..f2bf37b4 100644 --- a/test/hk_convert.py +++ b/test/python/hk_convert.py @@ -21,9 +21,9 @@ ################################################################################ -from pytriqs.archive import * -from pytriqs.utility.h5diff import h5diff -import pytriqs.utility.mpi as mpi +from h5 import * +from triqs.utility.h5diff import h5diff +import triqs.utility.mpi as mpi from triqs_dft_tools.converters import * diff --git a/test/hk_convert.ref.h5 b/test/python/hk_convert.ref.h5 similarity index 100% rename from test/hk_convert.ref.h5 rename to test/python/hk_convert.ref.h5 diff --git a/test/hk_convert_hamiltonian.hk b/test/python/hk_convert_hamiltonian.hk similarity index 100% rename from test/hk_convert_hamiltonian.hk rename to test/python/hk_convert_hamiltonian.hk diff --git a/c++/plovasp/.gitignore 
b/test/python/plovasp/.gitignore similarity index 100% rename from c++/plovasp/.gitignore rename to test/python/plovasp/.gitignore diff --git a/test/plovasp/CMakeLists.txt b/test/python/plovasp/CMakeLists.txt similarity index 52% rename from test/plovasp/CMakeLists.txt rename to test/python/plovasp/CMakeLists.txt index 3004dadd..88e68eee 100644 --- a/test/plovasp/CMakeLists.txt +++ b/test/python/plovasp/CMakeLists.txt @@ -12,8 +12,9 @@ set(all_tests FILE(COPY ${all_tests} DESTINATION ${CMAKE_CURRENT_BINARY_DIR}) FILE(COPY run_suite.py DESTINATION ${CMAKE_CURRENT_BINARY_DIR}) -foreach(t ${all_tests}) - add_test(NAME ${t} COMMAND ${python_executable} run_suite.py ${t}) +foreach(test_name ${all_tests}) + add_test(NAME Py_${test_name} COMMAND ${TRIQS_PYTHON_EXECUTABLE} run_suite.py ${t}) + set_property(TEST Py_${test_name} APPEND PROPERTY ENVIRONMENT PYTHONPATH=${PROJECT_BINARY_DIR}/python:$ENV{PYTHONPATH} ${SANITIZER_RT_PRELOAD}) endforeach() -set_property(TEST ${all_tests} PROPERTY ENVIRONMENT PYTHONPATH=${CMAKE_BINARY_DIR}/python:$ENV{PYTHONPATH} ) + diff --git a/test/plovasp/proj_shell/mytest.py b/test/python/plovasp/atm/mytest.py similarity index 91% rename from test/plovasp/proj_shell/mytest.py rename to test/python/plovasp/atm/mytest.py index fb0c64e8..4c56f35c 100644 --- a/test/plovasp/proj_shell/mytest.py +++ b/test/python/plovasp/atm/mytest.py @@ -44,8 +44,8 @@ class MyTestCase(unittest.TestCase): # Make a diff # # Remove empty lines - lstr1 = filter(lambda s: s.strip() != '', str1.splitlines(True)) - lstr2 = filter(lambda s: s.strip() != '', str2.splitlines(True)) + lstr1 = [s for s in str1.splitlines(True) if s.strip() != ''] + lstr2 = [s for s in str2.splitlines(True) if s.strip() != ''] # diff delta = difflib.unified_diff(lstr1, lstr2) # combine delta's to a string diff --git a/test/plovasp/atm/test_atm.py b/test/python/plovasp/atm/test_atm.py similarity index 100% rename from test/plovasp/atm/test_atm.py rename to test/python/plovasp/atm/test_atm.py diff 
--git a/test/plovasp/converter/example.cfg b/test/python/plovasp/converter/example.cfg similarity index 100% rename from test/plovasp/converter/example.cfg rename to test/python/plovasp/converter/example.cfg diff --git a/test/plovasp/converter/lunio3.cfg b/test/python/plovasp/converter/lunio3.cfg similarity index 100% rename from test/plovasp/converter/lunio3.cfg rename to test/python/plovasp/converter/lunio3.cfg diff --git a/test/plovasp/converter/lunio3.out.h5 b/test/python/plovasp/converter/lunio3.ref.h5 similarity index 100% rename from test/plovasp/converter/lunio3.out.h5 rename to test/python/plovasp/converter/lunio3.ref.h5 diff --git a/test/plovasp/converter/lunio3/DOSCAR b/test/python/plovasp/converter/lunio3/DOSCAR similarity index 100% rename from test/plovasp/converter/lunio3/DOSCAR rename to test/python/plovasp/converter/lunio3/DOSCAR diff --git a/test/plovasp/converter/lunio3/IBZKPT b/test/python/plovasp/converter/lunio3/IBZKPT similarity index 100% rename from test/plovasp/converter/lunio3/IBZKPT rename to test/python/plovasp/converter/lunio3/IBZKPT diff --git a/test/plovasp/converter/lunio3/LOCPROJ b/test/python/plovasp/converter/lunio3/LOCPROJ similarity index 100% rename from test/plovasp/converter/lunio3/LOCPROJ rename to test/python/plovasp/converter/lunio3/LOCPROJ diff --git a/test/plovasp/converter/lunio3/POSCAR b/test/python/plovasp/converter/lunio3/POSCAR similarity index 100% rename from test/plovasp/converter/lunio3/POSCAR rename to test/python/plovasp/converter/lunio3/POSCAR diff --git a/test/plovasp/converter/lunio3/readme.txt b/test/python/plovasp/converter/lunio3/readme.txt similarity index 100% rename from test/plovasp/converter/lunio3/readme.txt rename to test/python/plovasp/converter/lunio3/readme.txt diff --git a/test/plovasp/converter/lunio3/rot_dz2_dx2 b/test/python/plovasp/converter/lunio3/rot_dz2_dx2 similarity index 100% rename from test/plovasp/converter/lunio3/rot_dz2_dx2 rename to 
test/python/plovasp/converter/lunio3/rot_dz2_dx2 diff --git a/test/plovasp/converter/lunio3/test_lunio3.tar.gz b/test/python/plovasp/converter/lunio3/test_lunio3.tar.gz similarity index 100% rename from test/plovasp/converter/lunio3/test_lunio3.tar.gz rename to test/python/plovasp/converter/lunio3/test_lunio3.tar.gz diff --git a/test/plovasp/converter/mytest.py b/test/python/plovasp/converter/mytest.py similarity index 91% rename from test/plovasp/converter/mytest.py rename to test/python/plovasp/converter/mytest.py index 9d28fdbe..44523f7f 100644 --- a/test/plovasp/converter/mytest.py +++ b/test/python/plovasp/converter/mytest.py @@ -5,7 +5,7 @@ Module defining a custom TestCase with extra functionality. import unittest import numpy as np import difflib -from pytriqs.utility.h5diff import h5diff +from triqs.utility.h5diff import h5diff class MyTestCase(unittest.TestCase): """ @@ -45,8 +45,8 @@ class MyTestCase(unittest.TestCase): # Make a diff # # Remove empty lines - lstr1 = filter(lambda s: s.strip() != '', str1.splitlines(True)) - lstr2 = filter(lambda s: s.strip() != '', str2.splitlines(True)) + lstr1 = [s for s in str1.splitlines(True) if s.strip() != ''] + lstr2 = [s for s in str2.splitlines(True) if s.strip() != ''] # diff delta = difflib.unified_diff(lstr1, lstr2) # combine delta's to a string diff --git a/test/plovasp/converter/one_site/DOSCAR b/test/python/plovasp/converter/one_site/DOSCAR similarity index 100% rename from test/plovasp/converter/one_site/DOSCAR rename to test/python/plovasp/converter/one_site/DOSCAR diff --git a/test/plovasp/converter/one_site/IBZKPT b/test/python/plovasp/converter/one_site/IBZKPT similarity index 100% rename from test/plovasp/converter/one_site/IBZKPT rename to test/python/plovasp/converter/one_site/IBZKPT diff --git a/test/plovasp/converter/one_site/LOCPROJ b/test/python/plovasp/converter/one_site/LOCPROJ similarity index 100% rename from test/plovasp/converter/one_site/LOCPROJ rename to 
test/python/plovasp/converter/one_site/LOCPROJ diff --git a/test/plovasp/converter/one_site/POSCAR b/test/python/plovasp/converter/one_site/POSCAR similarity index 100% rename from test/plovasp/converter/one_site/POSCAR rename to test/python/plovasp/converter/one_site/POSCAR diff --git a/test/plovasp/converter/one_site/PROJCAR b/test/python/plovasp/converter/one_site/PROJCAR similarity index 100% rename from test/plovasp/converter/one_site/PROJCAR rename to test/python/plovasp/converter/one_site/PROJCAR diff --git a/test/plovasp/converter/one_site/readme.txt b/test/python/plovasp/converter/one_site/readme.txt similarity index 100% rename from test/plovasp/converter/one_site/readme.txt rename to test/python/plovasp/converter/one_site/readme.txt diff --git a/test/plovasp/converter/pg_output.out.h5 b/test/python/plovasp/converter/pg_output.ref.h5 similarity index 100% rename from test/plovasp/converter/pg_output.out.h5 rename to test/python/plovasp/converter/pg_output.ref.h5 diff --git a/test/plovasp/converter/rpath.py b/test/python/plovasp/converter/rpath.py similarity index 100% rename from test/plovasp/converter/rpath.py rename to test/python/plovasp/converter/rpath.py diff --git a/test/plovasp/converter/test_all.py b/test/python/plovasp/converter/test_all.py similarity index 100% rename from test/plovasp/converter/test_all.py rename to test/python/plovasp/converter/test_all.py diff --git a/test/plovasp/converter/test_converter_lunio3.py b/test/python/plovasp/converter/test_converter_lunio3.py similarity index 90% rename from test/plovasp/converter/test_converter_lunio3.py rename to test/python/plovasp/converter/test_converter_lunio3.py index 1d9a2eb0..ced2a045 100644 --- a/test/plovasp/converter/test_converter_lunio3.py +++ b/test/python/plovasp/converter/test_converter_lunio3.py @@ -29,12 +29,12 @@ class TestConverterLuNiO3(mytest.MyTestCase): generate_and_output_as_text(_rpath + 'lunio3.cfg', _rpath + 'lunio3/') test_file = _rpath + 'lunio3.test.h5' - converter 
= VaspConverter(filename=_rpath + 'lunio3', + converter = VaspConverter(filename=_rpath + 'lunio3', hdf_filename=test_file) converter.convert_dft_input() - expected_file = _rpath + 'lunio3.out.h5' + expected_file = _rpath + 'lunio3.ref.h5' self.assertH5FileEqual(test_file, expected_file) if __name__ == '__main__': diff --git a/test/plovasp/converter/test_converter_one_site.py b/test/python/plovasp/converter/test_converter_one_site.py similarity index 95% rename from test/plovasp/converter/test_converter_one_site.py rename to test/python/plovasp/converter/test_converter_one_site.py index a53b9bce..29385160 100644 --- a/test/plovasp/converter/test_converter_one_site.py +++ b/test/python/plovasp/converter/test_converter_one_site.py @@ -34,7 +34,7 @@ class TestConverterOneSite(mytest.MyTestCase): converter.convert_dft_input() - expected_file = _rpath + 'pg_output.out.h5' + expected_file = _rpath + 'pg_output.ref.h5' self.assertH5FileEqual(test_file, expected_file) if __name__ == '__main__': diff --git a/python/.gitignore b/test/python/plovasp/inpconf/.gitignore similarity index 100% rename from python/.gitignore rename to test/python/plovasp/inpconf/.gitignore diff --git a/c++/plovasp/__init__.py b/test/python/plovasp/inpconf/__init__.py similarity index 100% rename from c++/plovasp/__init__.py rename to test/python/plovasp/inpconf/__init__.py diff --git a/test/plovasp/inpconf/arraytest.py b/test/python/plovasp/inpconf/arraytest.py similarity index 100% rename from test/plovasp/inpconf/arraytest.py rename to test/python/plovasp/inpconf/arraytest.py diff --git a/test/plovasp/inpconf/example.cfg b/test/python/plovasp/inpconf/example.cfg similarity index 100% rename from test/plovasp/inpconf/example.cfg rename to test/python/plovasp/inpconf/example.cfg diff --git a/test/plovasp/inpconf/example_nogroup.cfg b/test/python/plovasp/inpconf/example_nogroup.cfg similarity index 100% rename from test/plovasp/inpconf/example_nogroup.cfg rename to 
test/python/plovasp/inpconf/example_nogroup.cfg diff --git a/test/plovasp/inpconf/input_test_1.cfg b/test/python/plovasp/inpconf/input_test_1.cfg similarity index 100% rename from test/plovasp/inpconf/input_test_1.cfg rename to test/python/plovasp/inpconf/input_test_1.cfg diff --git a/test/plovasp/inpconf/input_test_2.cfg b/test/python/plovasp/inpconf/input_test_2.cfg similarity index 100% rename from test/plovasp/inpconf/input_test_2.cfg rename to test/python/plovasp/inpconf/input_test_2.cfg diff --git a/test/plovasp/inpconf/input_test_3.cfg b/test/python/plovasp/inpconf/input_test_3.cfg similarity index 100% rename from test/plovasp/inpconf/input_test_3.cfg rename to test/python/plovasp/inpconf/input_test_3.cfg diff --git a/test/plovasp/inpconf/input_test_4.cfg b/test/python/plovasp/inpconf/input_test_4.cfg similarity index 100% rename from test/plovasp/inpconf/input_test_4.cfg rename to test/python/plovasp/inpconf/input_test_4.cfg diff --git a/test/plovasp/inpconf/parse_groups_1.cfg b/test/python/plovasp/inpconf/parse_groups_1.cfg similarity index 100% rename from test/plovasp/inpconf/parse_groups_1.cfg rename to test/python/plovasp/inpconf/parse_groups_1.cfg diff --git a/test/plovasp/inpconf/parse_shells_1.cfg b/test/python/plovasp/inpconf/parse_shells_1.cfg similarity index 100% rename from test/plovasp/inpconf/parse_shells_1.cfg rename to test/python/plovasp/inpconf/parse_shells_1.cfg diff --git a/test/plovasp/inpconf/parse_shells_2.cfg b/test/python/plovasp/inpconf/parse_shells_2.cfg similarity index 100% rename from test/plovasp/inpconf/parse_shells_2.cfg rename to test/python/plovasp/inpconf/parse_shells_2.cfg diff --git a/test/plovasp/inpconf/parse_shells_3.cfg b/test/python/plovasp/inpconf/parse_shells_3.cfg similarity index 100% rename from test/plovasp/inpconf/parse_shells_3.cfg rename to test/python/plovasp/inpconf/parse_shells_3.cfg diff --git a/test/plovasp/inpconf/parse_shells_4.cfg b/test/python/plovasp/inpconf/parse_shells_4.cfg similarity index 
100% rename from test/plovasp/inpconf/parse_shells_4.cfg rename to test/python/plovasp/inpconf/parse_shells_4.cfg diff --git a/test/plovasp/inpconf/parse_shells_5.cfg b/test/python/plovasp/inpconf/parse_shells_5.cfg similarity index 100% rename from test/plovasp/inpconf/parse_shells_5.cfg rename to test/python/plovasp/inpconf/parse_shells_5.cfg diff --git a/test/plovasp/inpconf/rpath.py b/test/python/plovasp/inpconf/rpath.py similarity index 100% rename from test/plovasp/inpconf/rpath.py rename to test/python/plovasp/inpconf/rpath.py diff --git a/test/plovasp/inpconf/test1.cfg b/test/python/plovasp/inpconf/test1.cfg similarity index 100% rename from test/plovasp/inpconf/test1.cfg rename to test/python/plovasp/inpconf/test1.cfg diff --git a/test/plovasp/inpconf/test_all.py b/test/python/plovasp/inpconf/test_all.py similarity index 100% rename from test/plovasp/inpconf/test_all.py rename to test/python/plovasp/inpconf/test_all.py diff --git a/test/plovasp/inpconf/test_general.py b/test/python/plovasp/inpconf/test_general.py similarity index 95% rename from test/plovasp/inpconf/test_general.py rename to test/python/plovasp/inpconf/test_general.py index b30d3879..1d1316da 100644 --- a/test/plovasp/inpconf/test_general.py +++ b/test/python/plovasp/inpconf/test_general.py @@ -2,10 +2,10 @@ r""" Tests of 'parse_general()' defined in ConfigParameters class """ import os -import rpath +from . import rpath _rpath = os.path.dirname(rpath.__file__) + '/' -import arraytest +from . 
import arraytest import numpy as np from triqs_dft_tools.converters.plovasp.inpconf import ConfigParameters diff --git a/test/plovasp/inpconf/test_groups.py b/test/python/plovasp/inpconf/test_groups.py similarity index 90% rename from test/plovasp/inpconf/test_groups.py rename to test/python/plovasp/inpconf/test_groups.py index 7dbd9cc2..af078fa1 100644 --- a/test/plovasp/inpconf/test_groups.py +++ b/test/python/plovasp/inpconf/test_groups.py @@ -2,10 +2,10 @@ r""" Tests of 'parse_groups()' defined in ConfigParameters class """ import os -import rpath +from . import rpath _rpath = os.path.dirname(rpath.__file__) + '/' -import arraytest +from . import arraytest import numpy as np from triqs_dft_tools.converters.plovasp.inpconf import ConfigParameters @@ -30,7 +30,7 @@ class TestParseGroups(arraytest.ArrayTestCase): def test_gr_required(self): conf_pars = ConfigParameters(_rpath + 'parse_groups_1.cfg') err_mess = "Required parameter" - with self.assertRaisesRegexp(Exception, err_mess): + with self.assertRaisesRegex(Exception, err_mess): conf_pars.parse_groups() # Scenario 2 @@ -42,8 +42,8 @@ class TestParseGroups(arraytest.ArrayTestCase): 'normalize': True, 'normion': True,'complement': False}, {'index': 2, 'shells': [3], 'ewindow': (-1.6, 2.0), 'normalize': True, 'normion': True,'complement': False}] - print res - print expected + print(res) + print(expected) self.assertListEqual(res, expected) diff --git a/test/plovasp/inpconf/test_input.py b/test/python/plovasp/inpconf/test_input.py similarity index 77% rename from test/plovasp/inpconf/test_input.py rename to test/python/plovasp/inpconf/test_input.py index 6bf12050..9784491f 100644 --- a/test/plovasp/inpconf/test_input.py +++ b/test/python/plovasp/inpconf/test_input.py @@ -2,10 +2,10 @@ r""" Tests of 'parse_input()' defined in ConfigParameters class """ import os -import rpath +from . import rpath _rpath = os.path.dirname(rpath.__file__) + '/' -import arraytest +from . 
import arraytest import numpy as np from triqs_dft_tools.converters.plovasp.inpconf import ConfigParameters @@ -38,28 +38,28 @@ class TestParseInput(arraytest.ArrayTestCase): def test_no_group(self): conf_pars = ConfigParameters(_rpath + 'input_test_1.cfg') err_mess = "At least one group" - with self.assertRaisesRegexp(AssertionError, err_mess): + with self.assertRaisesRegex(AssertionError, err_mess): conf_pars.parse_input() # Scenario 2 def test_gr_required(self): conf_pars = ConfigParameters(_rpath + 'input_test_2.cfg') err_mess = "One \[Shell\] section is" - with self.assertRaisesRegexp(KeyError, err_mess): + with self.assertRaisesRegex(KeyError, err_mess): conf_pars.parse_input() # Scenario 3 def test_no_shell(self): conf_pars = ConfigParameters(_rpath + 'input_test_3.cfg') err_mess = "Shell 3 referenced in" - with self.assertRaisesRegexp(Exception, err_mess): + with self.assertRaisesRegex(Exception, err_mess): conf_pars.parse_input() # Scenario 4 def test_shell_outside_groups(self): conf_pars = ConfigParameters(_rpath + 'input_test_4.cfg') err_mess = "Some shells are not inside" - with self.assertRaisesRegexp(AssertionError, err_mess): + with self.assertRaisesRegex(AssertionError, err_mess): conf_pars.parse_input() # Scenario 5 @@ -78,12 +78,12 @@ class TestParseInput(arraytest.ArrayTestCase): res = res.replace(" ","") # Remove spaces for comparison expected = r"""Shells: -[{'ions':{'nion':4,'ion_list':[[4],[5],[6],[7]]},'user_index':1,'lshell':2,'corr':True,'ion_sort':None},{'tmatrix':array([[0.,1.,0.], +[{'user_index':1,'ions':{'ion_list':[[4],[5],[6],[7]],'nion':4},'lshell':2,'ion_sort':None,'corr':True},{'user_index':2,'ions':{'ion_list':[[0],[1],[2],[3]],'nion':4},'lshell':1,'tmatrix':array([[0.,1.,0.], [1.,0.,0.], -[0.,0.,1.]]),'ions':{'nion':4,'ion_list':[[0],[1],[2],[3]]},'lshell':1,'corr':True,'ion_sort':None,'user_index':2},{'ions':{'nion':4,'ion_list':[[0],[1],[2],[3]]},'user_index':3,'lshell':3,'corr':True,'ion_sort':None}] 
+[0.,0.,1.]]),'ion_sort':None,'corr':True},{'user_index':3,'ions':{'ion_list':[[0],[1],[2],[3]],'nion':4},'lshell':3,'ion_sort':None,'corr':True}] Groups: -[{'normalize':True,'index':1,'ewindow':(-7.6,3.0),'shells':[0,1],'complement':False,'normion':True},{'normalize':True,'index':2,'ewindow':(-1.6,2.0),'shells':[2],'complement':False,'normion':True}]""" +[{'index':1,'shells':[0,1],'ewindow':(-7.6,3.0),'normalize':True,'normion':True,'complement':False},{'index':2,'shells':[2],'ewindow':(-1.6,2.0),'normalize':True,'normion':True,'complement':False}]""" self.assertEqual(res, expected) @@ -103,10 +103,10 @@ Groups: res = res.replace(" ","") # Remove spaces for comparison expected = r"""Shells: -[{'ions':{'nion':4,'ion_list':[[4],[5],[6],[7]]},'lshell':2,'corr':True,'ion_sort':None,'user_index':1}] +[{'user_index':1,'ions':{'ion_list':[[4],[5],[6],[7]],'nion':4},'lshell':2,'ion_sort':None,'corr':True}] Groups: -[{'normalize':True,'index':'1','ewindow':(-7.6,3.0),'normion':True,'complement':False,'shells':[0]}]""" +[{'index':'1','ewindow':(-7.6,3.0),'normalize':True,'normion':True,'complement':False,'shells':[0]}]""" self.assertEqual(res, expected) diff --git a/test/plovasp/inpconf/test_parameter_set.py b/test/python/plovasp/inpconf/test_parameter_set.py similarity index 94% rename from test/plovasp/inpconf/test_parameter_set.py rename to test/python/plovasp/inpconf/test_parameter_set.py index f42b4f76..de794cb2 100644 --- a/test/plovasp/inpconf/test_parameter_set.py +++ b/test/python/plovasp/inpconf/test_parameter_set.py @@ -2,10 +2,10 @@ r""" Tests of 'parse_parameter_set()' defined in ConfigParameters class """ import os -import rpath +from . import rpath _rpath = os.path.dirname(rpath.__file__) + '/' -import arraytest +from . 
import arraytest import numpy as np from triqs_dft_tools.converters.plovasp.inpconf import ConfigParameters @@ -47,6 +47,6 @@ class TestParseParameterSet(arraytest.ArrayTestCase): section = 'Shell 1' param_set = self.cpars.sh_required # contains 'lshell' and 'ions' err_mess = "Required parameter" # .* in section [%s]"%(section) - with self.assertRaisesRegexp(Exception, err_mess): + with self.assertRaisesRegex(Exception, err_mess): self.cpars.parse_parameter_set(section, param_set, exception=True) - + diff --git a/test/plovasp/inpconf/test_shells.py b/test/python/plovasp/inpconf/test_shells.py similarity index 94% rename from test/plovasp/inpconf/test_shells.py rename to test/python/plovasp/inpconf/test_shells.py index cb5d68fb..ecbc9271 100644 --- a/test/plovasp/inpconf/test_shells.py +++ b/test/python/plovasp/inpconf/test_shells.py @@ -2,10 +2,10 @@ r""" Tests of 'parse_shells()' defined in ConfigParameters class """ import os -import rpath +from . import rpath _rpath = os.path.dirname(rpath.__file__) + '/' -import arraytest +from . 
import arraytest import numpy as np from triqs_dft_tools.converters.plovasp.inpconf import ConfigParameters @@ -37,21 +37,21 @@ class TestParseShells(arraytest.ArrayTestCase): def test_no_shell(self): conf_pars = ConfigParameters(_rpath + 'parse_shells_1.cfg') err_mess = "No projected shells" - with self.assertRaisesRegexp(AssertionError, err_mess): + with self.assertRaisesRegex(AssertionError, err_mess): conf_pars.parse_shells() # Scenario 2 def test_bad_indices(self): conf_pars = ConfigParameters(_rpath + 'parse_shells_2.cfg') err_mess = "Failed to extract shell indices" - with self.assertRaisesRegexp(ValueError, err_mess): + with self.assertRaisesRegex(ValueError, err_mess): conf_pars.parse_shells() # Scenario 3 def test_sh_required(self): conf_pars = ConfigParameters(_rpath + 'parse_shells_3.cfg') err_mess = "Required parameter" - with self.assertRaisesRegexp(Exception, err_mess): + with self.assertRaisesRegex(Exception, err_mess): conf_pars.parse_shells() # Scenario 4 diff --git a/test/plovasp/inpconf/test_special_parsers.py b/test/python/plovasp/inpconf/test_special_parsers.py similarity index 93% rename from test/plovasp/inpconf/test_special_parsers.py rename to test/python/plovasp/inpconf/test_special_parsers.py index b9811bec..d3d01f27 100644 --- a/test/plovasp/inpconf/test_special_parsers.py +++ b/test/python/plovasp/inpconf/test_special_parsers.py @@ -2,10 +2,10 @@ r""" Tests of special parseres defined in ConfigParameters class """ import os -import rpath +from . import rpath _rpath = os.path.dirname(rpath.__file__) + '/' -import arraytest +from . 
import arraytest import numpy as np from triqs_dft_tools.converters.plovasp.inpconf import ConfigParameters @@ -87,7 +87,7 @@ class TestParseStringIonList(arraytest.ArrayTestCase): # Scenario 3 def test_out_of_bounds(self): err_mess = "Lowest ion index is" - with self.assertRaisesRegexp(AssertionError, err_mess): + with self.assertRaisesRegex(AssertionError, err_mess): self.cpars.parse_string_ion_list('0 1') # Scenario 4 @@ -99,7 +99,7 @@ class TestParseStringIonList(arraytest.ArrayTestCase): # Scenario 5 def test_range_wrong_order(self): err_mess = "First index of the range" - with self.assertRaisesRegexp(AssertionError, err_mess): + with self.assertRaisesRegex(AssertionError, err_mess): self.cpars.parse_string_ion_list('8..5') # Scenario 6 @@ -140,14 +140,14 @@ class TestParseStringTmatrix(arraytest.ArrayTestCase): def test_number_of_columns(self): par_str = "1.0 0.0\n1.0" err_mess = "Number of columns" - with self.assertRaisesRegexp(AssertionError, err_mess): + with self.assertRaisesRegex(AssertionError, err_mess): self.cpars.parse_string_tmatrix(par_str, real=True) # Scenario 2 def test_complex_matrix_odd(self): par_str = "1.0 0.0 2.0 1.0 0.0\n0.0 1.0 2.0 3.0 -1.0" err_mess = "Complex matrix must" - with self.assertRaisesRegexp(AssertionError, err_mess): + with self.assertRaisesRegex(AssertionError, err_mess): self.cpars.parse_string_tmatrix(par_str, real=False) # Scenario 3 @@ -192,13 +192,13 @@ class TestParseEnergyWindow(arraytest.ArrayTestCase): # Scenario 2 def test_wrong_range(self): err_mess = "The first float in EWINDOW" - with self.assertRaisesRegexp(AssertionError, err_mess): + with self.assertRaisesRegex(AssertionError, err_mess): self.cpars.parse_energy_window('3.0 -1.5') # Scenario 3 def test_one_float(self): err_mess = "EWINDOW must be specified" - with self.assertRaisesRegexp(AssertionError, err_mess): + with self.assertRaisesRegex(AssertionError, err_mess): self.cpars.parse_energy_window('1.0') # Scenario 4 @@ -209,7 +209,7 @@ class 
TestParseEnergyWindow(arraytest.ArrayTestCase): # Scenario 5 def test_three_floats(self): err_mess = "EWINDOW must be specified" - with self.assertRaisesRegexp(AssertionError, err_mess): + with self.assertRaisesRegex(AssertionError, err_mess): self.cpars.parse_energy_window('1.5 3.0 2.0') ################################################################################ @@ -246,13 +246,13 @@ class TestParseBandWindow(arraytest.ArrayTestCase): # Scenario 2 def test_wrong_range(self): err_mess = "The first int in BANDS" - with self.assertRaisesRegexp(AssertionError, err_mess): + with self.assertRaisesRegex(AssertionError, err_mess): self.cpars.parse_band_window('10 1') # Scenario 3 def test_one_float(self): err_mess = "BANDS must be specified" - with self.assertRaisesRegexp(AssertionError, err_mess): + with self.assertRaisesRegex(AssertionError, err_mess): self.cpars.parse_band_window('1') # Scenario 4 @@ -263,7 +263,7 @@ class TestParseBandWindow(arraytest.ArrayTestCase): # Scenario 5 def test_three_ints(self): err_mess = "BANDS must be specified" - with self.assertRaisesRegexp(AssertionError, err_mess): + with self.assertRaisesRegex(AssertionError, err_mess): self.cpars.parse_band_window('1 2 3') ################################################################################ @@ -345,7 +345,7 @@ class TestParseStringDosmesh(arraytest.ArrayTestCase): # Scenario 3 def test_two_numbers(self): err_mess = "DOSMESH must be either" - with self.assertRaisesRegexp(ValueError, err_mess): + with self.assertRaisesRegex(ValueError, err_mess): self.cpars.parse_string_dosmesh('-8.0 101') # Scenario 4 diff --git a/test/plovasp/inpconf/tmatrix_file.dat b/test/python/plovasp/inpconf/tmatrix_file.dat similarity index 100% rename from test/plovasp/inpconf/tmatrix_file.dat rename to test/python/plovasp/inpconf/tmatrix_file.dat diff --git a/test/plovasp/plocar_io/.gitignore b/test/python/plovasp/plocar_io/.gitignore similarity index 100% rename from test/plovasp/plocar_io/.gitignore 
rename to test/python/plovasp/plocar_io/.gitignore diff --git a/test/plovasp/plocar_io/PLOCAR.example b/test/python/plovasp/plocar_io/PLOCAR.example similarity index 100% rename from test/plovasp/plocar_io/PLOCAR.example rename to test/python/plovasp/plocar_io/PLOCAR.example diff --git a/test/plovasp/plocar_io/PLOCAR.example.out b/test/python/plovasp/plocar_io/PLOCAR.example.out similarity index 100% rename from test/plovasp/plocar_io/PLOCAR.example.out rename to test/python/plovasp/plocar_io/PLOCAR.example.out diff --git a/test/plovasp/plocar_io/PLOCAR.noprec b/test/python/plovasp/plocar_io/PLOCAR.noprec similarity index 100% rename from test/plovasp/plocar_io/PLOCAR.noprec rename to test/python/plovasp/plocar_io/PLOCAR.noprec diff --git a/test/plovasp/plocar_io/PLOCAR.prec8 b/test/python/plovasp/plocar_io/PLOCAR.prec8 similarity index 100% rename from test/plovasp/plocar_io/PLOCAR.prec8 rename to test/python/plovasp/plocar_io/PLOCAR.prec8 diff --git a/test/plovasp/plocar_io/PLOCAR.prec8.out b/test/python/plovasp/plocar_io/PLOCAR.prec8.out similarity index 100% rename from test/plovasp/plocar_io/PLOCAR.prec8.out rename to test/python/plovasp/plocar_io/PLOCAR.prec8.out diff --git a/test/plovasp/plocar_io/PLOCAR.trunc b/test/python/plovasp/plocar_io/PLOCAR.trunc similarity index 100% rename from test/plovasp/plocar_io/PLOCAR.trunc rename to test/python/plovasp/plocar_io/PLOCAR.trunc diff --git a/test/plovasp/plocar_io/mytest.py b/test/python/plovasp/plocar_io/mytest.py similarity index 91% rename from test/plovasp/plocar_io/mytest.py rename to test/python/plovasp/plocar_io/mytest.py index 1490b477..4ce354f0 100644 --- a/test/plovasp/plocar_io/mytest.py +++ b/test/python/plovasp/plocar_io/mytest.py @@ -43,8 +43,8 @@ class MyTestCase(unittest.TestCase): # Make a diff # # Remove empty lines - lstr1 = filter(lambda s: s.strip() != '', str1.splitlines(True)) - lstr2 = filter(lambda s: s.strip() != '', str2.splitlines(True)) + lstr1 = [s for s in str1.splitlines(True) if 
s.strip() != ''] + lstr2 = [s for s in str2.splitlines(True) if s.strip() != ''] # diff delta = difflib.unified_diff(lstr1, lstr2) # combine delta's to a string diff --git a/test/plovasp/plocar_io/test_fileio.py b/test/python/plovasp/plocar_io/test_fileio.py similarity index 83% rename from test/plovasp/plocar_io/test_fileio.py rename to test/python/plovasp/plocar_io/test_fileio.py index fc6c2bed..04b6cbf4 100644 --- a/test/plovasp/plocar_io/test_fileio.py +++ b/test/python/plovasp/plocar_io/test_fileio.py @@ -27,19 +27,19 @@ class TestFileIO(mytest.MyTestCase): # Scenario 1 def test_no_plocar(self): err_mess = "Error opening xPLOCAR" - with self.assertRaisesRegexp(IOError, err_mess): + with self.assertRaisesRegex(IOError, err_mess): read_plocar('xPLOCAR') # Scenario 2 def test_end_of_file(self): err_mess = "End-of-file reading" - with self.assertRaisesRegexp(IOError, err_mess): + with self.assertRaisesRegex(IOError, err_mess): read_plocar('PLOCAR.trunc') # Scenario 3 def test_wrong_prec(self): err_mess = "only 'prec = 4, 8' are supported" - with self.assertRaisesRegexp(ValueError, err_mess): + with self.assertRaisesRegex(ValueError, err_mess): read_plocar('PLOCAR.noprec') # Scenario 4 @@ -52,10 +52,10 @@ class TestFileIO(mytest.MyTestCase): f.write(" nlm =%5i\n"%(nlm)) ion = 1 isp = 1 - for ik in xrange(nk): - for ib in xrange(nb): + for ik in range(nk): + for ib in range(nb): f.write("%5i%5i%5i%5i%10.5f\n"%(ion, isp, ik+1, ib+1, ferw[0, 0, ik, ib])) - for ilm in xrange(nlm): + for ilm in range(nlm): p = plo[0, 0, ik, ib, ilm] f.write("%5i%15.7f%15.7f\n"%(ilm+1, p.real, p.imag)) @@ -75,13 +75,13 @@ class TestFileIO(mytest.MyTestCase): test_file = 'PLOCAR.example.out.test' with open(test_file, 'wt') as f: f.write("pars: %s\n"%(pars)) - for ion in xrange(nion): - for isp in xrange(ns): - for ik in xrange(nk): - for ib in xrange(nb): + for ion in range(nion): + for isp in range(ns): + for ik in range(nk): + for ib in range(nb): f.write("%5i%5i%5i%5i %s\n"%(ion+1, 
isp+1, ik+1, ib+1, ferw[ion, isp, ik, ib])) - for ilm in xrange(nlm): + for ilm in range(nlm): p = plo[ion, isp, ik, ib, ilm] f.write("%5i %s\n"%(ilm+1, p)) diff --git a/test/plovasp/plocar_io/test_plocar_io.py b/test/python/plovasp/plocar_io/test_plocar_io.py similarity index 100% rename from test/plovasp/plocar_io/test_plocar_io.py rename to test/python/plovasp/plocar_io/test_plocar_io.py diff --git a/test/plovasp/plotools/.gitignore b/test/python/plovasp/plotools/.gitignore similarity index 100% rename from test/plovasp/plotools/.gitignore rename to test/python/plovasp/plotools/.gitignore diff --git a/test/plovasp/plotools/DOSCAR b/test/python/plovasp/plotools/DOSCAR similarity index 100% rename from test/plovasp/plotools/DOSCAR rename to test/python/plovasp/plotools/DOSCAR diff --git a/test/plovasp/plotools/EIGENVAL b/test/python/plovasp/plotools/EIGENVAL similarity index 100% rename from test/plovasp/plotools/EIGENVAL rename to test/python/plovasp/plotools/EIGENVAL diff --git a/test/plovasp/plotools/IBZKPT b/test/python/plovasp/plotools/IBZKPT similarity index 100% rename from test/plovasp/plotools/IBZKPT rename to test/python/plovasp/plotools/IBZKPT diff --git a/test/plovasp/plotools/OUTCAR b/test/python/plovasp/plotools/OUTCAR similarity index 100% rename from test/plovasp/plotools/OUTCAR rename to test/python/plovasp/plotools/OUTCAR diff --git a/test/plovasp/plotools/PLOCAR b/test/python/plovasp/plotools/PLOCAR similarity index 100% rename from test/plovasp/plotools/PLOCAR rename to test/python/plovasp/plotools/PLOCAR diff --git a/test/plovasp/plotools/POSCAR b/test/python/plovasp/plotools/POSCAR similarity index 100% rename from test/plovasp/plotools/POSCAR rename to test/python/plovasp/plotools/POSCAR diff --git a/test/plovasp/plotools/POSCAR.complex b/test/python/plovasp/plotools/POSCAR.complex similarity index 100% rename from test/plovasp/plotools/POSCAR.complex rename to test/python/plovasp/plotools/POSCAR.complex diff --git 
a/test/plovasp/plotools/example.cfg b/test/python/plovasp/plotools/example.cfg similarity index 100% rename from test/plovasp/plotools/example.cfg rename to test/python/plovasp/plotools/example.cfg diff --git a/test/plovasp/plotools/mytest.py b/test/python/plovasp/plotools/mytest.py similarity index 91% rename from test/plovasp/plotools/mytest.py rename to test/python/plovasp/plotools/mytest.py index 38f0689e..4cac121d 100644 --- a/test/plovasp/plotools/mytest.py +++ b/test/python/plovasp/plotools/mytest.py @@ -43,8 +43,8 @@ class MyTestCase(unittest.TestCase): # Make a diff # # Remove empty lines - lstr1 = filter(lambda s: s.strip() != '', str1.splitlines(True)) - lstr2 = filter(lambda s: s.strip() != '', str2.splitlines(True)) + lstr1 = [s for s in str1.splitlines(True) if s.strip() != ''] + lstr2 = [s for s in str2.splitlines(True) if s.strip() != ''] # diff delta = difflib.unified_diff(lstr1, lstr2) # combine delta's to a string diff --git a/test/plovasp/plotools/rpath.py b/test/python/plovasp/plotools/rpath.py similarity index 100% rename from test/plovasp/plotools/rpath.py rename to test/python/plovasp/plotools/rpath.py diff --git a/test/plovasp/plotools/test_all.py b/test/python/plovasp/plotools/test_all.py similarity index 100% rename from test/plovasp/plotools/test_all.py rename to test/python/plovasp/plotools/test_all.py diff --git a/test/plovasp/plotools/test_consistency.py b/test/python/plovasp/plotools/test_consistency.py similarity index 98% rename from test/plovasp/plotools/test_consistency.py rename to test/python/plovasp/plotools/test_consistency.py index 8b79935c..bd1c9ee5 100644 --- a/test/plovasp/plotools/test_consistency.py +++ b/test/python/plovasp/plotools/test_consistency.py @@ -3,7 +3,7 @@ import triqs_dft_tools.converters.plovasp.vaspio from triqs_dft_tools.converters.plovasp.inpconf import ConfigParameters from triqs_dft_tools.converters.plovasp.plotools import check_data_consistency from triqs_dft_tools.converters.plovasp.elstruct import 
ElectronicStructure -import mytest +from . import mytest ################################################################################ # diff --git a/test/plovasp/plotools/wrong_shell.cfg b/test/python/plovasp/plotools/wrong_shell.cfg similarity index 100% rename from test/plovasp/plotools/wrong_shell.cfg rename to test/python/plovasp/plotools/wrong_shell.cfg diff --git a/test/plovasp/proj_group/.gitignore b/test/python/plovasp/proj_group/.gitignore similarity index 100% rename from test/plovasp/proj_group/.gitignore rename to test/python/plovasp/proj_group/.gitignore diff --git a/test/plovasp/inpconf/__init__.py b/test/python/plovasp/proj_group/__init__.py similarity index 100% rename from test/plovasp/inpconf/__init__.py rename to test/python/plovasp/proj_group/__init__.py diff --git a/test/plovasp/proj_group/block_matrix.cfg b/test/python/plovasp/proj_group/block_matrix.cfg similarity index 100% rename from test/plovasp/proj_group/block_matrix.cfg rename to test/python/plovasp/proj_group/block_matrix.cfg diff --git a/test/plovasp/proj_group/example.cfg b/test/python/plovasp/proj_group/example.cfg similarity index 100% rename from test/plovasp/proj_group/example.cfg rename to test/python/plovasp/proj_group/example.cfg diff --git a/test/plovasp/proj_group/example_two_site.cfg b/test/python/plovasp/proj_group/example_two_site.cfg similarity index 100% rename from test/plovasp/proj_group/example_two_site.cfg rename to test/python/plovasp/proj_group/example_two_site.cfg diff --git a/test/plovasp/proj_group/hk.out.h5 b/test/python/plovasp/proj_group/hk.ref.h5 similarity index 100% rename from test/plovasp/proj_group/hk.out.h5 rename to test/python/plovasp/proj_group/hk.ref.h5 diff --git a/test/plovasp/proj_group/mytest.py b/test/python/plovasp/proj_group/mytest.py similarity index 91% rename from test/plovasp/proj_group/mytest.py rename to test/python/plovasp/proj_group/mytest.py index 9d28fdbe..44523f7f 100644 --- a/test/plovasp/proj_group/mytest.py +++ 
b/test/python/plovasp/proj_group/mytest.py @@ -5,7 +5,7 @@ Module defining a custom TestCase with extra functionality. import unittest import numpy as np import difflib -from pytriqs.utility.h5diff import h5diff +from triqs.utility.h5diff import h5diff class MyTestCase(unittest.TestCase): """ @@ -45,8 +45,8 @@ class MyTestCase(unittest.TestCase): # Make a diff # # Remove empty lines - lstr1 = filter(lambda s: s.strip() != '', str1.splitlines(True)) - lstr2 = filter(lambda s: s.strip() != '', str2.splitlines(True)) + lstr1 = [s for s in str1.splitlines(True) if s.strip() != ''] + lstr2 = [s for s in str2.splitlines(True) if s.strip() != ''] # diff delta = difflib.unified_diff(lstr1, lstr2) # combine delta's to a string diff --git a/test/plovasp/proj_group/one_site/DOSCAR b/test/python/plovasp/proj_group/one_site/DOSCAR similarity index 100% rename from test/plovasp/proj_group/one_site/DOSCAR rename to test/python/plovasp/proj_group/one_site/DOSCAR diff --git a/test/plovasp/proj_group/one_site/EIGENVAL b/test/python/plovasp/proj_group/one_site/EIGENVAL similarity index 100% rename from test/plovasp/proj_group/one_site/EIGENVAL rename to test/python/plovasp/proj_group/one_site/EIGENVAL diff --git a/test/plovasp/proj_group/one_site/IBZKPT b/test/python/plovasp/proj_group/one_site/IBZKPT similarity index 100% rename from test/plovasp/proj_group/one_site/IBZKPT rename to test/python/plovasp/proj_group/one_site/IBZKPT diff --git a/test/plovasp/proj_group/one_site/LOCPROJ b/test/python/plovasp/proj_group/one_site/LOCPROJ similarity index 100% rename from test/plovasp/proj_group/one_site/LOCPROJ rename to test/python/plovasp/proj_group/one_site/LOCPROJ diff --git a/test/plovasp/proj_group/one_site/POSCAR b/test/python/plovasp/proj_group/one_site/POSCAR similarity index 100% rename from test/plovasp/proj_group/one_site/POSCAR rename to test/python/plovasp/proj_group/one_site/POSCAR diff --git a/test/plovasp/proj_group/projortho.out.h5 
b/test/python/plovasp/proj_group/projortho.ref.h5 similarity index 100% rename from test/plovasp/proj_group/projortho.out.h5 rename to test/python/plovasp/proj_group/projortho.ref.h5 diff --git a/test/plovasp/proj_group/projortho_2site.out.h5 b/test/python/plovasp/proj_group/projortho_2site.ref.h5 similarity index 100% rename from test/plovasp/proj_group/projortho_2site.out.h5 rename to test/python/plovasp/proj_group/projortho_2site.ref.h5 diff --git a/test/plovasp/proj_group/projortho_normion.out.h5 b/test/python/plovasp/proj_group/projortho_normion.ref.h5 similarity index 100% rename from test/plovasp/proj_group/projortho_normion.out.h5 rename to test/python/plovasp/proj_group/projortho_normion.ref.h5 diff --git a/test/plovasp/proj_group/rpath.py b/test/python/plovasp/proj_group/rpath.py similarity index 100% rename from test/plovasp/proj_group/rpath.py rename to test/python/plovasp/proj_group/rpath.py diff --git a/test/plovasp/proj_group/simple.cfg b/test/python/plovasp/proj_group/simple.cfg similarity index 100% rename from test/plovasp/proj_group/simple.cfg rename to test/python/plovasp/proj_group/simple.cfg diff --git a/test/plovasp/proj_group/simple/DOSCAR b/test/python/plovasp/proj_group/simple/DOSCAR similarity index 100% rename from test/plovasp/proj_group/simple/DOSCAR rename to test/python/plovasp/proj_group/simple/DOSCAR diff --git a/test/plovasp/proj_group/simple/IBZKPT b/test/python/plovasp/proj_group/simple/IBZKPT similarity index 100% rename from test/plovasp/proj_group/simple/IBZKPT rename to test/python/plovasp/proj_group/simple/IBZKPT diff --git a/test/plovasp/proj_group/simple/LOCPROJ b/test/python/plovasp/proj_group/simple/LOCPROJ similarity index 100% rename from test/plovasp/proj_group/simple/LOCPROJ rename to test/python/plovasp/proj_group/simple/LOCPROJ diff --git a/test/plovasp/proj_group/simple/POSCAR b/test/python/plovasp/proj_group/simple/POSCAR similarity index 100% rename from test/plovasp/proj_group/simple/POSCAR rename to 
test/python/plovasp/proj_group/simple/POSCAR diff --git a/test/plovasp/proj_group/test_all.py b/test/python/plovasp/proj_group/test_all.py similarity index 100% rename from test/plovasp/proj_group/test_all.py rename to test/python/plovasp/proj_group/test_all.py diff --git a/test/plovasp/proj_group/test_block_map.py b/test/python/plovasp/proj_group/test_block_map.py similarity index 93% rename from test/plovasp/proj_group/test_block_map.py rename to test/python/plovasp/proj_group/test_block_map.py index 27570c0e..b51068db 100644 --- a/test/plovasp/proj_group/test_block_map.py +++ b/test/python/plovasp/proj_group/test_block_map.py @@ -1,13 +1,13 @@ import os -import rpath +from . import rpath _rpath = os.path.dirname(rpath.__file__) + '/' import numpy as np from triqs_dft_tools.converters.plovasp.inpconf import ConfigParameters from triqs_dft_tools.converters.plovasp.proj_shell import ProjectorShell from triqs_dft_tools.converters.plovasp.proj_group import ProjectorGroup -import mytest +from . 
import mytest ################################################################################ # @@ -30,19 +30,19 @@ class TestBlockMap(mytest.MyTestCase): nproj = 16 self.mock_plo = np.zeros((nproj, 1, 1, 11), dtype=np.complex128) - self.mock_proj_params = [{} for i in xrange(nproj)] + self.mock_proj_params = [{} for i in range(nproj)] ip = 0 # Mock d-sites - for isite in xrange(2): - for im in xrange(5): + for isite in range(2): + for im in range(5): self.mock_proj_params[ip]['label'] = 'd-orb' self.mock_proj_params[ip]['isite'] = isite + 1 self.mock_proj_params[ip]['l'] = 2 self.mock_proj_params[ip]['m'] = im ip += 1 # Mock p-sites - for isite in xrange(2, 4): - for im in xrange(3): + for isite in range(2, 4): + for im in range(3): self.mock_proj_params[ip]['label'] = 'p-orb' self.mock_proj_params[ip]['isite'] = isite + 1 self.mock_proj_params[ip]['l'] = 1 diff --git a/test/plovasp/proj_group/test_one_site.py b/test/python/plovasp/proj_group/test_one_site.py similarity index 89% rename from test/plovasp/proj_group/test_one_site.py rename to test/python/plovasp/proj_group/test_one_site.py index d4307891..738c6f7e 100644 --- a/test/plovasp/proj_group/test_one_site.py +++ b/test/python/plovasp/proj_group/test_one_site.py @@ -1,6 +1,6 @@ import os -import rpath +from . import rpath _rpath = os.path.dirname(rpath.__file__) + '/' import numpy as np @@ -9,8 +9,8 @@ from triqs_dft_tools.converters.plovasp.elstruct import ElectronicStructure from triqs_dft_tools.converters.plovasp.inpconf import ConfigParameters from triqs_dft_tools.converters.plovasp.proj_shell import ProjectorShell from triqs_dft_tools.converters.plovasp.proj_group import ProjectorGroup -from pytriqs.archive import HDFArchive -import mytest +from h5 import HDFArchive +from . 
import mytest ################################################################################ # @@ -61,8 +61,7 @@ class TestProjectorGroup(mytest.MyTestCase): # FIXME: seems redundant, as 'overl' is written to the file anyway self.assertEqual(overl, np.eye(5)) -# expected_file = _rpath + 'projortho.out' - expected_file = _rpath + 'projortho.out.h5' + expected_file = _rpath + 'projortho.ref.h5' # self.assertFileEqual(testout, expected_file) self.assertH5FileEqual(testout, expected_file) @@ -85,17 +84,16 @@ class TestProjectorGroup(mytest.MyTestCase): # FIXME: seems redundant, as 'overl' is written to the file anyway self.assertEqual(overl, np.eye(5)) -# expected_file = _rpath + 'projortho.out' # self.assertFileEqual(testout, expected_file) - expected_file = _rpath + 'projortho.out.h5' + expected_file = _rpath + 'projortho.ref.h5' self.assertH5FileEqual(testout, expected_file) - + def test_hk(self): self.proj_gr.orthogonalize() self.proj_gr.calc_hk(self.eigvals) - + testout = _rpath + 'hk.test.h5' with HDFArchive(testout, 'w') as h5test: h5test['hk'] = self.proj_gr.hk - expected_file = _rpath + 'hk.out.h5' - self.assertH5FileEqual(testout, expected_file) \ No newline at end of file + expected_file = _rpath + 'hk.ref.h5' + self.assertH5FileEqual(testout, expected_file) diff --git a/test/plovasp/proj_group/test_one_site_compl.py b/test/python/plovasp/proj_group/test_one_site_compl.py similarity index 86% rename from test/plovasp/proj_group/test_one_site_compl.py rename to test/python/plovasp/proj_group/test_one_site_compl.py index 1d9daa65..fc5f15e3 100644 --- a/test/plovasp/proj_group/test_one_site_compl.py +++ b/test/python/plovasp/proj_group/test_one_site_compl.py @@ -1,6 +1,6 @@ import os -import rpath +from . 
import rpath _rpath = os.path.dirname(rpath.__file__) + '/' import numpy as np @@ -9,8 +9,8 @@ from triqs_dft_tools.converters.plovasp.elstruct import ElectronicStructure from triqs_dft_tools.converters.plovasp.inpconf import ConfigParameters from triqs_dft_tools.converters.plovasp.proj_shell import ProjectorShell from triqs_dft_tools.converters.plovasp.proj_group import ProjectorGroup -from pytriqs.archive import HDFArchive -import mytest +from h5 import HDFArchive +from . import mytest ################################################################################ # @@ -36,43 +36,43 @@ class TestProjectorGroupCompl(mytest.MyTestCase): efermi = self.el_struct.efermi self.eigvals = self.el_struct.eigvals - efermi - + struct = self.el_struct.structure kmesh = self.el_struct.kmesh - + self.proj_sh = ProjectorShell(self.pars.shells[0], vasp_data.plocar.plo, vasp_data.plocar.proj_params, kmesh, struct, 0) - + def test_num_bands(self): - self.pars.groups[0]['complement'] = True + self.pars.groups[0]['complement'] = True err_mess = "At each band the same number" - with self.assertRaisesRegexp(AssertionError, err_mess): - self.proj_gr = ProjectorGroup(self.pars.groups[0], [self.proj_sh], self.eigvals) - + with self.assertRaisesRegex(AssertionError, err_mess): + self.proj_gr = ProjectorGroup(self.pars.groups[0], [self.proj_sh], self.eigvals) + def test_compl(self): self.pars.groups[0]['complement'] = True self.pars.groups[0]['bands'] = [10, 25] - + self.proj_gr = ProjectorGroup(self.pars.groups[0], [self.proj_sh], self.eigvals) - + self.proj_gr.orthogonalize() self.proj_gr.calc_complement(self.eigvals) - + temp = self.proj_gr.normion self.proj_gr.normion = False block_maps, ndim = self.proj_gr.get_block_matrix_map() self.proj_gr.normion = temp _, ns, nk, _, _ = self.proj_gr.shells[0].proj_win.shape - + # Note that 'ns' and 'nk' are the same for all shells - for isp in xrange(ns): - for ik in xrange(nk): - print('ik',ik) + for isp in range(ns): + for ik in range(nk): + 
print(('ik',ik)) bmin = self.proj_gr.ib_win[ik, isp, 0] bmax = self.proj_gr.ib_win[ik, isp, 1]+1 - - nb = bmax - bmin + + nb = bmax - bmin p_mat = np.zeros((ndim, nb), dtype=np.complex128) #print(bmin,bmax,nb) # Combine all projectors of the group to one block projector @@ -84,11 +84,11 @@ class TestProjectorGroupCompl(mytest.MyTestCase): nlm = i2 - i1 + 1 shell = self.proj_gr.shells[ish] p_mat[i1:i2, :nb] = shell.proj_win[ion, isp, ik, :nlm, :nb] - + overlap_L = np.dot(p_mat.conjugate().transpose(),p_mat) overlap_N = np.dot(p_mat,p_mat.conjugate().transpose()) - + assert np.all(np.abs(np.eye(overlap_N.shape[0]) - overlap_N) < 1e-13) assert np.all(np.abs(np.eye(overlap_L.shape[0]) - overlap_L) < 1e-13) - \ No newline at end of file + diff --git a/test/plovasp/proj_group/test_select_bands.py b/test/python/plovasp/proj_group/test_select_bands.py similarity index 92% rename from test/plovasp/proj_group/test_select_bands.py rename to test/python/plovasp/proj_group/test_select_bands.py index 10f59ef8..bb9236b6 100644 --- a/test/plovasp/proj_group/test_select_bands.py +++ b/test/python/plovasp/proj_group/test_select_bands.py @@ -1,6 +1,6 @@ import os -import rpath +from . import rpath _rpath = os.path.dirname(rpath.__file__) + '/' import numpy as np @@ -9,7 +9,7 @@ from triqs_dft_tools.converters.plovasp.elstruct import ElectronicStructure from triqs_dft_tools.converters.plovasp.inpconf import ConfigParameters from triqs_dft_tools.converters.plovasp.proj_shell import ProjectorShell from triqs_dft_tools.converters.plovasp.proj_group import ProjectorGroup -import mytest +from . 
import mytest ################################################################################ # @@ -58,14 +58,14 @@ class TestSelectBands(mytest.MyTestCase): def test_emin_too_large(self): self.proj_gr.emin = 20.0 self.proj_gr.emax = 25.0 - with self.assertRaisesRegexp(Exception, "No bands inside the window"): + with self.assertRaisesRegex(Exception, "No bands inside the window"): ib_win, nb_min, nb_max = self.proj_gr.select_bands(self.eigvals) # Scenario 3 def test_emax_too_small(self): self.proj_gr.emin = -50.0 self.proj_gr.emax = -55.0 - with self.assertRaisesRegexp(Exception, "Energy window does not overlap"): + with self.assertRaisesRegex(Exception, "Energy window does not overlap"): ib_win, nb_min, nb_max = self.proj_gr.select_bands(self.eigvals) diff --git a/test/plovasp/proj_group/test_two_site.py b/test/python/plovasp/proj_group/test_two_site.py similarity index 76% rename from test/plovasp/proj_group/test_two_site.py rename to test/python/plovasp/proj_group/test_two_site.py index f3959447..75ba53e3 100644 --- a/test/plovasp/proj_group/test_two_site.py +++ b/test/python/plovasp/proj_group/test_two_site.py @@ -1,6 +1,6 @@ import os -import rpath +from . import rpath _rpath = os.path.dirname(rpath.__file__) + '/' import numpy as np @@ -9,8 +9,8 @@ from triqs_dft_tools.converters.plovasp.elstruct import ElectronicStructure from triqs_dft_tools.converters.plovasp.inpconf import ConfigParameters from triqs_dft_tools.converters.plovasp.proj_shell import ProjectorShell from triqs_dft_tools.converters.plovasp.proj_group import ProjectorGroup -from pytriqs.archive import HDFArchive -import mytest +from h5 import HDFArchive +from . 
import mytest ################################################################################ # @@ -51,10 +51,6 @@ class TestProjectorGroupTwoSite(mytest.MyTestCase): dens_mat, overl = self.proj_sh.density_matrix(self.el_struct) -# testout = _rpath + 'projortho_2site.out.test' -# with open(testout, 'wt') as f: -# f.write("density matrix: %s\n"%(dens_mat)) -# f.write("overlap matrix: %s\n"%(overl)) testout = _rpath + 'projortho_2site.test.h5' with HDFArchive(testout, 'w') as h5test: h5test['density_matrix'] = dens_mat @@ -64,9 +60,7 @@ class TestProjectorGroupTwoSite(mytest.MyTestCase): self.assertEqual(overl[0, 0, ...], np.eye(5)) self.assertEqual(overl[0, 1, ...], np.eye(5)) -# expected_file = _rpath + 'projortho_2site.out' -# self.assertFileEqual(testout, expected_file) - expected_file = _rpath + 'projortho_2site.out.h5' + expected_file = _rpath + 'projortho_2site.ref.h5' self.assertH5FileEqual(testout, expected_file) # Scenario 2 @@ -76,10 +70,6 @@ class TestProjectorGroupTwoSite(mytest.MyTestCase): dens_mat, overl = self.proj_sh.density_matrix(self.el_struct) -# testout = _rpath + 'projortho_normion.out.test' -# with open(testout, 'wt') as f: -# f.write("density matrix: %s\n"%(dens_mat)) -# f.write("overlap matrix: %s\n"%(overl)) testout = _rpath + 'projortho_normion.test.h5' with HDFArchive(testout, 'w') as h5test: h5test['density_matrix'] = dens_mat @@ -89,9 +79,7 @@ class TestProjectorGroupTwoSite(mytest.MyTestCase): self.assertEqual(overl[0, 0, ...], np.eye(5)) self.assertEqual(overl[0, 1, ...], np.eye(5)) -# expected_file = _rpath + 'projortho_normion.out' -# self.assertFileEqual(testout, expected_file) - expected_file = _rpath + 'projortho_normion.out.h5' + expected_file = _rpath + 'projortho_normion.ref.h5' self.assertH5FileEqual(testout, expected_file) diff --git a/test/plovasp/proj_group/two_site/DOSCAR b/test/python/plovasp/proj_group/two_site/DOSCAR similarity index 100% rename from test/plovasp/proj_group/two_site/DOSCAR rename to 
test/python/plovasp/proj_group/two_site/DOSCAR diff --git a/test/plovasp/proj_group/two_site/IBZKPT b/test/python/plovasp/proj_group/two_site/IBZKPT similarity index 100% rename from test/plovasp/proj_group/two_site/IBZKPT rename to test/python/plovasp/proj_group/two_site/IBZKPT diff --git a/test/plovasp/proj_group/two_site/LOCPROJ b/test/python/plovasp/proj_group/two_site/LOCPROJ similarity index 100% rename from test/plovasp/proj_group/two_site/LOCPROJ rename to test/python/plovasp/proj_group/two_site/LOCPROJ diff --git a/test/plovasp/proj_group/two_site/POSCAR b/test/python/plovasp/proj_group/two_site/POSCAR similarity index 100% rename from test/plovasp/proj_group/two_site/POSCAR rename to test/python/plovasp/proj_group/two_site/POSCAR diff --git a/test/plovasp/proj_shell/.gitignore b/test/python/plovasp/proj_shell/.gitignore similarity index 100% rename from test/plovasp/proj_shell/.gitignore rename to test/python/plovasp/proj_shell/.gitignore diff --git a/test/plovasp/proj_group/__init__.py b/test/python/plovasp/proj_shell/__init__.py similarity index 100% rename from test/plovasp/proj_group/__init__.py rename to test/python/plovasp/proj_shell/__init__.py diff --git a/test/plovasp/proj_shell/densmat.out b/test/python/plovasp/proj_shell/densmat.out similarity index 100% rename from test/plovasp/proj_shell/densmat.out rename to test/python/plovasp/proj_shell/densmat.out diff --git a/test/plovasp/proj_shell/example.cfg b/test/python/plovasp/proj_shell/example.cfg similarity index 100% rename from test/plovasp/proj_shell/example.cfg rename to test/python/plovasp/proj_shell/example.cfg diff --git a/test/plovasp/atm/mytest.py b/test/python/plovasp/proj_shell/mytest.py similarity index 91% rename from test/plovasp/atm/mytest.py rename to test/python/plovasp/proj_shell/mytest.py index fb0c64e8..4c56f35c 100644 --- a/test/plovasp/atm/mytest.py +++ b/test/python/plovasp/proj_shell/mytest.py @@ -44,8 +44,8 @@ class MyTestCase(unittest.TestCase): # Make a diff # # Remove 
empty lines - lstr1 = filter(lambda s: s.strip() != '', str1.splitlines(True)) - lstr2 = filter(lambda s: s.strip() != '', str2.splitlines(True)) + lstr1 = [s for s in str1.splitlines(True) if s.strip() != ''] + lstr2 = [s for s in str2.splitlines(True) if s.strip() != ''] # diff delta = difflib.unified_diff(lstr1, lstr2) # combine delta's to a string diff --git a/test/plovasp/proj_shell/one_site/DOSCAR b/test/python/plovasp/proj_shell/one_site/DOSCAR similarity index 100% rename from test/plovasp/proj_shell/one_site/DOSCAR rename to test/python/plovasp/proj_shell/one_site/DOSCAR diff --git a/test/plovasp/proj_shell/one_site/IBZKPT b/test/python/plovasp/proj_shell/one_site/IBZKPT similarity index 100% rename from test/plovasp/proj_shell/one_site/IBZKPT rename to test/python/plovasp/proj_shell/one_site/IBZKPT diff --git a/test/plovasp/proj_shell/one_site/LOCPROJ b/test/python/plovasp/proj_shell/one_site/LOCPROJ similarity index 100% rename from test/plovasp/proj_shell/one_site/LOCPROJ rename to test/python/plovasp/proj_shell/one_site/LOCPROJ diff --git a/test/plovasp/proj_shell/one_site/POSCAR b/test/python/plovasp/proj_shell/one_site/POSCAR similarity index 100% rename from test/plovasp/proj_shell/one_site/POSCAR rename to test/python/plovasp/proj_shell/one_site/POSCAR diff --git a/test/plovasp/proj_shell/one_site/PROJCAR b/test/python/plovasp/proj_shell/one_site/PROJCAR similarity index 100% rename from test/plovasp/proj_shell/one_site/PROJCAR rename to test/python/plovasp/proj_shell/one_site/PROJCAR diff --git a/test/plovasp/proj_shell/one_site/readme.txt b/test/python/plovasp/proj_shell/one_site/readme.txt similarity index 100% rename from test/plovasp/proj_shell/one_site/readme.txt rename to test/python/plovasp/proj_shell/one_site/readme.txt diff --git a/test/plovasp/proj_shell/projshells.out b/test/python/plovasp/proj_shell/projshells.out similarity index 99% rename from test/plovasp/proj_shell/projshells.out rename to 
test/python/plovasp/proj_shell/projshells.out index 054460b1..52c4e60b 100644 --- a/test/plovasp/proj_shell/projshells.out +++ b/test/python/plovasp/proj_shell/projshells.out @@ -1,4 +1,4 @@ -pars: {'ions': {'nion': 1, 'ion_list': [[1]]}, 'lshell': 2, 'corr': True, 'ion_sort': None, 'user_index': 1} +pars: {'user_index': 1, 'ions': {'ion_list': [[1]], 'nion': 1}, 'lshell': 2, 'ion_sort': None, 'corr': True} 10 25 1 0.000000 -0.000000 2 0.000000 0.000000 diff --git a/test/plovasp/proj_shell/rpath.py b/test/python/plovasp/proj_shell/rpath.py similarity index 100% rename from test/plovasp/proj_shell/rpath.py rename to test/python/plovasp/proj_shell/rpath.py diff --git a/test/plovasp/proj_shell/test_all.py b/test/python/plovasp/proj_shell/test_all.py similarity index 100% rename from test/plovasp/proj_shell/test_all.py rename to test/python/plovasp/proj_shell/test_all.py diff --git a/test/plovasp/proj_shell/test_projshells.py b/test/python/plovasp/proj_shell/test_projshells.py similarity index 91% rename from test/plovasp/proj_shell/test_projshells.py rename to test/python/plovasp/proj_shell/test_projshells.py index 244821b8..f9aaaf05 100644 --- a/test/plovasp/proj_shell/test_projshells.py +++ b/test/python/plovasp/proj_shell/test_projshells.py @@ -1,6 +1,6 @@ import os -import rpath +from . import rpath _rpath = os.path.dirname(rpath.__file__) + '/' import numpy as np @@ -9,7 +9,7 @@ from triqs_dft_tools.converters.plovasp.elstruct import ElectronicStructure from triqs_dft_tools.converters.plovasp.inpconf import ConfigParameters from triqs_dft_tools.converters.plovasp.proj_shell import ProjectorShell from triqs_dft_tools.converters.plovasp.proj_group import ProjectorGroup -import mytest +from . 
import mytest ################################################################################ # @@ -52,14 +52,14 @@ class TestProjectorShell(mytest.MyTestCase): nion, ns, nk, nlm, nbtot = self.proj_sh.proj_win.shape with open(testout, 'wt') as f: f.write("pars: %s\n"%(self.pars.shells[0])) - for ion in xrange(nion): - for isp in xrange(ns): - for ik in xrange(nk): + for ion in range(nion): + for isp in range(ns): + for ik in range(nk): ib1 = self.proj_sh.ib_win[ik, 0, 0] ib2 = self.proj_sh.ib_win[ik, 0, 1] f.write("%i %i\n"%(ib1, ib2)) - for ib in xrange(ib2 - ib1 + 1): - for ilm in xrange(nlm): + for ib in range(ib2 - ib1 + 1): + for ilm in range(nlm): p = self.proj_sh.proj_win[ion, isp, ik, ilm, ib] f.write("%5i %f %f\n"%(ilm+1, p.real, p.imag)) @@ -76,4 +76,4 @@ class TestProjectorShell(mytest.MyTestCase): expected_file = _rpath + 'densmat.out' self.assertFileEqual(testout, expected_file) - + diff --git a/test/plovasp/run_suite.py b/test/python/plovasp/run_suite.py old mode 100644 new mode 100755 similarity index 93% rename from test/plovasp/run_suite.py rename to test/python/plovasp/run_suite.py index ec9a97b6..82e47c10 --- a/test/plovasp/run_suite.py +++ b/test/python/plovasp/run_suite.py @@ -27,8 +27,8 @@ if __name__ == '__main__': if results.wasSuccessful(): raise SystemExit(0) else: - print "Failed tests:" + print("Failed tests:") for failure in results.failures: - print failure[0].__str__() + print(failure[0].__str__()) raise SystemExit(1) diff --git a/test/plovasp/vaspio/.gitignore b/test/python/plovasp/vaspio/.gitignore similarity index 100% rename from test/plovasp/vaspio/.gitignore rename to test/python/plovasp/vaspio/.gitignore diff --git a/test/plovasp/vaspio/DOSCAR.example b/test/python/plovasp/vaspio/DOSCAR.example similarity index 100% rename from test/plovasp/vaspio/DOSCAR.example rename to test/python/plovasp/vaspio/DOSCAR.example diff --git a/test/plovasp/vaspio/EIGENVAL.example b/test/python/plovasp/vaspio/EIGENVAL.example similarity index 
100% rename from test/plovasp/vaspio/EIGENVAL.example rename to test/python/plovasp/vaspio/EIGENVAL.example diff --git a/test/plovasp/vaspio/EIGENVAL.example.out b/test/python/plovasp/vaspio/EIGENVAL.example.out similarity index 100% rename from test/plovasp/vaspio/EIGENVAL.example.out rename to test/python/plovasp/vaspio/EIGENVAL.example.out diff --git a/test/plovasp/vaspio/EIGENVAL.wrong b/test/python/plovasp/vaspio/EIGENVAL.wrong similarity index 100% rename from test/plovasp/vaspio/EIGENVAL.wrong rename to test/python/plovasp/vaspio/EIGENVAL.wrong diff --git a/test/plovasp/vaspio/IBZKPT.example b/test/python/plovasp/vaspio/IBZKPT.example similarity index 100% rename from test/plovasp/vaspio/IBZKPT.example rename to test/python/plovasp/vaspio/IBZKPT.example diff --git a/test/plovasp/vaspio/IBZKPT.example.out b/test/python/plovasp/vaspio/IBZKPT.example.out similarity index 100% rename from test/plovasp/vaspio/IBZKPT.example.out rename to test/python/plovasp/vaspio/IBZKPT.example.out diff --git a/test/plovasp/vaspio/IBZKPT.notet b/test/python/plovasp/vaspio/IBZKPT.notet similarity index 100% rename from test/plovasp/vaspio/IBZKPT.notet rename to test/python/plovasp/vaspio/IBZKPT.notet diff --git a/test/plovasp/vaspio/IBZKPT.notet.out b/test/python/plovasp/vaspio/IBZKPT.notet.out similarity index 100% rename from test/plovasp/vaspio/IBZKPT.notet.out rename to test/python/plovasp/vaspio/IBZKPT.notet.out diff --git a/test/plovasp/vaspio/POSCAR.complex b/test/python/plovasp/vaspio/POSCAR.complex similarity index 100% rename from test/plovasp/vaspio/POSCAR.complex rename to test/python/plovasp/vaspio/POSCAR.complex diff --git a/test/plovasp/vaspio/POSCAR.example b/test/python/plovasp/vaspio/POSCAR.example similarity index 100% rename from test/plovasp/vaspio/POSCAR.example rename to test/python/plovasp/vaspio/POSCAR.example diff --git a/test/plovasp/vaspio/POSCAR.example.out b/test/python/plovasp/vaspio/POSCAR.example.out similarity index 100% rename from 
test/plovasp/vaspio/POSCAR.example.out rename to test/python/plovasp/vaspio/POSCAR.example.out diff --git a/test/plovasp/proj_shell/__init__.py b/test/python/plovasp/vaspio/__init__.py similarity index 100% rename from test/plovasp/proj_shell/__init__.py rename to test/python/plovasp/vaspio/__init__.py diff --git a/test/plovasp/vaspio/mytest.py b/test/python/plovasp/vaspio/mytest.py similarity index 92% rename from test/plovasp/vaspio/mytest.py rename to test/python/plovasp/vaspio/mytest.py index f92cba6d..7c859089 100644 --- a/test/plovasp/vaspio/mytest.py +++ b/test/python/plovasp/vaspio/mytest.py @@ -44,9 +44,9 @@ class MyTestCase(unittest.TestCase): # Make a diff # # Remove empty lines - lstr1 = filter(lambda s: s.strip() != '', str1.splitlines(True)) + lstr1 = [s for s in str1.splitlines(True) if s.strip() != ''] lstr1 = [str1.replace(" ","") for str1 in lstr1] # Remove spaces - lstr2 = filter(lambda s: s.strip() != '', str2.splitlines(True)) + lstr2 = [s for s in str2.splitlines(True) if s.strip() != ''] lstr2 = [str2.replace(" ","") for str2 in lstr2] # Remove spaces # diff delta = difflib.unified_diff(lstr1, lstr2) diff --git a/test/plovasp/vaspio/rpath.py b/test/python/plovasp/vaspio/rpath.py similarity index 100% rename from test/plovasp/vaspio/rpath.py rename to test/python/plovasp/vaspio/rpath.py diff --git a/test/plovasp/vaspio/test_all.py b/test/python/plovasp/vaspio/test_all.py similarity index 100% rename from test/plovasp/vaspio/test_all.py rename to test/python/plovasp/vaspio/test_all.py diff --git a/test/plovasp/vaspio/test_doscar.py b/test/python/plovasp/vaspio/test_doscar.py similarity index 95% rename from test/plovasp/vaspio/test_doscar.py rename to test/python/plovasp/vaspio/test_doscar.py index 8c43f8f2..efa32143 100644 --- a/test/plovasp/vaspio/test_doscar.py +++ b/test/python/plovasp/vaspio/test_doscar.py @@ -2,10 +2,10 @@ r""" Tests for class 'Doscar' from module 'vaspio' """ import os -import rpath +from . 
import rpath _rpath = os.path.dirname(rpath.__file__) + '/' -import mytest +from . import mytest import numpy as np from triqs_dft_tools.converters.plovasp.vaspio import Doscar diff --git a/test/plovasp/vaspio/test_eigenval.py b/test/python/plovasp/vaspio/test_eigenval.py similarity index 94% rename from test/plovasp/vaspio/test_eigenval.py rename to test/python/plovasp/vaspio/test_eigenval.py index ec3264e8..675814d1 100644 --- a/test/plovasp/vaspio/test_eigenval.py +++ b/test/python/plovasp/vaspio/test_eigenval.py @@ -2,10 +2,10 @@ r""" Tests for class 'Eigneval' from module 'vaspio' """ import os -import rpath +from . import rpath _rpath = os.path.dirname(rpath.__file__) + '/' -import mytest +from . import mytest import numpy as np from triqs_dft_tools.converters.plovasp.vaspio import Eigenval @@ -55,6 +55,6 @@ class TestEigenval(mytest.MyTestCase): eigenval = Eigenval() err_mess = "EIGENVAL file is incorrect" - with self.assertRaisesRegexp(AssertionError, err_mess): + with self.assertRaisesRegex(AssertionError, err_mess): eigenval.from_file(vasp_dir=_rpath, eig_filename=filename) diff --git a/test/plovasp/vaspio/test_kpoints.py b/test/python/plovasp/vaspio/test_kpoints.py similarity index 97% rename from test/plovasp/vaspio/test_kpoints.py rename to test/python/plovasp/vaspio/test_kpoints.py index f7410c3b..64cebb7c 100644 --- a/test/plovasp/vaspio/test_kpoints.py +++ b/test/python/plovasp/vaspio/test_kpoints.py @@ -2,10 +2,10 @@ r""" Tests for class 'Ibzkpt' from module 'vaspio' """ import os -import rpath +from . import rpath _rpath = os.path.dirname(rpath.__file__) + '/' -import mytest +from . 
import mytest import numpy as np from triqs_dft_tools.converters.plovasp.vaspio import Kpoints diff --git a/test/plovasp/vaspio/test_poscar.py b/test/python/plovasp/vaspio/test_poscar.py similarity index 97% rename from test/plovasp/vaspio/test_poscar.py rename to test/python/plovasp/vaspio/test_poscar.py index 3ec48e16..c9c5982d 100644 --- a/test/plovasp/vaspio/test_poscar.py +++ b/test/python/plovasp/vaspio/test_poscar.py @@ -2,10 +2,10 @@ r""" Tests for class 'Poscar' from module 'vaspio' """ import os -import rpath +from . import rpath _rpath = os.path.dirname(rpath.__file__) + '/' -import mytest +from . import mytest import numpy as np from triqs_dft_tools.converters.plovasp.vaspio import Poscar diff --git a/test/sigma_from_file.py b/test/python/sigma_from_file.py similarity index 84% rename from test/sigma_from_file.py rename to test/python/sigma_from_file.py index 7f309ab3..7096e14d 100644 --- a/test/sigma_from_file.py +++ b/test/python/sigma_from_file.py @@ -19,11 +19,11 @@ # ################################################################################ -from pytriqs.archive import * -from pytriqs.gf import * -from pytriqs.gf.tools import * +from h5 import * +from triqs.gf import * +from triqs.gf.tools import * from triqs_dft_tools.sumk_dft_tools import * -from pytriqs.utility.comparison_tests import * +from triqs.utility.comparison_tests import * import numpy as np # Read self energy from hdf file @@ -40,9 +40,10 @@ for name, s in Sigma_hdf: np.savetxt('Sigma_' + name + '.dat', mesh_a_data) # Read self energy from txt files -SK = SumkDFTTools(hdf_file = 'SrVO3.h5', use_dft_blocks = True) +SK = SumkDFTTools(hdf_file = 'SrVO3.ref.h5', use_dft_blocks = True) -a_list = sorted([a for a,al in SK.gf_struct_solver[0].iteritems()]) +# the order in the orig SrVO3 file is not assured, hence order it here +a_list = sorted([a for a,al in SK.gf_struct_solver[0].items()]) g_list = [read_gf_from_txt([['Sigma_' + a + '.dat']], a) for a in a_list] Sigma_txt = 
BlockGf(name_list = a_list, block_list = g_list, make_copies=False) diff --git a/test/python/sigma_from_file.ref.h5 b/test/python/sigma_from_file.ref.h5 new file mode 100644 index 00000000..773e48f7 Binary files /dev/null and b/test/python/sigma_from_file.ref.h5 differ diff --git a/test/srvo3_Gloc.py b/test/python/srvo3_Gloc.py similarity index 82% rename from test/srvo3_Gloc.py rename to test/python/srvo3_Gloc.py index 181101c2..d3d89577 100644 --- a/test/srvo3_Gloc.py +++ b/test/python/srvo3_Gloc.py @@ -19,23 +19,23 @@ # ################################################################################ -from pytriqs.archive import * -from pytriqs.gf import * +from h5 import * +from triqs.gf import * from triqs_dft_tools.sumk_dft import * -from triqs_dft_tools.converters.wien2k_converter import * -from pytriqs.operators.util import set_operator_structure -from pytriqs.utility.comparison_tests import * -from pytriqs.utility.h5diff import h5diff +from triqs_dft_tools.converters.wien2k import * +from triqs.operators.util import set_operator_structure +from triqs.utility.comparison_tests import * +from triqs.utility.h5diff import h5diff # Basic input parameters beta = 40 # Init the SumK class -SK=SumkDFT(hdf_file='SrVO3.h5',use_dft_blocks=True) +SK=SumkDFT(hdf_file='SrVO3.ref.h5',use_dft_blocks=True) num_orbitals = SK.corr_shells[0]['dim'] l = SK.corr_shells[0]['l'] -spin_names = ['up','down'] +spin_names = ['down','up'] orb_names = ['%s'%i for i in range(num_orbitals)] orb_hybridized = False diff --git a/test/python/srvo3_Gloc.ref.h5 b/test/python/srvo3_Gloc.ref.h5 new file mode 100644 index 00000000..e3ff6e82 Binary files /dev/null and b/test/python/srvo3_Gloc.ref.h5 differ diff --git a/test/srvo3_transp.py b/test/python/srvo3_transp.py similarity index 89% rename from test/srvo3_transp.py rename to test/python/srvo3_transp.py index 4d37523b..278457e3 100644 --- a/test/srvo3_transp.py +++ b/test/python/srvo3_transp.py @@ -20,11 +20,11 @@ 
################################################################################ from numpy import * -from triqs_dft_tools.converters.wien2k_converter import * +from triqs_dft_tools.converters.wien2k import * from triqs_dft_tools.sumk_dft import * from triqs_dft_tools.sumk_dft_tools import * -from pytriqs.utility.comparison_tests import * -from pytriqs.utility.h5diff import h5diff +from triqs.utility.comparison_tests import * +from triqs.utility.h5diff import h5diff beta = 40 @@ -32,7 +32,7 @@ Converter = Wien2kConverter(filename='SrVO3', repacking=True) Converter.convert_dft_input() Converter.convert_transport_input() -SK = SumkDFTTools(hdf_file='SrVO3.h5', use_dft_blocks=True) +SK = SumkDFTTools(hdf_file='SrVO3.ref.h5', use_dft_blocks=True) with HDFArchive('SrVO3_Sigma.h5', 'a') as ar: Sigma = ar['dmft_transp_input']['Sigma_w'] diff --git a/test/srvo3_transp.ref.h5 b/test/python/srvo3_transp.ref.h5 similarity index 100% rename from test/srvo3_transp.ref.h5 rename to test/python/srvo3_transp.ref.h5 diff --git a/test/sumkdft_basic.py b/test/python/sumkdft_basic.py similarity index 82% rename from test/sumkdft_basic.py rename to test/python/sumkdft_basic.py index 7f41b816..ebead200 100644 --- a/test/sumkdft_basic.py +++ b/test/python/sumkdft_basic.py @@ -20,13 +20,13 @@ # ################################################################################ -from pytriqs.archive import * +from h5 import * from triqs_dft_tools.sumk_dft_tools import SumkDFTTools -import pytriqs.utility.mpi as mpi -from pytriqs.utility.comparison_tests import * -from pytriqs.utility.h5diff import h5diff +import triqs.utility.mpi as mpi +from triqs.utility.comparison_tests import * +from triqs.utility.h5diff import h5diff -SK = SumkDFTTools(hdf_file = 'SrVO3.h5') +SK = SumkDFTTools(hdf_file = 'SrVO3.ref.h5') dm = SK.density_matrix(method = 'using_gf', beta = 40) dm_pc = SK.partial_charges(beta=40,with_Sigma=False,with_dc=False) @@ -36,4 +36,4 @@ with HDFArchive('sumkdft_basic.out.h5','w') as 
ar: ar['dm_pc'] = dm_pc if mpi.is_master_node(): - h5diff('sumkdft_basic.out.h5','sumkdft_basic.ref.h5') + h5diff('sumkdft_basic.out.h5','sumkdft_basic.ref.h5') diff --git a/test/sumkdft_basic.ref.h5 b/test/python/sumkdft_basic.ref.h5 similarity index 100% rename from test/sumkdft_basic.ref.h5 rename to test/python/sumkdft_basic.ref.h5 diff --git a/test/test_w90_ef.py b/test/python/test_w90_ef.py similarity index 88% rename from test/test_w90_ef.py rename to test/python/test_w90_ef.py index a94d8e72..bdf64d60 100644 --- a/test/test_w90_ef.py +++ b/test/python/test_w90_ef.py @@ -2,7 +2,7 @@ import unittest import numpy as np import sys sys.path.insert(1, '../python/converters/') -from wannier90_converter import Wannier90Converter +from triqs_dft_tools.converters.wannier90 import Wannier90Converter from triqs_dft_tools import SumkDFT class test_w90_conv(unittest.TestCase): @@ -19,7 +19,7 @@ class test_w90_conv(unittest.TestCase): for ik in range(SK1.n_k): self.assertTrue(np.all(SK1.hopping[ik,0] - conv2.fermi_energy*np.identity(SK1.n_orbitals[ik][0]) - SK2.hopping[ik,0] < 1e-12)) - + if __name__ == '__main__': unittest.main() - + diff --git a/test/w90_convert.py b/test/python/w90_convert.py similarity index 91% rename from test/w90_convert.py rename to test/python/w90_convert.py index 5ab531e2..411b0378 100644 --- a/test/w90_convert.py +++ b/test/python/w90_convert.py @@ -22,9 +22,9 @@ from triqs_dft_tools.converters import * -from pytriqs.archive import * -from pytriqs.utility.h5diff import h5diff -import pytriqs.utility.mpi as mpi +from h5 import * +from triqs.utility.h5diff import h5diff +import triqs.utility.mpi as mpi Converter = Wannier90Converter(seedname='LaVO3-Pnma',hdf_filename='w90_convert.out.h5') diff --git a/test/w90_convert.ref.h5 b/test/python/w90_convert.ref.h5 similarity index 100% rename from test/w90_convert.ref.h5 rename to test/python/w90_convert.ref.h5 diff --git a/test/wien2k_convert.py b/test/python/wien2k_convert.py similarity index 88% 
rename from test/wien2k_convert.py rename to test/python/wien2k_convert.py index 215200fc..0713ce1f 100644 --- a/test/wien2k_convert.py +++ b/test/python/wien2k_convert.py @@ -1,4 +1,3 @@ - ################################################################################ # # TRIQS: a Toolbox for Research in Interacting Quantum Systems @@ -20,10 +19,10 @@ # ################################################################################ -from pytriqs.archive import * -from pytriqs.utility.comparison_tests import * -from pytriqs.utility.h5diff import h5diff -import pytriqs.utility.mpi as mpi +from h5 import * +from triqs.utility.comparison_tests import * +from triqs.utility.h5diff import h5diff +import triqs.utility.mpi as mpi from triqs_dft_tools.converters import Wien2kConverter diff --git a/test/wien2k_convert.ref.h5 b/test/python/wien2k_convert.ref.h5 similarity index 100% rename from test/wien2k_convert.ref.h5 rename to test/python/wien2k_convert.ref.h5 diff --git a/test/sigma_from_file.ref.h5 b/test/sigma_from_file.ref.h5 deleted file mode 100644 index 7b6688f1..00000000 Binary files a/test/sigma_from_file.ref.h5 and /dev/null differ diff --git a/test/srvo3_Gloc.ref.h5 b/test/srvo3_Gloc.ref.h5 deleted file mode 100644 index d615317e..00000000 Binary files a/test/srvo3_Gloc.ref.h5 and /dev/null differ