mirror of https://github.com/triqs/dft_tools
synced 2025-01-03 18:16:03 +01:00

Merge branch 'py3' into unstable

Update the unstable branch with Python 3 compatibility and app4triqs skeleton compatibility.

commit 491bf30795
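At a glance, the user-facing Python renames this merge applies follow one pattern. The following is a sketch for orientation only, collected from the documentation diffs further down; it is not an exhaustive list:

```python
# Py3 / TRIQS 3.0 imports used after this merge, with their old counterparts
# (each pair appears in one of the doc diffs below):
from triqs.gf import *                   # was: from pytriqs.gf import *
from triqs.gf.tools import *             # was: from pytriqs.gf.tools import *
from h5 import HDFArchive                # was: from pytriqs.archive import HDFArchive
import triqs.utility.mpi as mpi          # was: import pytriqs.utility.mpi as mpi

# Converter modules drop the `_converter` suffix:
from triqs_dft_tools.converters.wien2k import Wien2kConverter  # was: converters.wien2k_converter
from triqs_dft_tools.converters.vasp import VaspConverter      # was: converters.vasp_converter
from triqs_dft_tools.converters.hk import HkConverter          # was: converters.hk_converter
```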
2  .clang-tidy  Normal file
@@ -0,0 +1,2 @@
+Checks: '-*,modernize-*,cppcoreguidelines-*,-modernize-use-trailing-return-type'
+HeaderFilterRegex: 'triqs_dft_tools'
@@ -1,2 +1,4 @@
+.travis.yml
 Dockerfile
 Jenkinsfile
+.git/objects/pack
2  .github/ISSUE_TEMPLATE/bug.md  vendored
@@ -32,7 +32,7 @@ Please provide the application version that you used.
 
 You can get this information from copy and pasting the output of
 ```bash
-python -c "from app4triqs.version import *; show_version(); show_git_hash();"
+python -c "from triqs_dft_tools.version import *; show_version(); show_git_hash();"
 ```
 from the command line. Also, please include the OS you are running and its version.
 
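The renamed one-liner above can also be run from a Python prompt; a minimal equivalent (assuming triqs_dft_tools is installed and importable):

```python
# Print the installed triqs_dft_tools version and git hash,
# exactly what the bash one-liner in the template does.
from triqs_dft_tools.version import show_version, show_git_hash

show_version()
show_git_hash()
```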
2  .gitignore  vendored  Normal file
@@ -0,0 +1,2 @@
+compile_commands.json
+doc/cpp2rst_generated
45  .travis.yml
@@ -1,47 +1,34 @@
 
 language: cpp
 sudo: required
-dist: trusty
+dist: bionic
 
 compiler:
 - gcc
-# - clang
+- clang
 
 before_install:
-- sudo add-apt-repository 'deb http://apt.llvm.org/trusty/ llvm-toolchain-trusty-5.0 main' -y
-- wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key|sudo apt-key add -
-- sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
 - sudo apt-get update
-- sudo apt-get install -y --allow-unauthenticated g++-7 clang-5.0
-- export LIBRARY_PATH=/usr/lib/llvm-5.0/lib:$LIBRARY_PATH
-- sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-7 60 --slave /usr/bin/g++ g++ /usr/bin/g++-7
-- sudo update-alternatives --install /usr/bin/clang clang /usr/bin/clang-5.0 60 --slave /usr/bin/clang++ clang++ /usr/bin/clang++-5.0
-- sudo apt-get install -y --allow-unauthenticated libboost-all-dev cmake git libgfortran3 gfortran openmpi-bin openmpi-common openmpi-doc libopenmpi-dev libblas-dev liblapack-dev libfftw3-dev libgmp-dev hdf5-tools libhdf5-serial-dev python-h5py python-dev python-numpy python-scipy python-jinja2 python-virtualenv python-matplotlib python-tornado python-zmq python-mpi4py python-mako clang-format-5.0 libclang-5.0-dev python-clang-5.0 python-sphinx libjs-mathjax valgrind libnfft3-dev
+- sudo apt-get install -y --allow-unauthenticated libblas-dev libboost-all-dev libfftw3-dev libgfortran3 libhdf5-serial-dev libgmp-dev liblapack-dev libopenmpi-dev libclang-dev python-clang-6.0 python-dev python-h5py python-mako python-matplotlib python-mpi4py python-numpy python-scipy python-sphinx libjs-mathjax libnfft3-dev
 
 install: true
 
 script:
-# ===== Set up Cpp2Py
-- git clone https://github.com/triqs/cpp2py
-- mkdir cpp2py/build && cd cpp2py/build
-- git checkout master
-- cmake .. -DCMAKE_CXX_COMPILER=/usr/bin/${CXX} -DPYTHON_INTERPRETER=/usr/bin/python -DCMAKE_INSTALL_PREFIX=$TRAVIS_BUILD_DIR/root_install
-- make -j8 install
-- cd $TRAVIS_BUILD_DIR
-- source root_install/share/cpp2pyvars.sh
+- export INSTALL_DIR=$HOME/root_install # We install outside the repository
 # ===== Set up TRIQS
-- git clone https://github.com/TRIQS/triqs --branch $TRAVIS_BRANCH
-- mkdir triqs/build && cd triqs/build
-- cmake .. -DCMAKE_CXX_COMPILER=/usr/bin/${CXX} -DBuild_Tests=OFF -DCMAKE_INSTALL_PREFIX=$TRAVIS_BUILD_DIR/root_install -DCMAKE_BUILD_TYPE=Debug
-- make -j8 install
 - cd $TRAVIS_BUILD_DIR
-- source root_install/share/triqsvars.sh
-# ===== Set up dft_tools and Test using fsanitize=address
+- git clone https://github.com/TRIQS/triqs --branch unstable
+- mkdir triqs/build && cd triqs/build
+- cmake .. -DBuild_Tests=OFF -DCMAKE_INSTALL_PREFIX=$INSTALL_DIR
+- make -j2 install
+- source $INSTALL_DIR/share/triqsvars.sh
+# ===== Set up triqs_dft_tools and test
+- cd $TRAVIS_BUILD_DIR
 - mkdir build && cd build
-- cmake .. -DCMAKE_BUILD_TYPE=Debug -DCMAKE_CXX_COMPILER=/usr/bin/${CXX} -DCMAKE_CXX_FLAGS='-fsanitize=address -fno-omit-frame-pointer -fuse-ld=gold'
-- make -j8
-- export ASAN_SYMBOLIZER_PATH=/usr/lib/llvm-5.0/bin/llvm-symbolizer
+- cmake .. -DASAN=ON -DUBSAN=ON
+- export UBSAN_SYMBOLIZER_PATH=$(which llvm-symbolizer)
+- export ASAN_SYMBOLIZER_PATH=$(which llvm-symbolizer)
+- export UBSAN_OPTIONS=symbolize=1:print_stacktrace=1
 - export ASAN_OPTIONS=symbolize=1:detect_leaks=0
 - export CTEST_OUTPUT_ON_FAILURE=1
-- if [ "$CXX" = g++ ]; then export LD_PRELOAD=/usr/lib/gcc/x86_64-linux-gnu/7/libasan.so; elif [ "$CXX" = clang++ ]; then export LD_PRELOAD=/usr/lib/llvm-5.0/lib/clang/5.0.1/lib/linux/libclang_rt.asan-x86_64.so; fi
-- cd test && ctest
+- make -j2 && make test
215  CMakeLists.txt
@@ -1,105 +1,164 @@
-# Start configuration
-cmake_minimum_required(VERSION 3.0.2 FATAL_ERROR)
-project(triqs_dft_tools C CXX Fortran)
+# ##############################################################################
+#
+# triqs_dft_tools - An example application using triqs and cpp2py
+#
+# Copyright (C) ...
+#
+# triqs_dft_tools is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# triqs_dft_tools is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# triqs_dft_tools (in the file COPYING.txt in this directory). If not, see
+# <http://www.gnu.org/licenses/>.
+#
+# ##############################################################################
+
+cmake_minimum_required(VERSION 3.3.2 FATAL_ERROR)
+cmake_policy(VERSION 3.3.2)
 if(POLICY CMP0074)
 cmake_policy(SET CMP0074 NEW)
 endif()
+if(POLICY CMP0077)
+cmake_policy(SET CMP0077 NEW)
+endif()
+
+# ############
+# Define Project
+project(triqs_dft_tools VERSION 3.0.0 LANGUAGES C CXX Fortran)
+get_directory_property(IS_SUBPROJECT PARENT_DIRECTORY)
+
+# ############
+# Load TRIQS and CPP2PY
+find_package(TRIQS 3.0 REQUIRED)
+
+# Get the git hash & print status
+triqs_get_git_hash_of_source_dir(PROJECT_GIT_HASH)
+message(STATUS "${PROJECT_NAME} version : ${PROJECT_VERSION}")
+message(STATUS "${PROJECT_NAME} Git hash: ${PROJECT_GIT_HASH}")
+
+# Enforce Consistent Versioning
+if(NOT ${PROJECT_VERSION_MAJOR}.${PROJECT_VERSION_MINOR} VERSION_EQUAL ${TRIQS_VERSION_MAJOR}.${TRIQS_VERSION_MINOR})
+message(FATAL_ERROR "The ${PROJECT_NAME} version ${PROJECT_VERSION} is not compatible with TRIQS version ${TRIQS_VERSION}.")
+endif()
+
+# Default Install directory to TRIQS_ROOT if not given or invalid.
+if(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT OR (NOT IS_ABSOLUTE ${CMAKE_INSTALL_PREFIX}))
+message(STATUS "No install prefix given (or invalid). Defaulting to TRIQS_ROOT")
+set(CMAKE_INSTALL_PREFIX ${TRIQS_ROOT} CACHE PATH "default install path" FORCE)
+set(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT FALSE)
+endif()
+if(NOT IS_SUBPROJECT)
+message(STATUS "-------- CMAKE_INSTALL_PREFIX: ${CMAKE_INSTALL_PREFIX} --------")
+endif()
+set(${PROJECT_NAME}_BINARY_DIR ${PROJECT_BINARY_DIR} CACHE STRING "Binary directory of the ${PROJECT_NAME} Project")
+
+# ############
+# Options
+
+# Make additional Find Modules available
+list(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/share/cmake/Modules)
+
 # Default to Release build type
 if(NOT CMAKE_BUILD_TYPE)
 set(CMAKE_BUILD_TYPE Release CACHE STRING "Type of build" FORCE)
 endif()
-message( STATUS "-------- BUILD-TYPE: ${CMAKE_BUILD_TYPE} --------")
-
-# Use shared libraries
-set(BUILD_SHARED_LIBS ON)
-
-# Load TRIQS and Cpp2Py
-find_package(TRIQS 2.2 REQUIRED)
-find_package(Cpp2Py 1.6 REQUIRED)
-
-if (NOT ${TRIQS_WITH_PYTHON_SUPPORT})
-MESSAGE(FATAL_ERROR "dft_tools require Python support in TRIQS")
+if(NOT IS_SUBPROJECT)
+message(STATUS "-------- BUILD-TYPE: ${CMAKE_BUILD_TYPE} --------")
 endif()
 
-# Default Install directory to TRIQS_ROOT if not given. Checks an absolute name is given.
-if(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT OR (NOT IS_ABSOLUTE ${CMAKE_INSTALL_PREFIX}))
-message(STATUS " No install prefix given (or invalid). Defaulting to TRIQS_ROOT")
-set(CMAKE_INSTALL_PREFIX ${TRIQS_ROOT} CACHE PATH "default install path" FORCE)
+# Python Support
+option(PythonSupport "Build with Python support" ON)
+if(PythonSupport AND NOT TRIQS_WITH_PYTHON_SUPPORT)
+message(FATAL_ERROR "TRIQS was installed without Python support. Cannot build the Python Interface. Disable the build with -DPythonSupport=OFF")
 endif()
-message(STATUS "-------- CMAKE_INSTALL_PREFIX: ${CMAKE_INSTALL_PREFIX} -------------")
 
-# Define the dft_tools version numbers and get the git hash
-set(DFT_TOOLS_VERSION_MAJOR 2)
-set(DFT_TOOLS_VERSION_MINOR 2)
-set(DFT_TOOLS_VERSION_PATCH 0)
-set(DFT_TOOLS_VERSION ${DFT_TOOLS_VERSION_MAJOR}.${DFT_TOOLS_VERSION_MINOR}.${DFT_TOOLS_VERSION_PATCH})
-triqs_get_git_hash_of_source_dir(DFT_TOOLS_GIT_HASH)
-message(STATUS "Dft_tools version : ${DFT_TOOLS_VERSION}")
-message(STATUS "Git hash: ${DFT_TOOLS_GIT_HASH}")
+# Documentation
+option(Build_Documentation "Build documentation" OFF)
+
+# Testing
+option(Build_Tests "Build tests" ON)
+if(Build_Tests)
+enable_testing()
+endif()
+
+# Export the list of compile-commands into compile_commands.json
+set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
+
+# Global compiler options
+option(BUILD_SHARED_LIBS "Enable compilation of shared libraries" OFF)
+add_compile_options($<$<CONFIG:Debug>:-ggdb3>)
+
+# Create an Interface target for compiler warnings
+add_library(${PROJECT_NAME}_warnings INTERFACE)
+target_compile_options(${PROJECT_NAME}_warnings
+INTERFACE
+-Wall
+-Wextra
+-Wpedantic
+-Wno-sign-compare
+$<$<CXX_COMPILER_ID:GNU>:-Wshadow=local>
+$<$<CXX_COMPILER_ID:GNU>:-Wno-attributes>
+$<$<CXX_COMPILER_ID:Clang>:-Wshadow>
+$<$<CXX_COMPILER_ID:Clang>:-Wno-gcc-compat>
+$<$<CXX_COMPILER_ID:AppleClang>:-Wshadow>
+$<$<CXX_COMPILER_ID:AppleClang>:-Wno-gcc-compat>
+)
+
+# #############
+# Build Project
+
+# Find / Build dependencies
+add_subdirectory(deps)
+
+# Build and install the library
+add_subdirectory(c++/${PROJECT_NAME})
+
+# add here stuff for the Fortran part in DFTTools
 add_subdirectory(fortran/dmftproj)
 
-# Add the compiling options (-D... ) for C++
-message(STATUS "TRIQS : Adding compilation flags detected by the library (C++11/14, libc++, etc...) ")
-add_subdirectory(c++)
-add_subdirectory(python python/triqs_dft_tools)
-add_subdirectory(shells)
-
-#------------------------
-# tests
-#------------------------
-
-option(TEST_COVERAGE "Analyze the coverage of tests" OFF)
-
-# perform tests with coverage info
-if (${TEST_COVERAGE})
-# we try to locate the coverage program
-find_program(PYTHON_COVERAGE python-coverage)
-find_program(PYTHON_COVERAGE coverage)
-if(NOT PYTHON_COVERAGE)
-message(FATAL_ERROR "Program coverage (or python-coverage) not found.\nEither set PYTHON_COVERAGE explicitly or disable TEST_COVERAGE!\nYou need to install the python package coverage, e.g. with\n pip install coverage\nor with\n apt install python-coverage")
-endif()
-
-message(STATUS "Setting up test coverage")
-add_custom_target(coverage ${PYTHON_COVERAGE} combine --append .coverage plovasp/.coverage || true COMMAND ${PYTHON_COVERAGE} html COMMAND echo "Open ${CMAKE_BINARY_DIR}/test/htmlcov/index.html in browser!" WORKING_DIRECTORY ${CMAKE_BINARY_DIR}/test)
+# Tests
+if(Build_Tests)
+add_subdirectory(test)
 endif()
 
-enable_testing()
-
-option(Build_Tests "Build the tests of the library " ON)
-if (Build_Tests)
-message(STATUS "-------- Preparing tests -------------")
-add_subdirectory(test)
+# Python
+if(PythonSupport)
+add_subdirectory(python/${PROJECT_NAME})
 endif()
 
-#------------------------
-# Documentation
-#------------------------
-option(Build_Documentation "Build documentation" OFF)
-if(${Build_Documentation})
-if(NOT ${TRIQS_WITH_DOCUMENTATION})
-message("Error: TRIQS library has not been compiled with its documentation")
-endif()
+# Docs
+if(Build_Documentation)
 add_subdirectory(doc)
 endif()
 
-#--------------------------------------------------------
-# Packaging
-#--------------------------------------------------------
+# dfttols vasp interface bash scripts
+add_subdirectory(bin)
+
+# Additional configuration files
+add_subdirectory(share)
+
+# #############
+# Debian Package
+
 option(BUILD_DEBIAN_PACKAGE "Build a deb package" OFF)
-if(BUILD_DEBIAN_PACKAGE)
+if(BUILD_DEBIAN_PACKAGE AND NOT IS_SUBPROJECT)
 if(NOT CMAKE_INSTALL_PREFIX STREQUAL "/usr")
 message(FATAL_ERROR "CMAKE_INSTALL_PREFIX must be /usr for packaging")
 endif()
-SET(CPACK_GENERATOR "DEB")
-SET(CPACK_PACKAGE_VERSION ${DFT_TOOLS_VERSION})
-SET(CPACK_PACKAGE_CONTACT "https://github.com/TRIQS/dft_tools")
-EXECUTE_PROCESS(COMMAND dpkg --print-architecture OUTPUT_VARIABLE CMAKE_DEBIAN_PACKAGE_ARCHITECTURE OUTPUT_STRIP_TRAILING_WHITESPACE)
-SET(CPACK_DEBIAN_PACKAGE_DEPENDS "triqs (>= 2.2)")
-SET(CPACK_DEBIAN_PACKAGE_CONFLICTS "dft_tools")
-SET(CPACK_DEBIAN_PACKAGE_SHLIBDEPS ON)
-SET(CPACK_DEBIAN_PACKAGE_GENERATE_SHLIBS ON)
-INCLUDE(CPack)
+set(CPACK_PACKAGE_NAME ${PROJECT_NAME})
+set(CPACK_GENERATOR "DEB")
+set(CPACK_PACKAGE_VERSION ${PROJECT_VERSION})
+set(CPACK_PACKAGE_CONTACT "https://github.com/TRIQS/${PROJECT_NAME}")
+execute_process(COMMAND dpkg --print-architecture OUTPUT_VARIABLE CMAKE_DEBIAN_PACKAGE_ARCHITECTURE OUTPUT_STRIP_TRAILING_WHITESPACE)
+set(CPACK_DEBIAN_PACKAGE_DEPENDS "triqs (>= 3.0)")
+set(CPACK_DEBIAN_PACKAGE_SHLIBDEPS ON)
+set(CPACK_DEBIAN_PACKAGE_GENERATE_SHLIBS ON)
+include(CPack)
 endif()
1  ChangeLog.md  Symbolic link
@@ -0,0 +1 @@
+doc/ChangeLog.md
12  Dockerfile
@@ -1,12 +1,16 @@
 # See ../triqs/packaging for other options
-FROM flatironinstitute/triqs:master-ubuntu-clang
+FROM flatironinstitute/triqs:unstable-ubuntu-clang
+ARG APPNAME=triqs_dft_tools
 
-ARG APPNAME
-COPY . $SRC/$APPNAME
+COPY requirements.txt /src/$APPNAME/requirements.txt
+RUN pip3 install -r /src/$APPNAME/requirements.txt
 
+COPY --chown=build . $SRC/$APPNAME
 WORKDIR $BUILD/$APPNAME
 RUN chown build .
 USER build
 ARG BUILD_DOC=0
-RUN cmake $SRC/$APPNAME -DTRIQS_ROOT=${INSTALL} -DBuild_Documentation=${BUILD_DOC} && make -j2 && make test CTEST_OUTPUT_ON_FAILURE=1
+ARG BUILD_ID
+RUN cmake $SRC/$APPNAME -DTRIQS_ROOT=${INSTALL} -DBuild_Documentation=${BUILD_DOC} -DBuild_Deps=Always && make -j2 || make -j1 VERBOSE=1
 USER root
 RUN make install
47  Jenkinsfile  vendored
@@ -1,5 +1,6 @@
 def projectName = "dft_tools" /* set to app/repo name */
+def dockerName = projectName.toLowerCase();
 /* which platform to build documentation on */
 def documentationPlatform = "ubuntu-clang"
 /* depend on triqs upstream branch/project */
@@ -37,7 +38,12 @@ for (int i = 0; i < dockerPlatforms.size(); i++) {
 mv -f Dockerfile.jenkins Dockerfile
 """
 /* build and tag */
-def img = docker.build("flatironinstitute/${projectName}:${env.BRANCH_NAME}-${env.STAGE_NAME}", "--build-arg APPNAME=${projectName} --build-arg BUILD_DOC=${platform==documentationPlatform} .")
+def img = docker.build("flatironinstitute/${dockerName}:${env.BRANCH_NAME}-${env.STAGE_NAME}", "--build-arg APPNAME=${projectName} --build-arg BUILD_DOC=${platform==documentationPlatform} --build-arg BUILD_ID=${env.BUILD_TAG} .")
+catchError(buildResult: 'UNSTABLE', stageResult: 'UNSTABLE') {
+img.inside() {
+sh "make -C \$BUILD/${projectName} test CTEST_OUTPUT_ON_FAILURE=1"
+}
+}
 if (!keepInstall) {
 sh "docker rmi --no-prune ${img.imageName()}"
 }
@@ -48,7 +54,7 @@ for (int i = 0; i < dockerPlatforms.size(); i++) {
 /****************** osx builds (on host) */
 def osxPlatforms = [
 ["gcc", ['CC=gcc-9', 'CXX=g++-9', 'FC=gfortran-9']],
-["clang", ['CC=$BREW/opt/llvm/bin/clang', 'CXX=$BREW/opt/llvm/bin/clang++', 'CXXFLAGS=-I$BREW/opt/llvm/include', 'LDFLAGS=-L$BREW/opt/llvm/lib', 'FC=gfortran-9']]
+["clang", ['CC=$BREW/opt/llvm/bin/clang', 'CXX=$BREW/opt/llvm/bin/clang++', 'FC=gfortran-9', 'CXXFLAGS=-I$BREW/opt/llvm/include', 'LDFLAGS=-L$BREW/opt/llvm/lib']]
 ]
 for (int i = 0; i < osxPlatforms.size(); i++) {
 def platformEnv = osxPlatforms[i]
@@ -58,29 +64,37 @@ for (int i = 0; i < osxPlatforms.size(); i++) {
 def srcDir = pwd()
 def tmpDir = pwd(tmp:true)
 def buildDir = "$tmpDir/build"
+/* install real branches in a fixed predictable place so apps can find them */
 def installDir = keepInstall ? "${env.HOME}/install/${projectName}/${env.BRANCH_NAME}/${platform}" : "$tmpDir/install"
 def triqsDir = "${env.HOME}/install/triqs/${triqsBranch}/${platform}"
+def venv = triqsDir
 dir(installDir) {
 deleteDir()
 }
 
 checkout scm
 
+def hdf5 = "${env.BREW}/opt/hdf5@1.10"
 dir(buildDir) { withEnv(platformEnv[1].collect { it.replace('\$BREW', env.BREW) } + [
-"PATH=$triqsDir/bin:${env.BREW}/bin:/usr/bin:/bin:/usr/sbin",
-"CPLUS_INCLUDE_PATH=$triqsDir/include:${env.BREW}/include",
-"LIBRARY_PATH=$triqsDir/lib:${env.BREW}/lib",
-"CMAKE_PREFIX_PATH=$triqsDir/lib/cmake/triqs"]) {
+"PATH=$venv/bin:${env.BREW}/bin:/usr/bin:/bin:/usr/sbin",
+"HDF5_ROOT=$hdf5",
+"C_INCLUDE_PATH=$hdf5/include:${env.BREW}/include",
+"CPLUS_INCLUDE_PATH=$venv/include:$hdf5/include:${env.BREW}/include",
+"LIBRARY_PATH=$venv/lib:$hdf5/lib:${env.BREW}/lib",
+"LD_LIBRARY_PATH=$hdf5/lib",
+"PYTHONPATH=$installDir/lib/python3.7/site-packages",
+"CMAKE_PREFIX_PATH=$venv/lib/cmake/triqs"]) {
 deleteDir()
 /* note: this is installing into the parent (triqs) venv (install dir), which is thus shared among apps and so not be completely safe */
-sh "pip install -r $srcDir/requirements.txt"
+sh "pip3 install -U -r $srcDir/requirements.txt"
-sh "cmake $srcDir -DCMAKE_INSTALL_PREFIX=$installDir -DTRIQS_ROOT=$triqsDir"
+sh "cmake $srcDir -DCMAKE_INSTALL_PREFIX=$installDir -DTRIQS_ROOT=$triqsDir -DBuild_Deps=Always"
-sh "make -j3"
+sh "make -j2"
-try {
+catchError(buildResult: 'UNSTABLE', stageResult: 'UNSTABLE') { try {
 sh "make test CTEST_OUTPUT_ON_FAILURE=1"
 } catch (exc) {
 archiveArtifacts(artifacts: 'Testing/Temporary/LastTest.log')
 throw exc
-}
+} }
 sh "make install"
 } }
 } }
@@ -95,15 +109,20 @@ try {
 stage("publish") { timeout(time: 5, unit: 'MINUTES') {
 def commit = sh(returnStdout: true, script: "git rev-parse HEAD").trim()
 def release = env.BRANCH_NAME == "master" || env.BRANCH_NAME == "unstable" || sh(returnStdout: true, script: "git describe --exact-match HEAD || true").trim()
-def workDir = pwd()
+def workDir = pwd(tmp:true)
 lock('triqs_publish') {
 /* Update documention on gh-pages branch */
 dir("$workDir/gh-pages") {
 def subdir = "${projectName}/${env.BRANCH_NAME}"
 git(url: "ssh://git@github.com/TRIQS/TRIQS.github.io.git", branch: "master", credentialsId: "ssh", changelog: false)
 sh "rm -rf ${subdir}"
-docker.image("flatironinstitute/${projectName}:${env.BRANCH_NAME}-${documentationPlatform}").inside() {
+docker.image("flatironinstitute/${dockerName}:${env.BRANCH_NAME}-${documentationPlatform}").inside() {
-sh "cp -rp \$INSTALL/share/doc/triqs_${projectName} ${subdir}"
+sh """#!/bin/bash -ex
+base=\$INSTALL/share/doc
+dir="${projectName}"
+[[ -d \$base/triqs_\$dir ]] && dir=triqs_\$dir || [[ -d \$base/\$dir ]]
+cp -rp \$base/\$dir ${subdir}
+"""
 }
 sh "git add -A ${subdir}"
 sh """
@@ -1,9 +1,9 @@
 
-configure_file(plovasp.bash.in plovasp)
+configure_file(plovasp.in plovasp)
 install(FILES ${CMAKE_CURRENT_BINARY_DIR}/plovasp DESTINATION bin
 PERMISSIONS OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE)
 
-configure_file(vasp_dmft.bash.in vasp_dmft)
+configure_file(vasp_dmft.in vasp_dmft)
 install(FILES ${CMAKE_CURRENT_BINARY_DIR}/vasp_dmft DESTINATION bin
 PERMISSIONS OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE)
5  c++/plovasp/atm/.gitignore  vendored
@@ -1,5 +0,0 @@
-makefile
-Makefile
-*.so
-*.o
-*.pyc
@@ -1,7 +0,0 @@
-add_library(atm_c dos_tetra3d.hpp dos_tetra3d.cpp argsort.hpp argsort.cpp)
-target_link_libraries(atm_c triqs)
-target_compile_options(atm_c PRIVATE -std=c++17)
-
-install(TARGETS atm_c DESTINATION lib)
-
-add_subdirectory(test)
@@ -1,13 +0,0 @@
-enable_testing()
-
-FILE(GLOB TestList RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} *.cpp)
-FOREACH( TestName1 ${TestList} )
-STRING(REPLACE ".cpp" "" TestName ${TestName1})
-add_executable( ${TestName} ${TestName}.cpp )
-target_link_libraries( ${TestName} atm_c triqs)
-triqs_set_rpath_for_target( ${TestName} )
-add_test(NAME ${TestName} COMMAND ${CMAKE_CURRENT_BINARY_DIR}/${t})
-ENDFOREACH( TestName1 ${TestList} )
-
-
-
81  c++/triqs_dft_tools/CMakeLists.txt  Normal file
@@ -0,0 +1,81 @@
+file(GLOB_RECURSE sources *.cpp)
+add_library(${PROJECT_NAME}_c ${sources})
+add_library(${PROJECT_NAME}::${PROJECT_NAME}_c ALIAS ${PROJECT_NAME}_c)
+
+# Link against triqs and enable warnings
+target_link_libraries(${PROJECT_NAME}_c PUBLIC triqs PRIVATE $<BUILD_INTERFACE:${PROJECT_NAME}_warnings>)
+
+# Configure target and compilation
+set_property(TARGET ${PROJECT_NAME}_c PROPERTY POSITION_INDEPENDENT_CODE ON)
+target_include_directories(${PROJECT_NAME}_c PUBLIC $<BUILD_INTERFACE:${PROJECT_SOURCE_DIR}/c++>)
+target_include_directories(${PROJECT_NAME}_c SYSTEM INTERFACE $<INSTALL_INTERFACE:${CMAKE_INSTALL_PREFIX}/include>)
+target_compile_definitions(${PROJECT_NAME}_c PUBLIC
+TRIQS_DFT_TOOLS_GIT_HASH=${PROJECT_GIT_HASH}
+TRIQS_GIT_HASH=${TRIQS_GIT_HASH}
+$<$<CONFIG:Debug>:TRIQS_DFT_TOOLS_DEBUG>
+$<$<CONFIG:Debug>:TRIQS_DEBUG>
+$<$<CONFIG:Debug>:TRIQS_ARRAYS_ENFORCE_BOUNDCHECK>
+)
+
+# Install library and headers
+install(TARGETS ${PROJECT_NAME}_c EXPORT ${PROJECT_NAME}-targets DESTINATION lib)
+install(DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} DESTINATION include FILES_MATCHING PATTERN "*.hpp" PATTERN "*.h")
+
+
+# ========= Static Analyzer Checks ==========
+
+option(ANALYZE_SOURCES OFF "Run static analyzer checks if found (clang-tidy, cppcheck)")
+if(ANALYZE_SOURCES)
+
+# Locate static analyzer tools
+find_program(CPPCHECK_EXECUTABLE NAMES "cppcheck" PATHS ENV PATH)
+find_program(CLANG_TIDY_EXECUTABLE NAMES "clang-tidy" PATHS ENV PATH)
+
+# Run clang-tidy if found
+if(CLANG_TIDY_EXECUTABLE)
+message(STATUS "clang-tidy found: ${CLANG_TIDY_EXECUTABLE}")
+set_target_properties(${PROJECT_NAME}_c PROPERTIES CXX_CLANG_TIDY "${CLANG_TIDY_EXECUTABLE}")
+else()
+message(STATUS "clang-tidy not found in $PATH. Please consider installing clang-tidy for additional checks!")
+endif()
+
+# Run cppcheck if found
+if(CPPCHECK_EXECUTABLE)
+message(STATUS "cppcheck found: ${CPPCHECK_EXECUTABLE}")
+add_custom_command(
+TARGET ${PROJECT_NAME}_c
+COMMAND ${CPPCHECK_EXECUTABLE}
+--enable=warning,style,performance,portability
+--std=c++17
+--template=gcc
+--verbose
+--force
+--quiet
+${sources}
+WORKING_DIRECTORY
+${CMAKE_CURRENT_SOURCE_DIR}
+)
+else()
+message(STATUS "cppcheck not found in $PATH. Please consider installing cppcheck for additional checks!")
+endif()
+
+endif()
+
+
+# ========= Dynamic Analyzer Checks ==========
+
+option(ASAN OFF "Compile library and executables with LLVM Address Sanitizer")
+option(UBSAN OFF "Compile library and executables with LLVM Undefined Behavior Sanitizer")
+
+if(ASAN)
+if(NOT TARGET asan)
+find_package(sanitizer REQUIRED "asan")
+endif()
+target_link_libraries(${PROJECT_NAME}_c PUBLIC $<BUILD_INTERFACE:asan>)
+endif()
+if(UBSAN)
+if(NOT TARGET ubsan)
+find_package(sanitizer REQUIRED "ubsan")
+endif()
+target_link_libraries(${PROJECT_NAME}_c PUBLIC $<BUILD_INTERFACE:ubsan>)
+endif()
3  c++/triqs_dft_tools/converters/vasp.hpp  Normal file
@@ -0,0 +1,3 @@
+#pragma once
+#include "./vasp/argsort.hpp"
+#include "./vasp/dos_tetra3d.hpp"
@@ -22,16 +22,14 @@
 
 #include <triqs/arrays.hpp>
 
-using triqs::arrays::array;
-using triqs::arrays::array_view;
-
 /// DOS of a band by analytical tetrahedron method
 ///
 /// Returns corner weights for all tetrahedra for a given band and real energy.
-array<double, 2>
-dos_tetra_weights_3d(array_view<double, 1> eigk, /// Band energies for each k-point
+triqs::arrays::array<double, 2>
+dos_tetra_weights_3d(triqs::arrays::array_view<double, 1> eigk, /// Band energies for each k-point
 double en, /// Energy at which DOS weights are to be calculated
-array_view<long, 2> itt /// Tetrahedra defined by k-point indices
+triqs::arrays::array_view<long, 2> itt /// Tetrahedra defined by k-point indices
 );
 //array<double, 2>
 //dos_tetra_weights_3d(array<double, 1> eigk, /// Band energies for each k-point
3  c++/triqs_dft_tools/triqs_dft_tools.hpp  Normal file
@@ -0,0 +1,3 @@
+#pragma once
+#include "./converters/vasp.hpp"
+
@@ -1,8 +0,0 @@
-def application_pytriqs_import(name,*args,**kwargs):
-    if name.startswith('@package_name@'):
-        name = name[len('@package_name@')+1:]
-    return builtin_import(name,*args,**kwargs)
-
-import __builtin__
-__builtin__.__import__, builtin_import = application_pytriqs_import, __builtin__.__import__
-
1  deps/.gitignore  vendored  Normal file
@@ -0,0 +1 @@
+*
66  deps/CMakeLists.txt  vendored  Normal file
@@ -0,0 +1,66 @@
+include(external_dependency.cmake)
+
+# Add your dependencies with the function
+#
+# external_dependency(name
+# [VERSION <version-number>]
+# [GIT_REPO <url>]
+# [GIT_TAG <tag>]
+# [BUILD_ALWAYS]
+# [EXCLUDE_FROM_ALL]
+# )
+#
+# Resolve the dependency using the following steps in order.
+# If a step was successful, skip the remaining ones.
+#
+# 1. Use find_package(name [<version-number>])
+#    to locate the package in the system.
+#    Skip this step if Build_Deps option is set.
+# 2. Try to find a directory containing the sources
+#    at ${PROJECT_SOURCE_DIR}/deps/name. If found
+#    build it as a cmake sub-project.
+# 3. If GIT_REPO is provided, git clone the sources,
+#    and build them as a cmake sub-project.
+#
+# Addtional options:
+#
+# GIT_TAG - Use this keyword to specify the git-tag, branch or commit hash
+#
+# BUILD_ALWAYS - If set, this dependency will always be built from source
+#                and will never be searched in the system.
+#
+# EXCLUDE_FROM_ALL - If set, targets of the dependency cmake subproject
+#                    will not be included in the ALL target of the project.
+#                    In particular the dependency will not be installed.
+
+if(NOT DEFINED Build_Deps)
+set(Build_Deps "Never" CACHE STRING "Do we build dependencies from source? [Never/Always/IfNotFound]")
+else()
+set(Build_Deps_Opts "Never" "Always" "IfNotFound")
+if(NOT ${Build_Deps} IN_LIST Build_Deps_Opts)
+message(FATAL_ERROR "Build_Deps option should be either 'Never', 'Always' or 'IfNotFound'")
+endif()
+set(Build_Deps ${Build_Deps} CACHE STRING "Do we build dependencies from source? [Never/Always/IfNotFound]")
+if(NOT IS_SUBPROJECT AND NOT Build_Deps STREQUAL "Always" AND (ASAN OR UBSAN))
+message(WARNING "For builds with llvm sanitizers (ASAN/UBSAN) it is recommended to use -DBuild_Deps=Always to avoid false positives.")
+endif()
+endif()
+
+# -- Cpp2Py --
+if(PythonSupport OR Build_Documentation)
+external_dependency(Cpp2Py
+GIT_REPO https://github.com/TRIQS/cpp2py
+VERSION 2.0
+GIT_TAG master
+BUILD_ALWAYS
+EXCLUDE_FROM_ALL
+)
+endif()
+
+# -- GTest --
+external_dependency(GTest
+GIT_REPO https://github.com/google/googletest
+GIT_TAG release-1.10.0
+BUILD_ALWAYS
+EXCLUDE_FROM_ALL
+)
70  deps/external_dependency.cmake  vendored  Normal file
@@ -0,0 +1,70 @@
+###################################################################################
+#
+# TRIQS_DFT_TOOLS: a Toolbox for Research in Interacting Quantum Systems
+#
+# Copyright (C) 2020 Simons Foundation
+# authors: N. Wentzell
+#
+# TRIQS_DFT_TOOLS is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# TRIQS_DFT_TOOLS is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
+# details.
+#
+# You should have received a copy of the GNU General Public License along with
+# TRIQS_DFT_TOOLS. If not, see <http://www.gnu.org/licenses/>.
+#
+###################################################################################
+
+function(external_dependency)
+cmake_parse_arguments(ARG "EXCLUDE_FROM_ALL;BUILD_ALWAYS" "VERSION;GIT_REPO;GIT_TAG" "" ${ARGN})
+
+# -- Was dependency already found?
+get_property(${ARGV0}_FOUND GLOBAL PROPERTY ${ARGV0}_FOUND)
+if(${ARGV0}_FOUND)
+message(STATUS "Dependency ${ARGV0} was already resolved.")
+return()
+endif()
+
+# -- Try to find package in system.
+if(NOT ARG_BUILD_ALWAYS AND NOT Build_Deps STREQUAL "Always")
+find_package(${ARGV0} ${ARG_VERSION} QUIET HINTS ${CMAKE_INSTALL_PREFIX})
+if(${ARGV0}_FOUND)
+message(STATUS "Found dependency ${ARGV0} in system ${${ARGV0}_ROOT}")
+return()
+elseif(Build_Deps STREQUAL "Never")
+message(FATAL_ERROR "Could not find dependency ${ARGV0} in system. Please install the dependency manually or use -DBuild_Deps=IfNotFound during cmake configuration to automatically build all dependencies that are not found.")
+endif()
+endif()
+
+# -- Build package from source
+message(STATUS " =============== Configuring Dependency ${ARGV0} =============== ")
+if(ARG_EXCLUDE_FROM_ALL)
+set(subdir_opts EXCLUDE_FROM_ALL)
+set(Build_Tests OFF)
+set(Build_Documentation OFF)
+endif()
+if(IS_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/${ARGV0})
+message(STATUS "Found sources for dependency ${ARGV0} at ${CMAKE_CURRENT_SOURCE_DIR}/${ARGV0}")
+add_subdirectory(${ARGV0} ${subdir_opts})
+elseif(ARG_GIT_REPO)
+set(bin_dir ${CMAKE_CURRENT_BINARY_DIR}/${ARGV0})
+set(src_dir ${bin_dir}_src)
+if(NOT IS_DIRECTORY ${src_dir})
+if(ARG_GIT_TAG)
+set(clone_opts --branch ${ARG_GIT_TAG} -c advice.detachedHead=false)
+endif()
+execute_process(COMMAND git clone ${ARG_GIT_REPO} --depth 1 ${clone_opts} ${src_dir})
+endif()
+add_subdirectory(${src_dir} ${bin_dir} ${subdir_opts})
+else()
+message(FATAL_ERROR "Could not find or build dependency ${ARGV0}")
+endif()
+message(STATUS " =============== End ${ARGV0} Configuration =============== ")
+set_property(GLOBAL PROPERTY ${ARGV0}_FOUND TRUE)
+
+endfunction()
@@ -1,23 +1,81 @@
-# generate the conf.py
+# Generate the sphinx config file
 configure_file(${CMAKE_CURRENT_SOURCE_DIR}/conf.py.in ${CMAKE_CURRENT_BINARY_DIR}/conf.py @ONLY)
+
+# -----------------------------------------------------------------------------
+# Create an optional target that allows us to regenerate the C++ doc with c++2rst
+# -----------------------------------------------------------------------------
+add_custom_target(${PROJECT_NAME}_docs_cpp2rst)
+include(${PROJECT_SOURCE_DIR}/share/cmake/extract_flags.cmake)
+extract_flags(${PROJECT_NAME}_c BUILD_INTERFACE)
+separate_arguments(${PROJECT_NAME}_c_CXXFLAGS)
+macro(generate_docs header_file)
+add_custom_command(
+TARGET ${PROJECT_NAME}_docs_cpp2rst
+COMMAND rm -rf ${CMAKE_CURRENT_SOURCE_DIR}/cpp2rst_generated
+COMMAND
+PYTHONPATH=${CPP2PY_BINARY_DIR}:$ENV{PYTHONPATH}
+PATH=${CPP2PY_BINARY_DIR}/bin:${CPP2PY_ROOT}/bin:$ENV{PATH}
+c++2rst
+${header_file}
+-N ${PROJECT_NAME}
+--output_directory ${CMAKE_CURRENT_SOURCE_DIR}/cpp2rst_generated
+-I${PROJECT_SOURCE_DIR}/c++
+--cxxflags="${${PROJECT_NAME}_c_CXXFLAGS}"
+)
+endmacro(generate_docs)
+
+generate_docs(${PROJECT_SOURCE_DIR}/c++/${PROJECT_NAME}/${PROJECT_NAME}.hpp)
+
+# --------------------------------------------------------
+# Build & Run the C++ doc examples and capture the output
+# --------------------------------------------------------
+
+add_custom_target(${PROJECT_NAME}_docs_example_output)
+file(GLOB_RECURSE ExampleList RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} *.cpp)
+foreach(example ${ExampleList})
+get_filename_component(f ${example} NAME_WE)
+get_filename_component(d ${example} DIRECTORY)
+add_executable(${PROJECT_NAME}_doc_${f} EXCLUDE_FROM_ALL ${example})
+set_property(TARGET ${PROJECT_NAME}_doc_${f} PROPERTY RUNTIME_OUTPUT_DIRECTORY ${d})
+target_link_libraries(${PROJECT_NAME}_doc_${f} triqs)
+add_custom_command(TARGET ${PROJECT_NAME}_doc_${f}
+COMMAND ${PROJECT_NAME}_doc_${f} > ${CMAKE_CURRENT_SOURCE_DIR}/${d}/${f}.output 2>/dev/null
+WORKING_DIRECTORY ${d}
+)
+add_dependencies(${PROJECT_NAME}_docs_example_output ${PROJECT_NAME}_doc_${f})
+endforeach()
+
 # ---------------------------------
 # Top Sphinx target
 # ---------------------------------
-# Sources
-file(GLOB_RECURSE sources *.rst)
+if(NOT DEFINED SPHINXBUILD_EXECUTABLE)
+find_package(Sphinx)
+endif()
 
-# create documentation target
-set(sphinx_top ${CMAKE_CURRENT_BINARY_DIR}/html/index.html)
-add_custom_command(OUTPUT ${sphinx_top} DEPENDS ${sources}
-COMMAND ${TRIQS_SPHINXBUILD_EXECUTABLE} -c . -j8 -b html ${CMAKE_CURRENT_SOURCE_DIR} html)
-add_custom_target(doc_sphinx ALL DEPENDS ${sphinx_top} ${CMAKE_CURRENT_BINARY_DIR})
+# Sphinx has internal caching, always run it
+add_custom_target(${PROJECT_NAME}_docs_sphinx ALL)
+add_custom_command(
+TARGET ${PROJECT_NAME}_docs_sphinx
+COMMAND PYTHONPATH=${PROJECT_BINARY_DIR}/python:$ENV{PYTHONPATH} ${SPHINXBUILD_EXECUTABLE} -c . -j8 -b html ${CMAKE_CURRENT_SOURCE_DIR} html
+)
+
+option(Sphinx_Only "When building the documentation, skip the Python Modules and the generation of C++ Api and example outputs" OFF)
+if(NOT Sphinx_Only)
+# Autodoc usage requires the python modules to be built first
+get_property(CPP2PY_MODULES_LIST GLOBAL PROPERTY CPP2PY_MODULES_LIST)
+if(CPP2PY_MODULES_LIST)
+add_dependencies(${PROJECT_NAME}_docs_sphinx ${CPP2PY_MODULES_LIST})
+endif()
+
+# Generation of C++ Api and Example Outputs
+add_dependencies(${PROJECT_NAME}_docs_sphinx ${PROJECT_NAME}_docs_cpp2rst ${PROJECT_NAME}_docs_example_output)
+endif()
+
 # ---------------------------------
 # Install
 # ---------------------------------
-install(DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/html/ COMPONENT documentation DESTINATION share/doc/triqs_dft_tools
+install(DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/html/ COMPONENT documentation DESTINATION share/doc/${PROJECT_NAME}
 FILES_MATCHING
-REGEX "\\.(html|pdf|png|gif|jpg|js|xsl|css|py|txt|inv|bib|cfg)$"
+REGEX "\\.(html|pdf|png|gif|jpg|svg|js|xsl|css|py|txt|inv|bib)$"
 PATTERN "_*"
 )
2  doc/_templates/sideb.html  vendored
@@ -10,5 +10,5 @@
 </p>
 <hr>
 <p>
-<a href="https://github.com/triqs/dft_tools"> <img style="width: 200px; margin: 10px 0 0 5px" src='_static/logo_github.png' alt="Visit the project on GitHub"/> </a>
+<a href="https://github.com/triqs/triqs_dft_tools"> <img style="width: 200px; margin: 10px 0 0 5px" src='_static/logo_github.png' alt="Visit the project on GitHub"/> </a>
 </p>
@@ -3,6 +3,6 @@
 Changelog
 =========
 
-This document describes the main changes in DFTTools.
+This document describes the main changes in TRIQS_DFT_TOOLS.
 
 .. include:: ChangeLog.md
@@ -3,34 +3,36 @@
 # TRIQS documentation build configuration file
 
 import sys
-sys.path.insert(0, "@TRIQS_SPHINXEXT_PATH@/numpydoc")
+sys.path.insert(0, "@CMAKE_CURRENT_SOURCE_DIR@/sphinxext")
+sys.path.insert(0, "@CMAKE_CURRENT_SOURCE_DIR@/sphinxext/numpydoc")
 sys.path.insert(0, "@CMAKE_BINARY_DIR@/python")
 
 extensions = ['sphinx.ext.autodoc',
 'sphinx.ext.mathjax',
 'sphinx.ext.intersphinx',
-'matplotlib.sphinxext.plot_directive',
 'sphinx.ext.doctest',
 'sphinx.ext.todo',
 'sphinx.ext.viewcode',
 'sphinx.ext.autosummary',
-'nbsphinx',
-'numpydoc',
 'sphinx.ext.githubpages',
-'IPython.sphinxext.ipython_console_highlighting'
-]
+'sphinx_autorun',
+'matplotlib.sphinxext.plot_directive',
+'nbsphinx',
+'IPython.sphinxext.ipython_console_highlighting',
+'numpydoc']
 
 source_suffix = '.rst'
 
-project = u'TRIQS DFTTools'
-copyright = u'2011-2019'
-version = '@DFT_TOOLS_VERSION@'
-
-mathjax_path = "@TRIQS_MATHJAX_PATH@/MathJax.js?config=default"
-templates_path = ['@CMAKE_SOURCE_DIR@/doc/_templates']
+project = '@PROJECT_NAME@'
+version = '@PROJECT_VERSION@'
+
+copyright = '2011-2020'
+
+mathjax_path = "https://raw.githubusercontent.com/mathjax/MathJax/2.7.8/MathJax.js"
+templates_path = ['@CMAKE_CURRENT_SOURCE_DIR@/_templates']
 
 html_theme = 'triqs'
-html_theme_path = ['@TRIQS_THEMES_PATH@']
+html_theme_path = ['@CMAKE_CURRENT_SOURCE_DIR@/themes']
 html_show_sphinx = False
 html_context = {'header_title': 'dft tools',
 'header_subtitle': 'connecting <a class="triqs" style="font-size: 12px" href="http://triqs.github.io/triqs">TRIQS</a> to DFT packages',
@@ -39,9 +41,9 @@ html_context = {'header_title': 'dft tools',
 ['Tutorials', 'tutorials'],
 ['Issues', 'issues'],
 ['About DFTTools', 'about']]}
-html_static_path = ['@CMAKE_SOURCE_DIR@/doc/_static']
+html_static_path = ['@CMAKE_CURRENT_SOURCE_DIR@/_static']
 html_sidebars = {'index': ['sideb.html', 'searchbox.html']}
 
-htmlhelp_basename = 'TRIQSDFTToolsdoc'
+htmlhelp_basename = '@PROJECT_NAME@doc'
 
-intersphinx_mapping = {'python': ('http://docs.python.org/2.7', None), 'triqslibs': ('http://triqs.github.io/triqs/latest', None), 'triqscthyb': ('https://triqs.github.io/cthyb/latest', None)}
+intersphinx_mapping = {'python': ('https://docs.python.org/3.8', None), 'triqslibs': ('https://triqs.github.io/triqs/latest', None), 'triqscthyb': ('https://triqs.github.io/cthyb/latest', None)}
@@ -7,6 +7,7 @@ Table of contents
 index
 install
 documentation
+tutorials
 issues
 changelog
 about
@@ -1,4 +1,4 @@
-from triqs_dft_tools.converters.wien2k_converter import Wien2kConverter
+from triqs_dft_tools.converters.wien2k import Wien2kConverter
 from triqs_dft_tools import SumkDFTTools
 
 filename = 'Sr2RuO4'
@@ -37,7 +37,7 @@ class::
 Note that all routines available in :class:`SumkDFT <dft.sumk_dft.SumkDFT>` are also available here.
 
 If required, we have to load and initialise the real-frequency self energy. Most conveniently,
-you have your self energy already stored as a real-frequency :class:`BlockGf <pytriqs.gf.BlockGf>` object
+you have your self energy already stored as a real-frequency :class:`BlockGf <triqs.gf.BlockGf>` object
 in a hdf5 file::
 
 with HDFArchive('case.h5', 'r') as ar:
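The hunk above is cut off by the diff view. For context, loading a stored real-frequency self energy under the new imports looks roughly like this; a sketch only, where the archive layout and the group names are illustrative assumptions, not taken from the diff:

```python
from h5 import HDFArchive  # new py3 import; was pytriqs.archive

# 'case.h5' follows the docs above; the group names below are hypothetical.
with HDFArchive('case.h5', 'r') as ar:
    sigma_w = ar['dmft_output']['Sigma_w']  # a real-frequency BlockGf
```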
@@ -45,10 +45,10 @@ in a hdf5 file::
 
 You may also have your self energy stored in text files. For this case the :ref:`TRIQS <triqslibs:welcome>` library offers
 the function :meth:`read_gf_from_txt`, which is able to load the data from text files of one Green function block
-into a real-frequency :class:`ReFreqGf <pytriqs.gf.ReFreqGf>` object. Loading each block separately and
-building up a :class:´BlockGf <pytriqs.gf.BlockGf>´ is done with::
+into a real-frequency :class:`ReFreqGf <triqs.gf.ReFreqGf>` object. Loading each block separately and
+building up a :class:´BlockGf <triqs.gf.BlockGf>´ is done with::
 
-from pytriqs.gf.tools import *
+from triqs.gf.tools import *
 # get block names
 n_list = [n for n,nl in SK.gf_struct_solver[0].iteritems()]
 # load sigma for each block - in this example sigma is composed of 1x1 blocks
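The snippet above is also truncated by the diff view. A fuller sketch of the block-by-block loading it describes, under the new module path; the file names and block names here are hypothetical, and the `read_gf_from_txt(block_txtfiles, block_name)` signature is an assumption about the TRIQS helper rather than something shown in the diff:

```python
from triqs.gf import BlockGf
from triqs.gf.tools import read_gf_from_txt

# One text file per 1x1 block, as in the example above; names are hypothetical.
block_names = ['up_0', 'up_1', 'up_2']
blocks = [read_gf_from_txt([['Sigma_' + n + '.dat']], n) for n in block_names]
sigma_w = BlockGf(name_list=block_names, block_list=blocks, make_copies=False)
```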
@@ -128,7 +128,7 @@ Momentum resolved spectral function (with real-frequency self energy)
 
 Another quantity of interest is the momentum-resolved spectral function, which can directly be compared to ARPES
 experiments. First we have to execute `lapw1`, `lapw2 -almd` and :program:`dmftproj` with the `-band`
-option and use the :meth:`convert_bands_input <dft.converters.wien2k_converter.Wien2kConverter.convert_bands_input>`
+option and use the :meth:`convert_bands_input <dft.converters.wien2k.Wien2kConverter.convert_bands_input>`
 routine, which converts the required files (for a more detailed description see :ref:`conversion`). The spectral function is then calculated by typing::
 
 SK.spaghettis(broadening=0.01,plot_shift=0.0,plot_range=None,ishell=None,save_to_file='Akw_')
@@ -21,7 +21,7 @@ We can create a simple :class:`BlockStructure <dft.block_structure.BlockStructur
 
 This creates a block structure with one 3x3 block named *up*. Note that we have not created any Green's function yet; this is just the structure of those objects. If you want to create a Green's function with this structure, you can do (we will if with some content also)::
 
-from pytriqs.gf import *
+from triqs.gf import *
 GF_sumk = BS.create_gf(space='sumk', beta = 40, n_points = 1000)
 GF_sumk['up'][0,0] << iOmega_n - 2.0
 GF_sumk['up'][1,1] << iOmega_n + 0.5
@@ -87,7 +87,7 @@ matrix of the imaginary part, and then move on to the next :math:`\mathbf{k}`-po
 
 The converter itself is used as::
 
-from triqs_dft_tools.converters.hk_converter import *
+from triqs_dft_tools.converters.hk import *
 Converter = HkConverter(filename = hkinputfile)
 Converter.convert_dft_input()
 
@@ -183,7 +183,7 @@ Running the VASP converter
 
 The actual conversion to a h5-file is performed with the orthonormalized projector functions readable by the :ref:`VaspConverter<refVASPconverter>` in the same fashion as with the other `DFTTools` converters::
 
-from triqs_dft_tools.converters.vasp_converter import *
+from triqs_dft_tools.converters.vasp import *
 Converter = VaspConverter(filename = 'vasp')
 Converter.convert_dft_input()
 
@ -94,9 +94,9 @@ directory name):

Now we convert these files into an hdf5 file that can be used for the
DMFT calculations. For this purpose we
use the python module :class:`Wien2kConverter <dft.converters.wien2k_converter.Wien2kConverter>`. It is initialized as::
use the python module :class:`Wien2kConverter <dft.converters.wien2k.Wien2kConverter>`. It is initialized as::

    from triqs_dft_tools.converters.wien2k_converter import *
    from triqs_dft_tools.converters.wien2k import *

    Converter = Wien2kConverter(filename = case)

The only necessary parameter for this construction is `filename`.
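The conversion itself is then triggered in the same way as for the other converters; a minimal sketch continuing the snippet above::

    # writes the dft_input subgroup into <case>.h5
    Converter.convert_dft_input()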
@ -80,8 +80,8 @@ for :emphasis:`use_dc_formula` are:

At the end of the calculation, we can save the Green function and self energy into a file::

    from pytriqs.archive import HDFArchive
    from h5 import HDFArchive
    import pytriqs.utility.mpi as mpi
    import triqs.utility.mpi as mpi
    if mpi.is_master_node():
        ar = HDFArchive("YourDFTDMFTcalculation.h5",'w')
        ar["G"] = S.G_iw
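Reading the archive back later follows the same pattern; a minimal sketch using the file and key written above::

    from h5 import HDFArchive

    with HDFArchive("YourDFTDMFTcalculation.h5", 'r') as ar:
        G = ar["G"]   # the stored Green function is reconstructed on read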
@ -52,13 +52,13 @@ real-frequency self energy.

it is crucial to perform the analytic continuation in such a way that the real-frequency self energy
is accurate around the Fermi energy, as low-energy features strongly influence the final results.

Besides the self energy, the Wien2k files read by the transport converter (:meth:`convert_transport_input <dft.converters.wien2k_converter.Wien2kConverter.convert_transport_input>`) are:
Besides the self energy, the Wien2k files read by the transport converter (:meth:`convert_transport_input <dft.converters.wien2k.Wien2kConverter.convert_transport_input>`) are:

* :file:`.struct`: The lattice constants specified in the struct file are used to calculate the unit cell volume.
* :file:`.outputs`: In this file the k-point symmetries are given.
* :file:`.oubwin`: Contains the indices of the bands within the projected subspace (written by :program:`dmftproj`) for each k-point.
* :file:`.pmat`: This file is the output of the Wien2k optics package and contains the velocity (momentum) matrix elements between all bands in the desired energy
  window for each k-point. How to use the optics package is described below.
* :file:`.h5`: The hdf5 archive has to be present and should contain the dft_input subgroup. Otherwise :meth:`convert_dft_input <dft.converters.wien2k_converter.Wien2kConverter.convert_dft_input>` needs to be called before :meth:`convert_transport_input <dft.converters.wien2k_converter.Wien2kConverter.convert_transport_input>`.
* :file:`.h5`: The hdf5 archive has to be present and should contain the dft_input subgroup. Otherwise :meth:`convert_dft_input <dft.converters.wien2k.Wien2kConverter.convert_dft_input>` needs to be called before :meth:`convert_transport_input <dft.converters.wien2k.Wien2kConverter.convert_transport_input>`.


Wien2k optics package
@ -84,7 +84,7 @@ Using the transport code

First we have to read the Wien2k files and store the relevant information in the hdf5 archive::

    from triqs_dft_tools.converters.wien2k_converter import *
    from triqs_dft_tools.converters.wien2k import *
    from triqs_dft_tools.sumk_dft_tools import *

    Converter = Wien2kConverter(filename='case', repacking=True)

@ -92,7 +92,7 @@ First we have to read the Wien2k files and store the relevant information in the hdf5 archive

    SK = SumkDFTTools(hdf_file='case.h5', use_dft_blocks=True)

The converter :meth:`convert_transport_input <dft.converters.wien2k_converter.Wien2kConverter.convert_transport_input>`
The converter :meth:`convert_transport_input <dft.converters.wien2k.Wien2kConverter.convert_transport_input>`
reads the required data of the Wien2k output and stores it in the `dft_transp_input` subgroup of your hdf file.
Additionally we need to read and set the self energy, the chemical potential and the double counting::
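    # a sketch of this step; the archive group and key names below
    # ('dmft_output', 'Sigma_w') are illustrative assumptions, while
    # set_Sigma, set_mu, set_dc and load are generic SumkDFT methods
    from h5 import HDFArchive

    with HDFArchive('case.h5', 'r') as ar:
        SigmaReFreq = ar['dmft_output']['Sigma_w']   # real-frequency self energy

    SK.set_Sigma([SigmaReFreq])
    chemical_potential, dc_imp, dc_energ = SK.load(['chemical_potential', 'dc_imp', 'dc_energ'])
    SK.set_mu(chemical_potential)
    SK.set_dc(dc_imp, dc_energ)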
@ -37,34 +37,41 @@ Compiling DFTTools from source

Prerequisites
-------------

#. The :ref:`TRIQS <triqslibs:welcome>` toolbox.
#. The :ref:`TRIQS <triqslibs:welcome>` library, see :ref:`TRIQS installation instruction <triqslibs:installation>`.
   In the following, we assume that TRIQS is installed in the directory ``path_to_triqs``.

#. Likely, you will also need at least one impurity solver, e.g. the :ref:`CTHYB solver <triqscthyb:welcome>`.

Installation steps
------------------

#. Download the source code by cloning the ``TRIQS/dft_tools`` repository from GitHub::
#. Download the source code of the latest stable version by cloning the ``TRIQS/dft_tools`` repository from GitHub::

     $ git clone https://github.com/TRIQS/dft_tools.git dft_tools.src
     $ git clone https://github.com/TRIQS/dft_tools dft_tools.src

#. Make sure that all additional dependencies are installed on your system and available in your environment.
   Alternatively build the dependencies from source instead with::

     $ (cd deps && ./download.sh)

   In this case they will be installed together with your application.

#. Create and move to a new directory where you will compile the code::

     $ mkdir dft_tools.build && cd dft_tools.build

#. Ensure that your shell contains the TRIQS environment variables by sourcing the ``triqsvars.sh`` file from your TRIQS installation::

     $ source path_to_triqs/share/triqsvars.sh

#. In the build directory call cmake, including any additional custom CMake options, see below::

     $ cmake ../dft_tools.src

#. Compile the code, run the tests and install the application::

     $ make
     $ make test
     $ make install


Installation steps for the use with WIEN2K version 14.2 and older
@ -114,37 +121,41 @@ Finally, you will have to change the calls to :program:`python_with_DMFT` to

your :program:`python` installation in the Wien2k :file:`path_to_Wien2k/run*` files.


Version compatibility
---------------------

Be careful that the version of the TRIQS library and of the :program:`DFTTools` must be
compatible (more information on the :ref:`TRIQS website <triqslibs:welcome>`).
Keep in mind that the version of ``dft_tools`` must be compatible with your TRIQS library version,
see :ref:`TRIQS website <triqslibs:versions>`.
In particular the Major and Minor Version numbers have to be the same.
If you want to use a version of the :program:`DFTTools` that is not the latest one, go
into the directory with the sources and look at all available versions::
To use a particular version, go into the directory with the sources, and look at all available versions::

    $ cd src && git tag
    $ cd dft_tools.src && git tag

Checkout the version of the code that you want, for instance::
Checkout the version of the code that you want::

    $ git co 2.1
    $ git checkout 2.1.0

Then follow the steps 2 to 5 described above to compile the code.
and follow steps 2 to 4 above to compile the code.


Custom CMake options
--------------------
Functionality of ``dft_tools`` can be tweaked using extra compile-time options passed to CMake::
The compilation of ``dft_tools`` can be configured using CMake-options::

    cmake -DOPTION1=value1 -DOPTION2=value2 ... ../dft_tools.src
    cmake ../dft_tools.src -DOPTION1=value1 -DOPTION2=value2 ...

+---------------------------------------------------------------+-----------------------------------------------+
| Options                                                       | Syntax                                        |
+===============================================================+===============================================+
| Disable testing (not recommended)                             | -DBuild_Tests=OFF                             |
+---------------------------------------------------------------+-----------------------------------------------+
| Build the documentation locally                               | -DBuild_Documentation=ON                      |
+---------------------------------------------------------------+-----------------------------------------------+
| Check test coverage when testing                              | -DTEST_COVERAGE=ON                            |
| (run ``make coverage`` to show the results; requires the      |                                               |
| python ``coverage`` package)                                  |                                               |
+---------------------------------------------------------------+-----------------------------------------------+

+-----------------------------------------------------------------+-----------------------------------------------+
| Options                                                         | Syntax                                        |
+=================================================================+===============================================+
| Specify an installation path other than path_to_triqs          | -DCMAKE_INSTALL_PREFIX=path_to_dft_tools      |
+-----------------------------------------------------------------+-----------------------------------------------+
| Build in Debugging Mode                                         | -DCMAKE_BUILD_TYPE=Debug                      |
+-----------------------------------------------------------------+-----------------------------------------------+
| Disable testing (not recommended)                               | -DBuild_Tests=OFF                             |
+-----------------------------------------------------------------+-----------------------------------------------+
| Build the documentation                                         | -DBuild_Documentation=ON                      |
+-----------------------------------------------------------------+-----------------------------------------------+
| Check test coverage when testing                                | -DTEST_COVERAGE=ON                            |
| (run ``make coverage`` to show the results; requires the        |                                               |
| python ``coverage`` package)                                    |                                               |
+-----------------------------------------------------------------+-----------------------------------------------+
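For example, to install into a custom location and build the documentation in one go, the options from the table can simply be combined (the install path is a placeholder)::

    cmake ../dft_tools.src -DCMAKE_INSTALL_PREFIX=$HOME/opt/dft_tools -DBuild_Documentation=ON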
@ -1,13 +1,14 @@

.. _issues:

Reporting issues
================

Please report all problems and bugs directly at the github issue page
`<https://github.com/TRIQS/dft_tools/issues>`_. In order to make it easier
`<https://github.com/TRIQS/dft_tools/issues>`_. In order to make it easier for us
for us to solve the issue please follow these guidelines:
to solve the issue please follow these guidelines:

#. In all cases specify which version of the application you are using. You can
   find the version number in the file :file:`README.txt` at the root of the
   find the version number in the file :file:`CMakeLists.txt` at the root of the
   application sources.

#. If you have a problem during the installation, give us information about
@ -5,20 +5,20 @@ Converters

Wien2k Converter
----------------
.. autoclass:: triqs_dft_tools.converters.wien2k_converter.Wien2kConverter
.. autoclass:: triqs_dft_tools.converters.wien2k.Wien2kConverter
   :members:
   :special-members:
   :show-inheritance:

H(k) Converter
--------------
.. autoclass:: triqs_dft_tools.converters.hk_converter.HkConverter
.. autoclass:: triqs_dft_tools.converters.hk.HkConverter
   :members:
   :special-members:

Wannier90 Converter
-------------------
.. autoclass:: triqs_dft_tools.converters.wannier90_converter.Wannier90Converter
.. autoclass:: triqs_dft_tools.converters.wannier90.Wannier90Converter
   :members:
   :special-members:

@ -53,7 +53,7 @@ PLOVASP reference, the classes / functions are sorted the way the converter uses them

VASP Converter
-------------------
.. _refVASPconverter:
.. autoclass:: triqs_dft_tools.converters.vasp_converter.VaspConverter
.. autoclass:: triqs_dft_tools.converters.vasp.VaspConverter
   :members:
   :special-members:
427
doc/sphinxext/numpydoc/apigen.py
Normal file
@ -0,0 +1,427 @@
"""Attempt to generate templates for module reference with Sphinx

XXX - we exclude extension modules

To include extension modules, first identify them as valid in the
``_uri2path`` method, then handle them in the ``_parse_module`` script.

We get functions and classes by parsing the text of .py files.
Alternatively we could import the modules for discovery, and we'd have
to do that for extension modules.  This would involve changing the
``_parse_module`` method to work via import and introspection, and
might involve changing ``discover_modules`` (which determines which
files are modules, and therefore which module URIs will be passed to
``_parse_module``).

NOTE: this is a modified version of a script originally shipped with the
PyMVPA project, which we've adapted for NIPY use.  PyMVPA is an MIT-licensed
project."""

# Stdlib imports
import os
import re

# Functions and classes
class ApiDocWriter:
    ''' Class for automatic detection and parsing of API docs
    to Sphinx-parsable reST format'''

    # only separating first two levels
    rst_section_levels = ['*', '=', '-', '~', '^']

    def __init__(self,
                 package_name,
                 rst_extension='.rst',
                 package_skip_patterns=None,
                 module_skip_patterns=None,
                 ):
        ''' Initialize package for parsing

        Parameters
        ----------
        package_name : string
            Name of the top-level package.  *package_name* must be the
            name of an importable package
        rst_extension : string, optional
            Extension for reST files, default '.rst'
        package_skip_patterns : None or sequence of {strings, regexps}
            Sequence of strings giving URIs of packages to be excluded
            Operates on the package path, starting at (including) the
            first dot in the package path, after *package_name* - so,
            if *package_name* is ``sphinx``, then ``sphinx.util`` will
            result in ``.util`` being passed for searching by these
            regexps.  If is None, gives default. Default is:
            ['\.tests$']
        module_skip_patterns : None or sequence
            Sequence of strings giving URIs of modules to be excluded
            Operates on the module name including preceding URI path,
            back to the first dot after *package_name*.  For example
            ``sphinx.util.console`` results in the string to search of
            ``.util.console``
            If is None, gives default. Default is:
            ['\.setup$', '\._']
        '''
        if package_skip_patterns is None:
            package_skip_patterns = ['\\.tests$']
        if module_skip_patterns is None:
            module_skip_patterns = ['\\.setup$', '\\._']
        self.package_name = package_name
        self.rst_extension = rst_extension
        self.package_skip_patterns = package_skip_patterns
        self.module_skip_patterns = module_skip_patterns

    def get_package_name(self):
        return self._package_name

    def set_package_name(self, package_name):
        ''' Set package_name

        >>> docwriter = ApiDocWriter('sphinx')
        >>> import sphinx
        >>> docwriter.root_path == sphinx.__path__[0]
        True
        >>> docwriter.package_name = 'docutils'
        >>> import docutils
        >>> docwriter.root_path == docutils.__path__[0]
        True
        '''
        # It's also possible to imagine caching the module parsing here
        self._package_name = package_name
        self.root_module = __import__(package_name)
        self.root_path = self.root_module.__path__[0]
        self.written_modules = None

    package_name = property(get_package_name, set_package_name, None,
                            'get/set package_name')

    def _get_object_name(self, line):
        ''' Get second token in line
        >>> docwriter = ApiDocWriter('sphinx')
        >>> docwriter._get_object_name("  def func():  ")
        'func'
        >>> docwriter._get_object_name("  class Klass:  ")
        'Klass'
        >>> docwriter._get_object_name("  class Klass:  ")
        'Klass'
        '''
        name = line.split()[1].split('(')[0].strip()
        # in case we have classes which are not derived from object
        # ie. old style classes
        return name.rstrip(':')

    def _uri2path(self, uri):
        ''' Convert uri to absolute filepath

        Parameters
        ----------
        uri : string
            URI of python module to return path for

        Returns
        -------
        path : None or string
            Returns None if there is no valid path for this URI
            Otherwise returns absolute file system path for URI

        Examples
        --------
        >>> docwriter = ApiDocWriter('sphinx')
        >>> import sphinx
        >>> modpath = sphinx.__path__[0]
        >>> res = docwriter._uri2path('sphinx.builder')
        >>> res == os.path.join(modpath, 'builder.py')
        True
        >>> res = docwriter._uri2path('sphinx')
        >>> res == os.path.join(modpath, '__init__.py')
        True
        >>> docwriter._uri2path('sphinx.does_not_exist')

        '''
        if uri == self.package_name:
            return os.path.join(self.root_path, '__init__.py')
        path = uri.replace('.', os.path.sep)
        path = path.replace(self.package_name + os.path.sep, '')
        path = os.path.join(self.root_path, path)
        # XXX maybe check for extensions as well?
        if os.path.exists(path + '.py'): # file
            path += '.py'
        elif os.path.exists(os.path.join(path, '__init__.py')):
            path = os.path.join(path, '__init__.py')
        else:
            return None
        return path

    def _path2uri(self, dirpath):
        ''' Convert directory path to uri '''
        relpath = dirpath.replace(self.root_path, self.package_name)
        if relpath.startswith(os.path.sep):
            relpath = relpath[1:]
        return relpath.replace(os.path.sep, '.')

    def _parse_module(self, uri):
        ''' Parse module defined in *uri* '''
        filename = self._uri2path(uri)
        if filename is None:
            # nothing that we could handle here.
            return ([],[])
        f = open(filename, 'rt')
        functions, classes = self._parse_lines(f)
        f.close()
        return functions, classes

    def _parse_lines(self, linesource):
        ''' Parse lines of text for functions and classes '''
        functions = []
        classes = []
        for line in linesource:
            if line.startswith('def ') and line.count('('):
                # exclude private stuff
                name = self._get_object_name(line)
                if not name.startswith('_'):
                    functions.append(name)
            elif line.startswith('class '):
                # exclude private stuff
                name = self._get_object_name(line)
                if not name.startswith('_'):
                    classes.append(name)
            else:
                pass
        functions.sort()
        classes.sort()
        return functions, classes

    def generate_api_doc(self, uri):
        '''Make autodoc documentation template string for a module

        Parameters
        ----------
        uri : string
            python location of module - e.g 'sphinx.builder'

        Returns
        -------
        S : string
            Contents of API doc
        '''
        # get the names of all classes and functions
        functions, classes = self._parse_module(uri)
        if not len(functions) and not len(classes):
            print('WARNING: Empty -',uri)  # dbg
            return ''

        # Make a shorter version of the uri that omits the package name for
        # titles
        uri_short = re.sub(r'^%s\.' % self.package_name,'',uri)

        ad = '.. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n'

        chap_title = uri_short
        ad += (chap_title+'\n'+ self.rst_section_levels[1] * len(chap_title)
               + '\n\n')

        # Set the chapter title to read 'module' for all modules except for the
        # main packages
        if '.' in uri:
            title = 'Module: :mod:`' + uri_short + '`'
        else:
            title = ':mod:`' + uri_short + '`'
        ad += title + '\n' + self.rst_section_levels[2] * len(title)

        if len(classes):
            ad += '\nInheritance diagram for ``%s``:\n\n' % uri
            ad += '.. inheritance-diagram:: %s \n' % uri
            ad += '   :parts: 3\n'

        ad += '\n.. automodule:: ' + uri + '\n'
        ad += '\n.. currentmodule:: ' + uri + '\n'
        multi_class = len(classes) > 1
        multi_fx = len(functions) > 1
        if multi_class:
            ad += '\n' + 'Classes' + '\n' + \
                  self.rst_section_levels[2] * 7 + '\n'
        elif len(classes) and multi_fx:
            ad += '\n' + 'Class' + '\n' + \
                  self.rst_section_levels[2] * 5 + '\n'
        for c in classes:
            ad += '\n:class:`' + c + '`\n' \
                  + self.rst_section_levels[multi_class + 2 ] * \
                  (len(c)+9) + '\n\n'
            ad += '\n.. autoclass:: ' + c + '\n'
            # must NOT exclude from index to keep cross-refs working
            ad += '  :members:\n' \
                  '  :undoc-members:\n' \
                  '  :show-inheritance:\n' \
                  '  :inherited-members:\n' \
                  '\n' \
                  '  .. automethod:: __init__\n'
        if multi_fx:
            ad += '\n' + 'Functions' + '\n' + \
                  self.rst_section_levels[2] * 9 + '\n\n'
        elif len(functions) and multi_class:
            ad += '\n' + 'Function' + '\n' + \
                  self.rst_section_levels[2] * 8 + '\n\n'
        for f in functions:
            # must NOT exclude from index to keep cross-refs working
            ad += '\n.. autofunction:: ' + uri + '.' + f + '\n\n'
        return ad

    def _survives_exclude(self, matchstr, match_type):
        ''' Returns True if *matchstr* does not match patterns

        ``self.package_name`` removed from front of string if present

        Examples
        --------
        >>> dw = ApiDocWriter('sphinx')
        >>> dw._survives_exclude('sphinx.okpkg', 'package')
        True
        >>> dw.package_skip_patterns.append('^\\.badpkg$')
        >>> dw._survives_exclude('sphinx.badpkg', 'package')
        False
        >>> dw._survives_exclude('sphinx.badpkg', 'module')
        True
        >>> dw._survives_exclude('sphinx.badmod', 'module')
        True
        >>> dw.module_skip_patterns.append('^\\.badmod$')
        >>> dw._survives_exclude('sphinx.badmod', 'module')
        False
        '''
        if match_type == 'module':
            patterns = self.module_skip_patterns
        elif match_type == 'package':
            patterns = self.package_skip_patterns
        else:
            raise ValueError('Cannot interpret match type "%s"'
                             % match_type)
        # Match to URI without package name
        L = len(self.package_name)
        if matchstr[:L] == self.package_name:
            matchstr = matchstr[L:]
        for pat in patterns:
            try:
                pat.search
            except AttributeError:
                pat = re.compile(pat)
            if pat.search(matchstr):
                return False
        return True

    def discover_modules(self):
        ''' Return module sequence discovered from ``self.package_name``


        Parameters
        ----------
        None

        Returns
        -------
        mods : sequence
            Sequence of module names within ``self.package_name``

        Examples
        --------
        >>> dw = ApiDocWriter('sphinx')
        >>> mods = dw.discover_modules()
        >>> 'sphinx.util' in mods
        True
        >>> dw.package_skip_patterns.append('\.util$')
        >>> 'sphinx.util' in dw.discover_modules()
        False
        >>>
        '''
        modules = [self.package_name]
        # raw directory parsing
        for dirpath, dirnames, filenames in os.walk(self.root_path):
            # Check directory names for packages
            root_uri = self._path2uri(os.path.join(self.root_path,
                                                   dirpath))
            for dirname in dirnames[:]: # copy list - we modify inplace
                package_uri = '.'.join((root_uri, dirname))
                if (self._uri2path(package_uri) and
                    self._survives_exclude(package_uri, 'package')):
                    modules.append(package_uri)
                else:
                    dirnames.remove(dirname)
            # Check filenames for modules
            for filename in filenames:
                module_name = filename[:-3]
                module_uri = '.'.join((root_uri, module_name))
                if (self._uri2path(module_uri) and
                    self._survives_exclude(module_uri, 'module')):
                    modules.append(module_uri)
        return sorted(modules)

    def write_modules_api(self, modules,outdir):
        # write the list
        written_modules = []
        for m in modules:
            api_str = self.generate_api_doc(m)
            if not api_str:
                continue
            # write out to file
            outfile = os.path.join(outdir,
                                   m + self.rst_extension)
            fileobj = open(outfile, 'wt')
            fileobj.write(api_str)
            fileobj.close()
            written_modules.append(m)
        self.written_modules = written_modules

    def write_api_docs(self, outdir):
        """Generate API reST files.

        Parameters
        ----------
        outdir : string
            Directory name in which to store files
            We create automatic filenames for each module

        Returns
        -------
        None

        Notes
        -----
        Sets self.written_modules to list of written modules
        """
        if not os.path.exists(outdir):
            os.mkdir(outdir)
        # compose list of modules
        modules = self.discover_modules()
        self.write_modules_api(modules,outdir)

    def write_index(self, outdir, froot='gen', relative_to=None):
        """Make a reST API index file from written files

        Parameters
        ----------
        path : string
            Filename to write index to
        outdir : string
            Directory to which to write generated index file
        froot : string, optional
            root (filename without extension) of filename to write to
            Defaults to 'gen'.  We add ``self.rst_extension``.
        relative_to : string
            path to which written filenames are relative.  This
            component of the written file path will be removed from
            outdir, in the generated index.  Default is None, meaning,
            leave path as it is.
        """
        if self.written_modules is None:
            raise ValueError('No modules written')
        # Get full filename path
        path = os.path.join(outdir, froot+self.rst_extension)
        # Path written into index is relative to rootpath
        if relative_to is not None:
            relpath = outdir.replace(relative_to + os.path.sep, '')
        else:
            relpath = outdir
        idx = open(path,'wt')
        w = idx.write
        w('.. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n')
        w('.. toctree::\n\n')
        for f in self.written_modules:
            w('   %s\n' % os.path.join(relpath,f))
        idx.close()
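For context, this writer is typically driven by a few lines in a documentation build script; a hedged sketch (package name, skip pattern and output directory are placeholders)::

    from apigen import ApiDocWriter

    docwriter = ApiDocWriter('triqs_dft_tools', rst_extension='.rst')
    docwriter.package_skip_patterns += [r'\.clients$']   # hypothetical exclusion
    docwriter.write_api_docs('reference/generated')      # one .rst stub per module
    docwriter.write_index('reference/generated', froot='gen')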
497
doc/sphinxext/numpydoc/docscrape.py
Normal file
@ -0,0 +1,497 @@
"""Extract reference documentation from the NumPy source tree.

"""

import inspect
import textwrap
import re
import pydoc
from io import StringIO
from warnings import warn

class Reader:
    """A line-based string reader.

    """
    def __init__(self, data):
        """
        Parameters
        ----------
        data : str
           String with lines separated by '\n'.

        """
        if isinstance(data,list):
            self._str = data
        else:
            self._str = data.split('\n') # store string as list of lines

        self.reset()

    def __getitem__(self, n):
        return self._str[n]

    def reset(self):
        self._l = 0 # current line nr

    def read(self):
        if not self.eof():
            out = self[self._l]
            self._l += 1
            return out
        else:
            return ''

    def seek_next_non_empty_line(self):
        for l in self[self._l:]:
            if l.strip():
                break
            else:
                self._l += 1

    def eof(self):
        return self._l >= len(self._str)

    def read_to_condition(self, condition_func):
        start = self._l
        for line in self[start:]:
            if condition_func(line):
                return self[start:self._l]
            self._l += 1
            if self.eof():
                return self[start:self._l+1]
        return []

    def read_to_next_empty_line(self):
        self.seek_next_non_empty_line()
        def is_empty(line):
            return not line.strip()
        return self.read_to_condition(is_empty)

    def read_to_next_unindented_line(self):
        def is_unindented(line):
            return (line.strip() and (len(line.lstrip()) == len(line)))
        return self.read_to_condition(is_unindented)

    def peek(self,n=0):
        if self._l + n < len(self._str):
            return self[self._l + n]
        else:
            return ''

    def is_empty(self):
        return not ''.join(self._str).strip()


class NumpyDocString:
    def __init__(self,docstring):
        docstring = textwrap.dedent(docstring).split('\n')

        self._doc = Reader(docstring)
        self._parsed_data = {
            'Signature': '',
            'Summary': [''],
            'Extended Summary': [],
            'Parameters': [],
            'Returns': [],
            'Raises': [],
            'Warns': [],
            'Other Parameters': [],
            'Attributes': [],
            'Methods': [],
            'See Also': [],
            'Notes': [],
            'Warnings': [],
            'References': '',
            'Examples': '',
            'index': {}
            }

        self._parse()

    def __getitem__(self,key):
        return self._parsed_data[key]

    def __setitem__(self,key,val):
        if key not in self._parsed_data:
            warn("Unknown section %s" % key)
        else:
            self._parsed_data[key] = val

    def _is_at_section(self):
        self._doc.seek_next_non_empty_line()

        if self._doc.eof():
            return False

        l1 = self._doc.peek().strip()  # e.g. Parameters

        if l1.startswith('.. index::'):
            return True

        l2 = self._doc.peek(1).strip() #    ---------- or ==========
        return l2.startswith('-'*len(l1)) or l2.startswith('='*len(l1))

    def _strip(self,doc):
        i = 0
        j = 0
        for i,line in enumerate(doc):
            if line.strip(): break

        for j,line in enumerate(doc[::-1]):
            if line.strip(): break

        return doc[i:len(doc)-j]

    def _read_to_next_section(self):
        section = self._doc.read_to_next_empty_line()

        while not self._is_at_section() and not self._doc.eof():
            if not self._doc.peek(-1).strip(): # previous line was empty
                section += ['']

            section += self._doc.read_to_next_empty_line()

        return section

    def _read_sections(self):
        while not self._doc.eof():
            data = self._read_to_next_section()
            name = data[0].strip()

            if name.startswith('..'): # index section
                yield name, data[1:]
            elif len(data) < 2:
                yield StopIteration
            else:
                yield name, self._strip(data[2:])

    def _parse_param_list(self,content):
        r = Reader(content)
        params = []
        while not r.eof():
            header = r.read().strip()
            if ' : ' in header:
                arg_name, arg_type = header.split(' : ')[:2]
            else:
                arg_name, arg_type = header, ''

            desc = r.read_to_next_unindented_line()
            desc = dedent_lines(desc)

            params.append((arg_name,arg_type,desc))

        return params


    _name_rgx = re.compile(r"^\s*(:(?P<role>\w+):`(?P<name>[a-zA-Z0-9_.-]+)`|"
                           r" (?P<name2>[a-zA-Z0-9_.-]+))\s*", re.X)
    def _parse_see_also(self, content):
        """
        func_name : Descriptive text
            continued text
        another_func_name : Descriptive text
        func_name1, func_name2, :meth:`func_name`, func_name3

        """
        items = []

        def parse_item_name(text):
            """Match ':role:`name`' or 'name'"""
            m = self._name_rgx.match(text)
            if m:
                g = m.groups()
                if g[1] is None:
                    return g[3], None
                else:
                    return g[2], g[1]
            raise ValueError("%s is not a item name" % text)

        def push_item(name, rest):
            if not name:
                return
            name, role = parse_item_name(name)
            items.append((name, list(rest), role))
            del rest[:]

        current_func = None
        rest = []

        for line in content:
            if not line.strip(): continue

            m = self._name_rgx.match(line)
            if m and line[m.end():].strip().startswith(':'):
                push_item(current_func, rest)
                current_func, line = line[:m.end()], line[m.end():]
                rest = [line.split(':', 1)[1].strip()]
                if not rest[0]:
                    rest = []
            elif not line.startswith(' '):
                push_item(current_func, rest)
                current_func = None
                if ',' in line:
                    for func in line.split(','):
                        push_item(func, [])
                elif line.strip():
                    current_func = line
            elif current_func is not None:
                rest.append(line.strip())
        push_item(current_func, rest)
        return items

    def _parse_index(self, section, content):
        """
        .. index: default
           :refguide: something, else, and more

        """
        def strip_each_in(lst):
            return [s.strip() for s in lst]

        out = {}
        section = section.split('::')
        if len(section) > 1:
            out['default'] = strip_each_in(section[1].split(','))[0]
        for line in content:
            line = line.split(':')
            if len(line) > 2:
                out[line[1]] = strip_each_in(line[2].split(','))
        return out

    def _parse_summary(self):
        """Grab signature (if given) and summary"""
        if self._is_at_section():
            return

        summary = self._doc.read_to_next_empty_line()
        summary_str = " ".join([s.strip() for s in summary]).strip()
        if re.compile('^([\w., ]+=)?\s*[\w\.]+\(.*\)$').match(summary_str):
            self['Signature'] = summary_str
            if not self._is_at_section():
                self['Summary'] = self._doc.read_to_next_empty_line()
        else:
            self['Summary'] = summary

        if not self._is_at_section():
            self['Extended Summary'] = self._read_to_next_section()

    def _parse(self):
        self._doc.reset()
        self._parse_summary()

        for (section,content) in self._read_sections():
            if not section.startswith('..'):
                section = ' '.join([s.capitalize() for s in section.split(' ')])
            if section in ('Parameters', 'Attributes', 'Methods',
                           'Returns', 'Raises', 'Warns'):
                self[section] = self._parse_param_list(content)
            elif section.startswith('.. index::'):
                self['index'] = self._parse_index(section, content)
            elif section == 'See Also':
                self['See Also'] = self._parse_see_also(content)
            else:
                self[section] = content

    # string conversion routines

    def _str_header(self, name, symbol='-'):
        return [name, len(name)*symbol]

    def _str_indent(self, doc, indent=4):
        out = []
        for line in doc:
            out += [' '*indent + line]
        return out

    def _str_signature(self):
        if self['Signature']:
            return [self['Signature'].replace('*','\*')] + ['']
        else:
            return ['']

    def _str_summary(self):
        if self['Summary']:
            return self['Summary'] + ['']
        else:
            return []

    def _str_extended_summary(self):
        if self['Extended Summary']:
            return self['Extended Summary'] + ['']
        else:
            return []

    def _str_param_list(self, name):
        out = []
        if self[name]:
            out += self._str_header(name)
            for param,param_type,desc in self[name]:
                out += ['%s : %s' % (param, param_type)]
                out += self._str_indent(desc)
            out += ['']
        return out

    def _str_section(self, name):
        out = []
        if self[name]:
            out += self._str_header(name)
            out += self[name]
            out += ['']
        return out

    def _str_see_also(self, func_role):
        if not self['See Also']: return []
        out = []
        out += self._str_header("See Also")
        last_had_desc = True
        for func, desc, role in self['See Also']:
            if role:
                link = ':%s:`%s`' % (role, func)
            elif func_role:
                link = ':%s:`%s`' % (func_role, func)
            else:
                link = "`%s`_" % func
            if desc or last_had_desc:
                out += ['']
                out += [link]
            else:
                out[-1] += ", %s" % link
            if desc:
                out += self._str_indent([' '.join(desc)])
                last_had_desc = True
            else:
                last_had_desc = False
        out += ['']
        return out

    def _str_index(self):
        idx = self['index']
        out = []
        out += ['.. index:: %s' % idx.get('default','')]
        for section, references in idx.items():
            if section == 'default':
                continue
            out += ['   :%s: %s' % (section, ', '.join(references))]
        return out

    def __str__(self, func_role=''):
        out = []
        out += self._str_signature()
        out += self._str_summary()
        out += self._str_extended_summary()
        for param_list in ('Parameters','Returns','Raises'):
            out += self._str_param_list(param_list)
        out += self._str_section('Warnings')
        out += self._str_see_also(func_role)
        for s in ('Notes','References','Examples'):
            out += self._str_section(s)
        out += self._str_index()
        return '\n'.join(out)


def indent(str,indent=4):
    indent_str = ' '*indent
    if str is None:
        return indent_str
    lines = str.split('\n')
    return '\n'.join(indent_str + l for l in lines)

def dedent_lines(lines):
    """Deindent a list of lines maximally"""
    return textwrap.dedent("\n".join(lines)).split("\n")

def header(text, style='-'):
    return text + '\n' + style*len(text) + '\n'


class FunctionDoc(NumpyDocString):
    def __init__(self, func, role='func', doc=None):
        self._f = func
        self._role = role # e.g. "func" or "meth"
        if doc is None:
            doc = inspect.getdoc(func) or ''
        try:
            NumpyDocString.__init__(self, doc)
        except ValueError as e:
            print('*'*78)
            print("ERROR: '%s' while parsing `%s`" % (e, self._f))
            print('*'*78)
            #print "Docstring follows:"
            #print doclines
            #print '='*78

        if not self['Signature']:
            func, func_name = self.get_func()
            try:
                # try to read signature
                argspec = inspect.getargspec(func)
                argspec = inspect.formatargspec(*argspec)
                argspec = argspec.replace('*','\*')
                signature = '%s%s' % (func_name, argspec)
            except TypeError as e:
                signature = '%s()' % func_name
            self['Signature'] = signature

    def get_func(self):
        func_name = getattr(self._f, '__name__', self.__class__.__name__)
        if inspect.isclass(self._f):
            func = getattr(self._f, '__call__', self._f.__init__)
        else:
            func = self._f
        return func, func_name

    def __str__(self):
        out = ''

        func, func_name = self.get_func()
        signature = self['Signature'].replace('*', '\*')

        roles = {'func': 'function',
                 'meth': 'method'}

        if self._role:
            if self._role not in roles:
                print("Warning: invalid role %s" % self._role)
            out += '.. %s:: %s\n    \n\n' % (roles.get(self._role,''),
                                             func_name)

        out += super(FunctionDoc, self).__str__(func_role=self._role)
        return out


class ClassDoc(NumpyDocString):
    def __init__(self,cls,modulename='',func_doc=FunctionDoc,doc=None):
        if not inspect.isclass(cls):
            raise ValueError("Initialise using a class. Got %r" % cls)
        self._cls = cls

        if modulename and not modulename.endswith('.'):
            modulename += '.'
        self._mod = modulename
        self._name = cls.__name__
        self._func_doc = func_doc

        if doc is None:
            doc = pydoc.getdoc(cls)

        NumpyDocString.__init__(self, doc)

    @property
    def methods(self):
        return [name for name,func in inspect.getmembers(self._cls)
                if not name.startswith('_') and callable(func)]

    def __str__(self):
        out = ''
        out += super(ClassDoc, self).__str__()
        out += "\n\n"

        #for m in self.methods:
        #    print "Parsing `%s`" % m
        #    out += str(self._func_doc(getattr(self._cls,m), 'meth')) + '\n\n'
        #    out += '.. index::\n   single: %s; %s\n\n' % (self._name, m)

        return out
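To see what the parser produces, one can feed it a small numpydoc-style docstring; a self-contained sketch::

    from docscrape import NumpyDocString

    doc = NumpyDocString("""Compute something.

    Parameters
    ----------
    x : float
        Input value.

    Returns
    -------
    y : float
        Output value.
    """)
    print(doc['Parameters'])   # -> [('x', 'float', ['Input value.'])]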
136
doc/sphinxext/numpydoc/docscrape_sphinx.py
Normal file
@ -0,0 +1,136 @@
import re, inspect, textwrap, pydoc
from docscrape import NumpyDocString, FunctionDoc, ClassDoc

class SphinxDocString(NumpyDocString):
    # string conversion routines
    def _str_header(self, name, symbol='`'):
        return ['.. rubric:: ' + name, '']

    def _str_field_list(self, name):
        return [':' + name + ':']

    def _str_indent(self, doc, indent=4):
        out = []
        for line in doc:
            out += [' '*indent + line]
        return out

    def _str_signature(self):
        return ['']
        if self['Signature']:
            return ['``%s``' % self['Signature']] + ['']
        else:
            return ['']

    def _str_summary(self):
        return self['Summary'] + ['']

    def _str_extended_summary(self):
        return self['Extended Summary'] + ['']

    def _str_param_list(self, name):
        out = []
        if self[name]:
            out += self._str_field_list(name)
            out += ['']
            for param,param_type,desc in self[name]:
                out += self._str_indent(['**%s** : %s' % (param.strip(),
                                                          param_type)])
                out += ['']
                out += self._str_indent(desc,8)
                out += ['']
        return out

    def _str_section(self, name):
        out = []
        if self[name]:
            out += self._str_header(name)
            out += ['']
            content = textwrap.dedent("\n".join(self[name])).split("\n")
            out += content
            out += ['']
        return out

    def _str_see_also(self, func_role):
        out = []
        if self['See Also']:
            see_also = super(SphinxDocString, self)._str_see_also(func_role)
            out = ['.. seealso::', '']
            out += self._str_indent(see_also[2:])
        return out

    def _str_warnings(self):
        out = []
        if self['Warnings']:
            out = ['.. warning::', '']
            out += self._str_indent(self['Warnings'])
        return out

    def _str_index(self):
        idx = self['index']
        out = []
        if len(idx) == 0:
            return out

        out += ['.. index:: %s' % idx.get('default','')]
        for section, references in idx.items():
            if section == 'default':
                continue
            elif section == 'refguide':
                out += ['   single: %s' % (', '.join(references))]
            else:
                out += ['   %s: %s' % (section, ','.join(references))]
        return out

    def _str_references(self):
        out = []
        if self['References']:
            out += self._str_header('References')
            if isinstance(self['References'], str):
                self['References'] = [self['References']]
            out.extend(self['References'])
            out += ['']
        return out

    def __str__(self, indent=0, func_role="obj"):
        out = []
        out += self._str_signature()
        out += self._str_index() + ['']
        out += self._str_summary()
        out += self._str_extended_summary()
        for param_list in ('Parameters', 'Attributes', 'Methods',
                           'Returns','Raises'):
            out += self._str_param_list(param_list)
        out += self._str_warnings()
        out += self._str_see_also(func_role)
        out += self._str_section('Notes')
        out += self._str_references()
        out += self._str_section('Examples')
        out = self._str_indent(out,indent)
        return '\n'.join(out)

class SphinxFunctionDoc(SphinxDocString, FunctionDoc):
    pass

class SphinxClassDoc(SphinxDocString, ClassDoc):
    pass

def get_doc_object(obj, what=None, doc=None):
    if what is None:
        if inspect.isclass(obj):
            what = 'class'
        elif inspect.ismodule(obj):
            what = 'module'
        elif callable(obj):
            what = 'function'
        else:
            what = 'object'
    if what == 'class':
        return SphinxClassDoc(obj, '', func_doc=SphinxFunctionDoc, doc=doc)
    elif what in ('function', 'method'):
        return SphinxFunctionDoc(obj, '', doc=doc)
    else:
        if doc is None:
            doc = pydoc.getdoc(obj)
        return SphinxDocString(doc)
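`get_doc_object` is the entry point other extensions call into; a hedged usage sketch (this relies on `inspect.getargspec` inside `FunctionDoc`, so it assumes an older Python where that function still exists)::

    from docscrape_sphinx import get_doc_object

    import textwrap
    # renders the docstring of any documented callable as Sphinx-flavoured reST
    print(get_doc_object(textwrap.dedent))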
407
doc/sphinxext/numpydoc/inheritance_diagram.py
Normal file
@ -0,0 +1,407 @@
"""
Defines a docutils directive for inserting inheritance diagrams.

Provide the directive with one or more classes or modules (separated
by whitespace).  For modules, all of the classes in that module will
be used.

Example::

   Given the following classes:

   class A: pass
   class B(A): pass
   class C(A): pass
   class D(B, C): pass
   class E(B): pass

   .. inheritance-diagram: D E

   Produces a graph like the following:

               A
              / \
             B   C
            / \ /
           E   D

The graph is inserted as a PNG+image map into HTML and a PDF in
LaTeX.
"""

import inspect
import os
import re
import subprocess
try:
    from hashlib import md5
except ImportError:
    from md5 import md5

from docutils.nodes import Body, Element
from docutils.parsers.rst import directives
from sphinx.roles import xfileref_role

def my_import(name):
    """Module importer - taken from the python documentation.

    This function allows importing names with dots in them."""

    mod = __import__(name)
    components = name.split('.')
    for comp in components[1:]:
        mod = getattr(mod, comp)
    return mod

class DotException(Exception):
    pass

class InheritanceGraph:
    """
    Given a list of classes, determines the set of classes that
    they inherit from all the way to the root "object", and then
    is able to generate a graphviz dot graph from them.
    """
    def __init__(self, class_names, show_builtins=False):
        """
        *class_names* is a list of child classes to show bases from.

        If *show_builtins* is True, then Python builtins will be shown
        in the graph.
        """
        self.class_names = class_names
        self.classes = self._import_classes(class_names)
        self.all_classes = self._all_classes(self.classes)
        if len(self.all_classes) == 0:
            raise ValueError("No classes found for inheritance diagram")
        self.show_builtins = show_builtins

    py_sig_re = re.compile(r'''^([\w.]*\.)?    # class names
                           (\w+)  \s* $        # optionally arguments
                           ''', re.VERBOSE)

    def _import_class_or_module(self, name):
        """
        Import a class using its fully-qualified *name*.
        """
        try:
            path, base = self.py_sig_re.match(name).groups()
        except:
            raise ValueError(
                "Invalid class or module '%s' specified for inheritance diagram" % name)
        fullname = (path or '') + base
        path = (path and path.rstrip('.'))
        if not path:
            path = base
        try:
            module = __import__(path, None, None, [])
            # We must do an import of the fully qualified name.  Otherwise if a
            # subpackage 'a.b' is requested where 'import a' does NOT provide
            # 'a.b' automatically, then 'a.b' will not be found below.  This
            # second call will force the equivalent of 'import a.b' to happen
            # after the top-level import above.
            my_import(fullname)

        except ImportError:
            raise ValueError(
                "Could not import class or module '%s' specified for inheritance diagram" % name)

        try:
            todoc = module
            for comp in fullname.split('.')[1:]:
                todoc = getattr(todoc, comp)
        except AttributeError:
            raise ValueError(
                "Could not find class or module '%s' specified for inheritance diagram" % name)

        # If a class, just return it
        if inspect.isclass(todoc):
            return [todoc]
        elif inspect.ismodule(todoc):
            classes = []
            for cls in list(todoc.__dict__.values()):
                if inspect.isclass(cls) and cls.__module__ == todoc.__name__:
                    classes.append(cls)
            return classes
        raise ValueError(
            "'%s' does not resolve to a class or module" % name)

    def _import_classes(self, class_names):
        """
        Import a list of classes.
        """
        classes = []
        for name in class_names:
            classes.extend(self._import_class_or_module(name))
        return classes

    def _all_classes(self, classes):
        """
        Return a list of all classes that are ancestors of *classes*.
        """
        all_classes = {}

        def recurse(cls):
            all_classes[cls] = None
            for c in cls.__bases__:
                if c not in all_classes:
                    recurse(c)

        for cls in classes:
            recurse(cls)

        return list(all_classes.keys())

    def class_name(self, cls, parts=0):
        """
        Given a class object, return a fully-qualified name.  This
        works for things I've tested in matplotlib so far, but may not
        be completely general.
        """
        module = cls.__module__
        if module == '__builtin__':
            fullname = cls.__name__
        else:
            fullname = "%s.%s" % (module, cls.__name__)
        if parts == 0:
            return fullname
        name_parts = fullname.split('.')
        return '.'.join(name_parts[-parts:])

    def get_all_class_names(self):
        """
        Get all of the class names involved in the graph.
        """
        return [self.class_name(x) for x in self.all_classes]

    # These are the default options for graphviz
    default_graph_options = {
        "rankdir": "LR",
        "size": '"8.0, 12.0"'
        }
    default_node_options = {
        "shape": "box",
        "fontsize": 10,
        "height": 0.25,
        "fontname": "Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",
        "style": '"setlinewidth(0.5)"'
        }
    default_edge_options = {
        "arrowsize": 0.5,
        "style": '"setlinewidth(0.5)"'
        }

    def _format_node_options(self, options):
        return ','.join(["%s=%s" % x for x in list(options.items())])
    def _format_graph_options(self, options):
        return ''.join(["%s=%s;\n" % x for x in list(options.items())])
|
||||||
|
|
||||||
|
def generate_dot(self, fd, name, parts=0, urls={},
|
||||||
|
graph_options={}, node_options={},
|
||||||
|
edge_options={}):
|
||||||
|
"""
|
||||||
|
Generate a graphviz dot graph from the classes that
|
||||||
|
were passed in to __init__.
|
||||||
|
|
||||||
|
*fd* is a Python file-like object to write to.
|
||||||
|
|
||||||
|
*name* is the name of the graph
|
||||||
|
|
||||||
|
*urls* is a dictionary mapping class names to http urls
|
||||||
|
|
||||||
|
*graph_options*, *node_options*, *edge_options* are
|
||||||
|
dictionaries containing key/value pairs to pass on as graphviz
|
||||||
|
properties.
|
||||||
|
"""
|
||||||
|
g_options = self.default_graph_options.copy()
|
||||||
|
g_options.update(graph_options)
|
||||||
|
n_options = self.default_node_options.copy()
|
||||||
|
n_options.update(node_options)
|
||||||
|
e_options = self.default_edge_options.copy()
|
||||||
|
e_options.update(edge_options)
|
||||||
|
|
||||||
|
fd.write('digraph %s {\n' % name)
|
||||||
|
fd.write(self._format_graph_options(g_options))
|
||||||
|
|
||||||
|
for cls in self.all_classes:
|
||||||
|
if not self.show_builtins and cls in list(__builtins__.values()):
|
||||||
|
continue
|
||||||
|
|
||||||
|
name = self.class_name(cls, parts)
|
||||||
|
|
||||||
|
# Write the node
|
||||||
|
this_node_options = n_options.copy()
|
||||||
|
url = urls.get(self.class_name(cls))
|
||||||
|
if url is not None:
|
||||||
|
this_node_options['URL'] = '"%s"' % url
|
||||||
|
fd.write(' "%s" [%s];\n' %
|
||||||
|
(name, self._format_node_options(this_node_options)))
|
||||||
|
|
||||||
|
# Write the edges
|
||||||
|
for base in cls.__bases__:
|
||||||
|
if not self.show_builtins and base in list(__builtins__.values()):
|
||||||
|
continue
|
||||||
|
|
||||||
|
base_name = self.class_name(base, parts)
|
||||||
|
fd.write(' "%s" -> "%s" [%s];\n' %
|
||||||
|
(base_name, name,
|
||||||
|
self._format_node_options(e_options)))
|
||||||
|
fd.write('}\n')
|
||||||
|
|
||||||
|
def run_dot(self, args, name, parts=0, urls={},
|
||||||
|
graph_options={}, node_options={}, edge_options={}):
|
||||||
|
"""
|
||||||
|
Run graphviz 'dot' over this graph, returning whatever 'dot'
|
||||||
|
writes to stdout.
|
||||||
|
|
||||||
|
*args* will be passed along as commandline arguments.
|
||||||
|
|
||||||
|
*name* is the name of the graph
|
||||||
|
|
||||||
|
*urls* is a dictionary mapping class names to http urls
|
||||||
|
|
||||||
|
Raises DotException for any of the many os and
|
||||||
|
installation-related errors that may occur.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
dot = subprocess.Popen(['dot'] + list(args),
|
||||||
|
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
|
||||||
|
close_fds=True)
|
||||||
|
except OSError:
|
||||||
|
raise DotException("Could not execute 'dot'. Are you sure you have 'graphviz' installed?")
|
||||||
|
except ValueError:
|
||||||
|
raise DotException("'dot' called with invalid arguments")
|
||||||
|
except:
|
||||||
|
raise DotException("Unexpected error calling 'dot'")
|
||||||
|
|
||||||
|
self.generate_dot(dot.stdin, name, parts, urls, graph_options,
|
||||||
|
node_options, edge_options)
|
||||||
|
dot.stdin.close()
|
||||||
|
result = dot.stdout.read()
|
||||||
|
returncode = dot.wait()
|
||||||
|
if returncode != 0:
|
||||||
|
raise DotException("'dot' returned the errorcode %d" % returncode)
|
||||||
|
return result
|
||||||
|
|
||||||
|
class inheritance_diagram(Body, Element):
|
||||||
|
"""
|
||||||
|
A docutils node to use as a placeholder for the inheritance
|
||||||
|
diagram.
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
def inheritance_diagram_directive(name, arguments, options, content, lineno,
|
||||||
|
content_offset, block_text, state,
|
||||||
|
state_machine):
|
||||||
|
"""
|
||||||
|
Run when the inheritance_diagram directive is first encountered.
|
||||||
|
"""
|
||||||
|
node = inheritance_diagram()
|
||||||
|
|
||||||
|
class_names = arguments
|
||||||
|
|
||||||
|
# Create a graph starting with the list of classes
|
||||||
|
graph = InheritanceGraph(class_names)
|
||||||
|
|
||||||
|
# Create xref nodes for each target of the graph's image map and
|
||||||
|
# add them to the doc tree so that Sphinx can resolve the
|
||||||
|
# references to real URLs later. These nodes will eventually be
|
||||||
|
# removed from the doctree after we're done with them.
|
||||||
|
for name in graph.get_all_class_names():
|
||||||
|
refnodes, x = xfileref_role(
|
||||||
|
'class', ':class:`%s`' % name, name, 0, state)
|
||||||
|
node.extend(refnodes)
|
||||||
|
# Store the graph object so we can use it to generate the
|
||||||
|
# dot file later
|
||||||
|
node['graph'] = graph
|
||||||
|
# Store the original content for use as a hash
|
||||||
|
node['parts'] = options.get('parts', 0)
|
||||||
|
node['content'] = " ".join(class_names)
|
||||||
|
return [node]
|
||||||
|
|
||||||
|
def get_graph_hash(node):
|
||||||
|
return md5(node['content'] + str(node['parts'])).hexdigest()[-10:]
|
||||||
|
|
||||||
|
def html_output_graph(self, node):
|
||||||
|
"""
|
||||||
|
Output the graph for HTML. This will insert a PNG with clickable
|
||||||
|
image map.
|
||||||
|
"""
|
||||||
|
graph = node['graph']
|
||||||
|
parts = node['parts']
|
||||||
|
|
||||||
|
graph_hash = get_graph_hash(node)
|
||||||
|
name = "inheritance%s" % graph_hash
|
||||||
|
path = '_images'
|
||||||
|
dest_path = os.path.join(setup.app.builder.outdir, path)
|
||||||
|
if not os.path.exists(dest_path):
|
||||||
|
os.makedirs(dest_path)
|
||||||
|
png_path = os.path.join(dest_path, name + ".png")
|
||||||
|
path = setup.app.builder.imgpath
|
||||||
|
|
||||||
|
# Create a mapping from fully-qualified class names to URLs.
|
||||||
|
urls = {}
|
||||||
|
for child in node:
|
||||||
|
if child.get('refuri') is not None:
|
||||||
|
urls[child['reftitle']] = child.get('refuri')
|
||||||
|
elif child.get('refid') is not None:
|
||||||
|
urls[child['reftitle']] = '#' + child.get('refid')
|
||||||
|
|
||||||
|
# These arguments to dot will save a PNG file to disk and write
|
||||||
|
# an HTML image map to stdout.
|
||||||
|
image_map = graph.run_dot(['-Tpng', '-o%s' % png_path, '-Tcmapx'],
|
||||||
|
name, parts, urls)
|
||||||
|
return ('<img src="%s/%s.png" usemap="#%s" class="inheritance"/>%s' %
|
||||||
|
(path, name, name, image_map))
|
||||||
|
|
||||||
|
def latex_output_graph(self, node):
|
||||||
|
"""
|
||||||
|
Output the graph for LaTeX. This will insert a PDF.
|
||||||
|
"""
|
||||||
|
graph = node['graph']
|
||||||
|
parts = node['parts']
|
||||||
|
|
||||||
|
graph_hash = get_graph_hash(node)
|
||||||
|
name = "inheritance%s" % graph_hash
|
||||||
|
dest_path = os.path.abspath(os.path.join(setup.app.builder.outdir, '_images'))
|
||||||
|
if not os.path.exists(dest_path):
|
||||||
|
os.makedirs(dest_path)
|
||||||
|
pdf_path = os.path.abspath(os.path.join(dest_path, name + ".pdf"))
|
||||||
|
|
||||||
|
graph.run_dot(['-Tpdf', '-o%s' % pdf_path],
|
||||||
|
name, parts, graph_options={'size': '"6.0,6.0"'})
|
||||||
|
return '\n\\includegraphics{%s}\n\n' % pdf_path
|
||||||
|
|
||||||
|
def visit_inheritance_diagram(inner_func):
|
||||||
|
"""
|
||||||
|
This is just a wrapper around html/latex_output_graph to make it
|
||||||
|
easier to handle errors and insert warnings.
|
||||||
|
"""
|
||||||
|
def visitor(self, node):
|
||||||
|
try:
|
||||||
|
content = inner_func(self, node)
|
||||||
|
except DotException as e:
|
||||||
|
# Insert the exception as a warning in the document
|
||||||
|
warning = self.document.reporter.warning(str(e), line=node.line)
|
||||||
|
warning.parent = node
|
||||||
|
node.children = [warning]
|
||||||
|
else:
|
||||||
|
source = self.document.attributes['source']
|
||||||
|
self.body.append(content)
|
||||||
|
node.children = []
|
||||||
|
return visitor
|
||||||
|
|
||||||
|
def do_nothing(self, node):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def setup(app):
|
||||||
|
setup.app = app
|
||||||
|
setup.confdir = app.confdir
|
||||||
|
|
||||||
|
app.add_node(
|
||||||
|
inheritance_diagram,
|
||||||
|
latex=(visit_inheritance_diagram(latex_output_graph), do_nothing),
|
||||||
|
html=(visit_inheritance_diagram(html_output_graph), do_nothing))
|
||||||
|
app.add_directive(
|
||||||
|
'inheritance-diagram', inheritance_diagram_directive,
|
||||||
|
False, (1, 100, 0), parts = directives.nonnegative_int)
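
A minimal sketch (not part of the diff) that drives InheritanceGraph directly and prints the graphviz source it generates; it assumes doc/sphinxext/numpydoc is on sys.path so the module imports, and uses the standard collections module purely as an illustrative target:

    # sketch: generate 'dot' source for all classes defined in a module
    import io
    from inheritance_diagram import InheritanceGraph

    graph = InheritanceGraph(['collections'])  # a module: all its classes are used
    buf = io.StringIO()
    graph.generate_dot(buf, 'example')         # writes 'digraph example { ... }'
    print(buf.getvalue())

Piping that output through graphviz's dot, as run_dot() does, yields the rendered image.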
114
doc/sphinxext/numpydoc/ipython_console_highlighting.py
Normal file
@@ -0,0 +1,114 @@
"""reST directive for syntax-highlighting ipython interactive sessions.
|
||||||
|
|
||||||
|
XXX - See what improvements can be made based on the new (as of Sept 2009)
|
||||||
|
'pycon' lexer for the python console. At the very least it will give better
|
||||||
|
highlighted tracebacks.
|
||||||
|
"""
|
||||||
|
|
||||||
|
#-----------------------------------------------------------------------------
|
||||||
|
# Needed modules
|
||||||
|
|
||||||
|
# Standard library
|
||||||
|
import re
|
||||||
|
|
||||||
|
# Third party
|
||||||
|
from pygments.lexer import Lexer, do_insertions
|
||||||
|
from pygments.lexers.agile import (PythonConsoleLexer, PythonLexer,
|
||||||
|
PythonTracebackLexer)
|
||||||
|
from pygments.token import Comment, Generic
|
||||||
|
|
||||||
|
from sphinx import highlighting
|
||||||
|
|
||||||
|
#-----------------------------------------------------------------------------
|
||||||
|
# Global constants
|
||||||
|
line_re = re.compile('.*?\n')
|
||||||
|
|
||||||
|
#-----------------------------------------------------------------------------
|
||||||
|
# Code begins - classes and functions
|
||||||
|
|
||||||
|
class IPythonConsoleLexer(Lexer):
|
||||||
|
"""
|
||||||
|
For IPython console output or doctests, such as:
|
||||||
|
|
||||||
|
.. sourcecode:: ipython
|
||||||
|
|
||||||
|
In [1]: a = 'foo'
|
||||||
|
|
||||||
|
In [2]: a
|
||||||
|
Out[2]: 'foo'
|
||||||
|
|
||||||
|
In [3]: print a
|
||||||
|
foo
|
||||||
|
|
||||||
|
In [4]: 1 / 0
|
||||||
|
|
||||||
|
Notes:
|
||||||
|
|
||||||
|
- Tracebacks are not currently supported.
|
||||||
|
|
||||||
|
- It assumes the default IPython prompts, not customized ones.
|
||||||
|
"""
|
||||||
|
|
||||||
|
name = 'IPython console session'
|
||||||
|
aliases = ['ipython']
|
||||||
|
mimetypes = ['text/x-ipython-console']
|
||||||
|
input_prompt = re.compile("(In \[[0-9]+\]: )|( \.\.\.+:)")
|
||||||
|
output_prompt = re.compile("(Out\[[0-9]+\]: )|( \.\.\.+:)")
|
||||||
|
continue_prompt = re.compile(" \.\.\.+:")
|
||||||
|
tb_start = re.compile("\-+")
|
||||||
|
|
||||||
|
def get_tokens_unprocessed(self, text):
|
||||||
|
pylexer = PythonLexer(**self.options)
|
||||||
|
tblexer = PythonTracebackLexer(**self.options)
|
||||||
|
|
||||||
|
curcode = ''
|
||||||
|
insertions = []
|
||||||
|
for match in line_re.finditer(text):
|
||||||
|
line = match.group()
|
||||||
|
input_prompt = self.input_prompt.match(line)
|
||||||
|
continue_prompt = self.continue_prompt.match(line.rstrip())
|
||||||
|
output_prompt = self.output_prompt.match(line)
|
||||||
|
if line.startswith("#"):
|
||||||
|
insertions.append((len(curcode),
|
||||||
|
[(0, Comment, line)]))
|
||||||
|
elif input_prompt is not None:
|
||||||
|
insertions.append((len(curcode),
|
||||||
|
[(0, Generic.Prompt, input_prompt.group())]))
|
||||||
|
curcode += line[input_prompt.end():]
|
||||||
|
elif continue_prompt is not None:
|
||||||
|
insertions.append((len(curcode),
|
||||||
|
[(0, Generic.Prompt, continue_prompt.group())]))
|
||||||
|
curcode += line[continue_prompt.end():]
|
||||||
|
elif output_prompt is not None:
|
||||||
|
# Use the 'error' token for output. We should probably make
|
||||||
|
# our own token, but error is typicaly in a bright color like
|
||||||
|
# red, so it works fine for our output prompts.
|
||||||
|
insertions.append((len(curcode),
|
||||||
|
[(0, Generic.Error, output_prompt.group())]))
|
||||||
|
curcode += line[output_prompt.end():]
|
||||||
|
else:
|
||||||
|
if curcode:
|
||||||
|
for item in do_insertions(insertions,
|
||||||
|
pylexer.get_tokens_unprocessed(curcode)):
|
||||||
|
yield item
|
||||||
|
curcode = ''
|
||||||
|
insertions = []
|
||||||
|
yield match.start(), Generic.Output, line
|
||||||
|
if curcode:
|
||||||
|
for item in do_insertions(insertions,
|
||||||
|
pylexer.get_tokens_unprocessed(curcode)):
|
||||||
|
yield item
|
||||||
|
|
||||||
|
|
||||||
|
def setup(app):
|
||||||
|
"""Setup as a sphinx extension."""
|
||||||
|
|
||||||
|
# This is only a lexer, so adding it below to pygments appears sufficient.
|
||||||
|
# But if somebody knows that the right API usage should be to do that via
|
||||||
|
# sphinx, by all means fix it here. At least having this setup.py
|
||||||
|
# suppresses the sphinx warning we'd get without it.
|
||||||
|
pass
|
||||||
|
|
||||||
|
#-----------------------------------------------------------------------------
|
||||||
|
# Register the extension as a valid pygments lexer
|
||||||
|
highlighting.lexers['ipython'] = IPythonConsoleLexer()
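
Since the lexer is registered with pygments itself, it can also be exercised standalone; a sketch (not part of the diff), assuming a pygments version that still provides pygments.lexers.agile:

    # sketch: highlight an IPython session string outside of Sphinx
    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    from ipython_console_highlighting import IPythonConsoleLexer

    session = "In [1]: a = 'foo'\nIn [2]: a\nOut[2]: 'foo'\n"
    print(highlight(session, IPythonConsoleLexer(), HtmlFormatter()))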
116
doc/sphinxext/numpydoc/numpydoc.py
Normal file
@@ -0,0 +1,116 @@
"""
|
||||||
|
========
|
||||||
|
numpydoc
|
||||||
|
========
|
||||||
|
|
||||||
|
Sphinx extension that handles docstrings in the Numpy standard format. [1]
|
||||||
|
|
||||||
|
It will:
|
||||||
|
|
||||||
|
- Convert Parameters etc. sections to field lists.
|
||||||
|
- Convert See Also section to a See also entry.
|
||||||
|
- Renumber references.
|
||||||
|
- Extract the signature from the docstring, if it can't be determined otherwise.
|
||||||
|
|
||||||
|
.. [1] http://projects.scipy.org/scipy/numpy/wiki/CodingStyleGuidelines#docstring-standard
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os, re, pydoc
|
||||||
|
from docscrape_sphinx import get_doc_object, SphinxDocString
|
||||||
|
import inspect
|
||||||
|
|
||||||
|
def mangle_docstrings(app, what, name, obj, options, lines,
|
||||||
|
reference_offset=[0]):
|
||||||
|
if what == 'module':
|
||||||
|
# Strip top title
|
||||||
|
title_re = re.compile(r'^\s*[#*=]{4,}\n[a-z0-9 -]+\n[#*=]{4,}\s*',
|
||||||
|
re.I|re.S)
|
||||||
|
lines[:] = title_re.sub('', "\n".join(lines)).split("\n")
|
||||||
|
else:
|
||||||
|
doc = get_doc_object(obj, what, "\n".join(lines))
|
||||||
|
lines[:] = str(doc).split("\n")
|
||||||
|
|
||||||
|
if app.config.numpydoc_edit_link and hasattr(obj, '__name__') and \
|
||||||
|
obj.__name__:
|
||||||
|
if hasattr(obj, '__module__'):
|
||||||
|
v = dict(full_name="%s.%s" % (obj.__module__, obj.__name__))
|
||||||
|
else:
|
||||||
|
v = dict(full_name=obj.__name__)
|
||||||
|
lines += ['', '.. htmlonly::', '']
|
||||||
|
lines += [' %s' % x for x in
|
||||||
|
(app.config.numpydoc_edit_link % v).split("\n")]
|
||||||
|
|
||||||
|
# replace reference numbers so that there are no duplicates
|
||||||
|
references = []
|
||||||
|
for l in lines:
|
||||||
|
l = l.strip()
|
||||||
|
if l.startswith('.. ['):
|
||||||
|
try:
|
||||||
|
references.append(int(l[len('.. ['):l.index(']')]))
|
||||||
|
except ValueError:
|
||||||
|
print("WARNING: invalid reference in %s docstring" % name)
|
||||||
|
|
||||||
|
# Start renaming from the biggest number, otherwise we may
|
||||||
|
# overwrite references.
|
||||||
|
references.sort()
|
||||||
|
if references:
|
||||||
|
for i, line in enumerate(lines):
|
||||||
|
for r in references:
|
||||||
|
new_r = reference_offset[0] + r
|
||||||
|
lines[i] = lines[i].replace('[%d]_' % r,
|
||||||
|
'[%d]_' % new_r)
|
||||||
|
lines[i] = lines[i].replace('.. [%d]' % r,
|
||||||
|
'.. [%d]' % new_r)
|
||||||
|
|
||||||
|
reference_offset[0] += len(references)
|
||||||
|
|
||||||
|
def mangle_signature(app, what, name, obj, options, sig, retann):
|
||||||
|
# Do not try to inspect classes that don't define `__init__`
|
||||||
|
if (inspect.isclass(obj) and
|
||||||
|
'initializes x; see ' in pydoc.getdoc(obj.__init__)):
|
||||||
|
return '', ''
|
||||||
|
|
||||||
|
if not (callable(obj) or hasattr(obj, '__argspec_is_invalid_')): return
|
||||||
|
if not hasattr(obj, '__doc__'): return
|
||||||
|
|
||||||
|
doc = SphinxDocString(pydoc.getdoc(obj))
|
||||||
|
if doc['Signature']:
|
||||||
|
sig = re.sub("^[^(]*", "", doc['Signature'])
|
||||||
|
return sig, ''
|
||||||
|
|
||||||
|
def initialize(app):
|
||||||
|
try:
|
||||||
|
app.connect('autodoc-process-signature', mangle_signature)
|
||||||
|
except:
|
||||||
|
monkeypatch_sphinx_ext_autodoc()
|
||||||
|
|
||||||
|
def setup(app, get_doc_object_=get_doc_object):
|
||||||
|
global get_doc_object
|
||||||
|
get_doc_object = get_doc_object_
|
||||||
|
|
||||||
|
app.connect('autodoc-process-docstring', mangle_docstrings)
|
||||||
|
app.connect('builder-inited', initialize)
|
||||||
|
app.add_config_value('numpydoc_edit_link', None, True)
|
||||||
|
|
||||||
|
#------------------------------------------------------------------------------
|
||||||
|
# Monkeypatch sphinx.ext.autodoc to accept argspecless autodocs (Sphinx < 0.5)
|
||||||
|
#------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
def monkeypatch_sphinx_ext_autodoc():
|
||||||
|
global _original_format_signature
|
||||||
|
import sphinx.ext.autodoc
|
||||||
|
|
||||||
|
if sphinx.ext.autodoc.format_signature is our_format_signature:
|
||||||
|
return
|
||||||
|
|
||||||
|
print("[numpydoc] Monkeypatching sphinx.ext.autodoc ...")
|
||||||
|
_original_format_signature = sphinx.ext.autodoc.format_signature
|
||||||
|
sphinx.ext.autodoc.format_signature = our_format_signature
|
||||||
|
|
||||||
|
def our_format_signature(what, obj):
|
||||||
|
r = mangle_signature(None, what, None, obj, None, None, None)
|
||||||
|
if r is not None:
|
||||||
|
return r[0]
|
||||||
|
else:
|
||||||
|
return _original_format_signature(what, obj)
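
For orientation, this is how the extension is typically wired into a project's conf.py (a sketch, not part of the diff; the sys.path line assumes the sphinxext layout used in this repository):

    # conf.py (sketch)
    import sys, os
    sys.path.insert(0, os.path.abspath('sphinxext/numpydoc'))

    extensions = ['numpydoc']   # setup() above registers the autodoc hooks
    numpydoc_edit_link = None   # config value declared in setup()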
773
doc/sphinxext/numpydoc/plot_directive.py
Normal file
@@ -0,0 +1,773 @@
"""
|
||||||
|
A directive for including a matplotlib plot in a Sphinx document.
|
||||||
|
|
||||||
|
By default, in HTML output, `plot` will include a .png file with a
|
||||||
|
link to a high-res .png and .pdf. In LaTeX output, it will include a
|
||||||
|
.pdf.
|
||||||
|
|
||||||
|
The source code for the plot may be included in one of three ways:
|
||||||
|
|
||||||
|
1. **A path to a source file** as the argument to the directive::
|
||||||
|
|
||||||
|
.. plot:: path/to/plot.py
|
||||||
|
|
||||||
|
When a path to a source file is given, the content of the
|
||||||
|
directive may optionally contain a caption for the plot::
|
||||||
|
|
||||||
|
.. plot:: path/to/plot.py
|
||||||
|
|
||||||
|
This is the caption for the plot
|
||||||
|
|
||||||
|
Additionally, one my specify the name of a function to call (with
|
||||||
|
no arguments) immediately after importing the module::
|
||||||
|
|
||||||
|
.. plot:: path/to/plot.py plot_function1
|
||||||
|
|
||||||
|
2. Included as **inline content** to the directive::
|
||||||
|
|
||||||
|
.. plot::
|
||||||
|
|
||||||
|
import matplotlib.pyplot as plt
|
||||||
|
import matplotlib.image as mpimg
|
||||||
|
import numpy as np
|
||||||
|
img = mpimg.imread('_static/stinkbug.png')
|
||||||
|
imgplot = plt.imshow(img)
|
||||||
|
|
||||||
|
3. Using **doctest** syntax::
|
||||||
|
|
||||||
|
.. plot::
|
||||||
|
A plotting example:
|
||||||
|
>>> import matplotlib.pyplot as plt
|
||||||
|
>>> plt.plot([1,2,3], [4,5,6])
|
||||||
|
|
||||||
|
Options
|
||||||
|
-------
|
||||||
|
|
||||||
|
The ``plot`` directive supports the following options:
|
||||||
|
|
||||||
|
format : {'python', 'doctest'}
|
||||||
|
Specify the format of the input
|
||||||
|
|
||||||
|
include-source : bool
|
||||||
|
Whether to display the source code. The default can be changed
|
||||||
|
using the `plot_include_source` variable in conf.py
|
||||||
|
|
||||||
|
encoding : str
|
||||||
|
If this source file is in a non-UTF8 or non-ASCII encoding,
|
||||||
|
the encoding must be specified using the `:encoding:` option.
|
||||||
|
The encoding will not be inferred using the ``-*- coding -*-``
|
||||||
|
metacomment.
|
||||||
|
|
||||||
|
context : bool
|
||||||
|
If provided, the code will be run in the context of all
|
||||||
|
previous plot directives for which the `:context:` option was
|
||||||
|
specified. This only applies to inline code plot directives,
|
||||||
|
not those run from files.
|
||||||
|
|
||||||
|
nofigs : bool
|
||||||
|
If specified, the code block will be run, but no figures will
|
||||||
|
be inserted. This is usually useful with the ``:context:``
|
||||||
|
option.
|
||||||
|
|
||||||
|
Additionally, this directive supports all of the options of the
|
||||||
|
`image` directive, except for `target` (since plot will add its own
|
||||||
|
target). These include `alt`, `height`, `width`, `scale`, `align` and
|
||||||
|
`class`.
|
||||||
|
|
||||||
|
Configuration options
|
||||||
|
---------------------
|
||||||
|
|
||||||
|
The plot directive has the following configuration options:
|
||||||
|
|
||||||
|
plot_include_source
|
||||||
|
Default value for the include-source option
|
||||||
|
|
||||||
|
plot_pre_code
|
||||||
|
Code that should be executed before each plot.
|
||||||
|
|
||||||
|
plot_basedir
|
||||||
|
Base directory, to which ``plot::`` file names are relative
|
||||||
|
to. (If None or empty, file names are relative to the
|
||||||
|
directoly where the file containing the directive is.)
|
||||||
|
|
||||||
|
plot_formats
|
||||||
|
File formats to generate. List of tuples or strings::
|
||||||
|
|
||||||
|
[(suffix, dpi), suffix, ...]
|
||||||
|
|
||||||
|
that determine the file format and the DPI. For entries whose
|
||||||
|
DPI was omitted, sensible defaults are chosen.
|
||||||
|
|
||||||
|
plot_html_show_formats
|
||||||
|
Whether to show links to the files in HTML.
|
||||||
|
|
||||||
|
plot_rcparams
|
||||||
|
A dictionary containing any non-standard rcParams that should
|
||||||
|
be applied before each plot.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
import sys, os, glob, shutil, imp, warnings, io, re, textwrap, \
|
||||||
|
traceback, exceptions
|
||||||
|
|
||||||
|
from docutils.parsers.rst import directives
|
||||||
|
from docutils import nodes
|
||||||
|
from docutils.parsers.rst.directives.images import Image
|
||||||
|
align = Image.align
|
||||||
|
import sphinx
|
||||||
|
|
||||||
|
sphinx_version = sphinx.__version__.split(".")
|
||||||
|
# The split is necessary for sphinx beta versions where the string is
|
||||||
|
# '6b1'
|
||||||
|
sphinx_version = tuple([int(re.split('[a-z]', x)[0])
|
||||||
|
for x in sphinx_version[:2]])
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Sphinx depends on either Jinja or Jinja2
|
||||||
|
import jinja2
|
||||||
|
def format_template(template, **kw):
|
||||||
|
return jinja2.Template(template).render(**kw)
|
||||||
|
except ImportError:
|
||||||
|
import jinja
|
||||||
|
def format_template(template, **kw):
|
||||||
|
return jinja.from_string(template, **kw)
|
||||||
|
|
||||||
|
import matplotlib
|
||||||
|
import matplotlib.cbook as cbook
|
||||||
|
matplotlib.use('Agg')
|
||||||
|
import matplotlib.pyplot as plt
|
||||||
|
from matplotlib import _pylab_helpers
|
||||||
|
|
||||||
|
__version__ = 2
|
||||||
|
|
||||||
|
#------------------------------------------------------------------------------
|
||||||
|
# Relative pathnames
|
||||||
|
#------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
# os.path.relpath is new in Python 2.6
|
||||||
|
try:
|
||||||
|
from os.path import relpath
|
||||||
|
except ImportError:
|
||||||
|
# Copied from Python 2.7
|
||||||
|
if 'posix' in sys.builtin_module_names:
|
||||||
|
def relpath(path, start=os.path.curdir):
|
||||||
|
"""Return a relative version of a path"""
|
||||||
|
from os.path import sep, curdir, join, abspath, commonprefix, \
|
||||||
|
pardir
|
||||||
|
|
||||||
|
if not path:
|
||||||
|
raise ValueError("no path specified")
|
||||||
|
|
||||||
|
start_list = abspath(start).split(sep)
|
||||||
|
path_list = abspath(path).split(sep)
|
||||||
|
|
||||||
|
# Work out how much of the filepath is shared by start and path.
|
||||||
|
i = len(commonprefix([start_list, path_list]))
|
||||||
|
|
||||||
|
rel_list = [pardir] * (len(start_list)-i) + path_list[i:]
|
||||||
|
if not rel_list:
|
||||||
|
return curdir
|
||||||
|
return join(*rel_list)
|
||||||
|
elif 'nt' in sys.builtin_module_names:
|
||||||
|
def relpath(path, start=os.path.curdir):
|
||||||
|
"""Return a relative version of a path"""
|
||||||
|
from os.path import sep, curdir, join, abspath, commonprefix, \
|
||||||
|
pardir, splitunc
|
||||||
|
|
||||||
|
if not path:
|
||||||
|
raise ValueError("no path specified")
|
||||||
|
start_list = abspath(start).split(sep)
|
||||||
|
path_list = abspath(path).split(sep)
|
||||||
|
if start_list[0].lower() != path_list[0].lower():
|
||||||
|
unc_path, rest = splitunc(path)
|
||||||
|
unc_start, rest = splitunc(start)
|
||||||
|
if bool(unc_path) ^ bool(unc_start):
|
||||||
|
raise ValueError("Cannot mix UNC and non-UNC paths (%s and %s)"
|
||||||
|
% (path, start))
|
||||||
|
else:
|
||||||
|
raise ValueError("path is on drive %s, start on drive %s"
|
||||||
|
% (path_list[0], start_list[0]))
|
||||||
|
# Work out how much of the filepath is shared by start and path.
|
||||||
|
for i in range(min(len(start_list), len(path_list))):
|
||||||
|
if start_list[i].lower() != path_list[i].lower():
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
i += 1
|
||||||
|
|
||||||
|
rel_list = [pardir] * (len(start_list)-i) + path_list[i:]
|
||||||
|
if not rel_list:
|
||||||
|
return curdir
|
||||||
|
return join(*rel_list)
|
||||||
|
else:
|
||||||
|
raise RuntimeError("Unsupported platform (no relpath available!)")
|
||||||
|
|
||||||
|
#------------------------------------------------------------------------------
|
||||||
|
# Registration hook
|
||||||
|
#------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
def plot_directive(name, arguments, options, content, lineno,
|
||||||
|
content_offset, block_text, state, state_machine):
|
||||||
|
return run(arguments, content, options, state_machine, state, lineno)
|
||||||
|
plot_directive.__doc__ = __doc__
|
||||||
|
|
||||||
|
def _option_boolean(arg):
|
||||||
|
if not arg or not arg.strip():
|
||||||
|
# no argument given, assume used as a flag
|
||||||
|
return True
|
||||||
|
elif arg.strip().lower() in ('no', '0', 'false'):
|
||||||
|
return False
|
||||||
|
elif arg.strip().lower() in ('yes', '1', 'true'):
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
raise ValueError('"%s" unknown boolean' % arg)
|
||||||
|
|
||||||
|
def _option_format(arg):
|
||||||
|
return directives.choice(arg, ('python', 'doctest'))
|
||||||
|
|
||||||
|
def _option_align(arg):
|
||||||
|
return directives.choice(arg, ("top", "middle", "bottom", "left", "center",
|
||||||
|
"right"))
|
||||||
|
|
||||||
|
def mark_plot_labels(app, document):
|
||||||
|
"""
|
||||||
|
To make plots referenceable, we need to move the reference from
|
||||||
|
the "htmlonly" (or "latexonly") node to the actual figure node
|
||||||
|
itself.
|
||||||
|
"""
|
||||||
|
for name, explicit in document.nametypes.items():
|
||||||
|
if not explicit:
|
||||||
|
continue
|
||||||
|
labelid = document.nameids[name]
|
||||||
|
if labelid is None:
|
||||||
|
continue
|
||||||
|
node = document.ids[labelid]
|
||||||
|
if node.tagname in ('html_only', 'latex_only'):
|
||||||
|
for n in node:
|
||||||
|
if n.tagname == 'figure':
|
||||||
|
sectname = name
|
||||||
|
for c in n:
|
||||||
|
if c.tagname == 'caption':
|
||||||
|
sectname = c.astext()
|
||||||
|
break
|
||||||
|
|
||||||
|
node['ids'].remove(labelid)
|
||||||
|
node['names'].remove(name)
|
||||||
|
n['ids'].append(labelid)
|
||||||
|
n['names'].append(name)
|
||||||
|
document.settings.env.labels[name] = \
|
||||||
|
document.settings.env.docname, labelid, sectname
|
||||||
|
break
|
||||||
|
|
||||||
|
def setup(app):
|
||||||
|
setup.app = app
|
||||||
|
setup.config = app.config
|
||||||
|
setup.confdir = app.confdir
|
||||||
|
|
||||||
|
options = {'alt': directives.unchanged,
|
||||||
|
'height': directives.length_or_unitless,
|
||||||
|
'width': directives.length_or_percentage_or_unitless,
|
||||||
|
'scale': directives.nonnegative_int,
|
||||||
|
'align': _option_align,
|
||||||
|
'class': directives.class_option,
|
||||||
|
'include-source': _option_boolean,
|
||||||
|
'format': _option_format,
|
||||||
|
'context': directives.flag,
|
||||||
|
'nofigs': directives.flag,
|
||||||
|
'encoding': directives.encoding
|
||||||
|
}
|
||||||
|
|
||||||
|
app.add_directive('plot', plot_directive, True, (0, 2, False), **options)
|
||||||
|
app.add_config_value('plot_pre_code', None, True)
|
||||||
|
app.add_config_value('plot_include_source', False, True)
|
||||||
|
app.add_config_value('plot_formats', ['png', 'hires.png', 'pdf'], True)
|
||||||
|
app.add_config_value('plot_basedir', None, True)
|
||||||
|
app.add_config_value('plot_html_show_formats', True, True)
|
||||||
|
app.add_config_value('plot_rcparams', {}, True)
|
||||||
|
|
||||||
|
app.connect('doctree-read', mark_plot_labels)
|
||||||
|
|
||||||
|
#------------------------------------------------------------------------------
|
||||||
|
# Doctest handling
|
||||||
|
#------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
def contains_doctest(text):
|
||||||
|
try:
|
||||||
|
# check if it's valid Python as-is
|
||||||
|
compile(text, '<string>', 'exec')
|
||||||
|
return False
|
||||||
|
except SyntaxError:
|
||||||
|
pass
|
||||||
|
r = re.compile(r'^\s*>>>', re.M)
|
||||||
|
m = r.search(text)
|
||||||
|
return bool(m)
|
||||||
|
|
||||||
|
def unescape_doctest(text):
|
||||||
|
"""
|
||||||
|
Extract code from a piece of text, which contains either Python code
|
||||||
|
or doctests.
|
||||||
|
|
||||||
|
"""
|
||||||
|
if not contains_doctest(text):
|
||||||
|
return text
|
||||||
|
|
||||||
|
code = ""
|
||||||
|
for line in text.split("\n"):
|
||||||
|
m = re.match(r'^\s*(>>>|\.\.\.) (.*)$', line)
|
||||||
|
if m:
|
||||||
|
code += m.group(2) + "\n"
|
||||||
|
elif line.strip():
|
||||||
|
code += "# " + line.strip() + "\n"
|
||||||
|
else:
|
||||||
|
code += "\n"
|
||||||
|
return code
|
||||||
|
|
||||||
|
def split_code_at_show(text):
|
||||||
|
"""
|
||||||
|
Split code at plt.show()
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
parts = []
|
||||||
|
is_doctest = contains_doctest(text)
|
||||||
|
|
||||||
|
part = []
|
||||||
|
for line in text.split("\n"):
|
||||||
|
if (not is_doctest and line.strip() == 'plt.show()') or \
|
||||||
|
(is_doctest and line.strip() == '>>> plt.show()'):
|
||||||
|
part.append(line)
|
||||||
|
parts.append("\n".join(part))
|
||||||
|
part = []
|
||||||
|
else:
|
||||||
|
part.append(line)
|
||||||
|
if "\n".join(part).strip():
|
||||||
|
parts.append("\n".join(part))
|
||||||
|
return parts
|
||||||
|
|
||||||
|
#------------------------------------------------------------------------------
|
||||||
|
# Template
|
||||||
|
#------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
TEMPLATE = """
|
||||||
|
{{ source_code }}
|
||||||
|
|
||||||
|
{{ only_html }}
|
||||||
|
|
||||||
|
{% if source_link or (html_show_formats and not multi_image) %}
|
||||||
|
(
|
||||||
|
{%- if source_link -%}
|
||||||
|
`Source code <{{ source_link }}>`__
|
||||||
|
{%- endif -%}
|
||||||
|
{%- if html_show_formats and not multi_image -%}
|
||||||
|
{%- for img in images -%}
|
||||||
|
{%- for fmt in img.formats -%}
|
||||||
|
{%- if source_link or not loop.first -%}, {% endif -%}
|
||||||
|
`{{ fmt }} <{{ dest_dir }}/{{ img.basename }}.{{ fmt }}>`__
|
||||||
|
{%- endfor -%}
|
||||||
|
{%- endfor -%}
|
||||||
|
{%- endif -%}
|
||||||
|
)
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% for img in images %}
|
||||||
|
.. figure:: {{ build_dir }}/{{ img.basename }}.png
|
||||||
|
{%- for option in options %}
|
||||||
|
{{ option }}
|
||||||
|
{% endfor %}
|
||||||
|
|
||||||
|
{% if html_show_formats and multi_image -%}
|
||||||
|
(
|
||||||
|
{%- for fmt in img.formats -%}
|
||||||
|
{%- if not loop.first -%}, {% endif -%}
|
||||||
|
`{{ fmt }} <{{ dest_dir }}/{{ img.basename }}.{{ fmt }}>`__
|
||||||
|
{%- endfor -%}
|
||||||
|
)
|
||||||
|
{%- endif -%}
|
||||||
|
|
||||||
|
{{ caption }}
|
||||||
|
{% endfor %}
|
||||||
|
|
||||||
|
{{ only_latex }}
|
||||||
|
|
||||||
|
{% for img in images %}
|
||||||
|
.. image:: {{ build_dir }}/{{ img.basename }}.pdf
|
||||||
|
{% endfor %}
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
exception_template = """
|
||||||
|
.. htmlonly::
|
||||||
|
|
||||||
|
[`source code <%(linkdir)s/%(basename)s.py>`__]
|
||||||
|
|
||||||
|
Exception occurred rendering plot.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
# the context of the plot for all directives specified with the
|
||||||
|
# :context: option
|
||||||
|
plot_context = dict()
|
||||||
|
|
||||||
|
class ImageFile:
|
||||||
|
def __init__(self, basename, dirname):
|
||||||
|
self.basename = basename
|
||||||
|
self.dirname = dirname
|
||||||
|
self.formats = []
|
||||||
|
|
||||||
|
def filename(self, format):
|
||||||
|
return os.path.join(self.dirname, "%s.%s" % (self.basename, format))
|
||||||
|
|
||||||
|
def filenames(self):
|
||||||
|
return [self.filename(fmt) for fmt in self.formats]
|
||||||
|
|
||||||
|
def out_of_date(original, derived):
|
||||||
|
"""
|
||||||
|
Returns True if derivative is out-of-date wrt original,
|
||||||
|
both of which are full file paths.
|
||||||
|
"""
|
||||||
|
return (not os.path.exists(derived) or
|
||||||
|
(os.path.exists(original) and
|
||||||
|
os.stat(derived).st_mtime < os.stat(original).st_mtime))
|
||||||
|
|
||||||
|
class PlotError(RuntimeError):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def run_code(code, code_path, ns=None, function_name=None):
|
||||||
|
"""
|
||||||
|
Import a Python module from a path, and run the function given by
|
||||||
|
name, if function_name is not None.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Change the working directory to the directory of the example, so
|
||||||
|
# it can get at its data files, if any. Add its path to sys.path
|
||||||
|
# so it can import any helper modules sitting beside it.
|
||||||
|
|
||||||
|
pwd = os.getcwd()
|
||||||
|
old_sys_path = list(sys.path)
|
||||||
|
if code_path is not None:
|
||||||
|
dirname = os.path.abspath(os.path.dirname(code_path))
|
||||||
|
os.chdir(dirname)
|
||||||
|
sys.path.insert(0, dirname)
|
||||||
|
|
||||||
|
# Redirect stdout
|
||||||
|
stdout = sys.stdout
|
||||||
|
sys.stdout = io.StringIO()
|
||||||
|
|
||||||
|
# Reset sys.argv
|
||||||
|
old_sys_argv = sys.argv
|
||||||
|
sys.argv = [code_path]
|
||||||
|
|
||||||
|
try:
|
||||||
|
try:
|
||||||
|
code = unescape_doctest(code)
|
||||||
|
if ns is None:
|
||||||
|
ns = {}
|
||||||
|
if not ns:
|
||||||
|
if setup.config.plot_pre_code is None:
|
||||||
|
exec("import numpy as np\nfrom matplotlib import pyplot as plt\n", ns)
|
||||||
|
else:
|
||||||
|
exec(setup.config.plot_pre_code, ns)
|
||||||
|
if "__main__" in code:
|
||||||
|
exec("__name__ = '__main__'", ns)
|
||||||
|
exec(code, ns)
|
||||||
|
if function_name is not None:
|
||||||
|
exec(function_name + "()", ns)
|
||||||
|
except (Exception, SystemExit) as err:
|
||||||
|
raise PlotError(traceback.format_exc())
|
||||||
|
finally:
|
||||||
|
os.chdir(pwd)
|
||||||
|
sys.argv = old_sys_argv
|
||||||
|
sys.path[:] = old_sys_path
|
||||||
|
sys.stdout = stdout
|
||||||
|
return ns
|
||||||
|
|
||||||
|
def clear_state(plot_rcparams):
|
||||||
|
plt.close('all')
|
||||||
|
matplotlib.rc_file_defaults()
|
||||||
|
matplotlib.rcParams.update(plot_rcparams)
|
||||||
|
|
||||||
|
def render_figures(code, code_path, output_dir, output_base, context,
|
||||||
|
function_name, config):
|
||||||
|
"""
|
||||||
|
Run a pyplot script and save the low and high res PNGs and a PDF
|
||||||
|
in outdir.
|
||||||
|
|
||||||
|
Save the images under *output_dir* with file names derived from
|
||||||
|
*output_base*
|
||||||
|
"""
|
||||||
|
# -- Parse format list
|
||||||
|
default_dpi = {'png': 80, 'hires.png': 200, 'pdf': 200}
|
||||||
|
formats = []
|
||||||
|
plot_formats = config.plot_formats
|
||||||
|
if isinstance(plot_formats, str):
|
||||||
|
plot_formats = eval(plot_formats)
|
||||||
|
for fmt in plot_formats:
|
||||||
|
if isinstance(fmt, str):
|
||||||
|
formats.append((fmt, default_dpi.get(fmt, 80)))
|
||||||
|
elif type(fmt) in (tuple, list) and len(fmt)==2:
|
||||||
|
formats.append((str(fmt[0]), int(fmt[1])))
|
||||||
|
else:
|
||||||
|
raise PlotError('invalid image format "%r" in plot_formats' % fmt)
|
||||||
|
|
||||||
|
# -- Try to determine if all images already exist
|
||||||
|
|
||||||
|
code_pieces = split_code_at_show(code)
|
||||||
|
|
||||||
|
# Look for single-figure output files first
|
||||||
|
# Look for single-figure output files first
|
||||||
|
all_exists = True
|
||||||
|
img = ImageFile(output_base, output_dir)
|
||||||
|
for format, dpi in formats:
|
||||||
|
if out_of_date(code_path, img.filename(format)):
|
||||||
|
all_exists = False
|
||||||
|
break
|
||||||
|
img.formats.append(format)
|
||||||
|
|
||||||
|
if all_exists:
|
||||||
|
return [(code, [img])]
|
||||||
|
|
||||||
|
# Then look for multi-figure output files
|
||||||
|
results = []
|
||||||
|
all_exists = True
|
||||||
|
for i, code_piece in enumerate(code_pieces):
|
||||||
|
images = []
|
||||||
|
for j in range(1000):
|
||||||
|
if len(code_pieces) > 1:
|
||||||
|
img = ImageFile('%s_%02d_%02d' % (output_base, i, j), output_dir)
|
||||||
|
else:
|
||||||
|
img = ImageFile('%s_%02d' % (output_base, j), output_dir)
|
||||||
|
for format, dpi in formats:
|
||||||
|
if out_of_date(code_path, img.filename(format)):
|
||||||
|
all_exists = False
|
||||||
|
break
|
||||||
|
img.formats.append(format)
|
||||||
|
|
||||||
|
# assume that if we have one, we have them all
|
||||||
|
if not all_exists:
|
||||||
|
all_exists = (j > 0)
|
||||||
|
break
|
||||||
|
images.append(img)
|
||||||
|
if not all_exists:
|
||||||
|
break
|
||||||
|
results.append((code_piece, images))
|
||||||
|
|
||||||
|
if all_exists:
|
||||||
|
return results
|
||||||
|
|
||||||
|
# We didn't find the files, so build them
|
||||||
|
|
||||||
|
results = []
|
||||||
|
if context:
|
||||||
|
ns = plot_context
|
||||||
|
else:
|
||||||
|
ns = {}
|
||||||
|
|
||||||
|
for i, code_piece in enumerate(code_pieces):
|
||||||
|
if not context:
|
||||||
|
clear_state(config.plot_rcparams)
|
||||||
|
run_code(code_piece, code_path, ns, function_name)
|
||||||
|
|
||||||
|
images = []
|
||||||
|
fig_managers = _pylab_helpers.Gcf.get_all_fig_managers()
|
||||||
|
for j, figman in enumerate(fig_managers):
|
||||||
|
if len(fig_managers) == 1 and len(code_pieces) == 1:
|
||||||
|
img = ImageFile(output_base, output_dir)
|
||||||
|
elif len(code_pieces) == 1:
|
||||||
|
img = ImageFile("%s_%02d" % (output_base, j), output_dir)
|
||||||
|
else:
|
||||||
|
img = ImageFile("%s_%02d_%02d" % (output_base, i, j),
|
||||||
|
output_dir)
|
||||||
|
images.append(img)
|
||||||
|
for format, dpi in formats:
|
||||||
|
try:
|
||||||
|
figman.canvas.figure.savefig(img.filename(format), dpi=dpi)
|
||||||
|
except Exception as err:
|
||||||
|
raise PlotError(traceback.format_exc())
|
||||||
|
img.formats.append(format)
|
||||||
|
|
||||||
|
results.append((code_piece, images))
|
||||||
|
|
||||||
|
if not context:
|
||||||
|
clear_state(config.plot_rcparams)
|
||||||
|
|
||||||
|
return results
|
||||||
|
|
||||||
|
def run(arguments, content, options, state_machine, state, lineno):
|
||||||
|
# The user may provide a filename *or* Python code content, but not both
|
||||||
|
if arguments and content:
|
||||||
|
raise RuntimeError("plot:: directive can't have both args and content")
|
||||||
|
|
||||||
|
document = state_machine.document
|
||||||
|
config = document.settings.env.config
|
||||||
|
nofigs = 'nofigs' in options
|
||||||
|
|
||||||
|
options.setdefault('include-source', config.plot_include_source)
|
||||||
|
context = 'context' in options
|
||||||
|
|
||||||
|
rst_file = document.attributes['source']
|
||||||
|
rst_dir = os.path.dirname(rst_file)
|
||||||
|
|
||||||
|
if len(arguments):
|
||||||
|
if not config.plot_basedir:
|
||||||
|
source_file_name = os.path.join(setup.app.builder.srcdir,
|
||||||
|
directives.uri(arguments[0]))
|
||||||
|
else:
|
||||||
|
source_file_name = os.path.join(setup.app.builder.srcdir, config.plot_basedir,
|
||||||
|
directives.uri(arguments[0]))
|
||||||
|
|
||||||
|
# If there is content, it will be passed as a caption.
|
||||||
|
caption = '\n'.join(content)
|
||||||
|
|
||||||
|
# If the optional function name is provided, use it
|
||||||
|
if len(arguments) == 2:
|
||||||
|
function_name = arguments[1]
|
||||||
|
else:
|
||||||
|
function_name = None
|
||||||
|
|
||||||
|
fd = open(source_file_name, 'r')
|
||||||
|
code = fd.read()
|
||||||
|
fd.close()
|
||||||
|
output_base = os.path.basename(source_file_name)
|
||||||
|
else:
|
||||||
|
source_file_name = rst_file
|
||||||
|
code = textwrap.dedent("\n".join(map(str, content)))
|
||||||
|
counter = document.attributes.get('_plot_counter', 0) + 1
|
||||||
|
document.attributes['_plot_counter'] = counter
|
||||||
|
base, ext = os.path.splitext(os.path.basename(source_file_name))
|
||||||
|
output_base = '%s-%d.py' % (base, counter)
|
||||||
|
function_name = None
|
||||||
|
caption = ''
|
||||||
|
|
||||||
|
base, source_ext = os.path.splitext(output_base)
|
||||||
|
if source_ext in ('.py', '.rst', '.txt'):
|
||||||
|
output_base = base
|
||||||
|
else:
|
||||||
|
source_ext = ''
|
||||||
|
|
||||||
|
# ensure that LaTeX includegraphics doesn't choke in foo.bar.pdf filenames
|
||||||
|
output_base = output_base.replace('.', '-')
|
||||||
|
|
||||||
|
# is it in doctest format?
|
||||||
|
is_doctest = contains_doctest(code)
|
||||||
|
if 'format' in options:
|
||||||
|
if options['format'] == 'python':
|
||||||
|
is_doctest = False
|
||||||
|
else:
|
||||||
|
is_doctest = True
|
||||||
|
|
||||||
|
# determine output directory name fragment
|
||||||
|
source_rel_name = relpath(source_file_name, setup.app.srcdir)
|
||||||
|
source_rel_dir = os.path.dirname(source_rel_name)
|
||||||
|
while source_rel_dir.startswith(os.path.sep):
|
||||||
|
source_rel_dir = source_rel_dir[1:]
|
||||||
|
|
||||||
|
# build_dir: where to place output files (temporarily)
|
||||||
|
build_dir = os.path.join(os.path.dirname(setup.app.doctreedir),
|
||||||
|
'plot_directive',
|
||||||
|
source_rel_dir)
|
||||||
|
# get rid of .. in paths, also changes pathsep
|
||||||
|
# see note in Python docs for warning about symbolic links on Windows.
|
||||||
|
# need to compare source and dest paths at end
|
||||||
|
build_dir = os.path.normpath(build_dir)
|
||||||
|
|
||||||
|
if not os.path.exists(build_dir):
|
||||||
|
os.makedirs(build_dir)
|
||||||
|
|
||||||
|
# output_dir: final location in the builder's directory
|
||||||
|
dest_dir = os.path.abspath(os.path.join(setup.app.builder.outdir,
|
||||||
|
source_rel_dir))
|
||||||
|
if not os.path.exists(dest_dir):
|
||||||
|
os.makedirs(dest_dir) # no problem here for me, but just use built-ins
|
||||||
|
|
||||||
|
# how to link to files from the RST file
|
||||||
|
dest_dir_link = os.path.join(relpath(setup.app.srcdir, rst_dir),
|
||||||
|
source_rel_dir).replace(os.path.sep, '/')
|
||||||
|
build_dir_link = relpath(build_dir, rst_dir).replace(os.path.sep, '/')
|
||||||
|
source_link = dest_dir_link + '/' + output_base + source_ext
|
||||||
|
|
||||||
|
# make figures
|
||||||
|
try:
|
||||||
|
results = render_figures(code, source_file_name, build_dir, output_base,
|
||||||
|
context, function_name, config)
|
||||||
|
errors = []
|
||||||
|
except PlotError as err:
|
||||||
|
reporter = state.memo.reporter
|
||||||
|
sm = reporter.system_message(
|
||||||
|
2, "Exception occurred in plotting %s\n from %s:\n%s" % (output_base,
|
||||||
|
source_file_name, err),
|
||||||
|
line=lineno)
|
||||||
|
results = [(code, [])]
|
||||||
|
errors = [sm]
|
||||||
|
|
||||||
|
# Properly indent the caption
|
||||||
|
caption = '\n'.join(' ' + line.strip()
|
||||||
|
for line in caption.split('\n'))
|
||||||
|
|
||||||
|
# generate output restructuredtext
|
||||||
|
total_lines = []
|
||||||
|
for j, (code_piece, images) in enumerate(results):
|
||||||
|
if options['include-source']:
|
||||||
|
if is_doctest:
|
||||||
|
lines = ['']
|
||||||
|
lines += [row.rstrip() for row in code_piece.split('\n')]
|
||||||
|
else:
|
||||||
|
lines = ['.. code-block:: python', '']
|
||||||
|
lines += [' %s' % row.rstrip()
|
||||||
|
for row in code_piece.split('\n')]
|
||||||
|
source_code = "\n".join(lines)
|
||||||
|
else:
|
||||||
|
source_code = ""
|
||||||
|
|
||||||
|
if nofigs:
|
||||||
|
images = []
|
||||||
|
|
||||||
|
opts = [':%s: %s' % (key, val) for key, val in list(options.items())
|
||||||
|
if key in ('alt', 'height', 'width', 'scale', 'align', 'class')]
|
||||||
|
|
||||||
|
only_html = ".. only:: html"
|
||||||
|
only_latex = ".. only:: latex"
|
||||||
|
|
||||||
|
if j == 0:
|
||||||
|
src_link = source_link
|
||||||
|
else:
|
||||||
|
src_link = None
|
||||||
|
|
||||||
|
result = format_template(
|
||||||
|
TEMPLATE,
|
||||||
|
dest_dir=dest_dir_link,
|
||||||
|
build_dir=build_dir_link,
|
||||||
|
source_link=src_link,
|
||||||
|
multi_image=len(images) > 1,
|
||||||
|
only_html=only_html,
|
||||||
|
only_latex=only_latex,
|
||||||
|
options=opts,
|
||||||
|
images=images,
|
||||||
|
source_code=source_code,
|
||||||
|
html_show_formats=config.plot_html_show_formats,
|
||||||
|
caption=caption)
|
||||||
|
|
||||||
|
total_lines.extend(result.split("\n"))
|
||||||
|
total_lines.extend("\n")
|
||||||
|
|
||||||
|
if total_lines:
|
||||||
|
state_machine.insert_input(total_lines, source=source_file_name)
|
||||||
|
|
||||||
|
# copy image files to builder's output directory, if necessary
|
||||||
|
if not os.path.exists(dest_dir):
|
||||||
|
cbook.mkdirs(dest_dir)
|
||||||
|
|
||||||
|
for code_piece, images in results:
|
||||||
|
for img in images:
|
||||||
|
for fn in img.filenames():
|
||||||
|
destimg = os.path.join(dest_dir, os.path.basename(fn))
|
||||||
|
if fn != destimg:
|
||||||
|
shutil.copyfile(fn, destimg)
|
||||||
|
|
||||||
|
# copy script (if necessary)
|
||||||
|
#if source_file_name == rst_file:
|
||||||
|
target_name = os.path.join(dest_dir, output_base + source_ext)
|
||||||
|
f = open(target_name, 'w')
|
||||||
|
f.write(unescape_doctest(code))
|
||||||
|
f.close()
|
||||||
|
|
||||||
|
return errors
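
The configuration values registered in setup() above map onto conf.py settings like the following sketch (the values are illustrative, not taken from the diff):

    # conf.py (sketch)
    plot_include_source = True                        # default for :include-source:
    plot_formats = [('png', 80), 'hires.png', 'pdf']  # (suffix, dpi) tuples or suffixes
    plot_rcparams = {'figure.figsize': (4.0, 3.0)}    # applied before each plot
    plot_pre_code = "import numpy as np\nfrom matplotlib import pyplot as plt\n"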
93
doc/sphinxext/sphinx_autorun/__init__.py
Normal file
@@ -0,0 +1,93 @@
# -*- coding: utf-8 -*-
"""
sphinxcontrib.autorun
~~~~~~~~~~~~~~~~~~~~~

Run the code and insert stdout after the code block.
"""
import os
from subprocess import PIPE, Popen

from docutils import nodes
from docutils.parsers.rst import Directive, directives
from sphinx.errors import SphinxError

from sphinx_autorun import version

__version__ = version.version


class RunBlockError(SphinxError):
    category = 'runblock error'


class AutoRun(object):
    here = os.path.abspath(__file__)
    pycon = os.path.join(os.path.dirname(here), 'pycon.py')
    config = {
        'pycon': 'python ' + pycon,
        'pycon_prefix_chars': 4,
        'pycon_show_source': False,
        'console': 'bash',
        'console_prefix_chars': 1,
    }

    @classmethod
    def builder_init(cls, app):
        cls.config.update(app.builder.config.autorun_languages)


class RunBlock(Directive):
    has_content = True
    required_arguments = 1
    optional_arguments = 0
    final_argument_whitespace = False
    option_spec = {
        'linenos': directives.flag,
    }

    def run(self):
        config = AutoRun.config
        language = self.arguments[0]

        if language not in config:
            raise RunBlockError('Unknown language %s' % language)

        # Get configuration values for the language
        args = config[language].split()
        input_encoding = config.get(language+'_input_encoding', 'utf8')
        output_encoding = config.get(language+'_output_encoding', 'utf8')
        prefix_chars = config.get(language+'_prefix_chars', 0)
        show_source = config.get(language+'_show_source', True)

        # Build the code text
        proc = Popen(args, bufsize=1, stdin=PIPE, stdout=PIPE, stderr=PIPE)
        codelines = (line[prefix_chars:] for line in self.content)
        code = u'\n'.join(codelines).encode(input_encoding)

        # Run the code
        stdout, stderr = proc.communicate(code)

        # Process output (stderr wins if both streams produced something)
        out = ''
        if stdout:
            out = stdout.decode(output_encoding)
        if stderr:
            out = stderr.decode(output_encoding)

        # Get the original code with prefixes
        if show_source:
            code = u'\n'.join(self.content)
            code_out = u'\n'.join((code, out))
        else:
            code_out = out

        literal = nodes.literal_block(code_out, code_out)
        literal['language'] = language
        literal['linenos'] = 'linenos' in self.options
        return [literal]


def setup(app):
    app.add_directive('runblock', RunBlock)
    app.connect('builder-inited', AutoRun.builder_init)
    app.add_config_value('autorun_languages', AutoRun.config, 'env')
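
The 'pycon' and 'console' entries above are the built-in languages; a conf.py sketch (not from the diff) of extending them through autorun_languages, with 'sh' as a hypothetical addition:

    # conf.py (sketch)
    autorun_languages = {
        'sh': 'sh',            # hypothetical: pipe the block through sh
        'sh_prefix_chars': 2,  # strip a leading '$ ' from each line
        'sh_show_source': True,
    }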
31
doc/sphinxext/sphinx_autorun/pycon.py
Normal file
@@ -0,0 +1,31 @@
import sys
from code import InteractiveInterpreter


def main():
    """
    Print lines of input along with output.
    """
    source_lines = (line.rstrip() for line in sys.stdin)
    console = InteractiveInterpreter()
    source = ''
    try:
        while True:
            source = next(source_lines)
            # Allow the user to ignore specific lines of output.
            if not source.endswith('# ignore'):
                print('>>>', source)
            more = console.runsource(source)
            while more:
                next_line = next(source_lines)
                print('...', next_line)
                source += '\n' + next_line
                more = console.runsource(source)
    except StopIteration:
        if more:
            print('... ')
            more = console.runsource(source + '\n')


if __name__ == '__main__':
    main()
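
pycon.py is the interpreter that RunBlock launches for the 'pycon' language; a sketch (not part of the diff) of driving it the same way, assuming a 'python' executable on PATH and the repository layout above:

    # sketch: feed a line of code to pycon.py over stdin, as RunBlock does
    from subprocess import Popen, PIPE

    proc = Popen(['python', 'doc/sphinxext/sphinx_autorun/pycon.py'],
                 stdin=PIPE, stdout=PIPE)
    out, _ = proc.communicate(b'1 + 1\n')
    print(out.decode('utf8'))   # '>>> 1 + 1' followed by '2'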
4
doc/sphinxext/sphinx_autorun/version.py
Normal file
@@ -0,0 +1,4 @@
# coding: utf-8
# file generated by setuptools_scm
# don't change, don't track in version control
version = '1.1.1'
123
doc/sphinxext/triqs_example/triqs_example.py
Normal file
@@ -0,0 +1,123 @@
# -*- coding: utf-8 -*-
# seems to be executed at the level of the conf.py
# so we need to link the lib at that place...
"""
"""
import tempfile
import os
import sys
import codecs
from os import path
from subprocess import Popen, PIPE
from docutils import nodes
from docutils.parsers.rst import Directive
from docutils.parsers.rst import directives
from sphinx.errors import SphinxError

class TriqsExampleError(SphinxError):
    category = 'triqs_example error'

class TriqsExampleRun:
    #here = os.path.abspath(__file__)
    #pycon = os.path.join(os.path.dirname(here),'pycon.py')
    config = dict(
        )
    @classmethod
    def builder_init(cls, app):
        #cls.config.update(app.builder.config.autorun_languages)
        #cls.config.update(app.builder.config.autocompile_opts)
        pass

class TriqsExample(Directive):
    has_content = True
    required_arguments = 1
    optional_arguments = 0
    final_argument_whitespace = False
    option_spec = {
        'linenos': directives.flag,
    }

    def run(self):
        document = self.state.document
        filename = self.arguments[0]
        if not document.settings.file_insertion_enabled:
            return [document.reporter.warning('File insertion disabled',
                                              line=self.lineno)]
        env = document.settings.env
        if filename.startswith('/') or filename.startswith(os.sep):
            rel_fn = filename[1:]
        else:
            docdir = path.dirname(env.doc2path(env.docname, base=None))
            rel_fn = path.normpath(path.join(docdir, filename))
        try:
            fn = path.join(env.srcdir, rel_fn)
        except UnicodeDecodeError:
            # the source directory is a bytestring with non-ASCII characters;
            # let's try to encode the rel_fn in the file system encoding
            rel_fn = rel_fn.encode(sys.getfilesystemencoding())
            fn = path.join(env.srcdir, rel_fn)

        encoding = self.options.get('encoding', env.config.source_encoding)
        try:
            f = codecs.open(fn, 'rU', encoding)
            lines = f.readlines()
            f.close()
        except (IOError, OSError):
            return [document.reporter.warning(
                'Include file %r not found or reading it failed' % filename,
                line=self.lineno)]
        except UnicodeError:
            return [document.reporter.warning(
                'Encoding %r used for reading included file %r seems to '
                'be wrong, try giving an :encoding: option' %
                (encoding, filename))]

        config = TriqsExampleRun.config

        # Get configuration values for the language
        input_encoding = 'utf8'   #config.get(language+'_input_encoding','ascii')
        output_encoding = 'utf8'  #config.get(language+'_output_encoding','ascii')
        show_source = True

        # Build the code text
        code = ''.join(lines).strip()
        filename_clean = filename.rsplit('.', 1)[0]
        if filename_clean.startswith('./'): filename_clean = filename_clean[2:]
        #print "Running the example ....",filename_clean
        #print "Root ?", env.doc2path(env.docname, base=None)

        import subprocess as S
        error = True
        try:
            stdout = ''
            #resout = S.check_output("./example_bin/doc_%s"%(filename_clean), stderr=S.STDOUT, shell=True)
            resout = S.check_output("./%s/doc_%s" % (docdir, filename_clean), stderr=S.STDOUT, shell=True)
            if resout:
                # check_output returns bytes under Python 3, so decode here
                stdout = '---------- Result is -------\n' + resout.decode(output_encoding, 'ignore').strip()
            error = False
        except S.CalledProcessError as E:
            stdout = '---------- RunTime error -------\n'
            stdout += E.output.decode(output_encoding, 'ignore')

        # Process output
        out = stdout

        # Get the original code with prefixes
        code_out = '\n'.join((code, out))

        if error:  # report on console
            print(" Error in processing ")
            print(code_out)

        literal = nodes.literal_block(code_out, code_out)
        literal['language'] = 'c'
        literal['linenos'] = 'linenos' in self.options
        return [literal]


def setup(app):
    app.add_directive('triqs_example', TriqsExample)
    app.connect('builder-inited', TriqsExampleRun.builder_init)
|
||||||
|
|
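The directive above expects a pre-built binary named `doc_<example>` next to the source file and splices its stdout under the listing. A hedged sketch of registering both bundled extensions in a Sphinx conf.py follows; the directory layout is an assumption, not dictated by the commit:

```python
# Hypothetical conf.py fragment enabling the two bundled extensions.
import os
import sys

sys.path.insert(0, os.path.abspath('sphinxext'))                 # assumed layout
sys.path.insert(0, os.path.abspath('sphinxext/triqs_example'))   # assumed layout

extensions = [
    'sphinx_autorun',   # provides the .. runblock:: directive
    'triqs_example',    # provides the .. triqs_example:: directive
]
```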
92
doc/themes/agogo/layout.html
vendored
Normal file
@ -0,0 +1,92 @@
{#
    agogo/layout.html
    ~~~~~~~~~~~~~~~~~

    Sphinx layout template for the agogo theme, originally written
    by Andi Albrecht.

    :copyright: Copyright 2007-2010 by the Sphinx team, see AUTHORS.
    :license: BSD, see LICENSE for details.
#}
{% extends "basic/layout.html" %}

{% block header %}
    <div class="header-wrapper">
      <div class="header">
        {%- if logo %}
          <p class="logo"><a href="{{ pathto(theme_index) }}">
            <img class="logo" src="{{ pathto('_static/' + logo, 1) }}" alt="Logo"/>
          </a></p>
        {%- endif %}
        {%- block headertitle %}
        <h1><a href="{{ pathto(theme_index) }}">{{ shorttitle|e }}</a></h1>
        {%- endblock %}
        <div class="rel">
          <a href="http://ipht.cea.fr/triqs" title="TRIQS Home">HOME</a> |
          {%- for rellink in rellinks %}
          <a href="{{ pathto(rellink[0]) }}" title="{{ rellink[1]|striptags|e }}"
             {{ accesskey(rellink[2]) }}>{{ rellink[3] }}</a>
          {%- if not loop.last %}{{ reldelim2 }}{% endif %}
          {%- endfor %}
        </div>
      </div>
    </div>
{% endblock %}

{% block content %}
    <div class="content-wrapper">
      <div class="content">
        <div class="document">
          {%- block document %}
            {{ super() }}
          {%- endblock %}
        </div>
        <div class="sidebar">
          {%- block sidebartoc %}
          <h3>{{ _('Table Of Contents') }}</h3>
          {{ toctree(maxdepth=3) }}
          {%- endblock %}
          {%- block sidebarsearch %}
          <h3 style="margin-top: 1.5em;">{{ _('Search') }}</h3>
          <form class="search" action="{{ pathto('search') }}" method="get">
            <input type="text" name="q" size="18" />
            <input type="submit" value="{{ _('Go') }}" />
            <input type="hidden" name="check_keywords" value="yes" />
            <input type="hidden" name="area" value="default" />
          </form>
          <p class="searchtip" style="font-size: 90%">
            {{ _('Enter search terms or a module, class or function name.') }}
          </p>
          {%- endblock %}
        </div>
        <div class="clearer"></div>
      </div>
    </div>
{% endblock %}

{% block footer %}
    <div class="footer-wrapper">
      <div class="footer">
        <div class="left">
          {%- for rellink in rellinks %}
          <a href="{{ pathto(rellink[0]) }}" title="{{ rellink[1]|striptags|e }}"
             {{ accesskey(rellink[2]) }}>{{ rellink[3] }}</a>
          {%- if not loop.last %}{{ reldelim2 }}{% endif %}
          {%- endfor %}
          {%- if show_source and has_source and sourcename %}
          <br/>
          <a href="{{ pathto('_sources/' + sourcename, true)|e }}"
             rel="nofollow">{{ _('Show Source') }}</a>
          {%- endif %}
        </div>

        <div class="right">
          {{ super() }}
        </div>
        <div class="clearer"></div>
      </div>
    </div>
{% endblock %}

{% block relbar1 %}{% endblock %}
{% block relbar2 %}{% endblock %}
519
doc/themes/agogo/static/agogo.css_t
vendored
Normal file
@ -0,0 +1,519 @@
/*
 * agogo.css_t
 * ~~~~~~~~~~~
 *
 * Sphinx stylesheet -- agogo theme.
 *
 * :copyright: Copyright 2007-2010 by the Sphinx team, see AUTHORS.
 * :license: BSD, see LICENSE for details.
 *
 */

* {
  margin: 0px;
  padding: 0px;
}

body {
  font-family: {{ theme_bodyfont }};
  font-size: 100%;     /* TRIQS */
  line-height: 1.0em;  /* TRIQS */
  color: black;
  background-color: {{ theme_bgcolor }};
}


/* Page layout */

div.header, div.content, div.footer {
  width: {{ theme_pagewidth }};
  margin-left: auto;
  margin-right: auto;
}

div.header-wrapper {
  background: {{ theme_headerbg }};
  border-bottom: 3px solid #2e3436;
}


/* Default body styles */
a {
  color: {{ theme_linkcolor }};
}

div.bodywrapper a, div.footer a {
  text-decoration: none;  /* TRIQS */
}

div.bodywrapper a:hover, div.footer a:hover {
  text-decoration: underline;  /* TRIQS */
}

.clearer {
  clear: both;
}

.left {
  float: left;
}

.right {
  float: right;
}

.line-block {
  display: block;
  margin-top: 1em;
  margin-bottom: 1em;
}

.line-block .line-block {
  margin-top: 0;
  margin-bottom: 0;
  margin-left: 1.5em;
}

h1, h2, h3, h4 {
  font-family: {{ theme_headerfont }};
  font-weight: normal;
  color: {{ theme_headercolor2 }};
  margin-bottom: .8em;
}

h1 {
  color: {{ theme_headercolor1 }};
}

h2 {
  padding-bottom: .5em;
  border-bottom: 1px solid {{ theme_headercolor2 }};
}

a.headerlink {
  visibility: hidden;
  color: #dddddd;
  padding-left: .3em;
}

h1:hover > a.headerlink,
h2:hover > a.headerlink,
h3:hover > a.headerlink,
h4:hover > a.headerlink,
h5:hover > a.headerlink,
h6:hover > a.headerlink,
dt:hover > a.headerlink {
  visibility: visible;
}

img {
  border: 0;
}

div.admonition {
  margin-top: 10px;
  margin-bottom: 10px;
  padding: 2px 7px 1px 7px;
  border-left: 0.2em solid black;
}

p.admonition-title {
  margin: 0px 10px 5px 0px;
  font-weight: bold;
  color: red;  /* TRIQS */
}

dt:target, .highlighted {
  background-color: #fbe54e;
}

/* Header */

div.header {
  padding-top: 10px;
  padding-bottom: 10px;
}

div.header h1 {
  font-family: {{ theme_headerfont }};
  font-weight: normal;
  font-size: 180%;
  letter-spacing: .08em;
}

div.header h1 a {
  color: white;
}

div.header div.rel {
  margin-top: -1em;  /* TRIQS */
}

div.header div.rel a {
  color: {{ theme_headerlinkcolor }};
  letter-spacing: .1em;
  text-transform: uppercase;
}

p.logo {
  float: right;
}

img.logo {
  border: 0;
}


/* Content */
div.content-wrapper {
  background-color: white;
  padding-top: 20px;
  padding-bottom: 20px;
}

div.document {
  width: {{ theme_documentwidth }};
  float: left;
}

div.body {
  padding-right: 2em;
  text-align: {{ theme_textalign }};
}

div.document ul {
  margin: 1.5em;
  list-style-type: square;
}

div.document dd {
  margin-left: 1.2em;
  margin-top: .4em;
  margin-bottom: 1em;
}

div.document .section {
  margin-top: 1.7em;
}
div.document .section:first-child {
  margin-top: 0px;
}

div.document div.highlight {
  line-height: 1.0em;  /* TRIQS */
  padding: 3px;
  background-color: #eeeeec;
  border-top: 2px solid #dddddd;
  border-bottom: 2px solid #dddddd;
  margin-top: .8em;
  margin-bottom: .8em;
}

div.document h2 {
  margin-top: .7em;
}

div.document p {
  margin-bottom: .5em;
}

div.document li.toctree-l1 {
  margin-bottom: 0em;  /* TRIQS */
}
div.document li.toctree-l2 {
  margin-bottom: 0em;  /* TRIQS */
}

div.document .descname {
  font-weight: bold;
  color: blue;  /* TRIQS */
}

div.document .docutils.literal {
  background-color: #eeeeec;
  padding: 1px;
}

div.document .docutils.xref.literal {
  background-color: transparent;
  padding: 0px;
}

div.document blockquote {
  margin: 1em;
}

div.document ol {
  margin: 1.5em;
}


/* Sidebar */

div.sidebar {
  width: {{ theme_sidebarwidth }};
  float: right;
  font-size: .9em;
}

div.sidebar a, div.header a {
  text-decoration: none;
}

div.sidebar a:hover, div.header a:hover {
  text-decoration: underline;
}

div.sidebar h3 {
  color: #2e3436;
  text-transform: uppercase;
  font-size: 130%;
  letter-spacing: .1em;
}

div.sidebar ul {
  list-style-type: none;
}

div.sidebar li.toctree-l1 a {
  display: block;
  padding: 1px;
  border: 1px solid #dddddd;
  background-color: #eeeeec;
  margin-bottom: .4em;
  padding-left: 3px;
  color: #2e3436;
}

div.sidebar li.toctree-l2 a {
  background-color: transparent;
  border: none;
  margin-left: 1em;
  border-bottom: 1px solid #dddddd;
}

div.sidebar li.toctree-l3 a {
  background-color: transparent;
  border: none;
  margin-left: 2em;
  border-bottom: 1px solid #dddddd;
}

div.sidebar li.toctree-l2:last-child a {
  border-bottom: none;
}

div.sidebar li.toctree-l1.current a {
  border-right: 5px solid {{ theme_headerlinkcolor }};
}

div.sidebar li.toctree-l1.current li.toctree-l2 a {
  border-right: none;
}


/* Footer */

div.footer-wrapper {
  background: {{ theme_footerbg }};
  border-top: 4px solid #babdb6;
  padding-top: 10px;
  padding-bottom: 10px;
  min-height: 80px;
}

div.footer, div.footer a {
  color: #888a85;
}

div.footer .right {
  text-align: right;
}

div.footer .left {
  text-transform: uppercase;
}


/* Styles copied from basic theme */

/* -- search page ----------------------------------------------------------- */

ul.search {
  margin: 10px 0 0 20px;
  padding: 0;
}

ul.search li {
  padding: 5px 0 5px 20px;
  background-image: url(file.png);
  background-repeat: no-repeat;
  background-position: 0 7px;
}

ul.search li a {
  font-weight: bold;
}

ul.search li div.context {
  color: #888;
  margin: 2px 0 0 30px;
  text-align: left;
}

ul.keywordmatches li.goodmatch a {
  font-weight: bold;
}

/* -- index page ------------------------------------------------------------ */

table.contentstable {
  width: 100%;
}

table.contentstable td {
  padding: 10px 10px 10px 0;
}

table.contentstable p.biglink {
  line-height: 150%;
  text-align: left;
}

a.biglink {
  font-size: 1.3em;
}

span.linkdescr {
  font-style: italic;
  padding-top: 5px;
  font-size: 90%;
}

/* -- general index --------------------------------------------------------- */

table.indextable td {
  text-align: left;
  vertical-align: top;
}

table.indextable dl, table.indextable dd {
  margin-top: 0;
  margin-bottom: 0;
}

table.indextable tr.pcap {
  height: 10px;
}

table.indextable tr.cap {
  margin-top: 10px;
  background-color: #f2f2f2;
}

img.toggler {
  margin-right: 3px;
  margin-top: 3px;
  cursor: pointer;
}

/* -- viewcode extension ---------------------------------------------------- */

.viewcode-link {
  float: right;
}

.viewcode-back {
  float: right;
  font-family: {{ theme_bodyfont }};
}

div.viewcode-block:target {
  margin: -1px -3px;
  padding: 0 3px;
  background-color: #f4debf;
  border-top: 1px solid #ac9;
  border-bottom: 1px solid #ac9;
}

/* -- added for TRIQS ------------------------------------------------------- */

div.warning {
  margin: 1em 0 1em 0;
  border: 1px solid #86989B;
  background-color: lightpink;  /* #f7f7f7 */
}

div.note {
  margin: 1em 0 1em 0;
  border: 1px solid #86989B;
  background-color: lightcyan;  /* #f7f7f7 */
}

div.note p.admonition-title {
  margin: 0px 10px 5px 0px;
  font-weight: bold;
  color: blue;
}

div.highlight-python {
  margin-top: 5px;
  margin-bottom: 7px;
  background-color: lightcyan;  /* #f4debf */
}

/* -- math display ---------------------------------------------------------- */

img.math {
  vertical-align: middle;
}

div.body div.math p {
  text-align: center;
}

span.eqno {
  float: right;
}

/* -- tables ---------------------------------------------------------------- */

table.docutils {
  border: 0;
  border-collapse: collapse;
}

table.docutils head {
  color: blue;
}

table.docutils td, table.docutils th {
  padding: 1px 8px 1px 5px;
  border-top: 0;
  border-left: 0;
  border-right: 0;
  border-bottom: 1px solid #aaa;
}

table.field-list td, table.field-list th {
  border: 0 !important;
}

table.footnote td, table.footnote th {
  border: 0 !important;
}

th {
  text-align: left;
  padding-right: 5px;
}

table.citation {
  border-left: solid 1px gray;
  margin-left: 1px;
}

table.citation td {
  border-bottom: none;
}
20
doc/themes/agogo/theme.conf
vendored
Normal file
@ -0,0 +1,20 @@
[theme]
inherit = basic
stylesheet = agogo.css
pygments_style = tango

[options]
bodyfont = "Verdana", Arial, sans-serif
headerfont = "Georgia", "Times New Roman", serif
pagewidth = 80%
documentwidth = 75%
sidebarwidth = 22.5%
bgcolor = #eeeeec
headerbg = url(bgtop.png) top left repeat-x
footerbg = url(bgfooter.png) top left repeat-x
linkcolor = #ce5c00
headercolor1 = #204a87
headercolor2 = #3465a4
headerlinkcolor = #fcaf3e
textalign = justify
index = index
52
doc/themes/triqs/layout.html
vendored
Normal file
@ -0,0 +1,52 @@
{#
    layout.html
    ~~~~~~~~~~~

    TRIQS layout template heavily based on the sphinxdoc theme.

    :copyright: Copyright 2013 by the TRIQS team.
    :copyright: Copyright 2007-2013 by the Sphinx team.
    :license: BSD, see LICENSE for details.
#}
{%- extends "basic/layout.html" %}

{# put the sidebar before the body #}
{% block sidebar1 %}{{ sidebar() }}{% endblock %}
{% block sidebar2 %}{% endblock %}

{% block extrahead %}
    <link href='https://fonts.googleapis.com/css?family=Open+Sans:300,400,700' rel='stylesheet' type='text/css'>
    <script src="{{ pathto('_static/cufon-yui.js',1) }}" type="text/javascript"></script>
    <script src="{{ pathto('_static/spaceman.cufonfonts.js',1) }}" type="text/javascript"></script>
    <script type="text/javascript">
      Cufon.replace('.triqs', { fontFamily: 'spaceman', hover: true });
    </script>
    {{ super() }}
    {%- if not embedded %}
    <style type="text/css">
      table.right { float: right; margin-left: 20px; }
      table.right td { border: 1px solid #ccc; }
      {% if pagename == 'index' %}
      .related { display: none; }
      {% endif %}
    </style>
    {%- endif %}
{% endblock %}

{% block rootrellink %}
    <li><a href="{{ pathto('index') }}">Home</a> »</li>
{% endblock %}

{% block header %}
    <div class="pageheader">
      <ul>
        {% for header_link in header_links %}
        <li><a href="{{ pathto(header_link[1]) }}">{{ header_link[0] }}</a></li>
        {% endfor %}
      </ul>
      <div>
        <h1 style="padding:0; margin: 10px 0 0 0;"><a class="triqs" href="{{ pathto('index') }}">{{ header_title }}</a></h1>
        <span style="font-size: 14px; margin: 0px; padding: 0px;">{{ header_subtitle }}</span>
      </div>
    </div>
{% endblock %}
7
doc/themes/triqs/static/cufon-yui.js
vendored
Normal file
File diff suppressed because one or more lines are too long
8
doc/themes/triqs/static/spaceman.cufonfonts.js
vendored
Normal file
File diff suppressed because one or more lines are too long
449
doc/themes/triqs/static/triqs.css
vendored
Normal file
@ -0,0 +1,449 @@
/*
 * sphinx13.css
 * ~~~~~~~~~~~~
 *
 * Sphinx stylesheet -- sphinx13 theme.
 *
 * :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS.
 * :license: BSD, see LICENSE for details.
 *
 */

@import url("basic.css");

/* -- page layout ----------------------------------------------------------- */

body {
    font-family: 'Open Sans', 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva',
                 'Verdana', sans-serif;
    font-size: 14px;
    text-align: center;
    background-image: url(bodybg.png);
    color: black;
    padding: 0;
    border-right: 1px solid #0a507a;
    border-left: 1px solid #0a507a;

    margin: 0 auto;
    min-width: 780px;
    max-width: 1080px;
}


.red {
    color: red;
}
.blue {
    color: blue;
}
.green {
    color: green;
}

.param {
    color: blue;
}

a.triqs {
    color: #073958;
    text-decoration: none;
}

a.triqs:hover {
    color: #0a527f;
    text-decoration: none;
}

.pageheader {
    background-color: #dcdcdc;
    text-align: left;
    padding: 10px 15px;
    color: #073958;
    border: none;
}

.pageheader ul {
    float: right;
    color: #073958;
    list-style-type: none;
    padding-left: 0;
    margin-top: 22px;
    margin-right: 10px;
}

.pageheader li {
    float: left;
    margin: 0 0 0 10px;
}

.pageheader li a {
    padding: 8px 12px;
    color: #073958;
    text-shadow: none;
}

.pageheader li a:hover {
    background-color: #f9f9f0;
    color: #0a507a;
    text-shadow: none;
}

div.document {
    background-color: white;
    text-align: left;
}

div.bodywrapper {
    margin: 0 240px 0 0;
    border-right: 1px solid #0a507a;
}

div.body {
    margin: 0;
    padding: 0.5em 20px 20px 20px;
}

div.related {
    font-size: 1em;
    color: white;
}

div.related ul {
    background-image: url(relbg.png);
    height: 1.9em;
    border-top: 1px solid #002e50;
    border-bottom: 1px solid #002e50;
}

div.related ul li {
    margin: 0 5px 0 0;
    padding: 0;
    float: left;
}

div.related ul li.right {
    float: right;
    margin-right: 5px;
}

div.related ul li a {
    margin: 0;
    padding: 0 5px 0 5px;
    line-height: 1.75em;
    color: #f9f9f0;
    text-shadow: 0px 0px 1px rgba(0, 0, 0, 0.5);
}

div.related ul li a:hover {
    color: white;
    /*text-decoration: underline;*/
    text-shadow: 0px 0px 1px rgba(255, 255, 255, 0.5);
}

div.sphinxsidebarwrapper {
    position: relative;
    top: 0px;
    padding: 0;
}

div.sphinxsidebar {
    margin: 0;
    padding: 0 15px 15px 0;
    width: 210px;
    float: right;
    font-size: 1em;
    text-align: left;
}

div.sphinxsidebar .logo {
    font-size: 1.8em;
    color: #0A507A;
    font-weight: 300;
    text-align: center;
}

div.sphinxsidebar .logo img {
    vertical-align: middle;
}

div.sphinxsidebar input {
    border: 1px solid #aaa;
    font-family: 'Open Sans', 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva',
                 'Verdana', sans-serif;
    font-size: 1em;
}

div.sphinxsidebar h3 {
    font-size: 1.5em;
    border-top: 1px solid #0a507a;
    margin-top: 1em;
    margin-bottom: 0.5em;
    padding-top: 0.5em;
}

div.sphinxsidebar h4 {
    font-size: 1.2em;
    margin-bottom: 0;
}

div.sphinxsidebar h3, div.sphinxsidebar h4 {
    margin-right: -15px;
    margin-left: -15px;
    padding-right: 14px;
    padding-left: 14px;
    color: #333;
    font-weight: 300;
    /*text-shadow: 0px 0px 0.5px rgba(0, 0, 0, 0.4);*/
}

div.sphinxsidebarwrapper > h3:first-child {
    margin-top: 0.5em;
    border: none;
}

div.sphinxsidebar h3 a {
    color: #333;
}

div.sphinxsidebar ul {
    color: #444;
    margin-top: 7px;
    padding: 0;
    line-height: 130%;
}

div.sphinxsidebar ul ul {
    margin-left: 20px;
    list-style-image: url(listitem.png);
}

div.footer {
    background-image: url(footerbg.png);
    color: #ccc;
    text-shadow: 0 0 .2px rgba(255, 255, 255, 0.8);
    padding: 3px 8px 3px 0;
    clear: both;
    font-size: 0.8em;
    text-align: right;
}

/* no need to make a visible link to Sphinx on the Sphinx page */
div.footer a {
    color: #ccc;
}

/* -- body styles ----------------------------------------------------------- */

p {
    margin: 0.8em 0 0.5em 0;
}

a {
    color: #A2881D;
    text-decoration: none;
}

a:hover {
    color: #E1C13F;
}

div.body a {
    text-decoration: underline;
}

h1 {
    margin: 10px 0 0 0;
    font-size: 2.4em;
    color: #0A507A;
    font-weight: 300;
}

h2 {
    margin: 1em 0 0.2em 0;
    font-size: 1.5em;
    font-weight: 300;
    padding: 0;
    color: #174967;
}

h3 {
    margin: 1em 0 -0.3em 0;
    font-size: 1.3em;
    font-weight: 300;
}

div.body h1 a, div.body h2 a, div.body h3 a, div.body h4 a, div.body h5 a, div.body h6 a {
    text-decoration: none;
}

div.body h1 a tt, div.body h2 a tt, div.body h3 a tt, div.body h4 a tt, div.body h5 a tt, div.body h6 a tt {
    color: #0A507A !important;
    font-size: inherit !important;
}

a.headerlink {
    color: #0A507A !important;
    font-size: 12px;
    margin-left: 6px;
    padding: 0 4px 0 4px;
    text-decoration: none !important;
    float: right;
}

a.headerlink:hover {
    background-color: #ccc;
    color: white !important;
}

cite, code, tt {
    font-family: 'Consolas', 'DejaVu Sans Mono',
                 'Bitstream Vera Sans Mono', monospace;
    font-size: 14px;
    letter-spacing: -0.02em;
}

tt {
    background-color: #f2f2f2;
    border: 1px solid #ddd;
    border-radius: 2px;
    color: #333;
    padding: 1px;
}

tt.descname, tt.descclassname, tt.xref {
    border: 0;
}

hr {
    border: 1px solid #abc;
    margin: 2em;
}

a tt {
    border: 0;
    color: #a2881d;
}

a tt:hover {
    color: #e1c13f;
}

pre {
    font-family: 'Consolas', 'DejaVu Sans Mono',
                 'Bitstream Vera Sans Mono', monospace;
    font-size: 13px;
    letter-spacing: 0.015em;
    line-height: 120%;
    padding: 0.5em;
    border: 1px solid #ccc;
    border-radius: 2px;
    background-color: #f8f8f8;
}

pre a {
    color: inherit;
    text-decoration: underline;
}

td.linenos pre {
    padding: 0.5em 0;
}

div.quotebar {
    background-color: #f8f8f8;
    max-width: 250px;
    float: right;
    padding: 0px 7px;
    border: 1px solid #ccc;
    margin-left: 1em;
}

div.topic {
    background-color: #f8f8f8;
}

table {
    border-collapse: collapse;
    margin: 0 -0.5em 0 -0.5em;
}

table td, table th {
    padding: 0.2em 0.5em 0.2em 0.5em;
}

div.admonition, div.warning {
    font-size: 0.9em;
    margin: 1em 0 1em 0;
    border: 1px solid #86989B;
    border-radius: 2px;
    background-color: #f7f7f7;
    padding: 0;
}

div.admonition p, div.warning p {
    margin: 0.5em 1em 0.5em 1em;
    padding: 0;
}

div.admonition pre, div.warning pre {
    margin: 0.4em 1em 0.4em 1em;
}

div.admonition p.admonition-title,
div.warning p.admonition-title {
    margin-top: 1em;
    padding-top: 0.5em;
    font-weight: bold;
}

div.warning {
    border: 1px solid #940000;
    /* background-color: #FFCCCF;*/
}

div.warning p.admonition-title {
}

div.admonition ul, div.admonition ol,
div.warning ul, div.warning ol {
    margin: 0.1em 0.5em 0.5em 3em;
    padding: 0;
}

div.admonition .highlight, div.warning .highlight {
    background-color: #f7f7f7;
}

.viewcode-back {
    font-family: 'Open Sans', 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva',
                 'Verdana', sans-serif;
}

div.viewcode-block:target {
    background-color: #f4debf;
    border-top: 1px solid #ac9;
    border-bottom: 1px solid #ac9;
}


.my-code-block.std-ref {
    color: red;
}

.cppbrief {
    color: #C6792C;
    font-style: oblique;
}

.cppsynopsis {
    background-color: #E7EDF9;
    /*font-family: 'Open Sans', 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva', 'Verdana', sans-serif;*/
    /*font-family: monospace; */
    font-family: Verdana, Arial, Lucida Console;
    font-size: 80%;
    /*font-style: oblique;*/
    /* white-space: pre;*/
}
4
doc/themes/triqs/theme.conf
vendored
Normal file
@ -0,0 +1,4 @@
[theme]
inherit = basic
stylesheet = triqs.css
pygments_style = sphinx
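The two theme.conf files above are ordinary Sphinx themes; a hedged conf.py fragment for selecting the triqs theme is sketched below (the relative path is an assumption about the doc layout, not something this commit specifies):

```python
# Hypothetical conf.py fragment: use the bundled TRIQS theme.
html_theme = 'triqs'
html_theme_path = ['themes']   # assumed: the directory holding triqs/theme.conf
```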
@ -96,7 +96,7 @@ In order to run DFT+DMFT calculations within Hubbard-I we need the corresponding
with the CT-QMC solver (see :ref:`singleshot`), however there are also some differences. First
difference is that we import the Hubbard-I solver by::

    from pytriqs.applications.impurity_solvers.hubbard_I.hubbard_solver import Solver
    from triqs.applications.impurity_solvers.hubbard_I.hubbard_solver import Solver

The Hubbard-I solver is very fast and we do not need to take into account the DFT block structure
or use any approximation for the *U*-matrix. We load and convert the :program:`dmftproj` output
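This hunk shows the rename pattern that recurs through the rest of this merge: `pytriqs.*` becomes `triqs.*`, and `pytriqs.archive` becomes `h5`. Purely as an illustration (the try/except shim below is not part of the commit), a script that must run on both layouts could guard its imports like this:

```python
# Hypothetical compatibility shim, not part of this merge: prefer the new
# Python-3-era module layout and fall back to the old pytriqs names.
try:
    import triqs.utility.mpi as mpi      # new layout (this commit)
    from triqs.gf import *
    from h5 import HDFArchive
except ImportError:
    import pytriqs.utility.mpi as mpi    # old layout (pre Python 3 port)
    from pytriqs.gf import *
    from pytriqs.archive import HDFArchive
```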
@ -1,6 +1,6 @@
from triqs_dft_tools.sumk_dft import *
from triqs_dft_tools.converters.wien2k_converter import *
from triqs_dft_tools.converters.wien2k import *
from pytriqs.applications.impurity_solvers.hubbard_I.hubbard_solver import Solver
from triqs.applications.impurity_solvers.hubbard_I.hubbard_solver import Solver

import os
dft_filename = os.getcwd().rpartition('/')[2]
@ -111,7 +111,7 @@ for iteration_number in range(1,Loops+1):
    #Save essential SumkDFT data:
    SK.save(['chemical_potential','dc_imp','dc_energ','correnerg'])
    if (mpi.is_master_node()):
        print 'DC after solver: ',SK.dc_imp[0]
        print('DC after solver: ',SK.dc_imp[0])

    # print out occupancy matrix of Ce 4f
    mpi.report("Orbital densities of impurity Green function:")
@ -1,6 +1,6 @@
from triqs_dft_tools.sumk_dft_tools import *
from triqs_dft_tools.converters.wien2k_converter import *
from triqs_dft_tools.converters.wien2k import *
from pytriqs.applications.impurity_solvers.hubbard_I.hubbard_solver import Solver
from triqs.applications.impurity_solvers.hubbard_I.hubbard_solver import Solver

# Creates the data directory, cd into it:
#Prepare_Run_Directory(DirectoryName = "Ce-Gamma")
@ -31,7 +31,7 @@ SK.dc_imp = mpi.bcast(SK.dc_imp)
SK.dc_energ = mpi.bcast(SK.dc_energ)

if (mpi.is_master_node()):
    print 'DC after reading SK: ',SK.dc_imp[0]
    print('DC after reading SK: ',SK.dc_imp[0])

N = SK.corr_shells[0]['dim']
l = SK.corr_shells[0]['l']
@ -1,12 +1,12 @@
from itertools import *
import numpy as np
import pytriqs.utility.mpi as mpi
import triqs.utility.mpi as mpi
from pytriqs.archive import *
from h5 import *
from pytriqs.gf import *
from triqs.gf import *
from triqs_dft_tools.sumk_dft import *
from triqs_dft_tools.sumk_dft_tools import *
from pytriqs.operators.util.hamiltonians import *
from triqs.operators.util.hamiltonians import *
from pytriqs.operators.util.U_matrix import *
from triqs.operators.util.U_matrix import *
from triqs_cthyb import *
import warnings
warnings.filterwarnings("ignore", category=FutureWarning)
@ -39,7 +39,7 @@ if mpi.is_master_node():
    if not 'Iterations' in ar['DMFT_results']: ar['DMFT_results'].create_group('Iterations')
    if 'iteration_count' in ar['DMFT_results']:
        iteration_offset = ar['DMFT_results']['iteration_count']+1
        print('offset',iteration_offset)
        print(('offset',iteration_offset))
        Sigma_iw = ar['DMFT_results']['Iterations']['Sigma_it'+str(iteration_offset-1)]
        SK.dc_imp = ar['DMFT_results']['Iterations']['dc_imp'+str(iteration_offset-1)]
        SK.dc_energ = ar['DMFT_results']['Iterations']['dc_energ'+str(iteration_offset-1)]
@ -54,13 +54,13 @@ SK.chemical_potential = mpi.bcast(SK.chemical_potential)

SK.put_Sigma(Sigma_imp = [Sigma_iw])

ikarray = numpy.array(range(SK.n_k))
ikarray = numpy.array(list(range(SK.n_k)))

# set up the orbitally resolved local lattice greens function:
n_orbs = SK.proj_mat_csc.shape[2]
spn = SK.spin_block_names[SK.SO]
mesh = Sigma_iw.mesh
block_structure = [range(n_orbs) for sp in spn]
block_structure = [list(range(n_orbs)) for sp in spn]
gf_struct = [(spn[isp], block_structure[isp])
             for isp in range(SK.n_spin_blocks[SK.SO])]
block_ind_list = [block for block, inner in gf_struct]
@ -1,11 +1,11 @@
# Import the modules:
from triqs_dft_tools.sumk_dft import *
from pytriqs.gf import *
from triqs.gf import *
from pytriqs.archive import HDFArchive
from h5 import HDFArchive
from pytriqs.operators.util import *
from triqs.operators.util import *
from pytriqs.operators.util.U_matrix import *
from triqs.operators.util.U_matrix import *
from triqs_cthyb import *
import pytriqs.utility.mpi as mpi
import triqs.utility.mpi as mpi

# Init the SumK class:
filename = 'Sr2MgOsO6_SOC.h5'
@ -1,14 +1,14 @@
# Import the modules:
from triqs_dft_tools.sumk_dft import *
from pytriqs.gf import *
from triqs.gf import *
from pytriqs.archive import HDFArchive
from h5 import HDFArchive
from pytriqs.operators.util import *
from triqs.operators.util import *
from pytriqs.operators.util.U_matrix import *
from triqs.operators.util.U_matrix import *
from triqs_cthyb import *
import pytriqs.utility.mpi as mpi
import triqs.utility.mpi as mpi

# Convert the input
from triqs_dft_tools.converters.wien2k_converter import *
from triqs_dft_tools.converters.wien2k import *
Converter = Wien2kConverter(filename = "Sr2MgOsO6_noSOC")
Converter.convert_dft_input()
@ -1,3 +1,3 @@
from triqs_dft_tools.converters.vasp_converter import *
from triqs_dft_tools.converters.vasp import *
Converter = VaspConverter(filename = 'nio')
Converter.convert_dft_input()
@ -1,10 +1,10 @@
import pytriqs.utility.mpi as mpi
import triqs.utility.mpi as mpi
from pytriqs.operators.util import *
from triqs.operators.util import *
from pytriqs.archive import HDFArchive
from h5 import HDFArchive
from triqs_cthyb import *
from pytriqs.gf import *
from triqs.gf import *
from triqs_dft_tools.sumk_dft import *
from triqs_dft_tools.converters.wien2k_converter import *
from triqs_dft_tools.converters.wien2k import *

dft_filename='SrVO3'
beta = 40
@ -49,7 +49,7 @@ p["fit_min_n"] = 30
p["fit_max_n"] = 60

# If conversion step was not done, we could do it here. Uncomment the lines if you want to do this.
#from triqs_dft_tools.converters.wien2k_converter import *
#from triqs_dft_tools.converters.wien2k import *
#Converter = Wien2kConverter(filename=dft_filename, repacking=True)
#Converter.convert_dft_input()
#mpi.barrier()
@ -76,7 +76,7 @@ spin_names = ["up","down"]
orb_names = [i for i in range(n_orb)]

# Use GF structure determined by DFT blocks
gf_struct = [(block, indices) for block, indices in SK.gf_struct_solver[0].iteritems()]
gf_struct = [(block, indices) for block, indices in SK.gf_struct_solver[0].items()]

# Construct Solver
S = Solver(beta=beta, gf_struct=gf_struct)
@ -97,7 +97,7 @@ if previous_present:
    SK.set_dc(dc_imp,dc_energ)

for iteration_number in range(1,loops+1):
    if mpi.is_master_node(): print "Iteration = ", iteration_number
    if mpi.is_master_node(): print("Iteration = ", iteration_number)

    SK.symm_deg_gf(S.Sigma_iw,orb=0)  # symmetrise Sigma
    SK.set_Sigma([ S.Sigma_iw ])      # set Sigma into the SumK class
@ -1,5 +1,5 @@
from pytriqs.gf import *
from triqs.gf import *
from pytriqs.archive import *
from h5 import *
from triqs_maxent import *

filename = 'nio'
@ -12,7 +12,7 @@ if 'iteration_count' in ar['DMFT_results']:

tm = TauMaxEnt(cost_function='bryan', probability='normal')

print(G_latt['up'][0,0])
print((G_latt['up'][0,0]))
t2g_orbs = [0,1,3]
eg_orbs = [2,4]
op_orbs = [5,6,7]
@ -22,7 +22,7 @@ orbs = [t2g_orbs, eg_orbs, op_orbs]

for orb in orbs:

    print '\n'+str(orb[0])+'\n'
    print('\n'+str(orb[0])+'\n')

    gf = 0*G_latt['up'][0,0]
    for iO in orb:
@ -43,7 +43,7 @@ for orb in orbs:


    # you may be interested in the details of the line analyzer:
    # from pytriqs.plot.mpl_interface import oplot
    # from triqs.plot.mpl_interface import oplot
    #plt.figure(2)
    #result.analyzer_results['LineFitAnalyzer'].plot_linefit()
    #plt.savefig('ana'+str(orb[0])+'.pdf',fmt='pdf')
@ -1,13 +1,13 @@
from itertools import *
import numpy as np
import pytriqs.utility.mpi as mpi
import triqs.utility.mpi as mpi
from pytriqs.archive import *
from h5 import *
from pytriqs.gf import *
from triqs.gf import *
import sys, pytriqs.version as triqs_version
import sys, triqs.version as triqs_version
from triqs_dft_tools.sumk_dft import *
from triqs_dft_tools.sumk_dft_tools import *
from pytriqs.operators.util.hamiltonians import *
from triqs.operators.util.hamiltonians import *
from pytriqs.operators.util.U_matrix import *
from triqs.operators.util.U_matrix import *
from triqs_cthyb import *
import triqs_cthyb.version as cthyb_version
import triqs_dft_tools.version as dft_tools_version
@ -30,7 +30,7 @@ for i_sh in range(len(SK.deg_shells)):
    mpi.report('found {0:d} blocks of degenerate orbitals in shell {1:d}'.format(num_block_deg_orbs, i_sh))
    for iblock in range(num_block_deg_orbs):
        mpi.report('block {0:d} consists of orbitals:'.format(iblock))
        for keys in SK.deg_shells[i_sh][iblock].keys():
        for keys in list(SK.deg_shells[i_sh][iblock].keys()):
            mpi.report(' '+keys)

# Setup CTQMC Solver
@ -1,17 +1,17 @@
from itertools import *
import numpy as np
import pytriqs.utility.mpi as mpi
import triqs.utility.mpi as mpi
from pytriqs.archive import *
from h5 import *
from pytriqs.gf import *
from triqs.gf import *
import sys, pytriqs.version as triqs_version
import sys, triqs.version as triqs_version
from triqs_dft_tools.sumk_dft import *
from triqs_dft_tools.sumk_dft_tools import *
from pytriqs.operators.util.hamiltonians import *
from triqs.operators.util.hamiltonians import *
from pytriqs.operators.util.U_matrix import *
from triqs.operators.util.U_matrix import *
from triqs_cthyb import *
import triqs_cthyb.version as cthyb_version
import triqs_dft_tools.version as dft_tools_version
from triqs_dft_tools.converters.vasp_converter import *
from triqs_dft_tools.converters.vasp import *


import warnings
@ -37,7 +37,7 @@ def dmft_cycle():
        mpi.report('found {0:d} blocks of degenerate orbitals in shell {1:d}'.format(num_block_deg_orbs, i_sh))
        for iblock in range(num_block_deg_orbs):
            mpi.report('block {0:d} consists of orbitals:'.format(iblock))
            for keys in SK.deg_shells[i_sh][iblock].keys():
            for keys in list(SK.deg_shells[i_sh][iblock].keys()):
                mpi.report(' '+keys)

    # Setup CTQMC Solver
@ -176,15 +176,15 @@ def dmft_cycle():


    if mpi.is_master_node():
        print 'calculating mu...'
        print('calculating mu...')
    SK.chemical_potential = SK.calc_mu( precision = 0.000001 )

    if mpi.is_master_node():
        print 'calculating GAMMA'
        print('calculating GAMMA')
    SK.calc_density_correction(dm_type='vasp')

    if mpi.is_master_node():
        print 'calculating energy corrections'
        print('calculating energy corrections')

    correnerg = 0.5 * (S.G_iw * S.Sigma_iw).total_density()
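For orientation, the `correnerg` line kept by this hunk implements the usual Galitskii-Migdal estimate of the interaction energy, :math:`E_{\mathrm{corr}} = \tfrac{1}{2}\,\mathrm{Tr}\,[\Sigma G] = \tfrac{T}{2}\sum_{i\omega_n}\mathrm{Tr}\,[\Sigma(i\omega_n)\,G(i\omega_n)]`; the factor 0.5 avoids double counting the interaction, and `total_density()` carries out the Matsubara sum. (This is a reading of the code, not wording from the commit.)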
@ -57,9 +57,9 @@ At the end of the run you see the density matrix in Wannier space:
As you can see, there are off-diagonal elements between the :math:`d_{x^2-y^2}` and the :math:`d_{xy}` orbital.

We convert the output to the hdf5 archive, using
the python module :class:`Wien2kConverter <dft.converters.wien2k_converter.Wien2kConverter>`. A simple python script doing this is::
the python module :class:`Wien2kConverter <dft.converters.wien2k.Wien2kConverter>`. A simple python script doing this is::

    from triqs_dft_tools.converters.wien2k_converter import *
    from triqs_dft_tools.converters.wien2k import *
    Converter = Wien2kConverter(filename = "Sr2MgOsO6_noSOC")
    Converter.convert_dft_input()
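After this conversion step the archive Sr2MgOsO6_noSOC.h5 holds everything the SumkDFT machinery needs. A hedged sketch of peeking into it with the h5 module that this merge switches to (the `dft_input` group and `n_k` key follow the usual converter convention and are assumed here rather than quoted from the diff):

```python
# Hedged sketch: inspect the archive written by Wien2kConverter above.
from h5 import HDFArchive

with HDFArchive('Sr2MgOsO6_noSOC.h5', 'r') as ar:
    assert 'dft_input' in ar          # group written by the converter (assumed name)
    n_k = ar['dft_input']['n_k']      # e.g. the number of k-points (assumed key)
    print('k-points:', n_k)
```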
@ -123,8 +123,8 @@ The interaction Hamiltonian
|
|||||||
|
|
||||||
We now set up the interaction Hamiltonian. Since we want to rotate the interaction matrix into the local basis, we are using the Slater convention for it::
|
We now set up the interaction Hamiltonian. Since we want to rotate the interaction matrix into the local basis, we are using the Slater convention for it::
|
||||||
|
|
||||||
from pytriqs.operators.util import *
|
from triqs.operators.util import *
|
||||||
from pytriqs.operators.util.U_matrix import *
|
from triqs.operators.util.U_matrix import *
|
||||||
|
|
||||||
U = 2.0
|
U = 2.0
|
||||||
J = 0.2
|
J = 0.2
|
||||||
@ -141,7 +141,7 @@ Note that we needed to set up the interaction Hamiltonian for the full set of fi
|
|||||||
Now we have the interaction Hamiltonian for the solver, which we set up next::
|
Now we have the interaction Hamiltonian for the solver, which we set up next::
|
||||||
|
|
||||||
from triqs_cthyb import *
|
from triqs_cthyb import *
|
||||||
import pytriqs.utility.mpi as mpi
|
import triqs.utility.mpi as mpi
|
||||||
|
|
||||||
beta = 40.0
|
beta = 40.0
|
||||||
S = Solver(beta=beta, gf_struct=SK.block_structure.gf_struct_solver_list[0])
|
S = Solver(beta=beta, gf_struct=SK.block_structure.gf_struct_solver_list[0])
|
||||||
|
@@ -58,9 +58,9 @@ At the end of the run you see the density matrix in Wannier space:
 As you can see, there are a lot of off-diagonal elements now, in particular also off-diagonal in spin space. This is just telling us that spin is not a good quantum number any more in the presence of SOC.
 
 We convert the output to the hdf5 archive, using
-the python module :class:`Wien2kConverter <dft.converters.wien2k_converter.Wien2kConverter>`. A simple python script doing this is::
+the python module :class:`Wien2kConverter <dft.converters.wien2k.Wien2kConverter>`. A simple python script doing this is::
 
-    from triqs_dft_tools.converters.wien2k_converter import *
+    from triqs_dft_tools.converters.wien2k import *
     Converter = Wien2kConverter(filename = "Sr2MgOsO6_SOC")
     Converter.convert_dft_input()
 
@@ -119,8 +119,8 @@ The interaction Hamiltonian
 
 We now set up the interaction Hamiltonian. Since we want to rotate the interaction matrix into the local basis, we are using the Slater convention for it. We use *l=2* for *d* orbitals. Also, for SOC calculations, we need to inflate the resulting matrix to size 10x10::
 
-    from pytriqs.operators.util import *
-    from pytriqs.operators.util.U_matrix import *
+    from triqs.operators.util import *
+    from triqs.operators.util.U_matrix import *
 
     U = 2.0
     J = 0.2
@@ -139,7 +139,7 @@ Note that we needed to set up the interaction Hamiltonian first for the full set
 Now we have the interaction Hamiltonian for the solver, which we set up next::
 
    from triqs_cthyb import *
-   import pytriqs.utility.mpi as mpi
+   import triqs.utility.mpi as mpi
 
    beta = 40.0
    S = Solver(beta=beta, gf_struct=SK.block_structure.gf_struct_solver_list[0])
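The 10x10 inflation mentioned above can be pictured with plain numpy; a hedged sketch (the helper name and the spin-blocked orbital ordering are illustrative, not the DFTTools API)::

    import numpy as np

    def inflate_U_for_soc(U_orb):
        # Inflate a 4-index U matrix from orbital space (5x5x5x5) to
        # spin-orbital space (10x10x10x10), assuming a spin-independent
        # Coulomb vertex and spin-blocked ordering (all up, then all down).
        n = U_orb.shape[0]
        U_so = np.zeros([2 * n] * 4, dtype=U_orb.dtype)
        for s1 in (0, 1):
            for s2 in (0, 1):
                U_so[s1*n:(s1+1)*n, s2*n:(s2+1)*n,
                     s1*n:(s1+1)*n, s2*n:(s2+1)*n] = U_orb
        return U_so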
@@ -51,10 +51,10 @@ Then :program:`dmftproj` is executed in its default mode (i.e. without spin-pola
     dmftproj
 
 This program produces the necessary files for the conversion to the hdf5 file structure. This is done using
-the python module :class:`Wien2kConverter <dft.converters.wien2k_converter.Wien2kConverter>`.
+the python module :class:`Wien2kConverter <dft.converters.wien2k.Wien2kConverter>`.
 A simple python script that initialises the converter is::
 
-    from triqs_dft_tools.converters.wien2k_converter import *
+    from triqs_dft_tools.converters.wien2k import *
     Converter = Wien2kConverter(filename = "SrVO3")
 
 After initializing the interface module, we can now convert the input
@@ -77,11 +77,11 @@ Loading modules
 First, we load the necessary modules::
 
     from triqs_dft_tools.sumk_dft import *
-    from pytriqs.gf import *
-    from pytriqs.archive import HDFArchive
-    from pytriqs.operators.util import *
+    from triqs.gf import *
+    from h5 import HDFArchive
+    from triqs.operators.util import *
     from triqs_cthyb import *
-    import pytriqs.utility.mpi as mpi
+    import triqs.utility.mpi as mpi
 
 The last two lines load the modules for the construction of the
 :ref:`CTHYB solver <triqscthyb:welcome>`.
@@ -271,7 +271,7 @@ and perform only one DMFT iteration. The resulting self energy can be tail fitte
     Sigma_iw_fit << tail_fit(S.Sigma_iw, fit_max_moment = 4, fit_min_n = 40, fit_max_n = 160)[0]
 
 Plot the self energy and adjust the tail fit parameters such that you obtain a
-proper fit. The :meth:`fit_tail function <pytriqs.gf.tools.tail_fit>` is part
+proper fit. The :meth:`fit_tail function <triqs.gf.tools.tail_fit>` is part
 of the :ref:`TRIQS <triqslibs:welcome>` library.
 
 For a self energy which is going to zero for :math:`i\omega \rightarrow 0` our suggestion is
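Scripts written against the old layout can be kept running with a small compatibility shim (not part of DFTTools, just a hedged convenience for users mid-migration)::

    try:
        import triqs.utility.mpi as mpi           # TRIQS 3.x / Python 3
        from triqs.gf import *
        from h5 import HDFArchive
    except ImportError:
        import pytriqs.utility.mpi as mpi         # TRIQS 2.x / Python 2
        from pytriqs.gf import *
        from pytriqs.archive import HDFArchive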
@@ -247,7 +247,7 @@
    ],
    "source": [
     "# import VASPconverter\n",
-    "from triqs_dft_tools.converters.vasp_converter import *\n",
+    "from triqs_dft_tools.converters.vasp import *\n",
     "\n",
     "\n",
     "# create Converter\n",
@@ -312,7 +312,7 @@
     "        mpi.report('found {0:d} blocks of degenerate orbitals in shell {1:d}'.format(num_block_deg_orbs, i_sh))\n",
     "        for iblock in range(num_block_deg_orbs):\n",
     "            mpi.report('block {0:d} consists of orbitals:'.format(iblock))\n",
-    "            for keys in SK.deg_shells[i_sh][iblock].keys():\n",
+    "            for keys in list(SK.deg_shells[i_sh][iblock].keys()):\n",
     "                mpi.report('   '+keys)"
    ]
   },
@@ -335,21 +335,20 @@
   ],
  "metadata": {
   "kernelspec": {
-   "display_name": "Python 2",
+   "display_name": "Python 3",
    "language": "python",
-   "name": "python2"
+   "name": "python3"
   },
   "language_info": {
    "codemirror_mode": {
     "name": "ipython",
-    "version": 2
+    "version": 3
    },
    "file_extension": ".py",
    "mimetype": "text/x-python",
    "name": "python",
    "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython2",
-   "version": "2.7.15+"
+   "pygments_lexer": "ipython3"
   }
  },
 "nbformat": 4,
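The `list(...)` wrapper around `keys()` reflects that Python 3 returns a live view object; a snapshot is needed whenever the dict may change during iteration::

    d = {'up_0': [0], 'down_0': [0]}
    for key in list(d.keys()):      # snapshot: safe to mutate d below
        if key.startswith('down'):
            del d[key]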
@@ -6,8 +6,7 @@ set(SOURCES modules.f dmftproj.f readcomline.f set_ang_trans.f setsym.f
 
 # The main target and what to link with...
 add_executable(dmftproj ${SOURCES})
-find_package(LAPACK)
-target_link_libraries(dmftproj ${LAPACK_LIBRARIES})
+target_link_libraries(dmftproj triqs::blas_lapack)
 
 # where to install
 install (TARGETS dmftproj DESTINATION bin)
@@ -17,7 +16,7 @@ SET(D ${CMAKE_CURRENT_SOURCE_DIR}/SRC_templates/)
 SET(WIEN_SRC_TEMPL_FILES ${D}/case.cf_f_mm2 ${D}/case.cf_p_cubic ${D}/case.indmftpr ${D}/run_triqs ${D}/runsp_triqs)
 message(STATUS "-----------------------------------------------------------------------------")
 message(STATUS "   ******** WARNING ******** ")
-message(STATUS " Wien2k 14.2 and older : after installation of TRIQS, copy the files from ")
+message(STATUS " Wien2k 14.2 and older : after installation of DFTTools, copy the files from ")
 message(STATUS " ${CMAKE_INSTALL_PREFIX}/share/triqs/Wien2k_SRC_files/SRC_templates ")
 message(STATUS " to your Wien2k installation WIENROOT/SRC_templates (Cf documentation). ")
 message(STATUS " For newer versions these files are already shipped with Wien2k. ")
@@ -1,19 +0,0 @@
-# where will the python end up in triqs?
-set(PYTHON_LIB_DEST ${CPP2PY_PYTHON_LIB_DEST_ROOT}/triqs_dft_tools)
-
-# site_customize for build
-set(package_name "triqs_dft_tools")
-
-# Create a temporary copy of the python modules so that we can run before installation with the test
-FILE(GLOB PYTHON_SOURCES RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} *.py )
-foreach(f ${PYTHON_SOURCES})
- configure_file(${f} ${f} COPYONLY)
-endforeach()
-
-# add version file
-configure_file(version.py.in version.py @ONLY)
-
-# install files
-install(FILES ${PYTHON_SOURCES} ${CMAKE_CURRENT_BINARY_DIR}/version.py DESTINATION ${PYTHON_LIB_DEST})
-
-add_subdirectory(converters)
1 python/converters/.gitignore vendored
@@ -1 +0,0 @@
-*.pyc
@@ -1,10 +0,0 @@
-# Create a temporary copy of the python modules so that we can run before installation with the test
-FILE(GLOB PYTHON_SOURCES RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} *.py)
-foreach(f ${PYTHON_SOURCES})
- configure_file(${f} ${f} COPYONLY)
-endforeach()
-
-# install files
-install(FILES ${PYTHON_SOURCES} DESTINATION ${PYTHON_LIB_DEST}/converters)
-
-add_subdirectory(plovasp)
@@ -1,19 +0,0 @@
-# === Build and install atm module
-add_cpp2py_module(atm)
-target_link_libraries(atm atm_c triqs)
-target_compile_options(atm PRIVATE -std=c++17)
-target_include_directories(atm PRIVATE ${CMAKE_SOURCE_DIR}/c++)
-
-install(TARGETS atm DESTINATION ${PYTHON_LIB_DEST}/converters/plovasp)
-
-# === Copy Python files to current build directory and register for install
-set(PYTHON_SOURCES __init__.py converter.py elstruct.py inpconf.py plotools.py proj_group.py proj_shell.py sc_dmft.py vaspio.py)
-foreach(f ${PYTHON_SOURCES})
- configure_file(${f} ${f} COPYONLY)
-endforeach()
-
-# install files
-install(FILES ${PYTHON_SOURCES} DESTINATION ${PYTHON_LIB_DEST}/converters/plovasp)
-
-# This we need in order for tests to work
-#add_custom_command(TARGET atm POST_BUILD COMMAND ln -fs ${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_PROJECT_NAME}/atm.so ${CMAKE_BINARY_DIR}/python/dft/converters/plovasp)
25 python/triqs_dft_tools/CMakeLists.txt Normal file
@@ -0,0 +1,25 @@
+# Configure the version
+configure_file(version.py.in version.py)
+
+# All Python files. Copy them in the build dir to have a complete package for the tests.
+file(GLOB_RECURSE python_sources RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} *.py)
+file(GLOB_RECURSE wrap_generators RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} *_desc.py)
+list(REMOVE_ITEM python_sources "${wrap_generators}")
+foreach(file ${python_sources})
+  configure_file(${file} ${file} COPYONLY)
+endforeach()
+
+# Install python files to proper location
+set(PYTHON_LIB_DEST ${TRIQS_PYTHON_LIB_DEST_ROOT}/${PROJECT_NAME})
+install(FILES ${CMAKE_CURRENT_BINARY_DIR}/version.py DESTINATION ${PYTHON_LIB_DEST})
+install(DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} DESTINATION ${TRIQS_PYTHON_LIB_DEST_ROOT} FILES_MATCHING PATTERN "*.py" PATTERN "*_desc.py" EXCLUDE)
+
+# Build and install any python modules
+foreach(gen ${wrap_generators})
+  string(REPLACE "_desc.py" "" gen ${gen})
+  get_filename_component(module_name ${gen} NAME_WE)
+  get_filename_component(module_dir ${gen} DIRECTORY)
+  add_cpp2py_module(NAME ${module_name} DIRECTORY ${module_dir})
+  target_link_libraries(${module_name} ${PROJECT_NAME}_c triqs_py)
+  install(TARGETS ${module_name} DESTINATION ${PYTHON_LIB_DEST}/${module_dir})
+endforeach()
@@ -20,11 +20,11 @@
 #
 ##########################################################################
 
-from sumk_dft import SumkDFT
-from symmetry import Symmetry
-from block_structure import BlockStructure
-from sumk_dft_tools import SumkDFTTools
-from converters import *
+from .sumk_dft import SumkDFT
+from .symmetry import Symmetry
+from .block_structure import BlockStructure
+from .sumk_dft_tools import SumkDFTTools
+from .converters import *
 
 __all__ = ['SumkDFT', 'Symmetry', 'SumkDFTTools',
            'Wien2kConverter', 'HkConverter','BlockStructure']
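Python 3 dropped implicit relative imports, hence the leading dots; inside a package the explicit intra-package form is mandatory::

    # Python 2 only (implicit relative import):
    #   from sumk_dft import SumkDFT
    # Python 2 and 3 (explicit relative import):
    from .sumk_dft import SumkDFT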
@@ -25,9 +25,9 @@
 
 import copy
 import numpy as np
-from pytriqs.gf import GfImFreq, BlockGf
+from triqs.gf import GfImFreq, BlockGf
 from ast import literal_eval
-import pytriqs.utility.mpi as mpi
+import triqs.utility.mpi as mpi
 from warnings import warn
 from collections import defaultdict
 
@@ -142,7 +142,7 @@ class BlockStructure(object):
         if self.gf_struct_solver is None:
             return None
         # we sort by block name in order to get a reproducible result
-        return [sorted([(k, v) for k, v in gfs.iteritems()], key=lambda x: x[0])
+        return [sorted([(k, v) for k, v in list(gfs.items())], key=lambda x: x[0])
                 for gfs in self.gf_struct_solver]
 
     @property
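`iteritems()` is gone in Python 3; `items()` returns a view that `sorted()` can consume directly, so the extra `list(...)` above is only defensive::

    gfs = {'up': [0, 1], 'down': [0, 1]}
    pairs = sorted(gfs.items(), key=lambda kv: kv[0])   # a view is fine here
    pairs_list = list(gfs.items())                      # materialize if reused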
@@ -203,10 +203,10 @@ class BlockStructure(object):
         N_solver = len(np.unique(self.corr_to_inequiv))
         if self.gf_struct_solver is not None:
             assert N_solver == len(self.gf_struct_solver)
-        assert sorted(np.unique(self.corr_to_inequiv)) == range(N_solver),\
+        assert sorted(np.unique(self.corr_to_inequiv)) == list(range(N_solver)),\
             "an inequivalent shell is missing in corr_to_inequiv"
         return [self.corr_to_inequiv.index(icrsh)
-                for icrsh in range(N_solver)]
+                for icrsh in list(range(N_solver))]
 
     @inequiv_to_corr.setter
     def inequiv_to_corr(self, value):
@@ -222,7 +222,7 @@ class BlockStructure(object):
         ret = []
         for ish, icrsh in enumerate(self.inequiv_to_corr):
             d = defaultdict(list)
-            for block_solver, block_sumk in self.solver_to_sumk_block[ish].iteritems():
+            for block_solver, block_sumk in list(self.solver_to_sumk_block[ish].items()):
                 d[block_sumk].append(block_solver)
             ret.append(d)
         return ret
@@ -251,7 +251,7 @@ class BlockStructure(object):
         assert len(trans) == len(self.gf_struct_sumk),\
             "give one transformation per correlated shell"
 
-        for icrsh in range(len(trans)):
+        for icrsh in list(range(len(trans))):
             ish = self.corr_to_inequiv[icrsh]
             if trans[icrsh] is None:
                 trans[icrsh] = {block: np.eye(len(indices))
@@ -261,7 +261,7 @@ class BlockStructure(object):
                 trans[icrsh] = {block: copy.deepcopy(trans[icrsh])
                                 for block, indices in self.gf_struct_sumk[icrsh]}
 
-            assert trans[icrsh].keys() == self.gf_struct_sumk_dict[icrsh].keys(),\
+            assert list(trans[icrsh].keys()) == list(self.gf_struct_sumk_dict[icrsh].keys()),\
                 "wrong block names used in transformation (icrsh = {})".format(icrsh)
 
             for block in trans[icrsh]:
@@ -422,15 +422,15 @@ class BlockStructure(object):
             # create new solver_to_sumk
             so2su = {}
             so2su_block = {}
-            for blk, idxs in gf_struct.items():
+            for blk,idxs in list(gf_struct.items()):
                 for i in range(len(idxs)):
                     so2su[(blk, i)] = self.solver_to_sumk[ish][(blk, idxs[i])]
                     so2su_block[blk] = so2su[(blk, i)][0]
             self.solver_to_sumk[ish] = so2su
             self.solver_to_sumk_block[ish] = so2su_block
             # create new sumk_to_solver
-            for k, v in self.sumk_to_solver[ish].items():
-                blk, ind = v
+            for k,v in list(self.sumk_to_solver[ish].items()):
+                blk,ind=v
                 if blk in gf_struct and ind in gf_struct[blk]:
                     new_ind = gf_struct[blk].index(ind)
                     self.sumk_to_solver[ish][k] = (blk, new_ind)
@@ -443,8 +443,8 @@ class BlockStructure(object):
 
             # reindexing gf_struct so that it starts with 0
             for k in gf_struct:
-                gf_struct[k] = range(len(gf_struct[k]))
-            self.gf_struct_solver[ish] = gf_struct
+                gf_struct[k]=list(range(len(gf_struct[k])))
+            self.gf_struct_solver[ish]=gf_struct
 
     def adapt_deg_shells(self, gf_struct, ish=0):
         """ Adapts the deg_shells to a new gf_struct
@@ -453,7 +453,7 @@ class BlockStructure(object):
         if self.deg_shells is not None:
             for degsh in self.deg_shells[ish]:
                 if isinstance(degsh, dict):
-                    for key in degsh.keys():
+                    for key in list(degsh.keys()):
                         if not key in gf_struct:
                             del degsh[key]
                             continue
@@ -539,11 +539,11 @@ class BlockStructure(object):
         for icrsh in range(len(new_gf_struct_transformed)):
             ish = self.corr_to_inequiv[icrsh]
             gfs.append({})
-            for block in new_gf_struct_transformed[icrsh].keys():
+            for block in list(new_gf_struct_transformed[icrsh].keys()):
                 for ind in new_gf_struct_transformed[icrsh][block]:
-                    ind_sol = self.sumk_to_solver[ish][(block, ind)]
+                    ind_sol = self.sumk_to_solver[ish][(block,ind)]
                     if not ind_sol[0] in gfs[icrsh]:
-                        gfs[icrsh][ind_sol[0]] = []
+                        gfs[icrsh][ind_sol[0]]=[]
                     gfs[icrsh][ind_sol[0]].append(ind_sol[1])
         self.pick_gf_struct_solver(gfs)
 
@@ -579,7 +579,7 @@ class BlockStructure(object):
             so2su = {}
             su2so = {}
             so2su_block = {}
-            for frm, to in mapping[ish].iteritems():
+            for frm,to in list(mapping[ish].items()):
                 if not to[0] in gf_struct:
                     gf_struct[to[0]] = []
                 gf_struct[to[0]].append(to[1])
@@ -594,7 +594,7 @@ class BlockStructure(object):
                 else:
                     so2su_block[to[0]] =\
                         self.solver_to_sumk_block[ish][frm[0]]
-            for k in self.sumk_to_solver[ish].keys():
+            for k in list(self.sumk_to_solver[ish].keys()):
                 if not k in su2so:
                     su2so[k] = (None, None)
 
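The `list(range(...))` wrappers matter wherever the result is compared for equality: in Python 3 a `range` object never compares equal to a list::

    N_solver = 3
    sorted([0, 1, 2]) == range(N_solver)        # False in Python 3
    sorted([0, 1, 2]) == list(range(N_solver))  # True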
@@ -669,7 +669,7 @@ class BlockStructure(object):
             raise Exception(
                 "Argument space has to be either 'solver' or 'sumk'.")
 
-        names = gf_struct[ish].keys()
+        names = list(gf_struct[ish].keys())
         blocks = []
         for n in names:
             G = gf_function(indices=gf_struct[ish][n], **kwargs)
@@ -738,7 +738,7 @@ class BlockStructure(object):
         assert len(G) == len(gf_struct),\
             "list of G does not have the correct length"
         if ish is None:
-            ishs = range(len(gf_struct))
+            ishs = list(range(len(gf_struct)))
         else:
             ishs = [ish]
         for ish in ishs:
@@ -755,17 +755,17 @@ class BlockStructure(object):
             for block, gf in G:
                 assert block in gf_struct[ish],\
                     "block " + block + " not in struct (shell {})".format(ish)
-                assert list(gf.indices) == 2 * [map(str, gf_struct[ish][block])],\
+                assert list(gf.indices) == 2 * [list(map(str, gf_struct[ish][block]))],\
                     "block " + block + \
                     " has wrong indices (shell {})".format(ish)
         else:
             for block in gf_struct[ish]:
                 assert block in G,\
                     "block " + block + " not in G (shell {})".format(ish)
-            for block, gf in G.iteritems():
+            for block, gf in list(G.items()):
                 assert block in gf_struct[ish],\
                     "block " + block + " not in struct (shell {})".format(ish)
-                assert range(len(gf)) == 2 * [map(str, gf_struct[ish][block])],\
+                assert list(range(len(gf))) == 2 * [list(map(str, gf_struct[ish][block]))],\
                     "block " + block + \
                     " has wrong indices (shell {})".format(ish)
 
@@ -775,20 +775,20 @@ class BlockStructure(object):
 
         Parameters
         ----------
-        O : pytriqs.operators.Operator
+        O : triqs.operators.Operator
             Operator in sumk structure
 
         ish : int
             shell index on which the operator acts
         """
 
-        from pytriqs.operators import Operator, c, c_dag
+        from triqs.operators import Operator, c, c_dag
 
         T = self.transformation[ish]
         sk2s = self.sumk_to_solver[ish]
 
         O_out = Operator(0)
 
         for monomial in O:
             coefficient = monomial[-1]
             new_monomial = Operator(1)
@@ -796,7 +796,7 @@ class BlockStructure(object):
             for single_operator in monomial[0]:
                 new_single_operator = Operator(0)
                 daggered = single_operator[0]
 
                 blockname = single_operator[1][0]
                 i = single_operator[1][1]
                 for j in range(len(T[blockname])):
@@ -961,7 +961,7 @@ class BlockStructure(object):
         else:
             raise Exception('G is neither BlockGf nor dict.')
 
-        for block_to in gf_struct_to.keys():
+        for block_to in list(gf_struct_to.keys()):
             if isinstance(G, BlockGf):
                 G_out[block_to].zero()
             else:
@@ -989,7 +989,7 @@ class BlockStructure(object):
                 ish_to=ish_from,
                 show_warnings=False,  # else we get an endless loop
                 space_from=space_to, space_to=space_from, **kwargs)
-            for name, gf in (G_back if isinstance(G, BlockGf) else G_back.iteritems()):
+            for name, gf in (G_back if isinstance(G, BlockGf) else list(G_back.items())):
                 if isinstance(G, BlockGf):
                     maxdiff = np.max(np.abs(G_back[name].data - G[name].data),
                                      axis=0)
@@ -1033,7 +1033,7 @@ class BlockStructure(object):
             self.gf_struct_solver.append({})
             self.solver_to_sumk.append({})
             self.solver_to_sumk_block.append({})
-            for frm,to in self.sumk_to_solver[ish].iteritems():
+            for frm,to in list(self.sumk_to_solver[ish].items()):
                 if to[0] is not None:
                     self.gf_struct_solver[ish][frm[0]+'_'+str(frm[1])]=[0]
                     self.sumk_to_solver[ish][frm]=(frm[0]+'_'+str(frm[1]),0)
@@ -1061,7 +1061,7 @@ class BlockStructure(object):
             elif isinstance(one,dict):
                 if set(one.keys()) != set(two.keys()):
                     return False
                for k in set(one.keys()).intersection(two.keys()):
-                for k in set(one.keys()).intersection(two.keys()):
+                for k in set(one.keys()).intersection(list(two.keys())):
                     if not compare(one[k],two[k]):
                         return False
                 return True
@@ -1096,7 +1096,7 @@ class BlockStructure(object):
         d = []
         for ish in range(len(mapping)):
             d.append({})
-            for k,v in mapping[ish].iteritems():
+            for k,v in list(mapping[ish].items()):
                 d[ish][repr(k)] = repr(v)
         return d
 
@@ -1112,7 +1112,7 @@ class BlockStructure(object):
         d = []
         for ish in range(len(mapping)):
            d.append({})
-            for k,v in mapping[ish].iteritems():
+            for k,v in list(mapping[ish].items()):
                # literal_eval is a safe alternative to eval
                d[ish][literal_eval(k)] = literal_eval(v)
        return d
@@ -1138,7 +1138,7 @@ class BlockStructure(object):
             s+=' shell '+str(ish)+'\n'
             def keyfun(el):
                 return '{}_{:05d}'.format(el[0],el[1])
-            keys = sorted(element[ish].keys(),key=keyfun)
+            keys = sorted(list(element[ish].keys()),key=keyfun)
             for k in keys:
                 s+='  '+str(k)+str(element[ish][k])+'\n'
         s += "deg_shells\n"
@@ -1147,7 +1147,7 @@ class BlockStructure(object):
         for l in range(len(self.deg_shells[ish])):
             s+=' equivalent group '+str(l)+'\n'
             if isinstance(self.deg_shells[ish][l],dict):
-                for key, val in self.deg_shells[ish][l].iteritems():
+                for key, val in list(self.deg_shells[ish][l].items()):
                     s+='  '+key+('*' if val[1] else '')+':\n'
                     s+='   '+str(val[0]).replace('\n','\n   ')+'\n'
             else:
@@ -1157,5 +1157,5 @@ class BlockStructure(object):
         s += str(self.transformation)
         return s
 
-from pytriqs.archive.hdf_archive_schemes import register_class
+from h5.formats import register_class
 register_class(BlockStructure)
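`register_class` hooks a type into the HDF5 (de)serialization machinery; a minimal sketch under the new `h5` layout (the two dunder hooks follow the TRIQS convention, check the h5 documentation for your version)::

    from h5.formats import register_class

    class MyData:
        def __reduce_to_dict__(self):              # invoked on write
            return {'payload': self.payload}

        @classmethod
        def __factory_from_dict__(cls, name, d):   # invoked on read
            obj = cls.__new__(cls)
            obj.payload = d['payload']
            return obj

    register_class(MyData)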
@@ -3,13 +3,13 @@ import sys
 import subprocess
 
 if len(sys.argv) < 2:
-    print "Usage: python clear_h5_output.py archive"
+    print("Usage: python clear_h5_output.py archive")
     sys.exit()
 
-print """
+print("""
 This script is to remove any SumkDFT generated output from the h5 archive
 and to restore it to the original post-converter state.
-"""
+""")
 
 filename = sys.argv[1]
 A = h5py.File(filename)
@@ -21,6 +21,6 @@ A.close()
 # Repack to reclaim disk space
 retcode = subprocess.call(["h5repack", "-i%s" % filename, "-otemphgfrt.h5"])
 if retcode != 0:
-    print "h5repack failed!"
+    print("h5repack failed!")
 else:
     subprocess.call(["mv", "-f", "temphgfrt.h5", "%s" % filename])
@@ -20,10 +20,10 @@
 #
 ##########################################################################
 
-from wien2k_converter import Wien2kConverter
-from hk_converter import HkConverter
-from vasp_converter import VaspConverter
-from wannier90_converter import Wannier90Converter
+from .wien2k import Wien2kConverter
+from .hk import HkConverter
+from .vasp import VaspConverter
+from .wannier90 import Wannier90Converter
 
 __all__ =['Wien2kConverter','HkConverter','Wannier90Converter','VaspConverter']
 
@@ -19,7 +19,7 @@
 # TRIQS. If not, see <http://www.gnu.org/licenses/>.
 #
 ##########################################################################
-import pytriqs.utility.mpi as mpi
+import triqs.utility.mpi as mpi
 
 class ConverterTools:
 
@@ -46,12 +46,12 @@ class ConverterTools:
         import os.path
         import string
         if not(os.path.exists(filename)):
-            raise IOError, "File %s does not exist." % filename
+            raise IOError("File %s does not exist." % filename)
         for line in open(filename, 'r'):
-            for old, new in to_replace.iteritems():
+            for old, new in to_replace.items():
                 line = line.replace(old, new)
             for x in line.split():
-                yield string.atof(x)
+                yield float(x)
 
     def repack(self):
         """
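`read_fortran_file` is essentially a generator over every number in a file; a self-contained Python 3 sketch of the same idea (the file name and the 'D'-exponent rewrite below are illustrative, not the DFTTools defaults)::

    def read_numbers(filename, to_replace={'D': 'E'}):
        # Yield each whitespace-separated token as a float, after applying
        # the token rewrites (e.g. Fortran 1.0D+00 -> 1.0E+00).
        with open(filename) as f:
            for line in f:
                for old, new in to_replace.items():
                    line = line.replace(old, new)
                for token in line.split():
                    yield float(token)

    R = read_numbers('case.hk')   # hypothetical input file
    n_k = int(next(R))            # pull values off the stream one by one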
@@ -22,10 +22,10 @@
 
 from types import *
 import numpy
-from pytriqs.archive import *
-import pytriqs.utility.mpi as mpi
+from h5 import *
+import triqs.utility.mpi as mpi
 from math import sqrt
-from converter_tools import *
+from .converter_tools import *
 
 
 class HkConverter(ConverterTools):
@@ -53,8 +53,7 @@ class HkConverter(ConverterTools):
 
         """
 
-        assert type(
-            filename) == StringType, "HkConverter: filename must be a filename."
+        assert isinstance(filename, str), "HkConverter: filename must be a filename."
         if hdf_filename is None:
             hdf_filename = filename + '.h5'
         self.hdf_file = hdf_filename
@@ -96,20 +95,20 @@ class HkConverter(ConverterTools):
         # the energy conversion factor is 1.0, we assume eV in files
         energy_unit = 1.0
         # read the number of k points
-        n_k = int(R.next())
+        n_k = int(next(R))
         k_dep_projection = 0
         SP = 0  # no spin-polarision
         SO = 0  # no spin-orbit
         # total charge below energy window is set to 0
         charge_below = 0.0
         # density required, for setting the chemical potential
-        density_required = R.next()
+        density_required = next(R)
         symm_op = 0  # No symmetry groups for the k-sum
 
         # the information on the non-correlated shells is needed for
         # defining dimension of matrices:
         # number of shells considered in the Wanniers
-        n_shells = int(R.next())
+        n_shells = int(next(R))
         # corresponds to index R in formulas
         # now read the information about the shells (atom, sort, l, dim):
         shell_entries = ['atom', 'sort', 'l', 'dim']
@@ -117,7 +116,7 @@ class HkConverter(ConverterTools):
                    shell_entries, R)} for ish in range(n_shells)]
 
         # number of corr. shells (e.g. Fe d, Ce f) in the unit cell,
-        n_corr_shells = int(R.next())
+        n_corr_shells = int(next(R))
         # corresponds to index R in formulas
         # now read the information about the shells (atom, sort, l, dim, SO
         # flag, irep):
@@ -141,8 +140,8 @@ class HkConverter(ConverterTools):
         T = []
         for ish in range(n_inequiv_shells):
             # number of representatives ("subsets"), e.g. t2g and eg
-            n_reps[ish] = int(R.next())
-            dim_reps[ish] = [int(R.next()) for i in range(
+            n_reps[ish] = int(next(R))
+            dim_reps[ish] = [int(next(R)) for i in range(
                 n_reps[ish])]  # dimensions of the subsets
 
             # The transformation matrix:
@@ -201,7 +200,7 @@ class HkConverter(ConverterTools):
         if (weights_in_file):
             # weights in the file
             for ik in range(n_k):
-                bz_weights[ik] = R.next()
+                bz_weights[ik] = next(R)
 
         # if the sum over spins is in the weights, take it out again!!
         sm = sum(bz_weights)
@@ -222,7 +221,7 @@ class HkConverter(ConverterTools):
                     else:
                         istart = 0
                     for j in range(istart, n_orb):
-                        hopping[ik, isp, i, j] = R.next()
+                        hopping[ik, isp, i, j] = next(R)
 
                 for i in range(n_orb):
                     if (only_upper_triangle):
@@ -230,7 +229,7 @@ class HkConverter(ConverterTools):
                     else:
                         istart = 0
                     for j in range(istart, n_orb):
-                        hopping[ik, isp, i, j] += R.next() * 1j
+                        hopping[ik, isp, i, j] += next(R) * 1j
                         if ((only_upper_triangle)and(i != j)):
                             hopping[ik, isp, j, i] = hopping[
                                 ik, isp, i, j].conjugate()
@@ -243,8 +242,8 @@ class HkConverter(ConverterTools):
                     else:
                         istart = 0
                     for j in range(istart, n_orb):
-                        hopping[ik, isp, i, j] = R.next()
-                        hopping[ik, isp, i, j] += R.next() * 1j
+                        hopping[ik, isp, i, j] = next(R)
+                        hopping[ik, isp, i, j] += next(R) * 1j
 
                         if ((only_upper_triangle)and(i != j)):
                             hopping[ik, isp, j, i] = hopping[
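All of these edits are one and the same Python 3 change: the `next` method on iterators became `__next__`, and the portable spelling is the `next()` builtin::

    R = iter([4.0, 1.0, 5.0])
    n_k = int(next(R))          # replaces int(R.next())
    density_required = next(R)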
@@ -1,4 +1,3 @@
-
 ################################################################################
 #
 # TRIQS: a Toolbox for Research in Interacting Quantum Systems
@@ -1,5 +1,5 @@
 # Generated automatically using the command :
-# c++2py.py -m atm -o atm --moduledoc "Analytical Tetrahedron Method for DOS" ../../../c++/plovasp/atm/dos_tetra3d.hpp
+# c++2py.py -m atm -o atm --moduledoc "Analytical Tetrahedron Method for DOS" ../../../../c++/triqs_dft_tools/converters/vasp/dos_tetra3d.hpp
 from cpp2py.wrap_generator import *
 
 # The module
@@ -8,11 +8,12 @@ module = module_(full_name = "atm", doc = "Analytical Tetrahedron Method for cal
 # All the triqs C++/Python modules
 
 # Add here all includes beyond what is automatically included by the triqs modules
-module.add_include("plovasp/atm/dos_tetra3d.hpp")
+module.add_include("triqs_dft_tools/converters/vasp/dos_tetra3d.hpp")
 
 # Add here anything to add in the C++ code at the start, e.g. namespace using
 module.add_preamble("""
 #include <triqs/cpp2py_converters/arrays.hpp>
+using namespace triqs::arrays;
 """)
 
 module.add_function ("array<double,2> dos_tetra_weights_3d (array_view<double,1> eigk, double en, array_view<long,2> itt)", doc = """DOS of a band by analytical tetrahedron method\n\n Returns corner weights for all tetrahedra for a given band and real energy.""")
@ -36,10 +36,10 @@ r"""
|
|||||||
Usage: python converter.py <conf-file> [<path-to-vasp-calculation>]
|
Usage: python converter.py <conf-file> [<path-to-vasp-calculation>]
|
||||||
"""
|
"""
|
||||||
import sys
|
import sys
|
||||||
import vaspio
|
from . import vaspio
|
||||||
from inpconf import ConfigParameters
|
from .inpconf import ConfigParameters
|
||||||
from elstruct import ElectronicStructure
|
from .elstruct import ElectronicStructure
|
||||||
from plotools import generate_plo, output_as_text
|
from .plotools import generate_plo, output_as_text
|
||||||
|
|
||||||
def generate_and_output_as_text(conf_filename, vasp_dir):
|
def generate_and_output_as_text(conf_filename, vasp_dir):
|
||||||
"""
|
"""
|
@ -92,7 +92,7 @@ class ElectronicStructure:
|
|||||||
# removed completely.
|
# removed completely.
|
||||||
# if not vasp_data.eigenval.eigs is None:
|
# if not vasp_data.eigenval.eigs is None:
|
||||||
if False:
|
if False:
|
||||||
print "eigvals from EIGENVAL"
|
print("eigvals from EIGENVAL")
|
||||||
self.eigvals = vasp_data.eigenval.eigs
|
self.eigvals = vasp_data.eigenval.eigs
|
||||||
self.ferw = vasp_data.eigenval.ferw.transpose((2, 0, 1))
|
self.ferw = vasp_data.eigenval.ferw.transpose((2, 0, 1))
|
||||||
|
|
||||||
@ -102,7 +102,7 @@ class ElectronicStructure:
|
|||||||
# Check that the number of band is the same in PROJCAR and EIGENVAL
|
# Check that the number of band is the same in PROJCAR and EIGENVAL
|
||||||
assert nb_plo == self.nband, "PLOCAR is inconsistent with EIGENVAL (number of bands)"
|
assert nb_plo == self.nband, "PLOCAR is inconsistent with EIGENVAL (number of bands)"
|
||||||
else:
|
else:
|
||||||
print "eigvals from LOCPROJ"
|
print("eigvals from LOCPROJ")
|
||||||
self.eigvals = vasp_data.plocar.eigs
|
self.eigvals = vasp_data.plocar.eigs
|
||||||
self.ferw = vasp_data.plocar.ferw.transpose((2, 0, 1))
|
self.ferw = vasp_data.plocar.ferw.transpose((2, 0, 1))
|
||||||
self.efermi = vasp_data.doscar.efermi
|
self.efermi = vasp_data.doscar.efermi
|
||||||
@ -142,7 +142,7 @@ class ElectronicStructure:
|
|||||||
## Construct a map to access coordinates by index
|
## Construct a map to access coordinates by index
|
||||||
# self.structure['ion_index'] = []
|
# self.structure['ion_index'] = []
|
||||||
# for isort, nq in enumerate(self.structure['nq_types']):
|
# for isort, nq in enumerate(self.structure['nq_types']):
|
||||||
# for iq in xrange(nq):
|
# for iq in range(nq):
|
||||||
# self.structure['ion_index'].append((isort, iq))
|
# self.structure['ion_index'].append((isort, iq))
|
||||||
|
|
||||||
|
|
||||||
@ -154,7 +154,7 @@ class ElectronicStructure:
|
|||||||
nproj, ns, nk, nb = plo.shape
|
nproj, ns, nk, nb = plo.shape
|
||||||
ions = sorted(list(set([param['isite'] for param in self.proj_params])))
|
ions = sorted(list(set([param['isite'] for param in self.proj_params])))
|
||||||
nions = len(ions)
|
nions = len(ions)
|
||||||
norb = nproj / nions
|
norb = nproj // nions
|
||||||
|
|
||||||
# Spin factor
|
# Spin factor
|
||||||
sp_fac = 2.0 if ns == 1 and not self.nc_flag else 1.0
|
sp_fac = 2.0 if ns == 1 and not self.nc_flag else 1.0
|
||||||
@ -163,8 +163,8 @@ class ElectronicStructure:
|
|||||||
overlap = np.zeros((ns, nproj, nproj), dtype=np.float64)
|
overlap = np.zeros((ns, nproj, nproj), dtype=np.float64)
|
||||||
# ov_min = np.ones((ns, nproj, nproj), dtype=np.float64) * 100.0
|
# ov_min = np.ones((ns, nproj, nproj), dtype=np.float64) * 100.0
|
||||||
# ov_max = np.zeros((ns, nproj, nproj), dtype=np.float64)
|
# ov_max = np.zeros((ns, nproj, nproj), dtype=np.float64)
|
||||||
for ispin in xrange(ns):
|
for ispin in range(ns):
|
||||||
for ik in xrange(nk):
|
for ik in range(nk):
|
||||||
kweight = self.kmesh['kweights'][ik]
|
kweight = self.kmesh['kweights'][ik]
|
||||||
occ = self.ferw[ispin, ik, :]
|
occ = self.ferw[ispin, ik, :]
|
||||||
den_mat[ispin, :, :] += np.dot(plo[:, ispin, ik, :] * occ, plo[:, ispin, ik, :].T.conj()).real * kweight * sp_fac
|
den_mat[ispin, :, :] += np.dot(plo[:, ispin, ik, :] * occ, plo[:, ispin, ik, :].T.conj()).real * kweight * sp_fac
|
||||||
@ -174,12 +174,12 @@ class ElectronicStructure:
|
|||||||
# ov_min = np.minimum(ov, ov_min)
|
# ov_min = np.minimum(ov, ov_min)
|
||||||
|
|
||||||
# Output only the site-diagonal parts of the matrices
|
# Output only the site-diagonal parts of the matrices
|
||||||
print
|
print()
|
||||||
print " Unorthonormalized density matrices and overlaps:"
|
print(" Unorthonormalized density matrices and overlaps:")
|
||||||
for ispin in xrange(ns):
|
for ispin in range(ns):
|
||||||
print " Spin:", ispin + 1
|
print(" Spin:", ispin + 1)
|
||||||
for io, ion in enumerate(ions):
|
for io, ion in enumerate(ions):
|
||||||
print " Site:", ion
|
print(" Site:", ion)
|
||||||
iorb_inds = [(ip, param['m']) for ip, param in enumerate(self.proj_params) if param['isite'] == ion]
|
iorb_inds = [(ip, param['m']) for ip, param in enumerate(self.proj_params) if param['isite'] == ion]
|
||||||
norb = len(iorb_inds)
|
norb = len(iorb_inds)
|
||||||
dm = np.zeros((norb, norb))
|
dm = np.zeros((norb, norb))
|
||||||
@ -189,9 +189,9 @@ class ElectronicStructure:
|
|||||||
dm[iorb, iorb2] = den_mat[ispin, ind, ind2]
|
dm[iorb, iorb2] = den_mat[ispin, ind, ind2]
|
||||||
ov[iorb, iorb2] = overlap[ispin, ind, ind2]
|
ov[iorb, iorb2] = overlap[ispin, ind, ind2]
|
||||||
|
|
||||||
print " Density matrix" + (12*norb - 12 + 2)*" " + "Overlap"
|
print(" Density matrix" + (12*norb - 12 + 2)*" " + "Overlap")
|
||||||
for drow, dov in zip(dm, ov):
|
for drow, dov in zip(dm, ov):
|
||||||
out = ''.join(map("{0:12.7f}".format, drow))
|
out = ''.join(map("{0:12.7f}".format, drow))
|
||||||
out += " "
|
out += " "
|
||||||
out += ''.join(map("{0:12.7f}".format, dov))
|
out += ''.join(map("{0:12.7f}".format, dov))
|
||||||
print out
|
print(out)
|
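The `nproj // nions` change guards against Python 3's true division, where `/` on two ints yields a float::

    nproj, nions = 10, 2
    nproj / nions     # 5.0 (float) in Python 3 -- breaks shape arithmetic
    nproj // nions    # 5   (int), the old Python 2 behaviour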
@ -29,20 +29,20 @@ r"""
|
|||||||
|
|
||||||
Module for parsing and checking an input config-file.
|
Module for parsing and checking an input config-file.
|
||||||
"""
|
"""
|
||||||
import ConfigParser
|
import configparser
|
||||||
import numpy as np
|
import numpy as np
|
||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
import itertools as it
|
import itertools as it
|
||||||
import vaspio
|
from . import vaspio
|
||||||
|
|
||||||
def issue_warning(message):
|
def issue_warning(message):
|
||||||
"""
|
"""
|
||||||
Issues a warning.
|
Issues a warning.
|
||||||
"""
|
"""
|
||||||
print
|
print()
|
||||||
print " !!! WARNING !!!: " + message
|
print(" !!! WARNING !!!: " + message)
|
||||||
print
|
print()
|
||||||
|
|
||||||
################################################################################
|
################################################################################
|
||||||
################################################################################
|
################################################################################
|
||||||
@ -73,7 +73,7 @@ class ConfigParameters:
|
|||||||
################################################################################
|
################################################################################
|
||||||
def __init__(self, input_filename, verbosity=1):
|
def __init__(self, input_filename, verbosity=1):
|
||||||
self.verbosity = verbosity
|
self.verbosity = verbosity
|
||||||
self.cp = ConfigParser.SafeConfigParser()
|
self.cp = configparser.SafeConfigParser()
|
||||||
self.cp.readfp(open(input_filename, 'r'))
|
self.cp.readfp(open(input_filename, 'r'))
|
||||||
|
|
||||||
self.parameters = {}
|
self.parameters = {}
|
||||||
@ -89,7 +89,7 @@ class ConfigParameters:
|
|||||||
'corr': ('corr', self.parse_string_logical, True)}
|
'corr': ('corr', self.parse_string_logical, True)}
|
||||||
|
|
||||||
self.gr_required = {
|
self.gr_required = {
|
||||||
'shells': ('shells', lambda s: map(int, s.split())),
|
'shells': ('shells', lambda s: list(map(int, s.split()))),
|
||||||
'ewindow': ('ewindow', self.parse_energy_window)}
|
'ewindow': ('ewindow', self.parse_energy_window)}
|
||||||
|
|
||||||
self.gr_optional = {
|
self.gr_optional = {
|
||||||
@ -142,7 +142,7 @@ class ConfigParameters:
|
|||||||
else:
|
else:
|
||||||
# Check if a set of indices is given
|
# Check if a set of indices is given
|
||||||
try:
|
try:
|
||||||
l_tmp = map(int, par_str.split())
|
l_tmp = list(map(int, par_str.split()))
|
||||||
l_tmp.sort()
|
l_tmp.sort()
|
||||||
# Subtract 1 so that VASP indices (starting with 1) are converted
|
# Subtract 1 so that VASP indices (starting with 1) are converted
|
||||||
# to Python indices (starting with 0)
|
# to Python indices (starting with 0)
|
||||||
@ -160,7 +160,7 @@ class ConfigParameters:
|
|||||||
ion_list = []
|
ion_list = []
|
||||||
nion = 0
|
nion = 0
|
||||||
for cl in classes:
|
for cl in classes:
|
||||||
ions = map(int, re.findall(patt2, cl))
|
ions = list(map(int, re.findall(patt2, cl)))
|
||||||
ion_list.append([ion - 1 for ion in ions])
|
ion_list.append([ion - 1 for ion in ions])
|
||||||
nion += len(ions)
|
nion += len(ions)
|
||||||
|
|
||||||
@ -218,7 +218,7 @@ class ConfigParameters:
|
|||||||
Energy window is given by two floats, with the first one being smaller
|
Energy window is given by two floats, with the first one being smaller
|
||||||
than the second one.
|
than the second one.
|
||||||
"""
|
"""
|
||||||
ftmp = map(float, par_str.split())
|
ftmp = list(map(float, par_str.split()))
|
||||||
assert len(ftmp) == 2, "EWINDOW must be specified by exactly two floats"
|
assert len(ftmp) == 2, "EWINDOW must be specified by exactly two floats"
|
||||||
assert ftmp[0] < ftmp[1], "The first float in EWINDOW must be smaller than the second one"
|
assert ftmp[0] < ftmp[1], "The first float in EWINDOW must be smaller than the second one"
|
||||||
return tuple(ftmp)
|
return tuple(ftmp)
|
||||||
@ -233,7 +233,7 @@ class ConfigParameters:
|
|||||||
Band window is given by two ints, with the first one being smaller
|
Band window is given by two ints, with the first one being smaller
|
||||||
than the second one.
|
than the second one.
|
||||||
"""
|
"""
|
||||||
ftmp = map(int, par_str.split())
|
ftmp = list(map(int, par_str.split()))
|
||||||
assert len(ftmp) == 2, "BANDS must be specified by exactly two ints"
|
assert len(ftmp) == 2, "BANDS must be specified by exactly two ints"
|
||||||
assert ftmp[0] < ftmp[1], "The first int in BANDS must be smaller than the second one"
|
assert ftmp[0] < ftmp[1], "The first int in BANDS must be smaller than the second one"
|
||||||
return tuple(ftmp)
|
return tuple(ftmp)
|
||||||
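`map()` now returns a one-shot iterator, so every place that takes a `len()`, indexes, or sorts the result needs the `list(...)` materialization::

    par_str = '1 5'
    ftmp = list(map(int, par_str.split()))
    assert len(ftmp) == 2     # len() raises TypeError on a bare map object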
@ -250,7 +250,7 @@ class ConfigParameters:
|
|||||||
"""
|
"""
|
||||||
str_rows = par_str.split('\n')
|
str_rows = par_str.split('\n')
|
||||||
try:
|
try:
|
||||||
rows = [map(float, s.split()) for s in str_rows]
|
rows = [list(map(float, s.split())) for s in str_rows]
|
||||||
except ValueError:
|
except ValueError:
|
||||||
err_mess = "Cannot parse a matrix string:\n%s"%(par_str)
|
err_mess = "Cannot parse a matrix string:\n%s"%(par_str)
|
||||||
raise ValueError(err_mess)
|
raise ValueError(err_mess)
|
||||||
@ -266,7 +266,7 @@ class ConfigParameters:
|
|||||||
mat = np.array(rows)
|
mat = np.array(rows)
|
||||||
else:
|
else:
|
||||||
err_mess = "Complex matrix must contain 2*M values:\n%s"%(par_str)
|
err_mess = "Complex matrix must contain 2*M values:\n%s"%(par_str)
|
||||||
assert 2 * (nm / 2) == nm, err_mess
|
assert 2 * (nm // 2) == nm, err_mess
|
||||||
|
|
||||||
tmp = np.array(rows, dtype=np.complex128)
|
tmp = np.array(rows, dtype=np.complex128)
|
||||||
mat = tmp[:, 0::2] + 1.0j * tmp[:, 1::2]
|
mat = tmp[:, 0::2] + 1.0j * tmp[:, 1::2]
|
||||||
@ -339,11 +339,11 @@ class ConfigParameters:
|
|||||||
For required parameters `exception=True` must be set.
|
For required parameters `exception=True` must be set.
|
||||||
"""
|
"""
|
||||||
parsed = {}
|
parsed = {}
|
||||||
for par in param_set.keys():
|
for par in list(param_set.keys()):
|
||||||
key = param_set[par][0]
|
key = param_set[par][0]
|
||||||
try:
|
try:
|
||||||
par_str = self.cp.get(section, par)
|
par_str = self.cp.get(section, par)
|
||||||
except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
|
except (configparser.NoOptionError, configparser.NoSectionError):
|
||||||
if exception:
|
if exception:
|
||||||
message = "Required parameter '%s' not found in section [%s]"%(par, section)
|
message = "Required parameter '%s' not found in section [%s]"%(par, section)
|
||||||
raise Exception(message)
|
raise Exception(message)
|
||||||
@ -354,7 +354,7 @@ class ConfigParameters:
|
|||||||
continue
|
continue
|
||||||
|
|
||||||
if self.verbosity > 0:
|
if self.verbosity > 0:
|
||||||
print " %s = %s"%(par, par_str)
|
print(" %s = %s"%(par, par_str))
|
||||||
|
|
||||||
parse_fun = param_set[par][1]
|
parse_fun = param_set[par][1]
|
||||||
parsed[key] = parse_fun(par_str)
|
parsed[key] = parse_fun(par_str)
|
||||||
@@ -376,27 +376,27 @@ class ConfigParameters:
 sections = self.cp.sections()

 sh_patt1 = re.compile('shell +.*', re.IGNORECASE)
-sec_shells = filter(sh_patt1.match, sections)
+sec_shells = list(filter(sh_patt1.match, sections))

 self.nshells = len(sec_shells)
 assert self.nshells > 0, "No projected shells found in the input file"

 if self.verbosity > 0:
-print
+print()
 if self.nshells > 1:
-print " Found %i projected shells"%(self.nshells)
+print(" Found %i projected shells"%(self.nshells))
 else:
-print " Found 1 projected shell"
+print(" Found 1 projected shell")

 # Get shell indices
 sh_patt2 = re.compile('shell +([0-9]*)$', re.IGNORECASE)
 try:
 get_ind = lambda s: int(sh_patt2.match(s).groups()[0])
-sh_inds = map(get_ind, sec_shells)
+sh_inds = list(map(get_ind, sec_shells))
 except (ValueError, AttributeError):
 raise ValueError("Failed to extract shell indices from a list: %s"%(sec_shells))

-self.sh_sections = {ind: sec for ind, sec in it.izip(sh_inds, sec_shells)}
+self.sh_sections = {ind: sec for ind, sec in zip(sh_inds, sec_shells)}

 # Check that all indices are unique
 # In principle redundant because the list of sections will contain only unique names
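The `list(...)` wrappers in this hunk are needed because `map`, `filter`, and `zip` return lazy iterators in Python 3 rather than lists (`itertools.izip` disappears because `zip` itself is now lazy). An iterator can be consumed only once and has no `len()`, so code that later calls `len(sec_shells)` or sorts `sh_inds` must materialize a list first. A small standalone illustration:

```python
sections = ["Shell 1", "Shell 2", "General"]
shells = filter(lambda s: s.startswith("Shell"), sections)
# len(shells) would raise TypeError: object of type 'filter' has no len()
shells = list(shells)          # materialize once, reuse many times
print(len(shells))             # 2
inds = list(map(lambda s: int(s.split()[1]), shells))
print(dict(zip(inds, shells))) # {1: 'Shell 1', 2: 'Shell 2'}
```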
@@ -405,7 +405,7 @@ class ConfigParameters:
 # Ideally, indices should run from 1 to <nshells>
 # If it's not the case, issue a warning
 sh_inds.sort()
-if sh_inds != range(1, len(sh_inds) + 1):
+if sh_inds != list(range(1, len(sh_inds) + 1)):
 issue_warning("Shell indices are not uniform or not starting from 1. "
 "This might be an indication of a incorrect setup.")

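The `list(range(...))` wrapper here fixes a subtle semantic change rather than a crash: in Python 2, `range()` returned a list, so the comparison was list-to-list; in Python 3, `range()` is a lazy sequence object that never compares equal to a list, so without the wrapper this warning would fire on every input, valid or not. For example:

```python
sh_inds = [1, 2, 3]
print(sh_inds == range(1, 4))        # False in Python 3: list vs range object
print(sh_inds == list(range(1, 4)))  # True: compares element by element
```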
@@ -418,8 +418,8 @@ class ConfigParameters:
 section = self.sh_sections[ind]

 if self.verbosity > 0:
-print
-print " Shell parameters:"
+print()
+print(" Shell parameters:")
 # Shell required parameters
 parsed = self.parse_parameter_set(section, self.sh_required, exception=True)
 shell.update(parsed)
@@ -453,7 +453,7 @@ class ConfigParameters:
 sections = self.cp.sections()

 gr_patt = re.compile('group +(.*)', re.IGNORECASE)
-sec_groups = filter(gr_patt.match, sections)
+sec_groups = list(filter(gr_patt.match, sections))

 self.ngroups = len(sec_groups)

@@ -471,8 +471,8 @@ class ConfigParameters:
 group['index'] = gr_ind

 if self.verbosity > 0:
-print
-print " Group parameters:"
+print()
+print(" Group parameters:")
 # Group required parameters
 parsed = self.parse_parameter_set(section, self.gr_required, exception=True)
 group.update(parsed)
@@ -514,18 +514,18 @@ class ConfigParameters:
 sh_gr_required = dict(self.gr_required)
 sh_gr_required.pop('shells')
 try:
-for par in sh_gr_required.keys():
+for par in list(sh_gr_required.keys()):
 key = sh_gr_required[par][0]
 value = self.shells[0].pop(key)
 self.groups[0][key] = value
 except KeyError:
 message = "One [Shell] section is specified but no explicit [Group] section is provided."
 message += " In this case the [Shell] section must contain all required group information.\n"
-message += " Required parameters are: %s"%(sh_gr_required.keys())
+message += " Required parameters are: %s"%(list(sh_gr_required.keys()))
 raise KeyError(message)

 # Do the same for optional group parameters, but do not raise an exception
-for par in self.gr_optional.keys():
+for par in list(self.gr_optional.keys()):
 try:
 key = self.gr_optional[par][0]
 value = self.shells[0].pop(key)
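`dict.keys()` returns a live view in Python 3, and iterating a view while the dictionary changes size raises `RuntimeError`. The loops in this file mostly mutate a different dict (`shell` or `self.shells[0]`), so the `list(...)` copies are largely 2to3-style caution, but the one inside the error message also makes `%s` render as a plain list instead of `dict_keys([...])`. A small illustration:

```python
d = {"emin": -8.0, "emax": 4.0}
print("keys: %s" % d.keys())        # keys: dict_keys(['emin', 'emax'])
print("keys: %s" % list(d.keys()))  # keys: ['emin', 'emax']

# Mutating while iterating the live view raises RuntimeError;
# the list() copy makes deletion safe:
for k in list(d.keys()):
    d.pop(k)
print(d)                            # {}
```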
@@ -562,7 +562,7 @@ class ConfigParameters:
 # remove them and issue a warning.
 #
 # First, required group parameters
-for par in self.gr_required.keys():
+for par in list(self.gr_required.keys()):
 try:
 key = self.gr_required[par][0]
 value = shell.pop(key)
@@ -573,7 +573,7 @@ class ConfigParameters:
 continue

 # Second, optional group parameters
-for par in self.gr_optional.keys():
+for par in list(self.gr_optional.keys()):
 try:
 key = self.gr_optional[par][0]
 value = shell.pop(key)
@@ -591,7 +591,7 @@ class ConfigParameters:
 sh_refs_used.sort()

 # Check that all shells are referenced in the groups
-assert sh_refs_used == range(self.nshells), "Some shells are not inside any of the groups"
+assert sh_refs_used == list(range(self.nshells)), "Some shells are not inside any of the groups"


 ################################################################################
@@ -605,7 +605,7 @@ class ConfigParameters:
 """
 self.general = {}
 sections = self.cp.sections()
-gen_section = filter(lambda s: s.lower() == 'general', sections)
+gen_section = [s for s in sections if s.lower() == 'general']
 # If no [General] section is found parse a dummy section name to the parser
 # to reset parameters to their default values
 if len(gen_section) > 1:
@@ -55,9 +55,9 @@ r"""
 """
 import itertools as it
 import numpy as np
-from proj_group import ProjectorGroup
-from proj_shell import ProjectorShell
-from proj_shell import ComplementShell
+from .proj_group import ProjectorGroup
+from .proj_shell import ProjectorShell
+from .proj_shell import ComplementShell

 np.set_printoptions(suppress=True)

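Python 3 removed implicit relative imports: `from proj_group import ...` inside a package now searches only `sys.path`, so intra-package imports need the explicit dot form. A sketch of the pattern; the package name `plovasp` is assumed for illustration and the only executable statements are safe string resolution:

```python
# How the two forms differ inside a package (names assumed, not project code):
#
#   from proj_group import ProjectorGroup    # Python 2 implicit relative import
#   from .proj_group import ProjectorGroup   # Python 3 explicit relative import
#
# The dotted form is resolved against the importing module's package,
# not against sys.path:
import importlib.util
print(importlib.util.resolve_name(".proj_group", package="plovasp"))
# -> 'plovasp.proj_group'
```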
@@ -71,9 +71,9 @@ def issue_warning(message):
 """
 Issues a warning.
 """
-print
-print " !!! WARNING !!!: " + message
-print
+print()
+print(" !!! WARNING !!!: " + message)
+print()

 ################################################################################
 # check_data_consistency()
@@ -129,18 +129,18 @@ def generate_plo(conf_pars, el_struct):
 # check if at least one shell is correlated
 assert np.any([shell['corr'] for shell in conf_pars.shells]), 'at least one shell has be CORR = True'
 nshell = len(conf_pars.shells)
-print
-print " Generating %i shell%s..."%(nshell, '' if nshell == 1 else 's')
+print()
+print(" Generating %i shell%s..."%(nshell, '' if nshell == 1 else 's'))
 pshells = []
 for sh_par in conf_pars.shells:
 pshell = ProjectorShell(sh_par, proj_raw, el_struct.proj_params, el_struct.kmesh, el_struct.structure, el_struct.nc_flag)
-print
-print " Shell : %s"%(pshell.user_index)
-print " Orbital l : %i"%(pshell.lorb)
-print " Number of ions: %i"%(pshell.nion)
-print " Dimension : %i"%(pshell.ndim)
-print " Correlated : %r"%(pshell.corr)
-print " Ion sort : %r"%(pshell.ion_sort)
+print()
+print(" Shell : %s"%(pshell.user_index))
+print(" Orbital l : %i"%(pshell.lorb))
+print(" Number of ions: %i"%(pshell.nion))
+print(" Dimension : %i"%(pshell.ndim))
+print(" Correlated : %r"%(pshell.corr))
+print(" Ion sort : %r"%(pshell.ion_sort))
 pshells.append(pshell)


@@ -153,49 +153,49 @@ def generate_plo(conf_pars, el_struct):
 if conf_pars.general['hk']:
 pgroup.calc_hk(eigvals)
 #testout = 'hk.out.h5'
-#from pytriqs.archive import HDFArchive
+#from h5 import HDFArchive
 #with HDFArchive(testout, 'w') as h5test:
 # h5test['hk'] = pgroup.hk
 # DEBUG output
-print "Density matrix:"
+print("Density matrix:")
 nimp = 0.0
 ov_all = []
 for ish in pgroup.ishells:
 if not isinstance(pshells[pgroup.ishells[ish]],ComplementShell):
-print " Shell %i"%(ish + 1)
+print(" Shell %i"%(ish + 1))
 dm_all, ov_all_ = pshells[ish].density_matrix(el_struct)
 ov_all.append(ov_all_[0])
 spin_fac = 2 if dm_all.shape[0] == 1 else 1
-for io in xrange(dm_all.shape[1]):
-print " Site %i"%(io + 1)
+for io in range(dm_all.shape[1]):
+print(" Site %i"%(io + 1))
 dm = spin_fac * dm_all[:, io, : ,:].sum(0)
 for row in dm:
-print ''.join(map("{0:14.7f}".format, row))
+print(''.join(map("{0:14.7f}".format, row)))
 ndm = dm.trace()
 if pshells[ish].corr:
 nimp += ndm
-print " trace: ", ndm
-print
-print " Impurity density:", nimp
-print
-print "Overlap:"
+print(" trace: ", ndm)
+print()
+print(" Impurity density:", nimp)
+print()
+print("Overlap:")
 for io, ov in enumerate(ov_all):
-print " Site %i"%(io + 1)
-print ov[0,...]
-print
-print "Local Hamiltonian:"
+print(" Site %i"%(io + 1))
+print(ov[0,...])
+print()
+print("Local Hamiltonian:")
 for ish in pgroup.ishells:
 if not isinstance(pshells[pgroup.ishells[ish]],ComplementShell):
-print " Shell %i"%(ish + 1)
+print(" Shell %i"%(ish + 1))
 loc_ham = pshells[pgroup.ishells[ish]].local_hamiltonian(el_struct)
-for io in xrange(loc_ham.shape[1]):
-print " Site %i (real | complex part)"%(io + 1)
+for io in range(loc_ham.shape[1]):
+print(" Site %i (real | complex part)"%(io + 1))
 for row in loc_ham[:, io, :, :].sum(0):
-print ''.join(map("{0:14.7f}".format, row.real))+' |'+''.join(map("{0:14.7f}".format, row.imag))
+print(''.join(map("{0:14.7f}".format, row.real))+' |'+''.join(map("{0:14.7f}".format, row.imag)))
 # END DEBUG output
 if 'dosmesh' in conf_pars.general:
-print
-print "Evaluating DOS..."
+print()
+print("Evaluating DOS...")
 mesh_pars = conf_pars.general['dosmesh']
 if np.isnan(mesh_pars['emin']):
 dos_emin = pgroup.emin
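Two things happen in this hunk besides print conversions: every `xrange` becomes `range` (Python 3 dropped `xrange`; `range` is now itself a lazy, re-iterable sequence, so these loops behave identically with no list allocation), and the commented-out debug import moves from `pytriqs.archive` to the `h5` module, in line with the TRIQS 3 package renaming visible elsewhere in this commit. A quick check of the `range` behaviour:

```python
nk = 4
ks = range(nk)           # lazy sequence, like Python 2's xrange
print(list(ks))          # [0, 1, 2, 3]
print(list(ks))          # [0, 1, 2, 3] -- unlike an iterator, range re-iterates
print(2 in ks, len(ks))  # True 4 -- supports membership tests and len()
```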
@@ -208,12 +208,12 @@ def generate_plo(conf_pars, el_struct):
 emesh = np.linspace(dos_emin, dos_emax, n_points)
 for ish in pgroup.ishells:
 if not isinstance(pshells[pgroup.ishells[ish]],ComplementShell) or True:
-print " Shell %i"%(ish + 1)
+print(" Shell %i"%(ish + 1))
 dos = pshells[pgroup.ishells[ish]].density_of_states(el_struct, emesh)
 de = emesh[1] - emesh[0]
 ntot = (dos[1:,...] + dos[:-1,...]).sum(0) / 2 * de
-print " Total number of states:", ntot
-for io in xrange(dos.shape[2]):
+print(" Total number of states:", ntot)
+for io in range(dos.shape[2]):
 np.savetxt('pdos_%i_%i.dat'%(ish,io), np.vstack((emesh.T, dos[:, 0, io, :].T)).T)

 pgroups.append(pgroup)
@@ -254,7 +254,7 @@ def kpoints_output(basename, el_struct):
 f.write("%i\n"%(nktot))
 # TODO: add the output of reciprocal lattice vectors
 f.write("# List of k-points with weights\n")
-for ik in xrange(nktot):
+for ik in range(nktot):
 kx, ky, kz = kmesh['kpoints'][ik, :]
 kwght = kmesh['kweights'][ik]
 f.write("%15.10f%15.10f%15.10f%20.10f\n"%(kx, ky, kz, kwght))
@@ -266,7 +266,7 @@ def kpoints_output(basename, el_struct):
 f.write("\n# Number of tetrahedra and volume: ntet, volt\n")
 f.write("%i %s\n"%(ntet, volt))
 f.write("# List of tetrahedra: imult, ik1, ..., ik4\n")
-for it in xrange(ntet):
+for it in range(ntet):
 f.write(' '.join(map("{0:d}".format, *kmesh['itet'][it, :])) + '\n')
 except KeyError:
 pass
@@ -315,14 +315,14 @@ def ctrl_output(conf_pars, el_struct, ng):

 header = json.dumps(head_dict, indent=4, separators=(',', ': '))

-print " Storing ctrl-file..."
+print(" Storing ctrl-file...")
 with open(ctrl_fname, 'wt') as f:
 f.write(header + "\n")
 f.write("#END OF HEADER\n")

 f.write("# k-points and weights\n")
 labels = ['kx', 'ky', 'kz', 'kweight']
-out = "".join(map(lambda s: s.center(15), labels))
+out = "".join([s.center(15) for s in labels])
 f.write("#" + out + "\n")
 for ik, kp in enumerate(el_struct.kmesh['kpoints']):
 tmp1 = "".join(map("{0:15.10f}".format, kp))
@@ -330,7 +330,7 @@ def ctrl_output(conf_pars, el_struct, ng):
 f.write(out + "\n")
 f.write("# k-points and weights cartesian\n")
 labels = ['kx', 'ky', 'kz']
-out = "".join(map(lambda s: s.center(15), labels))
+out = "".join([s.center(15) for s in labels])
 f.write("#" + out + "\n")
 for ik, kp in enumerate(el_struct.kmesh['kpoints_cart']):
 out = "".join(map("{0:15.10f}".format, kp))
@@ -381,7 +381,7 @@ def plo_output(conf_pars, el_struct, pshells, pgroups):
 """
 for ig, pgroup in enumerate(pgroups):
 plo_fname = conf_pars.general['basename'] + '.pg%i'%(ig + 1)
-print " Storing PLO-group file '%s'..."%(plo_fname)
+print(" Storing PLO-group file '%s'..."%(plo_fname))
 head_dict = {}


@@ -394,7 +394,7 @@ def plo_output(conf_pars, el_struct, pshells, pgroups):

 # Number of electrons within the window
 head_dict['nelect'] = pgroup.nelect_window(el_struct)
-print " Density within window:", head_dict['nelect']
+print(" Density within window:", head_dict['nelect'])

 head_shells = []
 for ish in pgroup.ishells:
@@ -430,13 +430,13 @@ def plo_output(conf_pars, el_struct, pshells, pgroups):
 f.write("# Eigenvalues within the energy window: %s, %s\n"%(pgroup.emin, pgroup.emax))

 nk, nband, ns_band = el_struct.eigvals.shape
-for isp in xrange(ns_band):
+for isp in range(ns_band):
 f.write("# is = %i\n"%(isp + 1))
-for ik in xrange(nk):
+for ik in range(nk):
 ib1, ib2 = pgroup.ib_win[ik, isp, 0], pgroup.ib_win[ik, isp, 1]
 # Output band indices in Fortran convention!
 f.write(" %i %i\n"%(ib1 + 1, ib2 + 1))
-for ib in xrange(ib1, ib2 + 1):
+for ib in range(ib1, ib2 + 1):
 eigv_ef = el_struct.eigvals[ik, ib, isp] - el_struct.efermi
 f_weight = el_struct.ferw[isp, ik, ib]
 f.write("%13.8f %12.7f\n"%(eigv_ef, f_weight))
@@ -449,15 +449,15 @@ def plo_output(conf_pars, el_struct, pshells, pgroups):
 f.write("# Shell %i\n"%(ish))

 nion, ns, nk, nlm, nb = shell.proj_win.shape
-for isp in xrange(ns):
+for isp in range(ns):
 f.write("# is = %i\n"%(isp + 1))
-for ik in xrange(nk):
+for ik in range(nk):
 f.write("# ik = %i\n"%(ik + 1))
-for ion in xrange(nion):
-for ilm in xrange(nlm):
+for ion in range(nion):
+for ilm in range(nlm):
 ib1, ib2 = pgroup.ib_win[ik, isp, 0], pgroup.ib_win[ik, isp, 1]
 ib_win = ib2 - ib1 + 1
-for ib in xrange(ib_win):
+for ib in range(ib_win):
 p = shell.proj_win[ion, isp, ik, ilm, ib]
 f.write("{0:16.10f}{1:16.10f}\n".format(p.real, p.imag))
 f.write("\n")
@@ -494,7 +494,7 @@ def hk_output(conf_pars, el_struct, pgroups):
 for ig, pgroup in enumerate(pgroups):

 hk_fname = conf_pars.general['basename'] + '.hk%i'%(ig + 1)
-print " Storing HK-group file '%s'..."%(hk_fname)
+print(" Storing HK-group file '%s'..."%(hk_fname))

 head_shells = []
 for ish in pgroup.ishells:
@@ -528,13 +528,13 @@ def hk_output(conf_pars, el_struct, pgroups):
 f.write('%i %i %i %i # atom sort l dim\n'%(head['ion_list'][0],head['ion_sort'][0],head['lorb'],head['ndim']))

 norbs = pgroup.hk.shape[2]
-for isp in xrange(ns_band):
-for ik in xrange(nk):
-for io in xrange(norbs):
-for iop in xrange(norbs):
+for isp in range(ns_band):
+for ik in range(nk):
+for io in range(norbs):
+for iop in range(norbs):
 f.write(" {0:14.10f}".format(pgroup.hk[isp,ik,io,iop].real))
 f.write("\n")
-for io in xrange(norbs):
-for iop in xrange(norbs):
+for io in range(norbs):
+for iop in range(norbs):
 f.write(" {0:14.10f}".format(pgroup.hk[isp,ik,io,iop].imag))
 f.write("\n")
@@ -30,7 +30,7 @@ r"""
 Storage and manipulation of projector groups.
 """
 import numpy as np
-from proj_shell import ComplementShell
+from .proj_shell import ComplementShell
 np.set_printoptions(suppress=True)

 ################################################################################
@@ -89,8 +89,8 @@ class ProjectorGroup:
 assert np.all( n_bands == n_bands[0,0] ), "At each band the same number of bands has to be selected for calculating the complement (to end up with an equal number of orbitals at each k-point)."
 if n_orbs == n_bands[0,0]:
 self.complement = False
-print "\nWARNING: The total number of orbitals in this group is "
-print "equal to the number of bands. Setting COMPLEMENT to FALSE!\n"
+print("\nWARNING: The total number of orbitals in this group is ")
+print("equal to the number of bands. Setting COMPLEMENT to FALSE!\n")


 # Select projectors within the energy window
@@ -112,8 +112,8 @@ class ProjectorGroup:
 self.nelect = 0
 nk, ns_band, _ = self.ib_win.shape
 rspin = 2.0 if ns_band == 1 else 1.0
-for isp in xrange(ns_band):
-for ik in xrange(nk):
+for isp in range(ns_band):
+for ik in range(nk):
 ib1 = self.ib_win[ik, isp, 0]
 ib2 = self.ib_win[ik, isp, 1]+1
 occ = el_struct.ferw[isp, ik, ib1:ib2]
@@ -154,8 +154,8 @@ class ProjectorGroup:
 _, ns, nk, _, _ = self.shells[0].proj_win.shape
 p_mat = np.zeros((ndim, self.nb_max), dtype=np.complex128)
 # Note that 'ns' and 'nk' are the same for all shells
-for isp in xrange(ns):
-for ik in xrange(nk):
+for isp in range(ns):
+for ik in range(nk):
 nb = self.ib_win[ik, isp, 1] - self.ib_win[ik, isp, 0] + 1
 # Combine all projectors of the group to one block projector
 for bl_map in block_maps:
@@ -203,8 +203,8 @@ class ProjectorGroup:

 self.hk = np.zeros((ns,nk,ndim,ndim), dtype=np.complex128)
 # Note that 'ns' and 'nk' are the same for all shells
-for isp in xrange(ns):
-for ik in xrange(nk):
+for isp in range(ns):
+for ik in range(nk):
 bmin = self.ib_win[ik, isp, 0]
 bmax = self.ib_win[ik, isp, 1]+1

@@ -247,7 +247,7 @@ class ProjectorGroup:

 """

-print '\nCalculating complement\n'
+print('\nCalculating complement\n')

 block_maps, ndim = self.get_block_matrix_map()
 _, ns, nk, _, _ = self.shells[0].proj_win.shape
@@ -257,8 +257,8 @@ class ProjectorGroup:
 # Note that 'ns' and 'nk' are the same for all shells


-for isp in xrange(ns):
-for ik in xrange(nk):
+for isp in range(ns):
+for ik in range(nk):
 bmin = self.ib_win[ik, isp, 0]
 bmax = self.ib_win[ik, isp, 1]+1

@@ -335,7 +335,7 @@ class ProjectorGroup:
 1. Orthogonality is ensured on each site (NORMION = True).
 For each site 'ion' we have the following mapping:

-block_maps = [bl_map[ion] for ion in xrange(shell.nion)
+block_maps = [bl_map[ion] for ion in range(shell.nion)
 for shell in shells]

 bl_map = [((i1_start, i1_end), (i1_shell, ion)),
@@ -362,7 +362,7 @@ class ProjectorGroup:
 _shell = self.shells[ish]
 nion, ns, nk, nlm, nb_max = _shell.proj_win.shape
 ndim = max(ndim, nlm)
-for ion in xrange(nion):
+for ion in range(nion):
 i1_bl = 0
 i2_bl = nlm
 block = {'bmat_range': (i1_bl, i2_bl)}
@@ -378,7 +378,7 @@ class ProjectorGroup:
 for ish in self.ishells:
 _shell = self.shells[ish]
 nion, ns, nk, nlm, nb_max = _shell.proj_win.shape
-for ion in xrange(nion):
+for ion in range(nion):
 i2_bl = i1_bl + nlm
 block = {'bmat_range': (i1_bl, i2_bl)}
 block['shell_ion'] = (ish, ion)
@@ -456,14 +456,14 @@ class ProjectorGroup:

 ib_min = 10000000
 ib_max = 0
-for isp in xrange(ns_band):
-for ik in xrange(nk):
-for ib in xrange(nband):
+for isp in range(ns_band):
+for ik in range(nk):
+for ib in range(nband):
 en = eigvals[ik, ib, isp]
 if en >= self.emin:
 break
 ib1 = ib
-for ib in xrange(ib1, nband):
+for ib in range(ib1, nband):
 en = eigvals[ik, ib, isp]
 if en > self.emax:
 break