Squash app4triqs/3.1.x to reduce skeleton history when tracking
Co-authored-by: Dylan Simon <dylan@dylex.net>
Co-authored-by: Alexander Hampel <ahampel@flatironinstitute.org>
45
.clang-format
Normal file
@@ -0,0 +1,45 @@
BasedOnStyle: LLVM

AccessModifierOffset: 0
AlignAfterOpenBracket: Align
AlignConsecutiveAssignments: true
AlignConsecutiveDeclarations: false
AlignEscapedNewlinesLeft: false
AlignOperands: false
AlignTrailingComments: true
AllowAllParametersOfDeclarationOnNextLine: false
AllowShortBlocksOnASingleLine: true
AllowShortCaseLabelsOnASingleLine: true
AllowShortFunctionsOnASingleLine: All
AllowShortIfStatementsOnASingleLine: true
AllowShortLoopsOnASingleLine: true
AlwaysBreakBeforeMultilineStrings: true
AlwaysBreakTemplateDeclarations: false
BinPackArguments: true
BinPackParameters: true
BreakBeforeBinaryOperators: NonAssignment
BreakBeforeBraces: Attach
BreakBeforeTernaryOperators: false
BreakConstructorInitializersBeforeComma: false
BreakStringLiterals: false
ColumnLimit: 150
ConstructorInitializerAllOnOneLineOrOnePerLine: true
ConstructorInitializerIndentWidth: 3
ContinuationIndentWidth: 3
Cpp11BracedListStyle: true
DerivePointerBinding : false
IndentCaseLabels: true
IndentWidth: 2
Language: Cpp
MaxEmptyLinesToKeep: 1
NamespaceIndentation : All
PointerAlignment: Right
ReflowComments: false
SortIncludes: false
SpaceAfterControlStatementKeyword: true
SpaceBeforeAssignmentOperators: true
SpaceInEmptyParentheses: false
SpacesInParentheses: false
Standard: Cpp11
TabWidth: 2
UseTab: Never
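The .clang-format file above pins the C++ coding style for the skeleton. As a usage sketch only (not part of the commit itself): assuming clang-format is installed and sources live under c++/ and test/ as laid out later in this commit, the style can be applied in place from the repository root.

```bash
# Usage sketch, not part of the commit: reformat all C++ sources in place
# using the .clang-format above (paths follow the repository layout).
find c++ test \( -name '*.cpp' -o -name '*.hpp' \) -print0 | xargs -0 clang-format -i
```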
2
.clang-tidy
Normal file
@@ -0,0 +1,2 @@
Checks: '-*,modernize-*,cppcoreguidelines-*,-modernize-use-trailing-return-type'
HeaderFilterRegex: 'app4triqs'
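The two-line .clang-tidy configuration enables the modernize-* and cppcoreguidelines-* check groups (minus modernize-use-trailing-return-type) and restricts header diagnostics to app4triqs headers. A hedged usage sketch — the source path is only illustrative, and -p points at a build directory containing the compile_commands.json that the CMake configuration in this commit exports:

```bash
# Usage sketch: run the configured checks on one translation unit.
# The file path is an example; substitute an actual source file.
clang-tidy -p build c++/app4triqs/app4triqs.cpp
```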
5
.dockerignore
Normal file
@@ -0,0 +1,5 @@
.travis.yml
Dockerfile
Jenkinsfile
.git/objects/pack
build*
45
.github/ISSUE_TEMPLATE/bug.md
vendored
Normal file
@@ -0,0 +1,45 @@
---
name: Bug report
about: Create a report to help us improve
title: Bug report
labels: bug

---

### Prerequisites

* Please check that a similar issue isn't already filed: https://github.com/issues?q=is%3Aissue+user%3Atriqs

### Description

[Description of the issue]

### Steps to Reproduce

1. [First Step]
2. [Second Step]
3. [and so on...]

or paste a minimal code example to reproduce the issue.

**Expected behavior:** [What you expect to happen]

**Actual behavior:** [What actually happens]

### Versions

Please provide the application version that you used.

You can get this information by copying and pasting the output of

```bash
python -c "from app4triqs.version import *; show_version(); show_git_hash();"
```

from the command line. Also, please include the OS you are running and its version.

### Formatting

Please use markdown in your issue message. A useful summary of commands can be found [here](https://guides.github.com/pdfs/markdown-cheatsheet-online.pdf).

### Additional Information

Any additional information, configuration or data that might be necessary to reproduce the issue.
23
.github/ISSUE_TEMPLATE/feature.md
vendored
Normal file
@@ -0,0 +1,23 @@
---
name: Feature request
about: Suggest an idea for this project
title: Feature request
labels: feature

---

### Summary

One paragraph explanation of the feature.

### Motivation

Why is this feature of general interest?

### Implementation

What user interface do you suggest?

### Formatting

Please use markdown in your issue message. A useful summary of commands can be found [here](https://guides.github.com/pdfs/markdown-cheatsheet-online.pdf).
97
.github/workflows/build.yml
vendored
Normal file
@@ -0,0 +1,97 @@
name: build

on:
  push:
    branches: [ unstable ]
  pull_request:
    branches: [ unstable ]

jobs:
  build:

    strategy:
      fail-fast: false
      matrix:
        include:
          - {os: ubuntu-20.04, cc: gcc-10, cxx: g++-10}
          - {os: ubuntu-20.04, cc: clang-13, cxx: clang++-13}
          - {os: macos-11, cc: gcc-11, cxx: g++-11}
          - {os: macos-11, cc: /usr/local/opt/llvm/bin/clang, cxx: /usr/local/opt/llvm/bin/clang++}

    runs-on: ${{ matrix.os }}

    steps:
    - uses: actions/checkout@v2

    - name: Install ubuntu dependencies
      if: matrix.os == 'ubuntu-20.04'
      run: >
        sudo apt-get update &&
        sudo apt-get install lsb-release wget software-properties-common &&
        wget -O /tmp/llvm.sh https://apt.llvm.org/llvm.sh && sudo chmod +x /tmp/llvm.sh && sudo /tmp/llvm.sh 13 &&
        sudo apt-get install
        clang-13
        g++-10
        gfortran
        hdf5-tools
        libblas-dev
        libboost-dev
        libclang-13-dev
        libc++-13-dev
        libc++abi-13-dev
        libomp-13-dev
        libfftw3-dev
        libgfortran5
        libgmp-dev
        libhdf5-dev
        liblapack-dev
        libopenmpi-dev
        openmpi-bin
        openmpi-common
        openmpi-doc
        python3-clang-13
        python3-dev
        python3-mako
        python3-matplotlib
        python3-mpi4py
        python3-numpy
        python3-pip
        python3-scipy
        python3-sphinx
        python3-nbsphinx

    - name: Install homebrew dependencies
      if: matrix.os == 'macos-11'
      run: |
        brew install gcc@11 llvm boost fftw hdf5 open-mpi openblas
        pip3 install mako numpy scipy mpi4py
        pip3 install -r requirements.txt

    - name: Build & Install TRIQS
      env:
        CC: ${{ matrix.cc }}
        CXX: ${{ matrix.cxx }}
      run: |
        git clone https://github.com/TRIQS/triqs --branch unstable
        mkdir triqs/build && cd triqs/build
        cmake .. -DBuild_Tests=OFF -DCMAKE_INSTALL_PREFIX=$HOME/install
        make -j1 install VERBOSE=1
        cd ../

    - name: Build app4triqs
      env:
        CC: ${{ matrix.cc }}
        CXX: ${{ matrix.cxx }}
        LIBRARY_PATH: /usr/local/opt/llvm/lib
      run: |
        source $HOME/install/share/triqs/triqsvars.sh
        mkdir build && cd build && cmake ..
        make -j2 || make -j1 VERBOSE=1

    - name: Test app4triqs
      env:
        DYLD_FALLBACK_LIBRARY_PATH: /usr/local/opt/llvm/lib
      run: |
        source $HOME/install/share/triqs/triqsvars.sh
        cd build
        ctest -j2 --output-on-failure
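The workflow builds TRIQS from its unstable branch into $HOME/install and then configures app4triqs against it via triqsvars.sh. The same sequence can be reproduced outside CI; a sketch assuming TRIQS has already been installed to $HOME/install as in the workflow above:

```bash
# Local equivalent of the CI build/test steps, assuming a TRIQS install
# in $HOME/install (the prefix used by the workflow above).
source $HOME/install/share/triqs/triqsvars.sh
mkdir -p build && cd build
cmake ..
make -j2
ctest --output-on-failure
```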
2
.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
compile_commands.json
doc/cpp2rst_generated
172
CMakeLists.txt
Normal file
@@ -0,0 +1,172 @@
# ##############################################################################
#
# app4triqs - An example application using triqs and cpp2py
#
# Copyright (C) ...
#
# app4triqs is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# app4triqs is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# app4triqs (in the file COPYING.txt in this directory). If not, see
# <http://www.gnu.org/licenses/>.
#
# ##############################################################################

cmake_minimum_required(VERSION 3.12.4 FATAL_ERROR)
cmake_policy(VERSION 3.12.4)
if(POLICY CMP0077)
  cmake_policy(SET CMP0077 NEW)
endif()

# ############
# Define Project
project(app4triqs VERSION 3.1.0 LANGUAGES C CXX)
get_directory_property(IS_SUBPROJECT PARENT_DIRECTORY)

# ############
# Load TRIQS and CPP2PY
find_package(TRIQS 3.1 REQUIRED)

# Get the git hash & print status
triqs_get_git_hash_of_source_dir(PROJECT_GIT_HASH)
message(STATUS "${PROJECT_NAME} version : ${PROJECT_VERSION}")
message(STATUS "${PROJECT_NAME} Git hash: ${PROJECT_GIT_HASH}")

# Enforce Consistent Versioning
if(NOT ${PROJECT_VERSION_MAJOR}.${PROJECT_VERSION_MINOR} VERSION_EQUAL ${TRIQS_VERSION_MAJOR}.${TRIQS_VERSION_MINOR})
  message(FATAL_ERROR "The ${PROJECT_NAME} version ${PROJECT_VERSION} is not compatible with TRIQS version ${TRIQS_VERSION}.")
endif()

# Default Install directory to TRIQS_ROOT if not given or invalid.
if(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT OR (NOT IS_ABSOLUTE ${CMAKE_INSTALL_PREFIX}))
  message(STATUS "No install prefix given (or invalid). Defaulting to TRIQS_ROOT")
  set(CMAKE_INSTALL_PREFIX ${TRIQS_ROOT} CACHE PATH "default install path" FORCE)
  set(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT FALSE)
endif()
if(NOT IS_SUBPROJECT)
  message(STATUS "-------- CMAKE_INSTALL_PREFIX: ${CMAKE_INSTALL_PREFIX} --------")
endif()
set(${PROJECT_NAME}_BINARY_DIR ${PROJECT_BINARY_DIR} CACHE STRING "Binary directory of the ${PROJECT_NAME} Project")

# Make additional Find Modules available
list(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/share/cmake/Modules)

# ############
# CMake Options

# Default to Release build type
if(NOT CMAKE_BUILD_TYPE)
  set(CMAKE_BUILD_TYPE Release CACHE STRING "Type of build" FORCE)
endif()
if(NOT IS_SUBPROJECT)
  message(STATUS "-------- BUILD-TYPE: ${CMAKE_BUILD_TYPE} --------")
endif()

# Python Support
option(PythonSupport "Build with Python support" ON)
if(PythonSupport AND NOT TRIQS_WITH_PYTHON_SUPPORT)
  message(FATAL_ERROR "TRIQS was installed without Python support. Cannot build the Python Interface. Disable the build with -DPythonSupport=OFF")
endif()

# Documentation
option(Build_Documentation "Build documentation" OFF)
if(Build_Documentation AND NOT PythonSupport)
  message(FATAL_ERROR "Build_Documentation=ON requires PythonSupport to be enabled")
endif()

# Testing
option(Build_Tests "Build tests" ON)
if(Build_Tests)
  enable_testing()
endif()

# Build static libraries by default
option(BUILD_SHARED_LIBS "Enable compilation of shared libraries" OFF)

# ############
# Global Compilation Settings

# Export the list of compile-commands into compile_commands.json
set(CMAKE_EXPORT_COMPILE_COMMANDS ON)

# Disable compiler extensions
set(CMAKE_CXX_EXTENSIONS OFF)

# Provide additional debugging information for Debug builds
add_compile_options($<$<CONFIG:Debug>:-ggdb3>)

# Create an Interface target for compiler warnings
add_library(${PROJECT_NAME}_warnings INTERFACE)
target_compile_options(${PROJECT_NAME}_warnings
  INTERFACE
    -Wall
    -Wextra
    -Wpedantic
    -Wno-sign-compare
    $<$<CXX_COMPILER_ID:GNU>:-Wno-comma-subscript>
    $<$<CXX_COMPILER_ID:GNU>:-Wshadow=local>
    $<$<CXX_COMPILER_ID:GNU>:-Wno-attributes>
    $<$<CXX_COMPILER_ID:Clang>:-Wno-deprecated-comma-subscript>
    $<$<CXX_COMPILER_ID:Clang>:-Wno-unknown-warning-option>
    $<$<CXX_COMPILER_ID:Clang>:-Wshadow>
    $<$<CXX_COMPILER_ID:Clang>:-Wno-gcc-compat>
    $<$<CXX_COMPILER_ID:Clang>:-Wno-c++20-extensions>
    $<$<CXX_COMPILER_ID:AppleClang>:-Wno-deprecated-comma-subscript>
    $<$<CXX_COMPILER_ID:AppleClang>:-Wno-unknown-warning-option>
    $<$<CXX_COMPILER_ID:AppleClang>:-Wshadow>
    $<$<CXX_COMPILER_ID:AppleClang>:-Wno-gcc-compat>
    $<$<CXX_COMPILER_ID:AppleClang>:-Wno-c++20-extensions>
  )

# #############
# Build Project

# Find / Build dependencies
add_subdirectory(deps)

# Build and install the library
add_subdirectory(c++/${PROJECT_NAME})

# Tests
if(Build_Tests)
  add_subdirectory(test)
endif()

# Python
if(PythonSupport)
  add_subdirectory(python/${PROJECT_NAME})
endif()

# Docs
if(Build_Documentation)
  add_subdirectory(doc)
endif()

# Additional configuration files
add_subdirectory(share)

# #############
# Debian Package

option(BUILD_DEBIAN_PACKAGE "Build a deb package" OFF)
if(BUILD_DEBIAN_PACKAGE AND NOT IS_SUBPROJECT)
  if(NOT CMAKE_INSTALL_PREFIX STREQUAL "/usr")
    message(FATAL_ERROR "CMAKE_INSTALL_PREFIX must be /usr for packaging")
  endif()
  set(CPACK_PACKAGE_NAME ${PROJECT_NAME})
  set(CPACK_GENERATOR "DEB")
  set(CPACK_PACKAGE_VERSION ${PROJECT_VERSION})
  set(CPACK_PACKAGE_CONTACT "https://github.com/TRIQS/${PROJECT_NAME}")
  execute_process(COMMAND dpkg --print-architecture OUTPUT_VARIABLE CMAKE_DEBIAN_PACKAGE_ARCHITECTURE OUTPUT_STRIP_TRAILING_WHITESPACE)
  set(CPACK_DEBIAN_PACKAGE_DEPENDS "triqs (>= 3.1)")
  set(CPACK_DEBIAN_PACKAGE_SHLIBDEPS ON)
  set(CPACK_DEBIAN_PACKAGE_GENERATE_SHLIBS ON)
  include(CPack)
endif()
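The CMakeLists.txt above supports a standard out-of-source build driven by the options it defines (PythonSupport, Build_Documentation, Build_Tests, BUILD_DEBIAN_PACKAGE). A hedged configure/build/test/install sketch; the TRIQS_ROOT value is only an example and mirrors how the Dockerfile in this commit passes it:

```bash
# Out-of-source build sketch using the options defined above; the TRIQS_ROOT
# path is illustrative. Without an explicit prefix, installation defaults to TRIQS_ROOT.
mkdir build && cd build
cmake .. -DTRIQS_ROOT=$HOME/install -DBuild_Tests=ON -DBuild_Documentation=OFF
make -j2
ctest --output-on-failure
make install
```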
674
COPYING.txt
Normal file
@@ -0,0 +1,674 @@
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007

Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.

[The remaining lines of COPYING.txt are the verbatim, unmodified text of the GNU General Public License, version 3: the Preamble, Terms and Conditions 0 through 17, and the "How to Apply These Terms to Your New Programs" appendix; see <http://www.gnu.org/licenses/>.]
1
ChangeLog.md
Symbolic link
@ -0,0 +1 @@
|
|||||||
|
doc/ChangeLog.md
|
16
Dockerfile
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
# See ../triqs/packaging for other options
|
||||||
|
FROM flatironinstitute/triqs:unstable-ubuntu-clang
|
||||||
|
ARG APPNAME=app4triqs
|
||||||
|
|
||||||
|
COPY requirements.txt /src/$APPNAME/requirements.txt
|
||||||
|
RUN pip3 install -r /src/$APPNAME/requirements.txt
|
||||||
|
|
||||||
|
COPY --chown=build . $SRC/$APPNAME
|
||||||
|
WORKDIR $BUILD/$APPNAME
|
||||||
|
RUN chown build .
|
||||||
|
USER build
|
||||||
|
ARG BUILD_ID
|
||||||
|
ARG CMAKE_ARGS
|
||||||
|
RUN cmake $SRC/$APPNAME -DTRIQS_ROOT=${INSTALL} $CMAKE_ARGS && make -j4 || make -j1 VERBOSE=1
|
||||||
|
USER root
|
||||||
|
RUN make install
|
185
Jenkinsfile
vendored
Normal file
@ -0,0 +1,185 @@
|
|||||||
|
def projectName = "app4triqs" /* set to app/repo name */
|
||||||
|
|
||||||
|
def dockerName = projectName.toLowerCase();
|
||||||
|
/* which platform to build documentation on */
|
||||||
|
def documentationPlatform = "ubuntu-clang"
|
||||||
|
/* depend on triqs upstream branch/project */
|
||||||
|
def triqsBranch = env.CHANGE_TARGET ?: env.BRANCH_NAME
|
||||||
|
def triqsProject = '/TRIQS/triqs/' + triqsBranch.replaceAll('/', '%2F')
|
||||||
|
/* whether to keep and publish the results */
|
||||||
|
def keepInstall = !env.BRANCH_NAME.startsWith("PR-")
|
||||||
|
|
||||||
|
properties([
|
||||||
|
disableConcurrentBuilds(),
|
||||||
|
buildDiscarder(logRotator(numToKeepStr: '10', daysToKeepStr: '30')),
|
||||||
|
pipelineTriggers(keepInstall ? [
|
||||||
|
upstream(
|
||||||
|
threshold: 'SUCCESS',
|
||||||
|
upstreamProjects: triqsProject
|
||||||
|
)
|
||||||
|
] : [])
|
||||||
|
])
|
||||||
|
|
||||||
|
/* map of all builds to run, populated below */
|
||||||
|
def platforms = [:]
|
||||||
|
|
||||||
|
/****************** linux builds (in docker) */
|
||||||
|
/* Each platform must have a corresponding Dockerfile.PLATFORM in triqs/packaging */
|
||||||
|
def dockerPlatforms = ["ubuntu-clang", "ubuntu-gcc", "sanitize"]
|
||||||
|
/* .each is currently broken in jenkins */
|
||||||
|
for (int i = 0; i < dockerPlatforms.size(); i++) {
|
||||||
|
def platform = dockerPlatforms[i]
|
||||||
|
platforms[platform] = { -> node('linux && docker && triqs') {
|
||||||
|
stage(platform) { timeout(time: 1, unit: 'HOURS') { ansiColor('xterm') {
|
||||||
|
checkout scm
|
||||||
|
/* construct a Dockerfile for this base */
|
||||||
|
sh """
|
||||||
|
( echo "FROM flatironinstitute/triqs:${triqsBranch}-${env.STAGE_NAME}" ; sed '0,/^FROM /d' Dockerfile ) > Dockerfile.jenkins
|
||||||
|
mv -f Dockerfile.jenkins Dockerfile
|
||||||
|
"""
|
||||||
|
/* build and tag */
|
||||||
|
def args = ''
|
||||||
|
if (platform == documentationPlatform)
|
||||||
|
args = '-DBuild_Documentation=1'
|
||||||
|
else if (platform == "sanitize")
|
||||||
|
args = '-DASAN=ON -DUBSAN=ON'
|
||||||
|
def img = docker.build("flatironinstitute/${dockerName}:${env.BRANCH_NAME}-${env.STAGE_NAME}", "--build-arg APPNAME=${projectName} --build-arg BUILD_ID=${env.BUILD_TAG} --build-arg CMAKE_ARGS='${args}' .")
|
||||||
|
catchError(buildResult: 'UNSTABLE', stageResult: 'UNSTABLE') {
|
||||||
|
img.inside() {
|
||||||
|
sh "make -C \$BUILD/${projectName} test CTEST_OUTPUT_ON_FAILURE=1"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (!keepInstall) {
|
||||||
|
sh "docker rmi --no-prune ${img.imageName()}"
|
||||||
|
}
|
||||||
|
} } }
|
||||||
|
} }
|
||||||
|
}
|
||||||
|
|
||||||
|
/****************** osx builds (on host) */
|
||||||
|
def osxPlatforms = [
|
||||||
|
["gcc", ['CC=gcc-11', 'CXX=g++-11', 'FC=gfortran-11']],
|
||||||
|
["clang", ['CC=$BREW/opt/llvm/bin/clang', 'CXX=$BREW/opt/llvm/bin/clang++', 'FC=gfortran-11', 'CXXFLAGS=-I$BREW/opt/llvm/include', 'LDFLAGS=-L$BREW/opt/llvm/lib']]
|
||||||
|
]
|
||||||
|
for (int i = 0; i < osxPlatforms.size(); i++) {
|
||||||
|
def platformEnv = osxPlatforms[i]
|
||||||
|
def platform = platformEnv[0]
|
||||||
|
platforms["osx-$platform"] = { -> node('osx && triqs') {
|
||||||
|
stage("osx-$platform") { timeout(time: 1, unit: 'HOURS') { ansiColor('xterm') {
|
||||||
|
def srcDir = pwd()
|
||||||
|
def tmpDir = pwd(tmp:true)
|
||||||
|
def buildDir = "$tmpDir/build"
|
||||||
|
/* install real branches in a fixed predictable place so apps can find them */
|
||||||
|
def installDir = keepInstall ? "${env.HOME}/install/${projectName}/${env.BRANCH_NAME}/${platform}" : "$tmpDir/install"
|
||||||
|
def triqsDir = "${env.HOME}/install/triqs/${triqsBranch}/${platform}"
|
||||||
|
def venv = triqsDir
|
||||||
|
dir(installDir) {
|
||||||
|
deleteDir()
|
||||||
|
}
|
||||||
|
|
||||||
|
checkout scm
|
||||||
|
|
||||||
|
def hdf5 = "${env.BREW}/opt/hdf5@1.10"
|
||||||
|
dir(buildDir) { withEnv(platformEnv[1].collect { it.replace('\$BREW', env.BREW) } + [
|
||||||
|
"PATH=$venv/bin:${env.BREW}/bin:/usr/bin:/bin:/usr/sbin",
|
||||||
|
"HDF5_ROOT=$hdf5",
|
||||||
|
"C_INCLUDE_PATH=$hdf5/include:${env.BREW}/include",
|
||||||
|
"CPLUS_INCLUDE_PATH=$venv/include:$hdf5/include:${env.BREW}/include",
|
||||||
|
"LIBRARY_PATH=$venv/lib:$hdf5/lib:${env.BREW}/lib",
|
||||||
|
"LD_LIBRARY_PATH=$hdf5/lib",
|
||||||
|
"PYTHONPATH=$installDir/lib/python3.9/site-packages",
|
||||||
|
"CMAKE_PREFIX_PATH=$venv/lib/cmake/triqs",
|
||||||
|
"OMP_NUM_THREADS=2"]) {
|
||||||
|
deleteDir()
|
||||||
|
/* note: this is installing into the parent (triqs) venv (install dir), which is thus shared among apps and so may not be completely safe */
|
||||||
|
sh "pip3 install -U -r $srcDir/requirements.txt"
|
||||||
|
sh "cmake $srcDir -DCMAKE_INSTALL_PREFIX=$installDir -DTRIQS_ROOT=$triqsDir -DBuild_Deps=Always"
|
||||||
|
sh "make -j2 || make -j1 VERBOSE=1"
|
||||||
|
catchError(buildResult: 'UNSTABLE', stageResult: 'UNSTABLE') { try {
|
||||||
|
sh "make test CTEST_OUTPUT_ON_FAILURE=1"
|
||||||
|
} catch (exc) {
|
||||||
|
archiveArtifacts(artifacts: 'Testing/Temporary/LastTest.log')
|
||||||
|
throw exc
|
||||||
|
} }
|
||||||
|
sh "make install"
|
||||||
|
} }
|
||||||
|
} } }
|
||||||
|
} }
|
||||||
|
}
|
||||||
|
|
||||||
|
/****************** wrap-up */
|
||||||
|
def error = null
|
||||||
|
try {
|
||||||
|
parallel platforms
|
||||||
|
if (keepInstall) { node('linux && docker && triqs') {
|
||||||
|
/* Publish results */
|
||||||
|
stage("publish") { timeout(time: 5, unit: 'MINUTES') {
|
||||||
|
def commit = sh(returnStdout: true, script: "git rev-parse HEAD").trim()
|
||||||
|
def release = env.BRANCH_NAME == "master" || env.BRANCH_NAME == "unstable" || sh(returnStdout: true, script: "git describe --exact-match HEAD || true").trim()
|
||||||
|
def workDir = pwd(tmp:true)
|
||||||
|
lock('triqs_publish') {
|
||||||
|
/* Update documention on gh-pages branch */
|
||||||
|
dir("$workDir/gh-pages") {
|
||||||
|
def subdir = "${projectName}/${env.BRANCH_NAME}"
|
||||||
|
git(url: "ssh://git@github.com/TRIQS/TRIQS.github.io.git", branch: "master", credentialsId: "ssh", changelog: false)
|
||||||
|
sh "rm -rf ${subdir}"
|
||||||
|
docker.image("flatironinstitute/${dockerName}:${env.BRANCH_NAME}-${documentationPlatform}").inside() {
|
||||||
|
sh """#!/bin/bash -ex
|
||||||
|
base=\$INSTALL/share/doc
|
||||||
|
dir="${projectName}"
|
||||||
|
[[ -d \$base/triqs_\$dir ]] && dir=triqs_\$dir || [[ -d \$base/\$dir ]]
|
||||||
|
cp -rp \$base/\$dir ${subdir}
|
||||||
|
"""
|
||||||
|
}
|
||||||
|
sh "git add -A ${subdir}"
|
||||||
|
sh """
|
||||||
|
git commit --author='Flatiron Jenkins <jenkins@flatironinstitute.org>' --allow-empty -m 'Generated documentation for ${subdir}' -m '${env.BUILD_TAG} ${commit}'
|
||||||
|
"""
|
||||||
|
// note: credentials used above don't work (need JENKINS-28335)
|
||||||
|
sh "git push origin master"
|
||||||
|
}
|
||||||
|
/* Update packaging repo submodule */
|
||||||
|
if (release) { dir("$workDir/packaging") { try {
|
||||||
|
git(url: "ssh://git@github.com/TRIQS/packaging.git", branch: env.BRANCH_NAME, credentialsId: "ssh", changelog: false)
|
||||||
|
// note: credentials used above don't work (need JENKINS-28335)
|
||||||
|
sh """#!/bin/bash -ex
|
||||||
|
dir="${projectName}"
|
||||||
|
[[ -d triqs_\$dir ]] && dir=triqs_\$dir || [[ -d \$dir ]]
|
||||||
|
echo "160000 commit ${commit}\t\$dir" | git update-index --index-info
|
||||||
|
git commit --author='Flatiron Jenkins <jenkins@flatironinstitute.org>' -m 'Autoupdate ${projectName}' -m '${env.BUILD_TAG}'
|
||||||
|
git push origin ${env.BRANCH_NAME}
|
||||||
|
"""
|
||||||
|
} catch (err) {
|
||||||
|
/* Ignore, non-critical -- might not exist on this branch */
|
||||||
|
echo "Failed to update packaging repo"
|
||||||
|
} } }
|
||||||
|
}
|
||||||
|
} }
|
||||||
|
} }
|
||||||
|
} catch (err) {
|
||||||
|
error = err
|
||||||
|
} finally {
|
||||||
|
/* send email on build failure (declarative pipeline's post section would work better) */
|
||||||
|
if ((error != null || currentBuild.currentResult != 'SUCCESS') && env.BRANCH_NAME != "jenkins") emailext(
|
||||||
|
subject: "\$PROJECT_NAME - Build # \$BUILD_NUMBER - FAILED",
|
||||||
|
body: """\$PROJECT_NAME - Build # \$BUILD_NUMBER - FAILED
|
||||||
|
|
||||||
|
Check console output at \$BUILD_URL to view full results.
|
||||||
|
|
||||||
|
Building \$BRANCH_NAME for \$CAUSE
|
||||||
|
\$JOB_DESCRIPTION
|
||||||
|
|
||||||
|
Changes:
|
||||||
|
\$CHANGES
|
||||||
|
|
||||||
|
End of build log:
|
||||||
|
\${BUILD_LOG,maxLines=60}
|
||||||
|
""",
|
||||||
|
to: 'nwentzell@flatironinstitute.org, dsimon@flatironinstitute.org',
|
||||||
|
recipientProviders: [
|
||||||
|
[$class: 'DevelopersRecipientProvider'],
|
||||||
|
],
|
||||||
|
replyTo: '$DEFAULT_REPLYTO'
|
||||||
|
)
|
||||||
|
if (error != null) throw error
|
||||||
|
}
|
18
LICENSE.txt
Normal file
@ -0,0 +1,18 @@
|
|||||||
|
app4triqs - An example application using triqs and cpp2py
|
||||||
|
|
||||||
|
Copyright (C) 2017-2018, N. Wentzell, O. Parcollet
|
||||||
|
Copyright (C) 2018-2019, The Simons Foundation
|
||||||
|
authors: N. Wentzell, D. Simons, H. Strand, O. Parcollet
|
||||||
|
|
||||||
|
app4triqs is free software: you can redistribute it and/or modify it under the
|
||||||
|
terms of the GNU General Public License as published by the Free Software
|
||||||
|
Foundation, either version 3 of the License, or (at your option) any later
|
||||||
|
version.
|
||||||
|
|
||||||
|
app4triqs is distributed in the hope that it will be useful, but WITHOUT ANY
|
||||||
|
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
|
||||||
|
PARTICULAR PURPOSE. See the GNU General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU General Public License along with
|
||||||
|
app4triqs (in the file COPYING.txt in this directory). If not, see
|
||||||
|
<http://www.gnu.org/licenses/>.
|
79
README.md
Normal file
@ -0,0 +1,79 @@
|
|||||||
|
[![build](https://github.com/TRIQS/app4triqs/workflows/build/badge.svg)](https://github.com/TRIQS/app4triqs/actions?query=workflow%3Abuild)
|
||||||
|
|
||||||
|
# app4triqs - A skeleton for a TRIQS application
|
||||||
|
|
||||||
|
Initial Setup
|
||||||
|
-------------
|
||||||
|
|
||||||
|
To adapt this skeleton for a new TRIQS application, the following steps are necessary:
|
||||||
|
|
||||||
|
* Create a repository, e.g. https://github.com/username/appname
|
||||||
|
|
||||||
|
* Run the following commands in order after replacing **appname** accordingly
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git clone https://github.com/triqs/app4triqs --branch unstable appname
|
||||||
|
cd appname
|
||||||
|
./share/squash_history.sh
|
||||||
|
./share/replace_and_rename.py appname
|
||||||
|
git add -A && git commit -m "Adjust app4triqs skeleton for appname"
|
||||||
|
```
|
||||||
|
|
||||||
|
You can now add your github repository and push to it
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git remote add origin https://github.com/username/appname
|
||||||
|
git remote update
|
||||||
|
git push origin unstable
|
||||||
|
```
|
||||||
|
|
||||||
|
If you prefer to use the [SSH interface](https://help.github.com/en/articles/connecting-to-github-with-ssh)
|
||||||
|
to the remote repository, replace the http link with e.g. `git@github.com:username/appname`.
|
||||||
|
|
||||||
|
### Merging app4triqs skeleton updates ###
|
||||||
|
|
||||||
|
You can merge future changes to the app4triqs skeleton into your project with the following commands (they assume the app4triqs skeleton repository is configured as a remote named `app4triqs_remote`)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git remote update
|
||||||
|
git merge app4triqs_remote/unstable -m "Merge latest app4triqs skeleton changes"
|
||||||
|
```
|
||||||
|
|
||||||
|
If you should encounter any conflicts, resolve them and `git commit`.
|
||||||
|
Finally we repeat the replace and rename command from the initial setup.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
./share/replace_and_rename.py appname
|
||||||
|
git commit --amend
|
||||||
|
```
|
||||||
|
|
||||||
|
Getting Started
|
||||||
|
---------------
|
||||||
|
|
||||||
|
After setting up your application as described above you should customize the following files and directories
|
||||||
|
according to your needs (replace app4triqs in the following by the name of your application)
|
||||||
|
|
||||||
|
* Adjust or remove the `README.md` and `doc/ChangeLog.md` file
|
||||||
|
* In the `c++/app4triqs` subdirectory adjust the example files `app4triqs.hpp` and `app4triqs.cpp` or add your own source files.
|
||||||
|
* In the `test/c++` subdirectory adjust the example test `basic.cpp` or add your own tests.
|
||||||
|
* In the `python/app4triqs` subdirectory add your Python source files.
|
||||||
|
Be sure to remove the `app4triqs_module_desc.py` file unless you want to generate a Python module from your C++ source code.
|
||||||
|
* In the `test/python` subdirectory adjust the example test `Basic.py` or add your own tests.
|
||||||
|
* Adjust any documentation examples given as `*.rst` files in the doc directory.
|
||||||
|
* Adjust the sphinx configuration in `doc/conf.py.in` as necessary.
|
||||||
|
* The build and install process is identical to the one outlined [here](https://triqs.github.io/app4triqs/unstable/install.html).
|
||||||
|
|
||||||
|
### Optional ###
|
||||||
|
|
||||||
|
|
||||||
|
* If you want to wrap C++ classes and/or functions provided in `c++/app4triqs/app4triqs.hpp`, rerun the `c++2py` tool with
|
||||||
|
```bash
|
||||||
|
c++2py -r app4triqs_module_desc.py
|
||||||
|
```
|
||||||
|
* Add your email address to the bottom section of `Jenkinsfile` for Jenkins CI notification emails
|
||||||
|
```
|
||||||
|
End of build log:
|
||||||
|
\${BUILD_LOG,maxLines=60}
|
||||||
|
""",
|
||||||
|
to: 'user@domain.org',
|
||||||
|
```
|
84
c++/app4triqs/CMakeLists.txt
Normal file
@ -0,0 +1,84 @@
|
|||||||
|
file(GLOB_RECURSE sources *.cpp)
|
||||||
|
add_library(${PROJECT_NAME}_c ${sources})
|
||||||
|
add_library(${PROJECT_NAME}::${PROJECT_NAME}_c ALIAS ${PROJECT_NAME}_c)
|
||||||
|
|
||||||
|
# Link against triqs and enable warnings
|
||||||
|
target_link_libraries(${PROJECT_NAME}_c PUBLIC triqs PRIVATE $<BUILD_INTERFACE:${PROJECT_NAME}_warnings>)
|
||||||
|
|
||||||
|
# Configure target and compilation
|
||||||
|
set_target_properties(${PROJECT_NAME}_c PROPERTIES
|
||||||
|
POSITION_INDEPENDENT_CODE ON
|
||||||
|
VERSION ${PROJECT_VERSION_MAJOR}.${PROJECT_VERSION_MINOR}
|
||||||
|
)
|
||||||
|
target_include_directories(${PROJECT_NAME}_c PUBLIC $<BUILD_INTERFACE:${PROJECT_SOURCE_DIR}/c++>)
|
||||||
|
target_include_directories(${PROJECT_NAME}_c SYSTEM INTERFACE $<INSTALL_INTERFACE:${CMAKE_INSTALL_PREFIX}/include>)
|
||||||
|
target_compile_definitions(${PROJECT_NAME}_c PUBLIC
|
||||||
|
APP4TRIQS_GIT_HASH=${PROJECT_GIT_HASH}
|
||||||
|
TRIQS_GIT_HASH=${TRIQS_GIT_HASH}
|
||||||
|
$<$<CONFIG:Debug>:APP4TRIQS_DEBUG>
|
||||||
|
$<$<CONFIG:Debug>:TRIQS_DEBUG>
|
||||||
|
$<$<CONFIG:Debug>:TRIQS_ARRAYS_ENFORCE_BOUNDCHECK>
|
||||||
|
)
|
||||||
|
|
||||||
|
# Install library and headers
|
||||||
|
install(TARGETS ${PROJECT_NAME}_c EXPORT ${PROJECT_NAME}-targets DESTINATION lib)
|
||||||
|
install(DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} DESTINATION include FILES_MATCHING PATTERN "*.hpp" PATTERN "*.h")
|
||||||
|
|
||||||
|
|
||||||
|
# ========= Static Analyzer Checks ==========
|
||||||
|
|
||||||
|
option(ANALYZE_SOURCES "Run static analyzer checks if found (clang-tidy, cppcheck)" OFF)
|
||||||
|
if(ANALYZE_SOURCES)
|
||||||
|
|
||||||
|
# Locate static analyzer tools
|
||||||
|
find_program(CPPCHECK_EXECUTABLE NAMES "cppcheck" PATHS ENV PATH)
|
||||||
|
find_program(CLANG_TIDY_EXECUTABLE NAMES "clang-tidy" PATHS ENV PATH)
|
||||||
|
|
||||||
|
# Run clang-tidy if found
|
||||||
|
if(CLANG_TIDY_EXECUTABLE)
|
||||||
|
message(STATUS "clang-tidy found: ${CLANG_TIDY_EXECUTABLE}")
|
||||||
|
set_target_properties(${PROJECT_NAME}_c PROPERTIES CXX_CLANG_TIDY "${CLANG_TIDY_EXECUTABLE}")
|
||||||
|
else()
|
||||||
|
message(STATUS "clang-tidy not found in $PATH. Please consider installing clang-tidy for additional checks!")
|
||||||
|
endif()
|
||||||
|
|
||||||
|
# Run cppcheck if found
|
||||||
|
if(CPPCHECK_EXECUTABLE)
|
||||||
|
message(STATUS "cppcheck found: ${CPPCHECK_EXECUTABLE}")
|
||||||
|
add_custom_command(
|
||||||
|
TARGET ${PROJECT_NAME}_c
|
||||||
|
COMMAND ${CPPCHECK_EXECUTABLE}
|
||||||
|
--enable=warning,style,performance,portability
|
||||||
|
--std=c++20
|
||||||
|
--template=gcc
|
||||||
|
--verbose
|
||||||
|
--force
|
||||||
|
--quiet
|
||||||
|
${sources}
|
||||||
|
WORKING_DIRECTORY
|
||||||
|
${CMAKE_CURRENT_SOURCE_DIR}
|
||||||
|
)
|
||||||
|
else()
|
||||||
|
message(STATUS "cppcheck not found in $PATH. Please consider installing cppcheck for additional checks!")
|
||||||
|
endif()
|
||||||
|
|
||||||
|
endif()
|
||||||
|
|
||||||
|
|
||||||
|
# ========= Dynamic Analyzer Checks ==========
|
||||||
|
|
||||||
|
option(ASAN "Compile library and executables with LLVM Address Sanitizer" OFF)
|
||||||
|
option(UBSAN "Compile library and executables with LLVM Undefined Behavior Sanitizer" OFF)
|
||||||
|
|
||||||
|
if(ASAN)
|
||||||
|
if(NOT TARGET asan)
|
||||||
|
find_package(sanitizer REQUIRED "asan")
|
||||||
|
endif()
|
||||||
|
target_link_libraries(${PROJECT_NAME}_c PUBLIC $<BUILD_INTERFACE:asan>)
|
||||||
|
endif()
|
||||||
|
if(UBSAN)
|
||||||
|
if(NOT TARGET ubsan)
|
||||||
|
find_package(sanitizer REQUIRED "ubsan")
|
||||||
|
endif()
|
||||||
|
target_link_libraries(${PROJECT_NAME}_c PUBLIC $<BUILD_INTERFACE:ubsan>)
|
||||||
|
endif()
|
37
c++/app4triqs/app4triqs.cpp
Normal file
@ -0,0 +1,37 @@
|
|||||||
|
#include <cmath>
|
||||||
|
#include "./app4triqs.hpp"
|
||||||
|
|
||||||
|
namespace app4triqs {
|
||||||
|
|
||||||
|
toto &toto::operator+=(toto const &b) {
|
||||||
|
this->i += b.i;
|
||||||
|
return *this;
|
||||||
|
}
|
||||||
|
|
||||||
|
toto toto::operator+(toto const &b) const {
|
||||||
|
auto res = *this;
|
||||||
|
res += b;
|
||||||
|
return res;
|
||||||
|
}
|
||||||
|
|
||||||
|
bool toto::operator==(toto const &b) const { return (this->i == b.i); }
|
||||||
|
|
||||||
|
void h5_write(h5::group grp, std::string subgroup_name, toto const &m) {
|
||||||
|
grp = subgroup_name.empty() ? grp : grp.create_group(subgroup_name);
|
||||||
|
h5_write(grp, "i", m.i);
|
||||||
|
h5_write_attribute(grp, "Format", toto::hdf5_format());
|
||||||
|
}
|
||||||
|
|
||||||
|
void h5_read(h5::group grp, std::string subgroup_name, toto &m) {
|
||||||
|
grp = subgroup_name.empty() ? grp : grp.open_group(subgroup_name);
|
||||||
|
int i;
|
||||||
|
h5_read(grp, "i", i);
|
||||||
|
m = toto(i);
|
||||||
|
}
|
||||||
|
|
||||||
|
int chain(int i, int j) {
|
||||||
|
int n_digits_j = j > 0 ? (int)log10(j) + 1 : 1;
|
||||||
|
return i * int(pow(10, n_digits_j)) + j;
|
||||||
|
}
|
||||||
|
|
||||||
|
} // namespace app4triqs
|
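For orientation, here is a minimal sketch of how the example class and the `chain` helper above behave; the file name and the `main` driver are purely illustrative and not part of the skeleton (compile and link against the app4triqs library and TRIQS as usual):

```cpp
// sketch_usage.cpp -- illustrative only, not part of the skeleton
#include <app4triqs/app4triqs.hpp>
#include <cassert>

int main() {
  // toto wraps an integer and supports addition and comparison
  app4triqs::toto a(40), b(2);
  auto c = a + b;                       // calls toto::operator+
  assert(c == app4triqs::toto(42));

  // chain concatenates the decimal digits of its two arguments
  assert(app4triqs::chain(12, 34) == 1234);
  assert(app4triqs::chain(7, 0) == 70); // j == 0 still contributes one digit
  return 0;
}
```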
79
c++/app4triqs/app4triqs.hpp
Normal file
@ -0,0 +1,79 @@
|
|||||||
|
#pragma once
|
||||||
|
#include <triqs/gfs.hpp>
|
||||||
|
#include <triqs/mesh.hpp>
|
||||||
|
#include <h5/h5.hpp>
|
||||||
|
|
||||||
|
namespace app4triqs {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A very useful and important class
|
||||||
|
*
|
||||||
|
* @note A Useful note
|
||||||
|
* @include app4triqs/app4triqs.hpp
|
||||||
|
*/
|
||||||
|
class toto {
|
||||||
|
|
||||||
|
int i = 0;
|
||||||
|
|
||||||
|
public:
|
||||||
|
toto() = default;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Construct from integer
|
||||||
|
*
|
||||||
|
* @param i_ a scalar :math:`G(\tau)`
|
||||||
|
*/
|
||||||
|
explicit toto(int i_) : i(i_) {}
|
||||||
|
|
||||||
|
~toto() = default;
|
||||||
|
|
||||||
|
// Copy/Move construction
|
||||||
|
toto(toto const &) = default;
|
||||||
|
toto(toto &&) = default;
|
||||||
|
|
||||||
|
/// Copy/Move assignment
|
||||||
|
toto &operator=(toto const &) = default;
|
||||||
|
toto &operator=(toto &&) = default;
|
||||||
|
|
||||||
|
/// Simple accessor
|
||||||
|
[[nodiscard]] int get_i() const { return i; }
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A simple function with :math:`G(\tau)`
|
||||||
|
*
|
||||||
|
* @param u Nothing useful
|
||||||
|
*/
|
||||||
|
int f(int u) { return u; }
|
||||||
|
|
||||||
|
/// Arithmetic operations
|
||||||
|
toto operator+(toto const &b) const;
|
||||||
|
toto &operator+=(toto const &b);
|
||||||
|
|
||||||
|
/// Comparison
|
||||||
|
bool operator==(toto const &b) const;
|
||||||
|
|
||||||
|
/// HDF5
|
||||||
|
static std::string hdf5_format() { return "Toto"; }
|
||||||
|
|
||||||
|
friend void h5_write(h5::group grp, std::string subgroup_name, toto const &m);
|
||||||
|
friend void h5_read(h5::group grp, std::string subgroup_name, toto &m);
|
||||||
|
|
||||||
|
/// Serialization
|
||||||
|
template <class Archive> void serialize(Archive &ar, const unsigned int) { ar &i; }
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Chain digits of two integers
|
||||||
|
*
|
||||||
|
* @head A set of functions that implement chaining
|
||||||
|
*
|
||||||
|
* @tail Do I really need to explain more ?
|
||||||
|
*
|
||||||
|
* @param i The first integer
|
||||||
|
* @param j The second integer
|
||||||
|
* @return An integer containing the digits of both i and j
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
int chain(int i, int j);
|
||||||
|
|
||||||
|
} // namespace app4triqs
|
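Since `h5_write`/`h5_read` are declared as friends, a `toto` object can be stored in and recovered from an HDF5 file. Below is a minimal round-trip sketch, assuming the TRIQS `h5` library interface (`h5::file(name, mode)` convertible to `h5::group`); the file and dataset names are illustrative:

```cpp
// h5_roundtrip.cpp -- illustrative sketch only
#include <app4triqs/app4triqs.hpp>
#include <h5/h5.hpp>
#include <cassert>

int main() {
  app4triqs::toto a(5);
  {
    h5::file f("toto.h5", 'w'); // create/truncate the file
    h5_write(f, "a", a);        // stores member "i" and the "Format" attribute
  }
  app4triqs::toto b;
  {
    h5::file f("toto.h5", 'r'); // reopen read-only
    h5_read(f, "a", b);
  }
  assert(a == b);
  return 0;
}
```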
1
deps/.gitignore
vendored
Normal file
@ -0,0 +1 @@
|
|||||||
|
*
|
67
deps/CMakeLists.txt
vendored
Normal file
@ -0,0 +1,67 @@
|
|||||||
|
include(external_dependency.cmake)
|
||||||
|
|
||||||
|
# Add your dependencies with the function
|
||||||
|
#
|
||||||
|
# external_dependency(name
|
||||||
|
# [VERSION <version-number>]
|
||||||
|
# [GIT_REPO <url>]
|
||||||
|
# [GIT_TAG <tag>]
|
||||||
|
# [BUILD_ALWAYS]
|
||||||
|
# [EXCLUDE_FROM_ALL]
|
||||||
|
# )
|
||||||
|
#
|
||||||
|
# Resolve the dependency using the following steps in order.
|
||||||
|
# If a step was successful, skip the remaining ones.
|
||||||
|
#
|
||||||
|
# 1. Use find_package(name [<version-number>])
|
||||||
|
# to locate the package in the system.
|
||||||
|
# Skip this step if the Build_Deps option is set to Always.
|
||||||
|
# 2. Try to find a directory containing the sources
|
||||||
|
# at ${CMAKE_CURRENT_SOURCE_DIR}/name and
|
||||||
|
# ${CMAKE_SOURCE_DIR}/deps/name. If found
|
||||||
|
# build it as a cmake sub-project.
|
||||||
|
# 3. If GIT_REPO is provided, git clone the sources,
|
||||||
|
# and build them as a cmake sub-project.
|
||||||
|
#
|
||||||
|
# Additional options:
|
||||||
|
#
|
||||||
|
# GIT_TAG - Use this keyword to specify the git-tag, branch or commit hash
|
||||||
|
#
|
||||||
|
# BUILD_ALWAYS - If set, this dependency will always be built from source
|
||||||
|
# and will never be searched in the system.
|
||||||
|
#
|
||||||
|
# EXCLUDE_FROM_ALL - If set, targets of the dependency cmake subproject
|
||||||
|
# will not be included in the ALL target of the project.
|
||||||
|
# In particular the dependency will not be installed.
|
||||||
|
|
||||||
|
if(NOT DEFINED Build_Deps)
|
||||||
|
set(Build_Deps "Always" CACHE STRING "Do we build dependencies from source? [Never/Always/IfNotFound]")
|
||||||
|
else()
|
||||||
|
set(Build_Deps_Opts "Never" "Always" "IfNotFound")
|
||||||
|
if(NOT ${Build_Deps} IN_LIST Build_Deps_Opts)
|
||||||
|
message(FATAL_ERROR "Build_Deps option should be either 'Never', 'Always' or 'IfNotFound'")
|
||||||
|
endif()
|
||||||
|
set(Build_Deps ${Build_Deps} CACHE STRING "Do we build dependencies from source? [Never/Always/IfNotFound]")
|
||||||
|
if(NOT IS_SUBPROJECT AND NOT Build_Deps STREQUAL "Always" AND (ASAN OR UBSAN))
|
||||||
|
message(WARNING "For builds with llvm sanitizers (ASAN/UBSAN) it is recommended to use -DBuild_Deps=Always to avoid false positives.")
|
||||||
|
endif()
|
||||||
|
endif()
|
||||||
|
|
||||||
|
# -- Cpp2Py --
|
||||||
|
if(PythonSupport OR Build_Documentation)
|
||||||
|
external_dependency(Cpp2Py
|
||||||
|
GIT_REPO https://github.com/TRIQS/cpp2py
|
||||||
|
VERSION 2.0
|
||||||
|
GIT_TAG master
|
||||||
|
BUILD_ALWAYS
|
||||||
|
EXCLUDE_FROM_ALL
|
||||||
|
)
|
||||||
|
endif()
|
||||||
|
|
||||||
|
# -- GTest --
|
||||||
|
external_dependency(GTest
|
||||||
|
GIT_REPO https://github.com/google/googletest
|
||||||
|
GIT_TAG main
|
||||||
|
BUILD_ALWAYS
|
||||||
|
EXCLUDE_FROM_ALL
|
||||||
|
)
|
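As a sketch of how a project author might declare an additional dependency with the `external_dependency` function documented above, using a pinned tag (the name, version and URL below are placeholders, not real packages):

```cmake
# -- Hypothetical extra dependency (placeholder name, version and URL) --
external_dependency(mylib
  VERSION 1.2
  GIT_REPO https://github.com/username/mylib
  GIT_TAG v1.2.3
  EXCLUDE_FROM_ALL
)
```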
95
deps/external_dependency.cmake
vendored
Normal file
@ -0,0 +1,95 @@
|
|||||||
|
# Copyright (c) 2020 Simons Foundation
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
# https://www.gnu.org/licenses/gpl-3.0.txt
|
||||||
|
|
||||||
|
|
||||||
|
# Consider ROOT env variables in find_package
|
||||||
|
if(POLICY CMP0074)
|
||||||
|
cmake_policy(SET CMP0074 NEW)
|
||||||
|
endif()
|
||||||
|
|
||||||
|
# Make sure that imported targets are always global
|
||||||
|
get_property(IMPORTED_ALWAYS_GLOBAL GLOBAL PROPERTY IMPORTED_ALWAYS_GLOBAL)
|
||||||
|
if(NOT IMPORTED_ALWAYS_GLOBAL)
|
||||||
|
function(add_library)
|
||||||
|
set(_args ${ARGN})
|
||||||
|
if ("${_args}" MATCHES ";IMPORTED")
|
||||||
|
list(APPEND _args GLOBAL)
|
||||||
|
endif()
|
||||||
|
_add_library(${_args})
|
||||||
|
endfunction()
|
||||||
|
set_property(GLOBAL PROPERTY IMPORTED_ALWAYS_GLOBAL TRUE)
|
||||||
|
endif()
|
||||||
|
|
||||||
|
# Define External Dependency Function
|
||||||
|
function(external_dependency)
|
||||||
|
cmake_parse_arguments(ARG "EXCLUDE_FROM_ALL;BUILD_ALWAYS" "VERSION;GIT_REPO;GIT_TAG" "" ${ARGN})
|
||||||
|
|
||||||
|
# -- Was dependency already found?
|
||||||
|
get_property(${ARGV0}_FOUND GLOBAL PROPERTY ${ARGV0}_FOUND)
|
||||||
|
if(${ARGV0}_FOUND)
|
||||||
|
message(STATUS "Dependency ${ARGV0} was already resolved.")
|
||||||
|
return()
|
||||||
|
endif()
|
||||||
|
|
||||||
|
# -- Try to find package in system.
|
||||||
|
if(NOT ARG_BUILD_ALWAYS AND NOT Build_Deps STREQUAL "Always")
|
||||||
|
find_package(${ARGV0} ${ARG_VERSION} QUIET HINTS ${CMAKE_INSTALL_PREFIX})
|
||||||
|
if(${ARGV0}_FOUND)
|
||||||
|
message(STATUS "Found dependency ${ARGV0} in system ${${ARGV0}_ROOT}")
|
||||||
|
return()
|
||||||
|
elseif(Build_Deps STREQUAL "Never")
|
||||||
|
message(FATAL_ERROR "Could not find dependency ${ARGV0} in system. Please install the dependency manually or use -DBuild_Deps=IfNotFound during cmake configuration to automatically build all dependencies that are not found.")
|
||||||
|
endif()
|
||||||
|
endif()
|
||||||
|
|
||||||
|
# -- Build package from source
|
||||||
|
message(STATUS " =============== Configuring Dependency ${ARGV0} =============== ")
|
||||||
|
if(ARG_EXCLUDE_FROM_ALL)
|
||||||
|
set(subdir_opts EXCLUDE_FROM_ALL)
|
||||||
|
set(Build_Tests OFF)
|
||||||
|
set(Build_Documentation OFF)
|
||||||
|
endif()
|
||||||
|
if(IS_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/${ARGV0})
|
||||||
|
message(STATUS "Found sources for dependency ${ARGV0} at ${CMAKE_CURRENT_SOURCE_DIR}/${ARGV0}")
|
||||||
|
add_subdirectory(${ARGV0} ${subdir_opts})
|
||||||
|
elseif(IS_DIRECTORY ${CMAKE_SOURCE_DIR}/deps/${ARGV0})
|
||||||
|
message(STATUS "Found sources for dependency ${ARGV0} at ${CMAKE_SOURCE_DIR}/deps/${ARGV0}")
|
||||||
|
add_subdirectory(${ARGV0} ${subdir_opts})
|
||||||
|
elseif(ARG_GIT_REPO)
|
||||||
|
set(bin_dir ${CMAKE_CURRENT_BINARY_DIR}/${ARGV0})
|
||||||
|
set(src_dir ${bin_dir}_src)
|
||||||
|
if(NOT IS_DIRECTORY ${src_dir})
|
||||||
|
if(ARG_GIT_TAG)
|
||||||
|
set(clone_opts --branch ${ARG_GIT_TAG} -c advice.detachedHead=false)
|
||||||
|
endif()
|
||||||
|
if(NOT GIT_EXECUTABLE)
|
||||||
|
find_package(Git REQUIRED)
|
||||||
|
endif()
|
||||||
|
execute_process(COMMAND ${GIT_EXECUTABLE} clone ${ARG_GIT_REPO} --depth 1 ${clone_opts} ${src_dir}
|
||||||
|
RESULT_VARIABLE clone_failed
|
||||||
|
ERROR_VARIABLE clone_error
|
||||||
|
)
|
||||||
|
if(clone_failed)
|
||||||
|
message(FATAL_ERROR "Failed to clone sources for dependency ${ARGV0}.\n ${clone_error}")
|
||||||
|
endif()
|
||||||
|
endif()
|
||||||
|
add_subdirectory(${src_dir} ${bin_dir} ${subdir_opts})
|
||||||
|
else()
|
||||||
|
message(FATAL_ERROR "Could not find or build dependency ${ARGV0}")
|
||||||
|
endif()
|
||||||
|
message(STATUS " =============== End ${ARGV0} Configuration =============== ")
|
||||||
|
set_property(GLOBAL PROPERTY ${ARGV0}_FOUND TRUE)
|
||||||
|
|
||||||
|
endfunction()
|
81
doc/CMakeLists.txt
Normal file
@ -0,0 +1,81 @@
|
|||||||
|
# Generate the sphinx config file
|
||||||
|
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/conf.py.in ${CMAKE_CURRENT_BINARY_DIR}/conf.py @ONLY)
|
||||||
|
|
||||||
|
# -----------------------------------------------------------------------------
|
||||||
|
# Create an optional target that allows us to regenerate the C++ doc with c++2rst
|
||||||
|
# -----------------------------------------------------------------------------
|
||||||
|
add_custom_target(${PROJECT_NAME}_docs_cpp2rst)
|
||||||
|
include(${PROJECT_SOURCE_DIR}/share/cmake/extract_flags.cmake)
|
||||||
|
extract_flags(${PROJECT_NAME}_c BUILD_INTERFACE)
|
||||||
|
separate_arguments(${PROJECT_NAME}_c_CXXFLAGS)
|
||||||
|
macro(generate_docs header_file)
|
||||||
|
add_custom_command(
|
||||||
|
TARGET ${PROJECT_NAME}_docs_cpp2rst
|
||||||
|
COMMAND rm -rf ${CMAKE_CURRENT_SOURCE_DIR}/cpp2rst_generated
|
||||||
|
COMMAND
|
||||||
|
PYTHONPATH=${CPP2PY_BINARY_DIR}:$ENV{PYTHONPATH}
|
||||||
|
PATH=${CPP2PY_BINARY_DIR}/bin:${CPP2PY_ROOT}/bin:$ENV{PATH}
|
||||||
|
c++2rst
|
||||||
|
${header_file}
|
||||||
|
-N ${PROJECT_NAME}
|
||||||
|
--output_directory ${CMAKE_CURRENT_SOURCE_DIR}/cpp2rst_generated
|
||||||
|
-I${PROJECT_SOURCE_DIR}/c++
|
||||||
|
--cxxflags="${${PROJECT_NAME}_c_CXXFLAGS}"
|
||||||
|
)
|
||||||
|
endmacro(generate_docs)
|
||||||
|
|
||||||
|
generate_docs(${PROJECT_SOURCE_DIR}/c++/${PROJECT_NAME}/${PROJECT_NAME}.hpp)
|
||||||
|
|
||||||
|
# --------------------------------------------------------
|
||||||
|
# Build & Run the C++ doc examples and capture the output
|
||||||
|
# --------------------------------------------------------
|
||||||
|
|
||||||
|
add_custom_target(${PROJECT_NAME}_docs_example_output)
|
||||||
|
file(GLOB_RECURSE ExampleList RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} *.cpp)
|
||||||
|
foreach(example ${ExampleList})
|
||||||
|
get_filename_component(f ${example} NAME_WE)
|
||||||
|
get_filename_component(d ${example} DIRECTORY)
|
||||||
|
add_executable(${PROJECT_NAME}_doc_${f} EXCLUDE_FROM_ALL ${example})
|
||||||
|
set_property(TARGET ${PROJECT_NAME}_doc_${f} PROPERTY RUNTIME_OUTPUT_DIRECTORY ${d})
|
||||||
|
target_link_libraries(${PROJECT_NAME}_doc_${f} triqs)
|
||||||
|
add_custom_command(TARGET ${PROJECT_NAME}_doc_${f}
|
||||||
|
COMMAND ${PROJECT_NAME}_doc_${f} > ${CMAKE_CURRENT_SOURCE_DIR}/${d}/${f}.output 2>/dev/null
|
||||||
|
WORKING_DIRECTORY ${d}
|
||||||
|
)
|
||||||
|
add_dependencies(${PROJECT_NAME}_docs_example_output ${PROJECT_NAME}_doc_${f})
|
||||||
|
endforeach()
|
||||||
|
|
||||||
|
# ---------------------------------
|
||||||
|
# Top Sphinx target
|
||||||
|
# ---------------------------------
|
||||||
|
if(NOT DEFINED SPHINXBUILD_EXECUTABLE)
|
||||||
|
find_package(Sphinx)
|
||||||
|
endif()
|
||||||
|
|
||||||
|
# Sphinx has internal caching, always run it
|
||||||
|
add_custom_target(${PROJECT_NAME}_docs_sphinx ALL)
|
||||||
|
add_custom_command(
|
||||||
|
TARGET ${PROJECT_NAME}_docs_sphinx
|
||||||
|
COMMAND PYTHONPATH=${PROJECT_BINARY_DIR}/python:$ENV{PYTHONPATH} ${SPHINXBUILD_EXECUTABLE} -j auto -c . -b html ${CMAKE_CURRENT_SOURCE_DIR} html
|
||||||
|
)
|
||||||
|
|
||||||
|
option(Sphinx_Only "When building the documentation, skip the Python Modules and the generation of C++ Api and example outputs" OFF)
|
||||||
|
if(NOT Sphinx_Only)
|
||||||
|
# Autodoc usage requires the python modules to be built first
|
||||||
|
get_property(CPP2PY_MODULES_LIST GLOBAL PROPERTY CPP2PY_MODULES_LIST)
|
||||||
|
if(CPP2PY_MODULES_LIST)
|
||||||
|
add_dependencies(${PROJECT_NAME}_docs_sphinx ${CPP2PY_MODULES_LIST})
|
||||||
|
endif()
|
||||||
|
|
||||||
|
# Generation of C++ Api and Example Outputs
|
||||||
|
add_dependencies(${PROJECT_NAME}_docs_sphinx ${PROJECT_NAME}_docs_cpp2rst ${PROJECT_NAME}_docs_example_output)
|
||||||
|
endif()
|
||||||
|
|
||||||
|
# ---------------------------------
|
||||||
|
# Install
|
||||||
|
# ---------------------------------
|
||||||
|
install(DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/html/ COMPONENT documentation DESTINATION share/doc/${PROJECT_NAME}
|
||||||
|
FILES_MATCHING
|
||||||
|
REGEX "\\.(html|pdf|png|gif|jpg|svg|js|xsl|css|py|txt|inv|bib|ttf|woff2|eot)$"
|
||||||
|
PATTERN "_*"
|
||||||
|
)
|
35
doc/ChangeLog.md
Normal file
@ -0,0 +1,35 @@
|
|||||||
|
(changelog)=
|
||||||
|
|
||||||
|
# Changelog
|
||||||
|
|
||||||
|
## Version 3.1.0
|
||||||
|
|
||||||
|
app4triqs version 3.1.0 is a compatibility
|
||||||
|
release for TRIQS version 3.1.0 that
|
||||||
|
* moves to cmake 3.12.4 and c++20
|
||||||
|
* improves ghactions and jenkins configuration
|
||||||
|
* switches documentation to read-the-docs theme
|
||||||
|
* uses googletest main branch
|
||||||
|
* fixes several skeleton issues
|
||||||
|
|
||||||
|
We thank all contributors: Alexander Hampel, Dylan Simon, Nils Wentzell
|
||||||
|
|
||||||
|
|
||||||
|
## Version 3.0.0
|
||||||
|
|
||||||
|
app4triqs version 3.0.0 is a compatibility
|
||||||
|
release for TRIQS version 3.0.0 that
|
||||||
|
* introduces compatibility with Python 3 (Python 2 no longer supported)
|
||||||
|
* adds a cmake-based dependency management
|
||||||
|
* fixes several application issues
|
||||||
|
|
||||||
|
|
||||||
|
## Version 2.2.0
|
||||||
|
|
||||||
|
app4triqs Version 2.2.0 provides a project
|
||||||
|
skeleton for TRIQS applications based on
|
||||||
|
the TRIQS Library Version 2.2.0.
|
||||||
|
It is intended for applications with both
|
||||||
|
Python and C++ components.
|
||||||
|
|
||||||
|
This is the initial release for this project.
|
5
doc/_static/css/custom.css
vendored
Normal file
@ -0,0 +1,5 @@
|
|||||||
|
@import url("theme.css");
|
||||||
|
|
||||||
|
.wy-nav-content {
|
||||||
|
max-width: 70em;
|
||||||
|
}
|
BIN
doc/_static/logo_cea.png
vendored
Normal file
After Width: | Height: | Size: 39 KiB |
BIN
doc/_static/logo_cnrs.png
vendored
Normal file
After Width: | Height: | Size: 219 KiB |
BIN
doc/_static/logo_erc.jpg
vendored
Normal file
After Width: | Height: | Size: 190 KiB |
BIN
doc/_static/logo_flatiron.png
vendored
Normal file
After Width: | Height: | Size: 31 KiB |
BIN
doc/_static/logo_github.png
vendored
Normal file
After Width: | Height: | Size: 16 KiB |
BIN
doc/_static/logo_simons.jpg
vendored
Normal file
After Width: | Height: | Size: 65 KiB |
BIN
doc/_static/logo_x.png
vendored
Normal file
After Width: | Height: | Size: 243 KiB |
29
doc/_templates/autosummary_class_template.rst
vendored
Normal file
@ -0,0 +1,29 @@
|
|||||||
|
{{ fullname | escape | underline }}
|
||||||
|
|
||||||
|
.. currentmodule:: {{ module }}
|
||||||
|
|
||||||
|
.. autoclass:: {{ objname }}
|
||||||
|
|
||||||
|
{% block methods %}
|
||||||
|
{% if methods %}
|
||||||
|
.. rubric:: {{ _('Methods') }}
|
||||||
|
|
||||||
|
.. autosummary::
|
||||||
|
:toctree:
|
||||||
|
{% for item in methods %}
|
||||||
|
~{{ name }}.{{ item }}
|
||||||
|
{%- endfor %}
|
||||||
|
{% endif %}
|
||||||
|
{% endblock %}
|
||||||
|
|
||||||
|
{% block attributes %}
|
||||||
|
{% if attributes %}
|
||||||
|
.. rubric:: {{ _('Attributes') }}
|
||||||
|
|
||||||
|
.. autosummary::
|
||||||
|
:toctree:
|
||||||
|
{% for item in attributes %}
|
||||||
|
~{{ name }}.{{ item }}
|
||||||
|
{%- endfor %}
|
||||||
|
{% endif %}
|
||||||
|
{% endblock %}
|
68
doc/_templates/autosummary_module_template.rst
vendored
Normal file
@ -0,0 +1,68 @@
|
|||||||
|
{{ fullname | escape | underline}}
|
||||||
|
|
||||||
|
.. automodule:: {{ fullname }}
|
||||||
|
|
||||||
|
{% block functions %}
|
||||||
|
{% if functions %}
|
||||||
|
.. rubric:: Functions
|
||||||
|
|
||||||
|
.. autosummary::
|
||||||
|
:toctree:
|
||||||
|
{% for item in functions %}
|
||||||
|
{{ item }}
|
||||||
|
{%- endfor %}
|
||||||
|
{% endif %}
|
||||||
|
{% endblock %}
|
||||||
|
|
||||||
|
{% block attributes %}
|
||||||
|
{% if attributes %}
|
||||||
|
.. rubric:: Module Attributes
|
||||||
|
|
||||||
|
.. autosummary::
|
||||||
|
:toctree:
|
||||||
|
{% for item in attributes %}
|
||||||
|
{{ item }}
|
||||||
|
{%- endfor %}
|
||||||
|
{% endif %}
|
||||||
|
{% endblock %}
|
||||||
|
|
||||||
|
{% block classes %}
|
||||||
|
{% if classes %}
|
||||||
|
.. rubric:: {{ _('Classes') }}
|
||||||
|
|
||||||
|
.. autosummary::
|
||||||
|
:toctree:
|
||||||
|
:template: autosummary_class_template.rst
|
||||||
|
{% for item in classes %}
|
||||||
|
{{ item }}
|
||||||
|
{%- endfor %}
|
||||||
|
{% endif %}
|
||||||
|
{% endblock %}
|
||||||
|
|
||||||
|
{% block exceptions %}
|
||||||
|
{% if exceptions %}
|
||||||
|
.. rubric:: {{ _('Exceptions') }}
|
||||||
|
|
||||||
|
.. autosummary::
|
||||||
|
:toctree:
|
||||||
|
{% for item in exceptions %}
|
||||||
|
{{ item }}
|
||||||
|
{%- endfor %}
|
||||||
|
{% endif %}
|
||||||
|
{% endblock %}
|
||||||
|
|
||||||
|
{% block modules %}
|
||||||
|
{% if modules %}
|
||||||
|
.. rubric:: Modules
|
||||||
|
|
||||||
|
.. autosummary::
|
||||||
|
:toctree:
|
||||||
|
:template: autosummary_module_template.rst
|
||||||
|
:recursive:
|
||||||
|
|
||||||
|
{% for item in modules %}
|
||||||
|
{{ item }}
|
||||||
|
{%- endfor %}
|
||||||
|
{% endif %}
|
||||||
|
{% endblock %}
|
||||||
|
|
14
doc/_templates/sideb.html
vendored
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
<p style="background-color:white;">
|
||||||
|
<a href="http://ipht.cea.fr"> <img style="width: 80px; margin: 10px 5px 0 0" src='_static/logo_cea.png' alt="CEA"/> </a>
|
||||||
|
<a href="http://www.cpht.polytechnique.fr"> <img style="width: 80px; margin: 10px 5px 0 5px" src='_static/logo_x.png' alt="Ecole Polytechnique"/> </a>
|
||||||
|
<br>
|
||||||
|
<a href="http://www.cnrs.fr"> <img style="width: 80px; margin: 10px 0 0 5px" src='_static/logo_cnrs.png' alt="CNRS"/> </a>
|
||||||
|
<img style="width: 80px; margin: 10px 0 0 5px" src='_static/logo_erc.jpg' alt="ERC"/>
|
||||||
|
<a href="https://www.simonsfoundation.org/flatiron"> <img style="width: 200px; margin: 10px 0 0 5px" src='http://itensor.org/flatiron_logo.png' alt="Flatiron Institute"/> </a>
|
||||||
|
<br>
|
||||||
|
<a href="https://www.simonsfoundation.org"> <img style="width: 200px; margin: 10px 0 0 5px" src='http://itensor.org/simons_found_logo.jpg' alt="Simons Foundation"/> </a>
|
||||||
|
</p>
|
||||||
|
<hr>
|
||||||
|
<p>
|
||||||
|
<a href="https://github.com/triqs/app4triqs"> <img style="width: 200px; margin: 10px 0 0 5px" src='_static/logo_github.png' alt="Visit the project on GitHub"/> </a>
|
||||||
|
</p>
|
8
doc/about.rst
Normal file
@ -0,0 +1,8 @@
|
|||||||
|
.. _about:
|
||||||
|
|
||||||
|
About app4triqs
|
||||||
|
***************
|
||||||
|
|
||||||
|
An example application using ``cpp2py`` and TRIQS.
|
||||||
|
|
||||||
|
Written and maintained by N. Wentzell with contributions from H. U.R. Strand.
|
133
doc/conf.py.in
Normal file
@ -0,0 +1,133 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
#
|
||||||
|
# TRIQS documentation build configuration file
|
||||||
|
|
||||||
|
import sys
|
||||||
|
sys.path.insert(0, "@CMAKE_CURRENT_SOURCE_DIR@/sphinxext")
|
||||||
|
sys.path.insert(0, "@CMAKE_CURRENT_SOURCE_DIR@/sphinxext/numpydoc")
|
||||||
|
|
||||||
|
# exclude these folders from scanning by sphinx
|
||||||
|
exclude_patterns = ['_templates']
|
||||||
|
|
||||||
|
extensions = ['sphinx.ext.autodoc',
|
||||||
|
'sphinx.ext.mathjax',
|
||||||
|
'sphinx.ext.intersphinx',
|
||||||
|
'sphinx.ext.doctest',
|
||||||
|
'sphinx.ext.todo',
|
||||||
|
'sphinx.ext.viewcode',
|
||||||
|
'sphinx.ext.autosummary',
|
||||||
|
'sphinx.ext.githubpages',
|
||||||
|
'sphinx_autorun',
|
||||||
|
'nbsphinx',
|
||||||
|
'myst_parser',
|
||||||
|
'matplotlib.sphinxext.plot_directive',
|
||||||
|
'numpydoc']
|
||||||
|
|
||||||
|
myst_enable_extensions = [
|
||||||
|
"amsmath",
|
||||||
|
"colon_fence",
|
||||||
|
"deflist",
|
||||||
|
"dollarmath",
|
||||||
|
"html_admonition",
|
||||||
|
"html_image",
|
||||||
|
"linkify",
|
||||||
|
"replacements",
|
||||||
|
"smartquotes",
|
||||||
|
"substitution",
|
||||||
|
"tasklist",
|
||||||
|
]
|
||||||
|
|
||||||
|
# The name of the Pygments (syntax highlighting) style to use.
|
||||||
|
pygments_style = 'sphinx'
|
||||||
|
|
||||||
|
source_suffix = '.rst'
|
||||||
|
|
||||||
|
# Turn on sphinx.ext.autosummary
|
||||||
|
autosummary_generate = True
|
||||||
|
autosummary_imported_members=False
|
||||||
|
|
||||||
|
project = '@PROJECT_NAME@'
|
||||||
|
version = '@PROJECT_VERSION@'
|
||||||
|
|
||||||
|
copyright = '2017-2018 N. Wentzell, O. Parcollet 2018-2021 The Simons Foundation, authors: N. Wentzell, D. Simons, H. Strand, O. Parcollet'
|
||||||
|
|
||||||
|
mathjax_path = "https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.7/MathJax.js?config=default"
|
||||||
|
templates_path = ['@CMAKE_CURRENT_SOURCE_DIR@/_templates']
|
||||||
|
|
||||||
|
# this requires the sphinx_rtd_theme to be installed via pip
|
||||||
|
html_theme = 'sphinx_rtd_theme'
|
||||||
|
# this loads the custom css file to change the page width
|
||||||
|
html_style = 'css/custom.css'
|
||||||
|
|
||||||
|
#html_favicon = '@CMAKE_CURRENT_SOURCE_DIR@/logos/favicon.ico'
|
||||||
|
#html_logo = '@CMAKE_CURRENT_SOURCE_DIR@/logos/logo.png'
|
||||||
|
|
||||||
|
# options for the rtd theme
|
||||||
|
html_theme_options = {
|
||||||
|
'logo_only': False,
|
||||||
|
'display_version': True,
|
||||||
|
'prev_next_buttons_location': 'bottom',
|
||||||
|
'style_external_links': False,
|
||||||
|
'vcs_pageview_mode': '',
|
||||||
|
'style_nav_header_background': '#7E588A',
|
||||||
|
# Toc options
|
||||||
|
'collapse_navigation': False,
|
||||||
|
'sticky_navigation': True,
|
||||||
|
'navigation_depth': 5,
|
||||||
|
'includehidden': True,
|
||||||
|
'titles_only': False
|
||||||
|
}
|
||||||
|
|
||||||
|
html_show_sphinx = False
|
||||||
|
|
||||||
|
html_context = {'header_title': '@PROJECT_NAME@'}
|
||||||
|
|
||||||
|
html_static_path = ['@CMAKE_CURRENT_SOURCE_DIR@/_static']
|
||||||
|
html_sidebars = {'index': ['sideb.html', 'searchbox.html']}
|
||||||
|
|
||||||
|
htmlhelp_basename = '@PROJECT_NAME@doc'
|
||||||
|
|
||||||
|
intersphinx_mapping = {'python': ('https://docs.python.org/3.8', None), 'triqslibs': ('https://triqs.github.io/triqs/latest', None)}
|
||||||
|
|
||||||
|
# open links in new tab instead of same window
|
||||||
|
from sphinx.writers.html import HTMLTranslator
|
||||||
|
from docutils import nodes
|
||||||
|
from docutils.nodes import Element
|
||||||
|
|
||||||
|
class PatchedHTMLTranslator(HTMLTranslator):
|
||||||
|
|
||||||
|
def visit_reference(self, node: Element) -> None:
|
||||||
|
atts = {'class': 'reference'}
|
||||||
|
if node.get('internal') or 'refuri' not in node:
|
||||||
|
atts['class'] += ' internal'
|
||||||
|
else:
|
||||||
|
atts['class'] += ' external'
|
||||||
|
# ---------------------------------------------------------
|
||||||
|
# Customize behavior (open in new tab, secure linking site)
|
||||||
|
atts['target'] = '_blank'
|
||||||
|
atts['rel'] = 'noopener noreferrer'
|
||||||
|
# ---------------------------------------------------------
|
||||||
|
if 'refuri' in node:
|
||||||
|
atts['href'] = node['refuri'] or '#'
|
||||||
|
if self.settings.cloak_email_addresses and atts['href'].startswith('mailto:'):
|
||||||
|
atts['href'] = self.cloak_mailto(atts['href'])
|
||||||
|
self.in_mailto = True
|
||||||
|
else:
|
||||||
|
assert 'refid' in node, \
|
||||||
|
'References must have "refuri" or "refid" attribute.'
|
||||||
|
atts['href'] = '#' + node['refid']
|
||||||
|
if not isinstance(node.parent, nodes.TextElement):
|
||||||
|
assert len(node) == 1 and isinstance(node[0], nodes.image)
|
||||||
|
atts['class'] += ' image-reference'
|
||||||
|
if 'reftitle' in node:
|
||||||
|
atts['title'] = node['reftitle']
|
||||||
|
if 'target' in node:
|
||||||
|
atts['target'] = node['target']
|
||||||
|
self.body.append(self.starttag(node, 'a', '', **atts))
|
||||||
|
|
||||||
|
if node.get('secnumber'):
|
||||||
|
self.body.append(('%s' + self.secnumber_suffix) %
|
||||||
|
'.'.join(map(str, node['secnumber'])))
|
||||||
|
|
||||||
|
def setup(app):
|
||||||
|
app.set_translator('html', PatchedHTMLTranslator)
|
29
doc/documentation.rst
Normal file
@ -0,0 +1,29 @@
|
|||||||
|
.. _documentation:
|
||||||
|
|
||||||
|
Documentation
|
||||||
|
*************
|
||||||
|
|
||||||
|
|
||||||
|
.. math::
|
||||||
|
|
||||||
|
(a + b)^2 &= (a + b)(a + b) \\
|
||||||
|
&= a^2 + 2ab + b^2
|
||||||
|
|
||||||
|
|
||||||
|
C++ reference manual
|
||||||
|
====================
|
||||||
|
|
||||||
|
.. toctree::
|
||||||
|
:maxdepth: 5
|
||||||
|
|
||||||
|
cpp2rst_generated/contents
|
||||||
|
|
||||||
|
Python reference manual
|
||||||
|
=======================
|
||||||
|
|
||||||
|
.. autosummary::
|
||||||
|
:toctree: _autosummary
|
||||||
|
:template: autosummary_module_template.rst
|
||||||
|
:recursive:
|
||||||
|
|
||||||
|
app4triqs
|
33
doc/index.rst
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
.. _welcome:
|
||||||
|
|
||||||
|
app4triqs
|
||||||
|
*********
|
||||||
|
|
||||||
|
.. sidebar:: app4triqs 3.0.0
|
||||||
|
|
||||||
|
This is the homepage of app4triqs v3.0.0.
|
||||||
|
For changes see the :ref:`changelog page <changelog>`.
|
||||||
|
|
||||||
|
.. image:: _static/logo_github.png
|
||||||
|
:width: 75%
|
||||||
|
:align: center
|
||||||
|
:target: https://github.com/triqs/app4triqs
|
||||||
|
|
||||||
|
|
||||||
|
An example application using cpp2py and :ref:`TRIQS <triqslibs:welcome>`.
|
||||||
|
|
||||||
|
This documentation is generated based on `rst <https://de.wikipedia.org/wiki/ReStructuredText>`_ files
|
||||||
|
and the comments in the sources and headers.
|
||||||
|
|
||||||
|
Learn how to use app4triqs in the :ref:`documentation`.
|
||||||
|
|
||||||
|
|
||||||
|
.. toctree::
|
||||||
|
:maxdepth: 2
|
||||||
|
:hidden:
|
||||||
|
|
||||||
|
install
|
||||||
|
documentation
|
||||||
|
issues
|
||||||
|
ChangeLog.md
|
||||||
|
about
|
77
doc/install.rst
Normal file
@ -0,0 +1,77 @@
|
|||||||
|
.. highlight:: bash
|
||||||
|
|
||||||
|
.. _install:
|
||||||
|
|
||||||
|
Install app4triqs
|
||||||
|
*******************
|
||||||
|
|
||||||
|
Compiling app4triqs from source
|
||||||
|
===============================
|
||||||
|
|
||||||
|
.. note:: To guarantee reproducibility in scientific calculations we strongly recommend the use of a stable `release <https://github.com/TRIQS/triqs/releases>`_ of both TRIQS and its applications.
|
||||||
|
|
||||||
|
Prerequisites
|
||||||
|
-------------
|
||||||
|
|
||||||
|
#. The :ref:`TRIQS <triqslibs:welcome>` library, see :ref:`TRIQS installation instruction <triqslibs:installation>`.
|
||||||
|
In the following, we assume that TRIQS is installed in the directory ``path_to_triqs``.
|
||||||
|
|
||||||
|
Installation steps
|
||||||
|
------------------
|
||||||
|
|
||||||
|
#. Download the source code of the latest stable version by cloning the ``TRIQS/app4triqs`` repository from GitHub::
|
||||||
|
|
||||||
|
$ git clone https://github.com/TRIQS/app4triqs app4triqs.src
|
||||||
|
|
||||||
|
#. Create and move to a new directory where you will compile the code::
|
||||||
|
|
||||||
|
$ mkdir app4triqs.build && cd app4triqs.build
|
||||||
|
|
||||||
|
#. Ensure that your shell contains the TRIQS environment variables by sourcing the ``triqsvars.sh`` file from your TRIQS installation::
|
||||||
|
|
||||||
|
$ source path_to_triqs/share/triqs/triqsvars.sh
|
||||||
|
|
||||||
|
#. In the build directory call cmake, including any additional custom CMake options, see below::
|
||||||
|
|
||||||
|
$ cmake ../app4triqs.src
|
||||||
|
|
||||||
|
#. Compile the code, run the tests and install the application::
|
||||||
|
|
||||||
|
$ make
|
||||||
|
$ make test
|
||||||
|
$ make install
|
||||||
|
|
||||||
|
Version compatibility
|
||||||
|
---------------------
|
||||||
|
|
||||||
|
Keep in mind that the version of ``app4triqs`` must be compatible with your TRIQS library version,
|
||||||
|
see :ref:`TRIQS website <triqslibs:versions>`.
|
||||||
|
In particular the Major and Minor Version numbers have to be the same.
|
||||||
|
To use a particular version, go into the directory with the sources, and look at all available versions::
|
||||||
|
|
||||||
|
$ cd app4triqs.src && git tag
|
||||||
|
|
||||||
|
Checkout the version of the code that you want::
|
||||||
|
|
||||||
|
$ git checkout 2.1.0
|
||||||
|
|
||||||
|
and follow steps 2 to 4 above to compile the code.
|
||||||
|
|
||||||
|
Custom CMake options
|
||||||
|
--------------------
|
||||||
|
|
||||||
|
The compilation of ``app4triqs`` can be configured using CMake-options::
|
||||||
|
|
||||||
|
cmake ../app4triqs.src -DOPTION1=value1 -DOPTION2=value2 ...
|
||||||
|
|
||||||
|
+-----------------------------------------------------------------+-----------------------------------------------+
|
||||||
|
| Options | Syntax |
|
||||||
|
+=================================================================+===============================================+
|
||||||
|
| Specify an installation path other than path_to_triqs | -DCMAKE_INSTALL_PREFIX=path_to_app4triqs |
|
||||||
|
+-----------------------------------------------------------------+-----------------------------------------------+
|
||||||
|
| Build in Debugging Mode | -DCMAKE_BUILD_TYPE=Debug |
|
||||||
|
+-----------------------------------------------------------------+-----------------------------------------------+
|
||||||
|
| Disable testing (not recommended) | -DBuild_Tests=OFF |
|
||||||
|
+-----------------------------------------------------------------+-----------------------------------------------+
|
||||||
|
| Build the documentation | -DBuild_Documentation=ON |
|
||||||
|
+-----------------------------------------------------------------+-----------------------------------------------+
|
23
doc/issues.rst
Normal file
@ -0,0 +1,23 @@
|
|||||||
|
.. _issues:
|
||||||
|
|
||||||
|
Reporting issues
|
||||||
|
****************
|
||||||
|
|
||||||
|
Please report all problems and bugs directly at the github issue page
|
||||||
|
`<https://github.com/TRIQS/app4triqs/issues>`_. In order to make it easier for us
|
||||||
|
to solve the issue please follow these guidelines:
|
||||||
|
|
||||||
|
#. In all cases specify which version of the application you are using. You can
|
||||||
|
find the version number in the file :file:`CMakeLists.txt` at the root of the
|
||||||
|
application sources.
|
||||||
|
|
||||||
|
#. If you have a problem during the installation, give us information about
|
||||||
|
your operating system and the compiler you are using. Include the outputs of
|
||||||
|
the ``cmake`` and ``make`` commands as well as the ``CMakeCache.txt`` file
|
||||||
|
which is in the build directory. Please include these outputs in a
|
||||||
|
`gist <http://gist.github.com/>`_ file referenced in the issue.
|
||||||
|
|
||||||
|
#. If you are experiencing a problem during the execution of the application, provide
|
||||||
|
a script which allows us to quickly reproduce the problem.
|
||||||
|
|
||||||
|
Thanks!
|
427
doc/sphinxext/numpydoc/apigen.py
Normal file
@ -0,0 +1,427 @@
|
|||||||
|
"""Attempt to generate templates for module reference with Sphinx
|
||||||
|
|
||||||
|
XXX - we exclude extension modules
|
||||||
|
|
||||||
|
To include extension modules, first identify them as valid in the
|
||||||
|
``_uri2path`` method, then handle them in the ``_parse_module`` script.
|
||||||
|
|
||||||
|
We get functions and classes by parsing the text of .py files.
|
||||||
|
Alternatively we could import the modules for discovery, and we'd have
|
||||||
|
to do that for extension modules. This would involve changing the
|
||||||
|
``_parse_module`` method to work via import and introspection, and
|
||||||
|
might involve changing ``discover_modules`` (which determines which
|
||||||
|
files are modules, and therefore which module URIs will be passed to
|
||||||
|
``_parse_module``).
|
||||||
|
|
||||||
|
NOTE: this is a modified version of a script originally shipped with the
|
||||||
|
PyMVPA project, which we've adapted for NIPY use. PyMVPA is an MIT-licensed
|
||||||
|
project."""
|
||||||
|
|
||||||
|
# Stdlib imports
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
|
||||||
|
# Functions and classes
|
||||||
|
class ApiDocWriter:
|
||||||
|
''' Class for automatic detection and parsing of API docs
|
||||||
|
to Sphinx-parsable reST format'''
|
||||||
|
|
||||||
|
# only separating first two levels
|
||||||
|
rst_section_levels = ['*', '=', '-', '~', '^']
|
||||||
|
|
||||||
|
def __init__(self,
|
||||||
|
package_name,
|
||||||
|
rst_extension='.rst',
|
||||||
|
package_skip_patterns=None,
|
||||||
|
module_skip_patterns=None,
|
||||||
|
):
|
||||||
|
''' Initialize package for parsing
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
package_name : string
|
||||||
|
Name of the top-level package. *package_name* must be the
|
||||||
|
name of an importable package
|
||||||
|
rst_extension : string, optional
|
||||||
|
Extension for reST files, default '.rst'
|
||||||
|
package_skip_patterns : None or sequence of {strings, regexps}
|
||||||
|
Sequence of strings giving URIs of packages to be excluded
|
||||||
|
Operates on the package path, starting at (including) the
|
||||||
|
first dot in the package path, after *package_name* - so,
|
||||||
|
if *package_name* is ``sphinx``, then ``sphinx.util`` will
|
||||||
|
result in ``.util`` being passed for searching by these
|
||||||
|
regexps. If is None, gives default. Default is:
|
||||||
|
['\.tests$']
|
||||||
|
module_skip_patterns : None or sequence
|
||||||
|
Sequence of strings giving URIs of modules to be excluded
|
||||||
|
Operates on the module name including preceding URI path,
|
||||||
|
back to the first dot after *package_name*. For example
|
||||||
|
``sphinx.util.console`` results in the string to search of
|
||||||
|
``.util.console``
|
||||||
|
If is None, gives default. Default is:
|
||||||
|
['\.setup$', '\._']
|
||||||
|
'''
|
||||||
|
if package_skip_patterns is None:
|
||||||
|
package_skip_patterns = ['\\.tests$']
|
||||||
|
if module_skip_patterns is None:
|
||||||
|
module_skip_patterns = ['\\.setup$', '\\._']
|
||||||
|
self.package_name = package_name
|
||||||
|
self.rst_extension = rst_extension
|
||||||
|
self.package_skip_patterns = package_skip_patterns
|
||||||
|
self.module_skip_patterns = module_skip_patterns
|
||||||
|
|
||||||
|
def get_package_name(self):
|
||||||
|
return self._package_name
|
||||||
|
|
||||||
|
def set_package_name(self, package_name):
|
||||||
|
''' Set package_name
|
||||||
|
|
||||||
|
>>> docwriter = ApiDocWriter('sphinx')
|
||||||
|
>>> import sphinx
|
||||||
|
>>> docwriter.root_path == sphinx.__path__[0]
|
||||||
|
True
|
||||||
|
>>> docwriter.package_name = 'docutils'
|
||||||
|
>>> import docutils
|
||||||
|
>>> docwriter.root_path == docutils.__path__[0]
|
||||||
|
True
|
||||||
|
'''
|
||||||
|
# It's also possible to imagine caching the module parsing here
|
||||||
|
self._package_name = package_name
|
||||||
|
self.root_module = __import__(package_name)
|
||||||
|
self.root_path = self.root_module.__path__[0]
|
||||||
|
self.written_modules = None
|
||||||
|
|
||||||
|
package_name = property(get_package_name, set_package_name, None,
|
||||||
|
'get/set package_name')
|
||||||
|
|
||||||
|
def _get_object_name(self, line):
|
||||||
|
''' Get second token in line
|
||||||
|
>>> docwriter = ApiDocWriter('sphinx')
|
||||||
|
>>> docwriter._get_object_name(" def func(): ")
|
||||||
|
'func'
|
||||||
|
>>> docwriter._get_object_name(" class Klass: ")
|
||||||
|
'Klass'
|
||||||
|
>>> docwriter._get_object_name(" class Klass: ")
|
||||||
|
'Klass'
|
||||||
|
'''
|
||||||
|
name = line.split()[1].split('(')[0].strip()
|
||||||
|
# in case we have classes which are not derived from object
|
||||||
|
# ie. old style classes
|
||||||
|
return name.rstrip(':')
|
||||||
|
|
||||||
|
def _uri2path(self, uri):
|
||||||
|
''' Convert uri to absolute filepath
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
uri : string
|
||||||
|
URI of python module to return path for
|
||||||
|
|
||||||
|
Returns
|
||||||
|
-------
|
||||||
|
path : None or string
|
||||||
|
Returns None if there is no valid path for this URI
|
||||||
|
Otherwise returns absolute file system path for URI
|
||||||
|
|
||||||
|
Examples
|
||||||
|
--------
|
||||||
|
>>> docwriter = ApiDocWriter('sphinx')
|
||||||
|
>>> import sphinx
|
||||||
|
>>> modpath = sphinx.__path__[0]
|
||||||
|
>>> res = docwriter._uri2path('sphinx.builder')
|
||||||
|
>>> res == os.path.join(modpath, 'builder.py')
|
||||||
|
True
|
||||||
|
>>> res = docwriter._uri2path('sphinx')
|
||||||
|
>>> res == os.path.join(modpath, '__init__.py')
|
||||||
|
True
|
||||||
|
>>> docwriter._uri2path('sphinx.does_not_exist')
|
||||||
|
|
||||||
|
'''
|
||||||
|
if uri == self.package_name:
|
||||||
|
return os.path.join(self.root_path, '__init__.py')
|
||||||
|
path = uri.replace('.', os.path.sep)
|
||||||
|
path = path.replace(self.package_name + os.path.sep, '')
|
||||||
|
path = os.path.join(self.root_path, path)
|
||||||
|
# XXX maybe check for extensions as well?
|
||||||
|
if os.path.exists(path + '.py'): # file
|
||||||
|
path += '.py'
|
||||||
|
elif os.path.exists(os.path.join(path, '__init__.py')):
|
||||||
|
path = os.path.join(path, '__init__.py')
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
return path
|
||||||
|
|
||||||
|
def _path2uri(self, dirpath):
|
||||||
|
''' Convert directory path to uri '''
|
||||||
|
relpath = dirpath.replace(self.root_path, self.package_name)
|
||||||
|
if relpath.startswith(os.path.sep):
|
||||||
|
relpath = relpath[1:]
|
||||||
|
return relpath.replace(os.path.sep, '.')
|
||||||
|
|
||||||
|
def _parse_module(self, uri):
|
||||||
|
''' Parse module defined in *uri* '''
|
||||||
|
filename = self._uri2path(uri)
|
||||||
|
if filename is None:
|
||||||
|
# nothing that we could handle here.
|
||||||
|
return ([],[])
|
||||||
|
f = open(filename, 'rt')
|
||||||
|
functions, classes = self._parse_lines(f)
|
||||||
|
f.close()
|
||||||
|
return functions, classes
|
||||||
|
|
||||||
|
def _parse_lines(self, linesource):
|
||||||
|
''' Parse lines of text for functions and classes '''
|
||||||
|
functions = []
|
||||||
|
classes = []
|
||||||
|
for line in linesource:
|
||||||
|
if line.startswith('def ') and line.count('('):
|
||||||
|
# exclude private stuff
|
||||||
|
name = self._get_object_name(line)
|
||||||
|
if not name.startswith('_'):
|
||||||
|
functions.append(name)
|
||||||
|
elif line.startswith('class '):
|
||||||
|
# exclude private stuff
|
||||||
|
name = self._get_object_name(line)
|
||||||
|
if not name.startswith('_'):
|
||||||
|
classes.append(name)
|
||||||
|
else:
|
||||||
|
pass
|
||||||
|
functions.sort()
|
||||||
|
classes.sort()
|
||||||
|
return functions, classes
|
||||||
|
|
||||||
|
def generate_api_doc(self, uri):
|
||||||
|
'''Make autodoc documentation template string for a module
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
uri : string
|
||||||
|
python location of module - e.g 'sphinx.builder'
|
||||||
|
|
||||||
|
Returns
|
||||||
|
-------
|
||||||
|
S : string
|
||||||
|
Contents of API doc
|
||||||
|
'''
|
||||||
|
# get the names of all classes and functions
|
||||||
|
functions, classes = self._parse_module(uri)
|
||||||
|
if not len(functions) and not len(classes):
|
||||||
|
print('WARNING: Empty -',uri) # dbg
|
||||||
|
return ''
|
||||||
|
|
||||||
|
# Make a shorter version of the uri that omits the package name for
|
||||||
|
# titles
|
||||||
|
uri_short = re.sub(r'^%s\.' % self.package_name,'',uri)
|
||||||
|
|
||||||
|
ad = '.. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n'
|
||||||
|
|
||||||
|
chap_title = uri_short
|
||||||
|
ad += (chap_title+'\n'+ self.rst_section_levels[1] * len(chap_title)
|
||||||
|
+ '\n\n')
|
||||||
|
|
||||||
|
# Set the chapter title to read 'module' for all modules except for the
|
||||||
|
# main packages
|
||||||
|
if '.' in uri:
|
||||||
|
title = 'Module: :mod:`' + uri_short + '`'
|
||||||
|
else:
|
||||||
|
title = ':mod:`' + uri_short + '`'
|
||||||
|
ad += title + '\n' + self.rst_section_levels[2] * len(title)
|
||||||
|
|
||||||
|
if len(classes):
|
||||||
|
ad += '\nInheritance diagram for ``%s``:\n\n' % uri
|
||||||
|
ad += '.. inheritance-diagram:: %s \n' % uri
|
||||||
|
ad += ' :parts: 3\n'
|
||||||
|
|
||||||
|
ad += '\n.. automodule:: ' + uri + '\n'
|
||||||
|
ad += '\n.. currentmodule:: ' + uri + '\n'
|
||||||
|
multi_class = len(classes) > 1
|
||||||
|
multi_fx = len(functions) > 1
|
||||||
|
if multi_class:
|
||||||
|
ad += '\n' + 'Classes' + '\n' + \
|
||||||
|
self.rst_section_levels[2] * 7 + '\n'
|
||||||
|
elif len(classes) and multi_fx:
|
||||||
|
ad += '\n' + 'Class' + '\n' + \
|
||||||
|
self.rst_section_levels[2] * 5 + '\n'
|
||||||
|
for c in classes:
|
||||||
|
ad += '\n:class:`' + c + '`\n' \
|
||||||
|
+ self.rst_section_levels[multi_class + 2 ] * \
|
||||||
|
(len(c)+9) + '\n\n'
|
||||||
|
ad += '\n.. autoclass:: ' + c + '\n'
|
||||||
|
# must NOT exclude from index to keep cross-refs working
|
||||||
|
ad += ' :members:\n' \
|
||||||
|
' :undoc-members:\n' \
|
||||||
|
' :show-inheritance:\n' \
|
||||||
|
' :inherited-members:\n' \
|
||||||
|
'\n' \
|
||||||
|
' .. automethod:: __init__\n'
|
||||||
|
if multi_fx:
|
||||||
|
ad += '\n' + 'Functions' + '\n' + \
|
||||||
|
self.rst_section_levels[2] * 9 + '\n\n'
|
||||||
|
elif len(functions) and multi_class:
|
||||||
|
ad += '\n' + 'Function' + '\n' + \
|
||||||
|
self.rst_section_levels[2] * 8 + '\n\n'
|
||||||
|
for f in functions:
|
||||||
|
# must NOT exclude from index to keep cross-refs working
|
||||||
|
ad += '\n.. autofunction:: ' + uri + '.' + f + '\n\n'
|
||||||
|
return ad
|
||||||
|
|
||||||
|
def _survives_exclude(self, matchstr, match_type):
|
||||||
|
''' Returns True if *matchstr* does not match patterns
|
||||||
|
|
||||||
|
``self.package_name`` removed from front of string if present
|
||||||
|
|
||||||
|
Examples
|
||||||
|
--------
|
||||||
|
>>> dw = ApiDocWriter('sphinx')
|
||||||
|
>>> dw._survives_exclude('sphinx.okpkg', 'package')
|
||||||
|
True
|
||||||
|
>>> dw.package_skip_patterns.append('^\\.badpkg$')
|
||||||
|
>>> dw._survives_exclude('sphinx.badpkg', 'package')
|
||||||
|
False
|
||||||
|
>>> dw._survives_exclude('sphinx.badpkg', 'module')
|
||||||
|
True
|
||||||
|
>>> dw._survives_exclude('sphinx.badmod', 'module')
|
||||||
|
True
|
||||||
|
>>> dw.module_skip_patterns.append('^\\.badmod$')
|
||||||
|
>>> dw._survives_exclude('sphinx.badmod', 'module')
|
||||||
|
False
|
||||||
|
'''
|
||||||
|
if match_type == 'module':
|
||||||
|
patterns = self.module_skip_patterns
|
||||||
|
elif match_type == 'package':
|
||||||
|
patterns = self.package_skip_patterns
|
||||||
|
else:
|
||||||
|
raise ValueError('Cannot interpret match type "%s"'
|
||||||
|
% match_type)
|
||||||
|
# Match to URI without package name
|
||||||
|
L = len(self.package_name)
|
||||||
|
if matchstr[:L] == self.package_name:
|
||||||
|
matchstr = matchstr[L:]
|
||||||
|
for pat in patterns:
|
||||||
|
try:
|
||||||
|
pat.search
|
||||||
|
except AttributeError:
|
||||||
|
pat = re.compile(pat)
|
||||||
|
if pat.search(matchstr):
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
def discover_modules(self):
|
||||||
|
''' Return module sequence discovered from ``self.package_name``
|
||||||
|
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
None
|
||||||
|
|
||||||
|
Returns
|
||||||
|
-------
|
||||||
|
mods : sequence
|
||||||
|
Sequence of module names within ``self.package_name``
|
||||||
|
|
||||||
|
Examples
|
||||||
|
--------
|
||||||
|
>>> dw = ApiDocWriter('sphinx')
|
||||||
|
>>> mods = dw.discover_modules()
|
||||||
|
>>> 'sphinx.util' in mods
|
||||||
|
True
|
||||||
|
>>> dw.package_skip_patterns.append('\.util$')
|
||||||
|
>>> 'sphinx.util' in dw.discover_modules()
|
||||||
|
False
|
||||||
|
>>>
|
||||||
|
'''
|
||||||
|
modules = [self.package_name]
|
||||||
|
# raw directory parsing
|
||||||
|
for dirpath, dirnames, filenames in os.walk(self.root_path):
|
||||||
|
# Check directory names for packages
|
||||||
|
root_uri = self._path2uri(os.path.join(self.root_path,
|
||||||
|
dirpath))
|
||||||
|
for dirname in dirnames[:]: # copy list - we modify inplace
|
||||||
|
package_uri = '.'.join((root_uri, dirname))
|
||||||
|
if (self._uri2path(package_uri) and
|
||||||
|
self._survives_exclude(package_uri, 'package')):
|
||||||
|
modules.append(package_uri)
|
||||||
|
else:
|
||||||
|
dirnames.remove(dirname)
|
||||||
|
# Check filenames for modules
|
||||||
|
for filename in filenames:
|
||||||
|
module_name = filename[:-3]
|
||||||
|
module_uri = '.'.join((root_uri, module_name))
|
||||||
|
if (self._uri2path(module_uri) and
|
||||||
|
self._survives_exclude(module_uri, 'module')):
|
||||||
|
modules.append(module_uri)
|
||||||
|
return sorted(modules)
|
||||||
|
|
||||||
|
def write_modules_api(self, modules,outdir):
|
||||||
|
# write the list
|
||||||
|
written_modules = []
|
||||||
|
for m in modules:
|
||||||
|
api_str = self.generate_api_doc(m)
|
||||||
|
if not api_str:
|
||||||
|
continue
|
||||||
|
# write out to file
|
||||||
|
outfile = os.path.join(outdir,
|
||||||
|
m + self.rst_extension)
|
||||||
|
fileobj = open(outfile, 'wt')
|
||||||
|
fileobj.write(api_str)
|
||||||
|
fileobj.close()
|
||||||
|
written_modules.append(m)
|
||||||
|
self.written_modules = written_modules
|
||||||
|
|
||||||
|
def write_api_docs(self, outdir):
|
||||||
|
"""Generate API reST files.
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
outdir : string
|
||||||
|
Directory name in which to store files
|
||||||
|
We create automatic filenames for each module
|
||||||
|
|
||||||
|
Returns
|
||||||
|
-------
|
||||||
|
None
|
||||||
|
|
||||||
|
Notes
|
||||||
|
-----
|
||||||
|
Sets self.written_modules to list of written modules
|
||||||
|
"""
|
||||||
|
if not os.path.exists(outdir):
|
||||||
|
os.mkdir(outdir)
|
||||||
|
# compose list of modules
|
||||||
|
modules = self.discover_modules()
|
||||||
|
self.write_modules_api(modules,outdir)
|
||||||
|
|
||||||
|
def write_index(self, outdir, froot='gen', relative_to=None):
|
||||||
|
"""Make a reST API index file from written files
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
|
||||||
|
outdir : string
|
||||||
|
Directory to which to write generated index file
|
||||||
|
froot : string, optional
|
||||||
|
root (filename without extension) of filename to write to
|
||||||
|
Defaults to 'gen'. We add ``self.rst_extension``.
|
||||||
|
relative_to : string
|
||||||
|
path to which written filenames are relative. This
|
||||||
|
component of the written file path will be removed from
|
||||||
|
outdir, in the generated index. Default is None, meaning,
|
||||||
|
leave path as it is.
|
||||||
|
"""
|
||||||
|
if self.written_modules is None:
|
||||||
|
raise ValueError('No modules written')
|
||||||
|
# Get full filename path
|
||||||
|
path = os.path.join(outdir, froot+self.rst_extension)
|
||||||
|
# Path written into index is relative to rootpath
|
||||||
|
if relative_to is not None:
|
||||||
|
relpath = outdir.replace(relative_to + os.path.sep, '')
|
||||||
|
else:
|
||||||
|
relpath = outdir
|
||||||
|
idx = open(path,'wt')
|
||||||
|
w = idx.write
|
||||||
|
w('.. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n')
|
||||||
|
w('.. toctree::\n\n')
|
||||||
|
for f in self.written_modules:
|
||||||
|
w(' %s\n' % os.path.join(relpath,f))
|
||||||
|
idx.close()
|
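The ``ApiDocWriter`` above is meant to be driven from a small build script: point it at an importable package, let ``discover_modules`` walk the source tree, and write one reST stub per module plus a toctree index. A minimal sketch of that flow follows; the package name ``app4triqs``, the output paths, and the extra skip pattern are purely illustrative, and it assumes this ``apigen.py`` is importable.

```python
# Sketch: generate per-module API stubs and an index with ApiDocWriter.
# Assumes apigen.py is importable and 'app4triqs' is an importable package.
from apigen import ApiDocWriter

docwriter = ApiDocWriter('app4triqs', rst_extension='.rst')
docwriter.package_skip_patterns += [r'\.deprecated$']   # extra skip pattern, illustrative only
docwriter.write_api_docs('reference/generated')          # one .rst file per discovered module
docwriter.write_index('reference/generated', froot='gen', relative_to='reference')
print('%d API files written' % len(docwriter.written_modules))
```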
497
doc/sphinxext/numpydoc/docscrape.py
Normal file
@ -0,0 +1,497 @@
|
|||||||
|
"""Extract reference documentation from the NumPy source tree.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
import inspect
|
||||||
|
import textwrap
|
||||||
|
import re
|
||||||
|
import pydoc
|
||||||
|
from io import StringIO
|
||||||
|
from warnings import warn
|
||||||
|
|
||||||
|
class Reader:
|
||||||
|
"""A line-based string reader.
|
||||||
|
|
||||||
|
"""
|
||||||
|
def __init__(self, data):
|
||||||
|
"""
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
data : str
|
||||||
|
String with lines separated by '\n'.
|
||||||
|
|
||||||
|
"""
|
||||||
|
if isinstance(data,list):
|
||||||
|
self._str = data
|
||||||
|
else:
|
||||||
|
self._str = data.split('\n') # store string as list of lines
|
||||||
|
|
||||||
|
self.reset()
|
||||||
|
|
||||||
|
def __getitem__(self, n):
|
||||||
|
return self._str[n]
|
||||||
|
|
||||||
|
def reset(self):
|
||||||
|
self._l = 0 # current line nr
|
||||||
|
|
||||||
|
def read(self):
|
||||||
|
if not self.eof():
|
||||||
|
out = self[self._l]
|
||||||
|
self._l += 1
|
||||||
|
return out
|
||||||
|
else:
|
||||||
|
return ''
|
||||||
|
|
||||||
|
def seek_next_non_empty_line(self):
|
||||||
|
for l in self[self._l:]:
|
||||||
|
if l.strip():
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
self._l += 1
|
||||||
|
|
||||||
|
def eof(self):
|
||||||
|
return self._l >= len(self._str)
|
||||||
|
|
||||||
|
def read_to_condition(self, condition_func):
|
||||||
|
start = self._l
|
||||||
|
for line in self[start:]:
|
||||||
|
if condition_func(line):
|
||||||
|
return self[start:self._l]
|
||||||
|
self._l += 1
|
||||||
|
if self.eof():
|
||||||
|
return self[start:self._l+1]
|
||||||
|
return []
|
||||||
|
|
||||||
|
def read_to_next_empty_line(self):
|
||||||
|
self.seek_next_non_empty_line()
|
||||||
|
def is_empty(line):
|
||||||
|
return not line.strip()
|
||||||
|
return self.read_to_condition(is_empty)
|
||||||
|
|
||||||
|
def read_to_next_unindented_line(self):
|
||||||
|
def is_unindented(line):
|
||||||
|
return (line.strip() and (len(line.lstrip()) == len(line)))
|
||||||
|
return self.read_to_condition(is_unindented)
|
||||||
|
|
||||||
|
def peek(self,n=0):
|
||||||
|
if self._l + n < len(self._str):
|
||||||
|
return self[self._l + n]
|
||||||
|
else:
|
||||||
|
return ''
|
||||||
|
|
||||||
|
def is_empty(self):
|
||||||
|
return not ''.join(self._str).strip()
|
||||||
|
|
||||||
|
|
||||||
|
class NumpyDocString:
|
||||||
|
def __init__(self,docstring):
|
||||||
|
docstring = textwrap.dedent(docstring).split('\n')
|
||||||
|
|
||||||
|
self._doc = Reader(docstring)
|
||||||
|
self._parsed_data = {
|
||||||
|
'Signature': '',
|
||||||
|
'Summary': [''],
|
||||||
|
'Extended Summary': [],
|
||||||
|
'Parameters': [],
|
||||||
|
'Returns': [],
|
||||||
|
'Raises': [],
|
||||||
|
'Warns': [],
|
||||||
|
'Other Parameters': [],
|
||||||
|
'Attributes': [],
|
||||||
|
'Methods': [],
|
||||||
|
'See Also': [],
|
||||||
|
'Notes': [],
|
||||||
|
'Warnings': [],
|
||||||
|
'References': '',
|
||||||
|
'Examples': '',
|
||||||
|
'index': {}
|
||||||
|
}
|
||||||
|
|
||||||
|
self._parse()
|
||||||
|
|
||||||
|
def __getitem__(self,key):
|
||||||
|
return self._parsed_data[key]
|
||||||
|
|
||||||
|
def __setitem__(self,key,val):
|
||||||
|
if key not in self._parsed_data:
|
||||||
|
warn("Unknown section %s" % key)
|
||||||
|
else:
|
||||||
|
self._parsed_data[key] = val
|
||||||
|
|
||||||
|
def _is_at_section(self):
|
||||||
|
self._doc.seek_next_non_empty_line()
|
||||||
|
|
||||||
|
if self._doc.eof():
|
||||||
|
return False
|
||||||
|
|
||||||
|
l1 = self._doc.peek().strip() # e.g. Parameters
|
||||||
|
|
||||||
|
if l1.startswith('.. index::'):
|
||||||
|
return True
|
||||||
|
|
||||||
|
l2 = self._doc.peek(1).strip() # ---------- or ==========
|
||||||
|
return l2.startswith('-'*len(l1)) or l2.startswith('='*len(l1))
|
||||||
|
|
||||||
|
def _strip(self,doc):
|
||||||
|
i = 0
|
||||||
|
j = 0
|
||||||
|
for i,line in enumerate(doc):
|
||||||
|
if line.strip(): break
|
||||||
|
|
||||||
|
for j,line in enumerate(doc[::-1]):
|
||||||
|
if line.strip(): break
|
||||||
|
|
||||||
|
return doc[i:len(doc)-j]
|
||||||
|
|
||||||
|
def _read_to_next_section(self):
|
||||||
|
section = self._doc.read_to_next_empty_line()
|
||||||
|
|
||||||
|
while not self._is_at_section() and not self._doc.eof():
|
||||||
|
if not self._doc.peek(-1).strip(): # previous line was empty
|
||||||
|
section += ['']
|
||||||
|
|
||||||
|
section += self._doc.read_to_next_empty_line()
|
||||||
|
|
||||||
|
return section
|
||||||
|
|
||||||
|
def _read_sections(self):
|
||||||
|
while not self._doc.eof():
|
||||||
|
data = self._read_to_next_section()
|
||||||
|
name = data[0].strip()
|
||||||
|
|
||||||
|
if name.startswith('..'): # index section
|
||||||
|
yield name, data[1:]
|
||||||
|
elif len(data) < 2:
|
||||||
|
return  # nothing left to parse; end the generator cleanly
|
||||||
|
else:
|
||||||
|
yield name, self._strip(data[2:])
|
||||||
|
|
||||||
|
def _parse_param_list(self,content):
|
||||||
|
r = Reader(content)
|
||||||
|
params = []
|
||||||
|
while not r.eof():
|
||||||
|
header = r.read().strip()
|
||||||
|
if ' : ' in header:
|
||||||
|
arg_name, arg_type = header.split(' : ')[:2]
|
||||||
|
else:
|
||||||
|
arg_name, arg_type = header, ''
|
||||||
|
|
||||||
|
desc = r.read_to_next_unindented_line()
|
||||||
|
desc = dedent_lines(desc)
|
||||||
|
|
||||||
|
params.append((arg_name,arg_type,desc))
|
||||||
|
|
||||||
|
return params
|
||||||
|
|
||||||
|
|
||||||
|
_name_rgx = re.compile(r"^\s*(:(?P<role>\w+):`(?P<name>[a-zA-Z0-9_.-]+)`|"
|
||||||
|
r" (?P<name2>[a-zA-Z0-9_.-]+))\s*", re.X)
|
||||||
|
def _parse_see_also(self, content):
|
||||||
|
"""
|
||||||
|
func_name : Descriptive text
|
||||||
|
continued text
|
||||||
|
another_func_name : Descriptive text
|
||||||
|
func_name1, func_name2, :meth:`func_name`, func_name3
|
||||||
|
|
||||||
|
"""
|
||||||
|
items = []
|
||||||
|
|
||||||
|
def parse_item_name(text):
|
||||||
|
"""Match ':role:`name`' or 'name'"""
|
||||||
|
m = self._name_rgx.match(text)
|
||||||
|
if m:
|
||||||
|
g = m.groups()
|
||||||
|
if g[1] is None:
|
||||||
|
return g[3], None
|
||||||
|
else:
|
||||||
|
return g[2], g[1]
|
||||||
|
raise ValueError("%s is not an item name" % text)
|
||||||
|
|
||||||
|
def push_item(name, rest):
|
||||||
|
if not name:
|
||||||
|
return
|
||||||
|
name, role = parse_item_name(name)
|
||||||
|
items.append((name, list(rest), role))
|
||||||
|
del rest[:]
|
||||||
|
|
||||||
|
current_func = None
|
||||||
|
rest = []
|
||||||
|
|
||||||
|
for line in content:
|
||||||
|
if not line.strip(): continue
|
||||||
|
|
||||||
|
m = self._name_rgx.match(line)
|
||||||
|
if m and line[m.end():].strip().startswith(':'):
|
||||||
|
push_item(current_func, rest)
|
||||||
|
current_func, line = line[:m.end()], line[m.end():]
|
||||||
|
rest = [line.split(':', 1)[1].strip()]
|
||||||
|
if not rest[0]:
|
||||||
|
rest = []
|
||||||
|
elif not line.startswith(' '):
|
||||||
|
push_item(current_func, rest)
|
||||||
|
current_func = None
|
||||||
|
if ',' in line:
|
||||||
|
for func in line.split(','):
|
||||||
|
push_item(func, [])
|
||||||
|
elif line.strip():
|
||||||
|
current_func = line
|
||||||
|
elif current_func is not None:
|
||||||
|
rest.append(line.strip())
|
||||||
|
push_item(current_func, rest)
|
||||||
|
return items
|
||||||
|
|
||||||
|
def _parse_index(self, section, content):
|
||||||
|
"""
|
||||||
|
.. index: default
|
||||||
|
:refguide: something, else, and more
|
||||||
|
|
||||||
|
"""
|
||||||
|
def strip_each_in(lst):
|
||||||
|
return [s.strip() for s in lst]
|
||||||
|
|
||||||
|
out = {}
|
||||||
|
section = section.split('::')
|
||||||
|
if len(section) > 1:
|
||||||
|
out['default'] = strip_each_in(section[1].split(','))[0]
|
||||||
|
for line in content:
|
||||||
|
line = line.split(':')
|
||||||
|
if len(line) > 2:
|
||||||
|
out[line[1]] = strip_each_in(line[2].split(','))
|
||||||
|
return out
|
||||||
|
|
||||||
|
def _parse_summary(self):
|
||||||
|
"""Grab signature (if given) and summary"""
|
||||||
|
if self._is_at_section():
|
||||||
|
return
|
||||||
|
|
||||||
|
summary = self._doc.read_to_next_empty_line()
|
||||||
|
summary_str = " ".join([s.strip() for s in summary]).strip()
|
||||||
|
if re.compile('^([\w., ]+=)?\s*[\w\.]+\(.*\)$').match(summary_str):
|
||||||
|
self['Signature'] = summary_str
|
||||||
|
if not self._is_at_section():
|
||||||
|
self['Summary'] = self._doc.read_to_next_empty_line()
|
||||||
|
else:
|
||||||
|
self['Summary'] = summary
|
||||||
|
|
||||||
|
if not self._is_at_section():
|
||||||
|
self['Extended Summary'] = self._read_to_next_section()
|
||||||
|
|
||||||
|
def _parse(self):
|
||||||
|
self._doc.reset()
|
||||||
|
self._parse_summary()
|
||||||
|
|
||||||
|
for (section,content) in self._read_sections():
|
||||||
|
if not section.startswith('..'):
|
||||||
|
section = ' '.join([s.capitalize() for s in section.split(' ')])
|
||||||
|
if section in ('Parameters', 'Attributes', 'Methods',
|
||||||
|
'Returns', 'Raises', 'Warns'):
|
||||||
|
self[section] = self._parse_param_list(content)
|
||||||
|
elif section.startswith('.. index::'):
|
||||||
|
self['index'] = self._parse_index(section, content)
|
||||||
|
elif section == 'See Also':
|
||||||
|
self['See Also'] = self._parse_see_also(content)
|
||||||
|
else:
|
||||||
|
self[section] = content
|
||||||
|
|
||||||
|
# string conversion routines
|
||||||
|
|
||||||
|
def _str_header(self, name, symbol='-'):
|
||||||
|
return [name, len(name)*symbol]
|
||||||
|
|
||||||
|
def _str_indent(self, doc, indent=4):
|
||||||
|
out = []
|
||||||
|
for line in doc:
|
||||||
|
out += [' '*indent + line]
|
||||||
|
return out
|
||||||
|
|
||||||
|
def _str_signature(self):
|
||||||
|
if self['Signature']:
|
||||||
|
return [self['Signature'].replace('*','\*')] + ['']
|
||||||
|
else:
|
||||||
|
return ['']
|
||||||
|
|
||||||
|
def _str_summary(self):
|
||||||
|
if self['Summary']:
|
||||||
|
return self['Summary'] + ['']
|
||||||
|
else:
|
||||||
|
return []
|
||||||
|
|
||||||
|
def _str_extended_summary(self):
|
||||||
|
if self['Extended Summary']:
|
||||||
|
return self['Extended Summary'] + ['']
|
||||||
|
else:
|
||||||
|
return []
|
||||||
|
|
||||||
|
def _str_param_list(self, name):
|
||||||
|
out = []
|
||||||
|
if self[name]:
|
||||||
|
out += self._str_header(name)
|
||||||
|
for param,param_type,desc in self[name]:
|
||||||
|
out += ['%s : %s' % (param, param_type)]
|
||||||
|
out += self._str_indent(desc)
|
||||||
|
out += ['']
|
||||||
|
return out
|
||||||
|
|
||||||
|
def _str_section(self, name):
|
||||||
|
out = []
|
||||||
|
if self[name]:
|
||||||
|
out += self._str_header(name)
|
||||||
|
out += self[name]
|
||||||
|
out += ['']
|
||||||
|
return out
|
||||||
|
|
||||||
|
def _str_see_also(self, func_role):
|
||||||
|
if not self['See Also']: return []
|
||||||
|
out = []
|
||||||
|
out += self._str_header("See Also")
|
||||||
|
last_had_desc = True
|
||||||
|
for func, desc, role in self['See Also']:
|
||||||
|
if role:
|
||||||
|
link = ':%s:`%s`' % (role, func)
|
||||||
|
elif func_role:
|
||||||
|
link = ':%s:`%s`' % (func_role, func)
|
||||||
|
else:
|
||||||
|
link = "`%s`_" % func
|
||||||
|
if desc or last_had_desc:
|
||||||
|
out += ['']
|
||||||
|
out += [link]
|
||||||
|
else:
|
||||||
|
out[-1] += ", %s" % link
|
||||||
|
if desc:
|
||||||
|
out += self._str_indent([' '.join(desc)])
|
||||||
|
last_had_desc = True
|
||||||
|
else:
|
||||||
|
last_had_desc = False
|
||||||
|
out += ['']
|
||||||
|
return out
|
||||||
|
|
||||||
|
def _str_index(self):
|
||||||
|
idx = self['index']
|
||||||
|
out = []
|
||||||
|
out += ['.. index:: %s' % idx.get('default','')]
|
||||||
|
for section, references in idx.items():
|
||||||
|
if section == 'default':
|
||||||
|
continue
|
||||||
|
out += [' :%s: %s' % (section, ', '.join(references))]
|
||||||
|
return out
|
||||||
|
|
||||||
|
def __str__(self, func_role=''):
|
||||||
|
out = []
|
||||||
|
out += self._str_signature()
|
||||||
|
out += self._str_summary()
|
||||||
|
out += self._str_extended_summary()
|
||||||
|
for param_list in ('Parameters','Returns','Raises'):
|
||||||
|
out += self._str_param_list(param_list)
|
||||||
|
out += self._str_section('Warnings')
|
||||||
|
out += self._str_see_also(func_role)
|
||||||
|
for s in ('Notes','References','Examples'):
|
||||||
|
out += self._str_section(s)
|
||||||
|
out += self._str_index()
|
||||||
|
return '\n'.join(out)
|
||||||
|
|
||||||
|
|
||||||
|
def indent(str,indent=4):
|
||||||
|
indent_str = ' '*indent
|
||||||
|
if str is None:
|
||||||
|
return indent_str
|
||||||
|
lines = str.split('\n')
|
||||||
|
return '\n'.join(indent_str + l for l in lines)
|
||||||
|
|
||||||
|
def dedent_lines(lines):
|
||||||
|
"""Deindent a list of lines maximally"""
|
||||||
|
return textwrap.dedent("\n".join(lines)).split("\n")
|
||||||
|
|
||||||
|
def header(text, style='-'):
|
||||||
|
return text + '\n' + style*len(text) + '\n'
|
||||||
|
|
||||||
|
|
||||||
|
class FunctionDoc(NumpyDocString):
|
||||||
|
def __init__(self, func, role='func', doc=None):
|
||||||
|
self._f = func
|
||||||
|
self._role = role # e.g. "func" or "meth"
|
||||||
|
if doc is None:
|
||||||
|
doc = inspect.getdoc(func) or ''
|
||||||
|
try:
|
||||||
|
NumpyDocString.__init__(self, doc)
|
||||||
|
except ValueError as e:
|
||||||
|
print('*'*78)
|
||||||
|
print("ERROR: '%s' while parsing `%s`" % (e, self._f))
|
||||||
|
print('*'*78)
|
||||||
|
#print "Docstring follows:"
|
||||||
|
#print doclines
|
||||||
|
#print '='*78
|
||||||
|
|
||||||
|
if not self['Signature']:
|
||||||
|
func, func_name = self.get_func()
|
||||||
|
try:
|
||||||
|
# try to read signature
|
||||||
|
argspec = inspect.getargspec(func)
|
||||||
|
argspec = inspect.formatargspec(*argspec)
|
||||||
|
argspec = argspec.replace('*','\*')
|
||||||
|
signature = '%s%s' % (func_name, argspec)
|
||||||
|
except TypeError as e:
|
||||||
|
signature = '%s()' % func_name
|
||||||
|
self['Signature'] = signature
|
||||||
|
|
||||||
|
def get_func(self):
|
||||||
|
func_name = getattr(self._f, '__name__', self.__class__.__name__)
|
||||||
|
if inspect.isclass(self._f):
|
||||||
|
func = getattr(self._f, '__call__', self._f.__init__)
|
||||||
|
else:
|
||||||
|
func = self._f
|
||||||
|
return func, func_name
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
out = ''
|
||||||
|
|
||||||
|
func, func_name = self.get_func()
|
||||||
|
signature = self['Signature'].replace('*', '\*')
|
||||||
|
|
||||||
|
roles = {'func': 'function',
|
||||||
|
'meth': 'method'}
|
||||||
|
|
||||||
|
if self._role:
|
||||||
|
if self._role not in roles:
|
||||||
|
print("Warning: invalid role %s" % self._role)
|
||||||
|
out += '.. %s:: %s\n \n\n' % (roles.get(self._role,''),
|
||||||
|
func_name)
|
||||||
|
|
||||||
|
out += super(FunctionDoc, self).__str__(func_role=self._role)
|
||||||
|
return out
|
||||||
|
|
||||||
|
|
||||||
|
class ClassDoc(NumpyDocString):
|
||||||
|
def __init__(self,cls,modulename='',func_doc=FunctionDoc,doc=None):
|
||||||
|
if not inspect.isclass(cls):
|
||||||
|
raise ValueError("Initialise using a class. Got %r" % cls)
|
||||||
|
self._cls = cls
|
||||||
|
|
||||||
|
if modulename and not modulename.endswith('.'):
|
||||||
|
modulename += '.'
|
||||||
|
self._mod = modulename
|
||||||
|
self._name = cls.__name__
|
||||||
|
self._func_doc = func_doc
|
||||||
|
|
||||||
|
if doc is None:
|
||||||
|
doc = pydoc.getdoc(cls)
|
||||||
|
|
||||||
|
NumpyDocString.__init__(self, doc)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def methods(self):
|
||||||
|
return [name for name,func in inspect.getmembers(self._cls)
|
||||||
|
if not name.startswith('_') and callable(func)]
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
out = ''
|
||||||
|
out += super(ClassDoc, self).__str__()
|
||||||
|
out += "\n\n"
|
||||||
|
|
||||||
|
#for m in self.methods:
|
||||||
|
# print "Parsing `%s`" % m
|
||||||
|
# out += str(self._func_doc(getattr(self._cls,m), 'meth')) + '\n\n'
|
||||||
|
# out += '.. index::\n single: %s; %s\n\n' % (self._name, m)
|
||||||
|
|
||||||
|
return out
|
||||||
|
|
||||||
|
|
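``NumpyDocString`` parses a docstring once in its constructor and afterwards behaves like a dictionary keyed by section name. A short, self-contained sketch of that round trip; the example docstring is made up, and it assumes ``docscrape.py`` is importable (for instance because its directory is on ``sys.path``).

```python
# Sketch: parse a NumPy-style docstring and read back the extracted sections.
from docscrape import NumpyDocString

doc = NumpyDocString("""
    Add two numbers.

    Parameters
    ----------
    a : int
        First operand.
    b : int
        Second operand.
    """)

print(doc['Summary'])     # ['Add two numbers.']
print(doc['Parameters'])  # [('a', 'int', ['First operand.']), ('b', 'int', ['Second operand.'])]
```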
136
doc/sphinxext/numpydoc/docscrape_sphinx.py
Normal file
@ -0,0 +1,136 @@
|
|||||||
|
import re, inspect, textwrap, pydoc
|
||||||
|
from docscrape import NumpyDocString, FunctionDoc, ClassDoc
|
||||||
|
|
||||||
|
class SphinxDocString(NumpyDocString):
|
||||||
|
# string conversion routines
|
||||||
|
def _str_header(self, name, symbol='`'):
|
||||||
|
return ['.. rubric:: ' + name, '']
|
||||||
|
|
||||||
|
def _str_field_list(self, name):
|
||||||
|
return [':' + name + ':']
|
||||||
|
|
||||||
|
def _str_indent(self, doc, indent=4):
|
||||||
|
out = []
|
||||||
|
for line in doc:
|
||||||
|
out += [' '*indent + line]
|
||||||
|
return out
|
||||||
|
|
||||||
|
def _str_signature(self):
|
||||||
|
return ['']  # signature output is suppressed here; the lines below are never reached
|
||||||
|
if self['Signature']:
|
||||||
|
return ['``%s``' % self['Signature']] + ['']
|
||||||
|
else:
|
||||||
|
return ['']
|
||||||
|
|
||||||
|
def _str_summary(self):
|
||||||
|
return self['Summary'] + ['']
|
||||||
|
|
||||||
|
def _str_extended_summary(self):
|
||||||
|
return self['Extended Summary'] + ['']
|
||||||
|
|
||||||
|
def _str_param_list(self, name):
|
||||||
|
out = []
|
||||||
|
if self[name]:
|
||||||
|
out += self._str_field_list(name)
|
||||||
|
out += ['']
|
||||||
|
for param,param_type,desc in self[name]:
|
||||||
|
out += self._str_indent(['**%s** : %s' % (param.strip(),
|
||||||
|
param_type)])
|
||||||
|
out += ['']
|
||||||
|
out += self._str_indent(desc,8)
|
||||||
|
out += ['']
|
||||||
|
return out
|
||||||
|
|
||||||
|
def _str_section(self, name):
|
||||||
|
out = []
|
||||||
|
if self[name]:
|
||||||
|
out += self._str_header(name)
|
||||||
|
out += ['']
|
||||||
|
content = textwrap.dedent("\n".join(self[name])).split("\n")
|
||||||
|
out += content
|
||||||
|
out += ['']
|
||||||
|
return out
|
||||||
|
|
||||||
|
def _str_see_also(self, func_role):
|
||||||
|
out = []
|
||||||
|
if self['See Also']:
|
||||||
|
see_also = super(SphinxDocString, self)._str_see_also(func_role)
|
||||||
|
out = ['.. seealso::', '']
|
||||||
|
out += self._str_indent(see_also[2:])
|
||||||
|
return out
|
||||||
|
|
||||||
|
def _str_warnings(self):
|
||||||
|
out = []
|
||||||
|
if self['Warnings']:
|
||||||
|
out = ['.. warning::', '']
|
||||||
|
out += self._str_indent(self['Warnings'])
|
||||||
|
return out
|
||||||
|
|
||||||
|
def _str_index(self):
|
||||||
|
idx = self['index']
|
||||||
|
out = []
|
||||||
|
if len(idx) == 0:
|
||||||
|
return out
|
||||||
|
|
||||||
|
out += ['.. index:: %s' % idx.get('default','')]
|
||||||
|
for section, references in idx.items():
|
||||||
|
if section == 'default':
|
||||||
|
continue
|
||||||
|
elif section == 'refguide':
|
||||||
|
out += [' single: %s' % (', '.join(references))]
|
||||||
|
else:
|
||||||
|
out += [' %s: %s' % (section, ','.join(references))]
|
||||||
|
return out
|
||||||
|
|
||||||
|
def _str_references(self):
|
||||||
|
out = []
|
||||||
|
if self['References']:
|
||||||
|
out += self._str_header('References')
|
||||||
|
if isinstance(self['References'], str):
|
||||||
|
self['References'] = [self['References']]
|
||||||
|
out.extend(self['References'])
|
||||||
|
out += ['']
|
||||||
|
return out
|
||||||
|
|
||||||
|
def __str__(self, indent=0, func_role="obj"):
|
||||||
|
out = []
|
||||||
|
out += self._str_signature()
|
||||||
|
out += self._str_index() + ['']
|
||||||
|
out += self._str_summary()
|
||||||
|
out += self._str_extended_summary()
|
||||||
|
for param_list in ('Parameters', 'Attributes', 'Methods',
|
||||||
|
'Returns','Raises'):
|
||||||
|
out += self._str_param_list(param_list)
|
||||||
|
out += self._str_warnings()
|
||||||
|
out += self._str_see_also(func_role)
|
||||||
|
out += self._str_section('Notes')
|
||||||
|
out += self._str_references()
|
||||||
|
out += self._str_section('Examples')
|
||||||
|
out = self._str_indent(out,indent)
|
||||||
|
return '\n'.join(out)
|
||||||
|
|
||||||
|
class SphinxFunctionDoc(SphinxDocString, FunctionDoc):
|
||||||
|
pass
|
||||||
|
|
||||||
|
class SphinxClassDoc(SphinxDocString, ClassDoc):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def get_doc_object(obj, what=None, doc=None):
|
||||||
|
if what is None:
|
||||||
|
if inspect.isclass(obj):
|
||||||
|
what = 'class'
|
||||||
|
elif inspect.ismodule(obj):
|
||||||
|
what = 'module'
|
||||||
|
elif callable(obj):
|
||||||
|
what = 'function'
|
||||||
|
else:
|
||||||
|
what = 'object'
|
||||||
|
if what == 'class':
|
||||||
|
return SphinxClassDoc(obj, '', func_doc=SphinxFunctionDoc, doc=doc)
|
||||||
|
elif what in ('function', 'method'):
|
||||||
|
return SphinxFunctionDoc(obj, '', doc=doc)
|
||||||
|
else:
|
||||||
|
if doc is None:
|
||||||
|
doc = pydoc.getdoc(obj)
|
||||||
|
return SphinxDocString(doc)
|
||||||
|
|
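``get_doc_object`` picks the appropriate wrapper class and renders the parsed docstring as Sphinx-flavoured reST (field lists, ``.. seealso::``, ``.. warning::``, and so on). A small sketch, assuming the ``numpydoc`` directory is on ``sys.path`` so the plain ``docscrape*`` imports resolve; the ``scale`` function is made up.

```python
# Sketch: render a NumPy-style docstring to Sphinx reST with get_doc_object.
from docscrape_sphinx import get_doc_object

def scale(x, factor=2.0):
    """scale(x, factor=2.0)

    Scale a value by a constant factor.

    Parameters
    ----------
    x : float
        Value to scale.
    factor : float, optional
        Multiplier applied to `x`.
    """
    return x * factor

# Prints a ':Parameters:' field list with the two indented entries.
print(get_doc_object(scale))
```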
407
doc/sphinxext/numpydoc/inheritance_diagram.py
Normal file
@ -0,0 +1,407 @@
|
|||||||
|
"""
|
||||||
|
Defines a docutils directive for inserting inheritance diagrams.
|
||||||
|
|
||||||
|
Provide the directive with one or more classes or modules (separated
|
||||||
|
by whitespace). For modules, all of the classes in that module will
|
||||||
|
be used.
|
||||||
|
|
||||||
|
Example::
|
||||||
|
|
||||||
|
Given the following classes:
|
||||||
|
|
||||||
|
class A: pass
|
||||||
|
class B(A): pass
|
||||||
|
class C(A): pass
|
||||||
|
class D(B, C): pass
|
||||||
|
class E(B): pass
|
||||||
|
|
||||||
|
.. inheritance-diagram:: D E
|
||||||
|
|
||||||
|
Produces a graph like the following:
|
||||||
|
|
||||||
|
    A
|
||||||
|
   / \
|
||||||
|
  B   C
|
||||||
|
 / \ /
|
||||||
|
E   D
|
||||||
|
|
||||||
|
The graph is inserted as a PNG+image map into HTML and a PDF in
|
||||||
|
LaTeX.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import inspect
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import subprocess
|
||||||
|
try:
|
||||||
|
from hashlib import md5
|
||||||
|
except ImportError:
|
||||||
|
from md5 import md5
|
||||||
|
|
||||||
|
from docutils.nodes import Body, Element
|
||||||
|
from docutils.parsers.rst import directives
|
||||||
|
from sphinx.roles import xfileref_role
|
||||||
|
|
||||||
|
def my_import(name):
|
||||||
|
"""Module importer - taken from the python documentation.
|
||||||
|
|
||||||
|
This function allows importing names with dots in them."""
|
||||||
|
|
||||||
|
mod = __import__(name)
|
||||||
|
components = name.split('.')
|
||||||
|
for comp in components[1:]:
|
||||||
|
mod = getattr(mod, comp)
|
||||||
|
return mod
|
||||||
|
|
||||||
|
class DotException(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
class InheritanceGraph:
|
||||||
|
"""
|
||||||
|
Given a list of classes, determines the set of classes that
|
||||||
|
they inherit from all the way to the root "object", and then
|
||||||
|
is able to generate a graphviz dot graph from them.
|
||||||
|
"""
|
||||||
|
def __init__(self, class_names, show_builtins=False):
|
||||||
|
"""
|
||||||
|
*class_names* is a list of child classes to show bases from.
|
||||||
|
|
||||||
|
If *show_builtins* is True, then Python builtins will be shown
|
||||||
|
in the graph.
|
||||||
|
"""
|
||||||
|
self.class_names = class_names
|
||||||
|
self.classes = self._import_classes(class_names)
|
||||||
|
self.all_classes = self._all_classes(self.classes)
|
||||||
|
if len(self.all_classes) == 0:
|
||||||
|
raise ValueError("No classes found for inheritance diagram")
|
||||||
|
self.show_builtins = show_builtins
|
||||||
|
|
||||||
|
py_sig_re = re.compile(r'''^([\w.]*\.)? # class names
|
||||||
|
(\w+) \s* $ # optionally arguments
|
||||||
|
''', re.VERBOSE)
|
||||||
|
|
||||||
|
def _import_class_or_module(self, name):
|
||||||
|
"""
|
||||||
|
Import a class using its fully-qualified *name*.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
path, base = self.py_sig_re.match(name).groups()
|
||||||
|
except:
|
||||||
|
raise ValueError(
|
||||||
|
"Invalid class or module '%s' specified for inheritance diagram" % name)
|
||||||
|
fullname = (path or '') + base
|
||||||
|
path = (path and path.rstrip('.'))
|
||||||
|
if not path:
|
||||||
|
path = base
|
||||||
|
try:
|
||||||
|
module = __import__(path, None, None, [])
|
||||||
|
# We must do an import of the fully qualified name. Otherwise if a
|
||||||
|
# subpackage 'a.b' is requested where 'import a' does NOT provide
|
||||||
|
# 'a.b' automatically, then 'a.b' will not be found below. This
|
||||||
|
# second call will force the equivalent of 'import a.b' to happen
|
||||||
|
# after the top-level import above.
|
||||||
|
my_import(fullname)
|
||||||
|
|
||||||
|
except ImportError:
|
||||||
|
raise ValueError(
|
||||||
|
"Could not import class or module '%s' specified for inheritance diagram" % name)
|
||||||
|
|
||||||
|
try:
|
||||||
|
todoc = module
|
||||||
|
for comp in fullname.split('.')[1:]:
|
||||||
|
todoc = getattr(todoc, comp)
|
||||||
|
except AttributeError:
|
||||||
|
raise ValueError(
|
||||||
|
"Could not find class or module '%s' specified for inheritance diagram" % name)
|
||||||
|
|
||||||
|
# If a class, just return it
|
||||||
|
if inspect.isclass(todoc):
|
||||||
|
return [todoc]
|
||||||
|
elif inspect.ismodule(todoc):
|
||||||
|
classes = []
|
||||||
|
for cls in list(todoc.__dict__.values()):
|
||||||
|
if inspect.isclass(cls) and cls.__module__ == todoc.__name__:
|
||||||
|
classes.append(cls)
|
||||||
|
return classes
|
||||||
|
raise ValueError(
|
||||||
|
"'%s' does not resolve to a class or module" % name)
|
||||||
|
|
||||||
|
def _import_classes(self, class_names):
|
||||||
|
"""
|
||||||
|
Import a list of classes.
|
||||||
|
"""
|
||||||
|
classes = []
|
||||||
|
for name in class_names:
|
||||||
|
classes.extend(self._import_class_or_module(name))
|
||||||
|
return classes
|
||||||
|
|
||||||
|
def _all_classes(self, classes):
|
||||||
|
"""
|
||||||
|
Return a list of all classes that are ancestors of *classes*.
|
||||||
|
"""
|
||||||
|
all_classes = {}
|
||||||
|
|
||||||
|
def recurse(cls):
|
||||||
|
all_classes[cls] = None
|
||||||
|
for c in cls.__bases__:
|
||||||
|
if c not in all_classes:
|
||||||
|
recurse(c)
|
||||||
|
|
||||||
|
for cls in classes:
|
||||||
|
recurse(cls)
|
||||||
|
|
||||||
|
return list(all_classes.keys())
|
||||||
|
|
||||||
|
def class_name(self, cls, parts=0):
|
||||||
|
"""
|
||||||
|
Given a class object, return a fully-qualified name. This
|
||||||
|
works for things I've tested in matplotlib so far, but may not
|
||||||
|
be completely general.
|
||||||
|
"""
|
||||||
|
module = cls.__module__
|
||||||
|
if module == '__builtin__':
|
||||||
|
fullname = cls.__name__
|
||||||
|
else:
|
||||||
|
fullname = "%s.%s" % (module, cls.__name__)
|
||||||
|
if parts == 0:
|
||||||
|
return fullname
|
||||||
|
name_parts = fullname.split('.')
|
||||||
|
return '.'.join(name_parts[-parts:])
|
||||||
|
|
||||||
|
def get_all_class_names(self):
|
||||||
|
"""
|
||||||
|
Get all of the class names involved in the graph.
|
||||||
|
"""
|
||||||
|
return [self.class_name(x) for x in self.all_classes]
|
||||||
|
|
||||||
|
# These are the default options for graphviz
|
||||||
|
default_graph_options = {
|
||||||
|
"rankdir": "LR",
|
||||||
|
"size": '"8.0, 12.0"'
|
||||||
|
}
|
||||||
|
default_node_options = {
|
||||||
|
"shape": "box",
|
||||||
|
"fontsize": 10,
|
||||||
|
"height": 0.25,
|
||||||
|
"fontname": "Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",
|
||||||
|
"style": '"setlinewidth(0.5)"'
|
||||||
|
}
|
||||||
|
default_edge_options = {
|
||||||
|
"arrowsize": 0.5,
|
||||||
|
"style": '"setlinewidth(0.5)"'
|
||||||
|
}
|
||||||
|
|
||||||
|
def _format_node_options(self, options):
|
||||||
|
return ','.join(["%s=%s" % x for x in list(options.items())])
|
||||||
|
def _format_graph_options(self, options):
|
||||||
|
return ''.join(["%s=%s;\n" % x for x in list(options.items())])
|
||||||
|
|
||||||
|
def generate_dot(self, fd, name, parts=0, urls={},
|
||||||
|
graph_options={}, node_options={},
|
||||||
|
edge_options={}):
|
||||||
|
"""
|
||||||
|
Generate a graphviz dot graph from the classes that
|
||||||
|
were passed in to __init__.
|
||||||
|
|
||||||
|
*fd* is a Python file-like object to write to.
|
||||||
|
|
||||||
|
*name* is the name of the graph
|
||||||
|
|
||||||
|
*urls* is a dictionary mapping class names to http urls
|
||||||
|
|
||||||
|
*graph_options*, *node_options*, *edge_options* are
|
||||||
|
dictionaries containing key/value pairs to pass on as graphviz
|
||||||
|
properties.
|
||||||
|
"""
|
||||||
|
g_options = self.default_graph_options.copy()
|
||||||
|
g_options.update(graph_options)
|
||||||
|
n_options = self.default_node_options.copy()
|
||||||
|
n_options.update(node_options)
|
||||||
|
e_options = self.default_edge_options.copy()
|
||||||
|
e_options.update(edge_options)
|
||||||
|
|
||||||
|
fd.write('digraph %s {\n' % name)
|
||||||
|
fd.write(self._format_graph_options(g_options))
|
||||||
|
|
||||||
|
for cls in self.all_classes:
|
||||||
|
if not self.show_builtins and cls in list(__builtins__.values()):
|
||||||
|
continue
|
||||||
|
|
||||||
|
name = self.class_name(cls, parts)
|
||||||
|
|
||||||
|
# Write the node
|
||||||
|
this_node_options = n_options.copy()
|
||||||
|
url = urls.get(self.class_name(cls))
|
||||||
|
if url is not None:
|
||||||
|
this_node_options['URL'] = '"%s"' % url
|
||||||
|
fd.write(' "%s" [%s];\n' %
|
||||||
|
(name, self._format_node_options(this_node_options)))
|
||||||
|
|
||||||
|
# Write the edges
|
||||||
|
for base in cls.__bases__:
|
||||||
|
if not self.show_builtins and base in list(__builtins__.values()):
|
||||||
|
continue
|
||||||
|
|
||||||
|
base_name = self.class_name(base, parts)
|
||||||
|
fd.write(' "%s" -> "%s" [%s];\n' %
|
||||||
|
(base_name, name,
|
||||||
|
self._format_node_options(e_options)))
|
||||||
|
fd.write('}\n')
|
||||||
|
|
||||||
|
def run_dot(self, args, name, parts=0, urls={},
|
||||||
|
graph_options={}, node_options={}, edge_options={}):
|
||||||
|
"""
|
||||||
|
Run graphviz 'dot' over this graph, returning whatever 'dot'
|
||||||
|
writes to stdout.
|
||||||
|
|
||||||
|
*args* will be passed along as commandline arguments.
|
||||||
|
|
||||||
|
*name* is the name of the graph
|
||||||
|
|
||||||
|
*urls* is a dictionary mapping class names to http urls
|
||||||
|
|
||||||
|
Raises DotException for any of the many os and
|
||||||
|
installation-related errors that may occur.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
dot = subprocess.Popen(['dot'] + list(args),
|
||||||
|
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
|
||||||
|
close_fds=True)
|
||||||
|
except OSError:
|
||||||
|
raise DotException("Could not execute 'dot'. Are you sure you have 'graphviz' installed?")
|
||||||
|
except ValueError:
|
||||||
|
raise DotException("'dot' called with invalid arguments")
|
||||||
|
except:
|
||||||
|
raise DotException("Unexpected error calling 'dot'")
|
||||||
|
|
||||||
|
self.generate_dot(dot.stdin, name, parts, urls, graph_options,
|
||||||
|
node_options, edge_options)
|
||||||
|
dot.stdin.close()
|
||||||
|
result = dot.stdout.read()
|
||||||
|
returncode = dot.wait()
|
||||||
|
if returncode != 0:
|
||||||
|
raise DotException("'dot' returned the errorcode %d" % returncode)
|
||||||
|
return result
|
||||||
|
|
||||||
|
class inheritance_diagram(Body, Element):
|
||||||
|
"""
|
||||||
|
A docutils node to use as a placeholder for the inheritance
|
||||||
|
diagram.
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
def inheritance_diagram_directive(name, arguments, options, content, lineno,
|
||||||
|
content_offset, block_text, state,
|
||||||
|
state_machine):
|
||||||
|
"""
|
||||||
|
Run when the inheritance_diagram directive is first encountered.
|
||||||
|
"""
|
||||||
|
node = inheritance_diagram()
|
||||||
|
|
||||||
|
class_names = arguments
|
||||||
|
|
||||||
|
# Create a graph starting with the list of classes
|
||||||
|
graph = InheritanceGraph(class_names)
|
||||||
|
|
||||||
|
# Create xref nodes for each target of the graph's image map and
|
||||||
|
# add them to the doc tree so that Sphinx can resolve the
|
||||||
|
# references to real URLs later. These nodes will eventually be
|
||||||
|
# removed from the doctree after we're done with them.
|
||||||
|
for name in graph.get_all_class_names():
|
||||||
|
refnodes, x = xfileref_role(
|
||||||
|
'class', ':class:`%s`' % name, name, 0, state)
|
||||||
|
node.extend(refnodes)
|
||||||
|
# Store the graph object so we can use it to generate the
|
||||||
|
# dot file later
|
||||||
|
node['graph'] = graph
|
||||||
|
# Store the original content for use as a hash
|
||||||
|
node['parts'] = options.get('parts', 0)
|
||||||
|
node['content'] = " ".join(class_names)
|
||||||
|
return [node]
|
||||||
|
|
||||||
|
def get_graph_hash(node):
|
||||||
|
return md5(node['content'] + str(node['parts'])).hexdigest()[-10:]
|
||||||
|
|
||||||
|
def html_output_graph(self, node):
|
||||||
|
"""
|
||||||
|
Output the graph for HTML. This will insert a PNG with clickable
|
||||||
|
image map.
|
||||||
|
"""
|
||||||
|
graph = node['graph']
|
||||||
|
parts = node['parts']
|
||||||
|
|
||||||
|
graph_hash = get_graph_hash(node)
|
||||||
|
name = "inheritance%s" % graph_hash
|
||||||
|
path = '_images'
|
||||||
|
dest_path = os.path.join(setup.app.builder.outdir, path)
|
||||||
|
if not os.path.exists(dest_path):
|
||||||
|
os.makedirs(dest_path)
|
||||||
|
png_path = os.path.join(dest_path, name + ".png")
|
||||||
|
path = setup.app.builder.imgpath
|
||||||
|
|
||||||
|
# Create a mapping from fully-qualified class names to URLs.
|
||||||
|
urls = {}
|
||||||
|
for child in node:
|
||||||
|
if child.get('refuri') is not None:
|
||||||
|
urls[child['reftitle']] = child.get('refuri')
|
||||||
|
elif child.get('refid') is not None:
|
||||||
|
urls[child['reftitle']] = '#' + child.get('refid')
|
||||||
|
|
||||||
|
# These arguments to dot will save a PNG file to disk and write
|
||||||
|
# an HTML image map to stdout.
|
||||||
|
image_map = graph.run_dot(['-Tpng', '-o%s' % png_path, '-Tcmapx'],
|
||||||
|
name, parts, urls)
|
||||||
|
return ('<img src="%s/%s.png" usemap="#%s" class="inheritance"/>%s' %
|
||||||
|
(path, name, name, image_map))
|
||||||
|
|
||||||
|
def latex_output_graph(self, node):
|
||||||
|
"""
|
||||||
|
Output the graph for LaTeX. This will insert a PDF.
|
||||||
|
"""
|
||||||
|
graph = node['graph']
|
||||||
|
parts = node['parts']
|
||||||
|
|
||||||
|
graph_hash = get_graph_hash(node)
|
||||||
|
name = "inheritance%s" % graph_hash
|
||||||
|
dest_path = os.path.abspath(os.path.join(setup.app.builder.outdir, '_images'))
|
||||||
|
if not os.path.exists(dest_path):
|
||||||
|
os.makedirs(dest_path)
|
||||||
|
pdf_path = os.path.abspath(os.path.join(dest_path, name + ".pdf"))
|
||||||
|
|
||||||
|
graph.run_dot(['-Tpdf', '-o%s' % pdf_path],
|
||||||
|
name, parts, graph_options={'size': '"6.0,6.0"'})
|
||||||
|
return '\n\\includegraphics{%s}\n\n' % pdf_path
|
||||||
|
|
||||||
|
def visit_inheritance_diagram(inner_func):
|
||||||
|
"""
|
||||||
|
This is just a wrapper around html/latex_output_graph to make it
|
||||||
|
easier to handle errors and insert warnings.
|
||||||
|
"""
|
||||||
|
def visitor(self, node):
|
||||||
|
try:
|
||||||
|
content = inner_func(self, node)
|
||||||
|
except DotException as e:
|
||||||
|
# Insert the exception as a warning in the document
|
||||||
|
warning = self.document.reporter.warning(str(e), line=node.line)
|
||||||
|
warning.parent = node
|
||||||
|
node.children = [warning]
|
||||||
|
else:
|
||||||
|
source = self.document.attributes['source']
|
||||||
|
self.body.append(content)
|
||||||
|
node.children = []
|
||||||
|
return visitor
|
||||||
|
|
||||||
|
def do_nothing(self, node):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def setup(app):
|
||||||
|
setup.app = app
|
||||||
|
setup.confdir = app.confdir
|
||||||
|
|
||||||
|
app.add_node(
|
||||||
|
inheritance_diagram,
|
||||||
|
latex=(visit_inheritance_diagram(latex_output_graph), do_nothing),
|
||||||
|
html=(visit_inheritance_diagram(html_output_graph), do_nothing))
|
||||||
|
app.add_directive(
|
||||||
|
'inheritance-diagram', inheritance_diagram_directive,
|
||||||
|
False, (1, 100, 0), parts = directives.nonnegative_int)
|
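In reST source the registered directive is written as ``.. inheritance-diagram:: some.module.SomeClass``, which is exactly what ``apigen.py`` emits above each documented class. Outside of Sphinx the graph construction can be exercised directly; the sketch below assumes ``inheritance_diagram.py`` is importable under a Sphinx version that still provides ``sphinx.roles.xfileref_role`` (which this module imports), the example class is arbitrary, and graphviz itself is only needed once ``run_dot`` is called.

```python
# Sketch: build graphviz dot source for an inheritance diagram by hand.
import sys
from inheritance_diagram import InheritanceGraph

graph = InheritanceGraph(['collections.OrderedDict'], show_builtins=True)
graph.generate_dot(sys.stdout, 'example', parts=1)   # prints "digraph example { ... }"
```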
114
doc/sphinxext/numpydoc/ipython_console_highlighting.py
Normal file
@ -0,0 +1,114 @@
|
|||||||
|
"""reST directive for syntax-highlighting ipython interactive sessions.
|
||||||
|
|
||||||
|
XXX - See what improvements can be made based on the new (as of Sept 2009)
|
||||||
|
'pycon' lexer for the python console. At the very least it will give better
|
||||||
|
highlighted tracebacks.
|
||||||
|
"""
|
||||||
|
|
||||||
|
#-----------------------------------------------------------------------------
|
||||||
|
# Needed modules
|
||||||
|
|
||||||
|
# Standard library
|
||||||
|
import re
|
||||||
|
|
||||||
|
# Third party
|
||||||
|
from pygments.lexer import Lexer, do_insertions
|
||||||
|
from pygments.lexers.agile import (PythonConsoleLexer, PythonLexer,
|
||||||
|
PythonTracebackLexer)
|
||||||
|
from pygments.token import Comment, Generic
|
||||||
|
|
||||||
|
from sphinx import highlighting
|
||||||
|
|
||||||
|
#-----------------------------------------------------------------------------
|
||||||
|
# Global constants
|
||||||
|
line_re = re.compile('.*?\n')
|
||||||
|
|
||||||
|
#-----------------------------------------------------------------------------
|
||||||
|
# Code begins - classes and functions
|
||||||
|
|
||||||
|
class IPythonConsoleLexer(Lexer):
|
||||||
|
"""
|
||||||
|
For IPython console output or doctests, such as:
|
||||||
|
|
||||||
|
.. sourcecode:: ipython
|
||||||
|
|
||||||
|
In [1]: a = 'foo'
|
||||||
|
|
||||||
|
In [2]: a
|
||||||
|
Out[2]: 'foo'
|
||||||
|
|
||||||
|
In [3]: print(a)
|
||||||
|
foo
|
||||||
|
|
||||||
|
In [4]: 1 / 0
|
||||||
|
|
||||||
|
Notes:
|
||||||
|
|
||||||
|
- Tracebacks are not currently supported.
|
||||||
|
|
||||||
|
- It assumes the default IPython prompts, not customized ones.
|
||||||
|
"""
|
||||||
|
|
||||||
|
name = 'IPython console session'
|
||||||
|
aliases = ['ipython']
|
||||||
|
mimetypes = ['text/x-ipython-console']
|
||||||
|
input_prompt = re.compile("(In \[[0-9]+\]: )|( \.\.\.+:)")
|
||||||
|
output_prompt = re.compile("(Out\[[0-9]+\]: )|( \.\.\.+:)")
|
||||||
|
continue_prompt = re.compile(" \.\.\.+:")
|
||||||
|
tb_start = re.compile("\-+")
|
||||||
|
|
||||||
|
def get_tokens_unprocessed(self, text):
|
||||||
|
pylexer = PythonLexer(**self.options)
|
||||||
|
tblexer = PythonTracebackLexer(**self.options)
|
||||||
|
|
||||||
|
curcode = ''
|
||||||
|
insertions = []
|
||||||
|
for match in line_re.finditer(text):
|
||||||
|
line = match.group()
|
||||||
|
input_prompt = self.input_prompt.match(line)
|
||||||
|
continue_prompt = self.continue_prompt.match(line.rstrip())
|
||||||
|
output_prompt = self.output_prompt.match(line)
|
||||||
|
if line.startswith("#"):
|
||||||
|
insertions.append((len(curcode),
|
||||||
|
[(0, Comment, line)]))
|
||||||
|
elif input_prompt is not None:
|
||||||
|
insertions.append((len(curcode),
|
||||||
|
[(0, Generic.Prompt, input_prompt.group())]))
|
||||||
|
curcode += line[input_prompt.end():]
|
||||||
|
elif continue_prompt is not None:
|
||||||
|
insertions.append((len(curcode),
|
||||||
|
[(0, Generic.Prompt, continue_prompt.group())]))
|
||||||
|
curcode += line[continue_prompt.end():]
|
||||||
|
elif output_prompt is not None:
|
||||||
|
# Use the 'error' token for output. We should probably make
|
||||||
|
# our own token, but error is typically in a bright color like
|
||||||
|
# red, so it works fine for our output prompts.
|
||||||
|
insertions.append((len(curcode),
|
||||||
|
[(0, Generic.Error, output_prompt.group())]))
|
||||||
|
curcode += line[output_prompt.end():]
|
||||||
|
else:
|
||||||
|
if curcode:
|
||||||
|
for item in do_insertions(insertions,
|
||||||
|
pylexer.get_tokens_unprocessed(curcode)):
|
||||||
|
yield item
|
||||||
|
curcode = ''
|
||||||
|
insertions = []
|
||||||
|
yield match.start(), Generic.Output, line
|
||||||
|
if curcode:
|
||||||
|
for item in do_insertions(insertions,
|
||||||
|
pylexer.get_tokens_unprocessed(curcode)):
|
||||||
|
yield item
|
||||||
|
|
||||||
|
|
||||||
|
def setup(app):
|
||||||
|
"""Setup as a sphinx extension."""
|
||||||
|
|
||||||
|
# This is only a lexer, so adding it below to pygments appears sufficient.
|
||||||
|
# But if somebody knows that the right API usage should be to do that via
|
||||||
|
# sphinx, by all means fix it here. At least having this setup.py
|
||||||
|
# suppresses the sphinx warning we'd get without it.
|
||||||
|
pass
|
||||||
|
|
||||||
|
#-----------------------------------------------------------------------------
|
||||||
|
# Register the extension as a valid pygments lexer
|
||||||
|
highlighting.lexers['ipython'] = IPythonConsoleLexer()
|
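Because the module registers ``IPythonConsoleLexer`` straight into pygments (last line above), ``.. sourcecode:: ipython`` blocks are highlighted once the extension is loaded. The lexer can also be driven through pygments on its own; a sketch, assuming pygments (and sphinx, which the module imports at the top) are installed and the module is importable.

```python
# Sketch: run the custom lexer through pygments outside of Sphinx.
from pygments import highlight
from pygments.formatters import TerminalFormatter
from ipython_console_highlighting import IPythonConsoleLexer

session = """In [1]: a = 'foo'

In [2]: a
Out[2]: 'foo'
"""
print(highlight(session, IPythonConsoleLexer(), TerminalFormatter()))
```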
116
doc/sphinxext/numpydoc/numpydoc.py
Normal file
@ -0,0 +1,116 @@
|
|||||||
|
"""
|
||||||
|
========
|
||||||
|
numpydoc
|
||||||
|
========
|
||||||
|
|
||||||
|
Sphinx extension that handles docstrings in the Numpy standard format. [1]
|
||||||
|
|
||||||
|
It will:
|
||||||
|
|
||||||
|
- Convert Parameters etc. sections to field lists.
|
||||||
|
- Convert See Also section to a See also entry.
|
||||||
|
- Renumber references.
|
||||||
|
- Extract the signature from the docstring, if it can't be determined otherwise.
|
||||||
|
|
||||||
|
.. [1] http://projects.scipy.org/scipy/numpy/wiki/CodingStyleGuidelines#docstring-standard
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os, re, pydoc
|
||||||
|
from docscrape_sphinx import get_doc_object, SphinxDocString
|
||||||
|
import inspect
|
||||||
|
|
||||||
|
def mangle_docstrings(app, what, name, obj, options, lines,
|
||||||
|
reference_offset=[0]):
|
||||||
|
if what == 'module':
|
||||||
|
# Strip top title
|
||||||
|
title_re = re.compile(r'^\s*[#*=]{4,}\n[a-z0-9 -]+\n[#*=]{4,}\s*',
|
||||||
|
re.I|re.S)
|
||||||
|
lines[:] = title_re.sub('', "\n".join(lines)).split("\n")
|
||||||
|
else:
|
||||||
|
doc = get_doc_object(obj, what, "\n".join(lines))
|
||||||
|
lines[:] = str(doc).split("\n")
|
||||||
|
|
||||||
|
if app.config.numpydoc_edit_link and hasattr(obj, '__name__') and \
|
||||||
|
obj.__name__:
|
||||||
|
if hasattr(obj, '__module__'):
|
||||||
|
v = dict(full_name="%s.%s" % (obj.__module__, obj.__name__))
|
||||||
|
else:
|
||||||
|
v = dict(full_name=obj.__name__)
|
||||||
|
lines += ['', '.. htmlonly::', '']
|
||||||
|
lines += [' %s' % x for x in
|
||||||
|
(app.config.numpydoc_edit_link % v).split("\n")]
|
||||||
|
|
||||||
|
# replace reference numbers so that there are no duplicates
|
||||||
|
references = []
|
||||||
|
for l in lines:
|
||||||
|
l = l.strip()
|
||||||
|
if l.startswith('.. ['):
|
||||||
|
try:
|
||||||
|
references.append(int(l[len('.. ['):l.index(']')]))
|
||||||
|
except ValueError:
|
||||||
|
print("WARNING: invalid reference in %s docstring" % name)
|
||||||
|
|
||||||
|
# Start renaming from the biggest number, otherwise we may
|
||||||
|
# overwrite references.
|
||||||
|
references.sort()
|
||||||
|
if references:
|
||||||
|
for i, line in enumerate(lines):
|
||||||
|
for r in references:
|
||||||
|
new_r = reference_offset[0] + r
|
||||||
|
lines[i] = lines[i].replace('[%d]_' % r,
|
||||||
|
'[%d]_' % new_r)
|
||||||
|
lines[i] = lines[i].replace('.. [%d]' % r,
|
||||||
|
'.. [%d]' % new_r)
|
||||||
|
|
||||||
|
reference_offset[0] += len(references)
|
||||||
|
|
||||||
|
def mangle_signature(app, what, name, obj, options, sig, retann):
|
||||||
|
# Do not try to inspect classes that don't define `__init__`
|
||||||
|
if (inspect.isclass(obj) and
|
||||||
|
'initializes x; see ' in pydoc.getdoc(obj.__init__)):
|
||||||
|
return '', ''
|
||||||
|
|
||||||
|
if not (callable(obj) or hasattr(obj, '__argspec_is_invalid_')): return
|
||||||
|
if not hasattr(obj, '__doc__'): return
|
||||||
|
|
||||||
|
doc = SphinxDocString(pydoc.getdoc(obj))
|
||||||
|
if doc['Signature']:
|
||||||
|
sig = re.sub("^[^(]*", "", doc['Signature'])
|
||||||
|
return sig, ''
|
||||||
|
|
||||||
|
def initialize(app):
|
||||||
|
try:
|
||||||
|
app.connect('autodoc-process-signature', mangle_signature)
|
||||||
|
except:
|
||||||
|
monkeypatch_sphinx_ext_autodoc()
|
||||||
|
|
||||||
|
def setup(app, get_doc_object_=get_doc_object):
|
||||||
|
global get_doc_object
|
||||||
|
get_doc_object = get_doc_object_
|
||||||
|
|
||||||
|
app.connect('autodoc-process-docstring', mangle_docstrings)
|
||||||
|
app.connect('builder-inited', initialize)
|
||||||
|
app.add_config_value('numpydoc_edit_link', None, True)
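# Hedged configuration sketch (assumed values, not shipped with this file): what
# a project's doc/conf.py might set to activate the extension; the GitHub URL in
# the edit-link template is a hypothetical example.
#
#   extensions = ['sphinx.ext.autodoc', 'numpydoc']
#   # %(full_name)s is substituted by mangle_docstrings() above; None disables the link.
#   numpydoc_edit_link = ('`Edit %(full_name)s on GitHub '
#                         '<https://github.com/example/project/edit/main/docs>`__')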
|
||||||
|
|
||||||
|
#------------------------------------------------------------------------------
|
||||||
|
# Monkeypatch sphinx.ext.autodoc to accept argspecless autodocs (Sphinx < 0.5)
|
||||||
|
#------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
def monkeypatch_sphinx_ext_autodoc():
|
||||||
|
global _original_format_signature
|
||||||
|
import sphinx.ext.autodoc
|
||||||
|
|
||||||
|
if sphinx.ext.autodoc.format_signature is our_format_signature:
|
||||||
|
return
|
||||||
|
|
||||||
|
print("[numpydoc] Monkeypatching sphinx.ext.autodoc ...")
|
||||||
|
_original_format_signature = sphinx.ext.autodoc.format_signature
|
||||||
|
sphinx.ext.autodoc.format_signature = our_format_signature
|
||||||
|
|
||||||
|
def our_format_signature(what, obj):
|
||||||
|
r = mangle_signature(None, what, None, obj, None, None, None)
|
||||||
|
if r is not None:
|
||||||
|
return r[0]
|
||||||
|
else:
|
||||||
|
return _original_format_signature(what, obj)
|
773
doc/sphinxext/numpydoc/plot_directive.py
Normal file
@ -0,0 +1,773 @@
|
|||||||
|
"""
|
||||||
|
A directive for including a matplotlib plot in a Sphinx document.
|
||||||
|
|
||||||
|
By default, in HTML output, `plot` will include a .png file with a
|
||||||
|
link to a high-res .png and .pdf. In LaTeX output, it will include a
|
||||||
|
.pdf.
|
||||||
|
|
||||||
|
The source code for the plot may be included in one of three ways:
|
||||||
|
|
||||||
|
1. **A path to a source file** as the argument to the directive::
|
||||||
|
|
||||||
|
.. plot:: path/to/plot.py
|
||||||
|
|
||||||
|
When a path to a source file is given, the content of the
|
||||||
|
directive may optionally contain a caption for the plot::
|
||||||
|
|
||||||
|
.. plot:: path/to/plot.py
|
||||||
|
|
||||||
|
This is the caption for the plot
|
||||||
|
|
||||||
|
Additionally, one may specify the name of a function to call (with
|
||||||
|
no arguments) immediately after importing the module::
|
||||||
|
|
||||||
|
.. plot:: path/to/plot.py plot_function1
|
||||||
|
|
||||||
|
2. Included as **inline content** to the directive::
|
||||||
|
|
||||||
|
.. plot::
|
||||||
|
|
||||||
|
import matplotlib.pyplot as plt
|
||||||
|
import matplotlib.image as mpimg
|
||||||
|
import numpy as np
|
||||||
|
img = mpimg.imread('_static/stinkbug.png')
|
||||||
|
imgplot = plt.imshow(img)
|
||||||
|
|
||||||
|
3. Using **doctest** syntax::
|
||||||
|
|
||||||
|
.. plot::
|
||||||
|
A plotting example:
|
||||||
|
>>> import matplotlib.pyplot as plt
|
||||||
|
>>> plt.plot([1,2,3], [4,5,6])
|
||||||
|
|
||||||
|
Options
|
||||||
|
-------
|
||||||
|
|
||||||
|
The ``plot`` directive supports the following options:
|
||||||
|
|
||||||
|
format : {'python', 'doctest'}
|
||||||
|
Specify the format of the input
|
||||||
|
|
||||||
|
include-source : bool
|
||||||
|
Whether to display the source code. The default can be changed
|
||||||
|
using the `plot_include_source` variable in conf.py
|
||||||
|
|
||||||
|
encoding : str
|
||||||
|
If this source file is in a non-UTF8 or non-ASCII encoding,
|
||||||
|
the encoding must be specified using the `:encoding:` option.
|
||||||
|
The encoding will not be inferred using the ``-*- coding -*-``
|
||||||
|
metacomment.
|
||||||
|
|
||||||
|
context : bool
|
||||||
|
If provided, the code will be run in the context of all
|
||||||
|
previous plot directives for which the `:context:` option was
|
||||||
|
specified. This only applies to inline code plot directives,
|
||||||
|
not those run from files.
|
||||||
|
|
||||||
|
nofigs : bool
|
||||||
|
If specified, the code block will be run, but no figures will
|
||||||
|
be inserted. This is usually useful with the ``:context:``
|
||||||
|
option.
|
||||||
|
|
||||||
|
Additionally, this directive supports all of the options of the
|
||||||
|
`image` directive, except for `target` (since plot will add its own
|
||||||
|
target). These include `alt`, `height`, `width`, `scale`, `align` and
|
||||||
|
`class`.
|
||||||
|
|
||||||
|
Configuration options
|
||||||
|
---------------------
|
||||||
|
|
||||||
|
The plot directive has the following configuration options:
|
||||||
|
|
||||||
|
plot_include_source
|
||||||
|
Default value for the include-source option
|
||||||
|
|
||||||
|
plot_pre_code
|
||||||
|
Code that should be executed before each plot.
|
||||||
|
|
||||||
|
plot_basedir
|
||||||
|
Base directory, to which ``plot::`` file names are relative.
(If None or empty, file names are relative to the
directory where the file containing the directive is.)
|
||||||
|
|
||||||
|
plot_formats
|
||||||
|
File formats to generate. List of tuples or strings::
|
||||||
|
|
||||||
|
[(suffix, dpi), suffix, ...]
|
||||||
|
|
||||||
|
that determine the file format and the DPI. For entries whose
|
||||||
|
DPI was omitted, sensible defaults are chosen.
|
||||||
|
|
||||||
|
plot_html_show_formats
|
||||||
|
Whether to show links to the files in HTML.
|
||||||
|
|
||||||
|
plot_rcparams
|
||||||
|
A dictionary containing any non-standard rcParams that should
|
||||||
|
be applied before each plot.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
import sys, os, glob, shutil, imp, warnings, io, re, textwrap, traceback
|
||||||
|
|
||||||
|
from docutils.parsers.rst import directives
|
||||||
|
from docutils import nodes
|
||||||
|
from docutils.parsers.rst.directives.images import Image
|
||||||
|
align = Image.align
|
||||||
|
import sphinx
|
||||||
|
|
||||||
|
sphinx_version = sphinx.__version__.split(".")
|
||||||
|
# The split is necessary for sphinx beta versions where the string is
|
||||||
|
# '6b1'
|
||||||
|
sphinx_version = tuple([int(re.split('[a-z]', x)[0])
|
||||||
|
for x in sphinx_version[:2]])
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Sphinx depends on either Jinja or Jinja2
|
||||||
|
import jinja2
|
||||||
|
def format_template(template, **kw):
|
||||||
|
return jinja2.Template(template).render(**kw)
|
||||||
|
except ImportError:
|
||||||
|
import jinja
|
||||||
|
def format_template(template, **kw):
|
||||||
|
return jinja.from_string(template, **kw)
|
||||||
|
|
||||||
|
import matplotlib
|
||||||
|
import matplotlib.cbook as cbook
|
||||||
|
matplotlib.use('Agg')
|
||||||
|
import matplotlib.pyplot as plt
|
||||||
|
from matplotlib import _pylab_helpers
|
||||||
|
|
||||||
|
__version__ = 2
|
||||||
|
|
||||||
|
#------------------------------------------------------------------------------
|
||||||
|
# Relative pathnames
|
||||||
|
#------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
# os.path.relpath is new in Python 2.6
|
||||||
|
try:
|
||||||
|
from os.path import relpath
|
||||||
|
except ImportError:
|
||||||
|
# Copied from Python 2.7
|
||||||
|
if 'posix' in sys.builtin_module_names:
|
||||||
|
def relpath(path, start=os.path.curdir):
|
||||||
|
"""Return a relative version of a path"""
|
||||||
|
from os.path import sep, curdir, join, abspath, commonprefix, \
|
||||||
|
pardir
|
||||||
|
|
||||||
|
if not path:
|
||||||
|
raise ValueError("no path specified")
|
||||||
|
|
||||||
|
start_list = abspath(start).split(sep)
|
||||||
|
path_list = abspath(path).split(sep)
|
||||||
|
|
||||||
|
# Work out how much of the filepath is shared by start and path.
|
||||||
|
i = len(commonprefix([start_list, path_list]))
|
||||||
|
|
||||||
|
rel_list = [pardir] * (len(start_list)-i) + path_list[i:]
|
||||||
|
if not rel_list:
|
||||||
|
return curdir
|
||||||
|
return join(*rel_list)
|
||||||
|
elif 'nt' in sys.builtin_module_names:
|
||||||
|
def relpath(path, start=os.path.curdir):
|
||||||
|
"""Return a relative version of a path"""
|
||||||
|
from os.path import sep, curdir, join, abspath, commonprefix, \
|
||||||
|
pardir, splitunc
|
||||||
|
|
||||||
|
if not path:
|
||||||
|
raise ValueError("no path specified")
|
||||||
|
start_list = abspath(start).split(sep)
|
||||||
|
path_list = abspath(path).split(sep)
|
||||||
|
if start_list[0].lower() != path_list[0].lower():
|
||||||
|
unc_path, rest = splitunc(path)
|
||||||
|
unc_start, rest = splitunc(start)
|
||||||
|
if bool(unc_path) ^ bool(unc_start):
|
||||||
|
raise ValueError("Cannot mix UNC and non-UNC paths (%s and %s)"
|
||||||
|
% (path, start))
|
||||||
|
else:
|
||||||
|
raise ValueError("path is on drive %s, start on drive %s"
|
||||||
|
% (path_list[0], start_list[0]))
|
||||||
|
# Work out how much of the filepath is shared by start and path.
|
||||||
|
for i in range(min(len(start_list), len(path_list))):
|
||||||
|
if start_list[i].lower() != path_list[i].lower():
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
i += 1
|
||||||
|
|
||||||
|
rel_list = [pardir] * (len(start_list)-i) + path_list[i:]
|
||||||
|
if not rel_list:
|
||||||
|
return curdir
|
||||||
|
return join(*rel_list)
|
||||||
|
else:
|
||||||
|
raise RuntimeError("Unsupported platform (no relpath available!)")
|
||||||
|
|
||||||
|
#------------------------------------------------------------------------------
|
||||||
|
# Registration hook
|
||||||
|
#------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
def plot_directive(name, arguments, options, content, lineno,
|
||||||
|
content_offset, block_text, state, state_machine):
|
||||||
|
return run(arguments, content, options, state_machine, state, lineno)
|
||||||
|
plot_directive.__doc__ = __doc__
|
||||||
|
|
||||||
|
def _option_boolean(arg):
|
||||||
|
if not arg or not arg.strip():
|
||||||
|
# no argument given, assume used as a flag
|
||||||
|
return True
|
||||||
|
elif arg.strip().lower() in ('no', '0', 'false'):
|
||||||
|
return False
|
||||||
|
elif arg.strip().lower() in ('yes', '1', 'true'):
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
raise ValueError('"%s" unknown boolean' % arg)
|
||||||
|
|
||||||
|
def _option_format(arg):
|
||||||
|
return directives.choice(arg, ('python', 'doctest'))
|
||||||
|
|
||||||
|
def _option_align(arg):
|
||||||
|
return directives.choice(arg, ("top", "middle", "bottom", "left", "center",
|
||||||
|
"right"))
|
||||||
|
|
||||||
|
def mark_plot_labels(app, document):
|
||||||
|
"""
|
||||||
|
To make plots referenceable, we need to move the reference from
|
||||||
|
the "htmlonly" (or "latexonly") node to the actual figure node
|
||||||
|
itself.
|
||||||
|
"""
|
||||||
|
for name, explicit in document.nametypes.items():
|
||||||
|
if not explicit:
|
||||||
|
continue
|
||||||
|
labelid = document.nameids[name]
|
||||||
|
if labelid is None:
|
||||||
|
continue
|
||||||
|
node = document.ids[labelid]
|
||||||
|
if node.tagname in ('html_only', 'latex_only'):
|
||||||
|
for n in node:
|
||||||
|
if n.tagname == 'figure':
|
||||||
|
sectname = name
|
||||||
|
for c in n:
|
||||||
|
if c.tagname == 'caption':
|
||||||
|
sectname = c.astext()
|
||||||
|
break
|
||||||
|
|
||||||
|
node['ids'].remove(labelid)
|
||||||
|
node['names'].remove(name)
|
||||||
|
n['ids'].append(labelid)
|
||||||
|
n['names'].append(name)
|
||||||
|
document.settings.env.labels[name] = \
|
||||||
|
document.settings.env.docname, labelid, sectname
|
||||||
|
break
|
||||||
|
|
||||||
|
def setup(app):
|
||||||
|
setup.app = app
|
||||||
|
setup.config = app.config
|
||||||
|
setup.confdir = app.confdir
|
||||||
|
|
||||||
|
options = {'alt': directives.unchanged,
|
||||||
|
'height': directives.length_or_unitless,
|
||||||
|
'width': directives.length_or_percentage_or_unitless,
|
||||||
|
'scale': directives.nonnegative_int,
|
||||||
|
'align': _option_align,
|
||||||
|
'class': directives.class_option,
|
||||||
|
'include-source': _option_boolean,
|
||||||
|
'format': _option_format,
|
||||||
|
'context': directives.flag,
|
||||||
|
'nofigs': directives.flag,
|
||||||
|
'encoding': directives.encoding
|
||||||
|
}
|
||||||
|
|
||||||
|
app.add_directive('plot', plot_directive, True, (0, 2, False), **options)
|
||||||
|
app.add_config_value('plot_pre_code', None, True)
|
||||||
|
app.add_config_value('plot_include_source', False, True)
|
||||||
|
app.add_config_value('plot_formats', ['png', 'hires.png', 'pdf'], True)
|
||||||
|
app.add_config_value('plot_basedir', None, True)
|
||||||
|
app.add_config_value('plot_html_show_formats', True, True)
|
||||||
|
app.add_config_value('plot_rcparams', {}, True)
|
||||||
|
|
||||||
|
app.connect('doctree-read', mark_plot_labels)
|
||||||
|
|
||||||
|
#------------------------------------------------------------------------------
|
||||||
|
# Doctest handling
|
||||||
|
#------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
def contains_doctest(text):
|
||||||
|
try:
|
||||||
|
# check if it's valid Python as-is
|
||||||
|
compile(text, '<string>', 'exec')
|
||||||
|
return False
|
||||||
|
except SyntaxError:
|
||||||
|
pass
|
||||||
|
r = re.compile(r'^\s*>>>', re.M)
|
||||||
|
m = r.search(text)
|
||||||
|
return bool(m)
|
||||||
|
|
||||||
|
def unescape_doctest(text):
|
||||||
|
"""
|
||||||
|
Extract code from a piece of text, which contains either Python code
|
||||||
|
or doctests.
|
||||||
|
|
||||||
|
"""
|
||||||
|
if not contains_doctest(text):
|
||||||
|
return text
|
||||||
|
|
||||||
|
code = ""
|
||||||
|
for line in text.split("\n"):
|
||||||
|
m = re.match(r'^\s*(>>>|\.\.\.) (.*)$', line)
|
||||||
|
if m:
|
||||||
|
code += m.group(2) + "\n"
|
||||||
|
elif line.strip():
|
||||||
|
code += "# " + line.strip() + "\n"
|
||||||
|
else:
|
||||||
|
code += "\n"
|
||||||
|
return code
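# Illustration only (hypothetical input): unescape_doctest() turns a doctest
# transcript back into a plain script, keeping non-source lines as comments.
_DOCTEST_SAMPLE = ">>> x = 1 + 1\n>>> print(x)\n2\n"
# unescape_doctest(_DOCTEST_SAMPLE) yields roughly:
#   x = 1 + 1
#   print(x)
#   # 2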
|
||||||
|
|
||||||
|
def split_code_at_show(text):
|
||||||
|
"""
|
||||||
|
Split code at plt.show()
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
parts = []
|
||||||
|
is_doctest = contains_doctest(text)
|
||||||
|
|
||||||
|
part = []
|
||||||
|
for line in text.split("\n"):
|
||||||
|
if (not is_doctest and line.strip() == 'plt.show()') or \
|
||||||
|
(is_doctest and line.strip() == '>>> plt.show()'):
|
||||||
|
part.append(line)
|
||||||
|
parts.append("\n".join(part))
|
||||||
|
part = []
|
||||||
|
else:
|
||||||
|
part.append(line)
|
||||||
|
if "\n".join(part).strip():
|
||||||
|
parts.append("\n".join(part))
|
||||||
|
return parts
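# Illustration only (hypothetical script): split_code_at_show() cuts the source
# at every plt.show() so each chunk can be rendered as its own figure.
_SPLIT_SAMPLE = "plt.plot([1, 2, 3])\nplt.show()\nplt.plot([4, 5, 6])\nplt.show()"
# split_code_at_show(_SPLIT_SAMPLE) returns two pieces, each ending in plt.show():
#   ['plt.plot([1, 2, 3])\nplt.show()', 'plt.plot([4, 5, 6])\nplt.show()']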
|
||||||
|
|
||||||
|
#------------------------------------------------------------------------------
|
||||||
|
# Template
|
||||||
|
#------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
TEMPLATE = """
|
||||||
|
{{ source_code }}
|
||||||
|
|
||||||
|
{{ only_html }}
|
||||||
|
|
||||||
|
{% if source_link or (html_show_formats and not multi_image) %}
|
||||||
|
(
|
||||||
|
{%- if source_link -%}
|
||||||
|
`Source code <{{ source_link }}>`__
|
||||||
|
{%- endif -%}
|
||||||
|
{%- if html_show_formats and not multi_image -%}
|
||||||
|
{%- for img in images -%}
|
||||||
|
{%- for fmt in img.formats -%}
|
||||||
|
{%- if source_link or not loop.first -%}, {% endif -%}
|
||||||
|
`{{ fmt }} <{{ dest_dir }}/{{ img.basename }}.{{ fmt }}>`__
|
||||||
|
{%- endfor -%}
|
||||||
|
{%- endfor -%}
|
||||||
|
{%- endif -%}
|
||||||
|
)
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% for img in images %}
|
||||||
|
.. figure:: {{ build_dir }}/{{ img.basename }}.png
|
||||||
|
{%- for option in options %}
|
||||||
|
{{ option }}
|
||||||
|
{% endfor %}
|
||||||
|
|
||||||
|
{% if html_show_formats and multi_image -%}
|
||||||
|
(
|
||||||
|
{%- for fmt in img.formats -%}
|
||||||
|
{%- if not loop.first -%}, {% endif -%}
|
||||||
|
`{{ fmt }} <{{ dest_dir }}/{{ img.basename }}.{{ fmt }}>`__
|
||||||
|
{%- endfor -%}
|
||||||
|
)
|
||||||
|
{%- endif -%}
|
||||||
|
|
||||||
|
{{ caption }}
|
||||||
|
{% endfor %}
|
||||||
|
|
||||||
|
{{ only_latex }}
|
||||||
|
|
||||||
|
{% for img in images %}
|
||||||
|
.. image:: {{ build_dir }}/{{ img.basename }}.pdf
|
||||||
|
{% endfor %}
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
exception_template = """
|
||||||
|
.. htmlonly::
|
||||||
|
|
||||||
|
[`source code <%(linkdir)s/%(basename)s.py>`__]
|
||||||
|
|
||||||
|
Exception occurred rendering plot.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
# the context of the plot for all directives specified with the
|
||||||
|
# :context: option
|
||||||
|
plot_context = dict()
|
||||||
|
|
||||||
|
class ImageFile:
|
||||||
|
def __init__(self, basename, dirname):
|
||||||
|
self.basename = basename
|
||||||
|
self.dirname = dirname
|
||||||
|
self.formats = []
|
||||||
|
|
||||||
|
def filename(self, format):
|
||||||
|
return os.path.join(self.dirname, "%s.%s" % (self.basename, format))
|
||||||
|
|
||||||
|
def filenames(self):
|
||||||
|
return [self.filename(fmt) for fmt in self.formats]
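# Usage sketch (hypothetical paths): render_figures() below records every saved
# format on an ImageFile so the output template can link each generated file.
#   img = ImageFile('myplot', '/tmp/plot_directive')
#   img.formats.extend(['png', 'pdf'])
#   img.filenames()  # ['/tmp/plot_directive/myplot.png', '/tmp/plot_directive/myplot.pdf']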
|
||||||
|
|
||||||
|
def out_of_date(original, derived):
|
||||||
|
"""
|
||||||
|
Returns True if derivative is out-of-date wrt original,
|
||||||
|
both of which are full file paths.
|
||||||
|
"""
|
||||||
|
return (not os.path.exists(derived) or
|
||||||
|
(os.path.exists(original) and
|
||||||
|
os.stat(derived).st_mtime < os.stat(original).st_mtime))
|
||||||
|
|
||||||
|
class PlotError(RuntimeError):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def run_code(code, code_path, ns=None, function_name=None):
|
||||||
|
"""
|
||||||
|
Import a Python module from a path, and run the function given by
|
||||||
|
name, if function_name is not None.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Change the working directory to the directory of the example, so
|
||||||
|
# it can get at its data files, if any. Add its path to sys.path
|
||||||
|
# so it can import any helper modules sitting beside it.
|
||||||
|
|
||||||
|
pwd = os.getcwd()
|
||||||
|
old_sys_path = list(sys.path)
|
||||||
|
if code_path is not None:
|
||||||
|
dirname = os.path.abspath(os.path.dirname(code_path))
|
||||||
|
os.chdir(dirname)
|
||||||
|
sys.path.insert(0, dirname)
|
||||||
|
|
||||||
|
# Redirect stdout
|
||||||
|
stdout = sys.stdout
|
||||||
|
sys.stdout = io.StringIO()
|
||||||
|
|
||||||
|
# Reset sys.argv
|
||||||
|
old_sys_argv = sys.argv
|
||||||
|
sys.argv = [code_path]
|
||||||
|
|
||||||
|
try:
|
||||||
|
try:
|
||||||
|
code = unescape_doctest(code)
|
||||||
|
if ns is None:
|
||||||
|
ns = {}
|
||||||
|
if not ns:
|
||||||
|
if setup.config.plot_pre_code is None:
|
||||||
|
exec("import numpy as np\nfrom matplotlib import pyplot as plt\n", ns)
|
||||||
|
else:
|
||||||
|
exec(setup.config.plot_pre_code, ns)
|
||||||
|
if "__main__" in code:
|
||||||
|
exec("__name__ = '__main__'", ns)
|
||||||
|
exec(code, ns)
|
||||||
|
if function_name is not None:
|
||||||
|
exec(function_name + "()", ns)
|
||||||
|
except (Exception, SystemExit) as err:
|
||||||
|
raise PlotError(traceback.format_exc())
|
||||||
|
finally:
|
||||||
|
os.chdir(pwd)
|
||||||
|
sys.argv = old_sys_argv
|
||||||
|
sys.path[:] = old_sys_path
|
||||||
|
sys.stdout = stdout
|
||||||
|
return ns
|
||||||
|
|
||||||
|
def clear_state(plot_rcparams):
|
||||||
|
plt.close('all')
|
||||||
|
matplotlib.rc_file_defaults()
|
||||||
|
matplotlib.rcParams.update(plot_rcparams)
|
||||||
|
|
||||||
|
def render_figures(code, code_path, output_dir, output_base, context,
|
||||||
|
function_name, config):
|
||||||
|
"""
|
||||||
|
Run a pyplot script and save the low and high res PNGs and a PDF
|
||||||
|
in outdir.
|
||||||
|
|
||||||
|
Save the images under *output_dir* with file names derived from
|
||||||
|
*output_base*
|
||||||
|
"""
|
||||||
|
# -- Parse format list
|
||||||
|
default_dpi = {'png': 80, 'hires.png': 200, 'pdf': 200}
|
||||||
|
formats = []
|
||||||
|
plot_formats = config.plot_formats
|
||||||
|
if isinstance(plot_formats, str):
|
||||||
|
plot_formats = eval(plot_formats)
|
||||||
|
for fmt in plot_formats:
|
||||||
|
if isinstance(fmt, str):
|
||||||
|
formats.append((fmt, default_dpi.get(fmt, 80)))
|
||||||
|
elif type(fmt) in (tuple, list) and len(fmt)==2:
|
||||||
|
formats.append((str(fmt[0]), int(fmt[1])))
|
||||||
|
else:
|
||||||
|
raise PlotError('invalid image format "%r" in plot_formats' % fmt)
|
||||||
|
|
||||||
|
# -- Try to determine if all images already exist
|
||||||
|
|
||||||
|
code_pieces = split_code_at_show(code)
|
||||||
|
|
||||||
|
# Look for single-figure output files first
|
||||||
|
all_exists = True
|
||||||
|
img = ImageFile(output_base, output_dir)
|
||||||
|
for format, dpi in formats:
|
||||||
|
if out_of_date(code_path, img.filename(format)):
|
||||||
|
all_exists = False
|
||||||
|
break
|
||||||
|
img.formats.append(format)
|
||||||
|
|
||||||
|
if all_exists:
|
||||||
|
return [(code, [img])]
|
||||||
|
|
||||||
|
# Then look for multi-figure output files
|
||||||
|
results = []
|
||||||
|
all_exists = True
|
||||||
|
for i, code_piece in enumerate(code_pieces):
|
||||||
|
images = []
|
||||||
|
for j in range(1000):
|
||||||
|
if len(code_pieces) > 1:
|
||||||
|
img = ImageFile('%s_%02d_%02d' % (output_base, i, j), output_dir)
|
||||||
|
else:
|
||||||
|
img = ImageFile('%s_%02d' % (output_base, j), output_dir)
|
||||||
|
for format, dpi in formats:
|
||||||
|
if out_of_date(code_path, img.filename(format)):
|
||||||
|
all_exists = False
|
||||||
|
break
|
||||||
|
img.formats.append(format)
|
||||||
|
|
||||||
|
# assume that if we have one, we have them all
|
||||||
|
if not all_exists:
|
||||||
|
all_exists = (j > 0)
|
||||||
|
break
|
||||||
|
images.append(img)
|
||||||
|
if not all_exists:
|
||||||
|
break
|
||||||
|
results.append((code_piece, images))
|
||||||
|
|
||||||
|
if all_exists:
|
||||||
|
return results
|
||||||
|
|
||||||
|
# We didn't find the files, so build them
|
||||||
|
|
||||||
|
results = []
|
||||||
|
if context:
|
||||||
|
ns = plot_context
|
||||||
|
else:
|
||||||
|
ns = {}
|
||||||
|
|
||||||
|
for i, code_piece in enumerate(code_pieces):
|
||||||
|
if not context:
|
||||||
|
clear_state(config.plot_rcparams)
|
||||||
|
run_code(code_piece, code_path, ns, function_name)
|
||||||
|
|
||||||
|
images = []
|
||||||
|
fig_managers = _pylab_helpers.Gcf.get_all_fig_managers()
|
||||||
|
for j, figman in enumerate(fig_managers):
|
||||||
|
if len(fig_managers) == 1 and len(code_pieces) == 1:
|
||||||
|
img = ImageFile(output_base, output_dir)
|
||||||
|
elif len(code_pieces) == 1:
|
||||||
|
img = ImageFile("%s_%02d" % (output_base, j), output_dir)
|
||||||
|
else:
|
||||||
|
img = ImageFile("%s_%02d_%02d" % (output_base, i, j),
|
||||||
|
output_dir)
|
||||||
|
images.append(img)
|
||||||
|
for format, dpi in formats:
|
||||||
|
try:
|
||||||
|
figman.canvas.figure.savefig(img.filename(format), dpi=dpi)
|
||||||
|
except Exception as err:
|
||||||
|
raise PlotError(traceback.format_exc())
|
||||||
|
img.formats.append(format)
|
||||||
|
|
||||||
|
results.append((code_piece, images))
|
||||||
|
|
||||||
|
if not context:
|
||||||
|
clear_state(config.plot_rcparams)
|
||||||
|
|
||||||
|
return results
|
||||||
|
|
||||||
|
def run(arguments, content, options, state_machine, state, lineno):
|
||||||
|
# The user may provide a filename *or* Python code content, but not both
|
||||||
|
if arguments and content:
|
||||||
|
raise RuntimeError("plot:: directive can't have both args and content")
|
||||||
|
|
||||||
|
document = state_machine.document
|
||||||
|
config = document.settings.env.config
|
||||||
|
nofigs = 'nofigs' in options
|
||||||
|
|
||||||
|
options.setdefault('include-source', config.plot_include_source)
|
||||||
|
context = 'context' in options
|
||||||
|
|
||||||
|
rst_file = document.attributes['source']
|
||||||
|
rst_dir = os.path.dirname(rst_file)
|
||||||
|
|
||||||
|
if len(arguments):
|
||||||
|
if not config.plot_basedir:
|
||||||
|
source_file_name = os.path.join(setup.app.builder.srcdir,
|
||||||
|
directives.uri(arguments[0]))
|
||||||
|
else:
|
||||||
|
source_file_name = os.path.join(setup.app.builder.srcdir, config.plot_basedir,
|
||||||
|
directives.uri(arguments[0]))
|
||||||
|
|
||||||
|
# If there is content, it will be passed as a caption.
|
||||||
|
caption = '\n'.join(content)
|
||||||
|
|
||||||
|
# If the optional function name is provided, use it
|
||||||
|
if len(arguments) == 2:
|
||||||
|
function_name = arguments[1]
|
||||||
|
else:
|
||||||
|
function_name = None
|
||||||
|
|
||||||
|
fd = open(source_file_name, 'r')
|
||||||
|
code = fd.read()
|
||||||
|
fd.close()
|
||||||
|
output_base = os.path.basename(source_file_name)
|
||||||
|
else:
|
||||||
|
source_file_name = rst_file
|
||||||
|
code = textwrap.dedent("\n".join(map(str, content)))
|
||||||
|
counter = document.attributes.get('_plot_counter', 0) + 1
|
||||||
|
document.attributes['_plot_counter'] = counter
|
||||||
|
base, ext = os.path.splitext(os.path.basename(source_file_name))
|
||||||
|
output_base = '%s-%d.py' % (base, counter)
|
||||||
|
function_name = None
|
||||||
|
caption = ''
|
||||||
|
|
||||||
|
base, source_ext = os.path.splitext(output_base)
|
||||||
|
if source_ext in ('.py', '.rst', '.txt'):
|
||||||
|
output_base = base
|
||||||
|
else:
|
||||||
|
source_ext = ''
|
||||||
|
|
||||||
|
# ensure that LaTeX includegraphics doesn't choke in foo.bar.pdf filenames
|
||||||
|
output_base = output_base.replace('.', '-')
|
||||||
|
|
||||||
|
# is it in doctest format?
|
||||||
|
is_doctest = contains_doctest(code)
|
||||||
|
if 'format' in options:
|
||||||
|
if options['format'] == 'python':
|
||||||
|
is_doctest = False
|
||||||
|
else:
|
||||||
|
is_doctest = True
|
||||||
|
|
||||||
|
# determine output directory name fragment
|
||||||
|
source_rel_name = relpath(source_file_name, setup.app.srcdir)
|
||||||
|
source_rel_dir = os.path.dirname(source_rel_name)
|
||||||
|
while source_rel_dir.startswith(os.path.sep):
|
||||||
|
source_rel_dir = source_rel_dir[1:]
|
||||||
|
|
||||||
|
# build_dir: where to place output files (temporarily)
|
||||||
|
build_dir = os.path.join(os.path.dirname(setup.app.doctreedir),
|
||||||
|
'plot_directive',
|
||||||
|
source_rel_dir)
|
||||||
|
# get rid of .. in paths, also changes pathsep
|
||||||
|
# see note in Python docs for warning about symbolic links on Windows.
|
||||||
|
# need to compare source and dest paths at end
|
||||||
|
build_dir = os.path.normpath(build_dir)
|
||||||
|
|
||||||
|
if not os.path.exists(build_dir):
|
||||||
|
os.makedirs(build_dir)
|
||||||
|
|
||||||
|
# output_dir: final location in the builder's directory
|
||||||
|
dest_dir = os.path.abspath(os.path.join(setup.app.builder.outdir,
|
||||||
|
source_rel_dir))
|
||||||
|
if not os.path.exists(dest_dir):
|
||||||
|
os.makedirs(dest_dir) # no problem here for me, but just use built-ins
|
||||||
|
|
||||||
|
# how to link to files from the RST file
|
||||||
|
dest_dir_link = os.path.join(relpath(setup.app.srcdir, rst_dir),
|
||||||
|
source_rel_dir).replace(os.path.sep, '/')
|
||||||
|
build_dir_link = relpath(build_dir, rst_dir).replace(os.path.sep, '/')
|
||||||
|
source_link = dest_dir_link + '/' + output_base + source_ext
|
||||||
|
|
||||||
|
# make figures
|
||||||
|
try:
|
||||||
|
results = render_figures(code, source_file_name, build_dir, output_base,
|
||||||
|
context, function_name, config)
|
||||||
|
errors = []
|
||||||
|
except PlotError as err:
|
||||||
|
reporter = state.memo.reporter
|
||||||
|
sm = reporter.system_message(
|
||||||
|
2, "Exception occurred in plotting %s\n from %s:\n%s" % (output_base,
|
||||||
|
source_file_name, err),
|
||||||
|
line=lineno)
|
||||||
|
results = [(code, [])]
|
||||||
|
errors = [sm]
|
||||||
|
|
||||||
|
# Properly indent the caption
|
||||||
|
caption = '\n'.join(' ' + line.strip()
|
||||||
|
for line in caption.split('\n'))
|
||||||
|
|
||||||
|
# generate output restructuredtext
|
||||||
|
total_lines = []
|
||||||
|
for j, (code_piece, images) in enumerate(results):
|
||||||
|
if options['include-source']:
|
||||||
|
if is_doctest:
|
||||||
|
lines = ['']
|
||||||
|
lines += [row.rstrip() for row in code_piece.split('\n')]
|
||||||
|
else:
|
||||||
|
lines = ['.. code-block:: python', '']
|
||||||
|
lines += [' %s' % row.rstrip()
|
||||||
|
for row in code_piece.split('\n')]
|
||||||
|
source_code = "\n".join(lines)
|
||||||
|
else:
|
||||||
|
source_code = ""
|
||||||
|
|
||||||
|
if nofigs:
|
||||||
|
images = []
|
||||||
|
|
||||||
|
opts = [':%s: %s' % (key, val) for key, val in list(options.items())
|
||||||
|
if key in ('alt', 'height', 'width', 'scale', 'align', 'class')]
|
||||||
|
|
||||||
|
only_html = ".. only:: html"
|
||||||
|
only_latex = ".. only:: latex"
|
||||||
|
|
||||||
|
if j == 0:
|
||||||
|
src_link = source_link
|
||||||
|
else:
|
||||||
|
src_link = None
|
||||||
|
|
||||||
|
result = format_template(
|
||||||
|
TEMPLATE,
|
||||||
|
dest_dir=dest_dir_link,
|
||||||
|
build_dir=build_dir_link,
|
||||||
|
source_link=src_link,
|
||||||
|
multi_image=len(images) > 1,
|
||||||
|
only_html=only_html,
|
||||||
|
only_latex=only_latex,
|
||||||
|
options=opts,
|
||||||
|
images=images,
|
||||||
|
source_code=source_code,
|
||||||
|
html_show_formats=config.plot_html_show_formats,
|
||||||
|
caption=caption)
|
||||||
|
|
||||||
|
total_lines.extend(result.split("\n"))
|
||||||
|
total_lines.extend("\n")
|
||||||
|
|
||||||
|
if total_lines:
|
||||||
|
state_machine.insert_input(total_lines, source=source_file_name)
|
||||||
|
|
||||||
|
# copy image files to builder's output directory, if necessary
|
||||||
|
if not os.path.exists(dest_dir):
|
||||||
|
cbook.mkdirs(dest_dir)
|
||||||
|
|
||||||
|
for code_piece, images in results:
|
||||||
|
for img in images:
|
||||||
|
for fn in img.filenames():
|
||||||
|
destimg = os.path.join(dest_dir, os.path.basename(fn))
|
||||||
|
if fn != destimg:
|
||||||
|
shutil.copyfile(fn, destimg)
|
||||||
|
|
||||||
|
# copy script (if necessary)
|
||||||
|
#if source_file_name == rst_file:
|
||||||
|
target_name = os.path.join(dest_dir, output_base + source_ext)
|
||||||
|
f = open(target_name, 'w')
|
||||||
|
f.write(unescape_doctest(code))
|
||||||
|
f.close()
|
||||||
|
|
||||||
|
return errors
|
93
doc/sphinxext/sphinx_autorun/__init__.py
Normal file
@ -0,0 +1,93 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
"""
|
||||||
|
sphinxcontrib.autorun
~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
Run the code and insert stdout after the code block.
|
||||||
|
"""
|
||||||
|
import os
|
||||||
|
from subprocess import PIPE, Popen
|
||||||
|
|
||||||
|
from docutils import nodes
|
||||||
|
from docutils.parsers.rst import Directive, directives
|
||||||
|
from sphinx.errors import SphinxError
|
||||||
|
|
||||||
|
from sphinx_autorun import version
|
||||||
|
|
||||||
|
__version__ = version.version
|
||||||
|
|
||||||
|
|
||||||
|
class RunBlockError(SphinxError):
|
||||||
|
category = 'runblock error'
|
||||||
|
|
||||||
|
|
||||||
|
class AutoRun(object):
|
||||||
|
here = os.path.abspath(__file__)
|
||||||
|
pycon = os.path.join(os.path.dirname(here), 'pycon.py')
|
||||||
|
config = {
|
||||||
|
'pycon': 'python ' + pycon,
|
||||||
|
'pycon_prefix_chars': 4,
|
||||||
|
'pycon_show_source': False,
|
||||||
|
'console': 'bash',
|
||||||
|
'console_prefix_chars': 1,
|
||||||
|
}
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def builder_init(cls, app):
|
||||||
|
cls.config.update(app.builder.config.autorun_languages)
|
||||||
|
|
||||||
|
|
||||||
|
class RunBlock(Directive):
|
||||||
|
has_content = True
|
||||||
|
required_arguments = 1
|
||||||
|
optional_arguments = 0
|
||||||
|
final_argument_whitespace = False
|
||||||
|
option_spec = {
|
||||||
|
'linenos': directives.flag,
|
||||||
|
}
|
||||||
|
|
||||||
|
def run(self):
|
||||||
|
config = AutoRun.config
|
||||||
|
language = self.arguments[0]
|
||||||
|
|
||||||
|
if language not in config:
|
||||||
|
raise RunBlockError('Unknown language %s' % language)
|
||||||
|
|
||||||
|
# Get configuration values for the language
|
||||||
|
args = config[language].split()
|
||||||
|
input_encoding = config.get(language+'_input_encoding', 'utf8')
|
||||||
|
output_encoding = config.get(language+'_output_encoding', 'utf8')
|
||||||
|
prefix_chars = config.get(language+'_prefix_chars', 0)
|
||||||
|
show_source = config.get(language+'_show_source', True)
|
||||||
|
|
||||||
|
# Build the code text
|
||||||
|
proc = Popen(args, bufsize=1, stdin=PIPE, stdout=PIPE, stderr=PIPE)
|
||||||
|
codelines = (line[prefix_chars:] for line in self.content)
|
||||||
|
code = u'\n'.join(codelines).encode(input_encoding)
|
||||||
|
|
||||||
|
# Run the code
|
||||||
|
stdout, stderr = proc.communicate(code)
|
||||||
|
|
||||||
|
# Process output
|
||||||
|
out = ''
if stdout:
|
||||||
|
out = stdout.decode(output_encoding)
|
||||||
|
if stderr:
|
||||||
|
out = stderr.decode(output_encoding)
|
||||||
|
|
||||||
|
# Get the original code with prefixes
|
||||||
|
if show_source:
|
||||||
|
code = u'\n'.join(self.content)
|
||||||
|
code_out = u'\n'.join((code, out))
|
||||||
|
else:
|
||||||
|
code_out = out
|
||||||
|
|
||||||
|
literal = nodes.literal_block(code_out, code_out)
|
||||||
|
literal['language'] = language
|
||||||
|
literal['linenos'] = 'linenos' in self.options
|
||||||
|
return [literal]
|
||||||
|
|
||||||
|
|
||||||
|
def setup(app):
|
||||||
|
app.add_directive('runblock', RunBlock)
|
||||||
|
app.connect('builder-inited', AutoRun.builder_init)
|
||||||
|
app.add_config_value('autorun_languages', AutoRun.config, 'env')
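# Hedged usage sketch (assumed values, not shipped configuration): a project's
# doc/conf.py can override the defaults in AutoRun.config, and documents then
# invoke the directive with a prefixed console transcript; the interpreter path
# is a hypothetical example.
#
#   extensions = ['sphinx_autorun']
#   autorun_languages = {
#       'pycon': 'python3 /path/to/sphinx_autorun/pycon.py',
#       'pycon_prefix_chars': 4,
#       'pycon_show_source': False,
#   }
#
# In a .rst file:
#
#   .. runblock:: pycon
#
#      >>> 1 + 1
#      2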
|
31
doc/sphinxext/sphinx_autorun/pycon.py
Normal file
@ -0,0 +1,31 @@
|
|||||||
|
import sys
|
||||||
|
from code import InteractiveInterpreter
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
"""
|
||||||
|
Print lines of input along with output.
|
||||||
|
"""
|
||||||
|
source_lines = (line.rstrip() for line in sys.stdin)
|
||||||
|
console = InteractiveInterpreter()
|
||||||
|
source = ''
|
||||||
|
try:
|
||||||
|
while True:
|
||||||
|
source = next(source_lines)
|
||||||
|
# Allow the user to ignore specific lines of output.
|
||||||
|
if not source.endswith('# ignore'):
|
||||||
|
print('>>>', source)
|
||||||
|
more = console.runsource(source)
|
||||||
|
while more:
|
||||||
|
next_line = next(source_lines)
|
||||||
|
print('...', next_line)
|
||||||
|
source += '\n' + next_line
|
||||||
|
more = console.runsource(source)
|
||||||
|
except StopIteration:
|
||||||
|
if more:
|
||||||
|
print('... ')
|
||||||
|
more = console.runsource(source + '\n')
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
main()
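# Hedged illustration (standalone demo, not part of this script): the runblock
# directive drives this interpreter loop by piping source lines over stdin,
# roughly equivalent to the following; the relative path is a hypothetical example.
#
#   from subprocess import run
#   demo = run(['python', 'pycon.py'], input='x = 2\nprint(x * 21)\n',
#              capture_output=True, text=True)
#   print(demo.stdout)   # echoes '>>> x = 2', '>>> print(x * 21)' and then '42'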
|
4
doc/sphinxext/sphinx_autorun/version.py
Normal file
@ -0,0 +1,4 @@
# coding: utf-8
# file generated by setuptools_scm
# don't change, don't track in version control
version = '1.1.1'
123
doc/sphinxext/triqs_example/triqs_example.py
Normal file
@ -0,0 +1,123 @@
|
|||||||
|
# -*- coding: utf-8 -*-
import tempfile
|
||||||
|
# seems to be executed at the level of the conf.py
|
||||||
|
# so we need to link the lib at that place...
|
||||||
|
"""
|
||||||
|
"""
|
||||||
|
import os
import sys
|
||||||
|
import codecs
|
||||||
|
from os import path
|
||||||
|
from subprocess import Popen,PIPE
|
||||||
|
from docutils import nodes
|
||||||
|
from docutils.parsers.rst import Directive
|
||||||
|
from docutils.parsers.rst import directives
|
||||||
|
from sphinx.errors import SphinxError
|
||||||
|
|
||||||
|
class TriqsExampleError(SphinxError):
|
||||||
|
category = 'triqs_example error'
|
||||||
|
|
||||||
|
class TriqsExampleRun:
|
||||||
|
#here = os.path.abspath(__file__)
|
||||||
|
#pycon = os.path.join(os.path.dirname(here),'pycon.py')
|
||||||
|
config = dict(
|
||||||
|
)
|
||||||
|
@classmethod
|
||||||
|
def builder_init(cls,app):
|
||||||
|
#cls.config.update(app.builder.config.autorun_languages)
|
||||||
|
#cls.config.update(app.builder.config.autocompile_opts)
|
||||||
|
pass
|
||||||
|
|
||||||
|
class TriqsExample(Directive):
|
||||||
|
has_content = True
|
||||||
|
required_arguments = 1
|
||||||
|
optional_arguments = 0
|
||||||
|
final_argument_whitespace = False
|
||||||
|
option_spec = {
|
||||||
|
'linenos': directives.flag,
|
||||||
|
}
|
||||||
|
|
||||||
|
def run(self):
|
||||||
|
document = self.state.document
|
||||||
|
filename = self.arguments[0]
|
||||||
|
if not document.settings.file_insertion_enabled:
|
||||||
|
return [document.reporter.warning('File insertion disabled',
|
||||||
|
line=self.lineno)]
|
||||||
|
env = document.settings.env
|
||||||
|
if filename.startswith('/') or filename.startswith(os.sep):
|
||||||
|
rel_fn = filename[1:]
|
||||||
|
else:
|
||||||
|
docdir = path.dirname(env.doc2path(env.docname, base=None))
|
||||||
|
rel_fn = path.normpath(path.join(docdir, filename))
|
||||||
|
try:
|
||||||
|
fn = path.join(env.srcdir, rel_fn)
|
||||||
|
except UnicodeDecodeError:
|
||||||
|
# the source directory is a bytestring with non-ASCII characters;
|
||||||
|
# let's try to encode the rel_fn in the file system encoding
|
||||||
|
rel_fn = rel_fn.encode(sys.getfilesystemencoding())
|
||||||
|
fn = path.join(env.srcdir, rel_fn)
|
||||||
|
|
||||||
|
encoding = self.options.get('encoding', env.config.source_encoding)
|
||||||
|
try:
|
||||||
|
f = codecs.open(fn, 'rU', encoding)
|
||||||
|
lines = f.readlines()
|
||||||
|
f.close()
|
||||||
|
except (IOError, OSError):
|
||||||
|
return [document.reporter.warning(
|
||||||
|
'Include file %r not found or reading it failed' % filename,
|
||||||
|
line=self.lineno)]
|
||||||
|
except UnicodeError:
|
||||||
|
return [document.reporter.warning(
|
||||||
|
'Encoding %r used for reading included file %r seems to '
|
||||||
|
'be wrong, try giving an :encoding: option' %
|
||||||
|
(encoding, filename))]
|
||||||
|
|
||||||
|
config = TriqsExampleRun.config
|
||||||
|
|
||||||
|
# Get configuration values for the language
|
||||||
|
input_encoding = 'utf8' #config.get(language+'_input_encoding','ascii')
|
||||||
|
output_encoding = 'utf8' #config.get(language+'_output_encoding','ascii')
|
||||||
|
show_source = True
|
||||||
|
|
||||||
|
# Build the code text
|
||||||
|
code = ''.join(lines).strip()
|
||||||
|
filename_clean = filename.rsplit('.',1)[0]
|
||||||
|
if filename_clean.startswith('./') : filename_clean = filename_clean[2:]
|
||||||
|
#print "Running the example ....",filename_clean
|
||||||
|
#print "Root ?", env.doc2path(env.docname, base=None)
|
||||||
|
|
||||||
|
import subprocess as S
|
||||||
|
error = True
|
||||||
|
try :
|
||||||
|
stdout =''
|
||||||
|
#resout = S.check_output("./example_bin/doc_%s"%(filename_clean) ,stderr=S.STDOUT,shell=True)
|
||||||
|
resout = S.check_output("./%s/doc_%s"%(docdir,filename_clean) ,stderr=S.STDOUT,shell=True)
|
||||||
|
if resout :
|
||||||
|
stdout = '---------- Result is -------\n' + resout.decode(output_encoding, 'ignore').strip()
|
||||||
|
error = False
|
||||||
|
except S.CalledProcessError as E :
|
||||||
|
stdout ='---------- RunTime error -------\n'
|
||||||
|
stdout += E.output.decode(output_encoding, 'ignore')
|
||||||
|
|
||||||
|
# Process output (stdout is already a decoded string at this point)
out = stdout
|
||||||
|
|
||||||
|
# Get the original code with prefixes
|
||||||
|
code_out = '\n'.join((code,out))
|
||||||
|
|
||||||
|
if error : # report on console
|
||||||
|
print(" Error in processing ")
|
||||||
|
print(code_out)
|
||||||
|
|
||||||
|
literal = nodes.literal_block(code_out,code_out)
|
||||||
|
literal['language'] = 'c'
|
||||||
|
literal['linenos'] = 'linenos' in self.options
|
||||||
|
return [literal]
|
||||||
|
|
||||||
|
def setup(app):
|
||||||
|
app.add_directive('triqs_example', TriqsExample)
|
||||||
|
app.connect('builder-inited',TriqsExampleRun.builder_init)
|
||||||
|
|
52
doc/themes/triqs/layout.html
vendored
Normal file
@ -0,0 +1,52 @@
|
|||||||
|
{#
|
||||||
|
layout.html
|
||||||
|
~~~~~~~~~~~
|
||||||
|
|
||||||
|
TRIQS layout template heavily based on the sphinxdoc theme.
|
||||||
|
|
||||||
|
:copyright: Copyright 2013 by the TRIQS team.
|
||||||
|
:copyright: Copyright 2007-2013 by the Sphinx team.
|
||||||
|
:license: BSD, see LICENSE for details.
|
||||||
|
#}
|
||||||
|
{%- extends "basic/layout.html" %}
|
||||||
|
|
||||||
|
{# put the sidebar before the body #}
|
||||||
|
{% block sidebar1 %}{{ sidebar() }}{% endblock %}
|
||||||
|
{% block sidebar2 %}{% endblock %}
|
||||||
|
|
||||||
|
{% block extrahead %}
|
||||||
|
<link href='https://fonts.googleapis.com/css?family=Open+Sans:300,400,700' rel='stylesheet' type='text/css'>
|
||||||
|
<script src="{{ pathto('_static/cufon-yui.js',1) }}" type="text/javascript"></script>
|
||||||
|
<script src="{{ pathto('_static/spaceman.cufonfonts.js',1) }}" type="text/javascript"></script>
|
||||||
|
<script type="text/javascript">
|
||||||
|
Cufon.replace('.triqs', { fontFamily: 'spaceman', hover: true });
|
||||||
|
</script>
|
||||||
|
{{ super() }}
|
||||||
|
{%- if not embedded %}
|
||||||
|
<style type="text/css">
|
||||||
|
table.right { float: right; margin-left: 20px; }
|
||||||
|
table.right td { border: 1px solid #ccc; }
|
||||||
|
{% if pagename == 'index' %}
|
||||||
|
.related { display: none; }
|
||||||
|
{% endif %}
|
||||||
|
</style>
|
||||||
|
{%- endif %}
|
||||||
|
{% endblock %}
|
||||||
|
|
||||||
|
{% block rootrellink %}
|
||||||
|
<li><a href="{{ pathto('index') }}">Home</a> »</li>
|
||||||
|
{% endblock %}
|
||||||
|
|
||||||
|
{% block header %}
|
||||||
|
<div class="pageheader">
|
||||||
|
<ul>
|
||||||
|
{% for header_link in header_links %}
|
||||||
|
<li><a href="{{ pathto(header_link[1]) }}">{{ header_link[0] }}</a></li>
|
||||||
|
{% endfor %}
|
||||||
|
</ul>
|
||||||
|
<div>
|
||||||
|
<h1 style="padding:0; margin: 10px 0 0 0;"><a class="triqs" href="{{ pathto('index') }}">{{ header_title }}</a></h1>
|
||||||
|
<span style="font-size: 14px; margin: 0px; padding: 0px;">{{ header_subtitle }}</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endblock %}
|
BIN
doc/themes/triqs/static/bodybg.png
vendored
Normal file
After Width: | Height: | Size: 602 B |
7
doc/themes/triqs/static/cufon-yui.js
vendored
Normal file
BIN
doc/themes/triqs/static/footerbg.png
vendored
Normal file
After Width: | Height: | Size: 313 B |
BIN
doc/themes/triqs/static/headerbg.png
vendored
Normal file
After Width: | Height: | Size: 344 B |
BIN
doc/themes/triqs/static/listitem.png
vendored
Normal file
After Width: | Height: | Size: 207 B |
BIN
doc/themes/triqs/static/relbg.png
vendored
Normal file
After Width: | Height: | Size: 332 B |
8
doc/themes/triqs/static/spaceman.cufonfonts.js
vendored
Normal file
449
doc/themes/triqs/static/triqs.css
vendored
Normal file
@ -0,0 +1,449 @@
|
|||||||
|
/*
|
||||||
|
* sphinx13.css
|
||||||
|
* ~~~~~~~~~~~~
|
||||||
|
*
|
||||||
|
* Sphinx stylesheet -- sphinx13 theme.
|
||||||
|
*
|
||||||
|
* :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS.
|
||||||
|
* :license: BSD, see LICENSE for details.
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
|
||||||
|
@import url("basic.css");
|
||||||
|
|
||||||
|
/* -- page layout ----------------------------------------------------------- */
|
||||||
|
|
||||||
|
body {
|
||||||
|
font-family: 'Open Sans', 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva',
|
||||||
|
'Verdana', sans-serif;
|
||||||
|
font-size: 14px;
|
||||||
|
text-align: center;
|
||||||
|
background-image: url(bodybg.png);
|
||||||
|
color: black;
|
||||||
|
padding: 0;
|
||||||
|
border-right: 1px solid #0a507a;
|
||||||
|
border-left: 1px solid #0a507a;
|
||||||
|
|
||||||
|
margin: 0 auto;
|
||||||
|
min-width: 780px;
|
||||||
|
max-width: 1080px;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
.red{
|
||||||
|
color: red
|
||||||
|
}
|
||||||
|
.blue{
|
||||||
|
color: blue
|
||||||
|
}
|
||||||
|
.green{
|
||||||
|
color: green
|
||||||
|
}
|
||||||
|
|
||||||
|
.param{
|
||||||
|
color: blue
|
||||||
|
}
|
||||||
|
|
||||||
|
a.triqs {
|
||||||
|
color: #073958;
|
||||||
|
text-decoration: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
a.triqs:hover {
|
||||||
|
color: #0a527f;
|
||||||
|
text-decoration: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.pageheader {
|
||||||
|
background-color: #dcdcdc;
|
||||||
|
text-align: left;
|
||||||
|
padding: 10px 15px;
|
||||||
|
color: #073958;
|
||||||
|
border: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.pageheader ul {
|
||||||
|
float: right;
|
||||||
|
color: #073958;
|
||||||
|
list-style-type: none;
|
||||||
|
padding-left: 0;
|
||||||
|
margin-top: 22px;
|
||||||
|
margin-right: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.pageheader li {
|
||||||
|
float: left;
|
||||||
|
margin: 0 0 0 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.pageheader li a {
|
||||||
|
padding: 8px 12px;
|
||||||
|
color: #073958;
|
||||||
|
text-shadow: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.pageheader li a:hover {
|
||||||
|
background-color: #f9f9f0;
|
||||||
|
color: #0a507a;
|
||||||
|
text-shadow: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.document {
|
||||||
|
background-color: white;
|
||||||
|
text-align: left;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.bodywrapper {
|
||||||
|
margin: 0 240px 0 0;
|
||||||
|
border-right: 1px solid #0a507a;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.body {
|
||||||
|
margin: 0;
|
||||||
|
padding: 0.5em 20px 20px 20px;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.related {
|
||||||
|
font-size: 1em;
|
||||||
|
color: white;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.related ul {
|
||||||
|
background-image: url(relbg.png);
|
||||||
|
height: 1.9em;
|
||||||
|
border-top: 1px solid #002e50;
|
||||||
|
border-bottom: 1px solid #002e50;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.related ul li {
|
||||||
|
margin: 0 5px 0 0;
|
||||||
|
padding: 0;
|
||||||
|
float: left;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.related ul li.right {
|
||||||
|
float: right;
|
||||||
|
margin-right: 5px;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.related ul li a {
|
||||||
|
margin: 0;
|
||||||
|
padding: 0 5px 0 5px;
|
||||||
|
line-height: 1.75em;
|
||||||
|
color: #f9f9f0;
|
||||||
|
text-shadow: 0px 0px 1px rgba(0, 0, 0, 0.5);
|
||||||
|
}
|
||||||
|
|
||||||
|
div.related ul li a:hover {
|
||||||
|
color: white;
|
||||||
|
/*text-decoration: underline;*/
|
||||||
|
text-shadow: 0px 0px 1px rgba(255, 255, 255, 0.5);
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebarwrapper {
|
||||||
|
position: relative;
|
||||||
|
top: 0px;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar {
|
||||||
|
margin: 0;
|
||||||
|
padding: 0 15px 15px 0;
|
||||||
|
width: 210px;
|
||||||
|
float: right;
|
||||||
|
font-size: 1em;
|
||||||
|
text-align: left;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar .logo {
|
||||||
|
font-size: 1.8em;
|
||||||
|
color: #0A507A;
|
||||||
|
font-weight: 300;
|
||||||
|
text-align: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar .logo img {
|
||||||
|
vertical-align: middle;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar input {
|
||||||
|
border: 1px solid #aaa;
|
||||||
|
font-family: 'Open Sans', 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva',
|
||||||
|
'Verdana', sans-serif;
|
||||||
|
font-size: 1em;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar h3 {
|
||||||
|
font-size: 1.5em;
|
||||||
|
border-top: 1px solid #0a507a;
|
||||||
|
margin-top: 1em;
|
||||||
|
margin-bottom: 0.5em;
|
||||||
|
padding-top: 0.5em;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar h4 {
|
||||||
|
font-size: 1.2em;
|
||||||
|
margin-bottom: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar h3, div.sphinxsidebar h4 {
|
||||||
|
margin-right: -15px;
|
||||||
|
margin-left: -15px;
|
||||||
|
padding-right: 14px;
|
||||||
|
padding-left: 14px;
|
||||||
|
color: #333;
|
||||||
|
font-weight: 300;
|
||||||
|
/*text-shadow: 0px 0px 0.5px rgba(0, 0, 0, 0.4);*/
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebarwrapper > h3:first-child {
|
||||||
|
margin-top: 0.5em;
|
||||||
|
border: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar h3 a {
|
||||||
|
color: #333;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar ul {
|
||||||
|
color: #444;
|
||||||
|
margin-top: 7px;
|
||||||
|
padding: 0;
|
||||||
|
line-height: 130%;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar ul ul {
|
||||||
|
margin-left: 20px;
|
||||||
|
list-style-image: url(listitem.png);
|
||||||
|
}
|
||||||
|
|
||||||
|
div.footer {
|
||||||
|
background-image: url(footerbg.png);
|
||||||
|
color: #ccc;
|
||||||
|
text-shadow: 0 0 .2px rgba(255, 255, 255, 0.8);
|
||||||
|
padding: 3px 8px 3px 0;
|
||||||
|
clear: both;
|
||||||
|
font-size: 0.8em;
|
||||||
|
text-align: right;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* no need to make a visible link to Sphinx on the Sphinx page */
|
||||||
|
div.footer a {
|
||||||
|
color: #ccc;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- body styles ----------------------------------------------------------- */
|
||||||
|
|
||||||
|
p {
|
||||||
|
margin: 0.8em 0 0.5em 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
a {
|
||||||
|
color: #A2881D;
|
||||||
|
text-decoration: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
a:hover {
|
||||||
|
color: #E1C13F;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.body a {
|
||||||
|
text-decoration: underline;
|
||||||
|
}
|
||||||
|
|
||||||
|
h1 {
|
||||||
|
margin: 10px 0 0 0;
|
||||||
|
font-size: 2.4em;
|
||||||
|
color: #0A507A;
|
||||||
|
font-weight: 300;
|
||||||
|
}
|
||||||
|
|
||||||
|
h2 {
|
||||||
|
margin: 1em 0 0.2em 0;
|
||||||
|
font-size: 1.5em;
|
||||||
|
font-weight: 300;
|
||||||
|
padding: 0;
|
||||||
|
color: #174967;
|
||||||
|
}
|
||||||
|
|
||||||
|
h3 {
|
||||||
|
margin: 1em 0 -0.3em 0;
|
||||||
|
font-size: 1.3em;
|
||||||
|
font-weight: 300;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.body h1 a, div.body h2 a, div.body h3 a, div.body h4 a, div.body h5 a, div.body h6 a {
|
||||||
|
text-decoration: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.body h1 a tt, div.body h2 a tt, div.body h3 a tt, div.body h4 a tt, div.body h5 a tt, div.body h6 a tt {
|
||||||
|
color: #0A507A !important;
|
||||||
|
font-size: inherit !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
a.headerlink {
|
||||||
|
color: #0A507A !important;
|
||||||
|
font-size: 12px;
|
||||||
|
margin-left: 6px;
|
||||||
|
padding: 0 4px 0 4px;
|
||||||
|
text-decoration: none !important;
|
||||||
|
float: right;
|
||||||
|
}
|
||||||
|
|
||||||
|
a.headerlink:hover {
|
||||||
|
background-color: #ccc;
|
||||||
|
color: white!important;
|
||||||
|
}
|
||||||
|
|
||||||
|
cite, code, tt {
|
||||||
|
font-family: 'Consolas', 'DejaVu Sans Mono',
|
||||||
|
'Bitstream Vera Sans Mono', monospace;
|
||||||
|
font-size: 14px;
|
||||||
|
letter-spacing: -0.02em;
|
||||||
|
}
|
||||||
|
|
||||||
|
tt {
|
||||||
|
background-color: #f2f2f2;
|
||||||
|
border: 1px solid #ddd;
|
||||||
|
border-radius: 2px;
|
||||||
|
color: #333;
|
||||||
|
padding: 1px;
|
||||||
|
}
|
||||||
|
|
||||||
|
tt.descname, tt.descclassname, tt.xref {
|
||||||
|
border: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
hr {
|
||||||
|
border: 1px solid #abc;
|
||||||
|
margin: 2em;
|
||||||
|
}
|
||||||
|
|
||||||
|
a tt {
|
||||||
|
border: 0;
|
||||||
|
color: #a2881d;
|
||||||
|
}
|
||||||
|
|
||||||
|
a tt:hover {
|
||||||
|
color: #e1c13f;
|
||||||
|
}
|
||||||
|
|
||||||
|
pre {
|
||||||
|
font-family: 'Consolas', 'DejaVu Sans Mono',
|
||||||
|
'Bitstream Vera Sans Mono', monospace;
|
||||||
|
font-size: 13px;
|
||||||
|
letter-spacing: 0.015em;
|
||||||
|
line-height: 120%;
|
||||||
|
padding: 0.5em;
|
||||||
|
border: 1px solid #ccc;
|
||||||
|
border-radius: 2px;
|
||||||
|
background-color: #f8f8f8;
|
||||||
|
}
|
||||||
|
|
||||||
|
pre a {
|
||||||
|
color: inherit;
|
||||||
|
text-decoration: underline;
|
||||||
|
}
|
||||||
|
|
||||||
|
td.linenos pre {
|
||||||
|
padding: 0.5em 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.quotebar {
|
||||||
|
background-color: #f8f8f8;
|
||||||
|
max-width: 250px;
|
||||||
|
float: right;
|
||||||
|
padding: 0px 7px;
|
||||||
|
border: 1px solid #ccc;
|
||||||
|
margin-left: 1em;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.topic {
|
||||||
|
background-color: #f8f8f8;
|
||||||
|
}
|
||||||
|
|
||||||
|
table {
|
||||||
|
border-collapse: collapse;
|
||||||
|
margin: 0 -0.5em 0 -0.5em;
|
||||||
|
}
|
||||||
|
|
||||||
|
table td, table th {
|
||||||
|
padding: 0.2em 0.5em 0.2em 0.5em;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.admonition, div.warning {
|
||||||
|
font-size: 0.9em;
|
||||||
|
margin: 1em 0 1em 0;
|
||||||
|
border: 1px solid #86989B;
|
||||||
|
border-radius: 2px;
|
||||||
|
background-color: #f7f7f7;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.admonition p, div.warning p {
|
||||||
|
margin: 0.5em 1em 0.5em 1em;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.admonition pre, div.warning pre {
|
||||||
|
margin: 0.4em 1em 0.4em 1em;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.admonition p.admonition-title,
|
||||||
|
div.warning p.admonition-title {
|
||||||
|
margin-top: 1em;
|
||||||
|
padding-top: 0.5em;
|
||||||
|
font-weight: bold;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.warning {
|
||||||
|
border: 1px solid #940000;
|
||||||
|
/* background-color: #FFCCCF;*/
|
||||||
|
}
|
||||||
|
|
||||||
|
div.warning p.admonition-title {
|
||||||
|
}
|
||||||
|
|
||||||
|
div.admonition ul, div.admonition ol,
|
||||||
|
div.warning ul, div.warning ol {
|
||||||
|
margin: 0.1em 0.5em 0.5em 3em;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.admonition .highlight, div.warning .highlight {
|
||||||
|
background-color: #f7f7f7;
|
||||||
|
}
|
||||||
|
|
||||||
|
.viewcode-back {
|
||||||
|
font-family: 'Open Sans', 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva',
|
||||||
|
'Verdana', sans-serif;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.viewcode-block:target {
|
||||||
|
background-color: #f4debf;
|
||||||
|
border-top: 1px solid #ac9;
|
||||||
|
border-bottom: 1px solid #ac9;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
.my-code-block.std-ref {
|
||||||
|
color : red;
|
||||||
|
}
|
||||||
|
|
||||||
|
.cppbrief {
|
||||||
|
color: #C6792C;
|
||||||
|
font-style: oblique;
|
||||||
|
}
|
||||||
|
|
||||||
|
.cppsynopsis {
|
||||||
|
background-color: #E7EDF9;
|
||||||
|
/*font-family: 'Open Sans', 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva', 'Verdana', sans-serif;*/
|
||||||
|
/*font-family: monospace; */
|
||||||
|
font-family: Verdana, Arial, Lucida Console;
|
||||||
|
font-size: 80%;
|
||||||
|
/*font-style: oblique;*/
|
||||||
|
/* white-space: pre;*/
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
4
doc/themes/triqs/theme.conf
vendored
Normal file
@ -0,0 +1,4 @@
[theme]
inherit = basic
stylesheet = triqs.css
pygments_style = sphinx
60
packaging/TRIQS-app4triqs-3.1.0-foss-2021b.eb
Normal file
@ -0,0 +1,60 @@
|
|||||||
|
easyblock = 'CMakeMake'
|
||||||
|
|
||||||
|
name = 'TRIQS-app4triqs'
|
||||||
|
version = '3.1.0'
|
||||||
|
|
||||||
|
homepage = 'https://triqs.github.io/app4triqs/'
|
||||||
|
description = """
|
||||||
|
PROVIDE HERE A DESCRIPTION OF YOUR APPLICATION
|
||||||
|
"""
|
||||||
|
|
||||||
|
docurls = ['https://triqs.github.io/app4triqs/%(version_major_minor)s.x/']
|
||||||
|
software_license = 'LicenseGPLv3'
|
||||||
|
|
||||||
|
toolchain = {'name': 'foss', 'version': '2021b'}
|
||||||
|
toolchainopts = {'pic': True, 'usempi': True}
|
||||||
|
|
||||||
|
source_urls = ['https://github.com/TRIQS/app4triqs/releases/download/%(version)s/']
|
||||||
|
sources = ['app4triqs-%(version)s.tar.gz']
|
||||||
|
checksums = ['PUT HERE THE SHA256 OF THE RELEASE TARBALL']
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('Python', '3.9.6'),
|
||||||
|
('SciPy-bundle', '2021.10'),
|
||||||
|
('Boost', '1.77.0'),
|
||||||
|
('Clang', '13.0.1'),
|
||||||
|
('Clang-Python-bindings', '13.0.1'),
|
||||||
|
('GMP', '6.2.1'),
|
||||||
|
('HDF5', '1.10.7'),
|
||||||
|
('Mako', '1.1.4'),
|
||||||
|
('TRIQS', '3.1.0'),
|
||||||
|
('NFFT', '3.5.2')
|
||||||
|
]
|
||||||
|
|
||||||
|
builddependencies = [
|
||||||
|
('CMake', '3.22.1')
|
||||||
|
]
|
||||||
|
|
||||||
|
separate_build_dir = True
|
||||||
|
|
||||||
|
runtest = 'test'
|
||||||
|
|
||||||
|
sanity_check_paths = {
|
||||||
|
'files': ['lib/libapp4triqs_c.a'],
|
||||||
|
'dirs': ['include/app4triqs', 'lib', 'share',
|
||||||
|
'lib/python%(pyshortver)s/site-packages/app4triqs'],
|
||||||
|
}
|
||||||
|
|
||||||
|
sanity_check_commands = ["python -c 'import app4triqs'"]
|
||||||
|
|
||||||
|
modextrapaths = {
|
||||||
|
'CPLUS_INCLUDE_PATH': 'include',
|
||||||
|
'PYTHONPATH': 'lib/python%(pyshortver)s/site-packages',
|
||||||
|
'CMAKE_PREFIX_PATH': 'lib/cmake/app4triqs',
|
||||||
|
}
|
||||||
|
modextravars = {
|
||||||
|
'APP4TRIQS_ROOT': '%(installdir)s',
|
||||||
|
'APP4TRIQS_VERSION': '%(version)s',
|
||||||
|
}
|
||||||
|
|
||||||
|
moduleclass = 'phys'
|
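EasyBuild resolves the `%(...)s` templates in this easyconfig (e.g. `%(version_major_minor)s`, `%(pyshortver)s`, `%(installdir)s`) from its own template dictionary at build time. As a rough illustration of the substitution only, with hypothetical values for the template keys (the real resolution is done by EasyBuild, not by this snippet):

```python
# Hypothetical template values; EasyBuild derives these from the easyconfig itself.
ctx = {"version_major_minor": "3.1", "pyshortver": "3.9"}

docurl = 'https://triqs.github.io/app4triqs/%(version_major_minor)s.x/' % ctx
sitepkg = 'lib/python%(pyshortver)s/site-packages/app4triqs' % ctx

print(docurl)   # https://triqs.github.io/app4triqs/3.1.x/
print(sitepkg)  # lib/python3.9/site-packages/app4triqs
```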
22
packaging/conda/build.sh
Normal file
@ -0,0 +1,22 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
|
mkdir build
|
||||||
|
cd build
|
||||||
|
|
||||||
|
# OpenMPI-specific environment setup - cf. https://github.com/conda-forge/libnetcdf-feedstock/pull/80
|
||||||
|
export OMPI_MCA_btl=self,tcp
|
||||||
|
export OMPI_MCA_plm=isolated
|
||||||
|
export OMPI_MCA_rmaps_base_oversubscribe=yes
|
||||||
|
export OMPI_MCA_btl_vader_single_copy_mechanism=none
|
||||||
|
mpiexec="mpiexec --allow-run-as-root"
|
||||||
|
|
||||||
|
source $PREFIX/share/triqs/triqsvars.sh
|
||||||
|
|
||||||
|
cmake \
|
||||||
|
-DCMAKE_INSTALL_PREFIX=$PREFIX \
|
||||||
|
-DCMAKE_BUILD_TYPE=Release \
|
||||||
|
..
|
||||||
|
|
||||||
|
make -j${CPU_COUNT} VERBOSE=1
|
||||||
|
CTEST_OUTPUT_ON_FAILURE=1 ctest
|
||||||
|
make install
|
3
packaging/conda/conda_build_config.yaml
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
mpi:
|
||||||
|
- mpich
|
||||||
|
- openmpi
|
57
packaging/conda/meta.yaml
Normal file
@ -0,0 +1,57 @@
|
|||||||
|
{% set version = "3.0.0" %}
|
||||||
|
|
||||||
|
package:
|
||||||
|
name: app4triqs
|
||||||
|
version: {{ version }}
|
||||||
|
|
||||||
|
source:
|
||||||
|
url: https://github.com/TRIQS/app4triqs/releases/download/{{ version }}/app4triqs-{{ version }}.tar.gz
|
||||||
|
sha256: PUT HERE THE SHA256 OF YOUR RELEASE TARBALL
|
||||||
|
|
||||||
|
build:
|
||||||
|
number: 0
|
||||||
|
skip: True # [win or py<30]
|
||||||
|
|
||||||
|
requirements:
|
||||||
|
build:
|
||||||
|
- cmake
|
||||||
|
- make
|
||||||
|
- {{ compiler('c') }}
|
||||||
|
- {{ compiler('cxx') }}
|
||||||
|
host:
|
||||||
|
- triqs {{ '.'.join(version.split('.')[:2]) }}
|
||||||
|
- boost-cpp
|
||||||
|
- hdf5
|
||||||
|
- {{ mpi }}
|
||||||
|
- libblas
|
||||||
|
- liblapack
|
||||||
|
- python
|
||||||
|
run:
|
||||||
|
- {{ pin_compatible("triqs", max_pin="x.x") }}
|
||||||
|
- boost-cpp
|
||||||
|
- hdf5
|
||||||
|
- {{ mpi }}
|
||||||
|
- libblas
|
||||||
|
- liblapack
|
||||||
|
- python
|
||||||
|
|
||||||
|
test:
|
||||||
|
commands:
|
||||||
|
- export OMPI_MCA_btl=self,tcp
|
||||||
|
- export OMPI_MCA_plm=isolated
|
||||||
|
- export OMPI_MCA_rmaps_base_oversubscribe=yes
|
||||||
|
- export OMPI_MCA_btl_vader_single_copy_mechanism=none
|
||||||
|
- export mpiexec="mpiexec --allow-run-as-root"
|
||||||
|
- python -c "import app4triqs"
|
||||||
|
|
||||||
|
about:
|
||||||
|
home: https://triqs.github.io/app4triqs
|
||||||
|
license: GPL-3.0-or-later
|
||||||
|
license_family: GPL
|
||||||
|
license_file: LICENSE.txt
|
||||||
|
summary: 'An application based on the TRIQS library'
|
||||||
|
|
||||||
|
extra:
|
||||||
|
recipe-maintainers:
|
||||||
|
- wentzell
|
||||||
|
- pgunn
|
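The host requirement `triqs {{ '.'.join(version.split('.')[:2]) }}` pins the recipe to the matching major.minor series of TRIQS. A quick illustration of how that Jinja2 expression evaluates, in plain Python outside of conda-build:

```python
version = "3.0.0"                       # value set at the top of meta.yaml
pin = '.'.join(version.split('.')[:2])  # keep only major.minor
print("triqs " + pin)                   # -> "triqs 3.0"
```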
26
python/app4triqs/CMakeLists.txt
Normal file
@ -0,0 +1,26 @@
|
|||||||
|
# Configure the version
|
||||||
|
configure_file(version.py.in version.py)
|
||||||
|
|
||||||
|
# All Python files. Copy them into the build dir to have a complete package for the tests.
|
||||||
|
file(GLOB_RECURSE python_sources RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} *.py)
|
||||||
|
file(GLOB_RECURSE wrap_generators RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} *_desc.py)
|
||||||
|
list(REMOVE_ITEM python_sources "${wrap_generators}")
|
||||||
|
foreach(file ${python_sources})
|
||||||
|
configure_file(${file} ${file} COPYONLY)
|
||||||
|
endforeach()
|
||||||
|
|
||||||
|
# Install python files to proper location
|
||||||
|
set(PYTHON_LIB_DEST ${TRIQS_PYTHON_LIB_DEST_ROOT}/${PROJECT_NAME})
|
||||||
|
install(FILES ${CMAKE_CURRENT_BINARY_DIR}/version.py DESTINATION ${PYTHON_LIB_DEST})
|
||||||
|
install(DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} DESTINATION ${TRIQS_PYTHON_LIB_DEST_ROOT} FILES_MATCHING PATTERN "*.py" PATTERN "*_desc.py" EXCLUDE)
|
||||||
|
|
||||||
|
# Build and install any python modules
|
||||||
|
foreach(gen ${wrap_generators})
|
||||||
|
string(REPLACE "_desc.py" "" gen ${gen})
|
||||||
|
get_filename_component(module_name ${gen} NAME_WE)
|
||||||
|
get_filename_component(module_dir ${gen} DIRECTORY)
|
||||||
|
add_cpp2py_module(NAME ${module_name} DIRECTORY ${module_dir})
|
||||||
|
add_library(${PROJECT_NAME}::${module_name} ALIAS ${module_name})
|
||||||
|
target_link_libraries(${module_name} ${PROJECT_NAME}_c triqs_py)
|
||||||
|
install(TARGETS ${module_name} DESTINATION ${PYTHON_LIB_DEST}/${module_dir})
|
||||||
|
endforeach()
|
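The loop above derives each cpp2py module name by stripping the `_desc.py` suffix from the generator file and taking the file's base name and directory. A minimal sketch of that name mapping in plain Python (illustration only; the actual build step is `add_cpp2py_module`):

```python
import os

# Hypothetical generator file as it would be picked up by the GLOB above.
gen = "app4triqs_module_desc.py"

gen = gen.replace("_desc.py", "")   # -> "app4triqs_module"
module_name = os.path.basename(gen) # NAME_WE equivalent
module_dir = os.path.dirname(gen)   # DIRECTORY equivalent ("" at the top level)

print(module_name)  # target name passed to add_cpp2py_module and installed below
```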
30
python/app4triqs/__init__.py
Normal file
@ -0,0 +1,30 @@
|
|||||||
|
################################################################################
|
||||||
|
#
|
||||||
|
# TRIQS: a Toolbox for Research in Interacting Quantum Systems
|
||||||
|
#
|
||||||
|
# Copyright (C) 2016-2018, N. Wentzell
|
||||||
|
# Copyright (C) 2018-2019, The Simons Foundation
|
||||||
|
# author: N. Wentzell
|
||||||
|
#
|
||||||
|
# TRIQS is free software: you can redistribute it and/or modify it under the
|
||||||
|
# terms of the GNU General Public License as published by the Free Software
|
||||||
|
# Foundation, either version 3 of the License, or (at your option) any later
|
||||||
|
# version.
|
||||||
|
#
|
||||||
|
# TRIQS is distributed in the hope that it will be useful, but WITHOUT ANY
|
||||||
|
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
|
||||||
|
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
|
||||||
|
# details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License along with
|
||||||
|
# TRIQS. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
#
|
||||||
|
################################################################################
|
||||||
|
|
||||||
|
r"""
|
||||||
|
DOC
|
||||||
|
|
||||||
|
"""
|
||||||
|
from .app4triqs_module import Toto, chain
|
||||||
|
|
||||||
|
__all__ = ['Toto', 'chain']
|
76
python/app4triqs/app4triqs_module_desc.py
Normal file
@ -0,0 +1,76 @@
|
|||||||
|
# Generated automatically using the command :
|
||||||
|
# c++2py ../../c++/app4triqs/app4triqs.hpp -p --members_read_only -N app4triqs -a app4triqs -m app4triqs_module -o app4triqs_module --moduledoc="The app4triqs python module" -C triqs --cxxflags="-std=c++17" --target_file_only
|
||||||
|
from cpp2py.wrap_generator import *
|
||||||
|
|
||||||
|
# The module
|
||||||
|
module = module_(full_name = "app4triqs_module", doc = r"The app4triqs python module", app_name = "app4triqs")
|
||||||
|
|
||||||
|
# Imports
|
||||||
|
|
||||||
|
# Add here all includes
|
||||||
|
module.add_include("app4triqs/app4triqs.hpp")
|
||||||
|
|
||||||
|
# Add here anything to add in the C++ code at the start, e.g. namespace using
|
||||||
|
module.add_preamble("""
|
||||||
|
#include <cpp2py/converters/string.hpp>
|
||||||
|
|
||||||
|
using namespace app4triqs;
|
||||||
|
""")
|
||||||
|
|
||||||
|
|
||||||
|
# The class toto
|
||||||
|
c = class_(
|
||||||
|
py_type = "Toto", # name of the python class
|
||||||
|
c_type = "app4triqs::toto", # name of the C++ class
|
||||||
|
doc = r"""A very useful and important class""", # doc of the C++ class
|
||||||
|
hdf5 = True,
|
||||||
|
arithmetic = ("add_only"),
|
||||||
|
comparisons = "==",
|
||||||
|
serializable = "tuple"
|
||||||
|
)
|
||||||
|
|
||||||
|
c.add_constructor("""()""", doc = r"""""")
|
||||||
|
|
||||||
|
c.add_constructor("""(int i_)""", doc = r"""Construct from integer
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
i_
|
||||||
|
a scalar :math:`G(\tau)`""")
|
||||||
|
|
||||||
|
c.add_method("""int f (int u)""",
|
||||||
|
doc = r"""A simple function with :math:`G(\tau)`
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
u
|
||||||
|
Nothing useful""")
|
||||||
|
|
||||||
|
c.add_method("""std::string hdf5_format ()""",
|
||||||
|
is_static = True,
|
||||||
|
doc = r"""HDF5""")
|
||||||
|
|
||||||
|
c.add_property(name = "i",
|
||||||
|
getter = cfunction("int get_i ()"),
|
||||||
|
doc = r"""Simple accessor""")
|
||||||
|
|
||||||
|
module.add_class(c)
|
||||||
|
|
||||||
|
module.add_function ("int app4triqs::chain (int i, int j)", doc = r"""Chain digits of two integers
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
i
|
||||||
|
The first integer
|
||||||
|
|
||||||
|
j
|
||||||
|
The second integer
|
||||||
|
|
||||||
|
Returns
|
||||||
|
-------
|
||||||
|
out
|
||||||
|
An integer containing the digits of both i and j""")
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
module.generate_code()
|
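This descriptor is consumed by cpp2py to generate the `app4triqs_module` extension, which `__init__.py` above re-exports as `Toto` and `chain`. A minimal usage sketch of the resulting Python API, mirroring the declarations in this file and the test in `test/python/Basic.py` further below:

```python
from app4triqs import Toto, chain

a = Toto(0)    # constructors declared above: () and (int i_)
b = Toto(2)

c = a + b      # arithmetic = ("add_only")
assert c == b  # comparisons = "=="
assert b.i == 2                    # read-only property "i" (get_i)
assert chain(111, 222) == 111222   # chains the digits of both integers
```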
32
python/app4triqs/version.py.in
Normal file
@ -0,0 +1,32 @@
|
|||||||
|
################################################################################
|
||||||
|
#
|
||||||
|
# TRIQS: a Toolbox for Research in Interacting Quantum Systems
|
||||||
|
#
|
||||||
|
# Copyright (C) 2016-2018, N. Wentzell
|
||||||
|
# Copyright (C) 2018-2019, Simons Foundation
|
||||||
|
# author: N. Wentzell
|
||||||
|
#
|
||||||
|
# TRIQS is free software: you can redistribute it and/or modify it under the
|
||||||
|
# terms of the GNU General Public License as published by the Free Software
|
||||||
|
# Foundation, either version 3 of the License, or (at your option) any later
|
||||||
|
# version.
|
||||||
|
#
|
||||||
|
# TRIQS is distributed in the hope that it will be useful, but WITHOUT ANY
|
||||||
|
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
|
||||||
|
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
|
||||||
|
# details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License along with
|
||||||
|
# TRIQS. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
#
|
||||||
|
################################################################################
|
||||||
|
|
||||||
|
version = "@PROJECT_VERSION@"
|
||||||
|
triqs_hash = "@TRIQS_GIT_HASH@"
|
||||||
|
@PROJECT_NAME@_hash = "@PROJECT_GIT_HASH@"
|
||||||
|
|
||||||
|
def show_version():
|
||||||
|
print("\nYou are using @PROJECT_NAME@ version %s\n"%version)
|
||||||
|
|
||||||
|
def show_git_hash():
|
||||||
|
print("\nYou are using @PROJECT_NAME@ git hash %s based on triqs git hash %s\n"%("@PROJECT_GIT_HASH@", triqs_hash))
|
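`configure_file(version.py.in version.py)` earlier in `python/app4triqs/CMakeLists.txt` fills in the `@...@` placeholders at configure time. For the skeleton project name `app4triqs` and hypothetical version and git-hash values, the generated `version.py` would look roughly like this:

```python
version = "3.1.0"             # @PROJECT_VERSION@
triqs_hash = "abc123..."      # @TRIQS_GIT_HASH@ (hypothetical)
app4triqs_hash = "def456..."  # @PROJECT_GIT_HASH@ (hypothetical)

def show_version():
    print("\nYou are using app4triqs version %s\n" % version)

def show_git_hash():
    print("\nYou are using app4triqs git hash %s based on triqs git hash %s\n" % ("def456...", triqs_hash))
```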
4
requirements.txt
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
# Required python packages for this application (these should also be added to Dockerfile for Jenkins)
|
||||||
|
mako
|
||||||
|
numpy
|
||||||
|
scipy
|
37
share/CMakeLists.txt
Normal file
@ -0,0 +1,37 @@
|
|||||||
|
add_subdirectory(cmake)
|
||||||
|
|
||||||
|
if(NOT IS_SUBPROJECT
|
||||||
|
AND NOT CMAKE_INSTALL_PREFIX STREQUAL TRIQS_ROOT
|
||||||
|
AND NOT CMAKE_INSTALL_PREFIX STREQUAL "/usr"
|
||||||
|
AND NOT CMAKE_INSTALL_PREFIX STREQUAL "/usr/local"
|
||||||
|
)
|
||||||
|
|
||||||
|
if(PythonSupport)
|
||||||
|
set(EXPORT_PYTHON_PATH "export PYTHONPATH=${CMAKE_INSTALL_PREFIX}/${CPP2PY_PYTHON_LIB_DEST_ROOT}:$PYTHONPATH")
|
||||||
|
set(MODFILE_PYTHON_PATH "prepend-path PYTHONPATH $root/${CPP2PY_PYTHON_LIB_DEST_ROOT}")
|
||||||
|
endif()
|
||||||
|
|
||||||
|
configure_file(${PROJECT_NAME}.modulefile.in ${PROJECT_NAME}.modulefile @ONLY)
|
||||||
|
configure_file(${PROJECT_NAME}vars.sh.in ${PROJECT_NAME}vars.sh @ONLY)
|
||||||
|
|
||||||
|
install(
|
||||||
|
FILES
|
||||||
|
${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}.modulefile
|
||||||
|
${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}vars.sh
|
||||||
|
DESTINATION share/${PROJECT_NAME}
|
||||||
|
)
|
||||||
|
|
||||||
|
message(STATUS "*********************************************************************************")
|
||||||
|
message(STATUS "* Custom install Location. Use: ")
|
||||||
|
message(STATUS "* ")
|
||||||
|
message(STATUS "* source ${CMAKE_INSTALL_PREFIX}/share/${PROJECT_NAME}/${PROJECT_NAME}vars.sh ")
|
||||||
|
message(STATUS "* ")
|
||||||
|
message(STATUS "* to set up the environment variables ")
|
||||||
|
if(DEFINED ENV{MODULEPATH})
|
||||||
|
message(STATUS "* ")
|
||||||
|
message(STATUS "* Consider copying ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}.modulefile ")
|
||||||
|
message(STATUS "* into your environment module directories ")
|
||||||
|
endif()
|
||||||
|
message(STATUS "*********************************************************************************")
|
||||||
|
|
||||||
|
endif()
|
39
share/app4triqs.modulefile.in
Normal file
@ -0,0 +1,39 @@
|
|||||||
|
#%Module1.0
|
||||||
|
#
|
||||||
|
# To be installed as ${MODULEPATH}/@PROJECT_NAME@/@PROJECT_VERSION@
|
||||||
|
|
||||||
|
set name @PROJECT_NAME@
|
||||||
|
set version @PROJECT_VERSION@
|
||||||
|
set root @CMAKE_INSTALL_PREFIX@
|
||||||
|
set git_hash @PROJECT_GIT_HASH@
|
||||||
|
|
||||||
|
set url "https://github.com/TRIQS/@PROJECT_NAME@"
|
||||||
|
set description "An example application using cpp2py and triqs."
|
||||||
|
|
||||||
|
module-whatis "$description"
|
||||||
|
|
||||||
|
proc ModulesHelp { } {
|
||||||
|
global description url version git_hash
|
||||||
|
puts stderr "Description: $description"
|
||||||
|
puts stderr "URL: $url"
|
||||||
|
puts stderr "Version: $version"
|
||||||
|
puts stderr "Git hash: $git_hash"
|
||||||
|
}
|
||||||
|
|
||||||
|
# You may need to edit the next line if the triqs module
|
||||||
|
# is installed under a different name in your setup.
|
||||||
|
prereq triqs/@TRIQS_VERSION@
|
||||||
|
|
||||||
|
# Only one version of @PROJECT_NAME@ can be loaded at a time
|
||||||
|
conflict $name
|
||||||
|
|
||||||
|
setenv @PROJECT_NAME@_ROOT $root
|
||||||
|
setenv @PROJECT_NAME@_VERSION $version
|
||||||
|
setenv @PROJECT_NAME@_GIT_HASH $git_hash
|
||||||
|
|
||||||
|
prepend-path PATH $root/bin
|
||||||
|
prepend-path CPLUS_INCLUDE_PATH $root/include
|
||||||
|
prepend-path LIBRARY_PATH $root/lib
|
||||||
|
prepend-path LD_LIBRARY_PATH $root/lib
|
||||||
|
prepend-path CMAKE_PREFIX_PATH $root
|
||||||
|
@MODFILE_PYTHON_PATH@
|
8
share/app4triqsvars.sh.in
Normal file
@ -0,0 +1,8 @@
|
|||||||
|
# Source this in your environment.
|
||||||
|
|
||||||
|
export @PROJECT_NAME@_ROOT=@CMAKE_INSTALL_PREFIX@
|
||||||
|
|
||||||
|
export PATH=@CMAKE_INSTALL_PREFIX@/bin:$PATH
|
||||||
|
export LD_LIBRARY_PATH=@CMAKE_INSTALL_PREFIX@/lib:$LD_LIBRARY_PATH
|
||||||
|
export CMAKE_PREFIX_PATH=@CMAKE_INSTALL_PREFIX@:$CMAKE_PREFIX_PATH
|
||||||
|
@EXPORT_PYTHON_PATH@
|
10
share/cmake/CMakeLists.txt
Normal file
@ -0,0 +1,10 @@
|
|||||||
|
configure_file(${PROJECT_NAME}-config.cmake.in ${PROJECT_NAME}-config.cmake @ONLY)
|
||||||
|
configure_file(${PROJECT_NAME}-config-version.cmake.in ${PROJECT_NAME}-config-version.cmake @ONLY)
|
||||||
|
install(
|
||||||
|
FILES
|
||||||
|
${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}-config.cmake
|
||||||
|
${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}-config-version.cmake
|
||||||
|
DESTINATION lib/cmake/${PROJECT_NAME}
|
||||||
|
)
|
||||||
|
|
||||||
|
install(EXPORT ${PROJECT_NAME}-targets NAMESPACE ${PROJECT_NAME}:: DESTINATION lib/cmake/${PROJECT_NAME})
|
37
share/cmake/Modules/FindSphinx.cmake
Normal file
@ -0,0 +1,37 @@
|
|||||||
|
# Copyright Olivier Parcollet 2017.
|
||||||
|
# Distributed under the Boost Software License, Version 1.0.
|
||||||
|
# (See accompanying file LICENSE_1_0.txt or copy at
|
||||||
|
# http://www.boost.org/LICENSE_1_0.txt)
|
||||||
|
|
||||||
|
# This module looks for the Sphinx documentation tool
|
||||||
|
# and defines a function that prepares the Makefile for sphinx-build
|
||||||
|
|
||||||
|
find_program(SPHINXBUILD_EXECUTABLE
|
||||||
|
NAMES sphinx-build
|
||||||
|
PATHS /usr/bin /opt/local/bin /usr/local/bin #opt/sphinx-doc/bin
|
||||||
|
PATH_SUFFIXES bin
|
||||||
|
)
|
||||||
|
|
||||||
|
if (NOT SPHINXBUILD_EXECUTABLE)
|
||||||
|
message(FATAL_ERROR "I cannot find sphinx to build the triqs documentation")
|
||||||
|
endif()
|
||||||
|
|
||||||
|
execute_process(
|
||||||
|
COMMAND "${SPHINXBUILD_EXECUTABLE}" --version
|
||||||
|
OUTPUT_VARIABLE SPHINXBUILD_VERSION
|
||||||
|
ERROR_VARIABLE SPHINXBUILD_VERSION
|
||||||
|
)
|
||||||
|
if (SPHINXBUILD_VERSION MATCHES "[Ss]phinx.* ([0-9]+\\.[0-9]+(\\.|b)[0-9]+)")
|
||||||
|
set (SPHINXBUILD_VERSION "${CMAKE_MATCH_1}")
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if (SPHINXBUILD_VERSION VERSION_EQUAL 1.6.3)
|
||||||
|
message(FATAL_ERROR "sphinx-build found at ${SPHINXBUILD_EXECUTABLE} but version 1.6.3 has a bug. Upgrade sphinx.")
|
||||||
|
else()
|
||||||
|
message(STATUS "sphinx-build program found at ${SPHINXBUILD_EXECUTABLE} with version ${SPHINXBUILD_VERSION}")
|
||||||
|
endif ()
|
||||||
|
|
||||||
|
include(FindPackageHandleStandardArgs)
|
||||||
|
FIND_PACKAGE_HANDLE_STANDARD_ARGS(Sphinx DEFAULT_MSG SPHINXBUILD_EXECUTABLE)
|
||||||
|
|
||||||
|
mark_as_advanced( SPHINXBUILD_EXECUTABLE )
|
13
share/cmake/app4triqs-config-version.cmake.in
Normal file
@ -0,0 +1,13 @@
|
|||||||
|
set(PACKAGE_VERSION @PROJECT_VERSION@)
|
||||||
|
|
||||||
|
if (PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
|
||||||
|
set(PACKAGE_VERSION_EXACT TRUE)
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if (PACKAGE_FIND_VERSION VERSION_LESS PACKAGE_VERSION)
|
||||||
|
set(PACKAGE_VERSION_COMPATIBLE TRUE)
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if (PACKAGE_FIND_VERSION VERSION_GREATER PACKAGE_VERSION)
|
||||||
|
set(PACKAGE_VERSION_UNSUITABLE TRUE)
|
||||||
|
endif()
|
40
share/cmake/app4triqs-config.cmake.in
Normal file
@ -0,0 +1,40 @@
|
|||||||
|
# This file allows other CMake Projects to find us
|
||||||
|
# We provide general project information
|
||||||
|
# and reestablish the exported CMake Targets
|
||||||
|
|
||||||
|
# Multiple inclusion guard
|
||||||
|
if(NOT @PROJECT_NAME@_FOUND)
|
||||||
|
set(@PROJECT_NAME@_FOUND TRUE)
|
||||||
|
set_property(GLOBAL PROPERTY @PROJECT_NAME@_FOUND TRUE)
|
||||||
|
|
||||||
|
# version
|
||||||
|
set(@PROJECT_NAME@_VERSION @PROJECT_VERSION@ CACHE STRING "@PROJECT_NAME@ version")
|
||||||
|
set(@PROJECT_NAME@_GIT_HASH @PROJECT_GIT_HASH@ CACHE STRING "@PROJECT_NAME@ git hash")
|
||||||
|
|
||||||
|
# Root of the installation
|
||||||
|
set(@PROJECT_NAME@_ROOT @CMAKE_INSTALL_PREFIX@ CACHE STRING "@PROJECT_NAME@ root directory")
|
||||||
|
|
||||||
|
## Find the target dependencies
|
||||||
|
#function(find_dep)
|
||||||
|
# get_property(${ARGV0}_FOUND GLOBAL PROPERTY ${ARGV0}_FOUND)
|
||||||
|
# if(NOT ${ARGV0}_FOUND)
|
||||||
|
# find_package(${ARGN} REQUIRED HINTS @CMAKE_INSTALL_PREFIX@)
|
||||||
|
# endif()
|
||||||
|
#endfunction()
|
||||||
|
#find_dep(depname 1.0)
|
||||||
|
|
||||||
|
# Include the exported targets of this project
|
||||||
|
include(@CMAKE_INSTALL_PREFIX@/lib/cmake/@PROJECT_NAME@/@PROJECT_NAME@-targets.cmake)
|
||||||
|
|
||||||
|
message(STATUS "Found @PROJECT_NAME@-config.cmake with version @PROJECT_VERSION@, hash = @PROJECT_GIT_HASH@, root = @CMAKE_INSTALL_PREFIX@")
|
||||||
|
|
||||||
|
# Was the Project built with Documentation?
|
||||||
|
set(@PROJECT_NAME@_WITH_DOCUMENTATION @Build_Documentation@ CACHE BOOL "Was @PROJECT_NAME@ built with documentation?")
|
||||||
|
|
||||||
|
# Was the Project built with PythonSupport?
|
||||||
|
set(@PROJECT_NAME@_WITH_PYTHON_SUPPORT @PythonSupport@ CACHE BOOL "Was @PROJECT_NAME@ built with python support?")
|
||||||
|
if(@PythonSupport@)
|
||||||
|
set(@PROJECT_NAME@_MODULE_DIR @CMAKE_INSTALL_PREFIX@/@CPP2PY_PYTHON_LIB_DEST_ROOT@ CACHE STRING "The @PROJECT_NAME@ python module directory")
|
||||||
|
endif()
|
||||||
|
|
||||||
|
endif()
|
123
share/cmake/extract_flags.cmake
Normal file
@ -0,0 +1,123 @@
|
|||||||
|
# Copyright (c) 2019-2020 Simons Foundation
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
# https://www.gnu.org/licenses/gpl-3.0.txt
|
||||||
|
# Author: Nils Wentzell
|
||||||
|
|
||||||
|
# Recursively fetch all targets that the interface of a target depends upon
|
||||||
|
macro(get_all_interface_targets name target)
|
||||||
|
get_property(TARGET_LINK_LIBRARIES TARGET ${target} PROPERTY INTERFACE_LINK_LIBRARIES)
|
||||||
|
foreach(lib IN LISTS TARGET_LINK_LIBRARIES)
|
||||||
|
if(TARGET ${lib})
|
||||||
|
# Append to list
|
||||||
|
list(APPEND ${name}_INTERFACE_TARGETS ${lib})
|
||||||
|
# Recurse into target dependencies
|
||||||
|
get_all_interface_targets(${name} ${lib})
|
||||||
|
endif()
|
||||||
|
endforeach()
|
||||||
|
endmacro()
|
||||||
|
|
||||||
|
# Extract the property from the target and recursively from all targets it depends upon
|
||||||
|
macro(get_property_recursive)
|
||||||
|
cmake_parse_arguments(get_property_recursive "" "TARGET" "PROPERTY" ${ARGN})
|
||||||
|
set(target ${get_property_recursive_TARGET})
|
||||||
|
set(property ${get_property_recursive_PROPERTY})
|
||||||
|
get_all_interface_targets(${target} ${target})
|
||||||
|
foreach(t IN LISTS ${target}_INTERFACE_TARGETS ITEMS ${target})
|
||||||
|
get_property(p TARGET ${t} PROPERTY ${property})
|
||||||
|
list(APPEND ${ARGV0} ${p})
|
||||||
|
endforeach()
|
||||||
|
# Clean duplicates and any occurrence of '/usr/include' dirs
|
||||||
|
if(${ARGV0})
|
||||||
|
list(REMOVE_DUPLICATES ${ARGV0})
|
||||||
|
list(REMOVE_ITEM ${ARGV0} /usr/include)
|
||||||
|
endif()
|
||||||
|
endmacro()
|
||||||
|
|
||||||
|
# Recursively fetch all compiler flags attached to the interface of a target
|
||||||
|
macro(extract_flags)
|
||||||
|
|
||||||
|
cmake_parse_arguments(ARG "BUILD_INTERFACE" "" "" ${ARGN})
|
||||||
|
|
||||||
|
set(target ${ARGV0})
|
||||||
|
unset(${target}_CXXFLAGS)
|
||||||
|
unset(${target}_LDFLAGS)
|
||||||
|
|
||||||
|
get_property_recursive(opts TARGET ${target} PROPERTY INTERFACE_COMPILE_OPTIONS)
|
||||||
|
foreach(opt ${opts})
|
||||||
|
set(${target}_LDFLAGS "${${target}_LDFLAGS} ${opt}")
|
||||||
|
set(${target}_CXXFLAGS "${${target}_CXXFLAGS} ${opt}")
|
||||||
|
endforeach()
|
||||||
|
|
||||||
|
get_property_recursive(cxx_features TARGET ${target} PROPERTY INTERFACE_COMPILE_FEATURES)
|
||||||
|
if(cxx_std_20 IN_LIST cxx_features)
|
||||||
|
set(${target}_CXXFLAGS "${${target}_CXXFLAGS} -std=c++20")
|
||||||
|
elseif(cxx_std_17 IN_LIST cxx_features)
|
||||||
|
set(${target}_CXXFLAGS "${${target}_CXXFLAGS} -std=c++17")
|
||||||
|
endif()
|
||||||
|
|
||||||
|
get_property_recursive(defs TARGET ${target} PROPERTY INTERFACE_COMPILE_DEFINITIONS)
|
||||||
|
foreach(def ${defs})
|
||||||
|
set(${target}_CXXFLAGS "${${target}_CXXFLAGS} -D${def}")
|
||||||
|
endforeach()
|
||||||
|
|
||||||
|
get_property_recursive(inc_dirs TARGET ${target} PROPERTY INTERFACE_INCLUDE_DIRECTORIES)
|
||||||
|
get_property_recursive(sys_inc_dirs TARGET ${target} PROPERTY INTERFACE_SYSTEM_INCLUDE_DIRECTORIES)
|
||||||
|
if(inc_dirs)
|
||||||
|
list(REMOVE_ITEM sys_inc_dirs ${inc_dirs})
|
||||||
|
endif()
|
||||||
|
foreach(dir ${inc_dirs})
|
||||||
|
set(${target}_CXXFLAGS "${${target}_CXXFLAGS} -I${dir}")
|
||||||
|
endforeach()
|
||||||
|
foreach(dir ${sys_inc_dirs})
|
||||||
|
set(${target}_CXXFLAGS "${${target}_CXXFLAGS} -isystem${dir}")
|
||||||
|
endforeach()
|
||||||
|
|
||||||
|
get_property_recursive(libs TARGET ${target} PROPERTY INTERFACE_LINK_LIBRARIES)
|
||||||
|
foreach(lib ${libs})
|
||||||
|
if(NOT TARGET ${lib} AND NOT IS_DIRECTORY ${lib})
|
||||||
|
set(${target}_LDFLAGS "${${target}_LDFLAGS} ${lib}")
|
||||||
|
endif()
|
||||||
|
endforeach()
|
||||||
|
|
||||||
|
# ==== We have to replace generator expressions explicitly ====
|
||||||
|
|
||||||
|
if(ARG_BUILD_INTERFACE)
|
||||||
|
string(REGEX REPLACE "\\$<BUILD_INTERFACE:([^ ]*)>" "\\1" ${target}_LDFLAGS "${${target}_LDFLAGS}")
|
||||||
|
string(REGEX REPLACE "\\$<BUILD_INTERFACE:([^ ]*)>" "\\1" ${target}_CXXFLAGS "${${target}_CXXFLAGS}")
|
||||||
|
else()
|
||||||
|
string(REGEX REPLACE "\\$<INSTALL_INTERFACE:([^ ]*)>" "\\1" ${target}_LDFLAGS "${${target}_LDFLAGS}")
|
||||||
|
string(REGEX REPLACE "\\$<INSTALL_INTERFACE:([^ ]*)>" "\\1" ${target}_CXXFLAGS "${${target}_CXXFLAGS}")
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
|
||||||
|
string(REGEX REPLACE "\\$<\\$<CXX_COMPILER_ID:GNU>:([^ ]*)>" "\\1" ${target}_LDFLAGS "${${target}_LDFLAGS}")
|
||||||
|
string(REGEX REPLACE "\\$<\\$<CXX_COMPILER_ID:GNU>:([^ ]*)>" "\\1" ${target}_CXXFLAGS "${${target}_CXXFLAGS}")
|
||||||
|
elseif(CMAKE_CXX_COMPILER_ID STREQUAL "Clang")
|
||||||
|
string(REGEX REPLACE "\\$<\\$<CXX_COMPILER_ID:Clang>:([^ ]*)>" "\\1" ${target}_LDFLAGS "${${target}_LDFLAGS}")
|
||||||
|
string(REGEX REPLACE "\\$<\\$<CXX_COMPILER_ID:Clang>:([^ ]*)>" "\\1" ${target}_CXXFLAGS "${${target}_CXXFLAGS}")
|
||||||
|
elseif(CMAKE_CXX_COMPILER_ID STREQUAL "AppleClang")
|
||||||
|
string(REGEX REPLACE "\\$<\\$<CXX_COMPILER_ID:AppleClang>:([^ ]*)>" "\\1" ${target}_LDFLAGS "${${target}_LDFLAGS}")
|
||||||
|
string(REGEX REPLACE "\\$<\\$<CXX_COMPILER_ID:AppleClang>:([^ ]*)>" "\\1" ${target}_CXXFLAGS "${${target}_CXXFLAGS}")
|
||||||
|
endif()
|
||||||
|
|
||||||
|
# Remove all remaining generator expressions
|
||||||
|
string(REGEX REPLACE " [^ ]*\\$<[^ ]*:[^>]*>" "" ${target}_LDFLAGS "${${target}_LDFLAGS}")
|
||||||
|
string(REGEX REPLACE " [^ ]*\\$<[^ ]*:[^>]*>" "" ${target}_CXXFLAGS "${${target}_CXXFLAGS}")
|
||||||
|
|
||||||
|
# Filter out system directories from LDFLAGS and CXXFLAGS
|
||||||
|
string(REGEX REPLACE " -L/usr/lib " " " ${target}_LDFLAGS "${${target}_LDFLAGS}")
|
||||||
|
string(REGEX REPLACE " -I/usr/include " " " ${target}_CXXFLAGS "${${target}_CXXFLAGS}")
|
||||||
|
string(REGEX REPLACE " -isystem/usr/include " " " ${target}_CXXFLAGS "${${target}_CXXFLAGS}")
|
||||||
|
|
||||||
|
endmacro()
|
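The macro above removes CMake generator expressions from the collected flag strings with a series of regular-expression replacements. A small Python sketch of the same substitutions (illustration only; the real logic runs inside CMake via `string(REGEX REPLACE ...)`):

```python
import re

flags = "-O2 $<BUILD_INTERFACE:/src/include> $<$<CXX_COMPILER_ID:GNU>:-Wall>"

# Keep the payload of BUILD_INTERFACE expressions ...
flags = re.sub(r"\$<BUILD_INTERFACE:([^ ]*)>", r"\1", flags)
# ... resolve compiler-conditional expressions for the active compiler (GNU here) ...
flags = re.sub(r"\$<\$<CXX_COMPILER_ID:GNU>:([^ ]*)>", r"\1", flags)
# ... and drop any generator expression that is still left over.
flags = re.sub(r" [^ ]*\$<[^ ]*:[^>]*>", "", flags)

print(flags)  # -> "-O2 /src/include -Wall"
```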
45
share/replace_and_rename.py
Executable file
@ -0,0 +1,45 @@
|
|||||||
|
#!/usr/bin/env python2
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
import glob
|
||||||
|
|
||||||
|
if len(sys.argv) != 2:
|
||||||
|
print("Please pass the application name")
|
||||||
|
sys.exit()
|
||||||
|
|
||||||
|
app_name = str(sys.argv[1]).lower()
|
||||||
|
capital_name = app_name.upper()
|
||||||
|
|
||||||
|
# Move app4triqs directories if necessary
|
||||||
|
if os.path.isdir("c++/app4triqs"): os.rename("c++/app4triqs", "c++/" + app_name)
|
||||||
|
if os.path.isdir("python/app4triqs"): os.rename("python/app4triqs", "python/" + app_name)
|
||||||
|
|
||||||
|
# Ignore these files
|
||||||
|
ignore_lst = [".git/", "replace_and_rename.py", "squash_history.sh"]
|
||||||
|
|
||||||
|
# Find the root directory of app4triqs
|
||||||
|
app4triqs_root = os.path.abspath(os.path.dirname(__file__) + "/..")
|
||||||
|
|
||||||
|
# Recurse over all subdirectories and files
|
||||||
|
for root, dirs, files in os.walk(app4triqs_root):
|
||||||
|
|
||||||
|
for fname in files:
|
||||||
|
fpath = os.path.join(root, fname)
|
||||||
|
|
||||||
|
# Ignore certain files / directories
|
||||||
|
if any(it in fpath for it in ignore_lst): continue
|
||||||
|
|
||||||
|
if os.path.isfile(fpath):
|
||||||
|
# Rename files containing app4triqs in their filename
|
||||||
|
if "app4triqs" in fname:
|
||||||
|
new_fpath = os.path.join(root, fname.replace("app4triqs", app_name))
|
||||||
|
os.rename(fpath, new_fpath)
|
||||||
|
fpath = new_fpath
|
||||||
|
|
||||||
|
# Replace app4triqs and APP4TRIQS in all files
|
||||||
|
with open(fpath, 'r') as f:
|
||||||
|
s = f.read()
|
||||||
|
if "app4triqs" in s or "APP4TRIQS" in s:
|
||||||
|
with open(fpath, 'w') as f:
|
||||||
|
f.write(s.replace("app4triqs", app_name).replace("APP4TRIQS", capital_name))
|
6
share/squash_history.sh
Executable file
@ -0,0 +1,6 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
|
git reset $(git commit-tree HEAD\^{tree} -m "Initialize project from github.com/triqs/app4triqs@$(git rev-parse --short HEAD)")
|
||||||
|
git merge --allow-unrelated-histories -s ours HEAD@{1} -m "Track app4triqs skeleton"
|
||||||
|
git remote rm origin
|
||||||
|
git remote add app4triqs_remote https://github.com/triqs/app4triqs
|
5
test/CMakeLists.txt
Normal file
@ -0,0 +1,5 @@
|
|||||||
|
add_subdirectory(c++)
|
||||||
|
|
||||||
|
if(PythonSupport)
|
||||||
|
add_subdirectory(python)
|
||||||
|
endif()
|
35
test/c++/CMakeLists.txt
Normal file
@ -0,0 +1,35 @@
|
|||||||
|
# Copy h5 files to binary dir
|
||||||
|
file(GLOB_RECURSE all_h5_ref_files RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} *.ref.h5)
|
||||||
|
foreach(file ${all_h5_ref_files})
|
||||||
|
configure_file(${file} ${file} COPYONLY)
|
||||||
|
endforeach()
|
||||||
|
|
||||||
|
# List of all tests
|
||||||
|
file(GLOB_RECURSE all_tests RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} *.cpp)
|
||||||
|
|
||||||
|
foreach(test ${all_tests})
|
||||||
|
get_filename_component(test_name ${test} NAME_WE)
|
||||||
|
get_filename_component(test_dir ${test} DIRECTORY)
|
||||||
|
add_executable(${test_name} ${test})
|
||||||
|
target_link_libraries(${test_name} ${PROJECT_NAME}::${PROJECT_NAME}_c ${PROJECT_NAME}_warnings gtest_main)
|
||||||
|
set_property(TARGET ${test_name} PROPERTY RUNTIME_OUTPUT_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/${test_dir})
|
||||||
|
add_test(NAME ${test_name} COMMAND ${test_name} WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/${test_dir})
|
||||||
|
# Run clang-tidy if found
|
||||||
|
if(CLANG_TIDY_EXECUTABLE)
|
||||||
|
set_target_properties(${test_name} PROPERTIES CXX_CLANG_TIDY "${CLANG_TIDY_EXECUTABLE}")
|
||||||
|
endif()
|
||||||
|
# Run cppcheck if found
|
||||||
|
if(CPPCHECK_EXECUTABLE)
|
||||||
|
add_custom_command(
|
||||||
|
TARGET ${test_name}
|
||||||
|
COMMAND ${CPPCHECK_EXECUTABLE}
|
||||||
|
--enable=warning,style,performance,portability
|
||||||
|
--std=c++20
|
||||||
|
--template=gcc
|
||||||
|
--verbose
|
||||||
|
--force
|
||||||
|
--quiet
|
||||||
|
${CMAKE_CURRENT_SOURCE_DIR}/${test}
|
||||||
|
)
|
||||||
|
endif()
|
||||||
|
endforeach()
|
30
test/c++/basic.cpp
Normal file
@ -0,0 +1,30 @@
|
|||||||
|
#include <triqs/test_tools/gfs.hpp>
|
||||||
|
#include <app4triqs/app4triqs.hpp>
|
||||||
|
|
||||||
|
using namespace app4triqs;
|
||||||
|
|
||||||
|
TEST(Toto, Add) { // NOLINT
|
||||||
|
|
||||||
|
toto a(0);
|
||||||
|
toto b(2);
|
||||||
|
|
||||||
|
auto c = a + b;
|
||||||
|
EXPECT_EQ(c, b); // NOLINT
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST(Toto, H5) { // NOLINT
|
||||||
|
|
||||||
|
toto a(0);
|
||||||
|
{ // Local scope for file
|
||||||
|
h5::file f("f.h5", 'w');
|
||||||
|
h5_write(f, "a", a);
|
||||||
|
}
|
||||||
|
|
||||||
|
toto a2;
|
||||||
|
{
|
||||||
|
h5::file f("f.h5", 'a');
|
||||||
|
h5_read(f, "a", a2);
|
||||||
|
}
|
||||||
|
|
||||||
|
EXPECT_EQ(a, a2); // NOLINT
|
||||||
|
}
|
50
test/python/Basic.py
Normal file
@ -0,0 +1,50 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
from app4triqs import Toto, chain
|
||||||
|
from h5 import *
|
||||||
|
from triqs.utility import mpi
|
||||||
|
|
||||||
|
class test_toto(unittest.TestCase):
|
||||||
|
|
||||||
|
def test_add(self):
|
||||||
|
|
||||||
|
a=Toto(0)
|
||||||
|
b=Toto(2)
|
||||||
|
|
||||||
|
c=a+b
|
||||||
|
self.assertEqual(c, b)
|
||||||
|
|
||||||
|
|
||||||
|
def test_h5(self):
|
||||||
|
|
||||||
|
a=Toto(0)
|
||||||
|
with HDFArchive("f.h5",'w') as A:
|
||||||
|
A["a"] = a
|
||||||
|
with HDFArchive("f.h5",'r') as A:
|
||||||
|
a_read = A["a"]
|
||||||
|
self.assertEqual(a, a_read)
|
||||||
|
|
||||||
|
|
||||||
|
def test_mpi(self):
|
||||||
|
|
||||||
|
a=Toto(0)
|
||||||
|
|
||||||
|
if mpi.is_master_node():
|
||||||
|
a=Toto(1)
|
||||||
|
a = mpi.bcast(a)
|
||||||
|
|
||||||
|
self.assertEqual(a, Toto(1))
|
||||||
|
|
||||||
|
class test_chain(unittest.TestCase):
|
||||||
|
|
||||||
|
def test_chain(self):
|
||||||
|
|
||||||
|
i = 111
|
||||||
|
j = 222
|
||||||
|
ij = chain(i,j)
|
||||||
|
self.assertEqual(ij, 111222)
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
unittest.main()
|
15
test/python/CMakeLists.txt
Normal file
@ -0,0 +1,15 @@
|
|||||||
|
# Copy h5 files to binary dir
|
||||||
|
file(GLOB_RECURSE all_h5_ref_files RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} *.ref.h5)
|
||||||
|
foreach(file ${all_h5_ref_files})
|
||||||
|
configure_file(${file} ${file} COPYONLY)
|
||||||
|
endforeach()
|
||||||
|
|
||||||
|
# List of all tests
|
||||||
|
set(all_tests Basic)
|
||||||
|
|
||||||
|
foreach(test ${all_tests})
|
||||||
|
get_filename_component(test_name ${test} NAME_WE)
|
||||||
|
get_filename_component(test_dir ${test} DIRECTORY)
|
||||||
|
add_test(NAME Py_${test_name} COMMAND ${TRIQS_PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/${test_dir}/${test_name}.py WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/${test_dir})
|
||||||
|
set_property(TEST Py_${test_name} APPEND PROPERTY ENVIRONMENT PYTHONPATH=${PROJECT_BINARY_DIR}/python:$ENV{PYTHONPATH} ${SANITIZER_RT_PRELOAD})
|
||||||
|
endforeach()
|