Mirror of https://github.com/triqs/dft_tools, synced 2025-01-03 01:55:56 +01:00

Commit eaa7c43551: Merge remote-tracking branch 'app4triqs-remote/unstable' into unstable
.github/workflows/build.yml (vendored): 41 changes
@@ -5,6 +5,19 @@ on:
    branches: [ unstable ]
  pull_request:
    branches: [ unstable ]
+  workflow_call:
+  workflow_dispatch:

+env:
+  CMAKE_C_COMPILER_LAUNCHER: ccache
+  CMAKE_CXX_COMPILER_LAUNCHER: ccache
+  CCACHE_COMPILERCHECK: content
+  CCACHE_BASEDIR: ${{ github.workspace }}
+  CCACHE_DIR: ${{ github.workspace }}/.ccache
+  CCACHE_MAXSIZE: 500M
+  CCACHE_SLOPPINESS: pch_defines,time_macros,include_file_mtime,include_file_ctime
+  CCACHE_COMPRESS: "1"
+  CCACHE_COMPRESSLEVEL: "1"
+
jobs:
  build:
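The environment block added above drives ccache for both the C and C++ builds. The commented copy below is editorial: the values are exactly the ones added by the commit, while the comments summarize ccache's documented behaviour and are not part of the diff itself.

env:
  CMAKE_C_COMPILER_LAUNCHER: ccache    # run every C compile through ccache
  CMAKE_CXX_COMPILER_LAUNCHER: ccache  # same for C++
  CCACHE_COMPILERCHECK: content        # hash the compiler binary itself, not its mtime, for cache keys
  CCACHE_BASEDIR: ${{ github.workspace }}      # hash paths relative to the workspace, so caches survive different checkout paths
  CCACHE_DIR: ${{ github.workspace }}/.ccache  # keep the cache inside the workspace so the cache actions can pick it up
  CCACHE_MAXSIZE: 500M                 # cap the local cache size
  CCACHE_SLOPPINESS: pch_defines,time_macros,include_file_mtime,include_file_ctime  # ignore PCH defines, __TIME__/__DATE__ and header timestamps when hashing
  CCACHE_COMPRESS: "1"                 # compress cached objects ...
  CCACHE_COMPRESSLEVEL: "1"            # ... at the fastest compression level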
@@ -16,12 +29,19 @@ jobs:
          - {os: ubuntu-22.04, cc: gcc-12, cxx: g++-12}
          - {os: ubuntu-22.04, cc: clang-15, cxx: clang++-15}
          - {os: macos-12, cc: gcc-12, cxx: g++-12}
-          - {os: macos-12, cc: /usr/local/opt/llvm/bin/clang, cxx: /usr/local/opt/llvm/bin/clang++}
+          - {os: macos-12, cc: clang, cxx: clang++}

    runs-on: ${{ matrix.os }}

    steps:
-    - uses: actions/checkout@v2
+    - uses: actions/checkout@v4

+    - uses: actions/cache/restore@v3
+      with:
+        path: ${{ env.CCACHE_DIR }}
+        key: ccache-${{ matrix.os }}-${{ matrix.cc }}-${{ github.run_id }}
+        restore-keys:
+          ccache-${{ matrix.os }}-${{ matrix.cc }}-
+
    - name: Install ubuntu dependencies
      if: matrix.os == 'ubuntu-22.04'
@@ -30,6 +50,7 @@ jobs:
        sudo apt-get install lsb-release wget software-properties-common &&
        wget -O /tmp/llvm.sh https://apt.llvm.org/llvm.sh && sudo chmod +x /tmp/llvm.sh && sudo /tmp/llvm.sh 15 &&
        sudo apt-get install
+          ccache
          clang-15
          g++-12
          gfortran-12
@@ -63,13 +84,14 @@ jobs:
    - name: Install homebrew dependencies
      if: matrix.os == 'macos-12'
      run: |
-        brew install gcc@12 llvm boost fftw hdf5 open-mpi openblas
+        brew install ccache gcc@12 llvm boost fftw hdf5 open-mpi openblas
        pip3 install mako numpy scipy mpi4py
        pip3 install -r requirements.txt

    - name: add clang cxxflags
      if: ${{ contains(matrix.cxx, 'clang') }}
-      run:
+      run: |
+        echo "PATH=/usr/local/opt/llvm/bin:$PATH" >> $GITHUB_ENV
        echo "CXXFLAGS=-stdlib=libc++" >> $GITHUB_ENV

    - name: Build & Install TRIQS
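Two details in this hunk are easy to miss: "run:" has to become "run: |" because the step now contains more than one command, and appending "NAME=value" lines to $GITHUB_ENV is how a step exports environment variables (here the Homebrew LLVM path and the libc++ flag) to every later step of the same job. A minimal, self-contained sketch of that mechanism, using a hypothetical demo job rather than anything from this workflow:

jobs:
  demo:
    runs-on: ubuntu-latest
    steps:
      - name: export variables for later steps
        run: |
          # every line appended here becomes part of the environment of later steps
          echo "PATH=/opt/mytoolchain/bin:$PATH" >> $GITHUB_ENV
          echo "CXXFLAGS=-stdlib=libc++" >> $GITHUB_ENV
      - name: use them
        run: echo "PATH=$PATH CXXFLAGS=$CXXFLAGS"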
@@ -102,7 +124,18 @@ jobs:
    - name: Test app4triqs
      env:
        DYLD_FALLBACK_LIBRARY_PATH: /usr/local/opt/llvm/lib
+        OPENBLAS_NUM_THREADS: "1"
      run: |
        source $HOME/install/share/triqs/triqsvars.sh
        cd build
        ctest -j2 --output-on-failure
+
+    - name: ccache statistics
+      if: always()
+      run: ccache -sv
+
+    - uses: actions/cache/save@v3
+      if: always()
+      with:
+        path: ${{ env.CCACHE_DIR }}
+        key: ccache-${{ matrix.os }}-${{ matrix.cc }}-${{ github.run_id }}
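Taken together, the additions to this workflow implement the usual ccache pattern for GitHub Actions: restore the most recent cache for the OS/compiler pair, let ccache intercept compilations through the CMake launcher variables, print statistics, and save the (possibly updated) cache even when a step failed. The sketch below condenses that skeleton into a stand-alone job; it is an illustration, not this repository's workflow: the cache key is simplified (runner.os instead of the matrix.os/matrix.cc pair used above) and the actual configure/build/test steps are left out.

name: ccache-skeleton
on: [push]

env:
  CMAKE_C_COMPILER_LAUNCHER: ccache
  CMAKE_CXX_COMPILER_LAUNCHER: ccache
  CCACHE_DIR: ${{ github.workspace }}/.ccache

jobs:
  build:
    runs-on: ubuntu-22.04
    steps:
      - uses: actions/checkout@v4

      - name: install ccache
        run: sudo apt-get update && sudo apt-get install -y ccache

      # Restore the newest matching cache; the run_id suffix makes every run save a
      # fresh entry while restore-keys falls back to the latest previous one.
      - uses: actions/cache/restore@v3
        with:
          path: ${{ env.CCACHE_DIR }}
          key: ccache-${{ runner.os }}-${{ github.run_id }}
          restore-keys: ccache-${{ runner.os }}-

      # ... cmake configure, build and ctest steps would go here ...

      - name: ccache statistics
        if: always()
        run: ccache -sv

      # Save even if earlier steps failed, so partially built objects still warm
      # the cache for the next run.
      - uses: actions/cache/save@v3
        if: always()
        with:
          path: ${{ env.CCACHE_DIR }}
          key: ccache-${{ runner.os }}-${{ github.run_id }}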
CMakeLists.txt

@@ -21,6 +21,9 @@

cmake_minimum_required(VERSION 3.20 FATAL_ERROR)
cmake_policy(VERSION 3.20)
+if(POLICY CMP0144)
+  cmake_policy(SET CMP0144 NEW)
+endif()

# ############
# Define Project
@@ -41,7 +44,7 @@ if(NOT ${PROJECT_VERSION_MAJOR}.${PROJECT_VERSION_MINOR} VERSION_EQUAL ${TRIQS_V
  message(FATAL_ERROR "The ${PROJECT_NAME} version ${PROJECT_VERSION} is not compatible with TRIQS version ${TRIQS_VERSION}.")
endif()

-# Default Install directory to TRIQS_ROOT if not given or invalid.
+# Default Install directory to TRIQS_ROOT if not given or when provided as relative path.
if(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT OR (NOT IS_ABSOLUTE ${CMAKE_INSTALL_PREFIX}))
  message(STATUS "No install prefix given (or invalid). Defaulting to TRIQS_ROOT")
  set(CMAKE_INSTALL_PREFIX ${TRIQS_ROOT} CACHE PATH "default install path" FORCE)
@@ -84,12 +87,12 @@ if(Build_Tests)
  enable_testing()
endif()

-# Build static libraries by default
-option(BUILD_SHARED_LIBS "Enable compilation of shared libraries" OFF)
-
# ############
# Global Compilation Settings

+# Build static libraries by default
+option(BUILD_SHARED_LIBS "Enable compilation of shared libraries" OFF)
+
# Export the list of compile-commands into compile_commands.json
set(CMAKE_EXPORT_COMPILE_COMMANDS ON)

@@ -121,6 +124,9 @@ target_compile_options(${PROJECT_NAME}_warnings
  $<$<CXX_COMPILER_ID:IntelLLVM>:-Wno-tautological-constant-compare>
)

+# Provide GNU Installation directories
+include(GNUInstallDirs)
+
# #############
# Build Project

@@ -156,6 +162,9 @@ add_subdirectory(bin)
# Additional configuration files
add_subdirectory(share)

+# add packaging for automatic Versioning
+add_subdirectory(packaging)
+
# #############
# Debian Package

Jenkinsfile (vendored): 8 changes
@@ -24,8 +24,8 @@ properties([
def platforms = [:]

/****************** linux builds (in docker) */
-/* Each platform must have a cooresponding Dockerfile.PLATFORM in triqs/packaging */
-def dockerPlatforms = ["ubuntu-clang", "ubuntu-gcc", "sanitize"]
+/* Each platform must have a corresponding Dockerfile.PLATFORM in triqs/packaging */
+def dockerPlatforms = ["ubuntu-clang", "ubuntu-gcc", "ubuntu-intel", "sanitize"]
/* .each is currently broken in jenkins */
for (int i = 0; i < dockerPlatforms.size(); i++) {
  def platform = dockerPlatforms[i]
@@ -42,10 +42,10 @@ for (int i = 0; i < dockerPlatforms.size(); i++) {
      if (platform == documentationPlatform)
        args = '-DBuild_Documentation=1'
      else if (platform == "sanitize")
-        args = '-DASAN=ON -DUBSAN=ON'
+        args = '-DASAN=ON -DUBSAN=ON -DCMAKE_BUILD_TYPE=RelWithDebInfo'
      def img = docker.build("flatironinstitute/${dockerName}:${env.BRANCH_NAME}-${env.STAGE_NAME}", "--build-arg APPNAME=${projectName} --build-arg BUILD_ID=${env.BUILD_TAG} --build-arg CMAKE_ARGS='${args}' .")
      catchError(buildResult: 'UNSTABLE', stageResult: 'UNSTABLE') {
-        img.inside() {
+        img.inside("--shm-size=4gb") {
          sh "make -C \$BUILD/${projectName} test CTEST_OUTPUT_ON_FAILURE=1"
        }
      }
c/CMakeLists.txt

@@ -21,7 +21,7 @@ target_compile_definitions(${PROJECT_NAME}_c PUBLIC
)

# Install library and headers
-install(TARGETS ${PROJECT_NAME}_c EXPORT ${PROJECT_NAME}-targets DESTINATION lib)
+install(TARGETS ${PROJECT_NAME}_c EXPORT ${PROJECT_NAME}-targets DESTINATION ${CMAKE_INSTALL_LIBDIR})
install(DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} DESTINATION include FILES_MATCHING PATTERN "*.hpp" PATTERN "*.h")

deps/CMakeLists.txt (vendored): 2 changes

@@ -52,7 +52,7 @@ if(PythonSupport OR (NOT IS_SUBPROJECT AND Build_Documentation))
  external_dependency(Cpp2Py
    GIT_REPO https://github.com/TRIQS/cpp2py
    VERSION 2.0
-    GIT_TAG master
+    GIT_TAG unstable
    BUILD_ALWAYS
    EXCLUDE_FROM_ALL
  )
@@ -1,427 +0,0 @@ (whole file deleted; outline shown below, method bodies omitted)

"""Attempt to generate templates for module reference with Sphinx

XXX - we exclude extension modules

To include extension modules, first identify them as valid in the
``_uri2path`` method, then handle them in the ``_parse_module`` script.

We get functions and classes by parsing the text of .py files.
Alternatively we could import the modules for discovery, and we'd have
to do that for extension modules. This would involve changing the
``_parse_module`` method to work via import and introspection, and
might involve changing ``discover_modules`` (which determines which
files are modules, and therefore which module URIs will be passed to
``_parse_module``).

NOTE: this is a modified version of a script originally shipped with the
PyMVPA project, which we've adapted for NIPY use. PyMVPA is an MIT-licensed
project."""

# Stdlib imports
import os
import re

# Functions and classes
class ApiDocWriter:
    ''' Class for automatic detection and parsing of API docs
    to Sphinx-parsable reST format'''

    # only separating first two levels
    rst_section_levels = ['*', '=', '-', '~', '^']

    def __init__(self, package_name, rst_extension='.rst',
                 package_skip_patterns=None, module_skip_patterns=None): ...
    def get_package_name(self): ...
    def set_package_name(self, package_name): ...
    package_name = property(get_package_name, set_package_name, None,
                            'get/set package_name')
    def _get_object_name(self, line): ...
    def _uri2path(self, uri): ...
    def _path2uri(self, dirpath): ...
    def _parse_module(self, uri): ...
    def _parse_lines(self, linesource): ...
    def generate_api_doc(self, uri): ...
    def _survives_exclude(self, matchstr, match_type): ...
    def discover_modules(self): ...
    def write_modules_api(self, modules, outdir): ...
    def write_api_docs(self, outdir): ...
    def write_index(self, outdir, froot='gen', relative_to=None): ...
@@ -1,497 +0,0 @@ (whole file deleted; outline shown below, method bodies omitted)

"""Extract reference documentation from the NumPy source tree.

"""

import inspect
import textwrap
import re
import pydoc
from io import StringIO
from warnings import warn

class Reader:
    """A line-based string reader."""
    def __init__(self, data): ...
    def __getitem__(self, n): ...
    def reset(self): ...
    def read(self): ...
    def seek_next_non_empty_line(self): ...
    def eof(self): ...
    def read_to_condition(self, condition_func): ...
    def read_to_next_empty_line(self): ...
    def read_to_next_unindented_line(self): ...
    def peek(self, n=0): ...
    def is_empty(self): ...

class NumpyDocString:
    # parses a numpydoc-style docstring into sections ('Signature', 'Summary',
    # 'Extended Summary', 'Parameters', 'Returns', 'Raises', 'Warns',
    # 'Other Parameters', 'Attributes', 'Methods', 'See Also', 'Notes',
    # 'Warnings', 'References', 'Examples', 'index') and renders them back to text
    def __init__(self, docstring): ...
    def __getitem__(self, key): ...
    def __setitem__(self, key, val): ...
    def _is_at_section(self): ...
    def _strip(self, doc): ...
    def _read_to_next_section(self): ...
    def _read_sections(self): ...
    def _parse_param_list(self, content): ...
    def _parse_see_also(self, content): ...
    def _parse_index(self, section, content): ...
    def _parse_summary(self): ...
    def _parse(self): ...
    def _str_header(self, name, symbol='-'): ...
    def _str_indent(self, doc, indent=4): ...
    def _str_signature(self): ...
    def _str_summary(self): ...
    def _str_extended_summary(self): ...
    def _str_param_list(self, name): ...
    def _str_section(self, name): ...
    def _str_see_also(self, func_role): ...
    def _str_index(self): ...
    def __str__(self, func_role=''): ...

def indent(str, indent=4): ...
def dedent_lines(lines): ...
def header(text, style='-'): ...

class FunctionDoc(NumpyDocString):
    def __init__(self, func, role='func', doc=None): ...
    def get_func(self): ...
    def __str__(self): ...

class ClassDoc(NumpyDocString):
    def __init__(self, cls, modulename='', func_doc=FunctionDoc, doc=None): ...
    @property
    def methods(self): ...
    def __str__(self): ...
@@ -1,136 +0,0 @@ (whole file deleted; outline shown below, method bodies omitted)

import re, inspect, textwrap, pydoc
from docscrape import NumpyDocString, FunctionDoc, ClassDoc

class SphinxDocString(NumpyDocString):
    # re-renders the parsed numpydoc sections as Sphinx-flavoured reST
    # (field lists, .. rubric::, .. seealso::, .. warning::, .. index::)
    def _str_header(self, name, symbol='`'): ...
    def _str_field_list(self, name): ...
    def _str_indent(self, doc, indent=4): ...
    def _str_signature(self): ...
    def _str_summary(self): ...
    def _str_extended_summary(self): ...
    def _str_param_list(self, name): ...
    def _str_section(self, name): ...
    def _str_see_also(self, func_role): ...
    def _str_warnings(self): ...
    def _str_index(self): ...
    def _str_references(self): ...
    def __str__(self, indent=0, func_role="obj"): ...

class SphinxFunctionDoc(SphinxDocString, FunctionDoc):
    pass

class SphinxClassDoc(SphinxDocString, ClassDoc):
    pass

def get_doc_object(obj, what=None, doc=None): ...
@ -1,407 +0,0 @@
|
|||||||
"""
|
|
||||||
Defines a docutils directive for inserting inheritance diagrams.
|
|
||||||
|
|
||||||
Provide the directive with one or more classes or modules (separated
|
|
||||||
by whitespace). For modules, all of the classes in that module will
|
|
||||||
be used.
|
|
||||||
|
|
||||||
Example::
|
|
||||||
|
|
||||||
Given the following classes:
|
|
||||||
|
|
||||||
class A: pass
|
|
||||||
class B(A): pass
|
|
||||||
class C(A): pass
|
|
||||||
class D(B, C): pass
|
|
||||||
class E(B): pass
|
|
||||||
|
|
||||||
.. inheritance-diagram: D E
|
|
||||||
|
|
||||||
Produces a graph like the following:
|
|
||||||
|
|
||||||
A
|
|
||||||
/ \
|
|
||||||
B C
|
|
||||||
/ \ /
|
|
||||||
E D
|
|
||||||
|
|
||||||
The graph is inserted as a PNG+image map into HTML and a PDF in
|
|
||||||
LaTeX.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import inspect
|
|
||||||
import os
|
|
||||||
import re
|
|
||||||
import subprocess
|
|
||||||
try:
|
|
||||||
from hashlib import md5
|
|
||||||
except ImportError:
|
|
||||||
from md5 import md5
|
|
||||||
|
|
||||||
from docutils.nodes import Body, Element
|
|
||||||
from docutils.parsers.rst import directives
|
|
||||||
from sphinx.roles import xfileref_role
|
|
||||||
|
|
||||||
def my_import(name):
|
|
||||||
"""Module importer - taken from the python documentation.
|
|
||||||
|
|
||||||
This function allows importing names with dots in them."""
|
|
||||||
|
|
||||||
mod = __import__(name)
|
|
||||||
components = name.split('.')
|
|
||||||
for comp in components[1:]:
|
|
||||||
mod = getattr(mod, comp)
|
|
||||||
return mod
|
|
||||||
|
|
||||||
class DotException(Exception):
|
|
||||||
pass
|
|
||||||
|
|
||||||
class InheritanceGraph:
|
|
||||||
"""
|
|
||||||
Given a list of classes, determines the set of classes that
|
|
||||||
they inherit from all the way to the root "object", and then
|
|
||||||
is able to generate a graphviz dot graph from them.
|
|
||||||
"""
|
|
||||||
def __init__(self, class_names, show_builtins=False):
|
|
||||||
"""
|
|
||||||
*class_names* is a list of child classes to show bases from.
|
|
||||||
|
|
||||||
If *show_builtins* is True, then Python builtins will be shown
|
|
||||||
in the graph.
|
|
||||||
"""
|
|
||||||
self.class_names = class_names
|
|
||||||
self.classes = self._import_classes(class_names)
|
|
||||||
self.all_classes = self._all_classes(self.classes)
|
|
||||||
if len(self.all_classes) == 0:
|
|
||||||
raise ValueError("No classes found for inheritance diagram")
|
|
||||||
self.show_builtins = show_builtins
|
|
||||||
|
|
||||||
py_sig_re = re.compile(r'''^([\w.]*\.)? # class names
|
|
||||||
(\w+) \s* $ # optionally arguments
|
|
||||||
''', re.VERBOSE)
|
|
||||||
|
|
||||||
def _import_class_or_module(self, name):
|
|
||||||
"""
|
|
||||||
Import a class using its fully-qualified *name*.
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
path, base = self.py_sig_re.match(name).groups()
|
|
||||||
except:
|
|
||||||
raise ValueError(
|
|
||||||
"Invalid class or module '%s' specified for inheritance diagram" % name)
|
|
||||||
fullname = (path or '') + base
|
|
||||||
path = (path and path.rstrip('.'))
|
|
||||||
if not path:
|
|
||||||
path = base
|
|
||||||
try:
|
|
||||||
module = __import__(path, None, None, [])
|
|
||||||
# We must do an import of the fully qualified name. Otherwise if a
|
|
||||||
# subpackage 'a.b' is requested where 'import a' does NOT provide
|
|
||||||
# 'a.b' automatically, then 'a.b' will not be found below. This
|
|
||||||
# second call will force the equivalent of 'import a.b' to happen
|
|
||||||
# after the top-level import above.
|
|
||||||
my_import(fullname)
|
|
||||||
|
|
||||||
except ImportError:
|
|
||||||
raise ValueError(
|
|
||||||
"Could not import class or module '%s' specified for inheritance diagram" % name)
|
|
||||||
|
|
||||||
try:
|
|
||||||
todoc = module
|
|
||||||
for comp in fullname.split('.')[1:]:
|
|
||||||
todoc = getattr(todoc, comp)
|
|
||||||
except AttributeError:
|
|
||||||
raise ValueError(
|
|
||||||
"Could not find class or module '%s' specified for inheritance diagram" % name)
|
|
||||||
|
|
||||||
# If a class, just return it
|
|
||||||
if inspect.isclass(todoc):
|
|
||||||
return [todoc]
|
|
||||||
elif inspect.ismodule(todoc):
|
|
||||||
classes = []
|
|
||||||
for cls in list(todoc.__dict__.values()):
|
|
||||||
if inspect.isclass(cls) and cls.__module__ == todoc.__name__:
|
|
||||||
classes.append(cls)
|
|
||||||
return classes
|
|
||||||
raise ValueError(
|
|
||||||
"'%s' does not resolve to a class or module" % name)
|
|
||||||
|
|
||||||
def _import_classes(self, class_names):
|
|
||||||
"""
|
|
||||||
Import a list of classes.
|
|
||||||
"""
|
|
||||||
classes = []
|
|
||||||
for name in class_names:
|
|
||||||
classes.extend(self._import_class_or_module(name))
|
|
||||||
return classes
|
|
||||||
|
|
||||||
def _all_classes(self, classes):
|
|
||||||
"""
|
|
||||||
Return a list of all classes that are ancestors of *classes*.
|
|
||||||
"""
|
|
||||||
all_classes = {}
|
|
||||||
|
|
||||||
def recurse(cls):
|
|
||||||
all_classes[cls] = None
|
|
||||||
for c in cls.__bases__:
|
|
||||||
if c not in all_classes:
|
|
||||||
recurse(c)
|
|
||||||
|
|
||||||
for cls in classes:
|
|
||||||
recurse(cls)
|
|
||||||
|
|
||||||
return list(all_classes.keys())
|
|
||||||
|
|
||||||
def class_name(self, cls, parts=0):
|
|
||||||
"""
|
|
||||||
Given a class object, return a fully-qualified name. This
|
|
||||||
works for things I've tested in matplotlib so far, but may not
|
|
||||||
be completely general.
|
|
||||||
"""
|
|
||||||
module = cls.__module__
|
|
||||||
if module == '__builtin__':
|
|
||||||
fullname = cls.__name__
|
|
||||||
else:
|
|
||||||
fullname = "%s.%s" % (module, cls.__name__)
|
|
||||||
if parts == 0:
|
|
||||||
return fullname
|
|
||||||
name_parts = fullname.split('.')
|
|
||||||
return '.'.join(name_parts[-parts:])
|
|
||||||
|
|
||||||
def get_all_class_names(self):
|
|
||||||
"""
|
|
||||||
Get all of the class names involved in the graph.
|
|
||||||
"""
|
|
||||||
return [self.class_name(x) for x in self.all_classes]
|
|
||||||
|
|
||||||
# These are the default options for graphviz
|
|
||||||
default_graph_options = {
|
|
||||||
"rankdir": "LR",
|
|
||||||
"size": '"8.0, 12.0"'
|
|
||||||
}
|
|
||||||
default_node_options = {
|
|
||||||
"shape": "box",
|
|
||||||
"fontsize": 10,
|
|
||||||
"height": 0.25,
|
|
||||||
"fontname": "Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",
|
|
||||||
"style": '"setlinewidth(0.5)"'
|
|
||||||
}
|
|
||||||
default_edge_options = {
|
|
||||||
"arrowsize": 0.5,
|
|
||||||
"style": '"setlinewidth(0.5)"'
|
|
||||||
}
|
|
||||||
|
|
||||||
def _format_node_options(self, options):
|
|
||||||
return ','.join(["%s=%s" % x for x in list(options.items())])
|
|
||||||
def _format_graph_options(self, options):
|
|
||||||
return ''.join(["%s=%s;\n" % x for x in list(options.items())])
|
|
||||||
|
|
||||||
    def generate_dot(self, fd, name, parts=0, urls={},
                     graph_options={}, node_options={},
                     edge_options={}):
        """
        Generate a graphviz dot graph from the classes that
        were passed in to __init__.

        *fd* is a Python file-like object to write to.

        *name* is the name of the graph

        *urls* is a dictionary mapping class names to http urls

        *graph_options*, *node_options*, *edge_options* are
        dictionaries containing key/value pairs to pass on as graphviz
        properties.
        """
        g_options = self.default_graph_options.copy()
        g_options.update(graph_options)
        n_options = self.default_node_options.copy()
        n_options.update(node_options)
        e_options = self.default_edge_options.copy()
        e_options.update(edge_options)

        fd.write('digraph %s {\n' % name)
        fd.write(self._format_graph_options(g_options))

        for cls in self.all_classes:
            if not self.show_builtins and cls in list(__builtins__.values()):
                continue

            name = self.class_name(cls, parts)

            # Write the node
            this_node_options = n_options.copy()
            url = urls.get(self.class_name(cls))
            if url is not None:
                this_node_options['URL'] = '"%s"' % url
            fd.write(' "%s" [%s];\n' %
                     (name, self._format_node_options(this_node_options)))

            # Write the edges
            for base in cls.__bases__:
                if not self.show_builtins and base in list(__builtins__.values()):
                    continue

                base_name = self.class_name(base, parts)
                fd.write(' "%s" -> "%s" [%s];\n' %
                         (base_name, name,
                          self._format_node_options(e_options)))
        fd.write('}\n')

    def run_dot(self, args, name, parts=0, urls={},
                graph_options={}, node_options={}, edge_options={}):
        """
        Run graphviz 'dot' over this graph, returning whatever 'dot'
        writes to stdout.

        *args* will be passed along as commandline arguments.

        *name* is the name of the graph

        *urls* is a dictionary mapping class names to http urls

        Raises DotException for any of the many os and
        installation-related errors that may occur.
        """
        try:
            # universal_newlines makes stdin/stdout text streams, so the
            # strings written by generate_dot work under Python 3.
            dot = subprocess.Popen(['dot'] + list(args),
                                   stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                                   close_fds=True, universal_newlines=True)
        except OSError:
            raise DotException("Could not execute 'dot'.  Are you sure you have 'graphviz' installed?")
        except ValueError:
            raise DotException("'dot' called with invalid arguments")
        except Exception:
            raise DotException("Unexpected error calling 'dot'")

        self.generate_dot(dot.stdin, name, parts, urls, graph_options,
                          node_options, edge_options)
        dot.stdin.close()
        result = dot.stdout.read()
        returncode = dot.wait()
        if returncode != 0:
            raise DotException("'dot' returned the errorcode %d" % returncode)
        return result

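# --- Usage sketch (added for illustration; not part of the original file) ---
# Assuming the InheritanceGraph above is constructed from a list of
# fully-qualified class names (as the directive below does), rendering a
# stand-alone SVG could look roughly like this.  Requires graphviz's 'dot'
# on the PATH; the class name and output file used here are only examples.
def _example_render_svg(outfile='inheritance_example.svg'):
    graph = InheritanceGraph(['matplotlib.figure.Figure'])
    svg = graph.run_dot(['-Tsvg'], 'example_diagram')
    with open(outfile, 'w') as fh:
        fh.write(svg)
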
class inheritance_diagram(Body, Element):
    """
    A docutils node to use as a placeholder for the inheritance
    diagram.
    """
    pass


def inheritance_diagram_directive(name, arguments, options, content, lineno,
                                  content_offset, block_text, state,
                                  state_machine):
    """
    Run when the inheritance_diagram directive is first encountered.
    """
    node = inheritance_diagram()

    class_names = arguments

    # Create a graph starting with the list of classes
    graph = InheritanceGraph(class_names)

    # Create xref nodes for each target of the graph's image map and
    # add them to the doc tree so that Sphinx can resolve the
    # references to real URLs later.  These nodes will eventually be
    # removed from the doctree after we're done with them.
    for name in graph.get_all_class_names():
        refnodes, x = xfileref_role(
            'class', ':class:`%s`' % name, name, 0, state)
        node.extend(refnodes)
    # Store the graph object so we can use it to generate the
    # dot file later
    node['graph'] = graph
    # Store the original content for use as a hash
    node['parts'] = options.get('parts', 0)
    node['content'] = " ".join(class_names)
    return [node]

def get_graph_hash(node):
    # hashlib.md5 requires bytes under Python 3, so encode the hash input.
    return md5((node['content'] + str(node['parts'])).encode('utf-8')).hexdigest()[-10:]

def html_output_graph(self, node):
    """
    Output the graph for HTML.  This will insert a PNG with clickable
    image map.
    """
    graph = node['graph']
    parts = node['parts']

    graph_hash = get_graph_hash(node)
    name = "inheritance%s" % graph_hash
    path = '_images'
    dest_path = os.path.join(setup.app.builder.outdir, path)
    if not os.path.exists(dest_path):
        os.makedirs(dest_path)
    png_path = os.path.join(dest_path, name + ".png")
    path = setup.app.builder.imgpath

    # Create a mapping from fully-qualified class names to URLs.
    urls = {}
    for child in node:
        if child.get('refuri') is not None:
            urls[child['reftitle']] = child.get('refuri')
        elif child.get('refid') is not None:
            urls[child['reftitle']] = '#' + child.get('refid')

    # These arguments to dot will save a PNG file to disk and write
    # an HTML image map to stdout.
    image_map = graph.run_dot(['-Tpng', '-o%s' % png_path, '-Tcmapx'],
                              name, parts, urls)
    return ('<img src="%s/%s.png" usemap="#%s" class="inheritance"/>%s' %
            (path, name, name, image_map))

def latex_output_graph(self, node):
    """
    Output the graph for LaTeX.  This will insert a PDF.
    """
    graph = node['graph']
    parts = node['parts']

    graph_hash = get_graph_hash(node)
    name = "inheritance%s" % graph_hash
    dest_path = os.path.abspath(os.path.join(setup.app.builder.outdir, '_images'))
    if not os.path.exists(dest_path):
        os.makedirs(dest_path)
    pdf_path = os.path.abspath(os.path.join(dest_path, name + ".pdf"))

    graph.run_dot(['-Tpdf', '-o%s' % pdf_path],
                  name, parts, graph_options={'size': '"6.0,6.0"'})
    return '\n\\includegraphics{%s}\n\n' % pdf_path

def visit_inheritance_diagram(inner_func):
    """
    This is just a wrapper around html/latex_output_graph to make it
    easier to handle errors and insert warnings.
    """
    def visitor(self, node):
        try:
            content = inner_func(self, node)
        except DotException as e:
            # Insert the exception as a warning in the document
            warning = self.document.reporter.warning(str(e), line=node.line)
            warning.parent = node
            node.children = [warning]
        else:
            source = self.document.attributes['source']
            self.body.append(content)
            node.children = []
    return visitor


def do_nothing(self, node):
    pass

def setup(app):
    setup.app = app
    setup.confdir = app.confdir

    app.add_node(
        inheritance_diagram,
        latex=(visit_inheritance_diagram(latex_output_graph), do_nothing),
        html=(visit_inheritance_diagram(html_output_graph), do_nothing))
    app.add_directive(
        'inheritance-diagram', inheritance_diagram_directive,
        False, (1, 100, 0), parts=directives.nonnegative_int)
|
|
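# --- Usage sketch (added for illustration; not part of the original file) ---
# Once the extension is loaded from conf.py, the directive registered above
# can be used from reStructuredText, e.g.:
#
#   .. inheritance-diagram:: matplotlib.patches.Patch matplotlib.patches.Rectangle
#      :parts: 1
#
# The class names shown here are only examples; any importable classes work.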
@ -1,114 +0,0 @@
|
|||||||
"""reST directive for syntax-highlighting ipython interactive sessions.
|
|
||||||
|
|
||||||
XXX - See what improvements can be made based on the new (as of Sept 2009)
|
|
||||||
'pycon' lexer for the python console. At the very least it will give better
|
|
||||||
highlighted tracebacks.
|
|
||||||
"""
|
|
||||||
|
|
||||||
#-----------------------------------------------------------------------------
|
|
||||||
# Needed modules
|
|
||||||
|
|
||||||
# Standard library
|
|
||||||
import re
|
|
||||||
|
|
||||||
# Third party
|
|
||||||
from pygments.lexer import Lexer, do_insertions
|
|
||||||
from pygments.lexers.agile import (PythonConsoleLexer, PythonLexer,
|
|
||||||
PythonTracebackLexer)
|
|
||||||
from pygments.token import Comment, Generic
|
|
||||||
|
|
||||||
from sphinx import highlighting
|
|
||||||
|
|
||||||
#-----------------------------------------------------------------------------
|
|
||||||
# Global constants
|
|
||||||
line_re = re.compile('.*?\n')
|
|
||||||
|
|
||||||
#-----------------------------------------------------------------------------
|
|
||||||
# Code begins - classes and functions
|
|
||||||
|
|
||||||
class IPythonConsoleLexer(Lexer):
|
|
||||||
"""
|
|
||||||
For IPython console output or doctests, such as:
|
|
||||||
|
|
||||||
.. sourcecode:: ipython
|
|
||||||
|
|
||||||
In [1]: a = 'foo'
|
|
||||||
|
|
||||||
In [2]: a
|
|
||||||
Out[2]: 'foo'
|
|
||||||
|
|
||||||
In [3]: print a
|
|
||||||
foo
|
|
||||||
|
|
||||||
In [4]: 1 / 0
|
|
||||||
|
|
||||||
Notes:
|
|
||||||
|
|
||||||
- Tracebacks are not currently supported.
|
|
||||||
|
|
||||||
- It assumes the default IPython prompts, not customized ones.
|
|
||||||
"""
|
|
||||||
|
|
||||||
name = 'IPython console session'
|
|
||||||
aliases = ['ipython']
|
|
||||||
mimetypes = ['text/x-ipython-console']
|
|
||||||
    # Raw strings keep the regex escapes intact and avoid the invalid escape
    # sequence warnings that plain strings produce on Python 3.
    input_prompt = re.compile(r"(In \[[0-9]+\]: )|( \.\.\.+:)")
    output_prompt = re.compile(r"(Out\[[0-9]+\]: )|( \.\.\.+:)")
    continue_prompt = re.compile(r" \.\.\.+:")
    tb_start = re.compile(r"\-+")
|
|
||||||
|
|
||||||
def get_tokens_unprocessed(self, text):
|
|
||||||
pylexer = PythonLexer(**self.options)
|
|
||||||
tblexer = PythonTracebackLexer(**self.options)
|
|
||||||
|
|
||||||
curcode = ''
|
|
||||||
insertions = []
|
|
||||||
for match in line_re.finditer(text):
|
|
||||||
line = match.group()
|
|
||||||
input_prompt = self.input_prompt.match(line)
|
|
||||||
continue_prompt = self.continue_prompt.match(line.rstrip())
|
|
||||||
output_prompt = self.output_prompt.match(line)
|
|
||||||
if line.startswith("#"):
|
|
||||||
insertions.append((len(curcode),
|
|
||||||
[(0, Comment, line)]))
|
|
||||||
elif input_prompt is not None:
|
|
||||||
insertions.append((len(curcode),
|
|
||||||
[(0, Generic.Prompt, input_prompt.group())]))
|
|
||||||
curcode += line[input_prompt.end():]
|
|
||||||
elif continue_prompt is not None:
|
|
||||||
insertions.append((len(curcode),
|
|
||||||
[(0, Generic.Prompt, continue_prompt.group())]))
|
|
||||||
curcode += line[continue_prompt.end():]
|
|
||||||
elif output_prompt is not None:
|
|
||||||
                # Use the 'error' token for output.  We should probably make
                # our own token, but error is typically in a bright color like
                # red, so it works fine for our output prompts.
|
|
||||||
insertions.append((len(curcode),
|
|
||||||
[(0, Generic.Error, output_prompt.group())]))
|
|
||||||
curcode += line[output_prompt.end():]
|
|
||||||
else:
|
|
||||||
if curcode:
|
|
||||||
for item in do_insertions(insertions,
|
|
||||||
pylexer.get_tokens_unprocessed(curcode)):
|
|
||||||
yield item
|
|
||||||
curcode = ''
|
|
||||||
insertions = []
|
|
||||||
yield match.start(), Generic.Output, line
|
|
||||||
if curcode:
|
|
||||||
for item in do_insertions(insertions,
|
|
||||||
pylexer.get_tokens_unprocessed(curcode)):
|
|
||||||
yield item
|
|
||||||
|
|
||||||
|
|
||||||
def setup(app):
|
|
||||||
"""Setup as a sphinx extension."""
|
|
||||||
|
|
||||||
# This is only a lexer, so adding it below to pygments appears sufficient.
|
|
||||||
# But if somebody knows that the right API usage should be to do that via
|
|
||||||
# sphinx, by all means fix it here. At least having this setup.py
|
|
||||||
# suppresses the sphinx warning we'd get without it.
|
|
||||||
pass
|
|
||||||
|
|
||||||
#-----------------------------------------------------------------------------
|
|
||||||
# Register the extension as a valid pygments lexer
|
|
||||||
highlighting.lexers['ipython'] = IPythonConsoleLexer()
|
|
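# --- Usage sketch (added for illustration; not part of the original file) ---
# The lexer registered above can also be exercised directly with pygments,
# which is handy for checking the prompt regexes without running a full
# Sphinx build:
def _example_highlight():
    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    session = "In [1]: a = 'foo'\nIn [2]: a\nOut[2]: 'foo'\n"
    return highlight(session, IPythonConsoleLexer(), HtmlFormatter())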
@ -1,116 +0,0 @@
|
|||||||
"""
|
|
||||||
========
|
|
||||||
numpydoc
|
|
||||||
========
|
|
||||||
|
|
||||||
Sphinx extension that handles docstrings in the Numpy standard format. [1]
|
|
||||||
|
|
||||||
It will:
|
|
||||||
|
|
||||||
- Convert Parameters etc. sections to field lists.
|
|
||||||
- Convert See Also section to a See also entry.
|
|
||||||
- Renumber references.
|
|
||||||
- Extract the signature from the docstring, if it can't be determined otherwise.
|
|
||||||
|
|
||||||
.. [1] http://projects.scipy.org/scipy/numpy/wiki/CodingStyleGuidelines#docstring-standard
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
import os, re, pydoc
|
|
||||||
from docscrape_sphinx import get_doc_object, SphinxDocString
|
|
||||||
import inspect
|
|
||||||
|
|
||||||
def mangle_docstrings(app, what, name, obj, options, lines,
|
|
||||||
reference_offset=[0]):
|
|
||||||
if what == 'module':
|
|
||||||
# Strip top title
|
|
||||||
title_re = re.compile(r'^\s*[#*=]{4,}\n[a-z0-9 -]+\n[#*=]{4,}\s*',
|
|
||||||
re.I|re.S)
|
|
||||||
lines[:] = title_re.sub('', "\n".join(lines)).split("\n")
|
|
||||||
else:
|
|
||||||
doc = get_doc_object(obj, what, "\n".join(lines))
|
|
||||||
lines[:] = str(doc).split("\n")
|
|
||||||
|
|
||||||
if app.config.numpydoc_edit_link and hasattr(obj, '__name__') and \
|
|
||||||
obj.__name__:
|
|
||||||
if hasattr(obj, '__module__'):
|
|
||||||
v = dict(full_name="%s.%s" % (obj.__module__, obj.__name__))
|
|
||||||
else:
|
|
||||||
v = dict(full_name=obj.__name__)
|
|
||||||
lines += ['', '.. htmlonly::', '']
|
|
||||||
lines += [' %s' % x for x in
|
|
||||||
(app.config.numpydoc_edit_link % v).split("\n")]
|
|
||||||
|
|
||||||
# replace reference numbers so that there are no duplicates
|
|
||||||
references = []
|
|
||||||
for l in lines:
|
|
||||||
l = l.strip()
|
|
||||||
if l.startswith('.. ['):
|
|
||||||
try:
|
|
||||||
references.append(int(l[len('.. ['):l.index(']')]))
|
|
||||||
except ValueError:
|
|
||||||
print("WARNING: invalid reference in %s docstring" % name)
|
|
||||||
|
|
||||||
# Start renaming from the biggest number, otherwise we may
|
|
||||||
# overwrite references.
|
|
||||||
references.sort()
|
|
||||||
if references:
|
|
||||||
for i, line in enumerate(lines):
|
|
||||||
for r in references:
|
|
||||||
new_r = reference_offset[0] + r
|
|
||||||
lines[i] = lines[i].replace('[%d]_' % r,
|
|
||||||
'[%d]_' % new_r)
|
|
||||||
lines[i] = lines[i].replace('.. [%d]' % r,
|
|
||||||
'.. [%d]' % new_r)
|
|
||||||
|
|
||||||
reference_offset[0] += len(references)
|
|
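    # Illustration (added comment, not in the original): with reference_offset
    # at 3, a docstring line "See [1]_ for details." and its target ".. [1] ..."
    # are rewritten to "See [4]_ for details." and ".. [4] ...", so references
    # from different docstrings never collide on the same page.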
||||||
|
|
||||||
def mangle_signature(app, what, name, obj, options, sig, retann):
|
|
||||||
# Do not try to inspect classes that don't define `__init__`
|
|
||||||
if (inspect.isclass(obj) and
|
|
||||||
'initializes x; see ' in pydoc.getdoc(obj.__init__)):
|
|
||||||
return '', ''
|
|
||||||
|
|
||||||
if not (callable(obj) or hasattr(obj, '__argspec_is_invalid_')): return
|
|
||||||
if not hasattr(obj, '__doc__'): return
|
|
||||||
|
|
||||||
doc = SphinxDocString(pydoc.getdoc(obj))
|
|
||||||
if doc['Signature']:
|
|
||||||
sig = re.sub("^[^(]*", "", doc['Signature'])
|
|
||||||
return sig, ''
|
|
||||||
|
|
||||||
def initialize(app):
|
|
||||||
try:
|
|
||||||
app.connect('autodoc-process-signature', mangle_signature)
|
|
||||||
except:
|
|
||||||
monkeypatch_sphinx_ext_autodoc()
|
|
||||||
|
|
||||||
def setup(app, get_doc_object_=get_doc_object):
|
|
||||||
global get_doc_object
|
|
||||||
get_doc_object = get_doc_object_
|
|
||||||
|
|
||||||
app.connect('autodoc-process-docstring', mangle_docstrings)
|
|
||||||
app.connect('builder-inited', initialize)
|
|
||||||
app.add_config_value('numpydoc_edit_link', None, True)
|
|
||||||
|
|
||||||
#------------------------------------------------------------------------------
|
|
||||||
# Monkeypatch sphinx.ext.autodoc to accept argspecless autodocs (Sphinx < 0.5)
|
|
||||||
#------------------------------------------------------------------------------
|
|
||||||
|
|
||||||
def monkeypatch_sphinx_ext_autodoc():
|
|
||||||
global _original_format_signature
|
|
||||||
import sphinx.ext.autodoc
|
|
||||||
|
|
||||||
if sphinx.ext.autodoc.format_signature is our_format_signature:
|
|
||||||
return
|
|
||||||
|
|
||||||
print("[numpydoc] Monkeypatching sphinx.ext.autodoc ...")
|
|
||||||
_original_format_signature = sphinx.ext.autodoc.format_signature
|
|
||||||
sphinx.ext.autodoc.format_signature = our_format_signature
|
|
||||||
|
|
||||||
def our_format_signature(what, obj):
|
|
||||||
r = mangle_signature(None, what, None, obj, None, None, None)
|
|
||||||
if r is not None:
|
|
||||||
return r[0]
|
|
||||||
else:
|
|
||||||
return _original_format_signature(what, obj)
|
|
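# --- Usage sketch (added for illustration; not part of the original file) ---
# A minimal Sphinx conf.py fragment wiring this extension up, assuming the
# file is importable as 'numpydoc' (module name and path are assumptions):
#
#     import sys, os
#     sys.path.insert(0, os.path.abspath('sphinxext'))
#     extensions = ['numpydoc']
#     numpydoc_edit_link = False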
@ -1,933 +0,0 @@
|
|||||||
"""
|
|
||||||
A directive for including a Matplotlib plot in a Sphinx document
|
|
||||||
================================================================
|
|
||||||
|
|
||||||
This is a Sphinx extension providing a reStructuredText directive
|
|
||||||
``.. plot::`` for including a plot in a Sphinx document.
|
|
||||||
|
|
||||||
In HTML output, ``.. plot::`` will include a .png file with a link
|
|
||||||
to a high-res .png and .pdf. In LaTeX output, it will include a .pdf.
|
|
||||||
|
|
||||||
The plot content may be defined in one of three ways:
|
|
||||||
|
|
||||||
1. **A path to a source file** as the argument to the directive::
|
|
||||||
|
|
||||||
.. plot:: path/to/plot.py
|
|
||||||
|
|
||||||
When a path to a source file is given, the content of the
|
|
||||||
directive may optionally contain a caption for the plot::
|
|
||||||
|
|
||||||
.. plot:: path/to/plot.py
|
|
||||||
|
|
||||||
The plot caption.
|
|
||||||
|
|
||||||
Additionally, one may specify the name of a function to call (with
|
|
||||||
no arguments) immediately after importing the module::
|
|
||||||
|
|
||||||
.. plot:: path/to/plot.py plot_function1
|
|
||||||
|
|
||||||
2. Included as **inline content** to the directive::
|
|
||||||
|
|
||||||
.. plot::
|
|
||||||
|
|
||||||
import matplotlib.pyplot as plt
|
|
||||||
plt.plot([1, 2, 3], [4, 5, 6])
|
|
||||||
        plt.title("A plotting example")
|
|
||||||
|
|
||||||
3. Using **doctest** syntax::
|
|
||||||
|
|
||||||
.. plot::
|
|
||||||
|
|
||||||
A plotting example:
|
|
||||||
>>> import matplotlib.pyplot as plt
|
|
||||||
>>> plt.plot([1, 2, 3], [4, 5, 6])
|
|
||||||
|
|
||||||
Options
|
|
||||||
-------
|
|
||||||
|
|
||||||
The ``.. plot::`` directive supports the following options:
|
|
||||||
|
|
||||||
``:format:`` : {'python', 'doctest'}
|
|
||||||
The format of the input. If unset, the format is auto-detected.
|
|
||||||
|
|
||||||
``:include-source:`` : bool
|
|
||||||
Whether to display the source code. The default can be changed using
|
|
||||||
the ``plot_include_source`` variable in :file:`conf.py` (which itself
|
|
||||||
defaults to False).
|
|
||||||
|
|
||||||
``:show-source-link:`` : bool
|
|
||||||
Whether to show a link to the source in HTML. The default can be
|
|
||||||
changed using the ``plot_html_show_source_link`` variable in
|
|
||||||
:file:`conf.py` (which itself defaults to True).
|
|
||||||
|
|
||||||
``:context:`` : bool or str
|
|
||||||
If provided, the code will be run in the context of all previous plot
|
|
||||||
directives for which the ``:context:`` option was specified. This only
|
|
||||||
applies to inline code plot directives, not those run from files. If
|
|
||||||
the ``:context: reset`` option is specified, the context is reset
|
|
||||||
for this and future plots, and previous figures are closed prior to
|
|
||||||
running the code. ``:context: close-figs`` keeps the context but closes
|
|
||||||
previous figures before running the code.
|
|
||||||
|
|
||||||
``:nofigs:`` : bool
|
|
||||||
If specified, the code block will be run, but no figures will be
|
|
||||||
inserted. This is usually useful with the ``:context:`` option.
|
|
||||||
|
|
||||||
``:caption:`` : str
|
|
||||||
If specified, the option's argument will be used as a caption for the
|
|
||||||
figure. This overwrites the caption given in the content, when the plot
|
|
||||||
is generated from a file.
|
|
||||||
|
|
||||||
Additionally, this directive supports all the options of the `image directive
|
|
||||||
<https://docutils.sourceforge.io/docs/ref/rst/directives.html#image>`_,
|
|
||||||
except for ``:target:`` (since plot will add its own target). These include
|
|
||||||
``:alt:``, ``:height:``, ``:width:``, ``:scale:``, ``:align:`` and ``:class:``.
|
|
||||||
|
|
||||||
Configuration options
|
|
||||||
---------------------
|
|
||||||
|
|
||||||
The plot directive has the following configuration options:
|
|
||||||
|
|
||||||
plot_include_source
|
|
||||||
Default value for the include-source option (default: False).
|
|
||||||
|
|
||||||
plot_html_show_source_link
|
|
||||||
Whether to show a link to the source in HTML (default: True).
|
|
||||||
|
|
||||||
plot_pre_code
|
|
||||||
Code that should be executed before each plot. If None (the default),
|
|
||||||
it will default to a string containing::
|
|
||||||
|
|
||||||
import numpy as np
|
|
||||||
from matplotlib import pyplot as plt
|
|
||||||
|
|
||||||
plot_basedir
|
|
||||||
    Base directory to which ``plot::`` file names are relative.
|
|
||||||
If None or empty (the default), file names are relative to the
|
|
||||||
directory where the file containing the directive is.
|
|
||||||
|
|
||||||
plot_formats
|
|
||||||
File formats to generate (default: ['png', 'hires.png', 'pdf']).
|
|
||||||
List of tuples or strings::
|
|
||||||
|
|
||||||
[(suffix, dpi), suffix, ...]
|
|
||||||
|
|
||||||
that determine the file format and the DPI. For entries whose
|
|
||||||
DPI was omitted, sensible defaults are chosen. When passing from
|
|
||||||
the command line through sphinx_build the list should be passed as
|
|
||||||
suffix:dpi,suffix:dpi, ...
|
|
||||||
|
|
||||||
plot_html_show_formats
|
|
||||||
Whether to show links to the files in HTML (default: True).
|
|
||||||
|
|
||||||
plot_rcparams
|
|
||||||
A dictionary containing any non-standard rcParams that should
|
|
||||||
be applied before each plot (default: {}).
|
|
||||||
|
|
||||||
plot_apply_rcparams
|
|
||||||
By default, rcParams are applied when ``:context:`` option is not used
|
|
||||||
in a plot directive. If set, this configuration option overrides this
|
|
||||||
behavior and applies rcParams before each plot.
|
|
||||||
|
|
||||||
plot_working_directory
|
|
||||||
By default, the working directory will be changed to the directory of
|
|
||||||
the example, so the code can get at its data files, if any. Also its
|
|
||||||
path will be added to `sys.path` so it can import any helper modules
|
|
||||||
sitting beside it. This configuration option can be used to specify
|
|
||||||
a central directory (also added to `sys.path`) where data files and
|
|
||||||
helper modules for all code are located.
|
|
||||||
|
|
||||||
plot_template
|
|
||||||
Provide a customized template for preparing restructured text.
|
|
||||||
|
|
||||||
plot_srcset
|
|
||||||
Allow the srcset image option for responsive image resolutions. List of
|
|
||||||
strings with the multiplicative factors followed by an "x".
|
|
||||||
e.g. ["2.0x", "1.5x"]. "2.0x" will create a png with the default "png"
|
|
||||||
resolution from plot_formats, multiplied by 2. If plot_srcset is
|
|
||||||
specified, the plot directive uses the
|
|
||||||
:doc:`/api/sphinxext_figmpl_directive_api` (instead of the usual figure
|
|
||||||
directive) in the intermediary rst file that is generated.
|
|
||||||
The plot_srcset option is incompatible with *singlehtml* builds, and an
|
|
||||||
error will be raised.
|
|
||||||
|
|
||||||
Notes on how it works
|
|
||||||
---------------------
|
|
||||||
|
|
||||||
The plot directive runs the code it is given, either in the source file or the
|
|
||||||
code under the directive. The figure created (if any) is saved in the sphinx
|
|
||||||
build directory under a subdirectory named ``plot_directive``. It then creates
|
|
||||||
an intermediate rst file that calls a ``.. figure:`` directive (or
|
|
||||||
``.. figmpl::`` directive if ``plot_srcset`` is being used) and has links to
|
|
||||||
the ``*.png`` files in the ``plot_directive`` directory. These translations can
|
|
||||||
be customized by changing the *plot_template*. See the source of
|
|
||||||
:doc:`/api/sphinxext_plot_directive_api` for the templates defined in *TEMPLATE*
|
|
||||||
and *TEMPLATE_SRCSET*.
|
|
||||||
"""
|
|
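# --- Configuration sketch (added for illustration; not part of the original
# file).  A minimal conf.py fragment exercising the options documented above;
# the extension path is matplotlib's upstream location and may differ for a
# vendored copy, and all values shown are examples rather than defaults:
#
#     extensions = ['matplotlib.sphinxext.plot_directive']
#     plot_include_source = True
#     plot_html_show_source_link = False
#     plot_formats = [('png', 100), 'pdf']
#     plot_rcparams = {'savefig.bbox': 'tight'}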
||||||
|
|
||||||
import contextlib
|
|
||||||
import doctest
|
|
||||||
from io import StringIO
|
|
||||||
import itertools
|
|
||||||
import os
|
|
||||||
from os.path import relpath
|
|
||||||
from pathlib import Path
|
|
||||||
import re
|
|
||||||
import shutil
|
|
||||||
import sys
|
|
||||||
import textwrap
|
|
||||||
import traceback
|
|
||||||
|
|
||||||
from docutils.parsers.rst import directives, Directive
|
|
||||||
from docutils.parsers.rst.directives.images import Image
|
|
||||||
import jinja2 # Sphinx dependency.
|
|
||||||
|
|
||||||
from sphinx.errors import ExtensionError
|
|
||||||
|
|
||||||
import matplotlib
|
|
||||||
from matplotlib.backend_bases import FigureManagerBase
|
|
||||||
import matplotlib.pyplot as plt
|
|
||||||
from matplotlib import _pylab_helpers, cbook
|
|
||||||
|
|
||||||
matplotlib.use("agg")
|
|
||||||
|
|
||||||
__version__ = 2
|
|
||||||
|
|
||||||
|
|
||||||
# -----------------------------------------------------------------------------
|
|
||||||
# Registration hook
|
|
||||||
# -----------------------------------------------------------------------------
|
|
||||||
|
|
||||||
|
|
||||||
def _option_boolean(arg):
|
|
||||||
if not arg or not arg.strip():
|
|
||||||
# no argument given, assume used as a flag
|
|
||||||
return True
|
|
||||||
elif arg.strip().lower() in ('no', '0', 'false'):
|
|
||||||
return False
|
|
||||||
elif arg.strip().lower() in ('yes', '1', 'true'):
|
|
||||||
return True
|
|
||||||
else:
|
|
||||||
raise ValueError(f'{arg!r} unknown boolean')
|
|
||||||
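# Illustration (added, not in the original): _option_boolean('') -> True (bare
# flag), _option_boolean('no') -> False, _option_boolean('True') -> True, and
# anything else raises ValueError.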
|
|
||||||
|
|
||||||
def _option_context(arg):
|
|
||||||
if arg in [None, 'reset', 'close-figs']:
|
|
||||||
return arg
|
|
||||||
raise ValueError("Argument should be None or 'reset' or 'close-figs'")
|
|
||||||
|
|
||||||
|
|
||||||
def _option_format(arg):
|
|
||||||
return directives.choice(arg, ('python', 'doctest'))
|
|
||||||
|
|
||||||
|
|
||||||
def mark_plot_labels(app, document):
|
|
||||||
"""
|
|
||||||
To make plots referenceable, we need to move the reference from the
|
|
||||||
"htmlonly" (or "latexonly") node to the actual figure node itself.
|
|
||||||
"""
|
|
||||||
for name, explicit in document.nametypes.items():
|
|
||||||
if not explicit:
|
|
||||||
continue
|
|
||||||
labelid = document.nameids[name]
|
|
||||||
if labelid is None:
|
|
||||||
continue
|
|
||||||
node = document.ids[labelid]
|
|
||||||
if node.tagname in ('html_only', 'latex_only'):
|
|
||||||
for n in node:
|
|
||||||
if n.tagname == 'figure':
|
|
||||||
sectname = name
|
|
||||||
for c in n:
|
|
||||||
if c.tagname == 'caption':
|
|
||||||
sectname = c.astext()
|
|
||||||
break
|
|
||||||
|
|
||||||
node['ids'].remove(labelid)
|
|
||||||
node['names'].remove(name)
|
|
||||||
n['ids'].append(labelid)
|
|
||||||
n['names'].append(name)
|
|
||||||
document.settings.env.labels[name] = \
|
|
||||||
document.settings.env.docname, labelid, sectname
|
|
||||||
break
|
|
||||||
|
|
||||||
|
|
||||||
class PlotDirective(Directive):
|
|
||||||
"""The ``.. plot::`` directive, as documented in the module's docstring."""
|
|
||||||
|
|
||||||
has_content = True
|
|
||||||
required_arguments = 0
|
|
||||||
optional_arguments = 2
|
|
||||||
final_argument_whitespace = False
|
|
||||||
option_spec = {
|
|
||||||
'alt': directives.unchanged,
|
|
||||||
'height': directives.length_or_unitless,
|
|
||||||
'width': directives.length_or_percentage_or_unitless,
|
|
||||||
'scale': directives.nonnegative_int,
|
|
||||||
'align': Image.align,
|
|
||||||
'class': directives.class_option,
|
|
||||||
'include-source': _option_boolean,
|
|
||||||
'show-source-link': _option_boolean,
|
|
||||||
'format': _option_format,
|
|
||||||
'context': _option_context,
|
|
||||||
'nofigs': directives.flag,
|
|
||||||
'caption': directives.unchanged,
|
|
||||||
}
|
|
||||||
|
|
||||||
def run(self):
|
|
||||||
"""Run the plot directive."""
|
|
||||||
try:
|
|
||||||
return run(self.arguments, self.content, self.options,
|
|
||||||
self.state_machine, self.state, self.lineno)
|
|
||||||
except Exception as e:
|
|
||||||
raise self.error(str(e))
|
|
||||||
|
|
||||||
|
|
||||||
def _copy_css_file(app, exc):
|
|
||||||
if exc is None and app.builder.format == 'html':
|
|
||||||
src = cbook._get_data_path('plot_directive/plot_directive.css')
|
|
||||||
dst = app.outdir / Path('_static')
|
|
||||||
dst.mkdir(exist_ok=True)
|
|
||||||
# Use copyfile because we do not want to copy src's permissions.
|
|
||||||
shutil.copyfile(src, dst / Path('plot_directive.css'))
|
|
||||||
|
|
||||||
|
|
||||||
def setup(app):
|
|
||||||
setup.app = app
|
|
||||||
setup.config = app.config
|
|
||||||
setup.confdir = app.confdir
|
|
||||||
app.add_directive('plot', PlotDirective)
|
|
||||||
app.add_config_value('plot_pre_code', None, True)
|
|
||||||
app.add_config_value('plot_include_source', False, True)
|
|
||||||
app.add_config_value('plot_html_show_source_link', True, True)
|
|
||||||
app.add_config_value('plot_formats', ['png', 'hires.png', 'pdf'], True)
|
|
||||||
app.add_config_value('plot_basedir', None, True)
|
|
||||||
app.add_config_value('plot_html_show_formats', True, True)
|
|
||||||
app.add_config_value('plot_rcparams', {}, True)
|
|
||||||
app.add_config_value('plot_apply_rcparams', False, True)
|
|
||||||
app.add_config_value('plot_working_directory', None, True)
|
|
||||||
app.add_config_value('plot_template', None, True)
|
|
||||||
app.add_config_value('plot_srcset', [], True)
|
|
||||||
app.connect('doctree-read', mark_plot_labels)
|
|
||||||
app.add_css_file('plot_directive.css')
|
|
||||||
app.connect('build-finished', _copy_css_file)
|
|
||||||
metadata = {'parallel_read_safe': True, 'parallel_write_safe': True,
|
|
||||||
'version': matplotlib.__version__}
|
|
||||||
return metadata
|
|
||||||
|
|
||||||
|
|
||||||
# -----------------------------------------------------------------------------
|
|
||||||
# Doctest handling
|
|
||||||
# -----------------------------------------------------------------------------
|
|
||||||
|
|
||||||
|
|
||||||
def contains_doctest(text):
|
|
||||||
try:
|
|
||||||
# check if it's valid Python as-is
|
|
||||||
compile(text, '<string>', 'exec')
|
|
||||||
return False
|
|
||||||
except SyntaxError:
|
|
||||||
pass
|
|
||||||
r = re.compile(r'^\s*>>>', re.M)
|
|
||||||
m = r.search(text)
|
|
||||||
return bool(m)
|
|
||||||
|
|
||||||
|
|
||||||
def _split_code_at_show(text, function_name):
|
|
||||||
"""Split code at plt.show()."""
|
|
||||||
|
|
||||||
is_doctest = contains_doctest(text)
|
|
||||||
if function_name is None:
|
|
||||||
parts = []
|
|
||||||
part = []
|
|
||||||
for line in text.split("\n"):
|
|
||||||
if ((not is_doctest and line.startswith('plt.show(')) or
|
|
||||||
(is_doctest and line.strip() == '>>> plt.show()')):
|
|
||||||
part.append(line)
|
|
||||||
parts.append("\n".join(part))
|
|
||||||
part = []
|
|
||||||
else:
|
|
||||||
part.append(line)
|
|
||||||
if "\n".join(part).strip():
|
|
||||||
parts.append("\n".join(part))
|
|
||||||
else:
|
|
||||||
parts = [text]
|
|
||||||
return is_doctest, parts
|
|
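# Illustration (added, not in the original): for a non-doctest snippet
#
#     code = "plt.plot([1, 2])\nplt.show()\nplt.plot([3, 4])\nplt.show()\n"
#     is_doctest, parts = _split_code_at_show(code, None)
#
# is_doctest is False and parts has two entries, one per figure-producing
# block, each ending with its plt.show() line.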
||||||
|
|
||||||
|
|
||||||
# -----------------------------------------------------------------------------
|
|
||||||
# Template
|
|
||||||
# -----------------------------------------------------------------------------
|
|
||||||
|
|
||||||
_SOURCECODE = """
|
|
||||||
{{ source_code }}
|
|
||||||
|
|
||||||
.. only:: html
|
|
||||||
|
|
||||||
{% if src_name or (html_show_formats and not multi_image) %}
|
|
||||||
(
|
|
||||||
{%- if src_name -%}
|
|
||||||
:download:`Source code <{{ build_dir }}/{{ src_name }}>`
|
|
||||||
{%- endif -%}
|
|
||||||
{%- if html_show_formats and not multi_image -%}
|
|
||||||
{%- for img in images -%}
|
|
||||||
{%- for fmt in img.formats -%}
|
|
||||||
{%- if src_name or not loop.first -%}, {% endif -%}
|
|
||||||
:download:`{{ fmt }} <{{ build_dir }}/{{ img.basename }}.{{ fmt }}>`
|
|
||||||
{%- endfor -%}
|
|
||||||
{%- endfor -%}
|
|
||||||
{%- endif -%}
|
|
||||||
)
|
|
||||||
{% endif %}
|
|
||||||
"""
|
|
||||||
|
|
||||||
TEMPLATE_SRCSET = _SOURCECODE + """
|
|
||||||
{% for img in images %}
|
|
||||||
.. figure-mpl:: {{ build_dir }}/{{ img.basename }}.{{ default_fmt }}
|
|
||||||
{% for option in options -%}
|
|
||||||
{{ option }}
|
|
||||||
{% endfor %}
|
|
||||||
{%- if caption -%}
|
|
||||||
{{ caption }} {# appropriate leading whitespace added beforehand #}
|
|
||||||
{% endif -%}
|
|
||||||
{%- if srcset -%}
|
|
||||||
:srcset: {{ build_dir }}/{{ img.basename }}.{{ default_fmt }}
|
|
||||||
{%- for sr in srcset -%}
|
|
||||||
, {{ build_dir }}/{{ img.basename }}.{{ sr }}.{{ default_fmt }} {{sr}}
|
|
||||||
{%- endfor -%}
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
{% if html_show_formats and multi_image %}
|
|
||||||
(
|
|
||||||
{%- for fmt in img.formats -%}
|
|
||||||
{%- if not loop.first -%}, {% endif -%}
|
|
||||||
:download:`{{ fmt }} <{{ build_dir }}/{{ img.basename }}.{{ fmt }}>`
|
|
||||||
{%- endfor -%}
|
|
||||||
)
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
|
|
||||||
{% endfor %}
|
|
||||||
|
|
||||||
.. only:: not html
|
|
||||||
|
|
||||||
{% for img in images %}
|
|
||||||
.. figure-mpl:: {{ build_dir }}/{{ img.basename }}.*
|
|
||||||
{% for option in options -%}
|
|
||||||
{{ option }}
|
|
||||||
{% endfor -%}
|
|
||||||
|
|
||||||
{{ caption }} {# appropriate leading whitespace added beforehand #}
|
|
||||||
{% endfor %}
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
TEMPLATE = _SOURCECODE + """
|
|
||||||
|
|
||||||
{% for img in images %}
|
|
||||||
.. figure:: {{ build_dir }}/{{ img.basename }}.{{ default_fmt }}
|
|
||||||
{% for option in options -%}
|
|
||||||
{{ option }}
|
|
||||||
{% endfor %}
|
|
||||||
|
|
||||||
{% if html_show_formats and multi_image -%}
|
|
||||||
(
|
|
||||||
{%- for fmt in img.formats -%}
|
|
||||||
{%- if not loop.first -%}, {% endif -%}
|
|
||||||
:download:`{{ fmt }} <{{ build_dir }}/{{ img.basename }}.{{ fmt }}>`
|
|
||||||
{%- endfor -%}
|
|
||||||
)
|
|
||||||
{%- endif -%}
|
|
||||||
|
|
||||||
{{ caption }} {# appropriate leading whitespace added beforehand #}
|
|
||||||
{% endfor %}
|
|
||||||
|
|
||||||
.. only:: not html
|
|
||||||
|
|
||||||
{% for img in images %}
|
|
||||||
.. figure:: {{ build_dir }}/{{ img.basename }}.*
|
|
||||||
{% for option in options -%}
|
|
||||||
{{ option }}
|
|
||||||
{% endfor -%}
|
|
||||||
|
|
||||||
{{ caption }} {# appropriate leading whitespace added beforehand #}
|
|
||||||
{% endfor %}
|
|
||||||
|
|
||||||
"""
|
|
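# --- Rendering sketch (added for illustration; not part of the original file).
# The run() function below feeds these templates through jinja2; a stripped-down
# call with made-up values looks roughly like this (ImageFile is defined further
# down, so only call this after the module has been fully imported):
def _example_render_template():
    img = ImageFile('example-1', '/tmp/plot_directive')   # hypothetical paths
    return jinja2.Template(TEMPLATE).render(
        default_fmt='png',
        build_dir='/tmp/plot_directive',
        src_name='example-1.py',
        multi_image=False,
        options=[':align: center'],
        images=[img],
        source_code='',
        html_show_formats=True,
        caption='')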
||||||
|
|
||||||
exception_template = """
|
|
||||||
.. only:: html
|
|
||||||
|
|
||||||
[`source code <%(linkdir)s/%(basename)s.py>`__]
|
|
||||||
|
|
||||||
Exception occurred rendering plot.
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
# the context of the plot for all directives specified with the
|
|
||||||
# :context: option
|
|
||||||
plot_context = dict()
|
|
||||||
|
|
||||||
|
|
||||||
class ImageFile:
|
|
||||||
def __init__(self, basename, dirname):
|
|
||||||
self.basename = basename
|
|
||||||
self.dirname = dirname
|
|
||||||
self.formats = []
|
|
||||||
|
|
||||||
def filename(self, format):
|
|
||||||
return os.path.join(self.dirname, f"{self.basename}.{format}")
|
|
||||||
|
|
||||||
def filenames(self):
|
|
||||||
return [self.filename(fmt) for fmt in self.formats]
|
|
||||||
|
|
||||||
|
|
||||||
def out_of_date(original, derived, includes=None):
|
|
||||||
"""
|
|
||||||
Return whether *derived* is out-of-date relative to *original* or any of
|
|
||||||
the RST files included in it using the RST include directive (*includes*).
|
|
||||||
*derived* and *original* are full paths, and *includes* is optionally a
|
|
||||||
list of full paths which may have been included in the *original*.
|
|
||||||
"""
|
|
||||||
if not os.path.exists(derived):
|
|
||||||
return True
|
|
||||||
|
|
||||||
if includes is None:
|
|
||||||
includes = []
|
|
||||||
files_to_check = [original, *includes]
|
|
||||||
|
|
||||||
def out_of_date_one(original, derived_mtime):
|
|
||||||
return (os.path.exists(original) and
|
|
||||||
derived_mtime < os.stat(original).st_mtime)
|
|
||||||
|
|
||||||
derived_mtime = os.stat(derived).st_mtime
|
|
||||||
return any(out_of_date_one(f, derived_mtime) for f in files_to_check)
|
|
||||||
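# Illustration (added, not in the original): out_of_date('plot.py', 'plot.png')
# is True when plot.png is missing or older than plot.py; passing
# includes=['extra.rst'] additionally forces a rebuild when that included file
# is newer than the generated image.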
|
|
||||||
|
|
||||||
class PlotError(RuntimeError):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
def _run_code(code, code_path, ns=None, function_name=None):
|
|
||||||
"""
|
|
||||||
Import a Python module from a path, and run the function given by
|
|
||||||
name, if function_name is not None.
|
|
||||||
"""
|
|
||||||
|
|
||||||
# Change the working directory to the directory of the example, so
|
|
||||||
# it can get at its data files, if any. Add its path to sys.path
|
|
||||||
# so it can import any helper modules sitting beside it.
|
|
||||||
pwd = os.getcwd()
|
|
||||||
if setup.config.plot_working_directory is not None:
|
|
||||||
try:
|
|
||||||
os.chdir(setup.config.plot_working_directory)
|
|
||||||
except OSError as err:
|
|
||||||
raise OSError(f'{err}\n`plot_working_directory` option in '
|
|
||||||
f'Sphinx configuration file must be a valid '
|
|
||||||
f'directory path') from err
|
|
||||||
except TypeError as err:
|
|
||||||
raise TypeError(f'{err}\n`plot_working_directory` option in '
|
|
||||||
f'Sphinx configuration file must be a string or '
|
|
||||||
f'None') from err
|
|
||||||
elif code_path is not None:
|
|
||||||
dirname = os.path.abspath(os.path.dirname(code_path))
|
|
||||||
os.chdir(dirname)
|
|
||||||
|
|
||||||
with cbook._setattr_cm(
|
|
||||||
sys, argv=[code_path], path=[os.getcwd(), *sys.path]), \
|
|
||||||
contextlib.redirect_stdout(StringIO()):
|
|
||||||
try:
|
|
||||||
if ns is None:
|
|
||||||
ns = {}
|
|
||||||
if not ns:
|
|
||||||
if setup.config.plot_pre_code is None:
|
|
||||||
exec('import numpy as np\n'
|
|
||||||
'from matplotlib import pyplot as plt\n', ns)
|
|
||||||
else:
|
|
||||||
exec(str(setup.config.plot_pre_code), ns)
|
|
||||||
if "__main__" in code:
|
|
||||||
ns['__name__'] = '__main__'
|
|
||||||
|
|
||||||
# Patch out non-interactive show() to avoid triggering a warning.
|
|
||||||
with cbook._setattr_cm(FigureManagerBase, show=lambda self: None):
|
|
||||||
exec(code, ns)
|
|
||||||
if function_name is not None:
|
|
||||||
exec(function_name + "()", ns)
|
|
||||||
|
|
||||||
except (Exception, SystemExit) as err:
|
|
||||||
raise PlotError(traceback.format_exc()) from err
|
|
||||||
finally:
|
|
||||||
os.chdir(pwd)
|
|
||||||
return ns
|
|
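# Illustration (added, not in the original): inside a build, render_figures()
# below invokes this helper roughly as
#
#     ns = _run_code("plt.plot([1, 2, 3])", code_path="example.py", ns={},
#                    function_name=None)
#
# where "example.py" is a made-up path.  The executed code leaves its figures
# open in pyplot, and render_figures() then harvests them via Gcf.  Note that
# this helper reads setup.config, so it only works once setup(app) has run.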
||||||
|
|
||||||
|
|
||||||
def clear_state(plot_rcparams, close=True):
|
|
||||||
if close:
|
|
||||||
plt.close('all')
|
|
||||||
matplotlib.rc_file_defaults()
|
|
||||||
matplotlib.rcParams.update(plot_rcparams)
|
|
||||||
|
|
||||||
|
|
||||||
def get_plot_formats(config):
|
|
||||||
default_dpi = {'png': 80, 'hires.png': 200, 'pdf': 200}
|
|
||||||
formats = []
|
|
||||||
plot_formats = config.plot_formats
|
|
||||||
for fmt in plot_formats:
|
|
||||||
if isinstance(fmt, str):
|
|
||||||
if ':' in fmt:
|
|
||||||
suffix, dpi = fmt.split(':')
|
|
||||||
formats.append((str(suffix), int(dpi)))
|
|
||||||
else:
|
|
||||||
formats.append((fmt, default_dpi.get(fmt, 80)))
|
|
||||||
elif isinstance(fmt, (tuple, list)) and len(fmt) == 2:
|
|
||||||
formats.append((str(fmt[0]), int(fmt[1])))
|
|
||||||
else:
|
|
||||||
raise PlotError('invalid image format "%r" in plot_formats' % fmt)
|
|
||||||
return formats
|
|
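# Illustration (added, not in the original): with config.plot_formats set to
# ['png', 'hires.png:300', ('pdf', 150)], this returns
# [('png', 80), ('hires.png', 300), ('pdf', 150)] -- plain strings fall back to
# the default DPI table, while 'suffix:dpi' strings and (suffix, dpi) pairs are
# explicit.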
||||||
|
|
||||||
|
|
||||||
def _parse_srcset(entries):
|
|
||||||
"""
|
|
||||||
Parse srcset for multiples...
|
|
||||||
"""
|
|
||||||
srcset = {}
|
|
||||||
for entry in entries:
|
|
||||||
entry = entry.strip()
|
|
||||||
if len(entry) >= 2:
|
|
||||||
mult = entry[:-1]
|
|
||||||
srcset[float(mult)] = entry
|
|
||||||
else:
|
|
||||||
raise ExtensionError(f'srcset argument {entry!r} is invalid.')
|
|
||||||
return srcset
|
|
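# Illustration (added, not in the original): _parse_srcset(['2.0x', '1.5x'])
# returns {2.0: '2.0x', 1.5: '1.5x'}; render_figures() below uses the keys as
# DPI multipliers and the values as filename suffixes.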
||||||
|
|
||||||
|
|
||||||
def render_figures(code, code_path, output_dir, output_base, context,
|
|
||||||
function_name, config, context_reset=False,
|
|
||||||
close_figs=False,
|
|
||||||
code_includes=None):
|
|
||||||
"""
|
|
||||||
Run a pyplot script and save the images in *output_dir*.
|
|
||||||
|
|
||||||
Save the images under *output_dir* with file names derived from
|
|
||||||
*output_base*
|
|
||||||
"""
|
|
||||||
|
|
||||||
if function_name is not None:
|
|
||||||
output_base = f'{output_base}_{function_name}'
|
|
||||||
formats = get_plot_formats(config)
|
|
||||||
|
|
||||||
# Try to determine if all images already exist
|
|
||||||
|
|
||||||
is_doctest, code_pieces = _split_code_at_show(code, function_name)
|
|
||||||
# Look for single-figure output files first
|
|
||||||
img = ImageFile(output_base, output_dir)
|
|
||||||
for format, dpi in formats:
|
|
||||||
if context or out_of_date(code_path, img.filename(format),
|
|
||||||
includes=code_includes):
|
|
||||||
all_exists = False
|
|
||||||
break
|
|
||||||
img.formats.append(format)
|
|
||||||
else:
|
|
||||||
all_exists = True
|
|
||||||
|
|
||||||
if all_exists:
|
|
||||||
return [(code, [img])]
|
|
||||||
|
|
||||||
# Then look for multi-figure output files
|
|
||||||
results = []
|
|
||||||
for i, code_piece in enumerate(code_pieces):
|
|
||||||
images = []
|
|
||||||
for j in itertools.count():
|
|
||||||
if len(code_pieces) > 1:
|
|
||||||
img = ImageFile('%s_%02d_%02d' % (output_base, i, j),
|
|
||||||
output_dir)
|
|
||||||
else:
|
|
||||||
img = ImageFile('%s_%02d' % (output_base, j), output_dir)
|
|
||||||
for fmt, dpi in formats:
|
|
||||||
if context or out_of_date(code_path, img.filename(fmt),
|
|
||||||
includes=code_includes):
|
|
||||||
all_exists = False
|
|
||||||
break
|
|
||||||
img.formats.append(fmt)
|
|
||||||
|
|
||||||
# assume that if we have one, we have them all
|
|
||||||
if not all_exists:
|
|
||||||
all_exists = (j > 0)
|
|
||||||
break
|
|
||||||
images.append(img)
|
|
||||||
if not all_exists:
|
|
||||||
break
|
|
||||||
results.append((code_piece, images))
|
|
||||||
else:
|
|
||||||
all_exists = True
|
|
||||||
|
|
||||||
if all_exists:
|
|
||||||
return results
|
|
||||||
|
|
||||||
# We didn't find the files, so build them
|
|
||||||
|
|
||||||
results = []
|
|
||||||
ns = plot_context if context else {}
|
|
||||||
|
|
||||||
if context_reset:
|
|
||||||
clear_state(config.plot_rcparams)
|
|
||||||
plot_context.clear()
|
|
||||||
|
|
||||||
close_figs = not context or close_figs
|
|
||||||
|
|
||||||
for i, code_piece in enumerate(code_pieces):
|
|
||||||
|
|
||||||
if not context or config.plot_apply_rcparams:
|
|
||||||
clear_state(config.plot_rcparams, close_figs)
|
|
||||||
elif close_figs:
|
|
||||||
plt.close('all')
|
|
||||||
|
|
||||||
_run_code(doctest.script_from_examples(code_piece) if is_doctest
|
|
||||||
else code_piece,
|
|
||||||
code_path, ns, function_name)
|
|
||||||
|
|
||||||
images = []
|
|
||||||
fig_managers = _pylab_helpers.Gcf.get_all_fig_managers()
|
|
||||||
for j, figman in enumerate(fig_managers):
|
|
||||||
if len(fig_managers) == 1 and len(code_pieces) == 1:
|
|
||||||
img = ImageFile(output_base, output_dir)
|
|
||||||
elif len(code_pieces) == 1:
|
|
||||||
img = ImageFile("%s_%02d" % (output_base, j), output_dir)
|
|
||||||
else:
|
|
||||||
img = ImageFile("%s_%02d_%02d" % (output_base, i, j),
|
|
||||||
output_dir)
|
|
||||||
images.append(img)
|
|
||||||
|
|
||||||
for fmt, dpi in formats:
|
|
||||||
try:
|
|
||||||
figman.canvas.figure.savefig(img.filename(fmt), dpi=dpi)
|
|
||||||
if fmt == formats[0][0] and config.plot_srcset:
|
|
||||||
# save a 2x, 3x etc version of the default...
|
|
||||||
srcset = _parse_srcset(config.plot_srcset)
|
|
||||||
for mult, suffix in srcset.items():
|
|
||||||
fm = f'{suffix}.{fmt}'
|
|
||||||
img.formats.append(fm)
|
|
||||||
figman.canvas.figure.savefig(img.filename(fm),
|
|
||||||
dpi=int(dpi * mult))
|
|
||||||
except Exception as err:
|
|
||||||
raise PlotError(traceback.format_exc()) from err
|
|
||||||
img.formats.append(fmt)
|
|
||||||
|
|
||||||
results.append((code_piece, images))
|
|
||||||
|
|
||||||
if not context or config.plot_apply_rcparams:
|
|
||||||
clear_state(config.plot_rcparams, close=not context)
|
|
||||||
|
|
||||||
return results
|
|
||||||
|
|
||||||
|
|
||||||
def run(arguments, content, options, state_machine, state, lineno):
|
|
||||||
document = state_machine.document
|
|
||||||
config = document.settings.env.config
|
|
||||||
nofigs = 'nofigs' in options
|
|
||||||
|
|
||||||
if config.plot_srcset and setup.app.builder.name == 'singlehtml':
|
|
||||||
raise ExtensionError(
|
|
||||||
'plot_srcset option not compatible with single HTML writer')
|
|
||||||
|
|
||||||
formats = get_plot_formats(config)
|
|
||||||
default_fmt = formats[0][0]
|
|
||||||
|
|
||||||
options.setdefault('include-source', config.plot_include_source)
|
|
||||||
options.setdefault('show-source-link', config.plot_html_show_source_link)
|
|
||||||
|
|
||||||
if 'class' in options:
|
|
||||||
# classes are parsed into a list of string, and output by simply
|
|
||||||
# printing the list, abusing the fact that RST guarantees to strip
|
|
||||||
# non-conforming characters
|
|
||||||
options['class'] = ['plot-directive'] + options['class']
|
|
||||||
else:
|
|
||||||
options.setdefault('class', ['plot-directive'])
|
|
||||||
keep_context = 'context' in options
|
|
||||||
context_opt = None if not keep_context else options['context']
|
|
||||||
|
|
||||||
rst_file = document.attributes['source']
|
|
||||||
rst_dir = os.path.dirname(rst_file)
|
|
||||||
|
|
||||||
if len(arguments):
|
|
||||||
if not config.plot_basedir:
|
|
||||||
source_file_name = os.path.join(setup.app.builder.srcdir,
|
|
||||||
directives.uri(arguments[0]))
|
|
||||||
else:
|
|
||||||
source_file_name = os.path.join(setup.confdir, config.plot_basedir,
|
|
||||||
directives.uri(arguments[0]))
|
|
||||||
# If there is content, it will be passed as a caption.
|
|
||||||
caption = '\n'.join(content)
|
|
||||||
|
|
||||||
# Enforce unambiguous use of captions.
|
|
||||||
if "caption" in options:
|
|
||||||
if caption:
|
|
||||||
raise ValueError(
|
|
||||||
'Caption specified in both content and options.'
|
|
||||||
' Please remove ambiguity.'
|
|
||||||
)
|
|
||||||
# Use caption option
|
|
||||||
caption = options["caption"]
|
|
||||||
|
|
||||||
# If the optional function name is provided, use it
|
|
||||||
if len(arguments) == 2:
|
|
||||||
function_name = arguments[1]
|
|
||||||
else:
|
|
||||||
function_name = None
|
|
||||||
|
|
||||||
code = Path(source_file_name).read_text(encoding='utf-8')
|
|
||||||
output_base = os.path.basename(source_file_name)
|
|
||||||
else:
|
|
||||||
source_file_name = rst_file
|
|
||||||
code = textwrap.dedent("\n".join(map(str, content)))
|
|
||||||
counter = document.attributes.get('_plot_counter', 0) + 1
|
|
||||||
document.attributes['_plot_counter'] = counter
|
|
||||||
base, ext = os.path.splitext(os.path.basename(source_file_name))
|
|
||||||
output_base = '%s-%d.py' % (base, counter)
|
|
||||||
function_name = None
|
|
||||||
caption = options.get('caption', '')
|
|
||||||
|
|
||||||
base, source_ext = os.path.splitext(output_base)
|
|
||||||
if source_ext in ('.py', '.rst', '.txt'):
|
|
||||||
output_base = base
|
|
||||||
else:
|
|
||||||
source_ext = ''
|
|
||||||
|
|
||||||
# ensure that LaTeX includegraphics doesn't choke in foo.bar.pdf filenames
|
|
||||||
output_base = output_base.replace('.', '-')
|
|
||||||
|
|
||||||
# is it in doctest format?
|
|
||||||
is_doctest = contains_doctest(code)
|
|
||||||
if 'format' in options:
|
|
||||||
if options['format'] == 'python':
|
|
||||||
is_doctest = False
|
|
||||||
else:
|
|
||||||
is_doctest = True
|
|
||||||
|
|
||||||
# determine output directory name fragment
|
|
||||||
source_rel_name = relpath(source_file_name, setup.confdir)
|
|
||||||
source_rel_dir = os.path.dirname(source_rel_name).lstrip(os.path.sep)
|
|
||||||
|
|
||||||
# build_dir: where to place output files (temporarily)
|
|
||||||
build_dir = os.path.join(os.path.dirname(setup.app.doctreedir),
|
|
||||||
'plot_directive',
|
|
||||||
source_rel_dir)
|
|
||||||
# get rid of .. in paths, also changes pathsep
|
|
||||||
# see note in Python docs for warning about symbolic links on Windows.
|
|
||||||
# need to compare source and dest paths at end
|
|
||||||
build_dir = os.path.normpath(build_dir)
|
|
||||||
os.makedirs(build_dir, exist_ok=True)
|
|
||||||
|
|
||||||
# how to link to files from the RST file
|
|
||||||
try:
|
|
||||||
build_dir_link = relpath(build_dir, rst_dir).replace(os.path.sep, '/')
|
|
||||||
except ValueError:
|
|
||||||
# on Windows, relpath raises ValueError when path and start are on
|
|
||||||
# different mounts/drives
|
|
||||||
build_dir_link = build_dir
|
|
||||||
|
|
||||||
# get list of included rst files so that the output is updated when any
|
|
||||||
# plots in the included files change. These attributes are modified by the
|
|
||||||
# include directive (see the docutils.parsers.rst.directives.misc module).
|
|
||||||
try:
|
|
||||||
source_file_includes = [os.path.join(os.getcwd(), t[0])
|
|
||||||
for t in state.document.include_log]
|
|
||||||
except AttributeError:
|
|
||||||
# the document.include_log attribute only exists in docutils >=0.17,
|
|
||||||
# before that we need to inspect the state machine
|
|
||||||
possible_sources = {os.path.join(setup.confdir, t[0])
|
|
||||||
for t in state_machine.input_lines.items}
|
|
||||||
source_file_includes = [f for f in possible_sources
|
|
||||||
if os.path.isfile(f)]
|
|
||||||
# remove the source file itself from the includes
|
|
||||||
try:
|
|
||||||
source_file_includes.remove(source_file_name)
|
|
||||||
except ValueError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
# save script (if necessary)
|
|
||||||
if options['show-source-link']:
|
|
||||||
Path(build_dir, output_base + source_ext).write_text(
|
|
||||||
doctest.script_from_examples(code)
|
|
||||||
if source_file_name == rst_file and is_doctest
|
|
||||||
else code,
|
|
||||||
encoding='utf-8')
|
|
||||||
|
|
||||||
# make figures
|
|
||||||
try:
|
|
||||||
results = render_figures(code=code,
|
|
||||||
code_path=source_file_name,
|
|
||||||
output_dir=build_dir,
|
|
||||||
output_base=output_base,
|
|
||||||
context=keep_context,
|
|
||||||
function_name=function_name,
|
|
||||||
config=config,
|
|
||||||
context_reset=context_opt == 'reset',
|
|
||||||
close_figs=context_opt == 'close-figs',
|
|
||||||
code_includes=source_file_includes)
|
|
||||||
errors = []
|
|
||||||
except PlotError as err:
|
|
||||||
reporter = state.memo.reporter
|
|
||||||
sm = reporter.system_message(
|
|
||||||
2, "Exception occurred in plotting {}\n from {}:\n{}".format(
|
|
||||||
output_base, source_file_name, err),
|
|
||||||
line=lineno)
|
|
||||||
results = [(code, [])]
|
|
||||||
errors = [sm]
|
|
||||||
|
|
||||||
# Properly indent the caption
|
|
||||||
if caption and config.plot_srcset:
|
|
||||||
caption = f':caption: {caption}'
|
|
||||||
elif caption:
|
|
||||||
caption = '\n' + '\n'.join(' ' + line.strip()
|
|
||||||
for line in caption.split('\n'))
|
|
||||||
# generate output restructuredtext
|
|
||||||
total_lines = []
|
|
||||||
for j, (code_piece, images) in enumerate(results):
|
|
||||||
if options['include-source']:
|
|
||||||
if is_doctest:
|
|
||||||
lines = ['', *code_piece.splitlines()]
|
|
||||||
else:
|
|
||||||
lines = ['.. code-block:: python', '',
|
|
||||||
*textwrap.indent(code_piece, ' ').splitlines()]
|
|
||||||
source_code = "\n".join(lines)
|
|
||||||
else:
|
|
||||||
source_code = ""
|
|
||||||
|
|
||||||
if nofigs:
|
|
||||||
images = []
|
|
||||||
|
|
||||||
opts = [
|
|
||||||
f':{key}: {val}' for key, val in options.items()
|
|
||||||
if key in ('alt', 'height', 'width', 'scale', 'align', 'class')]
|
|
||||||
|
|
||||||
# Not-None src_name signals the need for a source download in the
|
|
||||||
# generated html
|
|
||||||
if j == 0 and options['show-source-link']:
|
|
||||||
src_name = output_base + source_ext
|
|
||||||
else:
|
|
||||||
src_name = None
|
|
||||||
if config.plot_srcset:
|
|
||||||
srcset = [*_parse_srcset(config.plot_srcset).values()]
|
|
||||||
template = TEMPLATE_SRCSET
|
|
||||||
else:
|
|
||||||
srcset = None
|
|
||||||
template = TEMPLATE
|
|
||||||
|
|
||||||
result = jinja2.Template(config.plot_template or template).render(
|
|
||||||
default_fmt=default_fmt,
|
|
||||||
build_dir=build_dir_link,
|
|
||||||
src_name=src_name,
|
|
||||||
multi_image=len(images) > 1,
|
|
||||||
options=opts,
|
|
||||||
srcset=srcset,
|
|
||||||
images=images,
|
|
||||||
source_code=source_code,
|
|
||||||
html_show_formats=config.plot_html_show_formats and len(images),
|
|
||||||
caption=caption)
|
|
||||||
total_lines.extend(result.split("\n"))
|
|
||||||
total_lines.extend("\n")
|
|
||||||
|
|
||||||
if total_lines:
|
|
||||||
state_machine.insert_input(total_lines, source=source_file_name)
|
|
||||||
|
|
||||||
return errors
|
|
7 packaging/CMakeLists.txt Normal file
@ -0,0 +1,7 @@
# Configure the version of packaging files
configure_file(conda/meta.yaml.in conda/meta.yaml)
configure_file(conda/build.sh conda/build.sh COPYONLY)
configure_file(conda/conda_build_config.yaml conda/conda_build_config.yaml COPYONLY)
configure_file(TRIQS-dft_tools-foss-2021b.eb.in TRIQS-dft_tools-foss-2021b.eb)

@ -1,7 +1,7 @@
easyblock = 'CMakeMake'

name = 'TRIQS-dft_tools'
version = '3.2.0'
version = '@PROJECT_VERSION@'

homepage = 'https://triqs.github.io/dft_tools/'
description = """

@ -4,7 +4,7 @@ install(
  FILES
    ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}-config.cmake
    ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}-config-version.cmake
  DESTINATION lib/cmake/${PROJECT_NAME}
  DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}
)

install(EXPORT ${PROJECT_NAME}-targets NAMESPACE ${PROJECT_NAME}:: DESTINATION lib/cmake/${PROJECT_NAME})
install(EXPORT ${PROJECT_NAME}-targets NAMESPACE ${PROJECT_NAME}:: DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME})

@ -24,7 +24,7 @@ set(@PROJECT_NAME@_ROOT @CMAKE_INSTALL_PREFIX@ CACHE STRING "@PROJECT_NAME@ root
#find_dep(depname 1.0)

# Include the exported targets of this project
include(@CMAKE_INSTALL_PREFIX@/lib/cmake/@PROJECT_NAME@/@PROJECT_NAME@-targets.cmake)
include(@CMAKE_INSTALL_FULL_LIBDIR@/cmake/@PROJECT_NAME@/@PROJECT_NAME@-targets.cmake)

message(STATUS "Found @PROJECT_NAME@-config.cmake with version @PROJECT_VERSION@, hash = @PROJECT_GIT_HASH@, root = @CMAKE_INSTALL_PREFIX@")

@ -1,4 +1,4 @@
#!/usr/bin/env python2
#!/usr/bin/env python3

import sys
import os

@ -21,12 +21,18 @@ ignore_lst = [".git/", "replace_and_rename.py", "squash_history.sh"]
# Find the root directory of app4triqs
app4triqs_root = os.path.abspath(os.path.dirname(__file__) + "/..")

# Blacklisted file-formats
fmt_blacklist = ['.png', '.h5', '.jpg', '.ico']

# Recurse over all subdirectories and files
for root, dirs, files in os.walk(app4triqs_root):

    for fname in files:
        fpath = os.path.join(root, fname)

        if os.path.splitext(fname)[1] in fmt_blacklist:
            continue

        # Ignore certain files / directories
        if any(it in fpath for it in ignore_lst): continue

@ -4,7 +4,7 @@ export @PROJECT_NAME@_ROOT=@CMAKE_INSTALL_PREFIX@

export CPLUS_INCLUDE_PATH=@CMAKE_INSTALL_PREFIX@/include:$CPLUS_INCLUDE_PATH
export PATH=@CMAKE_INSTALL_PREFIX@/bin:$PATH
export LIBRARY_PATH=@CMAKE_INSTALL_PREFIX@/lib:$LIBRARY_PATH
export LIBRARY_PATH=@CMAKE_INSTALL_FULL_LIBDIR@:$LIBRARY_PATH
export LD_LIBRARY_PATH=@CMAKE_INSTALL_PREFIX@/lib:$LD_LIBRARY_PATH
export LD_LIBRARY_PATH=@CMAKE_INSTALL_FULL_LIBDIR@:$LD_LIBRARY_PATH
export CMAKE_PREFIX_PATH=@CMAKE_INSTALL_PREFIX@:$CMAKE_PREFIX_PATH
@EXPORT_PYTHON_PATH@