Mirror of https://github.com/triqs/dft_tools (synced 2024-12-22 04:13:47 +01:00)

Initial commit

commit e85d3b4f2b
parent c2b45180f0
1
python/converters/vasp/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
*.pyc
|
74
python/converters/vasp/EIGENVAL
Normal file
@@ -0,0 +1,74 @@
|
||||
2 2 1 2
|
||||
0.1333597E+02 0.2987800E-09 0.2987800E-09 0.2987800E-09 0.5000000E-15
|
||||
1.000000000000000E-004
|
||||
CAR
|
||||
V
|
||||
22 4 15
|
||||
|
||||
0.0000000E+00 0.0000000E+00 0.0000000E+00 0.3703704E-01
|
||||
1 -31.708596 -31.708596
|
||||
2 -31.708596 -31.708596
|
||||
3 -31.708596 -31.708596
|
||||
4 -31.061475 -31.061475
|
||||
5 -31.061475 -31.061475
|
||||
6 -31.061475 -31.061475
|
||||
7 -0.811953 -0.811953
|
||||
8 2.607665 2.607665
|
||||
9 2.607665 2.607665
|
||||
10 6.002735 6.002735
|
||||
11 6.002735 6.002735
|
||||
12 6.002735 6.002735
|
||||
13 8.013630 8.013630
|
||||
14 8.013630 8.013630
|
||||
15 9.361252 9.361252
|
||||
|
||||
0.3333333E+00 0.0000000E+00 0.0000000E+00 0.2222222E+00
|
||||
1 -31.682851 -31.682851
|
||||
2 -31.539793 -31.539793
|
||||
3 -31.539793 -31.539793
|
||||
4 -31.363786 -31.363786
|
||||
5 -31.215960 -31.215960
|
||||
6 -31.215960 -31.215960
|
||||
7 1.216017 1.216017
|
||||
8 3.394994 3.394994
|
||||
9 3.771669 3.771669
|
||||
10 6.050595 6.050595
|
||||
11 6.050595 6.050595
|
||||
12 6.457025 6.457025
|
||||
13 6.771661 6.771661
|
||||
14 7.383619 7.383619
|
||||
15 7.383620 7.383620
|
||||
|
||||
0.3333333E+00 0.3333333E+00 0.0000000E+00 0.4444444E+00
|
||||
1 -31.935665 -31.935665
|
||||
2 -31.782518 -31.782518
|
||||
3 -31.447103 -31.447103
|
||||
4 -31.284953 -31.284953
|
||||
5 -31.234384 -31.234384
|
||||
6 -31.067231 -31.067231
|
||||
7 2.201004 2.201004
|
||||
8 3.123182 3.123182
|
||||
9 4.809697 4.809697
|
||||
10 5.123957 5.123957
|
||||
11 5.357025 5.357025
|
||||
12 5.853431 5.853431
|
||||
13 7.260017 7.260017
|
||||
14 7.862532 7.862532
|
||||
15 8.230907 8.230907
|
||||
|
||||
0.3333333E+00 0.3333333E+00 0.3333333E+00 0.2962963E+00
|
||||
1 -31.808727 -31.808727
|
||||
2 -31.713049 -31.713049
|
||||
3 -31.713049 -31.713049
|
||||
4 -31.278227 -31.278227
|
||||
5 -31.278227 -31.278227
|
||||
6 -31.179331 -31.179331
|
||||
7 3.831291 3.831291
|
||||
8 3.998572 3.998572
|
||||
9 4.466211 4.466211
|
||||
10 4.466211 4.466211
|
||||
11 4.724276 4.724276
|
||||
12 4.724276 4.724276
|
||||
13 8.230309 8.230309
|
||||
14 8.253908 8.253831
|
||||
15 8.292606 8.292606
|
2457
python/converters/vasp/IBZKPT
Normal file
File diff suppressed because it is too large
29
python/converters/vasp/POSCAR
Normal file
@@ -0,0 +1,29 @@
|
||||
LuNiO3, low-T, P2_1/n
|
||||
1.0
|
||||
5.1234998703 0.0000000000 0.0000000000
|
||||
0.0000000000 5.5089001656 0.0000000000
|
||||
-0.0166880521 0.0000000000 7.3551808822
|
||||
Lu Ni O
|
||||
4 4 12
|
||||
Cartesian
|
||||
5.00246185 0.42418531 1.86086070
|
||||
0.10434997 5.08471473 5.49431996
|
||||
2.67444393 3.17863552 1.81672974
|
||||
2.43236789 2.33026481 5.53845136
|
||||
2.56174994 0.00000000 0.00000000
|
||||
-0.00834403 2.75445008 3.67759044
|
||||
2.55340591 0.00000000 3.67759044
|
||||
0.00000000 2.75445008 0.00000000
|
||||
0.56002379 2.54896816 1.79539968
|
||||
4.54678788 2.95993201 5.55978141
|
||||
1.99338212 5.30341824 1.88219076
|
||||
3.11342985 0.20548193 5.47298991
|
||||
3.55122302 1.72814193 0.39644425
|
||||
1.55558880 3.78075840 6.95873661
|
||||
4.12568276 4.48259185 3.28114617
|
||||
0.98112906 1.02630815 4.07403471
|
||||
0.93260966 1.11059427 6.94181952
|
||||
4.17420208 4.39830581 0.41336136
|
||||
1.60410812 3.86504444 4.09095181
|
||||
3.50270370 1.64385573 3.26422908
|
||||
|
29
python/converters/vasp/POSCAR.cart
Normal file
@@ -0,0 +1,29 @@
|
||||
LuNiO3, low-T, P2_1/n
|
||||
1.0
|
||||
5.1234998703 0.0000000000 0.0000000000
|
||||
0.0000000000 5.5089001656 0.0000000000
|
||||
-0.0166880521 0.0000000000 7.3551808822
|
||||
Lu Ni O
|
||||
4 4 12
|
||||
Kartesian
|
||||
5.00246185 0.42418531 1.86086070
|
||||
0.10434997 5.08471473 5.49431996
|
||||
2.67444393 3.17863552 1.81672974
|
||||
2.43236789 2.33026481 5.53845136
|
||||
2.56174994 0.00000000 0.00000000
|
||||
-0.00834403 2.75445008 3.67759044
|
||||
2.55340591 0.00000000 3.67759044
|
||||
0.00000000 2.75445008 0.00000000
|
||||
0.56002379 2.54896816 1.79539968
|
||||
4.54678788 2.95993201 5.55978141
|
||||
1.99338212 5.30341824 1.88219076
|
||||
3.11342985 0.20548193 5.47298991
|
||||
3.55122302 1.72814193 0.39644425
|
||||
1.55558880 3.78075840 6.95873661
|
||||
4.12568276 4.48259185 3.28114617
|
||||
0.98112906 1.02630815 4.07403471
|
||||
0.93260966 1.11059427 6.94181952
|
||||
4.17420208 4.39830581 0.41336136
|
||||
1.60410812 3.86504444 4.09095181
|
||||
3.50270370 1.64385573 3.26422908
|
||||
|
28
python/converters/vasp/POSCAR.direct
Normal file
@@ -0,0 +1,28 @@
|
||||
LuNiO3, low-T, P2_1/n
|
||||
1.0
|
||||
5.1234998703 0.0000000000 0.0000000000
|
||||
0.0000000000 5.5089001656 0.0000000000
|
||||
-0.0166880521 0.0000000000 7.3551808822
|
||||
Lu Ni O
|
||||
4 4 12
|
||||
Direct
|
||||
0.977199972 0.077000000 0.252999991
|
||||
0.022800028 0.922999978 0.746999979
|
||||
0.522800028 0.577000022 0.247000009
|
||||
0.477199972 0.423000008 0.753000021
|
||||
0.500000000 0.000000000 0.000000000
|
||||
0.000000000 0.500000000 0.500000000
|
||||
0.500000000 0.000000000 0.500000000
|
||||
0.000000000 0.500000000 0.000000000
|
||||
0.110100001 0.462700009 0.244100004
|
||||
0.889899969 0.537299991 0.755900025
|
||||
0.389899999 0.962700009 0.255899996
|
||||
0.610100031 0.037299991 0.744099975
|
||||
0.693300009 0.313699991 0.053900000
|
||||
0.306699991 0.686300039 0.946099997
|
||||
0.806699991 0.813699961 0.446099997
|
||||
0.193300009 0.186300009 0.553900003
|
||||
0.185100004 0.201600000 0.943799973
|
||||
0.814899981 0.798399985 0.056200027
|
||||
0.314899981 0.701600015 0.556200027
|
||||
0.685100019 0.298399985 0.443799973
|
153
python/converters/vasp/doc/Makefile
Normal file
@@ -0,0 +1,153 @@
|
||||
# Makefile for Sphinx documentation
|
||||
#
|
||||
|
||||
# You can set these variables from the command line.
|
||||
SPHINXOPTS =
|
||||
SPHINXBUILD = sphinx-build-2.7
|
||||
PAPER =
|
||||
BUILDDIR = build
|
||||
|
||||
# Internal variables.
|
||||
PAPEROPT_a4 = -D latex_paper_size=a4
|
||||
PAPEROPT_letter = -D latex_paper_size=letter
|
||||
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
|
||||
# the i18n builder cannot share the environment and doctrees with the others
|
||||
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
|
||||
|
||||
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
|
||||
|
||||
help:
|
||||
@echo "Please use \`make <target>' where <target> is one of"
|
||||
@echo " html to make standalone HTML files"
|
||||
@echo " dirhtml to make HTML files named index.html in directories"
|
||||
@echo " singlehtml to make a single large HTML file"
|
||||
@echo " pickle to make pickle files"
|
||||
@echo " json to make JSON files"
|
||||
@echo " htmlhelp to make HTML files and a HTML help project"
|
||||
@echo " qthelp to make HTML files and a qthelp project"
|
||||
@echo " devhelp to make HTML files and a Devhelp project"
|
||||
@echo " epub to make an epub"
|
||||
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
|
||||
@echo " latexpdf to make LaTeX files and run them through pdflatex"
|
||||
@echo " text to make text files"
|
||||
@echo " man to make manual pages"
|
||||
@echo " texinfo to make Texinfo files"
|
||||
@echo " info to make Texinfo files and run them through makeinfo"
|
||||
@echo " gettext to make PO message catalogs"
|
||||
@echo " changes to make an overview of all changed/added/deprecated items"
|
||||
@echo " linkcheck to check all external links for integrity"
|
||||
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
|
||||
|
||||
clean:
|
||||
-rm -rf $(BUILDDIR)/*
|
||||
|
||||
html:
|
||||
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
|
||||
@echo
|
||||
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
|
||||
|
||||
dirhtml:
|
||||
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
|
||||
@echo
|
||||
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
|
||||
|
||||
singlehtml:
|
||||
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
|
||||
@echo
|
||||
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
|
||||
|
||||
pickle:
|
||||
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
|
||||
@echo
|
||||
@echo "Build finished; now you can process the pickle files."
|
||||
|
||||
json:
|
||||
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
|
||||
@echo
|
||||
@echo "Build finished; now you can process the JSON files."
|
||||
|
||||
htmlhelp:
|
||||
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
|
||||
@echo
|
||||
@echo "Build finished; now you can run HTML Help Workshop with the" \
|
||||
".hhp project file in $(BUILDDIR)/htmlhelp."
|
||||
|
||||
qthelp:
|
||||
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
|
||||
@echo
|
||||
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
|
||||
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
|
||||
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/plotools.qhcp"
|
||||
@echo "To view the help file:"
|
||||
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/plotools.qhc"
|
||||
|
||||
devhelp:
|
||||
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
|
||||
@echo
|
||||
@echo "Build finished."
|
||||
@echo "To view the help file:"
|
||||
@echo "# mkdir -p $$HOME/.local/share/devhelp/plotools"
|
||||
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/plotools"
|
||||
@echo "# devhelp"
|
||||
|
||||
epub:
|
||||
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
|
||||
@echo
|
||||
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
|
||||
|
||||
latex:
|
||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||
@echo
|
||||
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
|
||||
@echo "Run \`make' in that directory to run these through (pdf)latex" \
|
||||
"(use \`make latexpdf' here to do that automatically)."
|
||||
|
||||
latexpdf:
|
||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||
@echo "Running LaTeX files through pdflatex..."
|
||||
$(MAKE) -C $(BUILDDIR)/latex all-pdf
|
||||
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
|
||||
|
||||
text:
|
||||
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
|
||||
@echo
|
||||
@echo "Build finished. The text files are in $(BUILDDIR)/text."
|
||||
|
||||
man:
|
||||
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
|
||||
@echo
|
||||
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
|
||||
|
||||
texinfo:
|
||||
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
|
||||
@echo
|
||||
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
|
||||
@echo "Run \`make' in that directory to run these through makeinfo" \
|
||||
"(use \`make info' here to do that automatically)."
|
||||
|
||||
info:
|
||||
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
|
||||
@echo "Running Texinfo files through makeinfo..."
|
||||
make -C $(BUILDDIR)/texinfo info
|
||||
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
|
||||
|
||||
gettext:
|
||||
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
|
||||
@echo
|
||||
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
|
||||
|
||||
changes:
|
||||
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
|
||||
@echo
|
||||
@echo "The overview file is in $(BUILDDIR)/changes."
|
||||
|
||||
linkcheck:
|
||||
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
|
||||
@echo
|
||||
@echo "Link check complete; look for any errors in the above output " \
|
||||
"or in $(BUILDDIR)/linkcheck/output.txt."
|
||||
|
||||
doctest:
|
||||
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
|
||||
@echo "Testing of doctests in the sources finished, look at the " \
|
||||
"results in $(BUILDDIR)/doctest/output.txt."
|
2
python/converters/vasp/doc/build/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
*
|
||||
!/.gitignore
|
20
python/converters/vasp/doc/source/adv_example.cfg
Normal file
@@ -0,0 +1,20 @@
|
||||
[General]
|
||||
EFERMI = -0.6
|
||||
|
||||
[Group 1]
|
||||
SHELLS = 1 2
|
||||
EMIN = -7.6
|
||||
EMAX = 2.7
|
||||
|
||||
[Shell 1]
|
||||
# Ni shell
|
||||
IONS = 5 6 7 8
|
||||
LSHELL = 2
|
||||
RTRANSFORM =
|
||||
0.0 0.0 0.0 0.0 1.0
|
||||
0.0 0.0 1.0 0.0 0.0
|
||||
|
||||
[Shell 2]
|
||||
# Oxygen shell
|
||||
IONS = 9..20
|
||||
LSHELL = 1
|
25
python/converters/vasp/doc/source/code_struct.rst
Normal file
@@ -0,0 +1,25 @@
Code Structure
##############

.. toctree::

   vaspio
   plotools
   converter

The program consists of three main parts:

* :doc:`Import of data from VASP files </vaspio>`
* Processing of projectors according to an input config-file
* Conversion of the DFT data to a TRIQS h5-file

Import of data from VASP files is implemented in `vaspio.py`. The data
is read from VASP files and stored in objects in a raw format
(i.e. practically no processing is done at this stage).
These objects are then combined into a dictionary which can easily be
passed to other routines.

The basic workflow is as follows (a schematic sketch is given at the end of
this page):

* raw data is read from VASP files and passed to the main part
* in the main part the input config-file is read and the projectors are selected and processed accordingly
* the processed data is stored in output text files
* when the TRIQS converter is requested, the data is read from the text files and written to an h5-file in an appropriate format
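
As an illustration only, the intended top-level call sequence could look
roughly like the sketch below. `ConfigParameters` (from `pyconf.py`) and
`generate_ortho_plos()` (from `plotools.py`) are real parts of this package;
the `read_vasp_data()` helper and the file name `plo.cfg` are hypothetical
and merely stand in for the `vaspio.py` readers (`Plocar`, `Poscar`,
`Kpoints`, `Eigenval`) and for a user-supplied config-file::

    import pyconf
    import plotools

    # Parse the input config-file ([Shell ...] and [Group ...] sections)
    conf = pyconf.ConfigParameters('plo.cfg')
    conf.parse_shells()

    # Hypothetical helper: read POSCAR, IBZKPT, EIGENVAL, PLOCAR and bundle
    # the resulting vaspio objects into a single dictionary
    vasp_data = read_vasp_data('./')

    # Select, transform and orthogonalize the projectors for each group
    plos = plotools.generate_ortho_plos(conf.conf_pars, vasp_data)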
243
python/converters/vasp/doc/source/conf.py
Normal file
@@ -0,0 +1,243 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# plotools documentation build configuration file, created by
|
||||
# sphinx-quickstart on Tue Feb 3 20:25:36 2015.
|
||||
#
|
||||
# This file is execfile()d with the current directory set to its containing dir.
|
||||
#
|
||||
# Note that not all possible configuration values are present in this
|
||||
# autogenerated file.
|
||||
#
|
||||
# All configuration values have a default; values that are commented out
|
||||
# serve to show the default.
|
||||
|
||||
import sys, os
|
||||
|
||||
# If extensions (or modules to document with autodoc) are in another directory,
|
||||
# add these directories to sys.path here. If the directory is relative to the
|
||||
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||
sys.path.insert(0, os.path.abspath('../../'))
|
||||
|
||||
# -- General configuration -----------------------------------------------------
|
||||
|
||||
# If your documentation needs a minimal Sphinx version, state it here.
|
||||
#needs_sphinx = '1.0'
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be extensions
|
||||
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
|
||||
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.pngmath', 'sphinx.ext.mathjax']
|
||||
mathjax_path = 'http://cdn.mathjax.org/mathjax/latest/MathJax.js'
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
templates_path = ['_templates']
|
||||
|
||||
# The suffix of source filenames.
|
||||
source_suffix = '.rst'
|
||||
|
||||
# The encoding of source files.
|
||||
#source_encoding = 'utf-8-sig'
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = 'index'
|
||||
|
||||
# General information about the project.
|
||||
project = u'PLOVasp'
|
||||
copyright = u'2015, Oleg E. Peil'
|
||||
|
||||
# The version info for the project you're documenting, acts as replacement for
|
||||
# |version| and |release|, also used in various other places throughout the
|
||||
# built documents.
|
||||
#
|
||||
# The short X.Y version.
|
||||
version = '0.1'
|
||||
# The full version, including alpha/beta/rc tags.
|
||||
release = '0.1'
|
||||
|
||||
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||
# for a list of supported languages.
|
||||
#language = None
|
||||
|
||||
# There are two options for replacing |today|: either, you set today to some
|
||||
# non-false value, then it is used:
|
||||
#today = ''
|
||||
# Else, today_fmt is used as the format for a strftime call.
|
||||
#today_fmt = '%B %d, %Y'
|
||||
|
||||
# List of patterns, relative to source directory, that match files and
|
||||
# directories to ignore when looking for source files.
|
||||
exclude_patterns = []
|
||||
|
||||
# The reST default role (used for this markup: `text`) to use for all documents.
|
||||
#default_role = None
|
||||
|
||||
# If true, '()' will be appended to :func: etc. cross-reference text.
|
||||
#add_function_parentheses = True
|
||||
|
||||
# If true, the current module name will be prepended to all description
|
||||
# unit titles (such as .. function::).
|
||||
#add_module_names = True
|
||||
|
||||
# If true, sectionauthor and moduleauthor directives will be shown in the
|
||||
# output. They are ignored by default.
|
||||
#show_authors = False
|
||||
|
||||
# The name of the Pygments (syntax highlighting) style to use.
|
||||
pygments_style = 'sphinx'
|
||||
|
||||
# A list of ignored prefixes for module index sorting.
|
||||
#modindex_common_prefix = []
|
||||
|
||||
|
||||
# -- Options for HTML output ---------------------------------------------------
|
||||
|
||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||
# a list of builtin themes.
|
||||
html_theme = 'default'
|
||||
|
||||
# Theme options are theme-specific and customize the look and feel of a theme
|
||||
# further. For a list of options available for each theme, see the
|
||||
# documentation.
|
||||
#html_theme_options = {}
|
||||
|
||||
# Add any paths that contain custom themes here, relative to this directory.
|
||||
#html_theme_path = []
|
||||
|
||||
# The name for this set of Sphinx documents. If None, it defaults to
|
||||
# "<project> v<release> documentation".
|
||||
#html_title = None
|
||||
|
||||
# A shorter title for the navigation bar. Default is the same as html_title.
|
||||
#html_short_title = None
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top
|
||||
# of the sidebar.
|
||||
#html_logo = None
|
||||
|
||||
# The name of an image file (within the static path) to use as favicon of the
|
||||
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
|
||||
# pixels large.
|
||||
#html_favicon = None
|
||||
|
||||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||
html_static_path = ['_static']
|
||||
|
||||
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
|
||||
# using the given strftime format.
|
||||
#html_last_updated_fmt = '%b %d, %Y'
|
||||
|
||||
# If true, SmartyPants will be used to convert quotes and dashes to
|
||||
# typographically correct entities.
|
||||
#html_use_smartypants = True
|
||||
|
||||
# Custom sidebar templates, maps document names to template names.
|
||||
#html_sidebars = {}
|
||||
|
||||
# Additional templates that should be rendered to pages, maps page names to
|
||||
# template names.
|
||||
#html_additional_pages = {}
|
||||
|
||||
# If false, no module index is generated.
|
||||
#html_domain_indices = True
|
||||
|
||||
# If false, no index is generated.
|
||||
#html_use_index = True
|
||||
|
||||
# If true, the index is split into individual pages for each letter.
|
||||
#html_split_index = False
|
||||
|
||||
# If true, links to the reST sources are added to the pages.
|
||||
#html_show_sourcelink = True
|
||||
|
||||
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
|
||||
#html_show_sphinx = True
|
||||
|
||||
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
|
||||
#html_show_copyright = True
|
||||
|
||||
# If true, an OpenSearch description file will be output, and all pages will
|
||||
# contain a <link> tag referring to it. The value of this option must be the
|
||||
# base URL from which the finished HTML is served.
|
||||
#html_use_opensearch = ''
|
||||
|
||||
# This is the file name suffix for HTML files (e.g. ".xhtml").
|
||||
#html_file_suffix = None
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = 'plotoolsdoc'
|
||||
|
||||
|
||||
# -- Options for LaTeX output --------------------------------------------------
|
||||
|
||||
latex_elements = {
|
||||
# The paper size ('letterpaper' or 'a4paper').
|
||||
#'papersize': 'letterpaper',
|
||||
|
||||
# The font size ('10pt', '11pt' or '12pt').
|
||||
#'pointsize': '10pt',
|
||||
|
||||
# Additional stuff for the LaTeX preamble.
|
||||
#'preamble': '',
|
||||
}
|
||||
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
# (source start file, target name, title, author, documentclass [howto/manual]).
|
||||
latex_documents = [
|
||||
('index', 'plotools.tex', u'plotools Documentation',
|
||||
u'Oleg E. Peil', 'manual'),
|
||||
]
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top of
|
||||
# the title page.
|
||||
#latex_logo = None
|
||||
|
||||
# For "manual" documents, if this is true, then toplevel headings are parts,
|
||||
# not chapters.
|
||||
#latex_use_parts = False
|
||||
|
||||
# If true, show page references after internal links.
|
||||
#latex_show_pagerefs = False
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
#latex_show_urls = False
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
#latex_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
#latex_domain_indices = True
|
||||
|
||||
|
||||
# -- Options for manual page output --------------------------------------------
|
||||
|
||||
# One entry per manual page. List of tuples
|
||||
# (source start file, name, description, authors, manual section).
|
||||
man_pages = [
|
||||
('index', 'plotools', u'plotools Documentation',
|
||||
[u'Oleg E. Peil'], 1)
|
||||
]
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
#man_show_urls = False
|
||||
|
||||
|
||||
# -- Options for Texinfo output ------------------------------------------------
|
||||
|
||||
# Grouping the document tree into Texinfo files. List of tuples
|
||||
# (source start file, target name, title, author,
|
||||
# dir menu entry, description, category)
|
||||
texinfo_documents = [
|
||||
('index', 'plotools', u'plotools Documentation',
|
||||
u'Oleg E. Peil', 'plotools', 'One line description of project.',
|
||||
'Miscellaneous'),
|
||||
]
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
#texinfo_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
#texinfo_domain_indices = True
|
||||
|
||||
# How to display URL addresses: 'footnote', 'no', or 'inline'.
|
||||
#texinfo_show_urls = 'footnote'
|
61
python/converters/vasp/doc/source/config.rst
Normal file
@@ -0,0 +1,61 @@
|
||||
Input Config-file
|
||||
=================
|
||||
|
||||
A config-file describes subsets of PLOs that are to be generated.
|
||||
The PLOs are defined in terms of `shells` determined uniquely by an orbital
|
||||
number `l` and a set of ions (normally, of the same sort).
|
||||
The shells are further combined into `groups` such that PLOs in each group
are orthogonalized together. This allows one to create several mutually orthogonal
|
||||
subsets of PLOs. A group is characterized by a single projection energy window.
|
||||
|
||||
A config-file contains three types of sections:
|
||||
|
||||
- **[General]** : providing information applicable to all projected shells
|
||||
(e.g. Fermi level)
|
||||
- **[Shell <Ns>]** : each section like this describes a PLO shell, with the index
|
||||
`Ns` used for referencing
|
||||
- **[Group <Ng>]** : describes shell groups
|
||||
|
||||
..
|
||||
It must contain at least one group defined by a section `[PLO Group 1]`.
|
||||
There is also an optional section `[General]` containing options that concern
|
||||
all PLO groups (e.g. `k`-mesh properties).
|
||||
|
||||
The format requirements are relatively flexible. A config-file must contain
|
||||
at least one `[Shell]` section. If there is only one shell defined, it is possible
|
||||
to specify the energy window by providing parameters `EMIN`, `EMAX` (see below)
|
||||
right in this section, in which case a group
|
||||
will be created automatically and the `[Group]` section can be omitted.
|
||||
If, nevertheless, a group referencing the single shell is explicitly given,
the energy-window parameters provided in the `[Group]` section have higher priority,
and in case of a conflict with the `[Shell]` section a warning is issued.
|
||||
|
||||
An example of a config-file:
|
||||
|
||||
.. literalinclude:: adv_example.cfg
|
||||
|
||||
A config-file must contain at least one group of PLOs. Each group is described
|
||||
by a set of ions to which the projectors are applied, an atomic shell number
|
||||
(:math:`l = 0,1,2,3` for `s,p,d,f`, respectively), and an energy window defining
|
||||
the subset of bands from which the projectors are constructed.
|
||||
|
||||
In addition, one can define a real or complex transformation, which allows one
|
||||
to produce projectors in a basis set different from the original one.
|
||||
|
||||
Below, the format of the config-file is described.
|
||||
All option names are case-insensitive.
|
||||
|
||||
Required parameters
|
||||
-------------------
|
||||
|
||||
- **IONS**: ion indices as defined in POSCAR files
|
||||
- **LSHELL**: atomic shell (values 0, 1, 2, 3 for `s,p,d,f` orbitals, respectively)
|
||||
- **EMIN**, **EMAX**: the bottom and top of the energy window with respect to the Fermi level
|
||||
|
||||
Optional parameters
|
||||
-------------------
|
||||
|
||||
- **RTRANSFORM**, **CTRANSFORM**: real or complex transformation matrix used to produce projectors
|
||||
in a different basis; the number of columns is determined by the size of the atomic shell
|
||||
|
||||
|
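
As an illustration, the values can be accessed with the standard Python 2
`ConfigParser` module, on which the `pyconf.py` parser of this package is
built; the sketch below reads the `adv_example.cfg` file shown above and is
not the PLOVasp parser itself::

    import ConfigParser

    cp = ConfigParser.ConfigParser()
    cp.read('adv_example.cfg')

    # Option names are case-insensitive: ConfigParser lower-cases them
    ions = map(int, cp.get('Shell 1', 'ions').split())
    emin = float(cp.get('Group 1', 'emin'))
    shells = cp.get('Group 1', 'shells').split()

    print ions, emin, shells   # -> [5, 6, 7, 8] -7.6 ['1', '2']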
4
python/converters/vasp/doc/source/converter.rst
Normal file
@@ -0,0 +1,4 @@
|
||||
TRIQS Converter
|
||||
###############
|
||||
|
||||
The converter provides an interface between a DFT code and TRIQS solvers.
|
12
python/converters/vasp/doc/source/example.cfg
Normal file
@@ -0,0 +1,12 @@
|
||||
[General]
|
||||
|
||||
[PLO Group 1]
|
||||
|
||||
IONS = 5 6 7 8
|
||||
EMIN = -0.6
|
||||
EMAX = 2.7
|
||||
LSHELL = 2
|
||||
RTRANSFORM =
|
||||
0.0 0.0 0.0 0.0 1.0
|
||||
0.0 0.0 1.0 0.0 0.0
|
||||
|
27
python/converters/vasp/doc/source/index.rst
Normal file
@@ -0,0 +1,27 @@
|
||||
.. plotools documentation master file, created by
|
||||
sphinx-quickstart on Tue Feb 3 20:25:36 2015.
|
||||
You can adapt this file completely to your liking, but it should at least
|
||||
contain the root `toctree` directive.
|
||||
|
||||
Welcome to PLOVasp's documentation!
|
||||
====================================
|
||||
|
||||
PLOVasp documentation.
|
||||
|
||||
Contents:
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
|
||||
users_guide
|
||||
code_struct
|
||||
|
||||
|
||||
|
||||
Indices and tables
|
||||
==================
|
||||
|
||||
* :ref:`genindex`
|
||||
* :ref:`modindex`
|
||||
* :ref:`search`
|
||||
|
114
python/converters/vasp/doc/source/plotools.rst
Normal file
@@ -0,0 +1,114 @@
|
||||
.. highlight:: python
|
||||
|
||||
PLO tools
|
||||
#########
|
||||
|
||||
Introduction
|
||||
============
|
||||
|
||||
This set of tools is intended for processing of raw projectors read
|
||||
from VASP. One of the main tasks is to generate an orthonormalized subset
|
||||
of PLOs constructed according to the :doc:`config-file </config>`.
|
||||
|
||||
As an input this sub-library accepts a dictionary with parameters from
|
||||
the config-file and a dictionary containing VASP-data objects.
|
||||
Generally, the following workflow is adopted:
|
||||
|
||||
* The VASP data is checked for consistency.
|
||||
|
||||
* For each group of PLOs a corresponding subset of projectors and eigenvalues
|
||||
is selected according to the config-file.
|
||||
|
||||
* The selected subsets of projectors are orthogonalized. In general, there are different ways
in which this can be done (see :ref:`Orthogonalization<ortho>`).
|
||||
|
||||
|
||||
Initial Processing
|
||||
==================
|
||||
|
||||
Consistency check
|
||||
-----------------
|
||||
|
||||
The purpose of a consistency check is to make sure that the VASP data passed
|
||||
to PLOtools is complete and originates from the same calculation.
|
||||
In particular, the following things are supposed to be checked:
|
||||
|
||||
* the basic dimensions, such as the number of bands, number of `k`-points, etc.,
|
||||
are consistent for all data
|
||||
|
||||
* the `k`-point mesh is read from both IBZKPT and EIGENVAL, and it is worth checking
that the two sets coincide
|
||||
|
||||
* in case tetrahedron data is read from IBZKPT, the tetrahedron volume must be related
|
||||
to the total volume of the unit cell as derived from POSCAR
|
||||
|
||||
* parameters in the config-file should pass trivial checks such as that the ion
|
||||
list does not contain non-existing ions (boundary check for ion indices)
|
||||
|
||||
.. function:: check_vasp_data_consistency(conf_pars, vasp_data)
|
||||
|
||||
**Parameters**:
|
||||
|
||||
- *conf_pars* (dict) : dictionary of input parameters from conf-file
|
||||
- *vasp_data* (dict) : dictionary containing all VASP data
|
||||
|
||||
**Returns**:
|
||||
|
||||
*None*
|
||||
|
||||
**Raises**:
|
||||
|
||||
A meaningful exception indicating an inconsistency in the input data
|
||||
|
||||
|
||||
Selecting projector subsets
|
||||
---------------------------
|
||||
|
||||
.. autoclass:: plotools.ProjectorSet
|
||||
:members:
|
||||
|
||||
|
||||
The class uses the helper function `select_bands()` to select a subset of bands.
|
||||
|
||||
.. function:: select_bands(eigvals, emin, emax)
|
||||
|
||||
**Parameters**:
|
||||
|
||||
- *eigvals* (numpy.array) : array of eigenvalues
|
||||
- *emin*, *emax* (float) : energy window
|
||||
|
||||
**Returns**:
|
||||
|
||||
*ib_win*, *nb_min*, *nb_max* (numpy.array[int], int, int) :
|
||||
lowest and highest indices of the selected bands
|
||||
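
A minimal usage sketch (illustration only, with random eigenvalues; it assumes
`plotools.py` is importable from the working directory)::

    import numpy as np
    from plotools import select_bands

    # Fake Kohn-Sham eigenvalues: 10 k-points, 15 bands, 1 spin channel,
    # sorted along the band axis and given relative to the Fermi level
    eigvals = np.sort(np.random.uniform(-10.0, 10.0, (10, 15, 1)), axis=1)

    ib_win, nb_min, nb_max = select_bands(eigvals, emin=-2.0, emax=2.0)

    # ib_win[ik, isp, :] holds the first and last band index falling into
    # the window for each k-point; nb_min and nb_max are the global extremes
    print ib_win.shape, nb_min, nb_max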
|
||||
|
||||
.. _ortho:
|
||||
|
||||
Orthogonalization
|
||||
=================
|
||||
|
||||
At the second stage the selected projectors are orthogonalized (orthonormalized).
|
||||
Orthogonalization can be performed in different ways if projection is made
|
||||
on several ions or if several correlated shells per ion are considered.
|
||||
In the case of several correlated ions per unit cell (and one correlated shell per ion)
|
||||
at least two options can be considered:
|
||||
|
||||
#. Projectors are normalized for each ion separately. In this case, the corresponding
|
||||
Wannier functions for different ions are generally not orthogonal.
|
||||
|
||||
#. Projectors are normalized for all ions in the unit cell simultaneously. This
|
||||
ensures that the Wannier functions for different ions are mutually orthogonal.
|
||||
|
||||
If more than one shell is considered (say, `p` and `d` orbitals), the
|
||||
normalization can be imposed either for a combined set of shells or for each shell
|
||||
separately.
|
||||
|
||||
The way the normalization of a PLO group is done is controlled by two flags:
|
||||
|
||||
- **NORMALIZE** (True/False) : indicates whether the PLO group is normalized (True by default)
|
||||
- **NORMION** (True/False) : indicates whether the PLO group is normalized on a per-ion basis
|
||||
|
||||
If there are several PLO groups defined, the convention is the following: All PLO groups
|
||||
marked with `NORMALIZE = True` are orthogonalized with respect to each other.
|
||||
|
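
As a concrete illustration of the orthonormalization step, below is a
condensed, standalone numpy sketch of the symmetric (Löwdin) scheme used by
`orthogonalize_projector()` in `plotools.py` for a single `Nm x Nb` projector
matrix; it is not the library routine itself::

    import numpy as np

    def lowdin_orthonormalize(p_matrix):
        """Return S^{-1/2} P for a projector matrix P of shape (Nm, Nb)."""
        overlap = np.dot(p_matrix, p_matrix.conj().T)          # S = P P^+
        eig, eigv = np.linalg.eigh(overlap)
        assert np.all(eig > 0.0), "Overlap not positive definite"
        sqrt_inv = np.diag(1.0 / np.sqrt(eig))
        shalf = np.dot(eigv, np.dot(sqrt_inv, eigv.conj().T))  # S^{-1/2}
        return np.dot(shalf, p_matrix)

    # Random 5-orbital projector onto 11 bands
    p = np.random.rand(5, 11) + 1j * np.random.rand(5, 11)
    p_ortho = lowdin_orthonormalize(p)

    # The orthonormalized projectors satisfy P P^+ = 1
    print np.allclose(np.dot(p_ortho, p_ortho.conj().T), np.eye(5))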
7
python/converters/vasp/doc/source/users_guide.rst
Normal file
@@ -0,0 +1,7 @@
|
||||
User's Guide
|
||||
############
|
||||
|
||||
.. toctree::
|
||||
config
|
||||
|
||||
Here the user interface of PLOVasp is described.
|
9
python/converters/vasp/doc/source/vaspio.rst
Normal file
@@ -0,0 +1,9 @@
|
||||
.. _sec_vaspio:
|
||||
|
||||
VASP input-output
|
||||
#################
|
||||
|
||||
The following VASP files are used by PLOtools:
|
||||
* PLOCAR: raw projectors generated by VASP-PLO interface
|
||||
* EIGENVAL: Kohn-Sham eigenvalues as well as `k`-points with weights
|
||||
* IBZKPT: `k`-point data (:math:`\Gamma`)
|
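
For illustration, here is a minimal standalone sketch of extracting the basic
dimensions from EIGENVAL (the sixth line of EIGENVAL holds the number of
electrons, `k`-points and bands); the actual reader lives in `vaspio.py`::

    def read_eigenval_dims(filename='EIGENVAL'):
        """Return (nelect, nktot, nband) from the EIGENVAL header."""
        with open(filename, 'r') as f:
            lines = f.readlines()
        nelect, nktot, nband = map(int, lines[5].split())
        return nelect, nktot, nband

    # For the EIGENVAL test file included with this converter: (22, 4, 15)
    print read_eigenval_dims()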
64
python/converters/vasp/notes.txt
Normal file
@@ -0,0 +1,64 @@
|
||||
|
||||
Processing tools for PLOs generated by VASP.
|
||||
|
||||
The main purpose of this set of tools is to process raw data
|
||||
generated by VASP and to convert data to the TRIQS format. The raw data
|
||||
is read from files:
|
||||
POSCAR (atomic positions)
|
||||
EIGENVAL (eigenvalues)
|
||||
PLOCAR (non-orthogonalized PLOs and Fermi-weights)
|
||||
IBZKPT (k-points and tetrahedra)
|
||||
|
||||
An appropriate set of orthogonalized projectors is specified by
parameters given in the config-file (config-like syntax).
The config-file allows one to define several groups of projectors.
|
||||
|
||||
|
||||
Structure of PLOtools:
|
||||
|
||||
vaspio.py: reading of VASP-generated files
|
||||
|
||||
|
||||
|
||||
vaspio.py:
|
||||
All VASP data are represented by objects which contain data read
|
||||
from corresponding files. These objects will subsequently be used to
|
||||
handle the data and convert it into a more functional internal representation.
|
||||
|
||||
|
||||
Functions
|
||||
|
||||
read_lines(filename): generator yielding lines from a file <filename>
|
||||
|
||||
Classes:
|
||||
Plocar: raw PLO data from PLOCAR file;
|
||||
the data itself is read using an auxiliary C-routine 'read_plocar'
|
||||
|
||||
Poscar: structure data from POSCAR file
|
||||
|
||||
Kpoints: k-point data from IBZKPT file
|
||||
note that k-point data is also contained in EIGENVAL file;
|
||||
the two k-point sets will be checked for consistency.
|
||||
|
||||
Eigenval: Kohn-Sham eigenvalues as well as k-points with weights
|
||||
|
||||
Symmcar: symmetry operations stored by VASP into SYMMCAR file
|
||||
|
||||
|
||||
ploortho.py (or projectors.py)
|
||||
Set of routines for processing projectors. The functionality includes:
|
||||
-- selecting a proper subset of non-orthogonalized projectors from the raw VASP input
|
||||
-- transforming and orthogonalizing projectors
|
||||
-- calculating DFT density matrix and local Hamiltonian
|
||||
|
||||
General design:
|
||||
Input data: conf_pars (output of 'parse_input()'), VASP file descriptors (Eigenval, Plocar, etc.)
|
||||
|
||||
* A projector for a given k-point is described by class 'Projector'
|
||||
PLOs project onto a set of orbitals and a set of ions.
|
||||
|
||||
* A projector set is a container of 'Projector' objects.
For optimization purposes the projectors are stored in a multi-dimensional
|
||||
array. A view in terms of Projector objects is, however, possible.
|
||||
|
||||
|
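
A minimal sketch of the read_lines() generator described above (illustration
only; the real implementation belongs in vaspio.py):

    def read_lines(filename):
        # Generator yielding stripped lines from the file <filename>
        with open(filename, 'r') as f:
            for line in f:
                yield line.strip()

    # Usage: pull the header of POSCAR line by line
    it = read_lines('POSCAR')
    comment = it.next()        # "LuNiO3, low-T, P2_1/n"
    scale = float(it.next())   # 1.0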
4
python/converters/vasp/plocar_io/.gitignore
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
makefile
|
||||
*.so
|
||||
*.o
|
||||
*.pyc
|
2
python/converters/vasp/plocar_io/__init__.py
Normal file
@@ -0,0 +1,2 @@
|
||||
|
||||
|
311
python/converters/vasp/plocar_io/c_plocar_io.c
Normal file
@@ -0,0 +1,311 @@
|
||||
|
||||
#include <Python.h>
|
||||
#include <arrayobject.h>
|
||||
#include <complex.h>
|
||||
#include <string.h>
|
||||
|
||||
#define MAX_STR_LEN 512
|
||||
|
||||
static int verbose = 1;
|
||||
|
||||
typedef struct {
|
||||
int nion;
|
||||
int ns;
|
||||
int nk;
|
||||
int nb;
|
||||
int nlmmax;
|
||||
int nc_flag;
|
||||
int isdouble;
|
||||
} t_params;
|
||||
|
||||
static PyObject* io_read_plocar(PyObject *self, PyObject *args);
|
||||
PyObject* create_par_dictionary(t_params* p);
|
||||
PyArrayObject* create_plo_array(t_params* p);
|
||||
PyArrayObject* create_ferw_array(t_params* p);
|
||||
int read_arrays(FILE* fh, t_params* p, PyArrayObject* py_plo, PyArrayObject* py_ferw);
|
||||
|
||||
// Python module descriptor
|
||||
static PyMethodDef c_plocar_io[] = {
|
||||
{"read_plocar", io_read_plocar, METH_VARARGS,
|
||||
"Reads from PLOCAR and returns PLOs"},
|
||||
{NULL, NULL, 0, NULL}
|
||||
};
|
||||
|
||||
PyMODINIT_FUNC
|
||||
initc_plocar_io()
|
||||
{
|
||||
(void) Py_InitModule("c_plocar_io", c_plocar_io);
|
||||
import_array();
|
||||
}
|
||||
|
||||
/*
|
||||
Main function.
|
||||
|
||||
Reads data from the specified file (default is 'PLOCAR')
|
||||
and returns it as a Python tuple.
|
||||
|
||||
*/
|
||||
static PyObject *
|
||||
io_read_plocar(PyObject *self, PyObject *args)
|
||||
{
|
||||
PyArrayObject *py_plo = NULL;
|
||||
PyArrayObject *py_ferw = NULL;
|
||||
PyObject *par_dict = NULL;
|
||||
PyObject *ret_tuple = NULL;
|
||||
|
||||
char *fname = "PLOCAR";
|
||||
char errmsg[MAX_STR_LEN] = {"\0"};
|
||||
|
||||
FILE* fh;
|
||||
|
||||
int isdouble, prec;
|
||||
t_params params;
|
||||
|
||||
if(!PyArg_ParseTuple(args, "|s", &fname))
|
||||
return NULL;
|
||||
|
||||
if(verbose)
|
||||
printf(" Reading PLO data from file: %s\n", fname);
|
||||
|
||||
//
|
||||
// Read the header
|
||||
//
|
||||
fh = fopen(fname, "r");
|
||||
if(fh == NULL) {
|
||||
// Treat this error separately because no clean-up is necessary
|
||||
strncpy(errmsg, "Error opening PLOCAR\n", MAX_STR_LEN);
|
||||
strncat(errmsg, strerror(errno), MAX_STR_LEN);
|
||||
PyErr_SetString(PyExc_IOError, errmsg);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if(!fread(&prec, 4, 1, fh)) goto ioerror;
|
||||
if(!fread(¶ms.nion, 4, 1, fh)) goto ioerror;
|
||||
if(!fread(¶ms.ns, 4, 1, fh)) goto ioerror;
|
||||
if(!fread(¶ms.nk, 4, 1, fh)) goto ioerror;
|
||||
if(!fread(¶ms.nb, 4, 1, fh)) goto ioerror;
|
||||
if(!fread(¶ms.nlmmax, 4, 1, fh)) goto ioerror;
|
||||
if(!fread(¶ms.nc_flag, 4, 1, fh)) goto ioerror;
|
||||
|
||||
switch(prec) {
|
||||
case 8:
|
||||
params.isdouble = 1;
|
||||
if(verbose) printf(" Data in double precision\n");
|
||||
break;
|
||||
case 4:
|
||||
params.isdouble = 0;
|
||||
if(verbose) printf(" Data in single precision\n");
|
||||
break;
|
||||
default:
|
||||
PyErr_SetString(PyExc_ValueError,
|
||||
"Error reading PLOCAR: only 'prec = 4, 8' are supported");
|
||||
goto error;
|
||||
}
|
||||
|
||||
if(verbose) {
|
||||
printf(" nion: %d\n", params.nion);
|
||||
printf(" ns: %d\n", params.ns);
|
||||
printf(" nk: %d\n", params.nk);
|
||||
printf(" nb: %d\n", params.nb);
|
||||
printf(" nlmmax: %d\n", params.nlmmax);
|
||||
printf(" nc_flag: %d\n", params.nc_flag);
|
||||
}
|
||||
|
||||
//
|
||||
// Create parameter dictionary
|
||||
//
|
||||
par_dict = create_par_dictionary(&params);
|
||||
|
||||
//
|
||||
// Create PLO and Fermi-weight arrays
|
||||
//
|
||||
py_plo = create_plo_array(&params);
py_ferw = create_ferw_array(&params);
|
||||
|
||||
//
|
||||
// Read the data from file
|
||||
//
|
||||
if(read_arrays(fh, &params, py_plo, py_ferw)) goto ioerror;
|
||||
|
||||
//
|
||||
// Create return tuple
|
||||
//
|
||||
ret_tuple = PyTuple_New(3);
|
||||
|
||||
if(PyTuple_SetItem(ret_tuple, 0, par_dict) < 0) {
|
||||
PyErr_SetString(PyExc_ValueError,
|
||||
"Error adding element to the return tuple (parameter dictionary)");
|
||||
goto error;
|
||||
}
|
||||
|
||||
if(PyTuple_SetItem(ret_tuple, 1, (PyObject *)py_plo) < 0) {
|
||||
PyErr_SetString(PyExc_ValueError,
|
||||
"Error adding element to the return tuple (PLO array)");
|
||||
goto error;
|
||||
}
|
||||
|
||||
if(PyTuple_SetItem(ret_tuple, 2, (PyObject *)py_ferw) < 0) {
|
||||
PyErr_SetString(PyExc_ValueError,
|
||||
"Error adding element to the return tuple (Fermi-weight array)");
|
||||
goto error;
|
||||
}
|
||||
// Py_DECREF(par_dict);
|
||||
|
||||
fclose(fh);
|
||||
|
||||
return ret_tuple;
|
||||
|
||||
//
|
||||
// Handle IO-errors
|
||||
//
|
||||
ioerror:
|
||||
printf("Error number: %d\n", errno);
|
||||
if(feof(fh)) {
|
||||
PyErr_SetString(PyExc_IOError, "End-of-file reading PLOCAR");
|
||||
}
|
||||
else {
|
||||
strncpy(errmsg, "Error reading PLOCAR\n", MAX_STR_LEN);
|
||||
strncat(errmsg, strerror(errno), MAX_STR_LEN);
|
||||
PyErr_SetString(PyExc_IOError, errmsg);
|
||||
}
|
||||
|
||||
//
|
||||
// Clean-up after an error
|
||||
//
|
||||
error:
|
||||
fclose(fh);
|
||||
|
||||
Py_XDECREF(par_dict);
|
||||
Py_XDECREF(py_plo);
|
||||
Py_XDECREF(py_ferw);
|
||||
Py_XDECREF(ret_tuple);
|
||||
|
||||
return NULL;
|
||||
}
|
||||
|
||||
//
|
||||
// Auxiliary functions
|
||||
//
|
||||
PyObject*
|
||||
create_par_dictionary(t_params* p)
|
||||
{
|
||||
PyObject *par_dict = PyDict_New();
|
||||
PyDict_SetItemString(par_dict, "nion", PyInt_FromLong((long)p->nion));
|
||||
PyDict_SetItemString(par_dict, "ns", PyInt_FromLong((long)p->ns));
|
||||
PyDict_SetItemString(par_dict, "nk", PyInt_FromLong((long)p->nk));
|
||||
PyDict_SetItemString(par_dict, "nb", PyInt_FromLong((long)p->nb));
|
||||
PyDict_SetItemString(par_dict, "nc_flag", PyInt_FromLong((long)p->nc_flag));
|
||||
|
||||
return par_dict;
|
||||
}
|
||||
|
||||
PyArrayObject*
|
||||
create_plo_array(t_params* p)
|
||||
{
|
||||
double complex *plo;
|
||||
npy_intp *dims;
|
||||
int ntot = p->nion * p->ns * p->nk * p->nb * p->nlmmax;
|
||||
int ndim = 5;
|
||||
|
||||
plo = (double complex*)malloc(ntot * sizeof(double complex));
|
||||
memset(plo, 0, ntot * sizeof(double complex));
|
||||
dims = (npy_intp *)malloc(ndim * sizeof(npy_intp));
|
||||
|
||||
dims[0] = p->nion;
|
||||
dims[1] = p->ns;
|
||||
dims[2] = p->nk;
|
||||
dims[3] = p->nb;
|
||||
dims[4] = p->nlmmax;
|
||||
|
||||
return (PyArrayObject *)PyArray_SimpleNewFromData(ndim, dims, NPY_CDOUBLE, plo);
|
||||
}
|
||||
|
||||
PyArrayObject*
|
||||
create_ferw_array(t_params* p)
|
||||
{
|
||||
double *ferw;
|
||||
npy_intp *dims;
|
||||
int ntot = p->nion * p->ns * p->nk * p->nb;
|
||||
int ndim = 4;
|
||||
|
||||
ferw = (double *)malloc(ntot * sizeof(double));
|
||||
memset(ferw, 0, ntot * sizeof(double));
|
||||
dims = (npy_intp *)malloc(ndim * sizeof(npy_intp));
|
||||
|
||||
dims[0] = p->nion;
|
||||
dims[1] = p->ns;
|
||||
dims[2] = p->nk;
|
||||
dims[3] = p->nb;
|
||||
|
||||
return (PyArrayObject *)PyArray_SimpleNewFromData(ndim, dims, NPY_DOUBLE, ferw);
|
||||
}
|
||||
|
||||
int read_arrays(FILE* fh, t_params* p, PyArrayObject* py_plo, PyArrayObject* py_ferw)
|
||||
{
|
||||
double complex *plo;
|
||||
double *ferw;
|
||||
int strd_plo[5], strd_ferw[4];
|
||||
|
||||
int ion, ik, ib, is, ilm;
|
||||
int nlm;
|
||||
long ind1, ind2;
|
||||
float rtmp;
|
||||
float complex rbuf[50];
|
||||
double dtmp;
|
||||
double complex dbuf[50];
|
||||
|
||||
// Initialize array pointers and strides
|
||||
plo = (double complex *)py_plo->data;
|
||||
strd_plo[0] = py_plo->strides[0] / sizeof(double complex);
|
||||
strd_plo[1] = py_plo->strides[1] / sizeof(double complex);
|
||||
strd_plo[2] = py_plo->strides[2] / sizeof(double complex);
|
||||
strd_plo[3] = py_plo->strides[3] / sizeof(double complex);
|
||||
strd_plo[4] = py_plo->strides[4] / sizeof(double complex);
|
||||
|
||||
ferw = (double *)py_ferw->data;
|
||||
strd_ferw[0] = py_ferw->strides[0] / sizeof(double);
|
||||
strd_ferw[1] = py_ferw->strides[1] / sizeof(double);
|
||||
strd_ferw[2] = py_ferw->strides[2] / sizeof(double);
|
||||
strd_ferw[3] = py_ferw->strides[3] / sizeof(double);
|
||||
|
||||
ind1 = 0;
|
||||
ind2 = 0;
|
||||
for(ion = 0; ion < p->nion; ion++) {
|
||||
fread(&nlm, 4, 1, fh);
|
||||
// printf(" nlm = %d\n", nlm);
|
||||
for(is = 0; is < p->ns; is++)
|
||||
for(ik = 0; ik < p->nk; ik++)
|
||||
for(ib = 0; ib < p->nb; ib++) {
|
||||
ind1 = strd_ferw[0] * ion + strd_ferw[1] * is + strd_ferw[2] * ik + strd_ferw[3] * ib;
|
||||
ind2 = strd_plo[0] * ion + strd_plo[1] * is + strd_plo[2] * ik + strd_plo[3] * ib;
|
||||
|
||||
if(p->isdouble) {
|
||||
if(fread(&dtmp, sizeof(double), 1, fh) < 1) goto error;
|
||||
if(fread(dbuf, sizeof(double complex), nlm, fh) < nlm) goto error;
|
||||
|
||||
ferw[ind1] = dtmp;
|
||||
// printf("%5d %5d %5d %5d %lf\n", ion, is, ik, ib, dtmp);
|
||||
memcpy(plo + ind2, dbuf, nlm * sizeof(double complex));
|
||||
}
|
||||
else {
|
||||
if(fread(&rtmp, sizeof(float), 1, fh) < 1) goto error;
|
||||
if(fread(rbuf, sizeof(float complex), nlm, fh) < nlm) goto error;
|
||||
|
||||
ferw[ind1] = (double)rtmp;
|
||||
// printf("%5d %5d %5d %5d %f\n", ion, is, ik, ib, rtmp);
|
||||
// In this case destination and source arrays are not compatible,
|
||||
// we have to copy element-wise
|
||||
for(ilm = 0; ilm < nlm; ilm++) {
|
||||
plo[ind2 + ilm] = (double complex)rbuf[ilm];
|
||||
// printf("%5d %5d %f\n", ilm, ind2 + ilm, rbuf[ilm]);
|
||||
}
|
||||
} // if p->isdouble
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
return 0;
|
||||
|
||||
error:
|
||||
return -1;
|
||||
}
|
9
python/converters/vasp/plocar_io/makefile.darwin
Normal file
@@ -0,0 +1,9 @@
|
||||
|
||||
CC=gcc
|
||||
INC_FLAGS=-I/opt/local/Library/Frameworks/Python.framework/Versions/Current/include/python2.7 -I/opt/local/Library/Frameworks/Python.framework/Versions/Current/lib/python2.7/site-packages/numpy/core/include/numpy
|
||||
LIB_FLAGS=-L/opt/local/Library/Frameworks/Python.framework/Versions/Current/lib/python2.7
|
||||
|
||||
c_plocar_io.so: c_plocar_io.c
|
||||
$(CC) $< -fPIC -shared -o $@ $(INC_FLAGS) $(LIB_FLAGS) -lpython2.7
|
||||
cp $@ ..
|
||||
|
215
python/converters/vasp/plotools.py
Normal file
@@ -0,0 +1,215 @@
|
||||
|
||||
import numpy as np
|
||||
|
||||
class Projector:
|
||||
"""
|
||||
Class describing a local-orbital projector.
|
||||
"""
|
||||
|
||||
def __init__(self, matrix, ib1=1, ib2=None, nion=1):
|
||||
self.p_matrix = matrix.astype(np.complex128)
|
||||
self.norb, self.nb = matrix.shape
|
||||
self.nion = nion
|
||||
self.ib1 = ib1 - 1
|
||||
if ib2 is not None:
|
||||
self.ib2 = ib2 - 1
|
||||
else:
|
||||
self.ib2 = self.nb - 1
|
||||
|
||||
def project_up(self, mat):
|
||||
return np.dot(self.p_matrix.conj().T, np.dot(mat, self.p_matrix))
|
||||
|
||||
def project_down(self, mat):
|
||||
assert mat.shape == (self.nb, self.nb), " Matrix must match projector in size"
|
||||
return np.dot(self.p_matrix, np.dot(mat, self.p_matrix.conj().T))
|
||||
|
||||
def orthogonalize(self):
|
||||
"""
|
||||
Orthogonalizes a projector.
|
||||
Returns an overlap matrix and its eigenvalues for initial projectors.
|
||||
"""
|
||||
self.p_matrix, overlap, over_eig = orthogonalize_projector(self.p_matrix)
|
||||
|
||||
return (overlap, over_eig)
|
||||
|
||||
################################################################################
|
||||
# orthogonalize_projector()
|
||||
################################################################################
|
||||
def orthogonalize_projector(p_matrix):
|
||||
"""
|
||||
Orthogonalizes a projector defined by a rectangular matrix `p_matrix`.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
|
||||
p_matrix (numpy.array[complex]) : matrix `Nm x Nb`, where `Nm` is
|
||||
the number of orbitals, `Nb` number of bands
|
||||
|
||||
Returns
|
||||
-------
|
||||
|
||||
Orthogonalized projector matrix, initial overlap matrix and its eigenvalues.
|
||||
"""
|
||||
|
||||
overlap = np.dot(p_matrix, p_matrix.conj().T)
|
||||
eig, eigv = np.linalg.eigh(overlap)
|
||||
assert np.all(eig > 0.0), (" Negative eigenvalues of the overlap matrix:"
|
||||
"projectors are ill-defined")
|
||||
sqrt_eig = np.diag(1.0 / np.sqrt(eig))
|
||||
shalf = np.dot(eigv, np.dot(sqrt_eig, eigv.conj().T))
|
||||
p_ortho = np.dot(shalf, p_matrix)
|
||||
|
||||
return (p_ortho, overlap, eig)
|
||||
|
||||
################################################################################
|
||||
# check_vasp_data_consistency()
|
||||
################################################################################
|
||||
def check_vasp_data_consistency(vasp_data):
|
||||
"""
|
||||
Check the consistency of the VASP data.
|
||||
"""
|
||||
pass
|
||||
|
||||
################################################################################
|
||||
# select_bands()
|
||||
################################################################################
|
||||
def select_bands(eigvals, emin, emax):
|
||||
"""
|
||||
Select a subset of bands lying within a given energy window.
|
||||
The band energies are assumed to be sorted in an ascending order.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
|
||||
eigvals (numpy.array) : all eigenvalues
|
||||
emin, emax (float) : energy window
|
||||
|
||||
Returns
|
||||
-------
|
||||
|
||||
ib_win, nb_min, nb_max :
|
||||
"""
|
||||
nk, nband, ns_band = eigvals.shape
|
||||
ib_win = np.zeros((nk, ns_band, 2), dtype=np.int32)
|
||||
|
||||
nb_min = 10000000
|
||||
nb_max = 0
|
||||
for isp in xrange(ns_band):
|
||||
for ik in xrange(nk):
|
||||
for ib in xrange(nband):
|
||||
en = eigvals[ik, ib, isp]
|
||||
if en >= emin:
|
||||
break
|
||||
ib1 = ib
|
||||
ib2 = ib1
for ib in xrange(ib1, nband):
    en = eigvals[ik, ib, isp]
    if en > emax:
        break
    # ib2 tracks the highest band still inside the energy window
    ib2 = ib
|
||||
|
||||
ib_win[ik, isp, 0] = ib1
|
||||
ib_win[ik, isp, 1] = ib2
|
||||
|
||||
nb_min = min(nb_min, ib1)
|
||||
nb_max = max(nb_max, ib2)
|
||||
|
||||
return ib_win, nb_min, nb_max
|
||||
|
||||
################################################################################
|
||||
#
|
||||
# class ProjectorSet
|
||||
#
|
||||
################################################################################
|
||||
class ProjectorSet:
|
||||
"""
|
||||
Container of projectors defined within a certain energy window.
|
||||
|
||||
The constructor selects a subset of projectors according to
|
||||
the parameters from the config-file (passed in `pars`).
|
||||
|
||||
Parameters:
|
||||
|
||||
- pars (dict) : dictionary of parameters from the config-file for a given PLO group
|
||||
- proj_raw (numpy.array) : array of raw projectors
|
||||
- eigvals (numpy.array) : array of KS eigenvalues
|
||||
|
||||
"""
|
||||
# def __init__(self, proj_set, nb_min, nb_max, ib_win):
|
||||
# """
|
||||
# Constructor.
|
||||
#
|
||||
# Parameters
|
||||
# ----------
|
||||
#
|
||||
# proj_set (numpy.array) : projector array
|
||||
# nb_min (int) : the lowest absolute band index
|
||||
# nb_max (int) : the lowest absolute band index
|
||||
# ib_win (numpy.array((nk, ns, 2), dtype=int)) : the lowest and highest band indices
|
||||
# for a given `k`-point
|
||||
# """
|
||||
# self.proj_set = proj_set
|
||||
# self.nb_min = nb_min
|
||||
# self.nb_max = nb_max
|
||||
# self.ib_win = ib_win
|
||||
|
||||
#################################################################################
|
||||
# __init__()
|
||||
#################################################################################
|
||||
def __init__(self, pars, proj_raw, eigvals):
|
||||
"""
|
||||
Constructor
|
||||
"""
|
||||
ns = proj_raw.shape[1]
|
||||
nk, nband, ns_band = eigvals.shape
|
||||
|
||||
self.lorb = pars['lshell']
|
||||
self.lm_l = range(self.lorb**2, (self.lorb+1)**2)
|
||||
nlm = len(self.lm_l)
|
||||
|
||||
self.emin = pars['emin']
|
||||
self.emax = pars['emax']
|
||||
|
||||
# Determine the minimum and maximum band numbers
|
||||
ib_win, nb_min, nb_max = select_bands(eigvals, self.emin, self.emax)
|
||||
self.ib_win = ib_win
|
||||
self.nb_min = nb_min
|
||||
self.nb_max = nb_max
|
||||
|
||||
# Set the dimensions of the array
|
||||
nb_win = self.nb_max - self.nb_min + 1
|
||||
nion_sel = pars['ion_list'].shape[0]
|
||||
|
||||
self.proj_set = np.zeros((nion_sel, ns, nk, nb_win, nlm), dtype=np.complex128)
|
||||
# Select projectors for a given energy window
|
||||
for isp in xrange(ns):
|
||||
for ik in xrange(nk):
|
||||
# TODO: for non-collinear case something else should be done here
|
||||
is_b = min(isp, ns_band - 1)  # clamp to the last spin channel present in eigvals
|
||||
ib1 = self.ib_win[ik, is_b, 0]
|
||||
ib2 = self.ib_win[ik, is_b, 1] + 1
|
||||
ib1_win = ib1 - self.nb_min
|
||||
ib2_win = ib2 - self.nb_min
|
||||
for ion, ion_sel in enumerate(pars['ion_list']):
|
||||
self.proj_set[ion, isp, ik, ib1_win:ib2_win, :] = proj_raw[ion_sel, isp, ik, ib1:ib2, self.lm_l]
|
||||
|
||||
|
||||
def generate_ortho_plos(conf_pars, vasp_data):
|
||||
"""
|
||||
Parameters
|
||||
----------
|
||||
|
||||
conf_pars (dict) : dictionary of input parameters (from conf-file)
|
||||
vasp_data (dict) : dictionary of object representing various VASP files
|
||||
"""
|
||||
|
||||
check_vasp_data_consistency(vasp_data)
|
||||
|
||||
proj_raw = vasp_data['plocar'].plo
|
||||
efermi = vasp_data['doscar'].efermi
|
||||
# eigvals(nktot, nband, ispin) are defined with respect to the Fermi level
|
||||
eigvals = vasp_data['eigenval'].eigs - efermi
|
||||
|
||||
proj_set_l = []
|
||||
for pars in conf_pars:
|
||||
proj_set = ProjectorSet(pars, proj_raw, eigvals)
|
||||
|
331
python/converters/vasp/pyconf.py
Normal file
@@ -0,0 +1,331 @@
|
||||
|
||||
import ConfigParser
|
||||
import numpy as np
|
||||
import re
|
||||
import sys
|
||||
import vaspio
|
||||
|
||||
def issue_warning(message):
|
||||
"""
|
||||
Issues a warning.
|
||||
"""
|
||||
print
|
||||
print " !!! WARNING !!!: " + message
|
||||
print
|
||||
################################################################################
|
||||
################################################################################
|
||||
#
|
||||
# class ConfigParameters
|
||||
#
|
||||
################################################################################
|
||||
################################################################################
|
||||
class ConfigParameters:
|
||||
r"""
|
||||
Class responsible for parsing of the input config-file.
|
||||
|
||||
Parameters:
|
||||
|
||||
- *sh_required*, *sh_optional* : required and optional parameters of shells
|
||||
- *gr_required*, *gr_optional* : required and optional parameters of groups
|
||||
|
||||
The dictionary contains a mapping of conf-file keywords to
|
||||
a pair of objects:
|
||||
|
||||
1. internal name of a parameter
|
||||
2. function used to convert an input string into data for a given parameter
|
||||
"""
|
||||
self.sh_required = {
|
||||
'ions': ('ion_list', self.parse_string_ion_list),
|
||||
'lshell': ('lshell', int)}
|
||||
|
||||
self.sh_optional = {
|
||||
'rtransform': ('tmatrix', lambda s: self.parse_tmatrix(s, real=True)),
|
||||
'ctransform': ('tmatrix', lambda s: self.parse_tmatrix(s, real=False))}
|
||||
|
||||
self.gr_required = {
|
||||
'emin': ('emin', float),
|
||||
'emax': ('emax', float)}
|
||||
|
||||
self.gr_optional = {
|
||||
'normalize' : ('normalize', self.parse_string_logical),
'normion' : ('normion', self.parse_string_logical)}
|
||||
|
||||
|
||||
################################################################################
|
||||
#
|
||||
# __init__()
|
||||
#
|
||||
################################################################################
|
||||
def __init__(self, input_filename, verbosity=1):
|
||||
self.verbosity = verbosity
|
||||
self.cp = ConfigParser.ConfigParser()
|
||||
self.cp.readfp(open(input_filename, 'r'))
|
||||
|
||||
self.conf_pars = {}
|
||||
|
||||
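    # Illustrative example of the mapping above (hypothetical input): a conf-file
    # line such as "lshell = 2" is looked up in sh_required, stored under the
    # internal name 'lshell' and converted with int(); see parse_parameter_set()
    # below for how the pairs are applied.
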
    #
    # Special parsers
    #
    ################################################################################
    #
    # parse_string_ion_list()
    #
    ################################################################################
    def parse_string_ion_list(self, par_str):
        """
        The ion list accepts two formats:
          1). A list of ion indices according to POSCAR.
          2). An element name, in which case all ions with
              this name are included.

        The second option requires input from the POSCAR file.
        """
        try:
            l_tmp = map(int, par_str.split())
            # Subtract 1 so that VASP indices (starting with 1) are converted
            # to Python indices (starting with 0)
            ion_list = np.array(l_tmp) - 1
        except ValueError:
            err_msg = "Only the option with a list of ion indices is implemented"
            raise NotImplementedError(err_msg)

        return ion_list

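    # Example (hypothetical input): parse_string_ion_list("5 6 7 8")
    # returns np.array([4, 5, 6, 7]), i.e. zero-based Python indices.
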
    ################################################################################
    #
    # parse_string_logical()
    #
    ################################################################################
    def parse_string_logical(self, par_str):
        """
        Logical parameters are given by a string 'True' or 'False'
        (case does not matter). In fact, only the first symbol matters, so that
        one can also write 'T' or 'F'.
        """
        first_char = par_str[0].lower()
        assert first_char in 'tf', "Logical parameters should be given by either 'True' or 'False'"
        return first_char == 't'

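    # Example (hypothetical input): parse_string_logical('True') and
    # parse_string_logical('t') both return True; parse_string_logical('F')
    # returns False.
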
    ################################################################################
    #
    # parse_parameter_set()
    #
    ################################################################################
    def parse_parameter_set(self, section, param_set, exception=False):
        """
        Parses a required or optional parameter set from a section.
        For required parameters `exception=True` must be set.
        """
        parsed = {}
        for par in param_set.keys():
            try:
                par_str = self.cp.get(section, par)
            except ConfigParser.NoOptionError:
                if exception:
                    message = "Required parameter '%s' not found in section [%s]"%(par, section)
                    raise Exception(message)
                else:
                    continue

            if self.verbosity > 0:
                print " %s = %s"%(par, par_str)

            key = param_set[par][0]
            parse_fun = param_set[par][1]
            parsed[key] = parse_fun(par_str)

        return parsed

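    # Illustrative call (sketch, assumed section name): for a section 'shell 1',
    #   parsed = self.parse_parameter_set('shell 1', self.sh_required, exception=True)
    # would return something like {'ion_list': array([4, 5, 6, 7]), 'lshell': 2},
    # aborting with an error message if a required keyword is missing.
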
    ################################################################################
    #
    # parse_shells()
    #
    ################################################################################
    def parse_shells(self):
        """
        Parses all [Shell] sections.
        """
        # Find all [Shell] sections
        # (note that ConfigParser transforms all names to lower case)
        sections = self.cp.sections()

        sh_patt = 'shell *([0-9]*)'
        ismatch = lambda s: not re.match(sh_patt, s) is None
        sec_shells = filter(ismatch, sections)

        self.nshells = len(sec_shells)
        assert self.nshells > 0, "No projected shells found in the input file"

        if self.verbosity > 0:
            print
            if self.nshells > 1:
                print " Found %i projected shells"%(self.nshells)
            else:
                print " Found 1 projected shell"

        # Get shell indices
        get_ind = lambda s: int(re.match(sh_patt, s).groups()[0])
        try:
            sh_inds = map(get_ind, sec_shells)
        except ValueError:
            raise ValueError("Failed to extract shell indices from a list: %s"%(sec_shells))

        self.sh_sections = {ind: sec for ind, sec in zip(sh_inds, sec_shells)}

        # Check that all indices are unique
        assert len(sh_inds) == len(set(sh_inds)), "There must be no two shells with the same index!"

        # Ideally, indices should run from 1 to <nshells>.
        # If it's not the case, issue a warning
        sh_inds.sort()
        if sh_inds != range(1, len(sh_inds) + 1):
            issue_warning("Shell indices are not uniform or not starting from 1. "
                          "This might be an indication of an incorrect setup.")

        # Parse shell parameters
        self.shells = {}
        for ind in sh_inds:
            self.shells[ind] = {}
            section = self.sh_sections[ind]

            # Shell required parameters
            if self.verbosity > 0:
                print
                print " Required shell parameters:"
            parsed = self.parse_parameter_set(section, self.sh_required, exception=True)
            self.shells[ind].update(parsed)

            # Shell optional parameters
            if self.verbosity > 0:
                print
                print " Optional shell parameters:"
            parsed = self.parse_parameter_set(section, self.sh_optional, exception=False)
            self.shells[ind].update(parsed)

            # Group required parameters
            # Must be given if no group is explicitly specified.
            # If in conflict with a [Group] section, the latter has priority.
            if self.verbosity > 0:
                print
                print " Required group parameters:"
            parsed = self.parse_parameter_set(section, self.gr_required, exception=False)
            self.shells[ind].update(parsed)

            # Group optional parameters
            if self.verbosity > 0:
                print
                print " Optional group parameters:"
            parsed = self.parse_parameter_set(section, self.gr_optional, exception=False)
            self.shells[ind].update(parsed)

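    # Sketch of a conf-file fragment matched by parse_shells() (assumed layout;
    # keyword names correspond to sh_required/gr_required above):
    #
    #   [Shell 1]
    #   LSHELL = 2
    #   IONS = 5 6 7 8
    #   EMIN = -0.6
    #   EMAX = 2.7
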
    ################################################################################
    #
    # parse_groups()
    #
    ################################################################################
    def parse_groups(self):
        """
        Parses [Group] sections.
        """
        # Find group sections
        sections = self.cp.sections()

        gr_patt = 'group *([0-9]*)'
        ismatch = lambda s: not re.match(gr_patt, s) is None
        sec_groups = filter(ismatch, sections)

        self.ngroups = len(sec_groups)

        # Special case: no groups are defined
        if self.ngroups == 0:
            # Check that 'nshells = 1'
            assert self.nshells == 1, "At least one group must be defined if there is more than one shell."

            # Create a single group taking the group information from the [Shell] section
            self.groups = [{}]
            # Check that the single '[Shell]' section contains enough information
            # and move it to the `groups` dictionary
            ind = self.sh_sections.keys()[0]
            try:
                for par in self.gr_required.keys():
                    key = self.gr_required[par][0]
                    value = self.shells[ind].pop(key)
                    self.groups[0][key] = value
            except KeyError:
                message = "One [Shell] section is specified but no explicit [Group] section is provided."
                message += " In this case the [Shell] section must contain all required group information.\n"
                message += " Required parameters are: %s"%(self.gr_required.keys())
                raise KeyError(message)

            # Do the same for optional group parameters, but do not raise an exception
            for par in self.gr_optional.keys():
                try:
                    key = self.gr_optional[par][0]
                    value = self.shells[ind].pop(key)
                    self.groups[0][key] = value
                except KeyError:
                    continue

            self.groups[0].update({'shells': [self.shells[ind]]})

    ################################################################################
    #
    # Main parser function
    # parse_input()
    #
    ################################################################################
    def parse_input(self):
        """
        Parses the input conf-file.
        """
        self.parse_shells()
        self.parse_groups()


        # Output list of dictionaries
        output_pars = [{} for isec in xrange(nsections)]
        for isec, section in enumerate(sections):
            print "Section: %s"%(section)
            for par in required.keys():
                try:
                    par_str = cp.get(section, par)
                except ConfigParser.NoOptionError:
                    raise SystemExit("*** Error: Required entry '%s' not found in the input file"%(par))

                print " %s: %s"%(par, par_str)
                key = required[par][0]
                parse_fun = required[par][1]
                output_pars[isec][key] = parse_fun(par_str)

        print output_pars
        print cp.get(section, 'rtransform').strip().split('\n')

        return output_pars

if __name__ == '__main__':
    narg = len(sys.argv)
    if narg < 2:
        raise SystemExit(" Usage: python pyconf.py <conf-file> [<path-to-vasp-calculation>]")
    else:
        filename = sys.argv[1]
        if narg > 2:
            vasp_dir = sys.argv[2]
            if vasp_dir[-1] != '/':
                vasp_dir += '/'
        else:
            vasp_dir = './'


# plocar = vaspio.Plocar()
# plocar.from_file(vasp_dir)
# poscar = vaspio.Poscar()
# poscar.from_file(vasp_dir)
# kpoints = vaspio.Kpoints()
# kpoints.from_file(vasp_dir)
    eigenval = vaspio.Eigenval()
    eigenval.from_file(vasp_dir)
    doscar = vaspio.Doscar()
    doscar.from_file(vasp_dir)
# pars = parse_input(filename)
13
python/converters/vasp/test.cfg
Normal file
@ -0,0 +1,13 @@
[General]

[PLO Group 1]

IONS = 5 6 7 8
#IONS = Ni
EMIN = -0.6
EMAX = 2.7
LSHELL = 2
RTRANSFORM =
    0.0 0.0 0.0 0.0 1.0
    0.0 0.0 1.0 0.0 0.0
406
python/converters/vasp/vaspio.py
Normal file
@ -0,0 +1,406 @@

import re   # used by read_symmcar() below
import numpy as np
import plocar_io.c_plocar_io as c_plocar_io

def read_lines(filename):
    r"""
    Generator of lines for a file

    Parameters
    ----------

    filename (str) : name of the file
    """
    with open(filename, 'r') as f:
        for line in f:
            yield line

class Plocar:
    r"""
    Class containing raw PLO data from VASP.

    Properties
    ----------

    plo (numpy.array((nion, ns, nk, nb, nlmmax))) : raw projectors
    """

    def from_file(self, vasp_dir='./', plocar_filename='PLOCAR'):
        r"""
        Reads non-normalized projectors from a binary file (`PLOCAR' by default)
        generated by the VASP PLO interface.

        Parameters
        ----------

        vasp_dir (str) : path to the VASP working directory [default = `./']
        plocar_filename (str) : filename [default = `PLOCAR']
        """
        # Add a slash to the path name if necessary
        if vasp_dir[-1] != '/':
            vasp_dir += '/'

        self.params, self.plo, self.ferw = c_plocar_io.read_plocar(vasp_dir + plocar_filename)

class Poscar:
    """
    Class containing POSCAR data from VASP.

    Properties
    ----------

    nq (int) : total number of ions
    ntypes (int) : number of ion types
    nions ([int]) : a list with the number of ions of each type
    a_brav (numpy.array((3, 3), dtype=float)) : lattice vectors
    q_types ([numpy.array((nions, 3), dtype=float)]) : a list of
        arrays each containing fractional coordinates of ions of a given type
    """
    def __init__(self):
        self.q_cart = None
        self.b_rec = None

    def from_file(self, vasp_dir='./', poscar_filename='POSCAR'):
        """
        Reads POSCAR and stores the data in the class properties.

        Parameters
        ----------

        vasp_dir (str) : path to the VASP working directory [default = `./']
        poscar_filename (str) : filename [default = `POSCAR']
        """
        # Convenience local function
        def readline_remove_comments():
            return f.next().split('!')[0].strip()

        # Add a slash to the path name if necessary
        if vasp_dir[-1] != '/':
            vasp_dir += '/'

        f = read_lines(vasp_dir + poscar_filename)
        # Comment line
        comment = f.next().rstrip()
        print " Found POSCAR, title line: %s"%(comment)

        # Read scale
        sline = readline_remove_comments()
        ascale = float(sline.split()[0])
        # Read lattice vectors
        self.a_brav = np.zeros((3, 3))
        for ia in xrange(3):
            sline = readline_remove_comments()
            self.a_brav[ia, :] = map(float, sline.split())
        # Negative scale means that it is a volume scale
        if ascale < 0:
            vscale = -ascale
            vol = np.linalg.det(self.a_brav)
            ascale = (vscale / vol)**(1.0/3)

        self.a_brav *= ascale

        # Depending on the version of VASP there could be
        # an extra line with element names
        sline = readline_remove_comments()
        try:
            # Old v4.6 format: no element names
            self.nions = map(int, sline.split())
            self.el_names = ['El%i'%(i) for i in xrange(len(self.nions))]
        except ValueError:
            # New v5.x format: read element names first
            self.el_names = sline.split()
            sline = readline_remove_comments()
            self.nions = map(int, sline.split())

        # Set the number of atom sorts (types) and the total
        # number of atoms in the unit cell
        self.ntypes = len(self.nions)
        self.nq = sum(self.nions)

        # Check for the line 'Selective dynamics' (and ignore it)
        sline = readline_remove_comments()
        if sline[0].lower() == 's':
            sline = readline_remove_comments()

        # Check whether coordinates are Cartesian or fractional
        cartesian = (sline[0].lower() in 'ck')
        if cartesian:
            brec = np.linalg.inv(self.a_brav.T)

        # Read atomic positions
        self.q_types = []
        for it in xrange(self.ntypes):
            q_at_it = np.zeros((self.nions[it], 3))
            for iq in xrange(self.nions[it]):
                sline = readline_remove_comments()
                qcoord = map(float, sline.split()[:3])
                if cartesian:
                    qcoord = np.dot(brec, qcoord)
                q_at_it[iq, :] = qcoord

            self.q_types.append(q_at_it)

        print " Total number of ions:", self.nq
        print " Number of types:", self.ntypes
        print " Number of ions for each type:", self.nions

# print
# print " Coords:"
# for it in xrange(ntypes):
# print " Element:", el_names[it]
# print q_at[it]

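# Illustrative usage sketch (assumed, for orientation only):
#   poscar = Poscar()
#   poscar.from_file(vasp_dir='./')
# after which poscar.a_brav holds the scaled lattice vectors and
# poscar.q_types the fractional ion coordinates grouped by type.
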
################################################################
#
# Kpoints
#
################################################################
class Kpoints:
    """
    Class describing k-points and optionally tetrahedra.

    Properties
    ----------

    nktot (int) : total number of k-points in the IBZ
    kpts (numpy.array((nktot, 3), dtype=float)) : k-point vectors (fractional coordinates)
    ntet (int) : total number of k-point tetrahedra
    itet (numpy.array((ntet, 5), dtype=int)) : array of tetrahedra
    volt (float) : volume of a tetrahedron (the k-grid is assumed to
        be uniform)
    """
    #
    # Reads IBZKPT file
    #
    def from_file(self, vasp_dir='./', ibz_filename='IBZKPT'):
        """
        Reads from IBZKPT: k-points and optionally
        tetrahedra topology (if present).

        Parameters
        ----------

        vasp_dir (str) : path to the VASP working directory [default = `./']
        ibz_filename (str) : filename [default = `IBZKPT']
        """

        # Add a slash to the path name if necessary
        if vasp_dir[-1] != '/':
            vasp_dir += '/'

        ibz_file = read_lines(vasp_dir + ibz_filename)

        # Skip comment line
        line = ibz_file.next()
        # Number of k-points
        line = ibz_file.next()
        self.nktot = int(line.strip().split()[0])

        print
        print " {0:>26} {1:d}".format("Total number of k-points:", self.nktot)

        self.kpts = np.zeros((self.nktot, 3))

        # Skip comment line
        line = ibz_file.next()
        for ik in xrange(self.nktot):
            line = ibz_file.next()
            self.kpts[ik, :] = map(float, line.strip().split()[:3])

        # Attempt to read tetrahedra
        # Skip comment line ("Tetrahedra")
        try:
            line = ibz_file.next()

            # Number of tetrahedra and volume = 1/(6*nkx*nky*nkz)
            line = ibz_file.next()
            sline = line.split()
            self.ntet = int(sline[0])
            self.volt = float(sline[1])

            print " {0:>26} {1:d}".format("Total number of tetrahedra:", self.ntet)

            # Traditionally, itet[it, 0] contains the multiplicity
            self.itet = np.zeros((self.ntet, 5), dtype=int)
            for it in xrange(self.ntet):
                line = ibz_file.next()
                self.itet[it, :] = map(int, line.split()[:5])
        except (IOError, ValueError, StopIteration):
            print " Error reading tetrahedra. No tetrahedron data is used"
            self.ntet = 0

# data = { 'nktot': nktot,
#          'kpts': kpts,
#          'ntet': ntet,
#          'itet': itet,
#          'volt': volt }
#
# return data

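# Illustrative usage sketch (assumed):
#   kpoints = Kpoints()
#   kpoints.from_file(vasp_dir='./')
# kpoints.kpts then holds the fractional k-vectors; if tetrahedron data was
# found, itet[it, 0] is the multiplicity and itet[it, 1:5] the indices of the
# four corner k-points, with volt the common tetrahedron volume.
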
################################################################
#
# Eigenval
#
################################################################
class Eigenval:
    """
    Class containing Kohn-Sham eigenvalue data from VASP (EIGENVAL file).
    """
    def from_file(self, vasp_dir='./', eig_filename='EIGENVAL'):
        """
        Reads eigenvalues from EIGENVAL. Note that the file also
        contains the k-points with weights. They are stored as well and
        later used to check the consistency of the files read.
        """

        # Add a slash to the path name if necessary
        if vasp_dir[-1] != '/':
            vasp_dir += '/'

        f = read_lines(vasp_dir + eig_filename)

        # First line: only the first and the last number out of four
        # are used; these are 'nions' and 'ispin'
        sline = f.next().split()
        self.nq = int(sline[0])
        self.ispin = int(sline[3])

        # Second line: cell volume and lengths of lattice vectors (skip)
        sline = f.next()

        # Third line: temperature (skip)
        sline = f.next()

        # Fourth and fifth line: not used
        sline = f.next()
        sline = f.next()

        # Sixth line: NELECT, NKTOT, NBTOT
        sline = f.next().split()
        self.nelect = int(sline[0])
        self.nktot = int(sline[1])
        self.nband = int(sline[2])

        # Set of eigenvalues and k-points
        self.kpts = np.zeros((self.nktot, 3))
        self.kwghts = np.zeros((self.nktot,))
        self.eigs = np.zeros((self.nktot, self.nband, self.ispin))

        for ik in xrange(self.nktot):
            sline = f.next()          # Empty line
            sline = f.next().split()  # k-point info
            tmp = map(float, sline)
            self.kpts[ik, :] = tmp[:3]
            self.kwghts[ik] = tmp[3]

            for ib in xrange(self.nband):
                sline = f.next().split()
                tmp = map(float, sline[1:self.ispin+1])
                self.eigs[ik, ib, :] = tmp[:]

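# Illustrative usage sketch (assumed):
#   ev = Eigenval()
#   ev.from_file(vasp_dir='./')
#   # ev.eigs has shape (ev.nktot, ev.nband, ev.ispin) on return,
#   # with ev.kpts and ev.kwghts holding the k-points and their weights.
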
################################################################
#
# Doscar
#
################################################################
class Doscar:
    """
    Class containing selected data from DOSCAR.
    """
    def from_file(self, vasp_dir='./', dos_filename='DOSCAR'):
        """
        Reads only E_Fermi from DOSCAR.
        """

        # Add a slash to the path name if necessary
        if vasp_dir[-1] != '/':
            vasp_dir += '/'

        f = read_lines(vasp_dir + dos_filename)

        # Skip the first 5 lines
        for _ in xrange(5):
            sline = f.next()

        # Sixth line: EMAX, EMIN, NEDOS, EFERMI, 1.0
        sline = f.next().split()
        self.efermi = float(sline[3])

################################################################
#
# Reads SYMMCAR
#
################################################################
def read_symmcar(vasp_dir, symm_filename='SYMMCAR'):
    """
    Reads SYMMCAR.
    """
    # Shorthand for simple parsing
    def extract_int_par(parname):
        return int(re.findall(parname + '\s*=\s*(\d+)', line)[-1])

    # Add a slash to the path name if necessary
    if vasp_dir[-1] != '/':
        vasp_dir += '/'

    symmcar_exist = False
    sym_file = read_lines(vasp_dir + symm_filename)
    line = sym_file.next()
    nrot = extract_int_par('NROT')

    line = sym_file.next()
    ntrans = extract_int_par('NPCELL')
    # Lmax
    line = sym_file.next()
    lmax = extract_int_par('LMAX')
    mmax = 2 * lmax + 1
    # Nion
    line = sym_file.next()
    nion = extract_int_par('NION')

    print " {0:>26} {1:d}".format("Number of rotations:", nrot)
    print " {0:>26} {1:d}".format("Number of translations:", ntrans)
    print " {0:>26} {1:d}".format("Number of ions:", nion)
    print " {0:>26} {1:d}".format("L_max:", lmax)

    rot_mats = np.zeros((nrot, lmax+1, mmax, mmax))
    rot_map = np.zeros((nrot, ntrans, nion), dtype=np.int32)

    for irot in xrange(nrot):
        # Empty line
        line = sym_file.next()
        # IROT index (skip it)
        line = sym_file.next()
        # ISYMOP matrix (can also be skipped)
        line = sym_file.next()
        line = sym_file.next()
        line = sym_file.next()

        # Skip comment " Permutation map..."
        line = sym_file.next()
        # Permutations (in chunks of 20 indices per line)
        for it in xrange(ntrans):
            for ibl in xrange((nion - 1) / 20 + 1):
                i1 = ibl * 20
                i2 = (ibl + 1) * 20
                line = sym_file.next()
                rot_map[irot, it, i1:i2] = map(int, line.split())

        for l in xrange(lmax + 1):
            mmax = 2 * l + 1
            # Comment: "L = ..."
            line = sym_file.next()
            for m in xrange(mmax):
                line = sym_file.next()
                rot_mats[irot, l, m, :mmax] = map(float, line.split()[:mmax])

    data = { 'nrot': nrot, 'ntrans': ntrans,
             'lmax': lmax, 'nion': nion,
             'sym_rots': rot_mats, 'perm_map': rot_map }

    return data
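
# Worked example of the permutation-block arithmetic above (hypothetical
# numbers): for nion = 22 there are (22 - 1) / 20 + 1 = 2 lines per
# translation, carrying 20 and 2 ion indices respectively.
#
# Illustrative call (assumed): sym = read_symmcar('./'), after which
# sym['sym_rots'] holds the rotation matrices and sym['perm_map'] the
# ion permutations for each rotation and translation.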