mirror of https://github.com/triqs/dft_tools
synced 2024-12-25 05:43:40 +01:00

Minimal changes in the doc

This commit is contained in:
parent eb8060c290
commit 217f933634
9 doc/_templates/sideb_appli.html vendored
@@ -1,9 +0,0 @@
-<p>
-<a href="http://ipht.cea.fr"> <img style="width: 80px; margin: 10px 5px 0 0" src='_static/logo_cea.png' alt="CEA"/> </a>
-<a href="http://www.cpht.polytechnique.fr"> <img style="width: 80px; margin: 10px 5px 0 5px" src='_static/logo_x.png' alt="Ecole Polytechnique"/> </a>
-<br>
-<a href="http://www.cnrs.fr"> <img style="width: 80px; margin: 10px 0 0 5px" src='_static/logo_cnrs.png' alt="CNRS"/> </a>
-<img style="width: 80px; margin: 10px 0 0 5px" src='_static/logo_erc.jpg' alt="ERC"/>
-</p>
-
-
@@ -4,11 +4,16 @@
 Changelog
 =========
 
-This document describes the main changes in TRIQS 1.0 that might
-have consequences for your scripts and archives.
+This document describes the main changes in TRIQS.
 
+From TRIQS 0.x to TRIQS 1.0
+---------------------------
+
+There have been changes from versions 0.x to 1.0 that will most likely have
+consequences for your scripts and archives.
+
 Python classes
---------------
+~~~~~~~~~~~~~~
 
 The number of python classes in the old 0.x releases was increasing with no
 clear naming convention. In TRIQS 1.0 we have unified the naming of the classes
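As a concrete illustration of the unified convention, the rename table in the ``update_archive.py`` script added by this commit maps the old 0.x class names to their 1.0 counterparts, for example::

   GFBloc_ImFreq  ->  GfImFreq    # class names: CapWords
   GFBloc_ReFreq  ->  GfReFreq
   MeshGF         ->  MeshGf
   TailGF         ->  TailGf

These pairs are taken verbatim from the ``modifications`` dictionary in the script below.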
@@ -17,9 +22,17 @@ following the `PEP naming conventions
 
 * Package and module names: lowercase with underscores
 * Class names: CapWords
 * Function names: lowercase with underscores
 * Function and method arguments: lowercase with underscores
 
 Archives
---------
+~~~~~~~~
 
+We provide :download:`an update script <scripts/update_archive.py>` which should
+help you upgrade your archive. The usage is very simple::
+
+   $ python update_archive.py old_archive new_archive
+
+where ``old_archive`` is your old archive to be upgraded and ``new_archive`` is
+the name of the new archive. If you encounter problems please post an
+issue with a copy of your archive.
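After running the script, a quick way to verify the upgrade is to list the scheme attributes of the new archive. A minimal sketch using ``h5py`` (the archive name is a placeholder)::

   import h5py

   def show_scheme(name, obj):
       # old scheme names (e.g. 'GFBloc_ImFreq') should now read
       # as the new ones (e.g. 'GfImFreq')
       if 'TRIQS_HDF5_data_scheme' in obj.attrs:
           print name, obj.attrs['TRIQS_HDF5_data_scheme']

   with h5py.File('new_archive', 'r') as f:
       f.visititems(show_scheme)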
@@ -10,5 +10,6 @@ Table of contents
    documentation
    applications
    issues
+   changelog
    about
 
@@ -80,4 +80,3 @@ Further reading
    installation/install_options
    installation/python
    installation/clang
-   installation/changelog
222 doc/scripts/update_archive.py Normal file
@@ -0,0 +1,222 @@
import h5py
import sys
import numpy

if len(sys.argv) < 3:
    print "Usage: python update_archive.py old_archive new_archive"
    sys.exit()

print """
This script is an attempt to update your archive to TRIQS 1.0.
Please keep a copy of your old archive as this script is
** not guaranteed ** to work for your archive.
If you encounter any problem please report it on github!
"""

# The dictionary of modifications to make. For every old HDF5 scheme, the
# special key 'SSS' gives the new scheme name; all other entries map old
# dataset names to their new names.
modifications = {
    'GF': {'SSS': 'BlockGf', '__Name': 'name', '__Note': 'note', '__BlockIndicesList': 'indices'},
    'GFBloc_ImFreq': {'SSS': 'GfImFreq'},
    'GFBloc_ReFreq': {'SSS': 'GfReFreq'},
    'GFBloc_ImTime': {'SSS': 'GfImTime'},
    'GFBloc_ReTime': {'SSS': 'GfReTime'},
    'GFBloc_ImLegendre': {'SSS': 'GfLegendre'},
    'MeshGF': {'SSS': 'MeshGf'},
    'TailGF': {'SSS': 'TailGf'},
    'DOS': {'Name': 'name'},
    'BlockGf': {'__Name': 'name', '__Note': 'note', '__BlockIndicesList': 'indices'}
}

def sanitize_imfreq(origin, dest):

    dest.create_dataset(name="data", data=origin["Data"].value.transpose((2, 0, 1, 3)))
    dest["data"].attrs.create('__complex__', "1")

    dest.create_group(name="indices")
    exec("indL = %s" % origin["IndicesL"].value)
    exec("indR = %s" % origin["IndicesR"].value)
    indL = [str(i) for i in indL]
    indR = [str(i) for i in indR]
    dest["indices"].create_dataset(name="left", data=indL)
    dest["indices"].create_dataset(name="right", data=indR)

    dest.create_group(name="singularity")
    dest["singularity"].create_dataset(name="data", data=origin["Tail"]["array"].value.transpose((2, 0, 1, 3)))
    dest["singularity"]["data"].attrs.create('__complex__', "1")
    dest["singularity"].create_dataset(name="omin", data=origin["Tail"]["OrderMinMIN"].value)
    mask = numpy.zeros(dest["singularity"]["data"].shape[0:2], numpy.integer)
    mask.fill(origin["Tail"]["OrderMax"].value)
    dest["singularity"].create_dataset(name="mask", data=mask)

    # Rebuild the Matsubara mesh: the fermionic frequencies are (2n+1)*pi/beta,
    # so the stored bounds run from pi/beta to (2*size+1)*pi/beta
    dest.create_group(name="mesh")
    beta = origin["Mesh"]["Beta"].value
    pi = numpy.arccos(-1)
    size = numpy.uint(len(origin["Mesh"]["array"].value))
    dest["mesh"].create_dataset(name="kind", data=2)
    dest["mesh"].create_dataset(name="min", data=pi / beta)
    dest["mesh"].create_dataset(name="max", data=(2 * size + 1) * pi / beta)
    dest["mesh"].create_dataset(name="size", data=size)
    dest["mesh"].create_group(name="domain")
    dest["mesh"]["domain"].create_dataset(name="beta", data=beta)
    dest["mesh"]["domain"].create_dataset(name="statistic", data={"Fermion": "F", "Boson": "B"}[origin["Mesh"]["Statistic"].value])

    return ['Data', 'IndicesL', 'IndicesR', 'Mesh', 'Name', 'Note', 'Tail']

def sanitize_imtime(origin, dest):

    dest.create_dataset(name="data", data=origin["Data"].value.transpose((2, 0, 1)))

    dest.create_group(name="indices")
    exec("indL = %s" % origin["IndicesL"].value)
    exec("indR = %s" % origin["IndicesR"].value)
    indL = [str(i) for i in indL]
    indR = [str(i) for i in indR]
    dest["indices"].create_dataset(name="left", data=indL)
    dest["indices"].create_dataset(name="right", data=indR)

    dest.create_group(name="singularity")
    dest["singularity"].create_dataset(name="data", data=origin["Tail"]["array"].value.transpose((2, 0, 1, 3)))
    dest["singularity"]["data"].attrs.create('__complex__', "1")
    dest["singularity"].create_dataset(name="omin", data=origin["Tail"]["OrderMinMIN"].value)
    mask = numpy.zeros(dest["singularity"]["data"].shape[0:2], numpy.integer)
    mask.fill(origin["Tail"]["OrderMax"].value)
    dest["singularity"].create_dataset(name="mask", data=mask)

    dest.create_group(name="mesh")
    beta = origin["Mesh"]["Beta"].value
    size = numpy.uint(len(origin["Mesh"]["array"].value))
    min_t = origin["Mesh"]["array"].value[0]
    # The first time point tells the mesh kinds apart: points shifted by half
    # a bin (min_t = 0.5*beta/size) mean kind 0, points starting at 0 mean kind 2
    if min_t > 1e-10:
        kind = 0
        assert abs(min_t - 0.5 * beta / size) < 1e-10
    else:
        kind = 2
    dest["mesh"].create_dataset(name="kind", data=kind)
    dest["mesh"].create_dataset(name="min", data=0.0)
    dest["mesh"].create_dataset(name="max", data=beta)
    dest["mesh"].create_dataset(name="size", data=size)
    dest["mesh"].create_group(name="domain")
    dest["mesh"]["domain"].create_dataset(name="beta", data=beta)
    dest["mesh"]["domain"].create_dataset(name="statistic", data={"Fermion": "F", "Boson": "B"}[origin["Mesh"]["Statistic"].value])

    return ['Data', 'IndicesL', 'IndicesR', 'Mesh', 'Name', 'Note', 'Tail']

def sanitize_legendre(origin, dest):

    dest.create_dataset(name="data", data=origin["Data"].value.transpose((2, 0, 1, 3)))

    dest.create_group(name="indices")
    exec("indL = %s" % origin["IndicesL"].value)
    exec("indR = %s" % origin["IndicesR"].value)
    indL = [str(i) for i in indL]
    indR = [str(i) for i in indR]
    dest["indices"].create_dataset(name="left", data=indL)
    dest["indices"].create_dataset(name="right", data=indR)

    dest.create_group(name="mesh")
    beta = origin["Mesh"]["Beta"].value
    size = numpy.uint(len(origin["Mesh"]["array"].value))
    dest["mesh"].create_group(name="domain")
    dest["mesh"]["domain"].create_dataset(name="beta", data=beta)
    dest["mesh"]["domain"].create_dataset(name="n_max", data=size)
    dest["mesh"]["domain"].create_dataset(name="statistic", data={"Fermion": "F", "Boson": "B"}[origin["Mesh"]["Statistic"].value])

    return ['Data', 'IndicesL', 'IndicesR', 'Mesh', 'Name', 'Note', 'Tail']

def sanitize_refreq(origin, dest):

    dest.create_dataset(name="data", data=origin["Data"].value.transpose((2, 0, 1, 3)))
    dest["data"].attrs.create('__complex__', "1")

    dest.create_group(name="indices")
    exec("indL = %s" % origin["IndicesL"].value)
    exec("indR = %s" % origin["IndicesR"].value)
    indL = [str(i) for i in indL]
    indR = [str(i) for i in indR]
    dest["indices"].create_dataset(name="left", data=indL)
    dest["indices"].create_dataset(name="right", data=indR)

    dest.create_group(name="singularity")
    dest["singularity"].create_dataset(name="data", data=origin["Tail"]["array"].value.transpose((2, 0, 1, 3)))
    dest["singularity"]["data"].attrs.create('__complex__', "1")
    dest["singularity"].create_dataset(name="omin", data=origin["Tail"]["OrderMinMIN"].value)
    mask = numpy.zeros(dest["singularity"]["data"].shape[0:2], numpy.integer)
    mask.fill(origin["Tail"]["OrderMax"].value)
    dest["singularity"].create_dataset(name="mask", data=mask)

    dest.create_group(name="mesh")
    size = numpy.uint(len(origin["Mesh"]["array"].value))
    min_w = origin["Mesh"]["array"].value[0]
    max_w = origin["Mesh"]["array"].value[-1]
    dest["mesh"].create_dataset(name="kind", data=1)
    dest["mesh"].create_dataset(name="min", data=min_w)
    dest["mesh"].create_dataset(name="max", data=max_w)
    dest["mesh"].create_dataset(name="size", data=size)

    return ['Data', 'IndicesL', 'IndicesR', 'Mesh', 'Name', 'Note', 'Tail']

def copy_and_correct(origin, dest):

    scheme = None
    exclude = []

    # copy attributes in destination archive
    for at in origin.attrs.keys():

        # figure out if the group is a TRIQS scheme
        if at == 'TRIQS_HDF5_data_scheme':

            scheme = origin.attrs['TRIQS_HDF5_data_scheme']
            try:
                new_name = modifications[scheme]['SSS']
            except KeyError:
                new_name = scheme
            dest.attrs.create('TRIQS_HDF5_data_scheme', new_name)

        else:

            dest.attrs.create(at, origin.attrs[at])

    # some special cases here
    if scheme == "GFBloc_ImFreq" or scheme == "GfImFreq": exclude = sanitize_imfreq(origin, dest)
    if scheme == "GFBloc_ImTime" or scheme == "GfImTime": exclude = sanitize_imtime(origin, dest)
    if scheme == "GFBloc_ReFreq" or scheme == "GfReFreq": exclude = sanitize_refreq(origin, dest)
    if scheme == "GFBloc_ImLegendre" or scheme == "GfLegendre": exclude = sanitize_legendre(origin, dest)

    # copy the rest
    for key in [o for o in origin if o not in exclude]:

        # key is a dataset
        if hasattr(origin[key], 'value'):

            # check if dataset needs renaming
            try:
                new_key = modifications[scheme][key]
            except KeyError:
                new_key = key

            # create dataset and copy data as well as attributes in destination archive
            dest.create_dataset(name=new_key, data=origin[key].value)
            for at in origin[key].attrs.keys():
                dest[new_key].attrs.create(at, origin[key].attrs[at])

        # key is a group
        else:

            dest.create_group(name=key)
            copy_and_correct(origin[key], dest[key])


# Let's do it
A = h5py.File(sys.argv[1], 'r')
B = h5py.File(sys.argv[2], 'w')

print "Starting update..."
copy_and_correct(A, B)
print "Update done and written in", sys.argv[2]
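For reference, a session with this script looks as follows (the archive names are placeholders; the banner and status lines come from the ``print`` statements above)::

   $ python update_archive.py old.h5 new.h5

   This script is an attempt to update your archive to TRIQS 1.0.
   Please keep a copy of your old archive as this script is
   ** not guaranteed ** to work for your archive.
   If you encounter any problem please report it on github!

   Starting update...
   Update done and written in new.h5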
@@ -1,6 +1,44 @@
 
 .. _versions:
 
 Version compatibility
 =====================
 
+As the TRIQS library and applications based on it might live separate lives, it
+is important to define a clear version compatibility scheme.
+
+TRIQS version numbering
+-----------------------
+
+The TRIQS library has three-digit *release* numbers (e.g. 1.0.5) and two-digit
+*version* numbers (e.g. 1.0). The first two digits of the release number always
+correspond to the version number.
+
+New releases within the same version correspond to bug fixes and improvements
+that do not change the API of the library. In other words, applications based
+on the TRIQS library do not need to be changed between such releases of the
+TRIQS library. Instead, a modification of the version number, say from 1.0 to
+1.1, indicates deeper modifications of the library with possible breaks of the
+API. In that case, the source code of the applications might need to be
+modified.
+
+TRIQS applications version numbering
+------------------------------------
+
+In order to easily identify which versions of an application are compatible
+with given versions of TRIQS, the applications follow a similar version
+numbering: three-digit release numbers and two-digit version numbers. The
+compatibility rule is then simply that *an application is compatible with the
+TRIQS library if it has the same version number*.
+
+How to determine the version of the installed TRIQS library
+-----------------------------------------------------------
+
+In order to determine the version of an installed TRIQS library, you can
+either:
+
+* Read the content of the ``version.hpp`` file in the
+  ``path_to_install_directory/include/triqs/`` directory.
+
+* Run ``pytriqs`` and type the following commands::
+
+   from pytriqs.version import *
+   show_version()
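The compatibility rule above reduces to comparing the first two digits of the release numbers; a minimal sketch (the helper is hypothetical, not part of the TRIQS API)::

   def is_compatible(triqs_release, app_release):
       # "1.0.5" -> version ['1', '0']; an application is compatible
       # when the two-digit version numbers agree
       return triqs_release.split('.')[:2] == app_release.split('.')[:2]

   assert is_compatible('1.0.5', '1.0.2')        # both version 1.0
   assert not is_compatible('1.1.0', '1.0.3')    # 1.1 vs 1.0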