################################################################################
#
# TRIQS: a Toolbox for Research in Interacting Quantum Systems
#
# Copyright (C) 2011 by M. Aichhorn
#
# TRIQS is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# TRIQS is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# TRIQS. If not, see <http://www.gnu.org/licenses/>.
#
################################################################################
from types import *
import numpy
from pytriqs.archive import *
import pytriqs.utility.mpi as mpi
from math import sqrt
from converter_tools import *


class HkConverter(ConverterTools):
"""
2014-11-18 11:30:26 +01:00
Conversion from general H(k) file to an hdf5 file that can be used as input for the SumKDFT class.
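
    Examples
    --------
    A minimal usage sketch. The input file name ``hk_input.dat`` is only a
    placeholder, and the import path below assumes the pytriqs-based
    dft_tools package layout; adjust both to your installation::

        from pytriqs.applications.dft.converters.hk_converter import HkConverter

        Converter = HkConverter(filename='hk_input.dat')
        Converter.convert_dft_input()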
"""

    def __init__(self, filename, hdf_filename = None, dft_subgrp = 'dft_input', symmcorr_subgrp = 'dft_symmcorr_input', repacking = False):
        """
        Initialise the class.

        Parameters
        ----------
        filename : string
            Name of file containing the H(k) and other relevant data.
        hdf_filename : string, optional
            Name of hdf5 archive to be created.
        dft_subgrp : string, optional
            Name of subgroup storing necessary DFT data.
        symmcorr_subgrp : string, optional
            Name of subgroup storing correlated-shell symmetry data.
            The group is actually empty; it is just included for compatibility.
        repacking : boolean, optional
            Does the hdf5 archive need to be repacked to save space?
"""
assert type(filename)==StringType,"HkConverter: filename must be a filename."
if hdf_filename is None: hdf_filename = filename+'.h5'
self.hdf_file = hdf_filename
self.dft_file = filename
2014-11-18 11:30:26 +01:00
self.dft_subgrp = dft_subgrp
self.symmcorr_subgrp = symmcorr_subgrp
self.fortran_to_replace = {'D':'E', '(':' ', ')':' ', ',':' '}
# Checks if h5 file is there and repacks it if wanted:
import os.path
if (os.path.exists(self.hdf_file) and repacking):
ConverterTools.repack(self)

    def convert_dft_input(self, first_real_part_matrix = True, only_upper_triangle = False, weights_in_file = False):
        """
        Reads the appropriate files and stores the data for the dft_subgrp in the hdf5 archive.

        Parameters
        ----------
        first_real_part_matrix : boolean, optional
            Should all the real components for a given k be read in first, followed by the imaginary parts?
        only_upper_triangle : boolean, optional
            Should only the upper triangular part of H(k) be read in?
        weights_in_file : boolean, optional
            Are the k-point weights to be read in?
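
        Examples
        --------
        A hedged sketch of a typical call; which flag combination is correct
        depends entirely on how the H(k) file was written, so the values below
        are only an assumption::

            Converter.convert_dft_input(first_real_part_matrix=True,
                                        only_upper_triangle=False,
                                        weights_in_file=False)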
"""
        # Read and write only on the master node
        if not (mpi.is_master_node()): return
        mpi.report("Reading input from %s..."%self.dft_file)

        # R is a generator : each R.next() will return the next number in the file
        R = ConverterTools.read_fortran_file(self,self.dft_file,self.fortran_to_replace)

        try:
            energy_unit = 1.0                              # the energy conversion factor is 1.0, we assume eV in files
            n_k = int(R.next())                            # read the number of k points
            k_dep_projection = 0
            SP = 0                                         # no spin-polarisation
            SO = 0                                         # no spin-orbit
            charge_below = 0.0                             # total charge below energy window is set to 0
            density_required = R.next()                    # density required, for setting the chemical potential
            symm_op = 0                                    # No symmetry groups for the k-sum

            # the information on the non-correlated shells is needed for defining dimension of matrices:
            n_shells = int(R.next())                       # number of shells considered in the Wanniers
                                                           # corresponds to index R in formulas
            # now read the information about the shells (atom, sort, l, dim):
            shell_entries = ['atom', 'sort', 'l', 'dim']
            shells = [ {name: int(val) for name, val in zip(shell_entries, R)} for ish in range(n_shells) ]

            n_corr_shells = int(R.next())                  # number of corr. shells (e.g. Fe d, Ce f) in the unit cell,
                                                           # corresponds to index R in formulas
            # now read the information about the shells (atom, sort, l, dim, SO flag, irep):
            corr_shell_entries = ['atom', 'sort', 'l', 'dim', 'SO', 'irep']
            corr_shells = [ {name: int(val) for name, val in zip(corr_shell_entries, R)} for icrsh in range(n_corr_shells) ]
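
            # For illustration only (hypothetical numbers): a correlated-shell entry such as
            #   "1  1  2  5  0  0"
            # would be read as {'atom': 1, 'sort': 1, 'l': 2, 'dim': 5, 'SO': 0, 'irep': 0}.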

            # determine the number of inequivalent correlated shells and maps, needed for further reading
            [n_inequiv_shells, corr_to_inequiv, inequiv_to_corr] = ConverterTools.det_shell_equivalence(self,corr_shells)

            use_rotations = 0
            rot_mat = [numpy.identity(corr_shells[icrsh]['dim'],numpy.complex_) for icrsh in range(n_corr_shells)]
            rot_mat_time_inv = [0 for i in range(n_corr_shells)]

            # Representative representations are read from file
            n_reps = [1 for i in range(n_inequiv_shells)]
            dim_reps = [0 for i in range(n_inequiv_shells)]
            T = []
            for ish in range(n_inequiv_shells):
                n_reps[ish] = int(R.next())                # number of representatives ("subsets"), e.g. t2g and eg
                dim_reps[ish] = [int(R.next()) for i in range(n_reps[ish])]   # dimensions of the subsets

                # The transformation matrix:
                # it is of dimension 2l+1, and it is taken to be the standard d transformation (as in Wien2k)
                ll = 2*corr_shells[inequiv_to_corr[ish]]['l']+1
                lmax = ll * (corr_shells[inequiv_to_corr[ish]]['SO'] + 1)
                T.append(numpy.zeros([lmax,lmax],numpy.complex_))

                T[ish] = numpy.array([[0.0, 0.0, 1.0, 0.0, 0.0],
                                      [1.0/sqrt(2.0), 0.0, 0.0, 0.0, 1.0/sqrt(2.0)],
                                      [-1.0/sqrt(2.0), 0.0, 0.0, 0.0, 1.0/sqrt(2.0)],
                                      [0.0, 1.0/sqrt(2.0), 0.0, -1.0/sqrt(2.0), 0.0],
                                      [0.0, 1.0/sqrt(2.0), 0.0, 1.0/sqrt(2.0), 0.0]])

            # Spin blocks to be read:
            n_spin_blocs = SP + 1 - SO   # number of spins to read for Norbs and Ham, NOT Projectors

            # define the number of n_orbitals for all k points: it is the total number of bands and independent of k!
            n_orbitals = numpy.ones([n_k,n_spin_blocs],numpy.int) * sum([ sh['dim'] for sh in shells ])

            # Initialise the projectors:
            proj_mat = numpy.zeros([n_k,n_spin_blocs,n_corr_shells,max([crsh['dim'] for crsh in corr_shells]),numpy.max(n_orbitals)],numpy.complex_)

            # Set up the projectors (identity blocks; H(k) is already given in the orbital basis,
            # so each correlated shell is simply picked out at its offset):
            for ik in range(n_k):
                for icrsh in range(n_corr_shells):
                    for isp in range(n_spin_blocs):

                        # calculate the offset of this correlated shell within the full orbital space:
                        offset = 0
                        n_orb = 0
                        for ish in range(n_shells):
                            if (n_orb==0):
                                if (shells[ish]['atom']==corr_shells[icrsh]['atom']) and (shells[ish]['sort']==corr_shells[icrsh]['sort']):
                                    n_orb = corr_shells[icrsh]['dim']
                                else:
                                    offset += shells[ish]['dim']

                        proj_mat[ik,isp,icrsh,0:n_orb,offset:offset+n_orb] = numpy.identity(n_orb)

            # now define the arrays for weights and hopping ...
            bz_weights = numpy.ones([n_k],numpy.float_) / float(n_k)   # w(k_index), default normalisation
            hopping = numpy.zeros([n_k,n_spin_blocs,numpy.max(n_orbitals),numpy.max(n_orbitals)],numpy.complex_)

            if (weights_in_file):
                # weights are read from the file
                for ik in range(n_k) : bz_weights[ik] = R.next()

                # if the sum over spins is in the weights, take it out again!!
                sm = sum(bz_weights)
                bz_weights[:] /= sm
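
            # The reading loop below supports two layouts of H(k), selected by first_real_part_matrix:
            # either the full real part of each matrix is listed first and the imaginary part follows,
            # or real and imaginary parts are given pairwise for every matrix element. With
            # only_upper_triangle the lower triangle is reconstructed from hermiticity.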
            # Grab the H
            for isp in range(n_spin_blocs):
                for ik in range(n_k) :
                    n_orb = n_orbitals[ik,isp]

                    if (first_real_part_matrix):   # first read all real components for a given k, then read the imaginary parts

                        for i in range(n_orb):
                            if (only_upper_triangle):
                                istart = i
                            else:
                                istart = 0
                            for j in range(istart,n_orb):
                                hopping[ik,isp,i,j] = R.next()

                        for i in range(n_orb):
                            if (only_upper_triangle):
                                istart = i
                            else:
                                istart = 0
                            for j in range(istart,n_orb):
                                hopping[ik,isp,i,j] += R.next() * 1j
                                if ((only_upper_triangle) and (i!=j)): hopping[ik,isp,j,i] = hopping[ik,isp,i,j].conjugate()

                    else:   # read the (real, imaginary) tuple for each matrix element

                        for i in range(n_orb):
                            if (only_upper_triangle):
                                istart = i
                            else:
                                istart = 0
                            for j in range(istart,n_orb):
                                hopping[ik,isp,i,j] = R.next()
                                hopping[ik,isp,i,j] += R.next() * 1j
                                if ((only_upper_triangle) and (i!=j)): hopping[ik,isp,j,i] = hopping[ik,isp,i,j].conjugate()

            # keep some things that we need for reading parproj:
            things_to_set = ['n_shells','shells','n_corr_shells','corr_shells','n_spin_blocs','n_orbitals','n_k','SO','SP','energy_unit']
            for it in things_to_set: setattr(self,it,locals()[it])

        except StopIteration:   # a more explicit error if the file is corrupted.
            raise IOError("HK Converter : reading file %s failed!" % self.dft_file)

        R.close()

        # Save to the HDF5:
        ar = HDFArchive(self.hdf_file,'a')
        if not (self.dft_subgrp in ar): ar.create_group(self.dft_subgrp)
        things_to_save = ['energy_unit','n_k','k_dep_projection','SP','SO','charge_below','density_required',
                          'symm_op','n_shells','shells','n_corr_shells','corr_shells','use_rotations','rot_mat',
                          'rot_mat_time_inv','n_reps','dim_reps','T','n_orbitals','proj_mat','bz_weights','hopping',
                          'n_inequiv_shells', 'corr_to_inequiv', 'inequiv_to_corr']
        for it in things_to_save: ar[self.dft_subgrp][it] = locals()[it]
        del ar