################################################################################
#
# TRIQS: a Toolbox for Research in Interacting Quantum Systems
#
# Copyright (C) 2011 by M. Aichhorn, L. Pourovskii, V. Vildosola
#
# TRIQS is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# TRIQS is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# TRIQS. If not, see <http://www.gnu.org/licenses/>.
#
################################################################################

from types import *
import numpy
import copy  # needed for copy.deepcopy in lattice_gf_matsubara
import pytriqs.utility.dichotomy as dichotomy
from pytriqs.gf.local import *
import pytriqs.utility.mpi as mpi
from pytriqs.archive import *
from symmetry import *


class SumkLDA:
    """This class provides a general SumK method for combining ab-initio code and pytriqs."""
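
    # Illustrative usage (a sketch, not executed as part of this module): the class is
    # constructed from an HDF5 archive written by one of the converters. The archive name
    # 'case.h5' below is a hypothetical placeholder, and the import path may differ
    # depending on how the package is installed.
    #
    #   from sumk_lda import SumkLDA
    #   SK = SumkLDA(hdf_file = 'case.h5', use_lda_blocks = True)
    #   mpi.report("Found %s correlated shells"%SK.n_corr_shells)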

    def __init__(self, hdf_file, mu = 0.0, h_field = 0.0, use_lda_blocks = False,
                 lda_data = 'lda_input', symmcorr_data = 'lda_symmcorr_input', parproj_data = 'lda_parproj_input',
                 symmpar_data = 'lda_symmpar_input', bands_data = 'lda_bands_input', lda_output = 'lda_output'):
        """
        Initialises the class from data previously stored in an HDF5 archive.
        """

        if not type(hdf_file) == StringType:
            mpi.report("Give a string for the HDF5 filename to read the input!")
        else:
            self.hdf_file = hdf_file
            self.lda_data = lda_data
            self.symmcorr_data = symmcorr_data
            self.parproj_data = parproj_data
            self.symmpar_data = symmpar_data
            self.bands_data = bands_data
            self.lda_output = lda_output
            self.G_upfold = None
            self.h_field = h_field

            # read input from HDF:
            things_to_read = ['energy_unit','n_k','k_dep_projection','SP','SO','charge_below','density_required',
                              'symm_op','n_shells','shells','n_corr_shells','corr_shells','use_rotations','rot_mat',
                              'rot_mat_time_inv','n_reps','dim_reps','T','n_orbitals','proj_mat','bz_weights','hopping',
                              'n_inequiv_shells', 'corr_to_inequiv', 'inequiv_to_corr']
            self.subgroup_present, self.value_read = self.read_input_from_hdf(subgrp = self.lda_data, things_to_read = things_to_read)

            if self.SO and (abs(self.h_field) > 0.000001):
                self.h_field = 0.0
                mpi.report("For SO, the external magnetic field is not implemented, setting it to 0!")

            self.spin_block_names = [ ['up','down'], ['ud'] ]
            self.n_spin_blocks = [2,1]
            # convert spin_block_names to indices -- if spin polarized, differentiate up and down blocks
            self.spin_names_to_ind = [{}, {}]
            for iso in range(2): # SO = 0 or 1
                for ibl in range(self.n_spin_blocks[iso]):
                    self.spin_names_to_ind[iso][self.spin_block_names[iso][ibl]] = ibl * self.SP

            # GF structure used for the local quantities in the k sums.
            # Most general form allowing for all hybridisation, i.e. largest blocks possible.
            self.gf_struct_sumk = [ [ (b, range(self.corr_shells[icrsh][3])) for b in self.spin_block_names[self.corr_shells[icrsh][4]] ]
                                    for icrsh in range(self.n_corr_shells) ]

            #-----
            # If the following quantities are not in the HDF archive, set them up here:
            optional_things = ['gf_struct_solver','sumk_to_solver','solver_to_sumk','solver_to_sumk_block','chemical_potential','dc_imp','dc_energ','deg_shells']
            self.subgroup_present, self.value_read = self.read_input_from_hdf(subgrp = self.lda_output, things_to_read = [],
                                                                              optional_things = optional_things)

            if (not self.subgroup_present) or (not self.value_read['gf_struct_solver']):
                # No gf_struct was stored in the HDF archive, so first set a standard one:
                self.gf_struct_solver = [ dict([ (b, range(self.corr_shells[self.inequiv_to_corr[ish]][3]))
                                                 for b in self.spin_block_names[self.corr_shells[self.inequiv_to_corr[ish]][4]] ])
                                          for ish in range(self.n_inequiv_shells) ]
                # Set standard (identity) maps from gf_struct_sumk <-> gf_struct_solver:
                self.sumk_to_solver = [ {} for ish in range(self.n_inequiv_shells) ]
                self.solver_to_sumk = [ {} for ish in range(self.n_inequiv_shells) ]
                self.solver_to_sumk_block = [ {} for ish in range(self.n_inequiv_shells) ]
                for ish in range(self.n_inequiv_shells):
                    for block,inner_list in self.gf_struct_sumk[self.inequiv_to_corr[ish]]:
                        self.solver_to_sumk_block[ish][block] = block
                        for inner in inner_list:
                            self.sumk_to_solver[ish][(block,inner)] = (block,inner)
                            self.solver_to_sumk[ish][(block,inner)] = (block,inner)

            if (not self.subgroup_present) or (not self.value_read['dc_imp']):
                self.__init_dc() # initialise the double counting
            if (not self.subgroup_present) or (not self.value_read['chemical_potential']):
                self.chemical_potential = mu
            if (not self.subgroup_present) or (not self.value_read['deg_shells']):
                self.deg_shells = [ [] for ish in range(self.n_inequiv_shells) ]
            #-----

            if self.symm_op:
                self.symmcorr = Symmetry(hdf_file,subgroup=self.symmcorr_data)

            # Analyse the block structure and determine the smallest blocks, if desired:
            if use_lda_blocks: dm = self.analyse_block_structure()

            # Now save new things to HDF5:
            # FIXME WHAT HAPPENS TO h_field? INPUT TO __INIT__? ADD TO OPTIONAL_THINGS?
            things_to_save = ['chemical_potential','dc_imp','dc_energ','h_field']
            self.save(things_to_save)

    ################
    # HDF5 FUNCTIONS
    ################

    def read_input_from_hdf(self, subgrp, things_to_read=[], optional_things=[]):
        """
        Reads data from the HDF5 archive.
        """

        value_read = True
        # initialise variables on all nodes to ensure that the mpi broadcast at the end works:
        for it in things_to_read: setattr(self,it,0)
        for it in optional_things: setattr(self,it,0)
        subgroup_present = False   # defined on all nodes so that the broadcast below does not fail

        if mpi.is_master_node():
            ar = HDFArchive(self.hdf_file,'a')
            if subgrp in ar:
                subgroup_present = True
                # first read the necessary things:
                for it in things_to_read:
                    if it in ar[subgrp]:
                        setattr(self,it,ar[subgrp][it])
                    else:
                        mpi.report("Loading %s failed!"%it)
                        value_read = False

                if value_read and (len(optional_things) > 0):
                    # if the necessary items were read successfully, read the optional things:
                    value_read = {}
                    for it in optional_things:
                        if it in ar[subgrp]:
                            setattr(self,it,ar[subgrp][it])
                            value_read['%s'%it] = True
                        else:
                            value_read['%s'%it] = False
            else:
                if (len(things_to_read) != 0): mpi.report("Loading failed: No %s subgroup in HDF5!"%subgrp)
                subgroup_present = False
                value_read = False

            del ar

        # now do the broadcasting:
        for it in things_to_read: setattr(self,it,mpi.bcast(getattr(self,it)))
        for it in optional_things: setattr(self,it,mpi.bcast(getattr(self,it)))
        subgroup_present = mpi.bcast(subgroup_present)
        value_read = mpi.bcast(value_read)

        return subgroup_present, value_read

    def save(self,things_to_save):
        """Saves some quantities into an HDF5 archive"""

        if not (mpi.is_master_node()): return # do nothing on nodes
        ar = HDFArchive(self.hdf_file,'a')
        if not self.lda_output in ar: ar.create_group(self.lda_output)
        for it in things_to_save:
            try:
                ar[self.lda_output][it] = getattr(self,it)
            except:
                mpi.report("%s not found, and so not stored."%it)
        del ar

    ################
    # CORE FUNCTIONS
    ################

    def downfold(self,ik,icrsh,bname,gf_to_downfold,gf_inp):
        """Downfolds a block of the Green's function from the Bloch basis to the local orbitals of shell icrsh."""

        gf_downfolded = gf_inp.copy()
        isp = self.spin_names_to_ind[self.SO][bname]   # get spin index for proj. matrices
        dim = self.corr_shells[icrsh][3]
        n_orb = self.n_orbitals[ik,isp]
        projmat = self.proj_mat[ik,isp,icrsh,0:dim,0:n_orb]

        gf_downfolded.from_L_G_R(projmat,gf_to_downfold,projmat.conjugate().transpose())

        return gf_downfolded


    def upfold(self,ik,icrsh,bname,gf_to_upfold,gf_inp):
        """Upfolds a block of the Green's function from the local orbitals of shell icrsh to the Bloch basis."""

        gf_upfolded = gf_inp.copy()
        isp = self.spin_names_to_ind[self.SO][bname]   # get spin index for proj. matrices
        dim = self.corr_shells[icrsh][3]
        n_orb = self.n_orbitals[ik,isp]
        projmat = self.proj_mat[ik,isp,icrsh,0:dim,0:n_orb]

        gf_upfolded.from_L_G_R(projmat.conjugate().transpose(),gf_to_upfold,projmat)

        return gf_upfolded
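
    # In formulas (a sketch of what the two routines above compute, using the projection
    # matrix P = proj_mat[ik,isp,icrsh] for k-point ik and correlated shell icrsh):
    #
    #   downfold:  G_loc(iw)   = P         G_bloch(iw)  P^dagger
    #   upfold:    G_bloch(iw) = P^dagger  G_imp(iw)    P
    #
    # i.e. from_L_G_R(L,G,R) evaluates L * G * R for every frequency point of the block.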

    def rotloc(self,icrsh,gf_to_rotate,direction):
        """Local <-> Global rotation of a GF block.
           direction: 'toLocal' / 'toGlobal'"""

        assert ((direction == 'toLocal') or (direction == 'toGlobal')),"Give direction 'toLocal' or 'toGlobal' in rotloc!"

        gf_rotated = gf_to_rotate.copy()

        if direction == 'toGlobal':

            if (self.rot_mat_time_inv[icrsh] == 1) and self.SO:
                gf_rotated << gf_rotated.transpose()
                gf_rotated.from_L_G_R(self.rot_mat[icrsh].conjugate(),gf_rotated,self.rot_mat[icrsh].transpose())
            else:
                gf_rotated.from_L_G_R(self.rot_mat[icrsh],gf_rotated,self.rot_mat[icrsh].conjugate().transpose())

        elif direction == 'toLocal':

            if (self.rot_mat_time_inv[icrsh] == 1) and self.SO:
                gf_rotated << gf_rotated.transpose()
                gf_rotated.from_L_G_R(self.rot_mat[icrsh].transpose(),gf_rotated,self.rot_mat[icrsh].conjugate())
            else:
                gf_rotated.from_L_G_R(self.rot_mat[icrsh].conjugate().transpose(),gf_rotated,self.rot_mat[icrsh])

        return gf_rotated

    def lattice_gf_matsubara(self,ik,mu,beta=40,with_Sigma=True):
        """Calculates the lattice Green's function on the Matsubara axis from the LDA hopping and the self energy,
           at k-point number ik and chemical potential mu."""

        ntoi = self.spin_names_to_ind[self.SO]
        bln = self.spin_block_names[self.SO]

        if not hasattr(self,"Sigma_imp"): with_Sigma = False

        if with_Sigma:
            stmp = self.add_dc()
            beta = self.Sigma_imp[0].mesh.beta   # override beta if Sigma is present

        # Do we need to set up G_upfold?
        set_up_G_upfold = False        # assume not
        if self.G_upfold is None:      # yes if no G_upfold is available yet
            set_up_G_upfold = True
        else:                          # yes if inconsistencies are present in the existing G_upfold
            GFsize = [ gf.N1 for bname,gf in self.G_upfold ]
            unchangedsize = all( [ self.n_orbitals[ik,ntoi[bln[ibl]]] == GFsize[ibl]
                                   for ibl in range(self.n_spin_blocks[self.SO]) ] )
            if (not unchangedsize) or (self.G_upfold.mesh.beta != beta): set_up_G_upfold = True

        # Set up G_upfold
        if set_up_G_upfold:
            block_structure = [ range(self.n_orbitals[ik,ntoi[b]]) for b in bln ]
            gf_struct = [ (bln[ibl], block_structure[ibl]) for ibl in range(self.n_spin_blocks[self.SO]) ]
            block_ind_list = [ block for block,inner in gf_struct ]
            if with_Sigma:
                glist = lambda : [ GfImFreq(indices = inner, mesh = self.Sigma_imp[0].mesh) for block,inner in gf_struct ]
            else:
                glist = lambda : [ GfImFreq(indices = inner, beta = beta) for block,inner in gf_struct ]
            self.G_upfold = BlockGf(name_list = block_ind_list, block_list = glist(), make_copies=False)
            self.G_upfold.zero()

        self.G_upfold << iOmega_n
        idmat = [ numpy.identity(self.n_orbitals[ik,ntoi[bl]],numpy.complex_) for bl in bln ]
        M = copy.deepcopy(idmat)
        for ibl in range(self.n_spin_blocks[self.SO]):
            ind = ntoi[bln[ibl]]
            n_orb = self.n_orbitals[ik,ind]
            M[ibl] = self.hopping[ik,ind,0:n_orb,0:n_orb] - (idmat[ibl]*mu) - (idmat[ibl] * self.h_field * (1-2*ibl))
        self.G_upfold -= M

        if with_Sigma:
            for icrsh in range(self.n_corr_shells):
                for bname,gf in self.G_upfold: gf -= self.upfold(ik,icrsh,bname,stmp[icrsh][bname],gf)

        self.G_upfold.invert()

        return self.G_upfold
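
    # Schematically, the spin block ibl of the upfolded lattice Green's function that is
    # built and inverted above is (a worked equation, not extra code):
    #
    #   G_bloch(k,iw_n)^(-1) = [iw_n + mu + h_field*(1-2*ibl)] * 1  -  H(k)
    #                          -  sum_icrsh P^dagger (Sigma_imp - Sigma_DC) P
    #
    # where H(k) is the hopping matrix in the Bloch basis and the last term is only
    # included when with_Sigma is True.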

    def density_matrix(self, method = 'using_gf', beta=40.0):
        """Calculates the density matrices of the correlated shells in one of two ways:
           if 'using_gf': First get the upfolded lattice gf (G_loc is not set up), then take its density matrix.
                          This is useful for Hubbard I, and very quick.
                          No assumption on the hopping structure is made (i.e. diagonal or not).
           if 'using_point_integration': Simple point integration of the band energies; only works for a
                          diagonal hopping matrix (true in wien2k).
        """

        dens_mat = [ {} for icrsh in range(self.n_corr_shells) ]
        for icrsh in range(self.n_corr_shells):
            for bl in self.spin_block_names[self.corr_shells[icrsh][4]]:
                dens_mat[icrsh][bl] = numpy.zeros([self.corr_shells[icrsh][3],self.corr_shells[icrsh][3]], numpy.complex_)

        ikarray = numpy.array(range(self.n_k))
        for ik in mpi.slice_array(ikarray):

            if method == "using_gf":

                G_upfold = self.lattice_gf_matsubara(ik=ik, beta=beta, mu=self.chemical_potential)
                G_upfold *= self.bz_weights[ik]
                dm = G_upfold.density()
                MMat = [ dm[bl] for bl in self.spin_block_names[self.SO] ]

            elif method == "using_point_integration":

                ntoi = self.spin_names_to_ind[self.SO]
                bln = self.spin_block_names[self.SO]
                unchangedsize = all( [ self.n_orbitals[ik,ntoi[bl]] == self.n_orbitals[0,ntoi[bl]] for bl in bln ] )
                if unchangedsize:
                    dim = self.n_orbitals[0,ntoi[bl]]
                else:
                    dim = self.n_orbitals[ik,ntoi[bl]]
                MMat = [ numpy.zeros( [dim,dim], numpy.complex_) for bl in bln ]

                for ibl, bl in enumerate(bln):
                    ind = ntoi[bl]
                    for inu in range(self.n_orbitals[ik,ind]):
                        if (self.hopping[ik,ind,inu,inu] - self.h_field*(1-2*ibl)) < 0.0: # only works for a diagonal hopping matrix (true in wien2k)
                            MMat[ibl][inu,inu] = 1.0
                        else:
                            MMat[ibl][inu,inu] = 0.0

            for icrsh in range(self.n_corr_shells):
                for ibl, bn in enumerate(self.spin_block_names[self.corr_shells[icrsh][4]]):
                    isp = self.spin_names_to_ind[self.corr_shells[icrsh][4]][bn]
                    dim = self.corr_shells[icrsh][3]
                    n_orb = self.n_orbitals[ik,isp]
                    projmat = self.proj_mat[ik,isp,icrsh,0:dim,0:n_orb]
                    if method == "using_gf":
                        dens_mat[icrsh][bn] += numpy.dot( numpy.dot(projmat,MMat[ibl]),
                                                          projmat.transpose().conjugate() )
                    elif method == "using_point_integration":
                        dens_mat[icrsh][bn] += self.bz_weights[ik] * numpy.dot( numpy.dot(projmat,MMat[ibl]),
                                                                                projmat.transpose().conjugate() )

        # get data from the nodes:
        for icrsh in range(self.n_corr_shells):
            for bname in dens_mat[icrsh]:
                dens_mat[icrsh][bname] = mpi.all_reduce(mpi.world, dens_mat[icrsh][bname], lambda x,y : x+y)
        mpi.barrier()

        if self.symm_op != 0: dens_mat = self.symmcorr.symmetrize(dens_mat)

        # Rotate to the local coordinate system:
        if self.use_rotations:
            for icrsh in range(self.n_corr_shells):
                for bn in dens_mat[icrsh]:
                    if self.rot_mat_time_inv[icrsh] == 1: dens_mat[icrsh][bn] = dens_mat[icrsh][bn].conjugate()
                    dens_mat[icrsh][bn] = numpy.dot( numpy.dot(self.rot_mat[icrsh].conjugate().transpose(),dens_mat[icrsh][bn]),
                                                     self.rot_mat[icrsh] )

        return dens_mat

    def analyse_block_structure(self, threshold = 0.00001, include_shells = None, dm = None):
        """Determines the Green's function block structure from simple point integration."""

        self.gf_struct_solver = [ {} for ish in range(self.n_inequiv_shells) ]
        self.sumk_to_solver = [ {} for ish in range(self.n_inequiv_shells) ]
        self.solver_to_sumk = [ {} for ish in range(self.n_inequiv_shells) ]
        self.solver_to_sumk_block = [ {} for ish in range(self.n_inequiv_shells) ]

        if dm is None: dm = self.density_matrix(method = 'using_point_integration')
        dens_mat = [ dm[self.inequiv_to_corr[ish]] for ish in range(self.n_inequiv_shells) ]

        if include_shells is None: include_shells = range(self.n_inequiv_shells)
        for ish in include_shells:

            block_ind_list = [ block for block,inner in self.gf_struct_sumk[self.inequiv_to_corr[ish]] ]
            for block in block_ind_list:
                dm = dens_mat[ish][block]
                dmbool = (abs(dm) > threshold)   # Boolean matrix of entries larger than the threshold

                # Determine the off-diagonal entries in the upper triangular part of the density matrix:
                offdiag = []
                for i in range(len(dmbool)):
                    for j in range(i+1,len(dmbool)):
                        if dmbool[i,j]: offdiag.append([i,j])

                num_blocs = len(dmbool)
                blocs = [ [i] for i in range(num_blocs) ]

                for i in range(len(offdiag)):
                    for j in range(len(blocs[offdiag[i][1]])): blocs[offdiag[i][0]].append(blocs[offdiag[i][1]][j])
                    del blocs[offdiag[i][1]]
                    for j in range(i+1,len(offdiag)):
                        if offdiag[j][0] == offdiag[i][1]: offdiag[j][0] = offdiag[i][0]
                        if offdiag[j][1] == offdiag[i][1]: offdiag[j][1] = offdiag[i][0]
                        if offdiag[j][0] > offdiag[i][1]: offdiag[j][0] -= 1
                        if offdiag[j][1] > offdiag[i][1]: offdiag[j][1] -= 1
                        offdiag[j].sort()
                    num_blocs -= 1

                for i in range(num_blocs):
                    blocs[i].sort()
                    self.gf_struct_solver[ish].update( [('%s_%s'%(block,i),range(len(blocs[i])))] )

                # Construct sumk_to_solver taking (sumk_block, sumk_index) --> (solver_block, solver_inner)
                # and solver_to_sumk taking (solver_block, solver_inner) --> (sumk_block, sumk_index)
                for i in range(num_blocs):
                    for j in range(len(blocs[i])):
                        block_sumk = block
                        inner_sumk = blocs[i][j]
                        block_solv = '%s_%s'%(block,i)
                        inner_solv = j
                        self.sumk_to_solver[ish][(block_sumk,inner_sumk)] = (block_solv,inner_solv)
                        self.solver_to_sumk[ish][(block_solv,inner_solv)] = (block_sumk,inner_sumk)
                        self.solver_to_sumk_block[ish][block_solv] = block_sumk

            # now calculate degeneracies of orbitals:
            dm = {}
            for block,inner in self.gf_struct_solver[ish].iteritems():
                # get dm for the blocks:
                dm[block] = numpy.zeros([len(inner),len(inner)],numpy.complex_)
                for ind1 in inner:
                    for ind2 in inner:
                        block_sumk,ind1_sumk = self.solver_to_sumk[ish][(block,ind1)]
                        block_sumk,ind2_sumk = self.solver_to_sumk[ish][(block,ind2)]
                        dm[block][ind1,ind2] = dens_mat[ish][block_sumk][ind1_sumk,ind2_sumk]

            for block1 in self.gf_struct_solver[ish].iterkeys():
                for block2 in self.gf_struct_solver[ish].iterkeys():
                    if dm[block1].shape == dm[block2].shape:
                        if ( (abs(dm[block1] - dm[block2]) < threshold).all() ) and (block1 != block2):
                            # check if it was already there:
                            ind1 = -1
                            ind2 = -2
                            for n,ind in enumerate(self.deg_shells[ish]):
                                if block1 in ind: ind1 = n
                                if block2 in ind: ind2 = n
                            if (ind1 < 0) and (ind2 >= 0):
                                self.deg_shells[ish][ind2].append(block1)
                            elif (ind1 >= 0) and (ind2 < 0):
                                self.deg_shells[ish][ind1].append(block2)
                            elif (ind1 < 0) and (ind2 < 0):
                                self.deg_shells[ish].append([block1,block2])

        things_to_save = ['gf_struct_solver','sumk_to_solver','solver_to_sumk','solver_to_sumk_block','deg_shells']
        self.save(things_to_save)

        return dens_mat
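
    # A hedged, illustrative example of what analyse_block_structure produces: for a
    # five-orbital shell whose density matrix decouples into a 2x2 and a 3x3 block in
    # each spin channel, gf_struct_solver[ish] might look like
    #
    #   {'up_0': [0, 1], 'up_1': [0, 1, 2], 'down_0': [0, 1], 'down_1': [0, 1, 2]}
    #
    # and solver_to_sumk[ish][('up_1', 2)] would give back the original ('up', orbital)
    # pair. The actual block names and sizes depend on the material and the threshold.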

    def symm_deg_gf(self,gf_to_symm,orb):
        """Symmetrises a GF for the given degenerate shells self.deg_shells"""

        for degsh in self.deg_shells[orb]:
            # loop over degenerate shells:
            ss = gf_to_symm[degsh[0]].copy()
            ss.zero()
            Ndeg = len(degsh)
            for bl in degsh: ss += gf_to_symm[bl] / (1.0*Ndeg)
            for bl in degsh: gf_to_symm[bl] << ss

    # For simple dft input, get crystal field splittings.
    def eff_atomic_levels(self):
        """Calculates the effective atomic levels needed as input for the Hubbard I Solver."""

        # define matrices for inequivalent shells:
        eff_atlevels = [ {} for ish in range(self.n_inequiv_shells) ]
        for ish in range(self.n_inequiv_shells):
            for bn in self.spin_block_names[self.corr_shells[self.inequiv_to_corr[ish]][4]]:
                eff_atlevels[ish][bn] = numpy.identity(self.corr_shells[self.inequiv_to_corr[ish]][3], numpy.complex_)

        # Chemical Potential:
        for ish in range(self.n_inequiv_shells):
            for ii in eff_atlevels[ish]: eff_atlevels[ish][ii] *= -self.chemical_potential

        # double counting term:
        for ish in range(self.n_inequiv_shells):
            for ii in eff_atlevels[ish]:
                eff_atlevels[ish][ii] -= self.dc_imp[self.inequiv_to_corr[ish]][ii]

        # sum over k:
        if not hasattr(self,"Hsumk"):
            # calculate the sum over k. Does not depend on mu, so do it only once:
            self.Hsumk = [ {} for icrsh in range(self.n_corr_shells) ]
            for icrsh in range(self.n_corr_shells):
                for bn in self.spin_block_names[self.corr_shells[icrsh][4]]:
                    dim = self.corr_shells[icrsh][3]  #*(1+self.corr_shells[icrsh][4])
                    self.Hsumk[icrsh][bn] = numpy.zeros([dim,dim],numpy.complex_)

            for icrsh in range(self.n_corr_shells):
                dim = self.corr_shells[icrsh][3]
                for ibl, bn in enumerate(self.spin_block_names[self.corr_shells[icrsh][4]]):
                    isp = self.spin_names_to_ind[self.corr_shells[icrsh][4]][bn]
                    for ik in range(self.n_k):
                        n_orb = self.n_orbitals[ik,isp]
                        MMat = numpy.identity(n_orb, numpy.complex_)
                        MMat = self.hopping[ik,isp,0:n_orb,0:n_orb] - (1-2*ibl) * self.h_field * MMat
                        projmat = self.proj_mat[ik,isp,icrsh,0:dim,0:n_orb]
                        self.Hsumk[icrsh][bn] += self.bz_weights[ik] * numpy.dot( numpy.dot(projmat,MMat),
                                                                                  projmat.conjugate().transpose() )

            # symmetrisation:
            if self.symm_op != 0: self.Hsumk = self.symmcorr.symmetrize(self.Hsumk)

            # Rotate to local coordinate system:
            if self.use_rotations:
                for icrsh in range(self.n_corr_shells):
                    for bn in self.Hsumk[icrsh]:
                        if self.rot_mat_time_inv[icrsh] == 1: self.Hsumk[icrsh][bn] = self.Hsumk[icrsh][bn].conjugate()
                        self.Hsumk[icrsh][bn] = numpy.dot( numpy.dot(self.rot_mat[icrsh].conjugate().transpose(),self.Hsumk[icrsh][bn]),
                                                           self.rot_mat[icrsh] )

        # add to matrix:
        for ish in range(self.n_inequiv_shells):
            for bn in eff_atlevels[ish]:
                eff_atlevels[ish][bn] += self.Hsumk[self.inequiv_to_corr[ish]][bn]

        return eff_atlevels

    def __init_dc(self):
        """Initialises the double-counting matrices and energies to zero."""

        # construct the double counting arrays dc_imp (one matrix per block) and dc_energ (one number per shell):
        self.dc_imp = [ {} for icrsh in range(self.n_corr_shells) ]
        for icrsh in range(self.n_corr_shells):
            dim = self.corr_shells[icrsh][3]
            for j in range(len(self.gf_struct_sumk[icrsh])):
                self.dc_imp[icrsh]['%s'%self.gf_struct_sumk[icrsh][j][0]] = numpy.zeros([dim,dim],numpy.float_)
        self.dc_energ = [ 0.0 for icrsh in range(self.n_corr_shells) ]

    def set_dc(self,dens_mat,U_interact,J_hund,orb=0,use_dc_formula=0,use_val=None):
        """Sets the double counting term for the inequivalent shell orb:
           use_dc_formula = 0: LDA+U fully-localised limit (FLL),
           use_dc_formula = 1: Held's formula,
           use_dc_formula = 2: around mean-field (AMF).
           If use_val is given, the double counting is instead set to that fixed value.
           Be sure that you are using the correct interaction Hamiltonian!"""

        for icrsh in range(self.n_corr_shells):

            iorb = self.corr_to_inequiv[icrsh]   # iorb is the index of the inequivalent shell corresponding to icrsh
            if iorb != orb: continue             # ignore this orbital

            Ncr = {}
            dim = self.corr_shells[icrsh][3] #*(1+self.corr_shells[icrsh][4])

            for j in range(len(self.gf_struct_sumk[icrsh])):
                self.dc_imp[icrsh]['%s'%self.gf_struct_sumk[icrsh][j][0]] = numpy.identity(dim,numpy.float_)
                blname = self.gf_struct_sumk[icrsh][j][0]
                Ncr[blname] = 0.0

            for block,inner in self.gf_struct_solver[iorb].iteritems():
                bl = self.solver_to_sumk_block[iorb][block]
                Ncr[bl] += dens_mat[block].real.trace()

            Ncrtot = 0.0
            block_ind_list = [ block for block,inner in self.gf_struct_sumk[icrsh] ]
            for bl in block_ind_list:
                Ncrtot += Ncr[bl]

            # average the densities if there is no SP:
            if self.SP == 0:
                for bl in block_ind_list:
                    Ncr[bl] = Ncrtot / len(block_ind_list)
            # correction for SO: we have only one block in this case, but in the DC we need N/2
            elif self.SP == 1 and self.SO == 1:
                for bl in block_ind_list:
                    Ncr[bl] = Ncrtot / 2.0

            if use_val is None:

                if use_dc_formula == 0: # FLL

                    self.dc_energ[icrsh] = U_interact / 2.0 * Ncrtot * (Ncrtot-1.0)
                    for bl in block_ind_list:
                        Uav = U_interact*(Ncrtot-0.5) - J_hund*(Ncr[bl] - 0.5)
                        self.dc_imp[icrsh][bl] *= Uav
                        self.dc_energ[icrsh] -= J_hund / 2.0 * (Ncr[bl]) * (Ncr[bl]-1.0)
                        mpi.report("DC for shell %(icrsh)i and block %(bl)s = %(Uav)f"%locals())

                elif use_dc_formula == 1: # Held's formula, with U_interact being the inter-orbital onsite interaction

                    self.dc_energ[icrsh] = (U_interact + (dim-1)*(U_interact-2.0*J_hund) + (dim-1)*(U_interact-3.0*J_hund))/(2*dim-1) / 2.0 * Ncrtot * (Ncrtot-1.0)
                    for bl in block_ind_list:
                        Uav = (U_interact + (dim-1)*(U_interact-2.0*J_hund) + (dim-1)*(U_interact-3.0*J_hund))/(2*dim-1) * (Ncrtot-0.5)
                        self.dc_imp[icrsh][bl] *= Uav
                        mpi.report("DC for shell %(icrsh)i and block %(bl)s = %(Uav)f"%locals())

                elif use_dc_formula == 2: # AMF

                    self.dc_energ[icrsh] = 0.5 * U_interact * Ncrtot * Ncrtot
                    for bl in block_ind_list:
                        Uav = U_interact*(Ncrtot - Ncr[bl]/dim) - J_hund * (Ncr[bl] - Ncr[bl]/dim)
                        self.dc_imp[icrsh][bl] *= Uav
                        self.dc_energ[icrsh] -= (U_interact + (dim-1)*J_hund)/dim * 0.5 * Ncr[bl] * Ncr[bl]
                        mpi.report("DC for shell %(icrsh)i and block %(bl)s = %(Uav)f"%locals())

                # output:
                mpi.report("DC energy for shell %s = %s"%(icrsh,self.dc_energ[icrsh]))

            else:

                block_ind_list = [ block for block,inner in self.gf_struct_sumk[icrsh] ]
                for bl in block_ind_list:
                    self.dc_imp[icrsh][bl] *= use_val

                self.dc_energ[icrsh] = use_val * Ncrtot

                # output:
                mpi.report("DC for shell %(icrsh)i = %(use_val)f"%locals())
                mpi.report("DC energy = %s"%self.dc_energ[icrsh])
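
    # A worked FLL example (illustrative numbers only): for a shell with total occupancy
    # Ncrtot = 8.0 split equally over two spin blocks (Ncr['up'] = Ncr['down'] = 4.0),
    # U_interact = 4.0 and J_hund = 0.7, use_dc_formula = 0 above gives, per block,
    #
    #   Uav = U*(N - 0.5) - J*(N_sigma - 0.5) = 4.0*7.5 - 0.7*3.5 = 27.55
    #
    # and a DC energy of U/2*N*(N-1) - J/2*sum_sigma N_sigma*(N_sigma-1) = 112.0 - 8.4 = 103.6,
    # in the same energy units as U and J.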

    def put_Sigma(self, Sigma_imp):
        """Puts the impurity self energies of the inequivalent atoms into the class, respecting the multiplicity of the atoms."""

        assert isinstance(Sigma_imp,list), "Sigma_imp has to be a list of Sigmas for the correlated shells, even if it is of length 1!"
        assert len(Sigma_imp) == self.n_inequiv_shells, "give exactly one Sigma for each inequivalent corr. shell!"

        # init self.Sigma_imp:
        if all(type(gf) == GfImFreq for bname,gf in Sigma_imp[0]):
            # Imaginary frequency Sigma:
            self.Sigma_imp = [ BlockGf( name_block_generator = [ (block,GfImFreq(indices = inner, mesh = Sigma_imp[0].mesh)) for block,inner in self.gf_struct_sumk[icrsh] ],
                                        make_copies = False) for icrsh in range(self.n_corr_shells) ]
        elif all(type(gf) == GfReFreq for bname,gf in Sigma_imp[0]):
            # Real frequency Sigma:
            self.Sigma_imp = [ BlockGf( name_block_generator = [ (block,GfReFreq(indices = inner, mesh = Sigma_imp[0].mesh)) for block,inner in self.gf_struct_sumk[icrsh] ],
                                        make_copies = False) for icrsh in range(self.n_corr_shells) ]
        else:
            raise ValueError, "This type of Sigma is not handled."

        # transform the CTQMC blocks to the full matrix:
        for icrsh in range(self.n_corr_shells):
            ish = self.corr_to_inequiv[icrsh]   # ish is the index of the inequivalent shell corresponding to icrsh

            for block,inner in self.gf_struct_solver[ish].iteritems():
                for ind1 in inner:
                    for ind2 in inner:
                        block_sumk,ind1_sumk = self.solver_to_sumk[ish][(block,ind1)]
                        block_sumk,ind2_sumk = self.solver_to_sumk[ish][(block,ind2)]
                        self.Sigma_imp[icrsh][block_sumk][ind1_sumk,ind2_sumk] << Sigma_imp[ish][block][ind1,ind2]

        # rotation from the local to the global coordinate system:
        if self.use_rotations:
            for icrsh in range(self.n_corr_shells):
                for bname,gf in self.Sigma_imp[icrsh]: self.Sigma_imp[icrsh][bname] << self.rotloc(icrsh,gf,direction='toGlobal')

    def add_dc(self):
        """Subtracts the double counting term from the impurity self energy."""

        # Be careful: Sigma_imp is already in the global coordinate system!!
        sres = [ s.copy() for s in self.Sigma_imp ]
        for icrsh in range(self.n_corr_shells):
            for bname,gf in sres[icrsh]:
                # Transform dc_imp to the global coordinate system:
                dccont = numpy.dot(self.rot_mat[icrsh],numpy.dot(self.dc_imp[icrsh][bname],self.rot_mat[icrsh].conjugate().transpose()))
                sres[icrsh][bname] -= dccont

        return sres # list of self energies corrected by DC

    def set_mu(self,mu):
        """Sets a new chemical potential."""

        self.chemical_potential = mu


    def total_density(self, mu):
        """
        Calculates the total charge within the energy window for a given chemical potential mu.
        Since in general n_orbitals depends on k, the calculation is done in the following order:
        G_aa'(k,iw) -> n(k) = Tr G_aa'(k,iw) -> sum_k n(k)

        The calculation is done in the global coordinate system, if a distinction is made between local/global.
        """

        dens = 0.0
        ikarray = numpy.array(range(self.n_k))

        for ik in mpi.slice_array(ikarray):

            S = self.lattice_gf_matsubara(ik=ik,mu=mu)
            dens += self.bz_weights[ik] * S.total_density()

        # collect data from mpi:
        dens = mpi.all_reduce(mpi.world, dens, lambda x,y : x+y)
        mpi.barrier()

        return dens


    def find_mu(self, precision = 0.01):
        """
        Searches for the chemical potential mu that gives the required charge.
        The desired precision can be specified via the argument precision.
        """

        F = lambda mu : self.total_density(mu = mu)

        density = self.density_required - self.charge_below

        self.chemical_potential = dichotomy.dichotomy(function = F,
                                                      x_init = self.chemical_potential, y_value = density,
                                                      precision_on_y = precision, delta_x = 0.5, max_loops = 100,
                                                      x_name = "Chemical Potential", y_name = "Total Density",
                                                      verbosity = 3)[0]

        return self.chemical_potential
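
    # Illustrative call pattern (a sketch; numbers are placeholders): after put_Sigma()
    # the chemical potential can be re-adjusted for the current self energy with
    #
    #   mu = SK.find_mu(precision = 0.001)
    #
    # find_mu simply runs the dichotomy (bisection-like) search above on total_density(mu).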

    def extract_G_loc(self, mu=None, with_Sigma = True):
        """
        Extracts the k-summed, downfolded local Green's function at the chemical potential of the class.
        At the end, the local G is rotated from the global coordinate system to the local system.
        if with_Sigma = False: Sigma is not included => non-interacting local GF
        """

        if mu is None: mu = self.chemical_potential

        Gloc = [ self.Sigma_imp[icrsh].copy() for icrsh in range(self.n_corr_shells) ]   # this list will be returned
        for icrsh in range(self.n_corr_shells): Gloc[icrsh].zero()                       # initialise to zero
        beta = Gloc[0].mesh.beta

        ikarray = numpy.array(range(self.n_k))

        for ik in mpi.slice_array(ikarray):

            S = self.lattice_gf_matsubara(ik=ik,mu=mu,with_Sigma = with_Sigma, beta = beta)
            S *= self.bz_weights[ik]

            for icrsh in range(self.n_corr_shells):
                tmp = Gloc[icrsh].copy()   # init temporary storage
                for bname,gf in tmp: tmp[bname] << self.downfold(ik,icrsh,bname,S[bname],gf)
                Gloc[icrsh] += tmp

        # collect data from mpi:
        for icrsh in range(self.n_corr_shells):
            Gloc[icrsh] << mpi.all_reduce(mpi.world, Gloc[icrsh], lambda x,y : x+y)
        mpi.barrier()

        # Gloc[:] is now the sum over k projected to the local orbitals.
        # here comes the symmetrisation, if needed:
        if self.symm_op != 0: Gloc = self.symmcorr.symmetrize(Gloc)

        # Gloc is rotated to the local coordinate system:
        if self.use_rotations:
            for icrsh in range(self.n_corr_shells):
                for bname,gf in Gloc[icrsh]: Gloc[icrsh][bname] << self.rotloc(icrsh,gf,direction='toLocal')

        # transform to CTQMC blocks:
        Glocret = [ BlockGf( name_block_generator = [ (block,GfImFreq(indices = inner, mesh = Gloc[0].mesh)) for block,inner in self.gf_struct_solver[ish].iteritems() ],
                             make_copies = False) for ish in range(self.n_inequiv_shells) ]
        for ish in range(self.n_inequiv_shells):

            for block,inner in self.gf_struct_solver[ish].iteritems():
                for ind1 in inner:
                    for ind2 in inner:
                        block_sumk,ind1_sumk = self.solver_to_sumk[ish][(block,ind1)]
                        block_sumk,ind2_sumk = self.solver_to_sumk[ish][(block,ind2)]
                        Glocret[ish][block][ind1,ind2] << Gloc[self.inequiv_to_corr[ish]][block_sumk][ind1_sumk,ind2_sumk]

        # return only the inequivalent shells:
        return Glocret
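
    # How the pieces above are typically combined in one DMFT iteration (a hedged sketch;
    # 'solver' stands for any impurity solver object with a Sigma attribute, e.g. Hubbard I
    # or CTQMC, and is not defined in this module):
    #
    #   SK.put_Sigma(Sigma_imp = [ solver.Sigma ])   # load the impurity self energy
    #   SK.find_mu()                                 # adjust mu to the required density
    #   Gloc = SK.extract_G_loc()                    # k-summed local GF per inequivalent shell
    #   # ... build G0 for the solver from Gloc and solver.Sigma, run the solver, then call
    #   # SK.set_dc(...) with the new density matrix and repeat until converged.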

    def calc_density_correction(self,filename = 'dens_mat.dat'):
        """Calculates the density correction in order to feed it back to the DFT calculation."""

        assert type(filename) == StringType, "filename has to be a string!"

        ntoi = self.spin_names_to_ind[self.SO]
        bln = self.spin_block_names[self.SO]

        # Set up deltaN:
        deltaN = {}
        for b in bln:
            deltaN[b] = [ numpy.zeros( [self.n_orbitals[ik,ntoi[b]],self.n_orbitals[ik,ntoi[b]]], numpy.complex_) for ik in range(self.n_k) ]

        ikarray = numpy.array(range(self.n_k))

        dens = {}
        for b in bln:
            dens[b] = 0.0

        for ik in mpi.slice_array(ikarray):
            S = self.lattice_gf_matsubara(ik=ik,mu=self.chemical_potential)
            for bname,gf in S:
                deltaN[bname][ik] = S[bname].density()
                dens[bname] += self.bz_weights[ik] * S[bname].total_density()

        # collect data from mpi and put a barrier:
        for bname in deltaN:
            for ik in range(self.n_k):
                deltaN[bname][ik] = mpi.all_reduce(mpi.world, deltaN[bname][ik], lambda x,y : x+y)
            dens[bname] = mpi.all_reduce(mpi.world, dens[bname], lambda x,y : x+y)
        mpi.barrier()

        # now save to file:
        if mpi.is_master_node():
            if self.SP == 0:
                f = open(filename,'w')
            else:
                f = open(filename+'up','w')
                f1 = open(filename+'dn','w')
            # write the chemical potential (in Rydberg):
            f.write("%.14f\n"%(self.chemical_potential/self.energy_unit))
            if self.SP != 0: f1.write("%.14f\n"%(self.chemical_potential/self.energy_unit))
            # write beta (in Rydberg^-1):
            f.write("%.14f\n"%(S.mesh.beta*self.energy_unit))
            if self.SP != 0: f1.write("%.14f\n"%(S.mesh.beta*self.energy_unit))

            if self.SP == 0: # no spin-polarization

                for ik in range(self.n_k):
                    f.write("%s\n"%self.n_orbitals[ik,0])
                    for inu in range(self.n_orbitals[ik,0]):
                        for imu in range(self.n_orbitals[ik,0]):
                            valre = (deltaN['up'][ik][inu,imu].real + deltaN['down'][ik][inu,imu].real) / 2.0
                            valim = (deltaN['up'][ik][inu,imu].imag + deltaN['down'][ik][inu,imu].imag) / 2.0
                            f.write("%.14f %.14f "%(valre,valim))
                        f.write("\n")
                    f.write("\n")
                f.close()

            elif self.SP == 1: # with spin-polarization

                # dict of file handle: (spin index, block_name)
                if self.SO == 0: to_write = {f: (0, 'up'), f1: (1, 'down')}
                if self.SO == 1: to_write = {f: (0, 'ud'), f1: (0, 'ud')}
                for fout in to_write.iterkeys():
                    isp, bn = to_write[fout]
                    for ik in range(self.n_k):
                        fout.write("%s\n"%self.n_orbitals[ik,isp])
                        for inu in range(self.n_orbitals[ik,isp]):
                            for imu in range(self.n_orbitals[ik,isp]):
                                fout.write("%.14f %.14f "%(deltaN[bn][ik][inu,imu].real,deltaN[bn][ik][inu,imu].imag))
                            fout.write("\n")
                        fout.write("\n")
                    fout.close()

        return deltaN, dens


    ################
    # FIXME LEAVE UNDOCUMENTED
    ################

    # FIXME Merge with find_mu?
    def find_mu_nonint(self, dens_req, orb = None, precision = 0.01):

        def F(mu):
            gnonint = self.extract_G_loc(mu=mu,with_Sigma=False)

            if orb is None:
                dens = 0.0
                for ish in range(self.n_inequiv_shells):
                    dens += gnonint[ish].total_density()
            else:
                dens = gnonint[orb].total_density()

            return dens

        self.chemical_potential = dichotomy.dichotomy(function = F,
                                                      x_init = self.chemical_potential, y_value = dens_req,
                                                      precision_on_y = precision, delta_x = 0.5, max_loops = 100,
                                                      x_name = "Chemical Potential", y_name = "Total Density",
                                                      verbosity = 3)[0]

        return self.chemical_potential

    def find_dc(self,orb,guess,dens_mat,dens_req=None,precision=0.01):
        """Searches for the double counting value that fulfills charge neutrality.
           If dens_req is given, the DC is instead set such that the LOCAL charge of orbital
           orb coincides with dens_req."""

        mu = self.chemical_potential

        def F(dc):
            self.set_dc(dens_mat=dens_mat,U_interact=0,J_hund=0,orb=orb,use_val=dc)
            if dens_req is None:
                return self.total_density(mu=mu)
            else:
                return self.extract_G_loc()[orb].total_density()

        if dens_req is None:
            density = self.density_required - self.charge_below
        else:
            density = dens_req

        dcnew = dichotomy.dichotomy(function = F,
                                    x_init = guess, y_value = density,
                                    precision_on_y = precision, delta_x = 0.5,
                                    max_loops = 100, x_name = "Double-Counting", y_name = "Total Density",
                                    verbosity = 3)[0]

        return dcnew

    # Check that the density matrix from the projectors (DM = P Pdagger) is correct (i.e. matches DFT).
    def check_projectors(self):

        dens_mat = [ numpy.zeros([self.corr_shells[icrsh][3],self.corr_shells[icrsh][3]],numpy.complex_)
                     for icrsh in range(self.n_corr_shells) ]

        for ik in range(self.n_k):
            for icrsh in range(self.n_corr_shells):
                dim = self.corr_shells[icrsh][3]
                n_orb = self.n_orbitals[ik,0]
                projmat = self.proj_mat[ik,0,icrsh,0:dim,0:n_orb]
                dens_mat[icrsh][:,:] += numpy.dot(projmat, projmat.transpose().conjugate()) * self.bz_weights[ik]

        if self.symm_op != 0: dens_mat = self.symmcorr.symmetrize(dens_mat)

        # Rotate to the local coordinate system:
        if self.use_rotations:
            for icrsh in range(self.n_corr_shells):
                if self.rot_mat_time_inv[icrsh] == 1: dens_mat[icrsh] = dens_mat[icrsh].conjugate()
                dens_mat[icrsh] = numpy.dot( numpy.dot(self.rot_mat[icrsh].conjugate().transpose(),dens_mat[icrsh]),
                                             self.rot_mat[icrsh] )

        return dens_mat

    # Determine the number of inequivalent sorts of atoms.
    def sorts_of_atoms(self,lst):
        """
        Determines the number of sorts of atoms in the double list lst.
        """
        sortlst = [ lst[i][1] for i in range(len(lst)) ]
        sorts = len(set(sortlst))
        return sorts


    # Determine the number of atoms.
    def number_of_atoms(self,lst):
        """
        Determines the number of atoms in the double list lst.
        """
        atomlst = [ lst[i][0] for i in range(len(lst)) ]
        atoms = len(set(atomlst))
        return atoms
|