mirror of https://github.com/triqs/dft_tools synced 2024-12-21 20:03:41 +01:00

Unify notation in sumk_dft_tools.

You *may* need to run
"pytriqs update_archive.py filename.h5 v1.2"
to update the archive if dft_parproj_input is present.
Priyanka Seth 2015-01-22 10:47:53 +01:00
parent 8cb45ddd45
commit c1ac9c85c8
6 changed files with 54 additions and 45 deletions
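For archives that already contain dft_parproj_input, the v1.2 update step boils down to renaming the partial-projector dataset. A minimal h5py sketch of that rename (group and dataset names taken from the update script changed below; this is not the updater itself):

import h5py

ar = h5py.File('filename.h5', 'a')
if 'dft_parproj_input' in ar and 'proj_mat_pc' in ar['dft_parproj_input']:
    # rename the dataset to the unified name now used throughout SumkDFT
    ar['dft_parproj_input'].move('proj_mat_pc', 'proj_mat_all')
ar.close()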

View File

@@ -228,7 +228,7 @@ class Wien2kConverter(ConverterTools):
n_parproj = numpy.array(n_parproj)
# Initialise P, here a double list of matrices:
proj_mat_pc = numpy.zeros([self.n_k,self.n_spin_blocs,self.n_shells,max(n_parproj),max([sh['dim'] for sh in self.shells]),max(self.n_orbitals)],numpy.complex_)
proj_mat_all = numpy.zeros([self.n_k,self.n_spin_blocs,self.n_shells,max(n_parproj),max([sh['dim'] for sh in self.shells]),max(self.n_orbitals)],numpy.complex_)
rot_mat_all = [numpy.identity(self.shells[ish]['dim'],numpy.complex_) for ish in range(self.n_shells)]
rot_mat_all_time_inv = [0 for i in range(self.n_shells)]
@@ -241,12 +241,12 @@ class Wien2kConverter(ConverterTools):
for isp in range(self.n_spin_blocs):
for i in range(self.shells[ish]['dim']): # read real part:
for j in range(self.n_orbitals[ik][isp]):
proj_mat_pc[ik,isp,ish,ir,i,j] = R.next()
proj_mat_all[ik,isp,ish,ir,i,j] = R.next()
for isp in range(self.n_spin_blocs):
for i in range(self.shells[ish]['dim']): # read imaginary part:
for j in range(self.n_orbitals[ik][isp]):
proj_mat_pc[ik,isp,ish,ir,i,j] += 1j * R.next()
proj_mat_all[ik,isp,ish,ir,i,j] += 1j * R.next()
# now read the Density Matrix for this orbital below the energy window:
@@ -278,7 +278,7 @@ class Wien2kConverter(ConverterTools):
ar = HDFArchive(self.hdf_file,'a')
if not (self.parproj_subgrp in ar): ar.create_group(self.parproj_subgrp)
# The subgroup containing the data. If it does not exist, it is created. If it exists, the data is overwritten!
things_to_save = ['dens_mat_below','n_parproj','proj_mat_pc','rot_mat_all','rot_mat_all_time_inv']
things_to_save = ['dens_mat_below','n_parproj','proj_mat_all','rot_mat_all','rot_mat_all_time_inv']
for it in things_to_save: ar[self.parproj_subgrp][it] = locals()[it]
del ar
@@ -338,7 +338,7 @@ class Wien2kConverter(ConverterTools):
n_parproj = numpy.array(n_parproj)
# Initialise P, here a double list of matrices:
proj_mat_pc = numpy.zeros([n_k,self.n_spin_blocs,self.n_shells,max(n_parproj),max([sh['dim'] for sh in self.shells]),max(n_orbitals)],numpy.complex_)
proj_mat_all = numpy.zeros([n_k,self.n_spin_blocs,self.n_shells,max(n_parproj),max([sh['dim'] for sh in self.shells]),max(n_orbitals)],numpy.complex_)
for ish in range(self.n_shells):
for ik in range(n_k):
@@ -347,11 +347,11 @@ class Wien2kConverter(ConverterTools):
for i in range(self.shells[ish]['dim']): # read real part:
for j in range(n_orbitals[ik,isp]):
proj_mat_pc[ik,isp,ish,ir,i,j] = R.next()
proj_mat_all[ik,isp,ish,ir,i,j] = R.next()
for i in range(self.shells[ish]['dim']): # read imaginary part:
for j in range(n_orbitals[ik,isp]):
proj_mat_pc[ik,isp,ish,ir,i,j] += 1j * R.next()
proj_mat_all[ik,isp,ish,ir,i,j] += 1j * R.next()
except StopIteration : # a more explicit error if the file is corrupted.
raise "Wien2k_converter : reading file band_file failed!"
@@ -363,7 +363,7 @@ class Wien2kConverter(ConverterTools):
ar = HDFArchive(self.hdf_file,'a')
if not (self.bands_subgrp in ar): ar.create_group(self.bands_subgrp)
# The subgroup containing the data. If it does not exist, it is created. If it exists, the data is overwritten!
things_to_save = ['n_k','n_orbitals','proj_mat','hopping','n_parproj','proj_mat_pc']
things_to_save = ['n_k','n_orbitals','proj_mat','hopping','n_parproj','proj_mat_all']
for it in things_to_save: ar[self.bands_subgrp][it] = locals()[it]
del ar
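For context, a hedged sketch of a conversion run that writes these subgroups; the import path and the convert_dft_input / convert_parproj_input / convert_bands_input method names are assumptions about this version's converter API, not taken from the diff:

from pytriqs.applications.dft.converters.wien2k_converter import Wien2kConverter

Converter = Wien2kConverter(filename='case')  # 'case' is a placeholder Wien2k case name
Converter.convert_dft_input()        # writes dft_input
Converter.convert_parproj_input()    # writes dft_parproj_input, including proj_mat_all
Converter.convert_bands_input()      # writes dft_bands_input, including proj_mat_all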

View File

@@ -182,7 +182,7 @@ class SumkDFT:
elif shells == 'all':
if ir is None: raise ValueError, "downfold: provide ir if treating all shells."
dim = self.shells[ish]['dim']
projmat = self.proj_mat_pc[ik,isp,ish,ir,0:dim,0:n_orb]
projmat = self.proj_mat_all[ik,isp,ish,ir,0:dim,0:n_orb]
gf_downfolded.from_L_G_R(projmat,gf_to_downfold,projmat.conjugate().transpose())
@@ -201,7 +201,7 @@ class SumkDFT:
elif shells == 'all':
if ir is None: raise ValueError, "upfold: provide ir if treating all shells."
dim = self.shells[ish]['dim']
projmat = self.proj_mat_pc[ik,isp,ish,ir,0:dim,0:n_orb]
projmat = self.proj_mat_all[ik,isp,ish,ir,0:dim,0:n_orb]
gf_upfolded.from_L_G_R(projmat.conjugate().transpose(),gf_to_upfold,projmat)
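In both routines the from_L_G_R call with proj_mat_all implements the standard projection; written out, with the index convention assumed here that m, m' label the orbitals of shell ish and \nu, \nu' the Bloch bands at k:

G^{\mathrm{down}}_{m m'}(k) = \sum_{\nu\nu'} P_{m\nu}(k)\, G^{\mathrm{latt}}_{\nu\nu'}(k)\, P^{*}_{m'\nu'}(k),
\qquad
G^{\mathrm{up}}_{\nu\nu'}(k) = \sum_{m m'} P^{*}_{m\nu}(k)\, G_{m m'}\, P_{m'\nu'}(k).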
@@ -358,9 +358,9 @@ class SumkDFT:
"""
if mu is None: mu = self.chemical_potential
Gloc = [ self.Sigma_imp_iw[icrsh].copy() for icrsh in range(self.n_corr_shells) ] # this list will be returned
for icrsh in range(self.n_corr_shells): Gloc[icrsh].zero() # initialize to zero
beta = Gloc[0].mesh.beta
G_loc = [ self.Sigma_imp_iw[icrsh].copy() for icrsh in range(self.n_corr_shells) ] # this list will be returned
for icrsh in range(self.n_corr_shells): G_loc[icrsh].zero() # initialize to zero
beta = G_loc[0].mesh.beta
ikarray = numpy.array(range(self.n_k))
for ik in mpi.slice_array(ikarray):
@@ -369,25 +369,25 @@ class SumkDFT:
G_latt_iw *= self.bz_weights[ik]
for icrsh in range(self.n_corr_shells):
tmp = Gloc[icrsh].copy() # init temporary storage
tmp = G_loc[icrsh].copy() # init temporary storage
for bname,gf in tmp: tmp[bname] << self.downfold(ik,icrsh,bname,G_latt_iw[bname],gf)
Gloc[icrsh] += tmp
G_loc[icrsh] += tmp
# collect data from mpi
for icrsh in range(self.n_corr_shells): Gloc[icrsh] << mpi.all_reduce(mpi.world, Gloc[icrsh], lambda x,y : x+y)
for icrsh in range(self.n_corr_shells): G_loc[icrsh] << mpi.all_reduce(mpi.world, G_loc[icrsh], lambda x,y : x+y)
mpi.barrier()
# Gloc[:] is now the sum over k projected to the local orbitals.
# G_loc[:] is now the sum over k projected to the local orbitals.
# here comes the symmetrisation, if needed:
if self.symm_op != 0: Gloc = self.symmcorr.symmetrize(Gloc)
if self.symm_op != 0: G_loc = self.symmcorr.symmetrize(G_loc)
# Gloc is rotated to the local coordinate system:
# G_loc is rotated to the local coordinate system:
if self.use_rotations:
for icrsh in range(self.n_corr_shells):
for bname,gf in Gloc[icrsh]: Gloc[icrsh][bname] << self.rotloc(icrsh,gf,direction='toLocal')
for bname,gf in G_loc[icrsh]: G_loc[icrsh][bname] << self.rotloc(icrsh,gf,direction='toLocal')
# transform to CTQMC blocks:
Gloc_inequiv = [ BlockGf( name_block_generator = [ (block,GfImFreq(indices = inner, mesh = Gloc[0].mesh)) for block,inner in self.gf_struct_solver[ish].iteritems() ],
G_loc_inequiv = [ BlockGf( name_block_generator = [ (block,GfImFreq(indices = inner, mesh = G_loc[0].mesh)) for block,inner in self.gf_struct_solver[ish].iteritems() ],
make_copies = False) for ish in range(self.n_inequiv_shells) ]
for ish in range(self.n_inequiv_shells):
for block,inner in self.gf_struct_solver[ish].iteritems():
@@ -395,10 +395,10 @@ class SumkDFT:
for ind2 in inner:
block_sumk,ind1_sumk = self.solver_to_sumk[ish][(block,ind1)]
block_sumk,ind2_sumk = self.solver_to_sumk[ish][(block,ind2)]
Gloc_inequiv[ish][block][ind1,ind2] << Gloc[self.inequiv_to_corr[ish]][block_sumk][ind1_sumk,ind2_sumk]
G_loc_inequiv[ish][block][ind1,ind2] << G_loc[self.inequiv_to_corr[ish]][block_sumk][ind1_sumk,ind2_sumk]
# return only the inequivalent shells:
return Gloc_inequiv
return G_loc_inequiv
def analyse_block_structure(self, threshold = 0.00001, include_shells = None, dm = None):
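A hedged usage sketch of the routine above, which is assumed to be SumkDFT.extract_G_loc; the put_Sigma and set_mu calls are assumptions about the rest of the class API, and SK is an existing SumkDFT instance:

SK.put_Sigma(Sigma_imp=[S.Sigma_iw])   # impurity self-energies from the solver(s)
SK.set_mu(chemical_potential)          # or rely on the stored self.chemical_potential
G_loc_inequiv = SK.extract_G_loc()     # one BlockGf per inequivalent correlated shell
for ish in range(len(G_loc_inequiv)):
    print ish, G_loc_inequiv[ish].total_density()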
@@ -721,7 +721,7 @@ class SumkDFT:
for bl in degsh: gf_to_symm[bl] << ss
def total_density(self, mu):
def total_density(self, mu=None):
"""
Calculates the total charge for the energy window for a given chemical potential mu.
Since in general n_orbitals depends on k, the calculation is done in the following order:
@@ -730,6 +730,7 @@
The calculation is done in the global coordinate system, if distinction is made between local/global!
"""
if mu is None: mu = self.chemical_potential
dens = 0.0
ikarray = numpy.array(range(self.n_k))
for ik in mpi.slice_array(ikarray):
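With mu now optional in total_density, both call styles work on an existing SumkDFT instance SK; a minimal sketch:

dens = SK.total_density()              # uses the stored self.chemical_potential
dens_at_mu = SK.total_density(mu=0.5)  # explicit chemical potential, as before
print "Total charge in the window:", dens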

View File

@@ -34,7 +34,6 @@ class SumkDFTTools(SumkDFT):
parproj_data = 'dft_parproj_input', symmpar_data = 'dft_symmpar_input', bands_data = 'dft_bands_input',
transp_data = 'dft_transp_input'):
#self.G_latt_w = None # DEBUG -- remove later
SumkDFT.__init__(self, hdf_file=hdf_file, h_field=h_field, use_dft_blocks=use_dft_blocks,
dft_data=dft_data, symmcorr_data=symmcorr_data, parproj_data=parproj_data,
symmpar_data=symmpar_data, bands_data=bands_data, transp_data=transp_data)
@@ -42,7 +41,6 @@ class SumkDFTTools(SumkDFT):
def check_input_dos(self, om_min, om_max, n_om, beta=10, broadening=0.01):
delta_om = (om_max-om_min)/(n_om-1)
om_mesh = numpy.zeros([n_om],numpy.float_)
for i in range(n_om): om_mesh[i] = om_min + delta_om * i
@@ -83,8 +81,6 @@ class SumkDFTTools(SumkDFT):
for bname,gf in tmp: tmp[bname] << self.downfold(ik,icrsh,bname,G_latt_w[bname],gf) # downfolding G
Gloc[icrsh] += tmp
if self.symm_op != 0: Gloc = self.symmcorr.symmetrize(Gloc)
if self.use_rotations:
@@ -125,7 +121,7 @@ class SumkDFTTools(SumkDFT):
Reads the data for the partial projectors from the HDF file
"""
things_to_read = ['dens_mat_below','n_parproj','proj_mat_pc','rot_mat_all','rot_mat_all_time_inv']
things_to_read = ['dens_mat_below','n_parproj','proj_mat_all','rot_mat_all','rot_mat_all_time_inv']
value_read = self.read_input_from_hdf(subgrp=self.parproj_data,things_to_read = things_to_read)
return value_read
@@ -228,7 +224,7 @@ class SumkDFTTools(SumkDFT):
ATTENTION: Many things from the original input file are overwritten!!!"""
assert hasattr(self,"Sigma_imp_w"), "spaghettis: Set Sigma_imp_w first."
things_to_read = ['n_k','n_orbitals','proj_mat','hopping','n_parproj','proj_mat_pc']
things_to_read = ['n_k','n_orbitals','proj_mat','hopping','n_parproj','proj_mat_all']
value_read = self.read_input_from_hdf(subgrp=self.bands_data,things_to_read=things_to_read)
if not value_read: return value_read

View File

@@ -5,7 +5,7 @@ import numpy
import subprocess
if len(sys.argv) < 2:
print "Usage: python update_archive.py old_archive"
print "Usage: python update_archive.py old_archive [v1.0|v1.2]"
sys.exit()
print """
@@ -50,6 +50,10 @@ def det_shell_equivalence(corr_shells):
### Main ###
filename = sys.argv[1]
if len(sys.argv) > 2:
from_v = sys.argv[2]
else: # Assume updating an old v1.0 script
from_v = 'v1.0'
A = h5py.File(filename)
# Rename groups
@@ -76,18 +80,19 @@ for obj in to_delete:
if obj in A['dft_input'].keys():
del(A['dft_input'][obj])
# Update shells and corr_shells to list of dicts
shells_old = HDFArchive(filename,'r')['dft_input']['shells']
corr_shells_old = HDFArchive(filename,'r')['dft_input']['corr_shells']
shells = convert_shells(shells_old)
corr_shells = convert_corr_shells(corr_shells_old)
del(A['dft_input']['shells'])
del(A['dft_input']['corr_shells'])
A.close()
# Need to use HDFArchive for the following
HDFArchive(filename,'a')['dft_input']['shells'] = shells
HDFArchive(filename,'a')['dft_input']['corr_shells'] = corr_shells
A = h5py.File(filename)
if from_v == 'v1.0':
# Update shells and corr_shells to list of dicts
shells_old = HDFArchive(filename,'r')['dft_input']['shells']
corr_shells_old = HDFArchive(filename,'r')['dft_input']['corr_shells']
shells = convert_shells(shells_old)
corr_shells = convert_corr_shells(corr_shells_old)
del(A['dft_input']['shells'])
del(A['dft_input']['corr_shells'])
A.close()
# Need to use HDFArchive for the following
HDFArchive(filename,'a')['dft_input']['shells'] = shells
HDFArchive(filename,'a')['dft_input']['corr_shells'] = corr_shells
A = h5py.File(filename)
# Add shell equivalency quantities
if 'n_inequiv_shells' not in A['dft_input']:
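For orientation, a hedged sketch of what the convert_shells / convert_corr_shells helpers used above are expected to produce; they are defined earlier in the script and not shown in this diff, and the dictionary keys follow the usual dft_tools shell conventions and are an assumption here:

def convert_shells(shells_old):
    # old format: one flat list [atom, sort, l, dim] per shell
    keys = ['atom', 'sort', 'l', 'dim']
    return [dict(zip(keys, [int(x) for x in sh])) for sh in shells_old]

def convert_corr_shells(corr_shells_old):
    # old format: [atom, sort, l, dim, SO, irep] per correlated shell
    keys = ['atom', 'sort', 'l', 'dim', 'SO', 'irep']
    return [dict(zip(keys, [int(x) for x in sh])) for sh in corr_shells_old]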
@@ -98,10 +103,9 @@ if 'n_inequiv_shells' not in A['dft_input']:
# Rename variables
groups = ['dft_symmcorr_input','dft_symmpar_input']
for group in groups:
if group not in A.keys(): continue
if 'n_s' not in group: continue
if 'n_s' not in A[group]: continue
print "Changing n_s to n_symm ..."
A[group].move('n_s','n_symm')
# Convert orbits to list of dicts
@@ -111,6 +115,14 @@ for group in groups:
A.close()
HDFArchive(filename,'a')[group]['orbits'] = orbits
A = h5py.File(filename)
groups = ['dft_parproj_input']
for group in groups:
if group not in A.keys(): continue
if 'proj_mat_pc' not in A[group]: continue
print "Changing proj_mat_pc to proj_mat_all ..."
A[group].move('proj_mat_pc','proj_mat_all')
A.close()
# Repack to reclaim disk space
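The repack comment above refers to the fact that deleting or moving datasets does not shrink an HDF5 file in place; one way to do it, assuming the h5repack tool from the HDF5 distribution is available on the PATH:

import os, subprocess
temp = filename + '.repacked'
retcode = subprocess.call(['h5repack', '-i', filename, '-o', temp])
if retcode == 0:
    os.rename(temp, filename)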

Binary file not shown.

Binary file not shown.