change del to with when reading hdf

This commit is contained in:
Manuel 2018-12-06 17:28:49 -05:00
parent 85c8c2b58c
commit a8c7569830
16 changed files with 288 additions and 481 deletions

View File

@ -1,147 +0,0 @@
import pytriqs.utility.mpi as mpi
from pytriqs.operators.util import *
from pytriqs.archive import HDFArchive
from triqs_cthyb import *
from pytriqs.gf import *
from triqs_dft_tools.sumk_dft import *
from triqs_dft_tools.converters.wien2k_converter import *
dft_filename='Gd_fcc'
U = 9.6
J = 0.8
beta = 40
loops = 10 # Number of DMFT sc-loops
sigma_mix = 1.0 # Mixing factor of Sigma after solution of the AIM
delta_mix = 1.0 # Mixing factor of Delta as input for the AIM
dc_type = 0 # DC type: 0 FLL, 1 Held, 2 AMF
use_blocks = True # use bloc structure from DFT input
prec_mu = 0.0001
h_field = 0.0
# Solver parameters
p = {}
p["max_time"] = -1
p["length_cycle"] = 50
p["n_warmup_cycles"] = 50
p["n_cycles"] = 5000
Converter = Wien2kConverter(filename=dft_filename, repacking=True)
Converter.convert_dft_input()
mpi.barrier()
previous_runs = 0
previous_present = False
if mpi.is_master_node():
f = HDFArchive(dft_filename+'.h5','a')
if 'dmft_output' in f:
ar = f['dmft_output']
if 'iterations' in ar:
previous_present = True
previous_runs = ar['iterations']
else:
f.create_group('dmft_output')
del f
previous_runs = mpi.bcast(previous_runs)
previous_present = mpi.bcast(previous_present)
SK=SumkDFT(hdf_file=dft_filename+'.h5',use_dft_blocks=use_blocks,h_field=h_field)
n_orb = SK.corr_shells[0]['dim']
l = SK.corr_shells[0]['l']
spin_names = ["up","down"]
orb_names = [i for i in range(n_orb)]
# Use GF structure determined by DFT blocks
gf_struct = [(block, indices) for block, indices in SK.gf_struct_solver[0].iteritems()]
# Construct U matrix for density-density calculations
Umat, Upmat = U_matrix_kanamori(n_orb=n_orb, U_int=U, J_hund=J)
# Construct Hamiltonian and solver
h_int = h_int_density(spin_names, orb_names, map_operator_structure=SK.sumk_to_solver[0], U=Umat, Uprime=Upmat, H_dump="H.txt")
S = Solver(beta=beta, gf_struct=gf_struct)
if previous_present:
chemical_potential = 0
dc_imp = 0
dc_energ = 0
if mpi.is_master_node():
S.Sigma_iw << HDFArchive(dft_filename+'.h5','a')['dmft_output']['Sigma_iw']
chemical_potential,dc_imp,dc_energ = SK.load(['chemical_potential','dc_imp','dc_energ'])
S.Sigma_iw << mpi.bcast(S.Sigma_iw)
chemical_potential = mpi.bcast(chemical_potential)
dc_imp = mpi.bcast(dc_imp)
dc_energ = mpi.bcast(dc_energ)
SK.set_mu(chemical_potential)
SK.set_dc(dc_imp,dc_energ)
for iteration_number in range(1,loops+1):
if mpi.is_master_node(): print "Iteration = ", iteration_number
SK.symm_deg_gf(S.Sigma_iw,orb=0) # symmetrise Sigma
SK.set_Sigma([ S.Sigma_iw ]) # set Sigma into the SumK class
chemical_potential = SK.calc_mu( precision = prec_mu ) # find the chemical potential for given density
S.G_iw << SK.extract_G_loc()[0] # calc the local Green function
mpi.report("Total charge of Gloc : %.6f"%S.G_iw.total_density())
# Init the DC term and the real part of Sigma, if no previous runs found:
if (iteration_number==1 and previous_present==False):
dm = S.G_iw.density()
SK.calc_dc(dm, U_interact = U, J_hund = J, orb = 0, use_dc_formula = dc_type)
S.Sigma_iw << SK.dc_imp[0]['up'][0,0]
# Calculate new G0_iw to input into the solver:
if mpi.is_master_node():
# We can do a mixing of Delta in order to stabilize the DMFT iterations:
S.G0_iw << S.Sigma_iw + inverse(S.G_iw)
ar = HDFArchive(dft_filename+'.h5','a')['dmft_output']
if (iteration_number>1 or previous_present):
mpi.report("Mixing input Delta with factor %s"%delta_mix)
Delta = (delta_mix * delta(S.G0_iw)) + (1.0-delta_mix) * ar['Delta_iw']
S.G0_iw << S.G0_iw + delta(S.G0_iw) - Delta
ar['Delta_iw'] = delta(S.G0_iw)
S.G0_iw << inverse(S.G0_iw)
del ar
S.G0_iw << mpi.bcast(S.G0_iw)
# Solve the impurity problem:
S.solve(h_int=h_int, **p)
# Solved. Now do post-processing:
mpi.report("Total charge of impurity problem : %.6f"%S.G_iw.total_density())
# Now mix Sigma and G with factor sigma_mix, if wanted:
if (iteration_number>1 or previous_present):
if mpi.is_master_node():
ar = HDFArchive(dft_filename+'.h5','a')['dmft_output']
mpi.report("Mixing Sigma and G with factor %s"%sigma_mix)
S.Sigma_iw << sigma_mix * S.Sigma_iw + (1.0-sigma_mix) * ar['Sigma_iw']
S.G_iw << sigma_mix * S.G_iw + (1.0-sigma_mix) * ar['G_iw']
del ar
S.G_iw << mpi.bcast(S.G_iw)
S.Sigma_iw << mpi.bcast(S.Sigma_iw)
# Write the final Sigma and G to the hdf5 archive:
if mpi.is_master_node():
ar = HDFArchive(dft_filename+'.h5','a')['dmft_output']
if previous_runs: iteration_number += previous_runs
ar['iterations'] = iteration_number
ar['G_tau'] = S.G_tau
ar['G_iw'] = S.G_iw
ar['Sigma_iw'] = S.Sigma_iw
ar['G0-%s'%(iteration_number)] = S.G0_iw
ar['G-%s'%(iteration_number)] = S.G_iw
ar['Sigma-%s'%(iteration_number)] = S.Sigma_iw
del ar
# Set the new double counting:
dm = S.G_iw.density() # compute the density matrix of the impurity problem
SK.calc_dc(dm, U_interact = U, J_hund = J, orb = 0, use_dc_formula = dc_type)
# Save stuff into the dft_output group of hdf5 archive in case of rerun:
SK.save(['chemical_potential','dc_imp','dc_energ'])
if mpi.is_master_node():
ar = HDFArchive("dftdmft.h5",'w')
ar["G_tau"] = S.G_tau
ar["G_iw"] = S.G_iw
ar["Sigma_iw"] = S.Sigma_iw

View File

@ -40,8 +40,8 @@ If required, we have to load and initialise the real-frequency self energy. Most
you have your self energy already stored as a real-frequency :class:`BlockGf <pytriqs.gf.BlockGf>` object you have your self energy already stored as a real-frequency :class:`BlockGf <pytriqs.gf.BlockGf>` object
in a hdf5 file:: in a hdf5 file::
ar = HDFArchive('case.h5', 'a') with HDFArchive('case.h5', 'r') as ar:
SigmaReFreq = ar['dmft_output']['Sigma_w'] SigmaReFreq = ar['dmft_output']['Sigma_w']
You may also have your self energy stored in text files. For this case the :ref:`TRIQS <triqslibs:welcome>` library offers You may also have your self energy stored in text files. For this case the :ref:`TRIQS <triqslibs:welcome>` library offers
the function :meth:`read_gf_from_txt`, which is able to load the data from text files of one Green function block the function :meth:`read_gf_from_txt`, which is able to load the data from text files of one Green function block
@ -73,7 +73,6 @@ and additionally set the chemical potential and the double counting correction f
chemical_potential, dc_imp, dc_energ = SK.load(['chemical_potential','dc_imp','dc_energ']) chemical_potential, dc_imp, dc_energ = SK.load(['chemical_potential','dc_imp','dc_energ'])
SK.set_mu(chemical_potential) SK.set_mu(chemical_potential)
SK.set_dc(dc_imp,dc_energ) SK.set_dc(dc_imp,dc_energ)
del ar
.. _dos_wannier: .. _dos_wannier:

View File

@ -106,15 +106,15 @@ are present, or if the calculation should start from scratch::
previous_runs = 0 previous_runs = 0
previous_present = False previous_present = False
if mpi.is_master_node(): if mpi.is_master_node():
f = HDFArchive(dft_filename+'.h5','a') with HDFArchive(dft_filename+'.h5','a') as f:
if 'dmft_output' in f: if 'dmft_output' in f:
ar = f['dmft_output'] ar = f['dmft_output']
if 'iterations' in ar: if 'iterations' in ar:
previous_present = True previous_present = True
previous_runs = ar['iterations'] previous_runs = ar['iterations']
else: else:
f.create_group('dmft_output') f.create_group('dmft_output')
del f
previous_runs = mpi.bcast(previous_runs) previous_runs = mpi.bcast(previous_runs)
previous_present = mpi.bcast(previous_present) previous_present = mpi.bcast(previous_present)
@ -126,9 +126,8 @@ double counting values of the last iteration::
if previous_present: if previous_present:
if mpi.is_master_node(): if mpi.is_master_node():
ar = HDFArchive(dft_filename+'.h5','a') with HDFArchive(dft_filename+'.h5','r') as ar:
S.Sigma_iw << ar['dmft_output']['Sigma_iw'] S.Sigma_iw << ar['dmft_output']['Sigma_iw']
del ar
S.Sigma_iw << mpi.bcast(S.Sigma_iw) S.Sigma_iw << mpi.bcast(S.Sigma_iw)
chemical_potential,dc_imp,dc_energ = SK.load(['chemical_potential','dc_imp','dc_energ']) chemical_potential,dc_imp,dc_energ = SK.load(['chemical_potential','dc_imp','dc_energ'])
@ -153,11 +152,10 @@ functions) can be necessary in order to ensure convergence::
mix = 0.8 # mixing factor mix = 0.8 # mixing factor
if (iteration_number>1 or previous_present): if (iteration_number>1 or previous_present):
if mpi.is_master_node(): if mpi.is_master_node():
ar = HDFArchive(dft_filename+'.h5','a') with HDFArchive(dft_filename+'.h5','r') as ar:
mpi.report("Mixing Sigma and G with factor %s"%mix) mpi.report("Mixing Sigma and G with factor %s"%mix)
S.Sigma_iw << mix * S.Sigma_iw + (1.0-mix) * ar['dmft_output']['Sigma_iw'] S.Sigma_iw << mix * S.Sigma_iw + (1.0-mix) * ar['dmft_output']['Sigma_iw']
S.G_iw << mix * S.G_iw + (1.0-mix) * ar['dmft_output']['G_iw'] S.G_iw << mix * S.G_iw + (1.0-mix) * ar['dmft_output']['G_iw']
del ar
S.G_iw << mpi.bcast(S.G_iw) S.G_iw << mpi.bcast(S.G_iw)
S.Sigma_iw << mpi.bcast(S.Sigma_iw) S.Sigma_iw << mpi.bcast(S.Sigma_iw)

View File

@ -96,12 +96,11 @@ The converter :meth:`convert_transport_input <dft.converters.wien2k_converter.Wi
reads the required data of the Wien2k output and stores it in the `dft_transp_input` subgroup of your hdf file. reads the required data of the Wien2k output and stores it in the `dft_transp_input` subgroup of your hdf file.
Additionally we need to read and set the self energy, the chemical potential and the double counting:: Additionally we need to read and set the self energy, the chemical potential and the double counting::
ar = HDFArchive('case.h5', 'a') with HDFArchive('case.h5', 'r') as ar:
SK.set_Sigma([ar['dmft_output']['Sigma_w']]) SK.set_Sigma([ar['dmft_output']['Sigma_w']])
chemical_potential,dc_imp,dc_energ = SK.load(['chemical_potential','dc_imp','dc_energ']) chemical_potential,dc_imp,dc_energ = SK.load(['chemical_potential','dc_imp','dc_energ'])
SK.set_mu(chemical_potential) SK.set_mu(chemical_potential)
SK.set_dc(dc_imp,dc_energ) SK.set_dc(dc_imp,dc_energ)
del ar
As next step we can calculate the transport distribution :math:`\Gamma_{\alpha\beta}(\omega)`:: As next step we can calculate the transport distribution :math:`\Gamma_{\alpha\beta}(\omega)`::

View File

@ -22,15 +22,14 @@ mpi.barrier()
previous_runs = 0 previous_runs = 0
previous_present = False previous_present = False
if mpi.is_master_node(): if mpi.is_master_node():
f = HDFArchive(dft_filename+'.h5','a') with HDFArchive(dft_filename+'.h5','a') as f:
if 'dmft_output' in f: if 'dmft_output' in f:
ar = f['dmft_output'] ar = f['dmft_output']
if 'iterations' in ar: if 'iterations' in ar:
previous_present = True previous_present = True
previous_runs = ar['iterations'] previous_runs = ar['iterations']
else: else:
f.create_group('dmft_output') f.create_group('dmft_output')
del f
previous_runs = mpi.bcast(previous_runs) previous_runs = mpi.bcast(previous_runs)
previous_present = mpi.bcast(previous_present) previous_present = mpi.bcast(previous_present)
@ -47,9 +46,8 @@ chemical_potential=chemical_potential_init
# load previous data: old self-energy, chemical potential, DC correction # load previous data: old self-energy, chemical potential, DC correction
if previous_present: if previous_present:
if mpi.is_master_node(): if mpi.is_master_node():
ar = HDFArchive(dft_filename+'.h5','a') with HDFArchive(dft_filename+'.h5','r') as ar:
S.Sigma << ar['dmft_output']['Sigma'] S.Sigma << ar['dmft_output']['Sigma']
del ar
SK.chemical_potential,SK.dc_imp,SK.dc_energ = SK.load(['chemical_potential','dc_imp','dc_energ']) SK.chemical_potential,SK.dc_imp,SK.dc_energ = SK.load(['chemical_potential','dc_imp','dc_energ'])
S.Sigma << mpi.bcast(S.Sigma) S.Sigma << mpi.bcast(S.Sigma)
SK.chemical_potential = mpi.bcast(SK.chemical_potential) SK.chemical_potential = mpi.bcast(SK.chemical_potential)
@ -87,11 +85,10 @@ for iteration_number in range(1,Loops+1):
# Now mix Sigma and G with factor Mix, if wanted: # Now mix Sigma and G with factor Mix, if wanted:
if (iteration_number>1 or previous_present): if (iteration_number>1 or previous_present):
if (mpi.is_master_node() and (mixing<1.0)): if (mpi.is_master_node() and (mixing<1.0)):
ar = HDFArchive(dft_filename+'.h5','a') with HDFArchive(dft_filename+'.h5','r') as ar:
mpi.report("Mixing Sigma and G with factor %s"%mixing) mpi.report("Mixing Sigma and G with factor %s"%mixing)
S.Sigma << mixing * S.Sigma + (1.0-mixing) * ar['dmft_output']['Sigma'] S.Sigma << mixing * S.Sigma + (1.0-mixing) * ar['dmft_output']['Sigma']
S.G << mixing * S.G + (1.0-mixing) * ar['dmft_output']['G'] S.G << mixing * S.G + (1.0-mixing) * ar['dmft_output']['G']
del ar
S.G << mpi.bcast(S.G) S.G << mpi.bcast(S.G)
S.Sigma << mpi.bcast(S.Sigma) S.Sigma << mpi.bcast(S.Sigma)
@ -106,11 +103,10 @@ for iteration_number in range(1,Loops+1):
# store the impurity self-energy, GF as well as correlation energy in h5 # store the impurity self-energy, GF as well as correlation energy in h5
if mpi.is_master_node(): if mpi.is_master_node():
ar = HDFArchive(dft_filename+'.h5','a') with HDFArchive(dft_filename+'.h5','a') as ar:
ar['dmft_output']['iterations'] = iteration_number + previous_runs ar['dmft_output']['iterations'] = iteration_number + previous_runs
ar['dmft_output']['G'] = S.G ar['dmft_output']['G'] = S.G
ar['dmft_output']['Sigma'] = S.Sigma ar['dmft_output']['Sigma'] = S.Sigma
del ar
#Save essential SumkDFT data: #Save essential SumkDFT data:
SK.save(['chemical_potential','dc_imp','dc_energ','correnerg']) SK.save(['chemical_potential','dc_imp','dc_energ','correnerg'])

View File

@ -38,15 +38,15 @@ p["fit_max_n"] = 60
previous_runs = 0 previous_runs = 0
previous_present = False previous_present = False
if mpi.is_master_node(): if mpi.is_master_node():
f = HDFArchive(dft_filename+'.h5','a') with HDFArchive(dft_filename+'.h5','a') as f:
if 'dmft_output' in f: if 'dmft_output' in f:
ar = f['dmft_output'] ar = f['dmft_output']
if 'iterations' in ar: if 'iterations' in ar:
previous_present = True previous_present = True
previous_runs = ar['iterations'] previous_runs = ar['iterations']
else: else:
f.create_group('dmft_output') f.create_group('dmft_output')
del f
previous_runs = mpi.bcast(previous_runs) previous_runs = mpi.bcast(previous_runs)
previous_present = mpi.bcast(previous_present) previous_present = mpi.bcast(previous_present)
@ -72,9 +72,8 @@ if previous_present:
dc_imp = 0 dc_imp = 0
dc_energ = 0 dc_energ = 0
if mpi.is_master_node(): if mpi.is_master_node():
ar = HDFArchive(dft_filename+'.h5','a') with HDFArchive(dft_filename+'.h5','r') as ar:
S.Sigma_iw << ar['dmft_output']['Sigma_iw'] S.Sigma_iw << ar['dmft_output']['Sigma_iw']
del ar
chemical_potential,dc_imp,dc_energ = SK.load(['chemical_potential','dc_imp','dc_energ']) chemical_potential,dc_imp,dc_energ = SK.load(['chemical_potential','dc_imp','dc_energ'])
S.Sigma_iw << mpi.bcast(S.Sigma_iw) S.Sigma_iw << mpi.bcast(S.Sigma_iw)
chemical_potential = mpi.bcast(chemical_potential) chemical_potential = mpi.bcast(chemical_potential)
@ -103,14 +102,13 @@ for iteration_number in range(1,loops+1):
# We can do a mixing of Delta in order to stabilize the DMFT iterations: # We can do a mixing of Delta in order to stabilize the DMFT iterations:
S.G0_iw << S.Sigma_iw + inverse(S.G_iw) S.G0_iw << S.Sigma_iw + inverse(S.G_iw)
# The following lines are uncommented until issue #98 is fixed in TRIQS # The following lines are uncommented until issue #98 is fixed in TRIQS
# ar = HDFArchive(dft_filename+'.h5','a') # with HDFArchive(dft_filename+'.h5','a') as ar:
# if (iteration_number>1 or previous_present): # if (iteration_number>1 or previous_present):
# mpi.report("Mixing input Delta with factor %s"%delta_mix) # mpi.report("Mixing input Delta with factor %s"%delta_mix)
# Delta = (delta_mix * delta(S.G0_iw)) + (1.0-delta_mix) * ar['dmft_output']['Delta_iw'] # Delta = (delta_mix * delta(S.G0_iw)) + (1.0-delta_mix) * ar['dmft_output']['Delta_iw']
# S.G0_iw << S.G0_iw + delta(S.G0_iw) - Delta # S.G0_iw << S.G0_iw + delta(S.G0_iw) - Delta
# ar['dmft_output']['Delta_iw'] = delta(S.G0_iw) # ar['dmft_output']['Delta_iw'] = delta(S.G0_iw)
S.G0_iw << inverse(S.G0_iw) S.G0_iw << inverse(S.G0_iw)
# del ar
S.G0_iw << mpi.bcast(S.G0_iw) S.G0_iw << mpi.bcast(S.G0_iw)
@ -123,25 +121,24 @@ for iteration_number in range(1,loops+1):
# Now mix Sigma and G with factor sigma_mix, if wanted: # Now mix Sigma and G with factor sigma_mix, if wanted:
if (iteration_number>1 or previous_present): if (iteration_number>1 or previous_present):
if mpi.is_master_node(): if mpi.is_master_node():
ar = HDFArchive(dft_filename+'.h5','a') with HDFArchive(dft_filename+'.h5','r') as ar:
mpi.report("Mixing Sigma and G with factor %s"%sigma_mix) mpi.report("Mixing Sigma and G with factor %s"%sigma_mix)
S.Sigma_iw << sigma_mix * S.Sigma_iw + (1.0-sigma_mix) * ar['dmft_output']['Sigma_iw'] S.Sigma_iw << sigma_mix * S.Sigma_iw + (1.0-sigma_mix) * ar['dmft_output']['Sigma_iw']
S.G_iw << sigma_mix * S.G_iw + (1.0-sigma_mix) * ar['dmft_output']['G_iw'] S.G_iw << sigma_mix * S.G_iw + (1.0-sigma_mix) * ar['dmft_output']['G_iw']
del ar
S.G_iw << mpi.bcast(S.G_iw) S.G_iw << mpi.bcast(S.G_iw)
S.Sigma_iw << mpi.bcast(S.Sigma_iw) S.Sigma_iw << mpi.bcast(S.Sigma_iw)
# Write the final Sigma and G to the hdf5 archive: # Write the final Sigma and G to the hdf5 archive:
if mpi.is_master_node(): if mpi.is_master_node():
ar = HDFArchive(dft_filename+'.h5','a') with HDFArchive(dft_filename+'.h5','a') as ar:
ar['dmft_output']['iterations'] = iteration_number + previous_runs ar['dmft_output']['iterations'] = iteration_number + previous_runs
ar['dmft_output']['G_tau'] = S.G_tau ar['dmft_output']['G_tau'] = S.G_tau
ar['dmft_output']['G_iw'] = S.G_iw ar['dmft_output']['G_iw'] = S.G_iw
ar['dmft_output']['Sigma_iw'] = S.Sigma_iw ar['dmft_output']['Sigma_iw'] = S.Sigma_iw
ar['dmft_output']['G0-%s'%(iteration_number)] = S.G0_iw ar['dmft_output']['G0-%s'%(iteration_number)] = S.G0_iw
ar['dmft_output']['G-%s'%(iteration_number)] = S.G_iw ar['dmft_output']['G-%s'%(iteration_number)] = S.G_iw
ar['dmft_output']['Sigma-%s'%(iteration_number)] = S.Sigma_iw ar['dmft_output']['Sigma-%s'%(iteration_number)] = S.Sigma_iw
del ar
# Set the new double counting: # Set the new double counting:
dm = S.G_iw.density() # compute the density matrix of the impurity problem dm = S.G_iw.density() # compute the density matrix of the impurity problem

View File

@ -39,15 +39,14 @@ p["fit_max_n"] = 60
previous_runs = 0 previous_runs = 0
previous_present = False previous_present = False
if mpi.is_master_node(): if mpi.is_master_node():
f = HDFArchive(dft_filename+'.h5','a') with HDFArchive(dft_filename+'.h5','a') as f:
if 'dmft_output' in f: if 'dmft_output' in f:
ar = f['dmft_output'] ar = f['dmft_output']
if 'iterations' in ar: if 'iterations' in ar:
previous_present = True previous_present = True
previous_runs = ar['iterations'] previous_runs = ar['iterations']
else: else:
f.create_group('dmft_output') f.create_group('dmft_output')
del f
previous_runs = mpi.bcast(previous_runs) previous_runs = mpi.bcast(previous_runs)
previous_present = mpi.bcast(previous_present) previous_present = mpi.bcast(previous_present)
@ -75,9 +74,8 @@ if previous_present:
dc_imp = 0 dc_imp = 0
dc_energ = 0 dc_energ = 0
if mpi.is_master_node(): if mpi.is_master_node():
ar = HDFArchive(dft_filename+'.h5','a') with HDFArchive(dft_filename+'.h5','r') as ar:
S.Sigma_iw << ar['dmft_output']['Sigma_iw'] S.Sigma_iw << ar['dmft_output']['Sigma_iw']
del ar
chemical_potential,dc_imp,dc_energ = SK.load(['chemical_potential','dc_imp','dc_energ']) chemical_potential,dc_imp,dc_energ = SK.load(['chemical_potential','dc_imp','dc_energ'])
S.Sigma_iw << mpi.bcast(S.Sigma_iw) S.Sigma_iw << mpi.bcast(S.Sigma_iw)
chemical_potential = mpi.bcast(chemical_potential) chemical_potential = mpi.bcast(chemical_potential)
@ -106,14 +104,13 @@ for iteration_number in range(1,loops+1):
# We can do a mixing of Delta in order to stabilize the DMFT iterations: # We can do a mixing of Delta in order to stabilize the DMFT iterations:
S.G0_iw << S.Sigma_iw + inverse(S.G_iw) S.G0_iw << S.Sigma_iw + inverse(S.G_iw)
# The following lines are uncommented until issue #98 is fixed in TRIQS # The following lines are uncommented until issue #98 is fixed in TRIQS
# ar = HDFArchive(dft_filename+'.h5','a') # with HDFArchive(dft_filename+'.h5','a') as ar:
# if (iteration_number>1 or previous_present): # if (iteration_number>1 or previous_present):
# mpi.report("Mixing input Delta with factor %s"%delta_mix) # mpi.report("Mixing input Delta with factor %s"%delta_mix)
# Delta = (delta_mix * delta(S.G0_iw)) + (1.0-delta_mix) * ar['dmft_output']['Delta_iw'] # Delta = (delta_mix * delta(S.G0_iw)) + (1.0-delta_mix) * ar['dmft_output']['Delta_iw']
# S.G0_iw << S.G0_iw + delta(S.G0_iw) - Delta # S.G0_iw << S.G0_iw + delta(S.G0_iw) - Delta
# ar['dmft_output']['Delta_iw'] = delta(S.G0_iw) # ar['dmft_output']['Delta_iw'] = delta(S.G0_iw)
S.G0_iw << inverse(S.G0_iw) S.G0_iw << inverse(S.G0_iw)
# del ar
S.G0_iw << mpi.bcast(S.G0_iw) S.G0_iw << mpi.bcast(S.G0_iw)
@ -126,25 +123,23 @@ for iteration_number in range(1,loops+1):
# Now mix Sigma and G with factor sigma_mix, if wanted: # Now mix Sigma and G with factor sigma_mix, if wanted:
if (iteration_number>1 or previous_present): if (iteration_number>1 or previous_present):
if mpi.is_master_node(): if mpi.is_master_node():
ar = HDFArchive(dft_filename+'.h5','a') with HDFArchive(dft_filename+'.h5','r') as ar:
mpi.report("Mixing Sigma and G with factor %s"%sigma_mix) mpi.report("Mixing Sigma and G with factor %s"%sigma_mix)
S.Sigma_iw << sigma_mix * S.Sigma_iw + (1.0-sigma_mix) * ar['dmft_output']['Sigma_iw'] S.Sigma_iw << sigma_mix * S.Sigma_iw + (1.0-sigma_mix) * ar['dmft_output']['Sigma_iw']
S.G_iw << sigma_mix * S.G_iw + (1.0-sigma_mix) * ar['dmft_output']['G_iw'] S.G_iw << sigma_mix * S.G_iw + (1.0-sigma_mix) * ar['dmft_output']['G_iw']
del ar
S.G_iw << mpi.bcast(S.G_iw) S.G_iw << mpi.bcast(S.G_iw)
S.Sigma_iw << mpi.bcast(S.Sigma_iw) S.Sigma_iw << mpi.bcast(S.Sigma_iw)
# Write the final Sigma and G to the hdf5 archive: # Write the final Sigma and G to the hdf5 archive:
if mpi.is_master_node(): if mpi.is_master_node():
ar = HDFArchive(dft_filename+'.h5','a') with HDFArchive(dft_filename+'.h5','a') as ar:
ar['dmft_output']['iterations'] = iteration_number + previous_runs ar['dmft_output']['iterations'] = iteration_number + previous_runs
ar['dmft_output']['G_tau'] = S.G_tau ar['dmft_output']['G_tau'] = S.G_tau
ar['dmft_output']['G_iw'] = S.G_iw ar['dmft_output']['G_iw'] = S.G_iw
ar['dmft_output']['Sigma_iw'] = S.Sigma_iw ar['dmft_output']['Sigma_iw'] = S.Sigma_iw
ar['dmft_output']['G0-%s'%(iteration_number)] = S.G0_iw ar['dmft_output']['G0-%s'%(iteration_number)] = S.G0_iw
ar['dmft_output']['G-%s'%(iteration_number)] = S.G_iw ar['dmft_output']['G-%s'%(iteration_number)] = S.G_iw
ar['dmft_output']['Sigma-%s'%(iteration_number)] = S.Sigma_iw ar['dmft_output']['Sigma-%s'%(iteration_number)] = S.Sigma_iw
del ar
# Set the new double counting: # Set the new double counting:
dm = S.G_iw.density() # compute the density matrix of the impurity problem dm = S.G_iw.density() # compute the density matrix of the impurity problem

View File

@ -205,23 +205,21 @@ some additional refinements::
# Now mix Sigma and G with factor mix, if wanted: # Now mix Sigma and G with factor mix, if wanted:
if (iteration_number>1 or previous_present): if (iteration_number>1 or previous_present):
if mpi.is_master_node(): if mpi.is_master_node():
ar = HDFArchive(dft_filename+'.h5','a') with HDFArchive(dft_filename+'.h5','r') as ar:
mpi.report("Mixing Sigma and G with factor %s"%mix) mpi.report("Mixing Sigma and G with factor %s"%mix)
S.Sigma_iw << mix * S.Sigma_iw + (1.0-mix) * ar['dmft_output']['Sigma_iw'] S.Sigma_iw << mix * S.Sigma_iw + (1.0-mix) * ar['dmft_output']['Sigma_iw']
S.G_iw << mix * S.G_iw + (1.0-mix) * ar['dmft_output']['G_iw'] S.G_iw << mix * S.G_iw + (1.0-mix) * ar['dmft_output']['G_iw']
del ar
S.G_iw << mpi.bcast(S.G_iw) S.G_iw << mpi.bcast(S.G_iw)
S.Sigma_iw << mpi.bcast(S.Sigma_iw) S.Sigma_iw << mpi.bcast(S.Sigma_iw)
# Write the final Sigma and G to the hdf5 archive: # Write the final Sigma and G to the hdf5 archive:
if mpi.is_master_node(): if mpi.is_master_node():
ar = HDFArchive(dft_filename+'.h5','a') with HDFArchive(dft_filename+'.h5','a') as ar:
ar['dmft_output']['iterations'] = iteration_number ar['dmft_output']['iterations'] = iteration_number
ar['dmft_output']['G_0'] = S.G0_iw ar['dmft_output']['G_0'] = S.G0_iw
ar['dmft_output']['G_tau'] = S.G_tau ar['dmft_output']['G_tau'] = S.G_tau
ar['dmft_output']['G_iw'] = S.G_iw ar['dmft_output']['G_iw'] = S.G_iw
ar['dmft_output']['Sigma_iw'] = S.Sigma_iw ar['dmft_output']['Sigma_iw'] = S.Sigma_iw
del ar
# Set the new double counting: # Set the new double counting:
dm = S.G_iw.density() # compute the density matrix of the impurity problem dm = S.G_iw.density() # compute the density matrix of the impurity problem

View File

@ -260,13 +260,12 @@ class HkConverter(ConverterTools):
R.close() R.close()
# Save to the HDF5: # Save to the HDF5:
ar = HDFArchive(self.hdf_file, 'a') with HDFArchive(self.hdf_file, 'a') as ar:
if not (self.dft_subgrp in ar): if not (self.dft_subgrp in ar):
ar.create_group(self.dft_subgrp) ar.create_group(self.dft_subgrp)
things_to_save = ['energy_unit', 'n_k', 'k_dep_projection', 'SP', 'SO', 'charge_below', 'density_required', things_to_save = ['energy_unit', 'n_k', 'k_dep_projection', 'SP', 'SO', 'charge_below', 'density_required',
'symm_op', 'n_shells', 'shells', 'n_corr_shells', 'corr_shells', 'use_rotations', 'rot_mat', 'symm_op', 'n_shells', 'shells', 'n_corr_shells', 'corr_shells', 'use_rotations', 'rot_mat',
'rot_mat_time_inv', 'n_reps', 'dim_reps', 'T', 'n_orbitals', 'proj_mat', 'bz_weights', 'hopping', 'rot_mat_time_inv', 'n_reps', 'dim_reps', 'T', 'n_orbitals', 'proj_mat', 'bz_weights', 'hopping',
'n_inequiv_shells', 'corr_to_inequiv', 'inequiv_to_corr'] 'n_inequiv_shells', 'corr_to_inequiv', 'inequiv_to_corr']
for it in things_to_save: for it in things_to_save:
ar[self.dft_subgrp][it] = locals()[it] ar[self.dft_subgrp][it] = locals()[it]
del ar

View File

@ -44,10 +44,9 @@ class TestSumkDFT(SumkDFT):
fermi_weights = 0 fermi_weights = 0
band_window = 0 band_window = 0
if mpi.is_master_node(): if mpi.is_master_node():
ar = HDFArchive(self.hdf_file,'r') with HDFArchive(self.hdf_file,'r') as ar:
fermi_weights = ar['dft_misc_input']['dft_fermi_weights'] fermi_weights = ar['dft_misc_input']['dft_fermi_weights']
band_window = ar['dft_misc_input']['band_window'] band_window = ar['dft_misc_input']['band_window']
del ar
fermi_weights = mpi.bcast(fermi_weights) fermi_weights = mpi.bcast(fermi_weights)
band_window = mpi.bcast(band_window) band_window = mpi.bcast(band_window)
@ -184,10 +183,9 @@ class TestSumkDFT(SumkDFT):
fermi_weights = 0 fermi_weights = 0
band_window = 0 band_window = 0
if mpi.is_master_node(): if mpi.is_master_node():
ar = HDFArchive(self.hdf_file,'r') with HDFArchive(self.hdf_file,'r') as ar:
fermi_weights = ar['dft_misc_input']['dft_fermi_weights'] fermi_weights = ar['dft_misc_input']['dft_fermi_weights']
band_window = ar['dft_misc_input']['band_window'] band_window = ar['dft_misc_input']['band_window']
del ar
fermi_weights = mpi.bcast(fermi_weights) fermi_weights = mpi.bcast(fermi_weights)
band_window = mpi.bcast(band_window) band_window = mpi.bcast(band_window)
@ -282,14 +280,13 @@ def dmft_cycle():
previous_present = False previous_present = False
if mpi.is_master_node(): if mpi.is_master_node():
ar = HDFArchive(HDFfilename,'a') with HDFArchive(HDFfilename,'a') as ar:
if 'iterations' in ar: if 'iterations' in ar:
previous_present = True previous_present = True
previous_runs = ar['iterations'] previous_runs = ar['iterations']
else: else:
previous_runs = 0 previous_runs = 0
previous_present = False previous_present = False
del ar
mpi.barrier() mpi.barrier()
previous_runs = mpi.bcast(previous_runs) previous_runs = mpi.bcast(previous_runs)
@ -315,9 +312,8 @@ def dmft_cycle():
if (previous_present): if (previous_present):
mpi.report("Using stored data for initialisation") mpi.report("Using stored data for initialisation")
if (mpi.is_master_node()): if (mpi.is_master_node()):
ar = HDFArchive(HDFfilename,'a') with HDFArchive(HDFfilename,'a') as ar:
S.Sigma <<= ar['SigmaF'] S.Sigma <<= ar['SigmaF']
del ar
things_to_load=['chemical_potential','dc_imp'] things_to_load=['chemical_potential','dc_imp']
old_data=SK.load(things_to_load) old_data=SK.load(things_to_load)
chemical_potential=old_data[0] chemical_potential=old_data[0]
@ -365,13 +361,12 @@ def dmft_cycle():
# Now mix Sigma and G: # Now mix Sigma and G:
if ((itn>1)or(previous_present)): if ((itn>1)or(previous_present)):
if (mpi.is_master_node()and (Mix<1.0)): if (mpi.is_master_node()and (Mix<1.0)):
ar = HDFArchive(HDFfilename,'r') with HDFArchive(HDFfilename,'r') as ar:
mpi.report("Mixing Sigma and G with factor %s"%Mix) mpi.report("Mixing Sigma and G with factor %s"%Mix)
if ('SigmaF' in ar): if ('SigmaF' in ar):
S.Sigma <<= Mix * S.Sigma + (1.0-Mix) * ar['SigmaF'] S.Sigma <<= Mix * S.Sigma + (1.0-Mix) * ar['SigmaF']
if ('GF' in ar): if ('GF' in ar):
S.G <<= Mix * S.G + (1.0-Mix) * ar['GF'] S.G <<= Mix * S.G + (1.0-Mix) * ar['GF']
del ar
S.G = mpi.bcast(S.G) S.G = mpi.bcast(S.G)
S.Sigma = mpi.bcast(S.Sigma) S.Sigma = mpi.bcast(S.Sigma)
@ -386,14 +381,13 @@ def dmft_cycle():
# store the impurity self-energy, GF as well as correlation energy in h5 # store the impurity self-energy, GF as well as correlation energy in h5
if (mpi.is_master_node()): if (mpi.is_master_node()):
ar = HDFArchive(HDFfilename,'a') with HDFArchive(HDFfilename,'a') as ar:
ar['iterations'] = itn ar['iterations'] = itn
ar['chemical_cotential%s'%itn] = chemical_potential ar['chemical_cotential%s'%itn] = chemical_potential
ar['SigmaF'] = S.Sigma ar['SigmaF'] = S.Sigma
ar['GF'] = S.G ar['GF'] = S.G
ar['correnerg%s'%itn] = correnerg ar['correnerg%s'%itn] = correnerg
ar['DCenerg%s'%itn] = SK.dc_energ ar['DCenerg%s'%itn] = SK.dc_energ
del ar
#Save essential SumkDFT data: #Save essential SumkDFT data:
things_to_save=['chemical_potential','dc_energ','dc_imp'] things_to_save=['chemical_potential','dc_energ','dc_imp']
@ -428,11 +422,10 @@ def dmft_cycle():
# store correlation energy contribution to be read by Wien2ki and then included to DFT+DMFT total energy # store correlation energy contribution to be read by Wien2ki and then included to DFT+DMFT total energy
if (mpi.is_master_node()): if (mpi.is_master_node()):
ar = HDFArchive(HDFfilename) with HDFArchive(HDFfilename) as ar:
itn = ar['iterations'] itn = ar['iterations']
correnerg = ar['correnerg%s'%itn] correnerg = ar['correnerg%s'%itn]
DCenerg = ar['DCenerg%s'%itn] DCenerg = ar['DCenerg%s'%itn]
del ar
correnerg -= DCenerg[0] correnerg -= DCenerg[0]
f=open(lda_filename+'.qdmft','a') f=open(lda_filename+'.qdmft','a')
f.write("%.16f\n"%correnerg) f.write("%.16f\n"%correnerg)

View File

@ -269,22 +269,23 @@ class VaspConverter(ConverterTools):
# Save it to the HDF: # Save it to the HDF:
ar = HDFArchive(self.hdf_file,'a') with HDFArchive(self.hdf_file,'a') as ar:
if not (self.dft_subgrp in ar): ar.create_group(self.dft_subgrp) if not (self.dft_subgrp in ar): ar.create_group(self.dft_subgrp)
# The subgroup containing the data. If it does not exist, it is created. If it exists, the data is overwritten! # The subgroup containing the data. If it does not exist, it is created. If it exists, the data is overwritten!
things_to_save = ['energy_unit','n_k','k_dep_projection','SP','SO','charge_below','density_required', things_to_save = ['energy_unit','n_k','k_dep_projection','SP','SO','charge_below','density_required',
'symm_op','n_shells','shells','n_corr_shells','corr_shells','use_rotations','rot_mat', 'symm_op','n_shells','shells','n_corr_shells','corr_shells','use_rotations','rot_mat',
'rot_mat_time_inv','n_reps','dim_reps','T','n_orbitals','proj_mat','bz_weights','hopping', 'rot_mat_time_inv','n_reps','dim_reps','T','n_orbitals','proj_mat','bz_weights','hopping',
'n_inequiv_shells', 'corr_to_inequiv', 'inequiv_to_corr'] 'n_inequiv_shells', 'corr_to_inequiv', 'inequiv_to_corr']
for it in things_to_save: ar[self.dft_subgrp][it] = locals()[it] for it in things_to_save: ar[self.dft_subgrp][it] = locals()[it]
# Store Fermi weights to 'dft_misc_input' # Store Fermi weights to 'dft_misc_input'
if not (self.misc_subgrp in ar): ar.create_group(self.misc_subgrp) if not (self.misc_subgrp in ar): ar.create_group(self.misc_subgrp)
ar[self.misc_subgrp]['dft_fermi_weights'] = f_weights ar[self.misc_subgrp]['dft_fermi_weights'] = f_weights
ar[self.misc_subgrp]['band_window'] = band_window ar[self.misc_subgrp]['band_window'] = band_window
del ar
# Symmetries are used, so now convert symmetry information for *correlated* orbitals: # Symmetries are used, so now convert symmetry information for *correlated* orbitals:
self.convert_symmetry_input(ctrl_head, orbits=self.corr_shells, symm_subgrp=self.symmcorr_subgrp) self.convert_symmetry_input(ctrl_head, orbits=self.corr_shells, symm_subgrp=self.symmcorr_subgrp)
# TODO: Implement misc_input # TODO: Implement misc_input
# self.convert_misc_input(bandwin_file=self.bandwin_file,struct_file=self.struct_file,outputs_file=self.outputs_file, # self.convert_misc_input(bandwin_file=self.bandwin_file,struct_file=self.struct_file,outputs_file=self.outputs_file,
# misc_subgrp=self.misc_subgrp,SO=self.SO,SP=self.SP,n_k=self.n_k) # misc_subgrp=self.misc_subgrp,SO=self.SO,SP=self.SP,n_k=self.n_k)
@ -381,10 +382,9 @@ class VaspConverter(ConverterTools):
raise "convert_misc_input: reading file %s failed" %self.outputs_file raise "convert_misc_input: reading file %s failed" %self.outputs_file
# Save it to the HDF: # Save it to the HDF:
ar=HDFArchive(self.hdf_file,'a') with HDFArchive(self.hdf_file,'a') as ar:
if not (misc_subgrp in ar): ar.create_group(misc_subgrp) if not (misc_subgrp in ar): ar.create_group(misc_subgrp)
for it in things_to_save: ar[misc_subgrp][it] = locals()[it] for it in things_to_save: ar[misc_subgrp][it] = locals()[it]
del ar
def convert_symmetry_input(self, ctrl_head, orbits, symm_subgrp): def convert_symmetry_input(self, ctrl_head, orbits, symm_subgrp):
@ -405,10 +405,8 @@ class VaspConverter(ConverterTools):
mat_tinv = [numpy.identity(1)] mat_tinv = [numpy.identity(1)]
# Save it to the HDF: # Save it to the HDF:
ar=HDFArchive(self.hdf_file,'a') with HDFArchive(self.hdf_file,'a') as ar:
if not (symm_subgrp in ar): ar.create_group(symm_subgrp) if not (symm_subgrp in ar): ar.create_group(symm_subgrp)
things_to_save = ['n_symm','n_atoms','perm','orbits','SO','SP','time_inv','mat','mat_tinv'] things_to_save = ['n_symm','n_atoms','perm','orbits','SO','SP','time_inv','mat','mat_tinv']
for it in things_to_save: for it in things_to_save:
# print "%s:"%(it), locals()[it] ar[symm_subgrp][it] = locals()[it]
ar[symm_subgrp][it] = locals()[it]
del ar

View File

@ -345,18 +345,17 @@ class Wannier90Converter(ConverterTools):
iorb += norb iorb += norb
# Finally, save all required data into the HDF archive: # Finally, save all required data into the HDF archive:
ar = HDFArchive(self.hdf_file, 'a') with HDFArchive(self.hdf_file, 'a') as ar:
if not (self.dft_subgrp in ar): if not (self.dft_subgrp in ar):
ar.create_group(self.dft_subgrp) ar.create_group(self.dft_subgrp)
# The subgroup containing the data. If it does not exist, it is # The subgroup containing the data. If it does not exist, it is
# created. If it exists, the data is overwritten! # created. If it exists, the data is overwritten!
things_to_save = ['energy_unit', 'n_k', 'k_dep_projection', 'SP', 'SO', 'charge_below', 'density_required', things_to_save = ['energy_unit', 'n_k', 'k_dep_projection', 'SP', 'SO', 'charge_below', 'density_required',
'symm_op', 'n_shells', 'shells', 'n_corr_shells', 'corr_shells', 'use_rotations', 'rot_mat', 'symm_op', 'n_shells', 'shells', 'n_corr_shells', 'corr_shells', 'use_rotations', 'rot_mat',
'rot_mat_time_inv', 'n_reps', 'dim_reps', 'T', 'n_orbitals', 'proj_mat', 'bz_weights', 'hopping', 'rot_mat_time_inv', 'n_reps', 'dim_reps', 'T', 'n_orbitals', 'proj_mat', 'bz_weights', 'hopping',
'n_inequiv_shells', 'corr_to_inequiv', 'inequiv_to_corr'] 'n_inequiv_shells', 'corr_to_inequiv', 'inequiv_to_corr']
for it in things_to_save: for it in things_to_save:
ar[self.dft_subgrp][it] = locals()[it] ar[self.dft_subgrp][it] = locals()[it]
del ar
def read_wannier90hr(self, hr_filename="wannier_hr.dat"): def read_wannier90hr(self, hr_filename="wannier_hr.dat"):
""" """

View File

@ -258,18 +258,17 @@ class Wien2kConverter(ConverterTools):
# Reading done! # Reading done!
# Save it to the HDF: # Save it to the HDF:
ar = HDFArchive(self.hdf_file, 'a') with HDFArchive(self.hdf_file, 'a') as ar:
if not (self.dft_subgrp in ar): if not (self.dft_subgrp in ar):
ar.create_group(self.dft_subgrp) ar.create_group(self.dft_subgrp)
# The subgroup containing the data. If it does not exist, it is # The subgroup containing the data. If it does not exist, it is
# created. If it exists, the data is overwritten! # created. If it exists, the data is overwritten!
things_to_save = ['energy_unit', 'n_k', 'k_dep_projection', 'SP', 'SO', 'charge_below', 'density_required', things_to_save = ['energy_unit', 'n_k', 'k_dep_projection', 'SP', 'SO', 'charge_below', 'density_required',
'symm_op', 'n_shells', 'shells', 'n_corr_shells', 'corr_shells', 'use_rotations', 'rot_mat', 'symm_op', 'n_shells', 'shells', 'n_corr_shells', 'corr_shells', 'use_rotations', 'rot_mat',
'rot_mat_time_inv', 'n_reps', 'dim_reps', 'T', 'n_orbitals', 'proj_mat', 'bz_weights', 'hopping', 'rot_mat_time_inv', 'n_reps', 'dim_reps', 'T', 'n_orbitals', 'proj_mat', 'bz_weights', 'hopping',
'n_inequiv_shells', 'corr_to_inequiv', 'inequiv_to_corr'] 'n_inequiv_shells', 'corr_to_inequiv', 'inequiv_to_corr']
for it in things_to_save: for it in things_to_save:
ar[self.dft_subgrp][it] = locals()[it] ar[self.dft_subgrp][it] = locals()[it]
del ar
# Symmetries are used, so now convert symmetry information for # Symmetries are used, so now convert symmetry information for
# *correlated* orbitals: # *correlated* orbitals:
@ -292,15 +291,14 @@ class Wien2kConverter(ConverterTools):
return return
# get needed data from hdf file # get needed data from hdf file
ar = HDFArchive(self.hdf_file, 'a') with HDFArchive(self.hdf_file, 'a') as ar:
things_to_read = ['SP', 'SO', 'n_shells', things_to_read = ['SP', 'SO', 'n_shells',
'n_k', 'n_orbitals', 'shells'] 'n_k', 'n_orbitals', 'shells']
for it in things_to_read: for it in things_to_read:
if not hasattr(self, it): if not hasattr(self, it):
setattr(self, it, ar[self.dft_subgrp][it]) setattr(self, it, ar[self.dft_subgrp][it])
self.n_spin_blocs = self.SP + 1 - self.SO self.n_spin_blocs = self.SP + 1 - self.SO
del ar
mpi.report("Reading input from %s..." % self.parproj_file) mpi.report("Reading input from %s..." % self.parproj_file)
@ -368,16 +366,15 @@ class Wien2kConverter(ConverterTools):
# Reading done! # Reading done!
# Save it to the HDF: # Save it to the HDF:
ar = HDFArchive(self.hdf_file, 'a') with HDFArchive(self.hdf_file, 'a') as ar:
if not (self.parproj_subgrp in ar): if not (self.parproj_subgrp in ar):
ar.create_group(self.parproj_subgrp) ar.create_group(self.parproj_subgrp)
# The subgroup containing the data. If it does not exist, it is # The subgroup containing the data. If it does not exist, it is
# created. If it exists, the data is overwritten! # created. If it exists, the data is overwritten!
things_to_save = ['dens_mat_below', 'n_parproj', things_to_save = ['dens_mat_below', 'n_parproj',
'proj_mat_all', 'rot_mat_all', 'rot_mat_all_time_inv'] 'proj_mat_all', 'rot_mat_all', 'rot_mat_all_time_inv']
for it in things_to_save: for it in things_to_save:
ar[self.parproj_subgrp][it] = locals()[it] ar[self.parproj_subgrp][it] = locals()[it]
del ar
# Symmetries are used, so now convert symmetry information for *all* # Symmetries are used, so now convert symmetry information for *all*
# orbitals: # orbitals:
@ -395,15 +392,14 @@ class Wien2kConverter(ConverterTools):
try: try:
# get needed data from hdf file # get needed data from hdf file
ar = HDFArchive(self.hdf_file, 'a') with HDFArchive(self.hdf_file, 'a') as ar:
things_to_read = ['SP', 'SO', 'n_corr_shells', things_to_read = ['SP', 'SO', 'n_corr_shells',
'n_shells', 'corr_shells', 'shells', 'energy_unit'] 'n_shells', 'corr_shells', 'shells', 'energy_unit']
for it in things_to_read: for it in things_to_read:
if not hasattr(self, it): if not hasattr(self, it):
setattr(self, it, ar[self.dft_subgrp][it]) setattr(self, it, ar[self.dft_subgrp][it])
self.n_spin_blocs = self.SP + 1 - self.SO self.n_spin_blocs = self.SP + 1 - self.SO
del ar
mpi.report("Reading input from %s..." % self.band_file) mpi.report("Reading input from %s..." % self.band_file)
R = ConverterTools.read_fortran_file( R = ConverterTools.read_fortran_file(
@ -482,16 +478,15 @@ class Wien2kConverter(ConverterTools):
# Reading done! # Reading done!
# Save it to the HDF: # Save it to the HDF:
ar = HDFArchive(self.hdf_file, 'a') with HDFArchive(self.hdf_file, 'a') as ar:
if not (self.bands_subgrp in ar): if not (self.bands_subgrp in ar):
ar.create_group(self.bands_subgrp) ar.create_group(self.bands_subgrp)
# The subgroup containing the data. If it does not exist, it is # The subgroup containing the data. If it does not exist, it is
# created. If it exists, the data is overwritten! # created. If it exists, the data is overwritten!
things_to_save = ['n_k', 'n_orbitals', 'proj_mat', things_to_save = ['n_k', 'n_orbitals', 'proj_mat',
'hopping', 'n_parproj', 'proj_mat_all'] 'hopping', 'n_parproj', 'proj_mat_all']
for it in things_to_save: for it in things_to_save:
ar[self.bands_subgrp][it] = locals()[it] ar[self.bands_subgrp][it] = locals()[it]
del ar
def convert_misc_input(self): def convert_misc_input(self):
""" """
@ -510,13 +505,12 @@ class Wien2kConverter(ConverterTools):
return return
# Check if SP, SO and n_k are already in h5 # Check if SP, SO and n_k are already in h5
ar = HDFArchive(self.hdf_file, 'r') with HDFArchive(self.hdf_file, 'r') as ar:
if not (self.dft_subgrp in ar): if not (self.dft_subgrp in ar):
raise IOError, "convert_misc_input: No %s subgroup in hdf file found! Call convert_dft_input first." % self.dft_subgrp raise IOError, "convert_misc_input: No %s subgroup in hdf file found! Call convert_dft_input first." % self.dft_subgrp
SP = ar[self.dft_subgrp]['SP'] SP = ar[self.dft_subgrp]['SP']
SO = ar[self.dft_subgrp]['SO'] SO = ar[self.dft_subgrp]['SO']
n_k = ar[self.dft_subgrp]['n_k'] n_k = ar[self.dft_subgrp]['n_k']
del ar
things_to_save = [] things_to_save = []
@ -612,12 +606,11 @@ class Wien2kConverter(ConverterTools):
raise IOError, "convert_misc_input: reading file %s failed" % self.outputs_file raise IOError, "convert_misc_input: reading file %s failed" % self.outputs_file
# Save it to the HDF: # Save it to the HDF:
ar = HDFArchive(self.hdf_file, 'a') with HDFArchive(self.hdf_file, 'a') as ar:
if not (self.misc_subgrp in ar): if not (self.misc_subgrp in ar):
ar.create_group(self.misc_subgrp) ar.create_group(self.misc_subgrp)
for it in things_to_save: for it in things_to_save:
ar[self.misc_subgrp][it] = locals()[it] ar[self.misc_subgrp][it] = locals()[it]
del ar
def convert_transport_input(self): def convert_transport_input(self):
""" """
@ -633,13 +626,12 @@ class Wien2kConverter(ConverterTools):
return return
# Check if SP, SO and n_k are already in h5 # Check if SP, SO and n_k are already in h5
ar = HDFArchive(self.hdf_file, 'r') with HDFArchive(self.hdf_file, 'r') as ar:
if not (self.dft_subgrp in ar): if not (self.dft_subgrp in ar):
raise IOError, "convert_transport_input: No %s subgroup in hdf file found! Call convert_dft_input first." % self.dft_subgrp raise IOError, "convert_transport_input: No %s subgroup in hdf file found! Call convert_dft_input first." % self.dft_subgrp
SP = ar[self.dft_subgrp]['SP'] SP = ar[self.dft_subgrp]['SP']
SO = ar[self.dft_subgrp]['SO'] SO = ar[self.dft_subgrp]['SO']
n_k = ar[self.dft_subgrp]['n_k'] n_k = ar[self.dft_subgrp]['n_k']
del ar
# Read relevant data from .pmat/up/dn files # Read relevant data from .pmat/up/dn files
########################################### ###########################################
@ -691,15 +683,14 @@ class Wien2kConverter(ConverterTools):
R.close() # Reading done! R.close() # Reading done!
# Put data to HDF5 file # Put data to HDF5 file
ar = HDFArchive(self.hdf_file, 'a') with HDFArchive(self.hdf_file, 'a') as ar:
if not (self.transp_subgrp in ar): if not (self.transp_subgrp in ar):
ar.create_group(self.transp_subgrp) ar.create_group(self.transp_subgrp)
# The subgroup containing the data. If it does not exist, it is # The subgroup containing the data. If it does not exist, it is
# created. If it exists, the data is overwritten!!! # created. If it exists, the data is overwritten!!!
things_to_save = ['band_window_optics', 'velocities_k'] things_to_save = ['band_window_optics', 'velocities_k']
for it in things_to_save: for it in things_to_save:
ar[self.transp_subgrp][it] = locals()[it] ar[self.transp_subgrp][it] = locals()[it]
del ar
def convert_symmetry_input(self, orbits, symm_file, symm_subgrp, SO, SP): def convert_symmetry_input(self, orbits, symm_file, symm_subgrp, SO, SP):
""" """
@ -781,11 +772,10 @@ class Wien2kConverter(ConverterTools):
# Reading done! # Reading done!
# Save it to the HDF: # Save it to the HDF:
ar = HDFArchive(self.hdf_file, 'a') with HDFArchive(self.hdf_file, 'a') as ar:
if not (symm_subgrp in ar): if not (symm_subgrp in ar):
ar.create_group(symm_subgrp) ar.create_group(symm_subgrp)
things_to_save = ['n_symm', 'n_atoms', 'perm', things_to_save = ['n_symm', 'n_atoms', 'perm',
'orbits', 'SO', 'SP', 'time_inv', 'mat', 'mat_tinv'] 'orbits', 'SO', 'SP', 'time_inv', 'mat', 'mat_tinv']
for it in things_to_save: for it in things_to_save:
ar[symm_subgrp][it] = locals()[it] ar[symm_subgrp][it] = locals()[it]
del ar

View File

@ -187,23 +187,22 @@ class SumkDFT(object):
subgroup_present = 0 subgroup_present = 0
if mpi.is_master_node(): if mpi.is_master_node():
ar = HDFArchive(self.hdf_file, 'r') with HDFArchive(self.hdf_file, 'r') as ar:
if subgrp in ar: if subgrp in ar:
subgroup_present = True subgroup_present = True
# first read the necessary things: # first read the necessary things:
for it in things_to_read: for it in things_to_read:
if it in ar[subgrp]: if it in ar[subgrp]:
setattr(self, it, ar[subgrp][it]) setattr(self, it, ar[subgrp][it])
else: else:
mpi.report("Loading %s failed!" % it) mpi.report("Loading %s failed!" % it)
value_read = False value_read = False
else: else:
if (len(things_to_read) != 0): if (len(things_to_read) != 0):
mpi.report( mpi.report(
"Loading failed: No %s subgroup in hdf5!" % subgrp) "Loading failed: No %s subgroup in hdf5!" % subgrp)
subgroup_present = False subgroup_present = False
value_read = False value_read = False
del ar
# now do the broadcasting: # now do the broadcasting:
for it in things_to_read: for it in things_to_read:
setattr(self, it, mpi.bcast(getattr(self, it))) setattr(self, it, mpi.bcast(getattr(self, it)))
@ -226,18 +225,16 @@ class SumkDFT(object):
if not (mpi.is_master_node()): if not (mpi.is_master_node()):
return # do nothing on nodes return # do nothing on nodes
ar = HDFArchive(self.hdf_file, 'a') with HDFArchive(self.hdf_file, 'a') as ar:
if not subgrp in ar: if not subgrp in ar: ar.create_group(subgrp)
ar.create_group(subgrp) for it in things_to_save:
for it in things_to_save: if it in [ "gf_struct_sumk", "gf_struct_solver",
if it in [ "gf_struct_sumk", "gf_struct_solver", "solver_to_sumk", "sumk_to_solver", "solver_to_sumk_block"]:
"solver_to_sumk", "sumk_to_solver", "solver_to_sumk_block"]: warn("It is not recommended to save '{}' individually. Save 'block_structure' instead.".format(it))
warn("It is not recommended to save '{}' individually. Save 'block_structure' instead.".format(it)) try:
try: ar[subgrp][it] = getattr(self, it)
ar[subgrp][it] = getattr(self, it) except:
except: mpi.report("%s not found, and so not saved." % it)
mpi.report("%s not found, and so not saved." % it)
del ar
def load(self, things_to_load, subgrp='user_data'): def load(self, things_to_load, subgrp='user_data'):
r""" r"""
@ -258,16 +255,15 @@ class SumkDFT(object):
if not (mpi.is_master_node()): if not (mpi.is_master_node()):
return # do nothing on nodes return # do nothing on nodes
ar = HDFArchive(self.hdf_file, 'r') with HDFArchive(self.hdf_file, 'r') as ar:
if not subgrp in ar: if not subgrp in ar:
mpi.report("Loading %s failed!" % subgrp) mpi.report("Loading %s failed!" % subgrp)
list_to_return = [] list_to_return = []
for it in things_to_load: for it in things_to_load:
try: try:
list_to_return.append(ar[subgrp][it]) list_to_return.append(ar[subgrp][it])
except: except:
raise ValueError, "load: %s not found, and so not loaded." % it raise ValueError, "load: %s not found, and so not loaded." % it
del ar
return list_to_return return list_to_return
################ ################
@ -1822,10 +1818,9 @@ class SumkDFT(object):
fermi_weights = 0 fermi_weights = 0
band_window = 0 band_window = 0
if mpi.is_master_node(): if mpi.is_master_node():
ar = HDFArchive(self.hdf_file,'r') with HDFArchive(self.hdf_file,'r') as ar:
fermi_weights = ar['dft_misc_input']['dft_fermi_weights'] fermi_weights = ar['dft_misc_input']['dft_fermi_weights']
band_window = ar['dft_misc_input']['band_window'] band_window = ar['dft_misc_input']['band_window']
del ar
fermi_weights = mpi.bcast(fermi_weights) fermi_weights = mpi.bcast(fermi_weights)
band_window = mpi.bcast(band_window) band_window = mpi.bcast(band_window)

View File

@ -58,16 +58,15 @@ class Symmetry:
if mpi.is_master_node(): if mpi.is_master_node():
# Read the stuff on master: # Read the stuff on master:
ar = HDFArchive(hdf_file, 'r') with HDFArchive(hdf_file, 'r') as ar:
if subgroup is None: if subgroup is None:
ar2 = ar ar2 = ar
else: else:
ar2 = ar[subgroup] ar2 = ar[subgroup]
for it in things_to_read: for it in things_to_read:
setattr(self, it, ar2[it]) setattr(self, it, ar2[it])
del ar2 del ar2
del ar
# Broadcasting # Broadcasting
for it in things_to_read: for it in things_to_read:

View File

@ -34,12 +34,11 @@ Converter.convert_transport_input()
SK = SumkDFTTools(hdf_file='SrVO3.h5', use_dft_blocks=True) SK = SumkDFTTools(hdf_file='SrVO3.h5', use_dft_blocks=True)
ar = HDFArchive('SrVO3_Sigma.h5', 'a') with HDFArchive('SrVO3_Sigma.h5', 'a') as ar:
Sigma = ar['dmft_transp_input']['Sigma_w'] Sigma = ar['dmft_transp_input']['Sigma_w']
SK.set_Sigma([Sigma]) SK.set_Sigma([Sigma])
SK.chemical_potential = ar['dmft_transp_input']['chemical_potential'] SK.chemical_potential = ar['dmft_transp_input']['chemical_potential']
SK.dc_imp = ar['dmft_transp_input']['dc_imp'] SK.dc_imp = ar['dmft_transp_input']['dc_imp']
del ar
SK.transport_distribution(directions=['xx'], broadening=0.0, energy_window=[-0.3,0.3], Om_mesh=[0.00, 0.02] , beta=beta, with_Sigma=True) SK.transport_distribution(directions=['xx'], broadening=0.0, energy_window=[-0.3,0.3], Om_mesh=[0.00, 0.02] , beta=beta, with_Sigma=True)
#SK.save(['Gamma_w','Om_meshr','omega','directions']) #SK.save(['Gamma_w','Om_meshr','omega','directions'])