from h5 import HDFArchive
import h5py
import sys
import numpy
import subprocess
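
# Note: the script uses two HDF5 interfaces. h5py is used for low-level
# copying, renaming and deleting of groups, while the TRIQS HDFArchive is
# needed to write the list-of-dicts shell data that h5py cannot store directly.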

if len(sys.argv) < 2:
    print("Usage: python update_archive.py old_archive [v1.0|v1.2]")
    sys.exit()
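
# Example invocation (with a hypothetical archive name):
#   python update_archive.py my_archive.h5 v1.0
# The archive is updated in place, so keep a backup copy.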
print("""
|
2014-11-03 14:47:55 +01:00
|
|
|
This script is an attempt to update your archive to TRIQS 1.2.
|
|
|
|
Please keep a copy of your old archive as this script is
|
|
|
|
** not guaranteed ** to work for your archive.
|
|
|
|
If you encounter any problem please report it on github!
|
2020-04-08 21:35:59 +02:00
|
|
|
""")
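

# convert_shells: turn each raw shell entry (an array of integers, as written
# by the 1.0 converters) into a dict with the named keys used since v1.2.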
def convert_shells(shells):
    shell_entries = ['atom', 'sort', 'l', 'dim']
    return [{name: int(val) for name, val in zip(shell_entries, shells[ish])} for ish in range(len(shells))]
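

# convert_corr_shells: same conversion for correlated shells, which carry the
# two extra entries 'SO' and 'irep'.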
def convert_corr_shells(corr_shells):
    corr_shell_entries = ['atom', 'sort', 'l', 'dim', 'SO', 'irep']
    return [{name: int(val) for name, val in zip(corr_shell_entries, corr_shells[icrsh])} for icrsh in range(len(corr_shells))]
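

# det_shell_equivalence: group correlated shells with identical 'sort' and 'l'
# into inequivalent shells. Returns (n_inequiv_shells, corr_to_inequiv,
# inequiv_to_corr); e.g. two equivalent shells give (1, [0, 0], [0]).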
def det_shell_equivalence(corr_shells):
    corr_to_inequiv = [0 for i in range(len(corr_shells))]
    inequiv_to_corr = [0]
    n_inequiv_shells = 1

    if len(corr_shells) > 1:
        inequiv_sort = [corr_shells[0]['sort']]
        inequiv_l = [corr_shells[0]['l']]
        for i in range(len(corr_shells) - 1):
            is_equiv = False
            for j in range(n_inequiv_shells):
                if (inequiv_sort[j] == corr_shells[i + 1]['sort']) and (inequiv_l[j] == corr_shells[i + 1]['l']):
                    is_equiv = True
                    corr_to_inequiv[i + 1] = j
            if not is_equiv:
                corr_to_inequiv[i + 1] = n_inequiv_shells
                n_inequiv_shells += 1
                inequiv_sort.append(corr_shells[i + 1]['sort'])
                inequiv_l.append(corr_shells[i + 1]['l'])
                inequiv_to_corr.append(i + 1)

    return n_inequiv_shells, corr_to_inequiv, inequiv_to_corr


### Main ###

filename = sys.argv[1]
if len(sys.argv) > 2:
    from_v = sys.argv[2]
else:  # assume updating an old v1.0 archive
    from_v = 'v1.0'

# Open with h5py in append mode so that groups can be copied, renamed and deleted.
A = h5py.File(filename, 'a')

# Rename groups
old_to_new = {'SumK_LDA': 'dft_input', 'SumK_LDA_ParProj': 'dft_parproj_input',
              'SymmCorr': 'dft_symmcorr_input', 'SymmPar': 'dft_symmpar_input', 'SumK_LDA_Bands': 'dft_bands_input'}

for old, new in old_to_new.items():
    if old not in list(A.keys()):
        continue
    print("Changing %s to %s ..." % (old, new))
    A.copy(old, new)
    del A[old]

# Move output items from dft_input to user_data
move_to_output = ['chemical_potential', 'dc_imp', 'dc_energ']
for obj in move_to_output:
    if obj in list(A['dft_input'].keys()):
        if 'user_data' not in A:
            A.create_group('user_data')
        print("Moving %s to user_data ..." % obj)
        A.copy('dft_input/' + obj, 'user_data/' + obj)
        del A['dft_input'][obj]

# Delete obsolete quantities
to_delete = ['gf_struct_solver', 'map_inv', 'map', 'deg_shells', 'h_field']
for obj in to_delete:
    if obj in list(A['dft_input'].keys()):
        del A['dft_input'][obj]

if from_v == 'v1.0':
    # Update shells and corr_shells to list of dicts
    shells_old = HDFArchive(filename, 'r')['dft_input']['shells']
    corr_shells_old = HDFArchive(filename, 'r')['dft_input']['corr_shells']
    shells = convert_shells(shells_old)
    corr_shells = convert_corr_shells(corr_shells_old)
    del A['dft_input']['shells']
    del A['dft_input']['corr_shells']
    A.close()
    # Need to use HDFArchive to write the list-of-dicts format
    HDFArchive(filename, 'a')['dft_input']['shells'] = shells
    HDFArchive(filename, 'a')['dft_input']['corr_shells'] = corr_shells
    A = h5py.File(filename, 'a')
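
# det_shell_equivalence returns the tuple (n_inequiv_shells, corr_to_inequiv,
# inequiv_to_corr); its three entries are written to dft_input separately below.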
# Add shell equivalency quantities
if 'n_inequiv_shells' not in A['dft_input']:
    equiv_shell_info = det_shell_equivalence(corr_shells)
    A['dft_input']['n_inequiv_shells'] = equiv_shell_info[0]
    A['dft_input']['corr_to_inequiv'] = equiv_shell_info[1]
    A['dft_input']['inequiv_to_corr'] = equiv_shell_info[2]

# Rename variables
groups = ['dft_symmcorr_input', 'dft_symmpar_input']
for group in groups:
    if group not in list(A.keys()):
        continue
    if 'n_s' not in A[group]:
        continue
    print("Changing n_s to n_symm ...")
    A[group].move('n_s', 'n_symm')
    # Convert orbits to list of dicts
    orbits_old = HDFArchive(filename, 'r')[group]['orbits']
    orbits = convert_corr_shells(orbits_old)
    del A[group]['orbits']
    A.close()
    HDFArchive(filename, 'a')[group]['orbits'] = orbits
    A = h5py.File(filename, 'a')

groups = ['dft_parproj_input']
for group in groups:
    if group not in list(A.keys()):
        continue
    if 'proj_mat_pc' not in A[group]:
        continue
    print("Changing proj_mat_pc to proj_mat_all ...")
    A[group].move('proj_mat_pc', 'proj_mat_all')

A.close()
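
# h5repack writes a compacted copy of the archive to a temporary file, which
# then replaces the original; deleted groups otherwise keep occupying space
# in the HDF5 file.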
# Repack to reclaim disk space
retcode = subprocess.call(["h5repack", "-i%s" % filename, "-otemphgfrt.h5"])
if retcode != 0:
    print("h5repack failed!")
else:
    subprocess.call(["mv", "-f", "temphgfrt.h5", "%s" % filename])