Mirror of https://github.com/triqs/dft_tools (synced 2024-12-25 13:53:40 +01:00)

Commit 22f9576834 (parent 3be8e65ae7): Finish gf wrapping
- clean old
- clean gf_desc.py
@@ -1,397 +0,0 @@
################################################################################
#
# TRIQS: a Toolbox for Research in Interacting Quantum Systems
#
# Copyright (C) 2011-2012 by M. Ferrero, O. Parcollet
#
# TRIQS is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# TRIQS is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# TRIQS. If not, see <http://www.gnu.org/licenses/>.
#
################################################################################

import numpy
import lazy_expressions, descriptors
from gf import MeshImFreq
from pytriqs.plot.protocol import clip_array
from types import IntType, SliceType, StringType
from tools import LazyCTX, IndicesConverter, get_indices_in_dict, py_deserialize
from impl_plot import PlotWrapperPartialReduce
from nothing import Nothing
from gf import TailGf
from matrix_stack import MatrixStack
import copy_reg

def builder_cls_with_dict_arg(cls, args): return cls(**args)

def reductor(x):
    return (builder_cls_with_dict_arg, (x._derived, x.__reduce_to_dict__()))

class GfGeneric:

    def __init__(self, mesh, data, singularity, symmetry, indices_pack, name, derived):

        self._mesh = mesh
        self._data = data
        self._singularity = singularity
        self._symmetry = symmetry
        self._indices_pack = indices_pack
        self.name = name
        self._derived = derived

        self.indicesR, self.indicesL = indices_pack

        copy_reg.pickle(derived, reductor, builder_cls_with_dict_arg)

    @property
    def mesh(self): return self._mesh

    @property
    def tail(self): return self._singularity
    @tail.setter
    def tail(self, t):
        if type(t) == TailGf:
            assert (self.N1, self.N2) == (t.N1, t.N2)
            self._singularity.copy_from(t)
        elif type(t) == Nothing:
            self._singularity = Nothing()
        else:
            raise RuntimeError, "invalid rhs for tail assignment"

    @property
    def data(self): return self._data
    @data.setter
    def data(self, value): self._data[:,:,:] = value

    @property
    def indices(self):
        assert (self.indicesR == self.indicesL), "right and left indices are different"
        return self.indicesR

    @property
    def N1(self): return self._data.shape[1]

    @property
    def N2(self): return self._data.shape[2]

    #--------------------- h5 and reduction ------------------------------------------

    def __reduce_to_dict__(self):
        return {'mesh': self._mesh, 'data': self._data,
                'tail': self._singularity, 'symmetry': self._symmetry,
                'indices_pack': self._indices_pack, 'name': self.name}

    def __write_hdf5__(self, gr, key):
        self.__write_hdf5_cython__(gr, key)
        gr[key].create_group('indices')
        gr[key]['indices']['left'] = self.indicesL
        gr[key]['indices']['right'] = self.indicesR
        #gr[key]['name'] = self.name

    #--------------------- copies ------------------------------------------

    def copy(self):
        return self._derived(indices_pack = self._indices_pack, mesh = self._mesh,
                             data = self._data.copy(), tail = self._singularity.copy(),
                             name = self.name)

    def copy_from(self, X):
        assert self._derived is X._derived
        assert self.mesh == X.mesh
        self.data = X.data
        self.tail = X.tail
        #assert list(self._indices) == list(X._indices)
        self._symmetry = X._symmetry
        self.name = X.name

    #--------------------- [ ] operator ------------------------------------------

    def __getitem__(self, key):
        """Key is a tuple of indices (n1, n2) as defined at construction"""
        if len(key) != 2: raise KeyError, "[ ] must be given two arguments"
        sl1, sl2 = key
        if type(sl1) == StringType and type(sl2) == StringType:
            # Convert the indices to integers
            indices_converter = [IndicesConverter(self.indicesL), IndicesConverter(self.indicesR)]
            sl1, sl2 = [indices_converter[i].convertToNumpyIndex(k) for i, k in enumerate(key)]
        if type(sl1) != slice: sl1 = slice(sl1, sl1+1)
        if type(sl2) != slice: sl2 = slice(sl2, sl2+1)
        return self.__class__(indicesL = list(self.indicesL)[sl1],
                              indicesR = list(self.indicesR)[sl2],
                              name = self.name,
                              mesh = self.mesh,
                              data = self.data[:,sl1,sl2],
                              tail = self.tail._make_slice(sl1, sl2))

    def __setitem__(self, key, val):
        g = self.__getitem__(key)
        g <<= val

    #------------- Iteration ------------------------------------

    def __iter__(self):
        for i in self.indicesL:
            for j in self.indicesR:
                b = self[i, j]
                b.name = "%s_%s_%s"%(self.name if hasattr(self, 'name') else '', i, j)
                yield i, j, b

    #---------------- Repr, str ---------------------------------

    def __str__(self):
        return self.name if self.name else repr(self)

    def __repr__(self):
        return """%s %s: indicesL = %s, indicesR = %s"""%(self.__class__.__name__, self.name,
               [x for x in self.indicesL], [x for x in self.indicesR])

    #-------------- PLOT ---------------------------------------

    @property
    def real(self):
        """Use self.real in a plot to plot only the real part"""
        return PlotWrapperPartialReduce(self, RI='R')

    @property
    def imag(self):
        """Use self.imag in a plot to plot only the imaginary part"""
        return PlotWrapperPartialReduce(self, RI='I')

    #------------------

    def x_data_view(self, x_window = None, flatten_y = False):
        """
        :param x_window: the window of the x variable (omega/omega_n/t/tau) for which data is requested;
                         if None, take the full window
        :param flatten_y: if the Green function is of size (1, 1), flatten the array to a 1d array
        :rtype: a tuple (X, data) where
                * X is a 1d numpy array of the x variable inside the requested window
                * data is a 3d numpy array of dim (len(X), :, :), the corresponding slice of data;
                  if flatten_y is True and the target is (1, 1), a 1d numpy array is returned instead
        """
        X = [x.imag for x in self.mesh] if type(self.mesh) == MeshImFreq else [x for x in self.mesh]
        X, data = numpy.array(X), self.data
        if x_window:
            sl = clip_array(X, *x_window) if x_window else slice(len(X)) # the slice due to the clip option x_window
            X, data = X[sl], data[sl,:,:]
        if flatten_y and data.shape[1:3] == (1, 1): data = data[:,0,0]
        return X, data
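    # Usage sketch of x_data_view (illustrative only; `g` is assumed to be a 1x1 GfImFreq
    # built elsewhere):
    #
    #   X, Y = g.x_data_view(x_window = (0, 20), flatten_y = True)
    #   # X holds the Matsubara frequencies omega_n inside [0, 20]; Y is the matching
    #   # 1d slice of g.data, ready to be plotted or fitted.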
    #-------- LAZY expression system -----------------------------------------

    def _lazy_expr_eval_context_(self):
        return LazyCTX(self)

    def __eq__(self, other):
        raise RuntimeError, " Operator not defined "

    def _ilshift_(self, A):
        """ A can be two things:
          * G <<= any_init will initialize the GFBloc with the initializer
          * G <<= g2 where g2 is a GFBloc will copy g2 into self
        """
        if isinstance(A, self.__class__):
            if self is not A: self.copy_from(A) # otherwise it is useless AND does not work !!
        elif isinstance(A, lazy_expressions.LazyExpr): # A is a lazy expression made of GFs, scalars, descriptors
            A2 = descriptors.convert_scalar_to_const(A)
            def e_t(x):
                if not isinstance(x, descriptors.Base): return x
                tmp = self.copy()
                x(tmp)
                return tmp
            self.copy_from(lazy_expressions.eval_expr_with_context(e_t, A2))
        elif isinstance(A, lazy_expressions.LazyExprTerminal): # e.g. g <<= SemiCircular(...)
            self <<= lazy_expressions.LazyExpr(A)
        elif descriptors.is_scalar(A): # in case it is a scalar ....
            self <<= lazy_expressions.LazyExpr(A)
        else:
            raise RuntimeError, " <<= operator: RHS not understood"
        return self
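    # Usage sketch for <<= as implemented above (illustrative only; SemiCircular is the usual
    # descriptor and g, g2 are assumed to be two compatible blocks):
    #
    #   g <<= SemiCircular(1.0)   # initialize from a lazy descriptor (half-bandwidth 1.0)
    #   g <<= g2                  # copy another block of the same type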
    #-------------------- Arithmetic operations ---------------------------------

    def __add_iadd_impl(self, lhs, arg, is_add):
        d, t, rhs = lhs.data, lhs.tail, None
        if type(lhs) == type(arg):
            d[:,:,:] += arg.data
            t += arg.tail
        elif isinstance(arg, numpy.ndarray): # an array considered as a constant function
            MatrixStack(lhs.data).add(arg)
            rhs = arg
        elif descriptors.is_scalar(arg): # just a scalar
            arg = arg*numpy.identity(lhs.N1, dtype = lhs.data.dtype)
            MatrixStack(lhs.data).add(arg)
            assert lhs.tail.shape[0] == lhs.tail.shape[1], "tail + scalar only valid in diagonal case"
            rhs = numpy.identity(lhs.tail.shape[0]) * arg
        else:
            raise RuntimeError, " argument type not recognized in += for %s"%arg
        if rhs != None:
            new_tail = TailGf(shape=lhs.tail.shape)
            new_tail[0][:,:] = rhs
            if is_add: lhs._singularity = lhs.tail + new_tail
            else: lhs.tail = lhs.tail + new_tail
        return lhs

    def __iadd__(self, arg):
        return self.__add_iadd_impl(self, arg, False)

    def __add__(self, y):
        if descriptors.is_lazy(y): return lazy_expressions.make_lazy(self) + y
        c = self.copy()
        return self.__add_iadd_impl(c, y, True)

    def __radd__(self, y): return self.__add__(y)

    def __sub_isub_impl(self, lhs, arg, is_sub):
        d, t, rhs = lhs.data, lhs.tail, None
        if type(lhs) == type(arg):
            d[:,:,:] -= arg.data
            t -= arg.tail
        elif isinstance(arg, numpy.ndarray): # an array considered as a constant function
            MatrixStack(lhs.data).sub(arg)
            rhs = arg
        elif descriptors.is_scalar(arg): # just a scalar
            arg = arg*numpy.identity(lhs.N1, dtype = lhs.data.dtype)
            MatrixStack(lhs.data).sub(arg)
            assert lhs.tail.shape[0] == lhs.tail.shape[1], "tail - scalar only valid in diagonal case"
            rhs = numpy.identity(lhs.tail.shape[0]) * arg
        else:
            raise RuntimeError, " argument type not recognized in -= for %s"%arg
        if rhs != None:
            new_tail = TailGf(shape=lhs.tail.shape)
            new_tail[0][:,:] = rhs
            if is_sub: lhs._singularity = lhs.tail - new_tail
            else: lhs.tail = lhs.tail - new_tail
        return lhs

    def __isub__(self, arg):
        return self.__sub_isub_impl(self, arg, False)

    def __sub__(self, y):
        if descriptors.is_lazy(y): return lazy_expressions.make_lazy(self) - y
        c = self.copy()
        return self.__sub_isub_impl(c, y, True)

    def __rsub__(self, y):
        c = (-1)*self.copy()
        return c + y # very important to use the same + as above, cf the _singularity vs tail distinction

    def __imul__(self, arg):

        if type(self) == type(arg):
            d, d2 = self.data, arg.data
            assert d.shape == d2.shape, " Green function block multiplication with arrays of different size !"
            for om in range(d.shape[0]):
                d[om,:,:] = numpy.dot(d[om,:,:], d2[om,:,:])
            self.tail = self.tail * arg.tail
        elif descriptors.is_scalar(arg):
            self.data *= arg
            self.tail *= arg
        else:
            raise RuntimeError, " argument type not recognized in *= for %s"%arg
        return self

    def __mul__(self, arg):

        if descriptors.is_lazy(arg):
            return lazy_expressions.make_lazy(self) * arg
        else:
            res = self.copy()
            res *= arg
            return res

    def __rmul__(self, arg):

        if descriptors.is_lazy(arg):
            return arg * lazy_expressions.make_lazy(self)
        elif descriptors.is_scalar(arg):
            return self.__mul__(arg)

    def from_L_G_R(self, L, G, R):

        N1 = self.data.shape[1]
        N2 = self.data.shape[2]
        assert L.shape[0] == N1
        assert L.shape[1] == G.data.shape[1]
        assert R.shape[0] == G.data.shape[2]
        assert R.shape[1] == N2

        MatrixStack(self.data).matmul_L_R(L, G.data, R)

        # this might be a bit slow
        for o in range(G.tail.order_min, G.tail.order_max+1):
            self.tail[o] = numpy.dot(L, numpy.dot(G.tail[o], R))
        self.tail.mask.fill(G.tail.order_max)
    def __idiv__(self, arg):
        """ If arg is a scalar, simple scalar division """
        if descriptors.is_lazy(arg): return lazy_expressions.make_lazy(self) / arg
        if descriptors.is_scalar(arg):
            self.data /= arg
            self.tail /= arg
        else:
            raise RuntimeError, " argument type not recognized in /= for %s"%arg
        return self

    def __div__(self, arg):
        assert descriptors.is_scalar(arg), "Error in /"
        res = self.copy()
        res /= arg
        return res

    #---------------------------------------------------

    def zero(self):
        self <<= 0.0

    #---------------------------------------------------

    def invert(self):
        """Invert the matrix for all arguments"""
        MatrixStack(self.data).invert()
        self.tail.invert()

    #---------------------------------------------------

    def transpose(self):
        """Transpose the GF Bloc: returns a new transposed view"""
        ### WARNING: this depends on the C++ layering ....
        return self.__class__(
                indices = list(self.indices),
                mesh = self.mesh,
                data = self.data.transpose((0, 2, 1)),
                tail = self.tail.transpose(),
                name = self.name+'(t)')

    #---------------------------------------------------

    def conjugate(self):
        """Complex conjugate of the GF Bloc. It follows the policy of numpy and
        makes a copy only if the Green function is complex valued"""

        return self.__class__(
                indices = list(self.indices),
                mesh = self.mesh,
                data = self.data.conjugate(),
                tail = self.tail.conjugate(),
                name = self.name+'*')

    #------------------ Density -----------------------------------

    def total_density(self):
        """Trace of the density matrix"""
        return numpy.trace(self.density())
@@ -1,143 +0,0 @@
from gf import GfImFreq_cython, MeshImFreq, TailGf
from gf_generic import GfGeneric
import numpy
from scipy.optimize import leastsq
from tools import get_indices_in_dict
from nothing import Nothing
import impl_plot

class GfImFreq ( GfGeneric, GfImFreq_cython ):
    def __init__(self, **d):
        """
        The constructor has two variants: you can either provide the Matsubara
        frequency mesh yourself, or give the parameters to build it.
        All parameters must be given with keyword arguments.

        GfImFreq(indices, beta, statistic, n_points, data, tail, name)

        * ``indices``: a list of index names for the block
        * ``beta``: inverse temperature
        * ``statistic``: 'F' or 'B'
        * ``positive_only``: True or False
        * ``n_points``: number of Matsubara frequencies
        * ``data``: a numpy array of dimensions (n_points, len(indices), len(indices)) holding the values of the Green function on the mesh
        * ``tail``: the tail
        * ``name``: a name for the GF

        GfImFreq(indices, mesh, data, tail, name)

        * ``indices``: a list of index names for the block
        * ``mesh``: a MeshImFreq object
        * ``data``: a numpy array of dimensions (len(mesh), len(indices), len(indices)) holding the values of the Green function on the mesh
        * ``tail``: the tail
        * ``name``: a name for the GF

        .. warning::

            The Green function takes a **view** of the array data, and a **reference** to the tail.

        """
        mesh = d.pop('mesh', None)
        if mesh is None:
            if 'beta' not in d: raise ValueError, "beta not provided"
            beta = float(d.pop('beta'))
            n_points = d.pop('n_points', 1025)
            stat = d.pop('statistic', 'F')
            positive_only = d.pop('positive_only', True)
            mesh = MeshImFreq(beta, stat, n_points, positive_only)

        self.dtype = numpy.complex_
        indices_pack = get_indices_in_dict(d)
        indicesL, indicesR = indices_pack
        N1, N2 = len(indicesL), len(indicesR)
        data = d.pop('data') if 'data' in d else numpy.zeros((len(mesh),N1,N2), self.dtype)
        tail = d.pop('tail') if 'tail' in d else TailGf(shape = (N1,N2))
        symmetry = d.pop('symmetry', Nothing())
        name = d.pop('name', 'g')
        assert len(d) == 0, "Unknown parameters in GFBloc construction %s"%d.keys()

        GfGeneric.__init__(self, mesh, data, tail, symmetry, indices_pack, name, GfImFreq)
        GfImFreq_cython.__init__(self, mesh, data, tail)
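    # Construction sketch for the two variants documented above (illustrative only):
    #
    #   g1 = GfImFreq(indices = ['up', 'down'], beta = 40.0, n_points = 1025, name = 'g1')
    #   g2 = GfImFreq(indices = ['up', 'down'], mesh = g1.mesh, name = 'g2')   # reuse an existing mesh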
    #-------------- PLOT ---------------------------------------

    def _plot_(self, opt_dict):
        """ Plot protocol. opt_dict can contain:
        * :param RIS: 'R', 'I', 'S', 'RI' [default]
        * :param x_window: (xmin, xmax) or None [default]
        * :param name: a string [default '']. If not '', it replaces the name of the function just for this plot.
        """
        return impl_plot.plot_base(self, opt_dict, r'$\omega_n$',
                                   lambda name: r'%s$(i\omega_n)$'%name, True, [x.imag for x in self.mesh])
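    # Plot-protocol sketch (illustrative only): an object exposing _plot_ as above can be
    # handed to oplot from pytriqs.plot.mpl_interface, which forwards the keyword options
    # listed in the docstring:
    #
    #   from pytriqs.plot.mpl_interface import oplot
    #   oplot(g, x_window = (0, 30), name = 'G(iw_n)')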
    #-------------- OTHER OPERATIONS -----------------------------------------------------

    def replace_by_tail(self, start):
        d = self.data
        t = self.tail
        for n, om in enumerate(self.mesh):
            if n >= start: d[n,:,:] = t(om)

    def fit_tail(self, fixed_coef, order_max, fit_start, fit_stop, replace_tail = True):
        """
        Fit the tail of the Green's function.

        Input:
          - fixed_coef: a 3-dim array of known coefficients for the fit, starting from order -1
          - order_max: highest order in the fit
          - fit_start, fit_stop: fit the data between fit_start and fit_stop

        Output:
          On output all the data above fit_start is replaced by the fitted tail
          and the new moments are included in the Green's function.
        """

        # Turn known_coefs into a numpy array if it is not already the case
        known_coef = fixed_coef

        # Change the order_max
        # It is assumed that any known_coef will start at order -1
        self.tail.zero()
        self.tail.mask.fill(order_max)

        # Fill up two arrays with the frequencies and values over the range of interest
        ninit, nstop = 0, -1
        x = []
        for om in self.mesh:
            if (om.imag < fit_start): ninit = ninit+1
            if (om.imag <= fit_stop): nstop = nstop+1
            if (om.imag <= fit_stop and om.imag >= fit_start): x += [om]
        omegas = numpy.array(x)
        values = self.data[ninit:nstop+1,:,:]

        # Loop over the indices of the Green's function
        for n1, indR in enumerate(self.indicesR):
            for n2, indL in enumerate(self.indicesL):

                # Construct the part of the fitting function which is known
                f_known = numpy.zeros((len(omegas)), numpy.complex)
                for order in range(len(known_coef[n1][n2])):
                    f_known += known_coef[n1][n2][order]*omegas**(1-order)

                # Compute how many free parameters we have and give an initial guess
                len_param = order_max-len(known_coef[n1][n2])+2
                p0 = len_param*[1.0]

                # This is the function to be minimized: the difference between the original
                # data in values and the fitting function
                def fct(p):
                    y_fct = 1.0*f_known
                    for order in range(len_param):
                        y_fct += p[order]*omegas**(1-len(known_coef[n1][n2])-order)
                    y_fct -= values[:,n1,n2]
                    return abs(y_fct)

                # Now call the minimizing function
                sol = leastsq(fct, p0, maxfev=1000*len_param)

                # Put the known and the newly found moments in the tail
                for order in range(len(known_coef[n1][n2])):
                    self.tail[order-1][n1,n2] = numpy.array([[ known_coef[n1][n2][order] ]])
                for order, moment in enumerate(sol[0]):
                    self.tail[len(known_coef[n1][n2])+order-1][n1,n2] = numpy.array([[ moment ]])
        self.tail.mask.fill(order_max)
        # Replace the end of the Green's function by the tail
        if replace_tail: self.replace_by_tail(ninit)
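    # fit_tail usage sketch (illustrative only, for a 1x1 Green function g):
    #
    #   known = numpy.array([[[ 0.0, 0.0, 1.0 ]]])   # orders -1, 0, 1 fixed: G ~ 1/omega at large omega_n
    #   g.fit_tail(fixed_coef = known, order_max = 5, fit_start = 40.0, fit_stop = 80.0)
    #   # moments up to order 5 are fitted on 40 < omega_n < 80; since replace_tail defaults
    #   # to True, the data above fit_start is then replaced by the fitted tail.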
@@ -1,70 +0,0 @@
from gf import GfImTime_cython, MeshImTime, TailGf
from gf_generic import GfGeneric
import numpy
from tools import get_indices_in_dict
from nothing import Nothing
import impl_plot

class GfImTime ( GfGeneric, GfImTime_cython ):
    def __init__(self, **d):
        """
        The constructor has two variants: you can either provide the imaginary
        time mesh yourself, or give the parameters to build it.
        All parameters must be given with keyword arguments.

        GfImTime(indices, beta, statistic, n_points, data, tail, name)

        * ``indices``: a list of index names for the block
        * ``beta``: inverse temperature
        * ``statistic``: 'F' or 'B'
        * ``n_points``: number of time points in the mesh
        * ``data``: a numpy array of dimensions (n_points, len(indices), len(indices)) holding the values of the Green function on the mesh
        * ``tail``: the tail
        * ``name``: a name for the GF

        GfImTime(indices, mesh, data, tail, name)

        * ``indices``: a list of index names for the block
        * ``mesh``: a MeshImTime object
        * ``data``: a numpy array of dimensions (len(mesh), len(indices), len(indices)) holding the values of the Green function on the mesh
        * ``tail``: the tail
        * ``name``: a name for the GF

        .. warning::

            The Green function takes a **view** of the array data, and a **reference** to the tail.

        """
        mesh = d.pop('mesh', None)
        if mesh is None:
            if 'beta' not in d: raise ValueError, "beta not provided"
            beta = float(d.pop('beta'))
            n_max = d.pop('n_points', 10000)
            stat = d.pop('statistic', 'F')
            kind = d.pop('kind', 'H')
            mesh = MeshImTime(beta, stat, n_max, kind)

        self.dtype = numpy.float64
        indices_pack = get_indices_in_dict(d)
        indicesL, indicesR = indices_pack
        N1, N2 = len(indicesL), len(indicesR)
        data = d.pop('data') if 'data' in d else numpy.zeros((len(mesh),N1,N2), self.dtype)
        tail = d.pop('tail') if 'tail' in d else TailGf(shape = (N1,N2))
        symmetry = d.pop('symmetry', Nothing())
        name = d.pop('name', 'g')
        assert len(d) == 0, "Unknown parameters in GFBloc construction %s"%d.keys()

        GfGeneric.__init__(self, mesh, data, tail, symmetry, indices_pack, name, GfImTime)
        GfImTime_cython.__init__(self, mesh, data, tail)

    #-------------- PLOT ---------------------------------------

    def _plot_(self, opt_dict):
        """ Plot protocol. opt_dict can contain:
        * :param RI: 'R', 'I', 'RI' [default]
        * :param x_window: (xmin, xmax) or None [default]
        * :param name: a string [default '']. If not '', it replaces the name of the function just for this plot.
        """
        has_complex_value = False
        return impl_plot.plot_base(self, opt_dict, r'$\tau$', lambda name: r'%s$(\tau)$'%name, has_complex_value, list(self.mesh))
@@ -1,67 +0,0 @@
from gf import GfLegendre_cython, MeshLegendre, TailGf
from gf_generic import GfGeneric
import numpy
from tools import get_indices_in_dict
from nothing import Nothing
import impl_plot

class GfLegendre ( GfGeneric, GfLegendre_cython ):
    def __init__(self, **d):
        """
        The constructor has two variants: you can either provide the Legendre
        mesh yourself, or give the parameters to build it.
        All parameters must be given with keyword arguments.

        GfLegendre(indices, beta, statistic, n_points, data, tail, name)

        * ``indices``: a list of index names for the block
        * ``beta``: inverse temperature
        * ``statistic``: 'F' or 'B'
        * ``n_points``: number of Legendre coefficients in the mesh
        * ``data``: a numpy array of dimensions (n_points, len(indices), len(indices)) holding the values of the Green function on the mesh
        * ``tail``: the tail
        * ``name``: a name for the GF

        GfLegendre(indices, mesh, data, tail, name)

        * ``indices``: a list of index names for the block
        * ``mesh``: a MeshLegendre object
        * ``data``: a numpy array of dimensions (len(mesh), len(indices), len(indices)) holding the values of the Green function on the mesh
        * ``tail``: the tail
        * ``name``: a name for the GF

        .. warning::

            The Green function takes a **view** of the array data, and a **reference** to the tail.

        """
        mesh = d.pop('mesh', None)
        if mesh is None:
            if 'beta' not in d: raise ValueError, "beta not provided"
            beta = float(d.pop('beta'))
            stat = d.pop('statistic', 'F')
            n_max = d.pop('n_points', 30)
            mesh = MeshLegendre(beta, stat, n_max)

        self.dtype = numpy.float64
        indices_pack = get_indices_in_dict(d)
        indicesL, indicesR = indices_pack
        N1, N2 = len(indicesL), len(indicesR)
        data = d.pop('data') if 'data' in d else numpy.zeros((len(mesh),N1,N2), self.dtype)
        tail = d.pop('tail', Nothing())
        symmetry = d.pop('symmetry', None)
        name = d.pop('name', 'g')
        assert len(d) == 0, "Unknown parameters in GFBloc construction %s"%d.keys()

        GfGeneric.__init__(self, mesh, data, tail, symmetry, indices_pack, name, GfLegendre)
        GfLegendre_cython.__init__(self, mesh, data)

    #-------------- PLOT ---------------------------------------

    def _plot_(self, opt_dict):
        """ Plot protocol. opt_dict can contain:
        * :param RI: 'R', 'I', 'RI' [default]
        * :param x_window: (xmin, xmax) or None [default]
        * :param name: a string [default '']. If not '', it replaces the name of the function just for this plot.
        """
        return impl_plot.plot_base(self, opt_dict, r'$l_n$', lambda name: r'%s$(l_n)$'%name, False, list(self.mesh))
@@ -1,67 +0,0 @@
from gf import GfReFreq_cython, MeshReFreq, TailGf
from gf_generic import GfGeneric
import numpy
from tools import get_indices_in_dict
import impl_plot

class GfReFreq ( GfGeneric, GfReFreq_cython ):
    def __init__(self, **d):
        """
        The constructor has two variants: you can either provide the real
        frequency mesh yourself, or give the parameters to build it.
        All parameters must be given with keyword arguments.

        GfReFreq(indices, window, n_points, data, tail, name)

        * ``indices``: a list of index names for the block
        * ``window``: a tuple (omega_min, omega_max)
        * ``n_points``: number of frequency points in the mesh
        * ``data``: a numpy array of dimensions (n_points, len(indices), len(indices)) holding the values of the Green function on the mesh
        * ``tail``: the tail
        * ``name``: a name for the GF

        GfReFreq(indices, mesh, data, tail, name)

        * ``indices``: a list of index names for the block
        * ``mesh``: a MeshReFreq object
        * ``data``: a numpy array of dimensions (len(mesh), len(indices), len(indices)) holding the values of the Green function on the mesh
        * ``tail``: the tail
        * ``name``: a name for the GF

        .. warning::

            The Green function takes a **view** of the array data, and a **reference** to the tail.

        """
        mesh = d.pop('mesh', None)
        if mesh is None:
            window = d.pop('window')
            omega_min = window[0]
            omega_max = window[1]
            n_max = d.pop('n_points', 10000)
            kind = d.pop('kind', 'F')
            mesh = MeshReFreq(omega_min, omega_max, n_max, kind)

        self.dtype = numpy.complex_
        indices_pack = get_indices_in_dict(d)
        indicesL, indicesR = indices_pack
        N1, N2 = len(indicesL), len(indicesR)
        data = d.pop('data') if 'data' in d else numpy.zeros((len(mesh),N1,N2), self.dtype)
        tail = d.pop('tail') if 'tail' in d else TailGf(shape = (N1,N2))
        symmetry = d.pop('symmetry', None)
        name = d.pop('name', 'g')
        assert len(d) == 0, "Unknown parameters in GFBloc construction %s"%d.keys()

        GfGeneric.__init__(self, mesh, data, tail, symmetry, indices_pack, name, GfReFreq)
        GfReFreq_cython.__init__(self, mesh, data, tail)

    #-------------- PLOT ---------------------------------------

    def _plot_(self, opt_dict):
        """ Plot protocol. opt_dict can contain:
        * :param RI: 'R', 'I', 'RI' [default]
        * :param x_window: (xmin, xmax) or None [default]
        * :param name: a string [default '']. If not '', it replaces the name of the function just for this plot.
        """
        return impl_plot.plot_base(self, opt_dict, r'$\omega$', lambda name: r'%s$(\omega)$'%name, True, list(self.mesh))
@@ -1,67 +0,0 @@
from gf import GfReTime_cython, MeshReTime, TailGf
from gf_generic import GfGeneric
import numpy
from tools import get_indices_in_dict
import impl_plot

class GfReTime ( GfGeneric, GfReTime_cython ):
    def __init__(self, **d):
        """
        The constructor has two variants: you can either provide the real-time
        mesh yourself, or give the parameters to build it.
        All parameters must be given with keyword arguments.

        GfReTime(indices, window, n_points, data, tail, name)

        * ``indices``: a list of index names for the block
        * ``window``: a tuple (t_min, t_max)
        * ``n_points``: number of time points in the mesh
        * ``data``: a numpy array of dimensions (n_points, len(indices), len(indices)) holding the values of the Green function on the mesh
        * ``tail``: the tail
        * ``name``: a name for the GF

        GfReTime(indices, mesh, data, tail, name)

        * ``indices``: a list of index names for the block
        * ``mesh``: a MeshReTime object
        * ``data``: a numpy array of dimensions (len(mesh), len(indices), len(indices)) holding the values of the Green function on the mesh
        * ``tail``: the tail
        * ``name``: a name for the GF

        .. warning::

            The Green function takes a **view** of the array data, and a **reference** to the tail.

        """
        mesh = d.pop('mesh', None)
        if mesh is None:
            window = d.pop('window')
            t_min = window[0]
            t_max = window[1]
            n_max = d.pop('n_points', 10000)
            kind = d.pop('kind', 'F')
            mesh = MeshReTime(t_min, t_max, n_max, kind)

        self.dtype = numpy.complex_
        indices_pack = get_indices_in_dict(d)
        indicesL, indicesR = indices_pack
        N1, N2 = len(indicesL), len(indicesR)
        data = d.pop('data') if 'data' in d else numpy.zeros((len(mesh),N1,N2), self.dtype)
        tail = d.pop('tail') if 'tail' in d else TailGf(shape = (N1,N2))
        symmetry = d.pop('symmetry', None)
        name = d.pop('name', 'g')
        assert len(d) == 0, "Unknown parameters in GFBloc construction %s"%d.keys()

        GfGeneric.__init__(self, mesh, data, tail, symmetry, indices_pack, name, GfReTime)
        GfReTime_cython.__init__(self, mesh, data, tail)

    #-------------- PLOT ---------------------------------------

    def _plot_(self, opt_dict):
        """ Plot protocol. opt_dict can contain:
        * :param RI: 'R', 'I', 'RI' [default]
        * :param x_window: (xmin, xmax) or None [default]
        * :param name: a string [default '']. If not '', it replaces the name of the function just for this plot.
        """
        return impl_plot.plot_base(self, opt_dict, r'$t$', lambda name: r'%s$(t)$'%name, True, list(self.mesh))
@@ -1,69 +0,0 @@
from gf import GfTwoRealTime_cython, MeshTwoRealTime, MeshReTime, TailGf
from gf_generic import GfGeneric
import numpy
from tools import get_indices_in_dict
import impl_plot

class GfTwoRealTime ( GfGeneric, GfTwoRealTime_cython ):
    def __init__(self, **d):
        """
        The constructor has two variants: you can either provide the two-time
        mesh yourself, or give the parameters to build it.
        All parameters must be given with keyword arguments.

        GfTwoRealTime(indices, window, n_points, data, tail, name)

        * ``indices``: a list of index names for the block
        * ``window``: a tuple (t_min, t_max)
        * ``n_points``: number of time points in the mesh
        * ``data``: a numpy array of dimensions (n_points, len(indices), len(indices)) holding the values of the Green function on the mesh
        * ``tail``: the tail
        * ``name``: a name for the GF

        GfTwoRealTime(indices, mesh, data, tail, name)

        * ``indices``: a list of index names for the block
        * ``mesh``: a MeshTwoRealTime object
        * ``data``: a numpy array of dimensions (len(mesh), len(indices), len(indices)) holding the values of the Green function on the mesh
        * ``tail``: the tail
        * ``name``: a name for the GF

        .. warning::

            The Green function takes a **view** of the array data, and a **reference** to the tail.

        """
        mesh = d.pop('mesh', None)
        if mesh is None:
            window = d.pop('window')
            t_min = window[0]
            t_max = window[1]
            n_max = d.pop('n_points', 10000)
            kind = d.pop('kind', 'F')
            mesh = MeshTwoRealTime(t_max, n_max)
            #mesh = MeshTwoRealTime(t_min, t_max, n_max)
            #mesh = MeshReTime(t_min, t_max, n_max, 'F')

        self.dtype = numpy.complex_
        indices_pack = get_indices_in_dict(d)
        indicesL, indicesR = indices_pack
        N1, N2 = len(indicesL), len(indicesR)
        data = d.pop('data') if 'data' in d else numpy.zeros((len(mesh),N1,N2), self.dtype)
        symmetry = d.pop('symmetry', None)
        name = d.pop('name', 'g')
        assert len(d) == 0, "Unknown parameters in GfTwoRealTime construction %s"%d.keys()

        GfGeneric.__init__(self, mesh, data, None, symmetry, indices_pack, name, GfTwoRealTime)
        GfTwoRealTime_cython.__init__(self, mesh, data)

    #-------------- PLOT ---------------------------------------

    def _plot_(self, opt_dict):
        """ Plot protocol. opt_dict can contain:
        * :param RI: 'R', 'I', 'RI' [default]
        * :param x_window: (xmin, xmax) or None [default]
        * :param name: a string [default '']. If not '', it replaces the name of the function just for this plot.
        """
        # NOT CHANGED
        return impl_plot.plot_base(self, opt_dict, r'$t$', lambda name: r'%s$(t)$'%name, True, list(self.mesh))
@@ -1,81 +0,0 @@
from gf_imfreq import GfImFreq

cdef class GfImFreq_cython:

    cdef gf_imfreq _c

    def __init__(self, MeshImFreq mesh, data, TailGf tail):
        self._c = gf_imfreq(mesh._c, array_view[dcomplex,THREE](data), tail._c, nothing())

    def __write_hdf5_cython__(self, gr, char * key):
        h5_write(make_h5_group(gr), key, self._c)

    def set_from_fourier(self, GfImTime_cython gt):
        """Fills self with the Fourier transform of gt"""
        self._c << fourier(gt._c)

    def set_from_legendre(self, GfLegendre_cython gl):
        """Fills self with the Legendre transform of gl"""
        self._c << legendre_to_imfreq(gl._c)

    def density(self):
        return density(self._c).to_python()

    def __dealloc__(self):
        pass
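# Usage sketch of the transforms wrapped above, as seen from Python (illustrative only;
# g_tau and g_l are assumed to be a GfImTime and a GfLegendre with the same structure):
#
#   g_iw = GfImFreq(indices = [0], beta = 40.0)
#   g_iw.set_from_fourier(g_tau)
#   g_iw.set_from_legendre(g_l)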
#---------------- Reading from h5 ---------------------------------------

def h5_read_GfImFreq(gr, key):
    try:
        indicesL = gr[key]['indices']['left']
        indicesR = gr[key]['indices']['right']
        pack = [indicesL, indicesR]
    except:
        pack = []
    try:
        name = gr[key]['name']
    except:
        name = key
    return make_GfImFreq(h5_extractor[gf_imfreq]()(make_h5_group(gr),key), pack, name)

from pytriqs.archive.hdf_archive_schemes import register_class
register_class (GfImFreq, read_fun = h5_read_GfImFreq)

#---------------- Conversion functions ---------------------------------------

# Python -> C
cdef gf_imfreq as_gf_imfreq (g) except +:
    return (<GfImFreq_cython?>g)._c

# C -> Python. Do NOT add except +
cdef make_GfImFreq (gf_imfreq x, indices_pack = [], name = "g"):
    data = x.data().to_python()
    if indices_pack == []:
        indices_pack = [range(data.shape[1]), range(data.shape[2])]
    return GfImFreq(
            mesh = make_MeshImFreq (x.mesh()),
            data = data,
            tail = make_TailGf (x.singularity()),
            indices_pack = indices_pack,
            name = name)

# Python -> C for blocks
cdef gf_block_imfreq as_gf_block_imfreq (G) except +:
    cdef vector[gf_imfreq] v_c
    for n,g in G:
        v_c.push_back(as_gf_imfreq(g))
    return make_gf_block_imfreq (v_c)

# C -> Python for blocks
cdef make_BlockGfImFreq (gf_block_imfreq G, block_indices_pack = [], name = "G"):
    gl = []
    name_list = G.mesh().domain().names()
    if block_indices_pack == []:
        for i,n in enumerate(name_list):
            sha = G[i].data().to_python().shape[1:3]
            block_indices_pack.append( [range(sha[0]), range(sha[1])] )
    for i,n in enumerate(name_list):
        gl.append( make_GfImFreq(G[i], block_indices_pack[i]) )
    return BlockGf( name_list = name_list, block_list = gl, name = name )
@@ -1,86 +0,0 @@
from gf_imtime import GfImTime

cdef class GfImTime_cython:

    cdef gf_imtime _c

    def __init__(self, MeshImTime mesh, data, TailGf tail):
        self._c = gf_imtime(mesh._c, array_view[double,THREE](data), tail._c, nothing())

    def __write_hdf5_cython__(self, gr, char * key):
        h5_write(make_h5_group(gr), key, self._c)

    def set_from_inverse_fourier(self, GfImFreq_cython gw):
        """Fills self with the inverse Fourier transform of gw"""
        self._c << inverse_fourier(gw._c)

    def set_from_legendre(self, GfLegendre_cython gl):
        """Fills self with the Legendre transform of gl"""
        self._c << legendre_to_imtime(gl._c)

    def __dealloc__(self):
        pass

    def __call__(self, float tau):
        return matrix[double](self._c(tau)).to_python()

#---------------- Reading from h5 ---------------------------------------

def h5_read_GfImTime(gr, key):
    try:
        indicesL = gr[key]['indices']['left']
        indicesR = gr[key]['indices']['right']
        pack = [indicesL, indicesR]
    except:
        pack = []
    try:
        name = gr[key]['name']
    except:
        name = key
    return make_GfImTime(h5_extractor[gf_imtime]()(make_h5_group(gr),key), pack, name)

from pytriqs.archive.hdf_archive_schemes import register_class
register_class (GfImTime, read_fun = h5_read_GfImTime)

#---------------- Conversion functions ---------------------------------------

# Python -> C
cdef gf_imtime as_gf_imtime (g) except +:
    return (<GfImTime_cython?>g)._c

# C -> Python. Do NOT add except +
cdef make_GfImTime (gf_imtime x, indices_pack = [], name = "g"):
    data = x.data().to_python()
    if indices_pack == []:
        indices_pack = [range(data.shape[1]), range(data.shape[2])]
    else:
        # check that the dimensions are ok
        assert len(indices_pack) == 2
        assert len(indices_pack[0]) == data.shape[1]
        assert len(indices_pack[1]) == data.shape[2]
    return GfImTime(
            mesh = make_MeshImTime (x.mesh()),
            data = data,
            tail = make_TailGf (x.singularity()),
            indices_pack = indices_pack,
            name = name)

# Python -> C for blocks
cdef gf_block_imtime as_gf_block_imtime (G) except +:
    cdef vector[gf_imtime] v_c
    for n,g in G:
        v_c.push_back(as_gf_imtime(g))
    return make_gf_block_imtime (v_c)

# C -> Python for blocks
cdef make_BlockGfImTime (gf_block_imtime G, block_indices_pack = [], name = "G"):
    gl = []
    name_list = G.mesh().domain().names()
    if block_indices_pack == []:
        for i,n in enumerate(name_list):
            sha = G[i].data().to_python().shape[1:3]
            block_indices_pack.append( [range(sha[0]), range(sha[1])] )
    for i,n in enumerate(name_list):
        gl.append( make_GfImTime(G[i], block_indices_pack[i]) )
    return BlockGf( name_list = name_list, block_list = gl, name = name )
@@ -1,81 +0,0 @@
from gf_legendre import GfLegendre

cdef class GfLegendre_cython:
    cdef gf_legendre _c
    def __init__(self, MeshLegendre mesh, data):
        self._c = gf_legendre(mesh._c, array_view[double,THREE](data), nothing(), nothing())

    def __write_hdf5_cython__(self, gr, char * key):
        h5_write(make_h5_group(gr), key, self._c)

    def set_from_imtime(self, GfImTime_cython gt):
        """Fills self with the Legendre transform of gt"""
        self._c << imtime_to_legendre(gt._c)

    def set_from_imfreq(self, GfImFreq_cython gw):
        """Fills self with the Legendre transform of gw"""
        self._c << imfreq_to_legendre(gw._c)

    def density(self):
        return density(self._c).to_python()

    def enforce_discontinuity(self, disc):
        enforce_discontinuity(self._c, array_view[double,TWO](disc))

    def __dealloc__(self):
        pass

#---------------- Reading from h5 ---------------------------------------

def h5_read_GfLegendre(gr, key):
    try:
        indicesL = gr[key]['indices']['left']
        indicesR = gr[key]['indices']['right']
        pack = [indicesL, indicesR]
    except:
        pack = []
    try:
        name = gr[key]['name']
    except:
        name = key
    return make_GfLegendre(h5_extractor[gf_legendre]()(make_h5_group(gr),key), pack, name)

from pytriqs.archive.hdf_archive_schemes import register_class
register_class (GfLegendre, read_fun = h5_read_GfLegendre)

#---------------- Conversion functions ---------------------------------------

# Python -> C
cdef gf_legendre as_gf_legendre (g) except +:
    return (<GfLegendre_cython?>g)._c

# C -> Python. Do NOT add except +
cdef make_GfLegendre (gf_legendre x, indices_pack = [], name = "g"):
    data = x.data().to_python()
    if indices_pack == []:
        indices_pack = [range(data.shape[1]), range(data.shape[2])]
    return GfLegendre(
            mesh = make_MeshLegendre (x.mesh()),
            data = data,
            indices_pack = indices_pack,
            name = name)

# Python -> C for blocks
cdef gf_block_legendre as_gf_block_legendre (G) except +:
    cdef vector[gf_legendre] v_c
    for n,g in G:
        v_c.push_back(as_gf_legendre(g))
    return make_gf_block_legendre (v_c)

# C -> Python for blocks
cdef make_BlockGfLegendre (gf_block_legendre G, block_indices_pack = [], name = "G"):
    gl = []
    name_list = G.mesh().domain().names()
    if block_indices_pack == []:
        for i,n in enumerate(name_list):
            sha = G[i].data().to_python().shape[1:3]
            block_indices_pack.append( [range(sha[0]), range(sha[1])] )
    for i,n in enumerate(name_list):
        gl.append( make_GfLegendre(G[i], block_indices_pack[i]) )
    return BlockGf( name_list = name_list, block_list = gl, name = name )
@@ -1,32 +0,0 @@
cdef class MeshLegendre:
    cdef mesh_legendre _c

    def __init__(self, beta, stat, int n_leg):
        self._c = make_mesh_legendre(beta, {'F': Fermion, 'B': Boson}[stat], n_leg)

    def __len__(self): return self._c.size()

    property beta:
        """Inverse temperature"""
        def __get__(self): return self._c.domain().beta

    property statistic:
        def __get__(self): return 'F' if self._c.domain().statistic == Fermion else 'B'

    def __iter__(self): # I use the C++ generator !
        cdef mesh_pt_generator[mesh_legendre] g = mesh_pt_generator[mesh_legendre](&self._c)
        while not g.at_end():
            yield g.to_point()
            g.increment()

    def __richcmp__(MeshLegendre self, MeshLegendre other, int op):
        if op == 2: # ==
            return self._c == other._c

    def __reduce__(self):
        return self.__class__, (self.beta, self.statistic, len(self))

# C -> Python
cdef inline make_MeshLegendre (mesh_legendre x):
    return MeshLegendre( x.domain().beta, 'F' if x.domain().statistic == Fermion else 'B', x.size() )
@@ -1,34 +0,0 @@
cdef class MeshTwoRealTime:
    cdef mesh_two_real_times _c

    def __init__(self, double tmax, double n_time_slices):
        self._c = make_mesh_two_real_times(tmax, n_time_slices)

    property t_min:
        def __get__(self): return get_1d_mesh_from_2times_mesh(self._c).x_min()

    property t_max:
        def __get__(self): return get_1d_mesh_from_2times_mesh(self._c).x_max()

    #property kind:
    #    def __get__(self): return self._c.kind()

    def __len__(self): return self._c.size()

    #def __iter__(self): # I use the C++ generator !
    #    cdef mesh_pt_generator[mesh_two_real_times] g = mesh_pt_generator[mesh_two_real_times](&self._c)
    #    while not g.at_end():
    #        yield g.to_point()
    #        g.increment()

    def __richcmp__(MeshTwoRealTime self, MeshTwoRealTime other, int op):
        if op == 2: # ==
            return self._c == other._c

    def __reduce__(self):
        return self.__class__, (self.t_min, self.t_max, len(self)) #, self.kind)

# C -> Python
cdef inline make_MeshTwoRealTime (mesh_two_real_times x):
    return MeshTwoRealTime(get_1d_mesh_from_2times_mesh(x).x_max(), get_1d_mesh_from_2times_mesh(x).size() )
@@ -1,64 +0,0 @@
from tools import py_deserialize
import descriptors

cdef class TailGf:
    cdef tail _c
    def __init__(self, **d):
        """
        TailGf ( shape )
        TailGf ( data, mask, order_min )
        """
        # default values
        omin = -1
        omax = 8

        a = d.pop('data', None)
        if a == None:
            (N1, N2) = d.pop('shape')
            a = numpy.zeros((omax-omin+1,N1,N2), numpy.complex)
        m = d.pop('mask', None)
        if m == None:
            m = numpy.zeros(a.shape[1:3], int)
            m.fill(omax)
        o = d.pop('order_min', None)
        if o == None: o = omin

        assert len(d) == 0, "Unknown parameters in TailGf construction %s"%d.keys()

        self._c = tail(array_view[dcomplex,THREE](a), array_view[long,TWO](m), o)

    property N1:
        def __get__(self): return self.shape[0]

    property N2:
        def __get__(self): return self.shape[1]

    def _make_slice(self, sl1, sl2):
        return self.__class__(data = self.data[:,sl1,sl2], mask = self.mask[sl1,sl2])

    def __repr__(self):
        omin = self.order_min
        while ((omin <= self.order_max) and (numpy.max(numpy.abs(self.data[omin-self.order_min,:,:])) < 1e-8)):
            omin = omin+1
        if omin == self.order_max+1:
            return "%s"%numpy.zeros(self.shape)
        else:
            return string.join([ "%s"%self[r] + (" /" if r>0 else "") + " Om^%s"%(abs(r)) for r in range(omin, self.order_max+1) ], " + ")

    def __call__(self, x):
        val = 0.0
        for n in range(self.order_min, self.order_max+1):
            val += self[n] * x**(-n)
        return val
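    # Evaluation sketch (illustrative only): __call__ above sums the stored moments as
    # t(x) ~ sum_{n = order_min}^{order_max} t[n] * x**(-n), so a pure 1/omega tail gives
    #
    #   t = TailGf(shape = (1, 1))
    #   t[1][:,:] = 1.0
    #   t(100j)          # ~ -0.01j, i.e. 1/(100j)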
    #---- other operations ----
    def transpose(self):
        """Transpose the tail: new view as in numpy"""
        return TailGf(data=self.data.transpose(), mask=self.mask.transpose())

    def conjugate(self):
        """Complex conjugate of the tail: new view as in numpy"""
        return TailGf(data=self.data.conjugate(), mask=self.mask)
@@ -1,71 +0,0 @@
from gf_two_real_times import GfTwoRealTime

cdef class GfTwoRealTime_cython:
    cdef gf_two_real_times _c
    def __init__(self, MeshTwoRealTime mesh, data):
        self._c = gf_two_real_times(mesh._c, array_view[dcomplex,THREE](data), nothing(), nothing())

    def __write_hdf5_cython__(self, gr, char * key):
        h5_write(make_h5_group(gr), key, self._c)

    def __dealloc__(self):
        pass

    def slice1d(self, double t):
        return make_GfReTime(slice1d(self._c,t))

#---------------- Reading from h5 ---------------------------------------

def h5_read_GfTwoRealTime(gr, key):
    try:
        indicesL = gr[key]['indices']['left']
        indicesR = gr[key]['indices']['right']
        pack = [indicesL, indicesR]
    except:
        pack = []
    try:
        name = gr[key]['name']
    except:
        name = key
    return make_GfTwoRealTime(h5_extractor[gf_two_real_times]()(make_h5_group(gr),key), pack, name)

from pytriqs.archive.hdf_archive_schemes import register_class
register_class (GfTwoRealTime, read_fun = h5_read_GfTwoRealTime)

#---------------- Conversion functions ---------------------------------------

# Python -> C
cdef gf_two_real_times as_gf_two_real_times (g) except +:
    return (<GfTwoRealTime_cython?>g)._c

# C -> Python. Do NOT add except +
cdef make_GfTwoRealTime (gf_two_real_times x, indices_pack = [], name = "g"):
    data = x.data().to_python()
    if indices_pack == []:
        indices_pack = [range(data.shape[1]), range(data.shape[2])]
    return GfTwoRealTime(
            mesh = make_MeshTwoRealTime (x.mesh()),
            data = data,
            indices_pack = indices_pack,
            name = name)

# Python -> C for blocks
cdef gf_block_two_real_times as_gf_block_two_real_times (G) except +:
    cdef vector[gf_two_real_times] v_c
    for n,g in G:
        v_c.push_back(as_gf_two_real_times(g))
    return make_gf_block_two_real_times (v_c)

# C -> Python for blocks
cdef make_BlockGfTwoRealTime (gf_block_two_real_times G, block_indices_pack = [], name = "G"):
    gl = []
    name_list = G.mesh().domain().names()
    if block_indices_pack == []:
        for i,n in enumerate(name_list):
            sha = G[i].data().to_python().shape[1:3]
            block_indices_pack.append( [range(sha[0]), range(sha[1])] )
    for i,n in enumerate(name_list):
        gl.append( make_GfTwoRealTime(G[i], block_indices_pack[i]) )
    return BlockGf( name_list = name_list, block_list = gl, name = name )
@@ -22,15 +22,14 @@ t = class_( py_type = "TailGf",
            arithmetic = ("algebra","double")
            )

t.add_constructor(signature = "(int N1, int N2, int size=10, int order_min=-1)",
                  doc = "DOC of constructor")
#t.add_constructor(doc = "DOC of constructor", signature = "(array_view<dcomplex,3> d, array_view<long,2> m, long i)", python_precall = "tail_aux.tail_construct")
t.add_constructor(signature = "(int N1, int N2, int n_order=10, int order_min=-1)",
                  doc = "Constructs a new tail, of matrix size N1xN2, with n_order expansion starting at order_min")

t.add_property(name = "data",
               getter = cfunction(c_name="data", signature = "array_view<dcomplex,3>()"),
               doc = "Access to the data array")

##tail.add_property(name = "shape", getter = cfunction(c_name="shape", signature = "int()", doc = "Shape"))
#t.add_property(name = "shape", getter = cfunction(c_name="shape", signature = "int()", doc = "Shape"))
t.add_property(getter = cfunction(c_name="size", signature = "int()"),
               doc = "size")
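# Usage sketch for the wrapped constructor above (illustrative only; the keyword names simply
# follow the declared C++ signature and the module path is assumed to be pytriqs.gf.local):
#
#   from pytriqs.gf.local import TailGf
#   t = TailGf(N1 = 1, N2 = 1, n_order = 10, order_min = -1)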
@@ -81,6 +80,16 @@ t.add_setitem(calling_pattern = "self_c(i) = m",

module.add_class(t)

# Change C++ to make the same
# def __repr__ (self) :
#    omin = self.order_min
#    while ((omin <= self.order_max) and (numpy.max(numpy.abs(self.data[omin-self.order_min,:,:])) < 1e-8)):
#      omin = omin+1
#    if omin == self.order_max+1:
#      return "%s"%numpy.zeros(self.shape)
#    else:
#      return string.join([ "%s"%self[r]+ (" /" if r>0 else "") + " Om^%s"%(abs(r)) for r in range(omin, self.order_max+1) ] , " + ")

########################
## enums
########################
@@ -254,19 +263,19 @@ def make_gf( py_type, c_tag, is_complex_data = True, is_im = False, has_tail = T
# backward compatibility
g.add_property(name = "N1",
               getter = cfunction(calling_pattern="int result = get_target_shape(self_c)[0]", signature = "int()"),
               doc = "")
               doc = "[Deprecated] equivalent to target_shape[0]")
g.add_property(name = "N2",
               getter = cfunction(calling_pattern="int result = get_target_shape(self_c)[1]", signature = "int()"),
               doc = "")
               doc = "[Deprecated] equivalent to target_shape[1]")

# []
g.add_getitem(signature = "gf_view<%s>(range r1, range r2)"%c_tag,
              calling_pattern= "auto result = slice_target(self_c,r1,r2)",
              doc = " DOC to be written ")
              doc = "Returns a sliced view of the Green function")

g.add_getitem(signature = "gf_view<%s>(std::string i1, std::string i2)"%c_tag,
              calling_pattern= "auto result = slice_target(self_c,self_c.indices().convert_index(i1,0),self_c.indices().convert_index(i2,1))",
              doc = " DOC to be written ")
              doc = "Returns a sliced view of the Green function")

g.add_setitem(signature = "void(PyObject* r1, PyObject* r2, PyObject* val)",
              calling_pattern=
@@ -275,7 +284,7 @@ def make_gf( py_type, c_tag, is_complex_data = True, is_im = False, has_tail = T
              pyref res = PyNumber_InPlaceLshift(gs_py,val); // gs <<= val
              """,
              no_self_c = True, # avoid a warning
              doc = " doc [] set ")
              doc = "g[....] <<= what_ever : fills the slice of the Green function with what_ever")

# Plot
g.add_property(name = "real",
@@ -289,6 +298,9 @@ def make_gf( py_type, c_tag, is_complex_data = True, is_im = False, has_tail = T
# Lazy system
g.add_pure_python_method("pytriqs.gf.local._gf_common.LazyCTX", py_name = "__lazy_expr_eval_context__")

# For basic ops, if the other operand is a lazy expression, build a lazy
# expression: this is done by these little external functions, for backward
# compatibility
g.number_protocol['add'].python_precall = "pytriqs.gf.local._gf_common.add_precall"
g.number_protocol['subtract'].python_precall = "pytriqs.gf.local._gf_common.sub_precall"
g.number_protocol['multiply'].python_precall = "pytriqs.gf.local._gf_common.mul_precall"
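# Sketch of what such a precall hook can look like (an illustration only, not the actual
# _gf_common source): it runs before the wrapped C++ operator and reroutes the operation
# to the lazy algebra whenever the other operand is a lazy expression.
#
#   def add_precall(x, y):
#       import lazy_expressions, descriptors
#       if descriptors.is_lazy(y): return lazy_expressions.make_lazy(x) + y
#       return NotImplemented   # fall through to the wrapped C++ overload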
@@ -301,13 +313,13 @@ def make_gf( py_type, c_tag, is_complex_data = True, is_im = False, has_tail = T
g.add_method(py_name = "transpose",
             calling_pattern = "auto result = transpose(self_c)",
             signature = "gf<%s>()"%c_tag,
             doc = "Returns a NEW gf, with transposed data...")
             doc = "Returns a NEW gf, with transposed data, i.e. it is NOT a transposed view.")

if c_tag not in [ "imtime", "legendre"] :
    g.add_method(py_name = "conjugate", calling_pattern = "auto result = conj(self_c)" , signature = "gf<%s>()"%c_tag, doc = "Return a new function, conjugate of self.")

g.number_protocol['multiply'].add_overload(calling_pattern = "*", signature = "gf<%s>(matrix<%s> x,gf<%s> y)"%(c_tag,data_type,c_tag)) #'x'), (self.c_type,'y')], rtype = self.c_type)
g.number_protocol['multiply'].add_overload(calling_pattern = "*", signature = "gf<%s>(gf<%s> x,matrix<%s> y)"%(c_tag,c_tag,data_type)) #'x'), (self.c_type,'y')], rtype = self.c_type)
g.number_protocol['multiply'].add_overload(calling_pattern = "*", signature = "gf<%s>(matrix<%s> x,gf<%s> y)"%(c_tag,data_type,c_tag))
g.number_protocol['multiply'].add_overload(calling_pattern = "*", signature = "gf<%s>(gf<%s> x,matrix<%s> y)"%(c_tag,c_tag,data_type))

g.add_method(py_name = "from_L_G_R",
             calling_pattern = "self_c = L_G_R(l,g,r)",
@@ -330,9 +342,6 @@ def make_gf( py_type, c_tag, is_complex_data = True, is_im = False, has_tail = T

g = make_gf(py_type = "GfImFreq", c_tag = "imfreq", is_im = True)

#g.add_method(py_name = "set_from_fourier", c_name = "fourier", signature = "void()", doc = "Fills self with the Fourier transform of gt")
#g.add_method(py_name = "set_from_legendre", c_name = "fourier", signature = "void()", doc = "Fills self with the Legendre transform of gl")

g.add_method(py_name = "density",
             calling_pattern = "auto result = density(self_c)",
             signature = "matrix_view<double>()",
@@ -455,6 +464,7 @@ g.add_method(py_name = "set_from_inverse_fourier",
module.add_class(g)

# EXPERIMENTAL: global fourier functions....
# To be replaced by make_gf(fourier...).
module.add_function(name = "make_gf_from_inverse_fourier", signature="gf_view<retime>(gf_view<refreq> gw)", doc ="")