MRCC merge with garniroy, broken

Anthony Scemama 2017-12-04 12:04:46 +01:00
parent a846375d25
commit 44f91e4187
9 changed files with 114 additions and 151 deletions

View File

@@ -1,15 +0,0 @@
#!/bin/bash -x
TARGET=gpi2
#GPI_OPTIONS=--with-infiniband
GPI_OPTIONS=--with-ethernet
function _install()
{
cd _build/gpi2
./install.sh -p $QP_ROOT $GPI_OPTIONS
cp src/GASPI.f90 $QP_ROOT/plugins/GPI2/
return 0
}
source scripts/build.sh

View File

@@ -1084,6 +1084,8 @@ subroutine H_S2_u_0_mrcc_nstates(v_0,s_0,u_0,H_jj,S2_jj,n,keys_tmp,Nint,istate_i
! Begin Specific to dressing
! --------------------------
!TODO : DRESSING 1 column
!$OMP DO
do ii=1,n_det_ref
i = idx_ref(ii)

View File

@@ -195,34 +195,34 @@ END_PROVIDER
if (diag_algorithm == "Davidson") then
allocate (eigenvectors(size(CI_eigenvectors_dressed,1),size(CI_eigenvectors_dressed,2)), &
eigenvalues(size(CI_electronic_energy_dressed,1)))
allocate (eigenvectors(size(CI_eigenvectors_dressed,1),size(CI_eigenvectors_dressed,2)),&
eigenvalues(size(CI_electronic_energy_dressed,1)))
do j=1,min(N_states,N_det)
do i=1,N_det
eigenvectors(i,j) = psi_coef(i,j)
enddo
enddo
do mrcc_state=1,N_states
do j=mrcc_state,min(N_states,N_det)
do i=1,N_det
eigenvectors(i,j) = psi_coef(i,j)
enddo
enddo
call davidson_diag_mrcc_HS2(psi_det,eigenvectors,&
size(eigenvectors,1), &
eigenvalues,N_det,N_states,N_states_diag,N_int, &
output_determinants,mrcc_state)
CI_eigenvectors_dressed(1:N_det,mrcc_state) = eigenvectors(1:N_det,mrcc_state)
CI_electronic_energy_dressed(mrcc_state) = eigenvalues(mrcc_state)
enddo
do k=N_states+1,N_states_diag
CI_eigenvectors_dressed(1:N_det,k) = eigenvectors(1:N_det,k)
CI_electronic_energy_dressed(k) = eigenvalues(k)
enddo
call u_0_S2_u_0(CI_eigenvectors_s2_dressed,CI_eigenvectors_dressed,N_det,psi_det,N_int,&
N_states_diag,size(CI_eigenvectors_dressed,1))
deallocate (eigenvectors,eigenvalues)
do j=mrcc_state,min(N_states,N_det)
do i=1,N_det
eigenvectors(i,j) = psi_coef(i,j)
enddo
enddo
call davidson_diag_mrcc_HS2(psi_det,eigenvectors, &
size(eigenvectors,1), &
eigenvalues,N_det,N_states,N_states_diag,N_int, &
output_determinants,mrcc_state)
CI_eigenvectors_dressed(1:N_det,mrcc_state) = eigenvectors(1:N_det,mrcc_state)
CI_electronic_energy_dressed(mrcc_state) = eigenvalues(mrcc_state)
enddo
do k=N_states+1,N_states_diag
CI_eigenvectors_dressed(1:N_det,k) = eigenvectors(1:N_det,k)
CI_electronic_energy_dressed(k) = eigenvalues(k)
enddo
call u_0_S2_u_0(CI_eigenvectors_s2_dressed,CI_eigenvectors_dressed,N_det,psi_det,N_int,&
N_states_diag,size(CI_eigenvectors_dressed,1))
deallocate (eigenvectors,eigenvalues)
else if (diag_algorithm == "Lapack") then
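
The removed and added variants of this block are interleaved above; condensed, the new per-state dressed Davidson loop appears to do the following (a minimal sketch assembled from the lines shown, not the verbatim source):

do mrcc_state=1,N_states
  ! Re-seed the trial vectors from the current wavefunction for this state
  do j=mrcc_state,min(N_states,N_det)
    do i=1,N_det
      eigenvectors(i,j) = psi_coef(i,j)
    enddo
  enddo
  ! Solve the dressed eigenproblem for this state only
  call davidson_diag_mrcc_HS2(psi_det,eigenvectors,size(eigenvectors,1), &
      eigenvalues,N_det,N_states,N_states_diag,N_int, &
      output_determinants,mrcc_state)
  ! Keep only this state's eigenvector and energy
  CI_eigenvectors_dressed(1:N_det,mrcc_state) = eigenvectors(1:N_det,mrcc_state)
  CI_electronic_energy_dressed(mrcc_state) = eigenvalues(mrcc_state)
enddo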

View File

@@ -13,8 +13,6 @@ BEGIN_PROVIDER [ double precision, mrcc_E0_denominator, (N_states) ]
END_DOC
if (initialize_mrcc_E0_denominator) then
mrcc_E0_denominator(1:N_states) = psi_energy(1:N_states)
! mrcc_E0_denominator(1:N_states) = HF_energy - nuclear_repulsion
! mrcc_E0_denominator(1:N_states) = barycentric_electronic_energy(1:N_states)
call write_double(6,mrcc_E0_denominator(1)+nuclear_repulsion, 'mrcc Energy denominator')
else
mrcc_E0_denominator = -huge(1.d0)

View File

@@ -1,80 +0,0 @@
! DO NOT MODIFY BY HAND
! Created by $QP_ROOT/scripts/ezfio_interface/ei_handler.py
! from file /home/garniron/quantum_package/src/mrcepa0/EZFIO.cfg
BEGIN_PROVIDER [ logical, perturbative_triples ]
implicit none
BEGIN_DOC
! Compute perturbative contribution of the Triples
END_DOC
logical :: has
PROVIDE ezfio_filename
call ezfio_has_mrcepa0_perturbative_triples(has)
if (has) then
call ezfio_get_mrcepa0_perturbative_triples(perturbative_triples)
else
print *, 'mrcepa0/perturbative_triples not found in EZFIO file'
stop 1
endif
END_PROVIDER
BEGIN_PROVIDER [ double precision, thresh_dressed_ci ]
implicit none
BEGIN_DOC
! Threshold on the convergence of the dressed CI energy
END_DOC
logical :: has
PROVIDE ezfio_filename
call ezfio_has_mrcepa0_thresh_dressed_ci(has)
if (has) then
call ezfio_get_mrcepa0_thresh_dressed_ci(thresh_dressed_ci)
else
print *, 'mrcepa0/thresh_dressed_ci not found in EZFIO file'
stop 1
endif
END_PROVIDER
BEGIN_PROVIDER [ integer, n_it_max_dressed_ci ]
implicit none
BEGIN_DOC
! Maximum number of dressed CI iterations
END_DOC
logical :: has
PROVIDE ezfio_filename
call ezfio_has_mrcepa0_n_it_max_dressed_ci(has)
if (has) then
call ezfio_get_mrcepa0_n_it_max_dressed_ci(n_it_max_dressed_ci)
else
print *, 'mrcepa0/n_it_max_dressed_ci not found in EZFIO file'
stop 1
endif
END_PROVIDER
BEGIN_PROVIDER [ integer, lambda_type ]
implicit none
BEGIN_DOC
! lambda type
END_DOC
logical :: has
PROVIDE ezfio_filename
call ezfio_has_mrcepa0_lambda_type(has)
if (has) then
call ezfio_get_mrcepa0_lambda_type(lambda_type)
else
print *, 'mrcepa0/lambda_type not found in EZFIO file'
stop 1
endif
END_PROVIDER

View File

@@ -11,7 +11,7 @@ subroutine ZMQ_mrcc(E, mrcc, delta, delta_s2, relative_error)
implicit none
character(len=64000) :: task
integer(ZMQ_PTR) :: zmq_to_qp_run_socket, zmq_to_qp_run_socket2
integer(ZMQ_PTR) :: zmq_to_qp_run_socket, zmq_socket_pull
integer, external :: omp_get_thread_num
double precision, intent(in) :: relative_error, E
double precision, intent(out) :: mrcc(N_states)
@@ -23,10 +23,16 @@ subroutine ZMQ_mrcc(E, mrcc, delta, delta_s2, relative_error)
double precision, external :: omp_get_wtime
double precision :: time
double precision :: w(N_states)
provide nproc fragment_first fragment_count mo_bielec_integrals_in_map mo_mono_elec_integral mrcc_weight psi_selectors
w(:) = 0.d0
w(mrcc_stoch_istate) = 1.d0
call update_psi_average_norm_contrib(w)
@@ -34,16 +40,30 @@ subroutine ZMQ_mrcc(E, mrcc, delta, delta_s2, relative_error)
print *, ' Samples Energy Stat. Error Seconds '
print *, '========== ================= ================= ================='
call new_parallel_job(zmq_to_qp_run_socket,'mrcc')
call zmq_put_psi(zmq_to_qp_run_socket,1,mrcc_e0_denominator,size(mrcc_e0_denominator))
call new_parallel_job(zmq_to_qp_run_socket,zmq_socket_pull, 'mrcc')
! call get_carlo_workbatch(Ncp, tbc, cp, cp_at, cp_N)
do i=1,comb_teeth
print *, "TOOTH", first_det_of_teeth(i+1) - first_det_of_teeth(i)
end do
!stop
integer, external :: zmq_put_psi
integer, external :: zmq_put_N_det_generators
integer, external :: zmq_put_N_det_selectors
integer, external :: zmq_put_dvector
if (zmq_put_psi(zmq_to_qp_run_socket,1) == -1) then
stop 'Unable to put psi on ZMQ server'
endif
if (zmq_put_N_det_generators(zmq_to_qp_run_socket, 1) == -1) then
stop 'Unable to put N_det_generators on ZMQ server'
endif
if (zmq_put_N_det_selectors(zmq_to_qp_run_socket, 1) == -1) then
stop 'Unable to put N_det_selectors on ZMQ server'
endif
if (zmq_put_dvector(zmq_to_qp_run_socket,1,'energy',mrcc_e0_denominator,size(mrcc_e0_denominator)) == -1) then
stop 'Unable to put energy on ZMQ server'
endif
! do i=1,comb_teeth
! print *, "TOOTH", first_det_of_teeth(i+1) - first_det_of_teeth(i)
! end do
integer(ZMQ_PTR), external :: new_zmq_to_qp_run_socket
integer :: ipos
ipos=1
@@ -52,7 +72,9 @@ subroutine ZMQ_mrcc(E, mrcc, delta, delta_s2, relative_error)
write(task(ipos:ipos+20),'(I9,1X,I9,''|'')') 0, mrcc_jobs(i)
ipos += 20
if (ipos > 63980) then
call add_task_to_taskserver(zmq_to_qp_run_socket,trim(task(1:ipos)))
if (add_task_to_taskserver(zmq_to_qp_run_socket,trim(task(1:ipos))) == -1) then
stop 'Unable to add task to task server'
endif
ipos=1
endif
else
@@ -60,28 +82,34 @@ subroutine ZMQ_mrcc(E, mrcc, delta, delta_s2, relative_error)
write(task(ipos:ipos+20),'(I9,1X,I9,''|'')') j, mrcc_jobs(i)
ipos += 20
if (ipos > 63980) then
call add_task_to_taskserver(zmq_to_qp_run_socket,trim(task(1:ipos)))
if (add_task_to_taskserver(zmq_to_qp_run_socket,trim(task(1:ipos))) == -1) then
stop 'Unable to add task to task server'
endif
ipos=1
endif
end do
end if
end do
if (ipos > 1) then
call add_task_to_taskserver(zmq_to_qp_run_socket,trim(task(1:ipos)))
if (add_task_to_taskserver(zmq_to_qp_run_socket,trim(task(1:ipos))) == -1) then
stop 'Unable to add task to task server'
endif
endif
call zmq_set_running(zmq_to_qp_run_socket)
!$OMP PARALLEL DEFAULT(shared) NUM_THREADS(nproc+1) &
if (zmq_set_running(zmq_to_qp_run_socket) == -1) then
print *, irp_here, ': Failed in zmq_set_running'
endif
!$OMP PARALLEL DEFAULT(shared) NUM_THREADS(nproc+1) &
!$OMP PRIVATE(i)
i = omp_get_thread_num()
if (i==0) then
call mrcc_collector(E, relative_error, delta, delta_s2, mrcc)
call mrcc_collector(zmq_socket_pull,E(mrcc_stoch_istate), relative_error, delta, delta_s2, mrcc)
else
call mrcc_slave_inproc(i)
endif
!$OMP END PARALLEL
call end_parallel_job(zmq_to_qp_run_socket, 'mrcc')
call end_parallel_job(zmq_to_qp_run_socket, zmq_socket_pull, 'mrcc')
print *, '========== ================= ================= ================='
end subroutine
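
Condensed, the job setup introduced in ZMQ_mrcc above follows one pattern: each helper now returns an integer status that is checked against -1. A minimal sketch using only the calls that appear in this hunk (order and names as shown, not the verbatim source):

integer, external :: zmq_put_psi, zmq_put_dvector
integer, external :: add_task_to_taskserver, zmq_set_running
call new_parallel_job(zmq_to_qp_run_socket, zmq_socket_pull, 'mrcc')
if (zmq_put_psi(zmq_to_qp_run_socket,1) == -1) then
  stop 'Unable to put psi on ZMQ server'
endif
if (zmq_put_dvector(zmq_to_qp_run_socket,1,'energy',mrcc_e0_denominator, &
      size(mrcc_e0_denominator)) == -1) then
  stop 'Unable to put energy on ZMQ server'
endif
! ... build the task string, then:
if (add_task_to_taskserver(zmq_to_qp_run_socket,trim(task(1:ipos))) == -1) then
  stop 'Unable to add task to task server'
endif
if (zmq_set_running(zmq_to_qp_run_socket) == -1) then
  print *, 'Failed in zmq_set_running'
endif
! collector on thread 0, slaves on the remaining threads, then:
call end_parallel_job(zmq_to_qp_run_socket, zmq_socket_pull, 'mrcc')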
@@ -95,13 +123,14 @@ subroutine mrcc_slave_inproc(i)
end
subroutine mrcc_collector(E, relative_error, delta, delta_s2, mrcc)
subroutine mrcc_collector(zmq_socket_pull, E, relative_error, delta, delta_s2, mrcc)
use dress_types
use f77_zmq
use bitmasks
implicit none
integer(ZMQ_PTR), intent(in) :: zmq_socket_pull
double precision, intent(in) :: relative_error, E
double precision, intent(out) :: mrcc(N_states)
double precision, allocatable :: cp(:,:,:,:)
@@ -207,10 +236,11 @@ subroutine mrcc_collector(E, relative_error, delta, delta_s2, mrcc)
end if
end do
do i=1, ntask
call zmq_delete_task(zmq_to_qp_run_socket,zmq_socket_pull,task_id(i),more)
end do
integer, external :: zmq_delete_tasks
if (zmq_delete_tasks(zmq_to_qp_run_socket,zmq_socket_pull,task_id,ntask,more) == -1) then
stop 'Unable to delete tasks'
endif
time = omp_get_wtime()
@@ -231,6 +261,7 @@ subroutine mrcc_collector(E, relative_error, delta, delta_s2, mrcc)
!!!!!!!!!!!!
double precision :: su, su2, eqt, avg, E0, val
integer, external :: zmq_abort
su = 0d0
su2 = 0d0
@@ -253,12 +284,18 @@ subroutine mrcc_collector(E, relative_error, delta, delta_s2, mrcc)
if ((dabs(eqt) < relative_error .and. cps_N(cur_cp) >= 30) .or. total_computed == N_det_generators) then
! Termination
!print '(G10.3, 2X, F16.7, 2X, G16.3, 2X, F16.4, A20)', Nabove(tooth), avg+E, eqt, time-time0, ''
print *, "GREPME", cur_cp, E+E0+avg, eqt, time-time0, total_computed
call zmq_abort(zmq_to_qp_run_socket)
! print *, "GREPME", cur_cp, E+E0+avg, eqt, time-time0, total_computed
if (zmq_abort(zmq_to_qp_run_socket) == -1) then
call sleep(1)
if (zmq_abort(zmq_to_qp_run_socket) == -1) then
print *, irp_here, ': Error in sending abort signal (2)'
endif
endif
else
if (cur_cp > old_cur_cp) then
old_cur_cp = cur_cp
print *, "GREPME", cur_cp, E+E0+avg, eqt, time-time0, total_computed
! print *, "GREPME", cur_cp, E+E0+avg, eqt, time-time0, total_computed
!print '(G10.3, 2X, F16.7, 2X, G16.3, 2X, F16.4, A20)', Nabove(tooth), avg+E, eqt, time-time0, ''
endif
endif
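
The same termination block, annotated (a sketch with comments only; the accumulation of su, su2, avg and eqt is not shown in this hunk, so their meaning is inferred):

! Terminate when the statistical error estimate eqt is below the requested
! relative_error and the current checkpoint has (presumably) accumulated at
! least 30 samples, or when every generator has been computed exactly.
if ((dabs(eqt) < relative_error .and. cps_N(cur_cp) >= 30) .or. &
    total_computed == N_det_generators) then
  ! Ask the task server to abort; retry once after one second if the first
  ! abort signal could not be delivered.
  if (zmq_abort(zmq_to_qp_run_socket) == -1) then
    call sleep(1)
    if (zmq_abort(zmq_to_qp_run_socket) == -1) then
      print *, 'Error in sending abort signal (2)'
    endif
  endif
endif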
@@ -289,7 +326,6 @@ subroutine mrcc_collector(E, relative_error, delta, delta_s2, mrcc)
mrcc(1) = E
call end_zmq_to_qp_run_socket(zmq_to_qp_run_socket)
call end_zmq_pull_socket(zmq_socket_pull)
end subroutine

View File

@@ -0,0 +1,27 @@
program mrsc2sub
implicit none
double precision, allocatable :: energy(:)
allocate (energy(N_states))
!mrmode : 1=mrcepa0, 2=mrsc2 add, 3=mrcc
mrmode = 5
read_wf = .True.
SOFT_TOUCH read_wf
call set_generators_bitmasks_as_holes_and_particles
if (.True.) then
integer :: i,j
do j=1,N_states
do i=1,N_det
psi_coef(i,j) = CI_eigenvectors(i,j)
enddo
enddo
SOFT_TOUCH psi_coef
endif
call run(N_states,energy)
if(do_pt2)then
call run_pt2(N_states,energy)
endif
deallocate(energy)
end

View File

@@ -47,9 +47,6 @@ subroutine run(N_st,energy)
enddo
call diagonalize_ci_dressed(lambda)
E_new = mrcc_e0_denominator(1) !sum(ci_energy_dressed(1:N_states))
! if (.true.) then
! provide delta_ij_mrcc_pouet
! endif
delta_E = (E_new - E_old)/dble(N_states)
print *, ''
call write_double(6,thresh_mrcc,"thresh_mrcc")

View File

@@ -9,7 +9,6 @@ END_PROVIDER
subroutine run_mrcc_slave(thread,iproc,energy)
use dress_types
use f77_zmq
implicit none
@@ -184,7 +183,6 @@ end subroutine
subroutine pull_mrcc_results(zmq_socket_pull, N, ind, mrcc_detail, delta_loc, task_id, ntask)
use dress_types
use f77_zmq
implicit none
integer(ZMQ_PTR), intent(in) :: zmq_socket_pull