Merge branch 'master' of github.com:scemama/quantum_package
Commit 76f6de92fd

bin/qp_gaspi_run  (Executable file, 6 lines)
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+QP_ROOT=$( cd $(dirname "${BASH_SOURCE}")/.. ; pwd -P )
+source $HOME/.bashrc
+source $QP_ROOT/quantum_package.rc
+exec $QP_ROOT/ocaml/qp_run $@
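A minimal usage sketch for this wrapper, assuming the command-line interface defined by the OCaml driver whose diff follows (a -slave flag, an executable name from the registered list, and an EZFIO directory); the executable name fci_zmq_gpi2 and the input h2o.ezfio are hypothetical placeholders:

    # Hypothetical invocation; executable and EZFIO names are placeholders.
    # Master instance, driving the calculation:
    qp_gaspi_run fci_zmq_gpi2 h2o.ezfio

    # Slave instance, typically started on each additional compute node:
    qp_gaspi_run -slave fci_zmq_gpi2 h2o.ezfio

Because the script sources $HOME/.bashrc and quantum_package.rc itself before exec'ing ocaml/qp_run, it can be called directly from a job script without setting up the environment first.
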
@@ -15,7 +15,7 @@ let print_list () =
 
 let () =
   Random.self_init ()
 
-let run slave gaspi_command exe ezfio_file =
+let run slave exe ezfio_file =
 
   (** Check availability of the ports *)
@@ -115,17 +115,13 @@ let run slave gaspi_command exe ezfio_file =
     match Sys.getenv "QP_PREFIX" with
     | Some x -> x^" "
     | None -> ""
-  and gaspi_command =
-    match gaspi_command with
-    | Some gaspi_run -> gaspi_run^" "
-    | None -> ""
   and exe =
     match (List.find ~f:(fun (x,_) -> x = exe) executables) with
     | Some (_,x) -> x^" "
     | None -> assert false
   in
   let exit_code =
-    match (Sys.command (prefix^gaspi_command^exe^ezfio_file)) with
+    match (Sys.command (prefix^exe^ezfio_file)) with
     | 0 -> 0
     | i -> (Printf.printf "Program exited with code %d.\n%!" i; i)
   in
@@ -146,8 +142,6 @@ let spec
   empty
   +> flag "slave" no_arg
        ~doc:(" Needed for slave tasks")
-  +> flag "gaspi_run" (optional string)
-       ~doc:(" mpi launcher with its options")
   +> anon ("executable" %: string)
   +> anon ("ezfio_file" %: string)
 ;;
@@ -165,8 +159,8 @@ Executes a Quantum Package binary file among these:\n\n"
     )
   )
   spec
-  (fun slave gaspi_command exe ezfio_file () ->
-     run slave gaspi_command exe ezfio_file
+  (fun slave exe ezfio_file () ->
+     run slave exe ezfio_file
   )
 |> Command.run ~version: Git.sha1 ~build_info: Git.message
 
plugins/Full_CI_ZMQ_GPI2/NEEDED_CHILDREN_MODULES  (Normal file, 1 line)
@@ -0,0 +1 @@
+Full_CI_ZMQ GPI2
@@ -1,8 +1,9 @@
-===
-MPI
-===
+================
+Full_CI_ZMQ_GPI2
+================
 
-Providers for MPI programs.
+GPI2 Slave for Full_CI with ZMQ. There should be one instance of the slave
+per compute node.
 
 Needed Modules
 ==============
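The README's statement that one slave instance should run per compute node suggests a launch pattern along the following lines; this is a rough sketch only, using a plain hostfile loop rather than a dedicated GASPI launcher, with the hostfile path, installation path, executable name, and EZFIO directory all hypothetical placeholders:

    # Hypothetical: start one qp_gaspi_run slave on every host listed in hostfile.txt.
    while read -r node ; do
        ssh "$node" "/path/to/quantum_package/bin/qp_gaspi_run -slave fci_zmq_gpi2 h2o.ezfio" &
    done < hostfile.txt
    wait   # block until every remote slave has exited

In practice the cluster scheduler or a GPI-2 launcher would normally handle the per-node placement; the loop above only illustrates the one-slave-per-node constraint stated in the README.
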
@@ -14,7 +14,7 @@ end
 
 subroutine provide_everything
   PROVIDE H_apply_buffer_allocated mo_bielec_integrals_in_map psi_det_generators psi_coef_generators psi_det_sorted_bit psi_selectors n_det_generators n_states generators_bitmask zmq_context
-  PROVIDE pt2_e0_denominator mo_tot_num N_int fragment_count MPI_Initialized
+  PROVIDE pt2_e0_denominator mo_tot_num N_int fragment_count GASPI_is_Initialized
 end
 
 subroutine run_wf
@@ -51,10 +51,10 @@ subroutine run_wf
     ! ---------
 
     print *, 'Selection'
-    if (is_mpi_master) then
+    if (is_gaspi_master) then
       call zmq_get_psi(zmq_to_qp_run_socket,1,energy,N_states)
     endif
-    call mpi_bcast_psi()
+    call broadcast_wf(energy)
 
     !$OMP PARALLEL PRIVATE(i)
     i = omp_get_thread_num()
@@ -68,10 +68,10 @@ subroutine run_wf
     ! --------
 
     print *, 'Davidson'
-    if (is_mpi_master) then
+    if (is_gaspi_master) then
      call zmq_get_psi(zmq_to_qp_run_socket,1,energy,N_states)
     endif
-    call mpi_bcast_psi()
+    call broadcast_wf(energy)
     call omp_set_nested(.True.)
     call davidson_slave_tcp(0)
     call omp_set_nested(.False.)
@@ -83,10 +83,10 @@ subroutine run_wf
     ! ---
 
     print *, 'PT2'
-    if (is_mpi_master) then
+    if (is_gaspi_master) then
      call zmq_get_psi(zmq_to_qp_run_socket,1,energy,N_states)
     endif
-    call mpi_bcast_psi()
+    call broadcast_wf(energy)
 
     logical :: lstop
     lstop = .False.
@@ -1 +0,0 @@
-Full_CI_ZMQ MPI
@@ -1,14 +0,0 @@
-===============
-Full_CI_ZMQ_MPI
-===============
-
-MPI Slave for Full_CI with ZMQ
-
-Needed Modules
-==============
-.. Do not edit this section It was auto-generated
-.. by the `update_README.py` script.
-Documentation
-=============
-.. Do not edit this section It was auto-generated
-.. by the `update_README.py` script.
@@ -1 +0,0 @@
-Bitmask
@@ -1,22 +0,0 @@
-subroutine mpi_bcast_psi()
-  use f77_zmq
-  implicit none
-  BEGIN_DOC
-! Broadcast the wave function coming from the qp_run scheduler
-  END_DOC
-  integer :: ierr
-
-  IRP_IF MPI
-    include 'mpif.h'
-    call MPI_BCast(N_states, 1, MPI_INTEGER, 0, MPI_COMM_WORLD, ierr)
-    call MPI_BCast(N_det, 1, MPI_INTEGER, 0, MPI_COMM_WORLD, ierr)
-    call MPI_BCast(psi_det_size, 1, MPI_INTEGER, 0, MPI_COMM_WORLD, ierr)
-
-    TOUCH psi_det_size N_det N_states
-
-    call MPI_BCast(psi_det, N_det, MPI_INTEGER8, 0, MPI_COMM_WORLD, ierr)
-    call MPI_BCast(psi_coef, psi_det_size, MPI_DOUBLE_PRECISION* N_states, 0, MPI_COMM_WORLD, ierr)
-  IRP_ENDIF
-
-end
-
@@ -1,70 +0,0 @@
- BEGIN_PROVIDER [ logical, MPI_Initialized ]
-&BEGIN_PROVIDER [ logical, has_mpi ]
-  implicit none
-  BEGIN_DOC
-! This is true when MPI_Init has been called
-  END_DOC
-
-  IRP_IF MPI
-    integer :: ierr
-    call MPI_Init(ierr)
-    if (ierr /= 0) then
-      print *, ierr
-      print *, 'MPI failed to initialize'
-      stop -1
-    endif
-  IRP_ENDIF
-  MPI_Initialized = .True.
-END_PROVIDER
-
-
- BEGIN_PROVIDER [ integer, MPI_rank ]
-&BEGIN_PROVIDER [ integer, MPI_size ]
-&BEGIN_PROVIDER [ logical, is_MPI_master ]
-  implicit none
-  BEGIN_DOC
-! Usual MPI variables
-  END_DOC
-
-  PROVIDE MPI_Initialized
-
-  IRP_IF MPI
-    include 'mpif.h'
-    integer :: ierr
-    call mpi_comm_size(MPI_COMM_WORLD, MPI_size, ierr)
-    if (ierr /= 0) then
-      print *, ierr
-      print *, 'Unable to get MPI_size'
-      stop -1
-    endif
-    call mpi_comm_rank(MPI_COMM_WORLD, MPI_rank, ierr)
-    if (ierr /= 0) then
-      print *, ierr
-      print *, 'Unable to get MPI_rank'
-      stop -1
-    endif
-    is_MPI_master = (MPI_rank == 0)
-  IRP_ELSE
-    MPI_rank = 0
-    MPI_size = 1
-    is_MPI_master = .True.
-  IRP_ENDIF
-
-
-END_PROVIDER
-
-subroutine qp_mpi_finalize()
-  implicit none
-  PROVIDE MPI_Initialized
-  IRP_IF MPI
-    include 'mpif.h'
-    integer :: ierr
-    call MPI_Finalize(ierr)
-    if (ierr /= 0) then
-      print *, ierr
-      print *, 'Unable to finalize MPI'
-      stop -1
-    endif
-  IRP_ENDIF
-end subroutine
-