Choosing MPI or not at configuration

This commit is contained in:
Anthony Scemama 2019-01-15 15:17:34 +01:00
parent 881db3a586
commit a194a1d61f
13 changed files with 302 additions and 7 deletions

1
.gitignore vendored
View File

@@ -1,4 +1,5 @@
_build/
Makefile
Parallel
*.byte
*.native

View File

@@ -262,13 +262,13 @@ let of_basis_parallel basis =
let t0 = Unix.gettimeofday () in
let ishell = ref 0 in
let ishell = ref 10000000 in
let input_stream = Stream.of_list (List.rev shell_pairs) in
let f shell_p =
let () =
if Parallel.rank < 2 && Cs.index (Csp.shell_a shell_p) > !ishell then
if Parallel.rank < 2 && Cs.index (Csp.shell_a shell_p) < !ishell then
(ishell := Cs.index (Csp.shell_a shell_p) ; print_int !ishell ; print_newline ())
in
@@ -322,7 +322,12 @@ let of_basis_parallel basis =
let of_basis = of_basis_parallel
let of_basis =
match Parallel.size with
| 1 -> of_basis_serial
| _ -> of_basis_parallel

View File

@@ -3,7 +3,7 @@
INCLUDE_DIRS=Parallel,Nuclei,Utils,Basis,SCF,MOBasis,CI
LIBS=
PKGS=
OCAMLBUILD=ocamlbuild -j 0 -cflags $(ocamlcflags) -lflags $(ocamllflags) $(ocamldocflags) -Is $(INCLUDE_DIRS) -ocamlopt $(ocamloptflags)
OCAMLBUILD=ocamlbuild -j 0 -cflags $(ocamlcflags) -lflags $(ocamllflags) $(ocamldocflags) -Is $(INCLUDE_DIRS) -ocamlopt $(ocamloptflags) $(mpi)
MLLFILES=$(wildcard */*.mll) $(wildcard *.mll) Utils/math_functions.c
MLYFILES=$(wildcard */*.mly) $(wildcard *.mly)

View File

@@ -9,8 +9,6 @@ let run_sequential f stream =
Stream.from next
(* Multi-process functions *)
type task_id = int
@@ -140,3 +138,4 @@ let run ?(ordered=true) ~f stream =
| 1 -> run_sequential f stream
| _ -> run_parallel ~ordered f stream

13
Parallel_serial/Farm.ml Normal file
View File

@@ -0,0 +1,13 @@
(* Serial (single-process) implementation of the Farm skeleton. *)

(** [run_sequential f stream] lazily maps [f] over [stream], producing a new
    stream of results. Elements are pulled from the input one at a time. *)
let run_sequential f stream =
  let pull _index =
    match Stream.next stream with
    | task -> Some (f task)
    | exception Stream.Failure -> None
  in
  Stream.from pull

(** [run ?ordered ~f stream] applies [f] to every element of [stream].
    In the serial build the [ordered] flag is irrelevant: results always
    come out in input order. *)
let run ?(ordered=true) ~f stream =
  ignore ordered;
  run_sequential f stream

14
Parallel_serial/Farm.mli Normal file
View File

@@ -0,0 +1,14 @@
(** The Farm skeleton, similar to SklMl.
    The input is a stream of input data, and the output is a stream of data.
*)
val run : ?ordered:bool -> f:('a -> 'b) -> 'a Stream.t -> 'b Stream.t
(** Run the [f] function on every process by popping elements from the
    input stream, and putting the results on the output stream. If [ordered]
    is [true] (the default), then the order of the output is kept
    consistent with the order of the input.
*)

126
Parallel_serial/Parallel.ml Normal file
View File

@@ -0,0 +1,126 @@
(** Serial stand-in for the distributed-parallelism module: there is exactly
    one process, so every collective operation degenerates to the identity. *)

let size = 1       (* a single process ... *)
let rank = 0       (* ... whose rank is always 0 ... *)
let master = true  (* ... making it the master by definition. *)

(* Nothing to synchronize with. *)
let barrier () = ()

(* Broadcasting to oneself amounts to forcing the lazy value. *)
let broadcast v = Lazy.force v

(* Typed broadcasts are all the identity in the serial build. *)
let broadcast_int i = i
let broadcast_int_array ia = ia
let broadcast_float f = f
let broadcast_float_array fa = fa
let broadcast_vec v = v
module Vec = struct

  (** A block-distributed vector. In this serial implementation the whole
      vector lives on the single process, so the local and global index
      ranges always coincide. *)
  type t =
    {
      global_first : int ; (* Lower index in the global array *)
      global_last  : int ; (* Higher index in the global array *)
      local_first  : int ; (* Lower index in the local array *)
      local_last   : int ; (* Higher index in the local array *)
      data : Lacaml.D.vec ; (* Lacaml vector containing the data *)
    }

  (** Global dimension of the vector. *)
  let dim vec =
    vec.global_last - vec.global_first + 1

  (* Field accessors. *)
  let local_first vec = vec.local_first
  let local_last vec = vec.local_last
  let global_first vec = vec.global_first
  let global_last vec = vec.global_last
  let data vec = vec.data

  (** Pretty-prints the index bounds and the data, for debugging. *)
  let pp ppf v =
    Format.fprintf ppf "@[<2>";
    Format.fprintf ppf "@[ gf : %d@]@;" v.global_first;
    Format.fprintf ppf "@[ gl : %d@]@;" v.global_last;
    Format.fprintf ppf "@[ lf : %d@]@;" v.local_first;
    Format.fprintf ppf "@[ ll : %d@]@;" v.local_last;
    Format.fprintf ppf "@[ data : %a@]@;" (Lacaml.Io.pp_lfvec ()) v.data;
    Format.fprintf ppf "@]@.";
    ()

  (** [create n] allocates an uninitialized vector of [n] elements.
      Indices are 1-based, as in Lacaml. *)
  let create n =
    {
      global_first = 1 ;
      global_last  = n ;
      local_first  = 1 ;
      local_last   = n ;
      data = Lacaml.D.Vec.create n
    }

  (** [make n x] : [n]-element vector filled with [x]. *)
  let make n x =
    let result = create n in
    { result with data =
        Lacaml.D.Vec.make
          (Lacaml.D.Vec.dim result.data)
          x
    }

  (** [make0 n] : [n]-element vector filled with zeros. *)
  let make0 n =
    make n 0.

  (** [init n f] : [n]-element vector whose element at (1-based) global
      index [i] is [f i]. *)
  let init n f =
    let result = create n in
    { result with data =
        Lacaml.D.Vec.init
          (Lacaml.D.Vec.dim result.data)
          (fun i -> f (i+result.local_first-1))
    }

  (** [of_array a] builds a vector from [a]. The array is zero-padded so
      that its length is a multiple of [size]; with [size = 1] this padding
      branch is never taken. *)
  let of_array a =
    let length_a = Array.length a in
    let a =
      let n = length_a mod size in
      if n > 0 then
        Array.concat [ a ; Array.make (size-n) 0. ]
      else
        a
    in
    let result = create length_a in
    (* Bug fix: the local buffer was previously built with
       [Array.make (Array.length a) 0.] and the contents of [a] were never
       copied in, so the resulting vector was all zeros. *)
    let a_local = Array.copy a in
    { result with data = Lacaml.D.Vec.of_array a_local }

  (** [to_array v] : contents of [v] as a fresh float array. *)
  let to_array vec =
    Lacaml.D.Vec.to_array vec.data
    |> Array.copy

  (** [of_vec a] : distributed vector initialized from a Lacaml vector. *)
  let of_vec a =
    Lacaml.D.Vec.to_array a
    |> of_array

  (** [to_vec v] : Lacaml vector initialized from [v]. *)
  let to_vec v =
    to_array v
    |> Lacaml.D.Vec.of_array

end
(** [dot u v] : dot product of two distributed vectors.
    @raise Invalid_argument if the dimensions differ. *)
let dot u v =
  if Vec.dim u = Vec.dim v then
    Lacaml.D.dot (Vec.data u) (Vec.data v)
  else
    invalid_arg "Incompatible dimensions"

View File

@@ -0,0 +1,126 @@
(** Module for handling distributed parallelism *)
val size : int
(** Number of distributed processes. *)
val rank : int
(** Rank of the current distributed process. *)
val master : bool
(** True if [rank = 0]. *)
val barrier : unit -> unit
(** Wait for all processes to reach this point. *)
val broadcast : 'a lazy_t -> 'a
(** Broadcasts data to all processes. *)
val broadcast_int : int -> int
(** Broadcasts an [int] to all processes. *)
val broadcast_float : float -> float
(** Broadcasts a [float] to all processes. *)
val broadcast_int_array : int array -> int array
(** Broadcasts an [int array] to all processes. *)
val broadcast_float_array : float array -> float array
(** Broadcasts a [float array] to all processes. *)
val broadcast_vec : Lacaml.D.vec -> Lacaml.D.vec
(** Broadcasts a Lacaml vector to all processes. *)
(** {5 Vector operations} *)

module Vec : sig

  type t = private
    {
      global_first : int ; (* Lower index in the global array *)
      global_last  : int ; (* Higher index in the global array *)
      local_first  : int ; (* Lower index in the local array *)
      local_last   : int ; (* Higher index in the local array *)
      data : Lacaml.D.vec ; (* Lacaml vector containing the data *)
    }

  val pp : Format.formatter -> t -> unit
  (** Pretty-prints the index bounds and data of a vector, for debugging. *)

  (** {6 Creation/conversion of vectors} *)

  val create : int -> t
  (** [create n] @return a distributed vector with [n] rows (not initialized). *)

  val make : int -> float -> t
  (** [make n x] @return a distributed vector with [n] rows initialized with value [x]. *)

  val make0 : int -> t
  (** [make0 n] @return a distributed vector with [n] rows initialized with the zero
      element. *)

  val init : int -> (int -> float) -> t
  (** [init n f] @return a distributed vector containing [n] elements, where
      each element at position [i] is initialized by the result of calling [f i]. *)

  val of_array : float array -> t
  (** [of_array ar] @return a distributed vector initialized from array [ar]. *)

  val to_array : t -> float array
  (** [to_array v] @return an array initialized from vector [v]. *)

  val of_vec : Lacaml.D.vec -> t
  (** [of_vec vec] @return a distributed vector initialized from Lacaml vector [vec]. *)

  val to_vec : t -> Lacaml.D.vec
  (** [to_vec v] @return a Lacaml vector initialized from vector [v]. *)

  (** {6 Accessors } *)

  val dim : t -> int
  (** [dim v] @return the dimension of the vector [v]. *)

  val global_first : t -> int
  (** [global_first v] @return the index of the first element of [v]. *)

  val global_last : t -> int
  (** [global_last v] @return the index of the last element of [v]. *)

  val local_first : t -> int
  (** [local_first v] @return the index of the first element of the local piece of [v]. *)

  (* Fixed: this was declared as a second [global_last], which is a
     "multiple definition" error in a signature and left [local_last]
     (implemented in the .ml) unexported. *)
  val local_last : t -> int
  (** [local_last v] @return the index of the last element of the local piece of [v]. *)

  val data : t -> Lacaml.D.vec
  (** [data v] @return the local Lacaml vector in which the piece of the vector [v] is stored. *)

end
(* Commented out: distributed matrices are not implemented yet.
module Mat : sig
type t =
{
global_first_row : int ; (* Lower row index in the global array *)
global_last_row : int ; (* Higher row index in the global array *)
global_first_col : int ; (* Lower column index in the global array *)
global_last_col : int ; (* Higher column index in the global array *)
local_first_row : int ; (* Lower row index in the local array *)
local_last_row : int ; (* Higher row index in the local array *)
local_first_col : int ; (* Lower column index in the local array *)
local_last_col : int ; (* Higher column index in the local array *)
data : Lacaml.D.mat ; (* Lacaml matrix containing the data *)
}
end
val gemm : Mat.t -> Mat.t -> Mat.t
(* Distributed matrix-matrix product. The result is a distributed matrix. *)
*)
val dot : Vec.t -> Vec.t-> float
(** Dot product between distributed vectors.
    @raise Invalid_argument if the two vectors have different dimensions. *)

2
_tags
View File

@@ -1,4 +1,4 @@
true: package(str,unix,bigarray,lacaml,alcotest,zarith,mpi)
true: package(str,unix,bigarray,lacaml,alcotest,zarith)
<*.byte> : linkdep(Utils/math_functions.o), custom
<*.native>: linkdep(Utils/math_functions.o)
<odoc-ltxhtml>: not_hygienic

11
configure vendored
View File

@@ -15,6 +15,7 @@ ocamlcflags='"-g -warn-error A"'
ocamllflags='"-g -warn-error A"'
ocamloptflags='"opt -O3 -nodynlink -remove-unused-arguments -rounds 16 -inline 100 -inline-max-unroll 100"'
ocamldocflags='-docflags "-g ltxhtml.cma -sort -css-style $(PWD)/style.css -colorize-code"'
mpi=''
# --------------------------------
@@ -42,6 +43,7 @@ where options include:
-etc dir default: $etc
-ocamlcflags default: $ocamlcflags
-ocamloptflags default: $ocamloptflags
-mpi default: mpi off
EOF
exit
@@ -73,6 +75,8 @@ while : ; do
-share|--share)
share="$2"
shift;;
-mpi|--mpi)
mpi='-tag "package(mpi)"';;
-help|--help)
help;;
*)
@@ -82,6 +86,12 @@ while : ; do
shift
done
rm -f Parallel
if [[ -n $mpi ]] ; then
ln -s Parallel_mpi Parallel
else
ln -s Parallel_serial Parallel
fi
cat << EOF > Makefile
package_name=$package_name
@@ -92,6 +102,7 @@ doc=$doc
share=$share
man=$man
etc=$etc
mpi=$mpi
ocamlcflags=$ocamlcflags
ocamllflags=$ocamllflags