mirror of https://github.com/TREX-CoE/trexio.git (synced 2025-01-03 10:06:01 +01:00)
Merge pull request #110 from TREX-CoE/no_hl
Removed dependency on hdf5_hl library
Commit 986f5d8bf8
@@ -4,6 +4,7 @@ CHANGES
2.3
---

- Removed dependency on hdf5_hl
- Fixed Fortran interface on 32-bit (e.g. i386) architectures
- Changed the structure of the state group
- Sparse data type is patched to work with different dimensions
README.md (21 changed lines)
@@ -37,6 +37,8 @@ TREX library for efficient I/O.
- Emacs (>= 26.0)
- SWIG (>= 4.0) [required for the Python API]

**Note:** The source code is auto-generated from the Emacs org-mode (`.org`) files following the literate programming approach. This is why the `src` directory is initially empty.

## Installation procedure from the GitHub repo clone (for developers):

1. `git clone https://github.com/TREX-CoE/trexio.git`
@@ -83,13 +85,10 @@ The official releases of TREXIO `>=2.0.0` can be installed using the
[GNU Guix](https://guix.gnu.org) functional package manager.
The [trexio.scm](https://github.com/TREX-CoE/trexio/blob/master/tools/trexio.scm)
Schema file contains the manifest specification for the `trexio` package.
It can be installed within the selected `$GUIX_PROFILE` as follows:
It can be installed as follows:

```
guix package \
     --profile=$GUIX_PROFILE \
     --cores=<n_cores> \
     --install-from-file=trexio.scm
guix package --cores=<n_cores> --install-from-file=trexio.scm
```

## Installation procedure for Spack users
@@ -104,6 +103,18 @@ It can be installed as follows
spack install --jobs <n_cores> trexio
```

## Installation procedure for Debian/Ubuntu users

The official release of TREXIO `2.2.0` is available as a Debian (`.deb`) package thanks to the [Debichem Team](https://wiki.debian.org/Debichem).
The source code is hosted [here](https://salsa.debian.org/debichem-team/libtrexio) and
the pre-built binary files are available via the [Debian package registry](https://packages.debian.org/bookworm/libtrexio-dev).

TREXIO is also available on [Ubuntu 23.04 (Lunar Lobster)](https://packages.ubuntu.com/lunar/libtrexio-dev) and newer and can be installed as follows:

```
sudo apt-get update && sudo apt-get install libtrexio-dev
```

## Compilation without the HDF5 library

By default, the configuration step proceeds to search for the [HDF5 library](https://portal.hdfgroup.org/display/HDF5/HDF5).
configure.ac (76 changed lines)
@@ -95,7 +95,7 @@ AC_RUN_IFELSE(
execl("/bin/sh", "sh", "-c", "mkdir tmpdir1 && \
                              touch tmpdir1/test_file && \
                              $CP_PROG -r -n tmpdir1 tmpdir2 && \
                              exec ls tmpdir2/test_file", NULL);
                              exec ls tmpdir2/test_file > /dev/null", NULL);
])],
[ rm -rf tmpdir1 tmpdir2
  CP_COMMAND="\"$CP_PROG\", \"-r\", \"-n\""
@@ -122,7 +122,9 @@ AC_CHECK_HEADERS([fcntl.h inttypes.h stdint.h stdbool.h stdlib.h string.h unistd
### ----

HDF5_LIBS=""
HDF5_LDFLAGS=""
HDF5_CFLAGS=""
HDF5_CPPFLAGS=""
AC_ARG_WITH([hdf5],
  AS_HELP_STRING([--with-hdf5=PATH], [Path to HDF5 library and headers]), [
  with_hdf5="$withval"], [with_hdf5="yes"])
@@ -130,29 +132,87 @@ AC_ARG_WITH([hdf5],
AS_IF([test "x$with_hdf5" == "xno"], [
  AC_DEFINE([HAVE_HDF5], 0, [Define to 1 if HDF5 is available]) ],
  [test "x$with_hdf5" != "xyes"], [
    HDF5_LIBS="-lhdf5"
    HDF5_PATH="$with_hdf5"
    HDF5_LIBS="-L$HDF5_PATH/lib -lhdf5 -lhdf5_hl"
    HDF5_CFLAGS="-I$HDF5_PATH/include"
    HDF5_LDFLAGS="-L$HDF5_PATH/lib"
    HDF5_CPPFLAGS="-I$HDF5_PATH/include"
    AC_DEFINE([HAVE_HDF5], 1, [Define to 1 if HDF5 is available]) ],
  [
    PKG_CHECK_EXISTS([hdf5], [
      PKG_CHECK_MODULES([HDF5], [hdf5 >= 1.8])
      AS_IF([ test "${HDF5_LIBS}" == "" ], [
        AC_CHECK_LIB([hdf5], [H5Fcreate], [ HDF5_LIBS="-lhdf5" ])
        AC_CHECK_HEADER([hdf5.h], [HDF5_CFLAGS=""], [AC_MSG_ERROR([HDF5 header not found]) ])
        AC_CHECK_HEADER([hdf5_hl.h], [HDF5_CFLAGS=""], [AC_MSG_ERROR([HDF5 header not found]) ])
      ],
      [ AC_PATH_PROG([H5CC],[h5cc],[not_found])
        AS_IF([test "$H5CC" != "not_found"], [
          HDF5_LIBS="-lhdf5"
          AC_REQUIRE([AC_PROG_SED])
          AC_REQUIRE([AC_PROG_AWK])
          AC_REQUIRE([AC_PROG_GREP])
          # Look for "HDF5 Version: X.Y.Z"
          HDF5_VERSION=$(eval $H5CC -showconfig | $GREP 'HDF5 Version:' \
            | $AWK '{print $[]3}')

          # A ideal situation would be where everything we needed was
          # in the AM_* variables. However most systems are not like this
          # and seem to have the values in the non-AM variables.
          #
          # We try the following to find the flags:
          # (1) Look for "NAME:" tags
          # (2) Look for "H5_NAME:" tags
          # (3) Look for "AM_NAME:" tags
          #
          HDF5_tmp_flags=$(eval $H5CC -showconfig \
            | $GREP 'FLAGS\|Extra libraries:' \
            | $AWK -F: '{printf("%s "), $[]2}' )

          dnl Find the installation directory and append include/
          HDF5_tmp_inst=$(eval $H5CC -showconfig \
            | $GREP 'Installation point:' \
            | $AWK '{print $[]NF}' )

          dnl Add this to the CPPFLAGS
          HDF5_CPPFLAGS="-I${HDF5_tmp_inst}/include"

          HDF5_SHOW=$(eval $H5CC -show)

          dnl Now sort the flags out based upon their prefixes
          for arg in $HDF5_SHOW $HDF5_tmp_flags ; do
            case "$arg" in
              -I*) echo $HDF5_CPPFLAGS | $GREP -e "$arg" 2>&1 >/dev/null \
                   || HDF5_CPPFLAGS="$HDF5_CPPFLAGS $arg"
                ;;
              -L*) echo $HDF5_LDFLAGS | $GREP -e "$arg" 2>&1 >/dev/null \
                   || HDF5_LDFLAGS="$HDF5_LDFLAGS $arg"
                ;;
              -l*) echo $HDF5_LIBS | $GREP -e "$arg" 2>&1 >/dev/null \
                   || HDF5_LIBS="$HDF5_LIBS $arg"
                ;;
            esac
          done

        ])
      ])
    AC_DEFINE([HAVE_HDF5], 1, [Define to 1 if HDF5 is available])
    HDF5_LIBS="$HDF5_LIBS -lhdf5_hl"
  ])

AM_CONDITIONAL([HAVE_HDF5],[test "x$with_hdf5" != "xno"])

AC_SUBST([HDF5_LDFLAGS])
AC_SUBST([HDF5_LIBS])
AC_SUBST([HDF5_CFLAGS])
AC_SUBST([HDF5_CPPFLAGS])
CPPFLAGS="${HDF5_CPPFLAGS} ${CPPFLAGS}"
CFLAGS="${HDF5_CFLAGS} ${CFLAGS}"
LDFLAGS="${HDF5_LDFLAGS} ${LDFLAGS}"
LIBS="${HDF5_LIBS} ${LIBS}"

AS_IF([test "x$with_hdf5" != "xno"], [
  OLD_LIBS=$LIBS
  AC_CHECK_LIB([hdf5], [H5Fcreate], [], [
    AC_MSG_ERROR([-lhdf5 fails, use ./configure --with-hdf5=...]) ])
  LIBS=$OLD_LIBS
  AC_CHECK_HEADER([hdf5.h], [], [
    AC_MSG_ERROR([hdf5.h not found, use ./configure --with-hdf5=...]) ])
])

# The block below should only execute if the ax_lib_hdf5.m4 macro failed to find HDF5.
# It is only needed to manually build Python API because setup.py depends on HDF5.
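For context on the new link check: `AC_CHECK_LIB([hdf5], [H5Fcreate], ...)` only needs the core `libhdf5`, which is why `-lhdf5_hl` can be dropped from the flags. A minimal sketch of an equivalent standalone test program (the file names `check_hdf5.c` and `conftest.h5` are illustrative, not part of the repository):

```c
/* Build with something like:  cc check_hdf5.c $HDF5_CFLAGS $HDF5_LDFLAGS -lhdf5 */
#include "hdf5.h"

int main(void)
{
  /* H5Fcreate lives in the core library, so linking succeeds without hdf5_hl. */
  hid_t file_id = H5Fcreate("conftest.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
  if (file_id < 0) return 1;
  H5Fclose(file_id);
  return 0;
}
```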
@@ -92,7 +92,7 @@ h5_ldflags = h5_ldflags_withl.split(" ")[0] if not do_sdist else ""
pytrexio_module = Extension('pytrexio._pytrexio',
                            sources = [os.path.join(srcpath, code) for code in c_files],
                            include_dirs = [h5_cflags, srcpath, numpy_includedir],
                            libraries = ['hdf5', 'hdf5_hl'],
                            libraries = ['hdf5' ],
                            extra_compile_args = [
                                '-std=c99',
                                '-Wno-discarded-qualifiers',
@@ -39,7 +39,6 @@
#include <sys/stat.h>

#include "hdf5.h"
#include "hdf5_hl.h"

#+end_src
@@ -380,18 +379,21 @@ trexio_hdf5_read_$group_dset$ (trexio_t* const file, $group_dset_dtype$* const $
  }

  FREE(ddims);

  /* Read dataset */
  herr_t status = H5Dread(dset_id,
                          H5T_$GROUP_DSET_H5_DTYPE$,
                          H5S_ALL, H5S_ALL, H5P_DEFAULT,
                          $group_dset$);

  H5Sclose(dspace_id);
  H5Dclose(dset_id);

  /* High-level H5LT API. No need to deal with dataspaces and datatypes */
  herr_t status = H5LTread_dataset(f->$group$_group,
                                   $GROUP_DSET$_NAME,
                                   H5T_$GROUP_DSET_H5_DTYPE$,
                                   $group_dset$);
  if (status < 0) return TREXIO_FAILURE;

  return TREXIO_SUCCESS;
}

#+end_src

#+begin_src c :tangle write_dset_data_hdf5.c
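The read hunk above swaps the high-level `H5LTread_dataset` call for the core `H5Dread` API. As a hedged standalone sketch of the same pattern, outside the template (the helper name `read_full_dataset`, the double datatype, and the caller-provided buffer are illustrative, not TREXIO code):

```c
#include "hdf5.h"

/* Full read of a double dataset with the core API only (no hdf5_hl).
   Passing H5S_ALL for both dataspace arguments reads the entire dataset,
   which is what H5LTread_dataset used to do behind the scenes. */
int read_full_dataset(const char* fname, const char* dset_path, double* buf)
{
  hid_t file_id = H5Fopen(fname, H5F_ACC_RDONLY, H5P_DEFAULT);
  if (file_id < 0) return -1;

  hid_t dset_id = H5Dopen2(file_id, dset_path, H5P_DEFAULT);
  if (dset_id < 0) { H5Fclose(file_id); return -1; }

  herr_t status = H5Dread(dset_id, H5T_NATIVE_DOUBLE,
                          H5S_ALL, H5S_ALL, H5P_DEFAULT, buf);

  H5Dclose(dset_id);
  H5Fclose(file_id);
  return (status < 0) ? -1 : 0;
}
```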
@@ -411,7 +413,8 @@ trexio_hdf5_write_$group_dset$ (trexio_t* const file, const $group_dset_dtype$*

  Consider using HDF5-native h5repack utility after deleting/overwriting big datasets.
  ,*/
  if (H5LTfind_dataset(f->$group$_group, $GROUP_DSET$_NAME) == 1 && file->mode == 'u') {

  if ((trexio_hdf5_has_$group_dset$(file) == TREXIO_SUCCESS) && (file->mode == 'u')) {
    herr_t status_del = H5Ldelete(f->$group$_group, $GROUP_DSET$_NAME, H5P_DEFAULT);
    if (status_del < 0) return TREXIO_FAILURE;
  }
@@ -454,14 +457,13 @@ trexio_hdf5_has_$group_dset$ (trexio_t* const file)
  trexio_hdf5_t* f = (trexio_hdf5_t*) file;
  if (f->$group$_group == (hsize_t) 0) return TREXIO_HAS_NOT;

  herr_t status = H5LTfind_dataset(f->$group$_group, $GROUP_DSET$_NAME);
  /* H5LTfind_dataset returns 1 if dataset exists, 0 otherwise */
  if (status == 1){
  htri_t exists = H5Lexists(f->$group$_group, $GROUP_DSET$_NAME, H5P_DEFAULT);
  if (exists > 0) {
    return TREXIO_SUCCESS;
  } else if (status == 0) {
    return TREXIO_HAS_NOT;
  } else {
  } else if (exists < 0) {
    return TREXIO_FAILURE;
  } else {
    return TREXIO_HAS_NOT;
  }

}
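This existence check, repeated for every dataset kind in the hunks below, is the core of the change: `H5LTfind_dataset` (from hdf5_hl) is replaced by `H5Lexists`, whose return value is positive if the link exists, zero if it does not, and negative on error. A minimal sketch of that three-way mapping outside the template (the helper name `dataset_exists` is illustrative):

```c
#include "hdf5.h"

/* Map the htri_t result of H5Lexists onto a three-valued status, mirroring
   the TREXIO_SUCCESS / TREXIO_HAS_NOT / TREXIO_FAILURE convention above. */
int dataset_exists(hid_t group_id, const char* name)
{
  htri_t exists = H5Lexists(group_id, name, H5P_DEFAULT);
  if (exists > 0)  return  1;  /* link is present           */
  if (exists == 0) return  0;  /* link is absent            */
  return -1;                   /* negative value: an error  */
}
```

The write paths shown in this diff rely on the same check (through the backend `has_` functions) before calling `H5Ldelete` when an existing dataset is overwritten in `'u'` mode.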
@@ -534,7 +536,7 @@ trexio_hdf5_write_$group_dset$ (trexio_t* const file,

  trexio_exit_code rc_write = TREXIO_FAILURE;
  /* NOTE: chunk size is set upon creation of the HDF5 dataset and cannot be changed ! */
  if ( H5LTfind_dataset(f->$group$_group, dset_index_name) != 1 ) {
  if (trexio_hdf5_has_$group_dset$(file) == TREXIO_HAS_NOT) {
    /* If the file does not exist -> create it and write */

    /* Create chunked dataset with index_dtype datatype and write indices into it */
@@ -657,14 +659,13 @@ trexio_hdf5_has_$group_dset$ (trexio_t* const file)
  trexio_hdf5_t* f = (trexio_hdf5_t*) file;
  if (f->$group$_group == (hsize_t) 0) return TREXIO_HAS_NOT;

  herr_t status = H5LTfind_dataset(f->$group$_group, $GROUP_DSET$_NAME "_values");
  /* H5LTfind_dataset returns 1 if dataset exists, 0 otherwise */
  if (status == 1){
  htri_t exists = H5Lexists(f->$group$_group, $GROUP_DSET$_NAME "_values", H5P_DEFAULT);
  if (exists > 0) {
    return TREXIO_SUCCESS;
  } else if (status == 0) {
    return TREXIO_HAS_NOT;
  } else {
  } else if (exists < 0) {
    return TREXIO_FAILURE;
  } else {
    return TREXIO_HAS_NOT;
  }

}
@@ -728,7 +729,7 @@ trexio_exit_code trexio_hdf5_write_$group_dset$(trexio_t* const file,

  trexio_exit_code rc_write = TREXIO_FAILURE;
  /* NOTE: chunk size is set upon creation of the HDF5 dataset and cannot be changed ! */
  if ( H5LTfind_dataset(f->$group$_group, dset_name) != 1 ) {
  if (trexio_hdf5_has_$group_dset$(file) == TREXIO_HAS_NOT) {
    /* If the file does not exist -> create it and write */

    /* Create chunked dataset with dtype datatype and write indices into it */
@@ -793,14 +794,13 @@ trexio_exit_code trexio_hdf5_has_$group_dset$(trexio_t* const file)

  const char dset_name[256] = "$group_dset$";

  herr_t status = H5LTfind_dataset(f->$group$_group, dset_name);
  /* H5LTfind_dataset returns 1 if dataset exists, 0 otherwise */
  if (status == 1){
  htri_t exists = H5Lexists(f->$group$_group, dset_name, H5P_DEFAULT);
  if (exists > 0) {
    return TREXIO_SUCCESS;
  } else if (status == 0) {
    return TREXIO_HAS_NOT;
  } else {
  } else if (exists < 0) {
    return TREXIO_FAILURE;
  } else {
    return TREXIO_HAS_NOT;
  }
}
#+end_src
@@ -936,7 +936,7 @@ trexio_hdf5_write_$group_dset$ (trexio_t* const file, const char** $group_dset$,

  Consider using HDF5-provided h5repack utility after deleting/overwriting big datasets.
  ,*/
  if (H5LTfind_dataset(f->$group$_group, $GROUP_DSET$_NAME) == 1 && file->mode == 'u') {
  if ( (trexio_hdf5_has_$group_dset$(file) == TREXIO_SUCCESS) && (file->mode == 'u') ) {
    herr_t status_del = H5Ldelete(f->$group$_group, $GROUP_DSET$_NAME, H5P_DEFAULT);
    if (status_del < 0) return TREXIO_FAILURE;
  }
@@ -991,14 +991,13 @@ trexio_hdf5_has_$group_dset$ (trexio_t* const file)
  trexio_hdf5_t* f = (trexio_hdf5_t*) file;
  if (f->$group$_group == (hsize_t) 0) return TREXIO_HAS_NOT;

  herr_t status = H5LTfind_dataset(f->$group$_group, $GROUP_DSET$_NAME);
  /* H5LTfind_dataset returns 1 if dataset exists, 0 otherwise */
  if (status == 1){
  htri_t exists = H5Lexists(f->$group$_group, $GROUP_DSET$_NAME, H5P_DEFAULT);
  if (exists > 0) {
    return TREXIO_SUCCESS;
  } else if (status == 0) {
    return TREXIO_HAS_NOT;
  } else {
  } else if (exists < 0) {
    return TREXIO_FAILURE;
  } else {
    return TREXIO_HAS_NOT;
  }

}
@@ -1231,7 +1230,7 @@ trexio_exit_code trexio_hdf5_write_determinant_list(trexio_t* const file,

  trexio_exit_code rc_write = TREXIO_FAILURE;
  /* NOTE: chunk size is set upon creation of the HDF5 dataset and cannot be changed ! */
  if ( H5LTfind_dataset(f->determinant_group, dset_det_name) != 1 ) {
  if ( trexio_hdf5_has_determinant_list(file) == TREXIO_HAS_NOT ) {
    /* If the file does not exist -> create it and write */

    /* Create chunked dataset with det_dtype datatype and write indices into it */
@@ -1260,14 +1259,13 @@ trexio_exit_code trexio_hdf5_has_determinant_list(trexio_t* const file)
  trexio_hdf5_t* f = (trexio_hdf5_t*) file;
  if (f->determinant_group == (hsize_t) 0) return TREXIO_HAS_NOT;

  herr_t status = H5LTfind_dataset(f->determinant_group, "determinant_list");
  /* H5LTfind_dataset returns 1 if dataset exists, 0 otherwise */
  if (status == 1){
  htri_t exists = H5Lexists(f->determinant_group, "determinant_list", H5P_DEFAULT);
  if (exists > 0) {
    return TREXIO_SUCCESS;
  } else if (status == 0) {
    return TREXIO_HAS_NOT;
  } else {
  } else if (exists < 0) {
    return TREXIO_FAILURE;
  } else {
    return TREXIO_HAS_NOT;
  }
}
#+end_src