Mirror of https://github.com/TREX-CoE/trexio.git (synced 2024-12-22 12:23:54 +01:00)

Commit dbd51bcb4f: Merge branch 'master' into csf
.pre-commit-config.yaml (new file, 10 lines)

@@ -0,0 +1,10 @@
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
  rev: v3.2.0
  hooks:
  - id: trailing-whitespace
  - id: end-of-file-fixer
  - id: check-yaml
  - id: check-added-large-files
@@ -1,6 +1,13 @@
CHANGES
=======

2.3
---

- Added `trexio_has_group` functionality
- Added OCaml binding
- Added spin and energy in MOs

2.2
---
Makefile.am (19 changed lines)

@@ -146,7 +146,6 @@ check_PROGRAMS = $(TESTS)
LDADD = src/libtrexio.la

test_trexio_f = $(srcdir)/tests/trexio_f.f90
CLEANFILES += $(test_trexio_f)

$(test_trexio_f): $(trexio_f)
	cp $(trexio_f) $(test_trexio_f)

@@ -227,6 +226,22 @@ cppcheck.out: $(trexio_h)
	--language=c --std=c99 -rp --platform=unix64 \
	-I../include *.c *.h 2>../$@

#################
# OCaml binding #
#################

ocaml/trexio/_build/default/lib/trexio.cma:
	$(MAKE) -C ocaml/trexio

ocaml: ocaml/trexio/_build/default/lib/trexio.cma

ocaml-install: ocaml/trexio/_build/default/lib/trexio.cma
	opam install ocaml/trexio

##################
# Python binding #
##################

setup_py = $(srcdir)/python/setup.py
setup_cfg = $(srcdir)/python/setup.cfg
pytrexio_py = $(srcdir)/python/pytrexio/pytrexio.py

@@ -278,6 +293,6 @@ CLEANFILES += $(pytrexio_c) \
	python/src/*.c \
	python/src/*.h

.PHONY: cppcheck python-test python-install python-sdist check-numpy FORCE
.PHONY: cppcheck python-test python-install python-sdist check-numpy FORCE ocaml

endif
@@ -10,10 +10,10 @@ TREX library for efficient I/O.

## Minimal requirements (for users):

- Autotools (autoconf >= 2.69, automake >= 1.11, libtool >= 2.2) or CMake (>= 3.16)
- C compiler (gcc/icc/clang)
- Fortran compiler (gfortran/ifort)
- HDF5 library (>= 1.8) [optional, recommended for high performance]

## Installation procedure from the tarball (for users):
ocaml/trexio/Makefile (new file, 9 lines)

@@ -0,0 +1,9 @@
default: sources
	dune build

lib/trexio.ml: ../../trex.json read_json.py src/trexio.ml src/trexio.mli src/trexio_stubs.c
	./read_json.py

sources: lib/trexio.ml

.PHONY: sources default
@@ -1,11 +1,12 @@
(lang dune 3.1)

(name trexio)
(version 2.3.0)

(generate_opam_files true)

(source
 (github trex-coe/trexio))
 (github trex-coe/trexio_ocaml))

(authors
 "Anthony Scemama <scemama@irsamc.ups-tlse.fr>"

@@ -24,8 +25,12 @@
 (name trexio)
 (synopsis "Binding for the TREXIO Input/Output library")
 (description "TREXIO is a file format and library for storing wave functions and integrals for quantum chemistry.")
 (depends ocaml dune)
 (depends
  dune
  (dune-configurator :build)
  (conf-pkg-config :build))
 (tags
  ("Quantum chemistry" "Library")))
  ("Quantum chemistry" "Library"))
)

; See the complete stanza docs at https://dune.readthedocs.io/en/stable/dune-files.html#dune-project
@@ -34,6 +34,17 @@ CAMLprim value caml_delete_{group}(value file)
    caml_failwith(trexio_string_of_error(rc));
  }
}

CAMLprim value caml_has_{group}(value file)
{
  CAMLparam1(file);
  trexio_exit_code rc = trexio_has_{group}( File_val(file) );
  if (rc == TREXIO_SUCCESS) {
    CAMLreturn ( Val_bool(true) );
  } else {
    CAMLreturn ( Val_bool(false) );
  }
}
"""
    f.write( t.replace("{group}",group) )

@@ -437,7 +448,8 @@ def write_mli(data):
    f.write(content_pre)

    for group in data:
        t = "val delete_{group}: trexio_file -> unit\n"
        t += "val has_{group}: trexio_file -> bool\n"
        f.write( t.replace("{group}",group) )

        for element in data[group]:

@@ -526,7 +538,8 @@ def write_ml(data):
    f.write(content_pre)

    for group in data:
        t = "external delete_{group}: trexio_file -> unit = \"caml_delete_{group}\"\n"
        t += "external has_{group}: trexio_file -> bool = \"caml_has_{group}\"\n"
        f.write( t.replace("{group}",group) )

        for element in data[group]:
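For illustration, the caml_has_{group} stub template above expands to one C function per group defined in trex.json; for the nucleus group the generated stub would read (hypothetical expansion, obtained by substituting {group} with nucleus):

CAMLprim value caml_has_nucleus(value file)
{
  /* Wrap trexio_has_nucleus for OCaml: true if the group exists, false otherwise */
  CAMLparam1(file);
  trexio_exit_code rc = trexio_has_nucleus( File_val(file) );
  if (rc == TREXIO_SUCCESS) {
    CAMLreturn ( Val_bool(true) );
  } else {
    CAMLreturn ( Val_bool(false) );
  }
}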
@@ -192,11 +192,21 @@ class TestIO:
        self.test_array_1D()
        self.test_array_2D()

        assert trexio.has_nucleus(self.test_file)

        trexio.delete_nucleus(self.test_file)

        assert not trexio.has_nucleus_num(self.test_file)
        assert not trexio.has_nucleus_charge(self.test_file)
        assert not trexio.has_nucleus_coord(self.test_file)
        assert not trexio.has_nucleus(self.test_file)


    def test_has_group(self):
        """Check existence of a group."""
        self.open()
        assert trexio.has_nucleus(self.test_file)
        assert not trexio.has_rdm(self.test_file)


    def test_context_manager(self):
@@ -1755,6 +1755,98 @@ trexio_pre_close (trexio_t* file)
considered dimensioning variables and cannot be negative or 0. An attempt to write a negative or 0
value will result in the ~TREXIO_INVALID_ARG_2~ exit code.

** Templates for front end has_group functions
*** Introduction

This section concerns API calls related to TREXIO groups.

| Function name        | Description                       |
|----------------------+-----------------------------------|
| ~trexio_has_$group$~ | Check if a group exists in a file |

*** C templates for front end

The ~C~ templates that correspond to each of the above-mentioned
functions can be found below. The first parameter is the ~TREXIO~ file
handle.

**** Function declarations

#+begin_src c :tangle hrw_group_front.h :exports none
trexio_exit_code trexio_has_$group$(trexio_t* const file);
#+end_src

**** Source code

#+begin_src c :tangle has_group_front.c
trexio_exit_code
trexio_has_$group$ (trexio_t* const file)
{

  if (file == NULL) return TREXIO_INVALID_ARG_1;

  assert(file->back_end < TREXIO_INVALID_BACK_END);

  switch (file->back_end) {

  case TREXIO_TEXT:
    return trexio_text_has_$group$(file);

  case TREXIO_HDF5:
#ifdef HAVE_HDF5
    return trexio_hdf5_has_$group$(file);
#else
    return TREXIO_BACK_END_MISSING;
#endif
/*
  case TREXIO_JSON:
    return trexio_json_has_$group$(file);
    break;
,*/
  }

  return TREXIO_FAILURE;
}
#+end_src
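As an illustration of how the generated functions are used from an application (this example is not part of the tangled sources; it assumes a TREXIO file named ~h2o.dir~ opened with the text back end and uses the ~nucleus~ instance of the template):

#+begin_src c
#include <stdio.h>
#include "trexio.h"

int main (void)
{
  trexio_exit_code rc;

  /* Open an existing TREXIO file in read mode with the text back end */
  trexio_t* file = trexio_open("h2o.dir", 'r', TREXIO_TEXT, &rc);
  if (file == NULL || rc != TREXIO_SUCCESS) return 1;

  /* trexio_has_$group$ returns TREXIO_SUCCESS if the group exists,
     TREXIO_HAS_NOT if it does not. */
  rc = trexio_has_nucleus(file);
  if (rc == TREXIO_SUCCESS) {
    printf("nucleus group is present\n");
  } else if (rc == TREXIO_HAS_NOT) {
    printf("nucleus group is absent\n");
  }

  rc = trexio_close(file);
  return (rc == TREXIO_SUCCESS) ? 0 : 1;
}
#+end_src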
*** Fortran templates for front end

The ~Fortran~ templates provide access to the ~C~ API calls from Fortran.
These templates are based on the use of ~iso_c_binding~. Pointers have to be passed by value.

#+begin_src f90 :tangle has_group_front_fortran.f90
interface
   integer(trexio_exit_code) function trexio_has_$group$ (trex_file) bind(C)
     use, intrinsic :: iso_c_binding
     import
     integer(c_int64_t), intent(in), value :: trex_file
   end function trexio_has_$group$
end interface
#+end_src

*** Python templates for front end

#+begin_src python :tangle has_group_front.py
def has_$group$(trexio_file) -> bool:
    """Check that the $group$ group exists in the TREXIO file.

    The parameter is a ~TREXIO File~ object that has been created by a call to the ~open~ function.

    Returns:
        True if the group exists, False otherwise.

    Raises:
        - trexio.Error if the TREXIO return code ~rc~ is TREXIO_FAILURE; the error message is printed using string_of_error.
        - Exception from some other error (e.g. RuntimeError).
    """

    rc = pytr.trexio_has_$group$(trexio_file.pytrexio_s)
    if rc == TREXIO_FAILURE:
        raise Error(rc)

    return rc == TREXIO_SUCCESS
#+end_src

** Templates for front end has/read/write a single numerical attribute
*** Introduction
@@ -179,6 +179,38 @@ trexio_hdf5_deinit (trexio_t* const file)
}
#+end_src

* Template for HDF5 has a group

#+begin_src c :tangle hrw_group_hdf5.h :exports none
trexio_exit_code trexio_hdf5_has_$group$ (trexio_t* const file);
#+end_src


#+begin_src c :tangle has_group_hdf5.c
trexio_exit_code
trexio_hdf5_has_$group$ (trexio_t* const file)
{

  if (file == NULL) return TREXIO_INVALID_ARG_1;

  const trexio_hdf5_t* f = (const trexio_hdf5_t*) file;

  struct H5G_info_t group_info;

  /* H5Gget_info returns info about the HDF5 group as a group_info struct */
  herr_t status = H5Gget_info(f->$group$_group, &group_info);
  if (status < 0) return TREXIO_FAILURE;

  /* If nlinks == 0, the group is empty, i.e. non-existent */
  if (group_info.nlinks == (hsize_t) 0) {
    return TREXIO_HAS_NOT;
  } else {
    return TREXIO_SUCCESS;
  }

}
#+end_src

* Template for HDF5 has/read/write a numerical attribute

#+begin_src c :tangle hrw_attr_num_hdf5.h :exports none
@@ -13,6 +13,9 @@ cat basic_text.h >> trexio_text.h
cat hrw_determinant_text.h >> trexio_text.h
cat *_determinant_text.c >> trexio_text.c

cat populated/pop_has_group_text.c >> trexio_text.c
cat populated/pop_hrw_group_text.h >> trexio_text.h

cat populated/pop_free_group_text.c >> trexio_text.c
cat populated/pop_read_group_text.c >> trexio_text.c
cat populated/pop_flush_group_text.c >> trexio_text.c
@@ -112,6 +112,22 @@ trexio_exit_code trexio_text_inquire(const char* file_name);
trexio_exit_code trexio_text_deinit(trexio_t* const file);
trexio_exit_code trexio_text_lock(trexio_t* const file);
trexio_exit_code trexio_text_unlock(trexio_t* const file);
bool trexio_text_file_exists(const char* file_name);
#+end_src

#+begin_src c :tangle basic_text.c
bool
trexio_text_file_exists (const char* file_name)
{
  /* Check if the file with "file_name" exists */
  struct stat st;

  int rc = stat(file_name, &st);

  bool file_exists = rc == 0;

  return file_exists;
}
#+end_src

#+begin_src c :tangle basic_text.c
@@ -493,6 +509,47 @@ trexio_text_read_$group$ (trexio_text_t* const file)
}
#+end_src

* Template for text has a group

#+begin_src c :tangle hrw_group_text.h :exports none
trexio_exit_code trexio_text_has_$group$(trexio_t* const file);
#+end_src

#+begin_src c :tangle has_group_text.c
trexio_exit_code
trexio_text_has_$group$ (trexio_t* const file)
{

  if (file == NULL) return TREXIO_INVALID_ARG_1;

  /* Flush the group to make sure the group.txt file is created */
  if (file->mode != 'r') {
    trexio_exit_code rc = trexio_text_flush_$group$((trexio_text_t*) file);
    if (rc != TREXIO_SUCCESS) return TREXIO_FAILURE;
  }

  /* Build the file name */
  char $group$_full_path[TREXIO_MAX_FILENAME_LENGTH];

  const char* $group$_file_name = "/$group$.txt";

  strncpy ($group$_full_path, file->file_name, TREXIO_MAX_FILENAME_LENGTH);
  strncat ($group$_full_path, $group$_file_name,
           TREXIO_MAX_FILENAME_LENGTH-strlen($group$_file_name));

  if ($group$_full_path[TREXIO_MAX_FILENAME_LENGTH-1] != '\0') return TREXIO_FAILURE;

  bool file_exists;
  file_exists = trexio_text_file_exists($group$_full_path);

  if (file_exists) {
    return TREXIO_SUCCESS;
  } else {
    return TREXIO_HAS_NOT;
  }
}
#+end_src

* Template for text flush a group

#+begin_src c :tangle flush_group_text.h :exports none
@@ -1063,6 +1120,23 @@ trexio_exit_code trexio_text_write_$group_dset$(trexio_t* const file,
  rc = fclose(f_wSize);
  if (rc != 0) return TREXIO_FILE_ERROR;

  const char $group$_file_name[256] = "/$group$.txt";

  memset (file_full_path, 0, TREXIO_MAX_FILENAME_LENGTH);
  /* Copy directory name in file_full_path */
  strncpy (file_full_path, file->file_name, TREXIO_MAX_FILENAME_LENGTH);
  /* Append name of the file with sparse data */
  strncat (file_full_path, $group$_file_name,
           TREXIO_MAX_FILENAME_LENGTH-strlen($group$_file_name));

  bool file_exists = trexio_text_file_exists(file_full_path);

  /* Create an empty file for the trexio_text_has_group to work */
  if (!file_exists) {
    FILE *fp = fopen(file_full_path, "ab+");
    fclose(fp);
  }

  /* Exit upon success */
  return TREXIO_SUCCESS;
}

@@ -1533,6 +1607,24 @@ trexio_exit_code trexio_text_write_determinant_list(trexio_t* const file,
  rc = fclose(f);
  if (rc != 0) return TREXIO_FILE_ERROR;

  /* Additional part for the trexio_text_has_group to work */
  const char det_file_name[256] = "/determinant.txt";

  memset (file_full_path, 0, TREXIO_MAX_FILENAME_LENGTH);
  /* Copy directory name in file_full_path */
  strncpy (file_full_path, file->file_name, TREXIO_MAX_FILENAME_LENGTH);
  /* Append name of the file with sparse data */
  strncat (file_full_path, det_file_name,
           TREXIO_MAX_FILENAME_LENGTH-strlen(det_file_name));

  bool file_exists = trexio_text_file_exists(file_full_path);

  /* Create an empty file for the trexio_text_has_group to work */
  if (!file_exists) {
    FILE *fp = fopen(file_full_path, "ab+");
    fclose(fp);
  }

  /* Exit upon success */
  return TREXIO_SUCCESS;
}

@@ -1602,6 +1694,24 @@ trexio_exit_code trexio_text_write_determinant_coefficient(trexio_t* const file,
  rc = fclose(f_wSize);
  if (rc != 0) return TREXIO_FILE_ERROR;

  /* Additional part for the trexio_text_has_group to work */
  const char det_file_name[256] = "/determinant.txt";

  memset (file_full_path, 0, TREXIO_MAX_FILENAME_LENGTH);
  /* Copy directory name in file_full_path */
  strncpy (file_full_path, file->file_name, TREXIO_MAX_FILENAME_LENGTH);
  /* Append name of the file with sparse data */
  strncat (file_full_path, det_file_name,
           TREXIO_MAX_FILENAME_LENGTH-strlen(det_file_name));

  bool file_exists = trexio_text_file_exists(file_full_path);

  /* Create an empty file for the trexio_text_has_group to work */
  if (!file_exists) {
    FILE *fp = fopen(file_full_path, "ab+");
    fclose(fp);
  }

  /* Exit upon success */
  return TREXIO_SUCCESS;
}
@@ -55,6 +55,14 @@ static int test_has_dset (const char* file_name, const back_end_t backend) {
  file = trexio_open(file_name, 'r', backend, &rc);
  assert (file != NULL);

  // check that the group exists
  rc = trexio_has_basis(file);
  assert(rc==TREXIO_SUCCESS);

  // check that the group does not exist
  rc = trexio_has_mo(file);
  assert(rc==TREXIO_HAS_NOT);

  // check that the previously written dataset exists
  rc = trexio_has_basis_nucleus_index(file);
  assert (rc == TREXIO_SUCCESS);

@@ -130,5 +138,3 @@ int main(void) {

  return 0;
}
@@ -86,6 +86,14 @@ static int test_has_dset_sparse (const char* file_name, const back_end_t backend
  assert (file != NULL);
  assert (rc == TREXIO_SUCCESS);

  // check that the group exists
  rc = trexio_has_mo_2e_int(file);
  assert(rc==TREXIO_SUCCESS);

  // check that the group does not exist
  rc = trexio_has_rdm(file);
  assert(rc==TREXIO_HAS_NOT);

  // first check that mo_2e_int_eri_lr does not exist (we only write the non-lr component in this unit test)
  rc = trexio_has_mo_2e_int_eri_lr(file);
  assert(rc==TREXIO_HAS_NOT);

@@ -147,7 +155,7 @@ static int test_read_dset_sparse (const char* file_name, const back_end_t backend
  assert(index_read[4*offset_data_read] == 4 * (int32_t) (offset_file_read-offset));

  // now attempt to read so that one encounters end of file during reading (i.e. offset_file_read + chunk_read > size_max)
  offset_file_read = 97;
  offset_file_read = 97L;
  offset_data_read = 1;
  int64_t eof_read_size_check = SIZE - offset_file_read; // if offset_file_read=97 => only 3 integrals will be read out of total of 100

@@ -159,11 +167,6 @@ static int test_read_dset_sparse (const char* file_name, const back_end_t backend
  assert(chunk_read == eof_read_size_check);
  assert(index_read[4*size_r-1] == 0);
  assert(index_read[4*offset_data_read] == 4 * (int32_t) (offset_file_read-offset));
  /*
  for(int i=0; i<size_r; ++i){
    printf("%d %lf\n", index_read[4*i], value_read[i]);
  }
  */

  // close current session
  rc = trexio_close(file);
@@ -63,6 +63,8 @@ subroutine test_write(file_name, back_end)

  character(len=:), allocatable :: sym_str
  character(len=:), allocatable :: label(:)
  double precision, allocatable :: energy(:)
  integer         , allocatable :: spin(:)

  ! sparse data
  integer(4) :: index_sparse_ao_2e_int_eri(4,100)

@@ -143,6 +145,12 @@ subroutine test_write(file_name, back_end)
  rc = trexio_has_determinant_list(trex_file)
  call trexio_assert(rc, TREXIO_HAS_NOT, 'SUCCESS HAS NOT 4')

  rc = trexio_has_nucleus(trex_file)
  call trexio_assert(rc, TREXIO_HAS_NOT, 'SUCCESS HAS NOT 5')

  rc = trexio_has_ao_2e_int(trex_file)
  call trexio_assert(rc, TREXIO_HAS_NOT, 'SUCCESS HAS NOT 6')

  rc = trexio_write_nucleus_num(trex_file, nucleus_num)
  call trexio_assert(rc, TREXIO_SUCCESS, 'SUCCESS WRITE NUM')

@@ -177,12 +185,29 @@ subroutine test_write(file_name, back_end)
    call trexio_assert(rc, TREXIO_SUCCESS, 'SUCCESS WRITE MO NUM')
  endif

  allocate(energy(mo_num))
  do i=1,mo_num
    energy(i) = dble(i)-100.d0
  enddo
  rc = trexio_write_mo_energy(trex_file, energy)
  call trexio_assert(rc, TREXIO_SUCCESS, 'SUCCESS WRITE ENERGY')
  deallocate(energy)

  allocate(spin(mo_num))
  spin(:) = 0
  do i=mo_num/2+1,mo_num
    spin(i) = 1
  enddo
  rc = trexio_write_mo_spin(trex_file, spin)
  call trexio_assert(rc, TREXIO_SUCCESS, 'SUCCESS WRITE SPIN')
  deallocate(spin)


  offset = 0
  do i = 1,n_buffers
    rc = trexio_write_ao_2e_int_eri(trex_file, offset, buf_size_sparse, &
                                    index_sparse_ao_2e_int_eri(1,offset+1), &
                                    value_sparse_ao_2e_int_eri(offset+1))
    call trexio_assert(rc, TREXIO_SUCCESS, 'SUCCESS WRITE SPARSE')
    offset = offset + buf_size_sparse
  enddo

@@ -190,7 +215,7 @@ subroutine test_write(file_name, back_end)
  offset = 0
  do i = 1,n_buffers
    rc = trexio_write_determinant_list(trex_file, offset, buf_size_det, &
                                       det_list(1,offset+1))
    call trexio_assert(rc, TREXIO_SUCCESS, 'SUCCESS WRITE DET LIST')
    offset = offset + buf_size_det
  enddo

@@ -207,6 +232,12 @@ subroutine test_write(file_name, back_end)
  rc = trexio_has_determinant_list(trex_file)
  call trexio_assert(rc, TREXIO_SUCCESS, 'SUCCESS HAS 4')

  rc = trexio_has_nucleus(trex_file)
  call trexio_assert(rc, TREXIO_SUCCESS, 'SUCCESS HAS 5')

  rc = trexio_has_ao_2e_int(trex_file)
  call trexio_assert(rc, TREXIO_SUCCESS, 'SUCCESS HAS 6')

  rc = trexio_close(trex_file)
  call trexio_assert(rc, TREXIO_SUCCESS, 'SUCCESS CLOSE')

@@ -240,6 +271,9 @@ subroutine test_read(file_name, back_end)
  character(len=4) :: label(12) ! also works with allocatable arrays

  character(len=32) :: sym_str
  integer :: mo_num
  double precision, allocatable :: energy(:)
  integer         , allocatable :: spin(:)

  ! sparse data
  integer(4) :: index_sparse_ao_2e_int_eri(4,20)

@@ -346,8 +380,8 @@ subroutine test_read(file_name, back_end)


  rc = trexio_read_ao_2e_int_eri(trex_file, offset_read, read_buf_size, &
                                 index_sparse_ao_2e_int_eri(1, offset_data_read + 1), &
                                 value_sparse_ao_2e_int_eri(offset_data_read + 1))
  !do i = 1,20
  !  write(*,*) index_sparse_ao_2e_int_eri(1,i)
  !enddo

@@ -364,8 +398,8 @@ subroutine test_read(file_name, back_end)
  ! attempt to read reaching EOF: should return TREXIO_END and
  ! NOT increment the existing values in the buffer (only update with what has been read)
  rc = trexio_read_ao_2e_int_eri(trex_file, offset_eof, read_buf_size, &
                                 index_sparse_ao_2e_int_eri(1, offset_data_eof + 1), &
                                 value_sparse_ao_2e_int_eri(offset_data_eof + 1))
  !do i = 1,20
  !  write(*,*) index_sparse_ao_2e_int_eri(1,i)
  !enddo

@@ -403,7 +437,7 @@ subroutine test_read(file_name, back_end)

  ! read a chunk of determinants
  rc = trexio_read_determinant_list(trex_file, offset_det_read, read_buf_det_size, &
                                    det_list(1, offset_det_data_read + 1))
  !do i = 1,50
  !  write(*,*) det_list(1,i)
  !enddo

@@ -443,6 +477,26 @@ subroutine test_read(file_name, back_end)
    call exit(-1)
  endif

  rc = trexio_read_mo_num(trex_file, mo_num)
  call trexio_assert(rc, TREXIO_SUCCESS)

  allocate(spin(mo_num), energy(mo_num))
  rc = trexio_read_mo_energy(trex_file, energy)
  call trexio_assert(rc, TREXIO_SUCCESS)

  if (energy(10) /= -90.d0) then
    print *, 'Failure to read MO energy: ', energy(10)
    call exit(-1)
  end if

  rc = trexio_read_mo_spin(trex_file, spin)
  call trexio_assert(rc, TREXIO_SUCCESS)

  if (sum(spin) /= mo_num/2) then
    print *, 'Failure to read MO spin', mo_num, sum(spin)
    call exit(-1)
  end if

  ! close the file
  rc = trexio_close(trex_file)
  call trexio_assert(rc, TREXIO_SUCCESS)
@@ -65,7 +65,7 @@ for fname in files_todo['dset_sparse']:

# populate group-related functions with mixed scheme
for fname in files_todo['group']:
    # recursive scheme for delete_group functions
    if 'delete' in fname:
    if 'delete' in fname or 'has' in fname:
        recursive_populate_file(fname, template_paths, group_dict)
    # mixed (iterative+recursive) scheme [text backend]
    else:
trex.org (180 changed lines)

@@ -70,19 +70,19 @@ means that the source code is not produced by the generator, but hand-written.

#+CALL: json(data=metadata, title="metadata")

#+RESULTS:
:RESULTS:
:results:
#+begin_src python :tangle trex.json
"metadata": {
    "code_num" : [ "dim", [] ]
  , "code" : [ "str", [ "metadata.code_num" ] ]
  , "author_num" : [ "dim", [] ]
  , "author" : [ "str", [ "metadata.author_num" ] ]
  , "package_version" : [ "str", [] ]
  , "description" : [ "str", [] ]
  , "unsafe" : [ "int", [] ]
} ,
#+end_src
:END:
:end:

* Electron (electron group)

@@ -197,20 +197,20 @@ If you encounter the aforementioned issue, please report it to our [[https://git

#+CALL: json(data=ecp, title="ecp")

#+RESULTS:
:RESULTS:
:results:
#+begin_src python :tangle trex.json
"ecp": {
    "max_ang_mom_plus_1" : [ "int" , [ "nucleus.num" ] ]
  , "z_core" : [ "int" , [ "nucleus.num" ] ]
  , "num" : [ "dim" , [] ]
  , "ang_mom" : [ "int" , [ "ecp.num" ] ]
  , "nucleus_index" : [ "index", [ "ecp.num" ] ]
  , "exponent" : [ "float", [ "ecp.num" ] ]
  , "coefficient" : [ "float", [ "ecp.num" ] ]
  , "power" : [ "int" , [ "ecp.num" ] ]
} ,
#+end_src
:END:
:end:

** Example

@@ -324,22 +324,22 @@ power = [

#+CALL: json(data=basis, title="basis")

#+RESULTS:
:RESULTS:
:results:
#+begin_src python :tangle trex.json
"basis": {
    "type" : [ "str" , [] ]
  , "prim_num" : [ "dim" , [] ]
  , "shell_num" : [ "dim" , [] ]
  , "nucleus_index" : [ "index", [ "basis.shell_num" ] ]
  , "shell_ang_mom" : [ "int" , [ "basis.shell_num" ] ]
  , "shell_factor" : [ "float", [ "basis.shell_num" ] ]
  , "shell_index" : [ "index", [ "basis.prim_num" ] ]
  , "exponent" : [ "float", [ "basis.prim_num" ] ]
  , "coefficient" : [ "float", [ "basis.prim_num" ] ]
  , "prim_factor" : [ "float", [ "basis.prim_num" ] ]
} ,
#+end_src
:END:
:end:

** Example

@@ -470,16 +470,16 @@ prim_factor =

#+CALL: json(data=ao, title="ao")

#+RESULTS:
:RESULTS:
:results:
#+begin_src python :tangle trex.json
"ao": {
    "cartesian" : [ "int" , [] ]
  , "num" : [ "dim" , [] ]
  , "shell" : [ "index", [ "ao.num" ] ]
  , "normalization" : [ "float", [ "ao.num" ] ]
} ,
#+end_src
:END:
:end:

** One-electron integrals (~ao_1e_int~ group)
:PROPERTIES:

@@ -514,22 +514,22 @@ prim_factor =

#+CALL: json(data=ao_1e_int, title="ao_1e_int")

#+RESULTS:
:RESULTS:
:results:
#+begin_src python :tangle trex.json
"ao_1e_int": {
    "overlap" : [ "float", [ "ao.num", "ao.num" ] ]
  , "kinetic" : [ "float", [ "ao.num", "ao.num" ] ]
  , "potential_n_e" : [ "float", [ "ao.num", "ao.num" ] ]
  , "ecp" : [ "float", [ "ao.num", "ao.num" ] ]
  , "core_hamiltonian" : [ "float", [ "ao.num", "ao.num" ] ]
  , "overlap_im" : [ "float", [ "ao.num", "ao.num" ] ]
  , "kinetic_im" : [ "float", [ "ao.num", "ao.num" ] ]
  , "potential_n_e_im" : [ "float", [ "ao.num", "ao.num" ] ]
  , "ecp_im" : [ "float", [ "ao.num", "ao.num" ] ]
  , "core_hamiltonian_im" : [ "float", [ "ao.num", "ao.num" ] ]
} ,
#+end_src
:END:
:end:

** Two-electron integrals (~ao_2e_int~ group)
:PROPERTIES:

@@ -581,23 +581,27 @@ prim_factor =

| ~class~      | ~str~   | ~(mo.num)~ | Choose among: Core, Inactive, Active, Virtual, Deleted |
| ~symmetry~   | ~str~   | ~(mo.num)~ | Symmetry in the point group                            |
| ~occupation~ | ~float~ | ~(mo.num)~ | Occupation number                                      |
| ~energy~     | ~float~ | ~(mo.num)~ | For canonical MOs, corresponding eigenvalue            |
| ~spin~       | ~int~   | ~(mo.num)~ | For UHF wave functions, 0 is $\alpha$ and 1 is $\beta$ |

#+CALL: json(data=mo, title="mo")

#+RESULTS:
:RESULTS:
:results:
#+begin_src python :tangle trex.json
"mo": {
    "type" : [ "str" , [] ]
  , "num" : [ "dim" , [] ]
  , "coefficient" : [ "float", [ "mo.num", "ao.num" ] ]
  , "coefficient_im" : [ "float", [ "mo.num", "ao.num" ] ]
  , "class" : [ "str" , [ "mo.num" ] ]
  , "symmetry" : [ "str" , [ "mo.num" ] ]
  , "occupation" : [ "float", [ "mo.num" ] ]
  , "energy" : [ "float", [ "mo.num" ] ]
  , "spin" : [ "int" , [ "mo.num" ] ]
} ,
#+end_src
:END:
:end:
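A minimal sketch of how the new ~energy~ and ~spin~ fields of the ~mo~ group can be filled from C (illustrative only, not part of the tangled sources; it assumes the generated accessors ~trexio_write_mo_num~, ~trexio_write_mo_energy~ and ~trexio_write_mo_spin~ with 32-bit integers for ~num~ and ~spin~, and the helper name below is hypothetical):

#+begin_src c
#include <stdint.h>
#include "trexio.h"

/* Illustrative helper (hypothetical name): store canonical MO eigenvalues and
   UHF-style spins (0 = alpha, 1 = beta) for mo_num orbitals. */
trexio_exit_code
write_mo_energy_spin (trexio_t* const file, const int32_t mo_num,
                      const double* energy, const int32_t* spin)
{
  trexio_exit_code rc;

  rc = trexio_write_mo_num(file, mo_num);
  if (rc != TREXIO_SUCCESS) return rc;

  /* energy is a (mo.num) array of floats */
  rc = trexio_write_mo_energy(file, energy);
  if (rc != TREXIO_SUCCESS) return rc;

  /* spin is a (mo.num) array of integers */
  return trexio_write_mo_spin(file, spin);
}
#+end_src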
** One-electron integrals (~mo_1e_int~ group)

@@ -622,22 +626,22 @@ prim_factor =

#+CALL: json(data=mo_1e_int, title="mo_1e_int")

#+RESULTS:
:RESULTS:
:results:
#+begin_src python :tangle trex.json
"mo_1e_int": {
    "overlap" : [ "float", [ "mo.num", "mo.num" ] ]
  , "kinetic" : [ "float", [ "mo.num", "mo.num" ] ]
  , "potential_n_e" : [ "float", [ "mo.num", "mo.num" ] ]
  , "ecp" : [ "float", [ "mo.num", "mo.num" ] ]
  , "core_hamiltonian" : [ "float", [ "mo.num", "mo.num" ] ]
  , "overlap_im" : [ "float", [ "mo.num", "mo.num" ] ]
  , "kinetic_im" : [ "float", [ "mo.num", "mo.num" ] ]
  , "potential_n_e_im" : [ "float", [ "mo.num", "mo.num" ] ]
  , "ecp_im" : [ "float", [ "mo.num", "mo.num" ] ]
  , "core_hamiltonian_im" : [ "float", [ "mo.num", "mo.num" ] ]
} ,
#+end_src
:END:
:end:

** Two-electron integrals (~mo_2e_int~ group)

@@ -701,15 +705,15 @@ prim_factor =

#+CALL: json(data=determinant, title="determinant")

#+RESULTS:
:RESULTS:
:results:
#+begin_src python :tangle trex.json
"determinant": {
    "num" : [ "dim readonly" , [] ]
  , "list" : [ "int special" , [ "determinant.num" ] ]
  , "coefficient" : [ "float special", [ "determinant.num", "state.num" ] ]
} ,
#+end_src
:END:
:end:

* Configuration state functions (csf group)

@@ -763,14 +767,14 @@ prim_factor =

#+CALL: json(data=state, title="state")

#+RESULTS:
:RESULTS:
:results:
#+begin_src python :tangle trex.json
"state": {
    "num" : [ "dim", [] ]
  , "label" : [ "str", [ "state.num" ] ]
} ,
#+end_src
:END:
:end:

* Reduced density matrices (rdm group)

@@ -855,15 +859,15 @@ prim_factor =

#+CALL: json(data=cell, title="cell")

#+RESULTS:
:RESULTS:
:results:
#+begin_src python :tangle trex.json
"cell": {
    "a" : [ "float", [ "3" ] ]
  , "b" : [ "float", [ "3" ] ]
  , "c" : [ "float", [ "3" ] ]
} ,
#+end_src
:END:
:end:

* Periodic boundary calculations (pbc group)

@@ -876,14 +880,14 @@ prim_factor =

#+CALL: json(data=pbc, title="pbc")

#+RESULTS:
:RESULTS:
:results:
#+begin_src python :tangle trex.json
"pbc": {
    "periodic" : [ "int" , [] ]
  , "k_point" : [ "float", [ "3" ] ]
} ,
#+end_src
:END:
:end:

* Quantum Monte Carlo data (qmc group)