sandbox/acastillo/output_fields/vtkhdf/output_vtkhdf.h

    #ifndef OUTPUT_VTKHDF_H
    #define OUTPUT_VTKHDF_H

    VTKHDF File Format

    These routines are compatible with the VTKHDF Model and Format which can be read using Paraview or Visit.

    Data is composed of large datasets stored using the Hierarchical Data Format HDF5. As the name implies, data is organized following a hierarchical structure. HDF5 files can be read without any prior knowledge of the stored data. The type, rank, dimension and other properties of each array are stored inside the file in the form of meta-data. Additional features include support for a large number of objects, file compression, a parallel I/O implementation through the MPI-IO or MPI POSIX drivers. Using this format requires the HDF5 library, which is usually installed in most computing centers or may be installed locally through a repository. Linking is automatic but requires the environment variables HDF5_INCDIR and HDF5_LIBDIR, which are usually set when you load the module hdf5. You might also add something like this to your Makefile

        HDFLIBS=-I$(HDF5_INCDIR) -L$(HDF5_LIBDIR) -lhdf5 -lhdf5_hl
        LIBS+=$(HDFLIBS)

    To use HDF5 we need to “declare” the library in the qcc system. This should not be the case, but HDF5 is a messy library that just won’t pass through qcc

        echo "@include <hdf5.h>" > $BASILISK/ast/std/hdf5.h
        echo "@include <hdf5_hl.h>" > $BASILISK/ast/std/hdf5_hl.h

    We also need to declare the datatypes at the end of $BASILISK/ast/defaults.h

        echo "typedef hid_t, hsize_t, herr_t, H5L_info_t;" >> $BASILISK/ast/defaults.h

    An explanation of why these declarations are needed can be found in the Basilisk documentation on interfacing external libraries with qcc (the original link appears to have been lost — see basilisk.fr).

    // Check if HDF5 is available by testing if the header can be included
    #if __has_include(<hdf5.h>)
      #define HAVE_HDF5 1
      #pragma autolink -lhdf5
      #include <hdf5.h>
    #else
      #warning "HDF5 library not found. VTKHDF output functions will be disabled."
    #endif

    preamble: define some useful macros

    // Skip cells that are too far from the slicing plane n·x = _alpha.
    // Computes the signed distance `alpha` (in units of the cell size Delta;
    // x, y, z and Delta are presumably the Basilisk per-cell coordinates —
    // TODO confirm against the foreach() iterators in output_vtkhdf_slice.h)
    // and `continue`s the enclosing loop when |alpha| > 0.87.
    // NOTE(review): deliberately NOT wrapped in do{...}while(0) — the macro
    // must declare `alpha` into the caller's scope and its `continue` must
    // act on the caller's loop. Only safe to expand directly inside a loop body.
    #define shortcut_slice(n, _alpha)                                \
      double alpha = (_alpha - n.x * x - n.y * y - n.z * z) / Delta; \
      if (fabs(alpha) > 0.87)                                        \
        continue;
    
    #include "output_vtkhdf_helpers.h"

    output_vtkhdf(): Exports full 2D (or 3D) fields.

    This function writes one VTKHDF file which can be read using Paraview. The VTKHDF file format relies on HDF5. It is meant to provide good I/O performance as well as robust and flexible parallel I/O capabilities. Scalar and vector fields defined at cell centers are stored on an unstructured grid with cell type VTK_QUAD in 2D, or VTK_HEXAHEDRON in 3D.

    The arguments and their default values are:

    list
    pointer to the list of scalar fields to be exported.
    vlist
    pointer to the list of vector fields to be exported.
    name
    Output file name generally uses the .vtkhdf extension.
    compression_level
    Level of compression to use when writing data to the HDF5 file (default=9).

    Example Usage

    scalar * list = {a,b};
    vector * vlist = {c,d};
    output_vtkhdf(list, vlist, "domain.vtkhdf");
    // Write the full 2D/3D domain to a VTKHDF (HDF5-based) file readable by
    // Paraview. Cell-centered scalar fields (`list`) and vector fields (`vlist`)
    // are stored on an unstructured grid of VTK_QUAD (2D) or VTK_HEXAHEDRON (3D)
    // cells under the group "/VTKHDF". Parallel-safe: per-rank offsets are
    // computed with npe() and hyperslab selections.
    //
    // Parameters:
    //   list               - scalar fields to export (may be NULL/empty)
    //   vlist              - vector fields to export (may be NULL/empty)
    //   name               - output file name (default "domain.vtkhdf")
    //   compression_level  - HDF5 deflate level, 0-9 (default 9)
    //
    // If HDF5 is unavailable at compile time the call is a no-op that prints a
    // one-time warning on rank 0.
    trace void output_vtkhdf(scalar *list, vector *vlist, char *name = "domain.vtkhdf", int compression_level = 9){
    #ifdef HAVE_HDF5
      hid_t file_id;     // HDF5 file ID
      hid_t group_id;    // HDF5 group ID
      hid_t subgroup_id; // HDF5 subgroup ID
      hsize_t count[2];  // Hyperslab selection parameters
      hsize_t offset[2]; // Offset for hyperslab
      hsize_t dims[1] = {2};

      // Define a scalar mask for periodic conditions (1 everywhere for the
      // full-domain output; sliced variants mask cells out)
      scalar per_mask[];
      foreach () {
        per_mask[] = 1.;
      }

      // VTK cell types: VTK_QUAD (=9, 4 vertices) in 2D,
      // VTK_HEXAHEDRON (=12, 8 vertices) in 3D
      int type, noffset;
      #if dimension == 2
        type = 9;
        noffset = 4;
      #elif dimension == 3
        type = 12;
        noffset = 8;
      #endif

      // Obtain the number of points and cells and get a marker to reconstruct the topology
      long num_points = 0, num_points_loc = 0;
      long num_cells = 0,  num_cells_loc = 0;

      count_points_and_cells(&num_points, &num_cells, &num_points_loc, &num_cells_loc, per_mask);

      long num_ids = num_cells*noffset;
      long num_ids_loc = num_cells_loc*noffset;

      // Centralized chunk size calculation
      hsize_t chunk_size = compute_chunk_size(num_cells);

      // Calculate per-rank offsets for parallel I/O
      long offset_points[npe()], offset_cells[npe()], offset_ids[npe()], offset_offset[npe()];
      calculate_offsets2(offset_offset, num_cells_loc+1,  offset);
      calculate_offsets2(offset_ids,    num_ids_loc,      offset);
      calculate_offsets2(offset_cells,  num_cells_loc,    offset);
      calculate_offsets2(offset_points, num_points_loc,   offset);

      // Initialize marker for topology reconstruction
      vertex scalar marker[];
      initialize_marker(marker, offset, 0);

      // Create a new HDF5 file using helper; abort quietly on failure
      // (helper is expected to have reported the error)
      file_id = create_hdf5_file(name);
      if (file_id < 0) return;

      // Create the top-level "VTKHDF" group
      group_id = H5Gcreate(file_id, "VTKHDF", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

      // Create "Version", "Type" and "Description" attributes.
      // String attribute lengths must match the actual string, as for
      // "UnstructuredGrid" (16 chars) below.
      dims[0] = 2;
      int version_data[2] = {2, 1};
      create_attribute(group_id, "Version", version_data, dims);
      create_attribute_type(group_id, "Type", "UnstructuredGrid", 16);
      // FIX: previously passed length 57 for a 39-character literal, reading
      // past the end of the string; restore the full description and pass its
      // true length (56 chars).
      create_attribute_type(group_id, "Description", "Simulation performed using Basilisk (http://basilisk.fr)", 56);

      // Write "NumberOfConnectivityIds", "NumberOfPoints", "NumberOfCells"
      // (one entry per MPI rank)
      dims[0] = npe();
      write_simple_dataset(group_id, "NumberOfConnectivityIds", offset_ids, dims);
      write_simple_dataset(group_id, "NumberOfPoints", offset_points, dims);
      write_simple_dataset(group_id, "NumberOfCells", offset_cells, dims);

      // Populate and write the points dataset (always 3 components, even in 2D)
      double *points_dset;
      populate_points_dset(&points_dset, num_points_loc, offset_points, count, offset);
      write_dataset(group_id, count, offset, "Points", num_points, num_points_loc, 3, points_dset, H5T_NATIVE_DOUBLE, HDF5_CHUNKED, chunk_size, compression_level);
      free(points_dset);

      // Populate and write the cell-types dataset (one byte per cell)
      char * types_dset;
      populate_types_dset(&types_dset, type, num_cells_loc, offset_cells, count, offset);
      write_dataset(group_id, count, offset, "Types", num_cells, num_cells_loc, 1, types_dset, H5T_STD_U8LE, HDF5_CHUNKED, chunk_size, compression_level);
      free(types_dset);

      // Populate and write the connectivity dataset (noffset ids per cell)
      long *topo_dset;
      populate_topo_dset(&topo_dset, num_cells_loc, offset_ids, count, offset, per_mask, marker);
      write_dataset(group_id, count, offset, "Connectivity", num_ids, num_ids_loc, 1, topo_dset, H5T_NATIVE_LONG, HDF5_CHUNKED, chunk_size, compression_level);
      free(topo_dset);

      // Populate and write the offsets dataset (num_cells_loc+1 entries per
      // rank, hence the global size num_cells+npe())
      long *offsets_dset;
      populate_offsets_dset(&offsets_dset, noffset, num_cells_loc+1, offset_offset, count, offset);
      write_dataset(group_id, count, offset, "Offsets", num_cells+npe(), num_cells_loc+1, 1, offsets_dset, H5T_NATIVE_LONG, HDF5_CHUNKED, chunk_size, compression_level);
      free(offsets_dset);

      // Create subgroup "CellData" holding all exported fields
      subgroup_id = H5Gcreate(group_id, "CellData", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

      // Reuse one buffer for all scalar datasets
      double *scalar_dset = (double *)malloc(num_cells_loc * sizeof(double));
      for (scalar s in list) {
        populate_scalar_dset(s, scalar_dset, num_cells_loc, offset_cells, count, offset, per_mask);
        write_dataset(subgroup_id, count, offset, s.name, num_cells, num_cells_loc, 1, scalar_dset, H5T_NATIVE_DOUBLE, HDF5_CHUNKED, chunk_size, compression_level);
      }
      free(scalar_dset);

      // Reuse one buffer for all vector datasets (3 components per cell;
      // dataset is named after the x-component)
      double *vector_dset = (double *)malloc(num_cells_loc * 3 * sizeof(double));
      for (vector v in vlist) {
        populate_vector_dset(v, vector_dset, num_cells_loc, offset_cells, count, offset, per_mask);
        write_dataset(subgroup_id, count, offset, v.x.name, num_cells, num_cells_loc, 3, vector_dset, H5T_NATIVE_DOUBLE, HDF5_CHUNKED, chunk_size, compression_level);
      }
      free(vector_dset);
      H5Gclose(subgroup_id);

      // Create empty subgroup "FieldData" (required by the VTKHDF layout)
      subgroup_id = H5Gcreate(group_id, "FieldData", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
      H5Gclose(subgroup_id);

      // Create empty subgroup "PointData" (required by the VTKHDF layout)
      subgroup_id = H5Gcreate(group_id, "PointData", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
      H5Gclose(subgroup_id);
      H5Gclose(group_id);

      // Flush and close HDF5 resources
      H5Fflush(file_id, H5F_SCOPE_GLOBAL);
      H5Fclose(file_id);
    #else
      // HDF5 not available - print warning once (rank 0 only) and return
      static int warning_printed = 0;
      if (!warning_printed && pid() == 0) {
        fprintf(stderr, "Warning: output_vtkhdf() called but HDF5 is not available. Output skipped.\n");
        warning_printed = 1;
      }
    #endif
    }
    
    #if dimension > 2
    #include "output_vtkhdf_slice.h"
    #endif
    #include "output_vtkhdf_box.h"

    postamble: delete macros

    #undef shortcut_slice
    
    #endif