001/*****************************************************************************
002 * Copyright by The HDF Group.                                               *
003 * Copyright by the Board of Trustees of the University of Illinois.         *
004 * All rights reserved.                                                      *
005 *                                                                           *
006 * This file is part of the HDF Java Products distribution.                  *
007 * The full copyright notice, including terms governing use, modification,   *
008 * and redistribution, is contained in the COPYING file, which can be found  *
009 * at the root of the source code distribution tree,                         *
010 * or in https://www.hdfgroup.org/licenses.                                  *
011 * If you do not have access to either file, you may request a copy from     *
012 * help@hdfgroup.org.                                                        *
013 ****************************************************************************/
014
015package hdf.object.h5;
016
017import java.lang.reflect.Array;
018import java.math.BigDecimal;
019import java.math.BigInteger;
020import java.text.DecimalFormat;
021import java.util.ArrayList;
022import java.util.Arrays;
023import java.util.Iterator;
024import java.util.List;
025import java.util.Vector;
026
027import hdf.object.Attribute;
028import hdf.object.CompoundDS;
029import hdf.object.Dataset;
030import hdf.object.Datatype;
031import hdf.object.FileFormat;
032import hdf.object.Group;
033import hdf.object.HObject;
034import hdf.object.MetaDataContainer;
035import hdf.object.Utils;
036import hdf.object.h5.H5Datatype;
037import hdf.object.h5.H5MetaDataContainer;
038import hdf.object.h5.H5ReferenceType;
039
040import hdf.hdf5lib.H5;
041import hdf.hdf5lib.HDF5Constants;
042import hdf.hdf5lib.HDFArray;
043import hdf.hdf5lib.HDFNativeData;
044import hdf.hdf5lib.exceptions.HDF5DataFiltersException;
045import hdf.hdf5lib.exceptions.HDF5Exception;
046import hdf.hdf5lib.exceptions.HDF5LibraryException;
047import hdf.hdf5lib.structs.H5O_info_t;
048import hdf.hdf5lib.structs.H5O_token_t;
049
050import org.slf4j.Logger;
051import org.slf4j.LoggerFactory;
052
053/**
054 * The H5CompoundDS class defines an HDF5 dataset of compound datatypes.
055 *
056 * An HDF5 dataset is an object composed of a collection of data elements, or raw data, and metadata that
057 * stores a description of the data elements, data layout, and all other information necessary to write, read,
058 * and interpret the stored data.
059 *
060 * A HDF5 compound datatype is similar to a struct in C or a common block in Fortran: it is a collection of
061 * one or more atomic types or small arrays of such types. Each member of a compound type has a name which is
062 * unique within that type, and a byte offset that determines the first byte (smallest byte address) of that
063 * member in a compound datum.
064 *
065 * For more information on HDF5 datasets and datatypes, read
066 * <a href="https://hdfgroup.github.io/hdf5/_h5_d__u_g.html#sec_dataset">HDF5 Datasets in HDF5 User Guide</a>
067 * <a href="https://hdfgroup.github.io/hdf5/_h5_t__u_g.html#sec_datatype">HDF5 Datatypes in HDF5 User
068 * Guide</a>
069 *
070 * There are two basic types of compound datasets: simple compound data and nested compound data. Members of a
071 * simple compound dataset have atomic datatypes. Members of a nested compound dataset are compound or array
072 * of compound data.
073 *
074 * Since Java does not understand C structures, we cannot directly read/write compound data values as in the
075 * following C example.
076 *
077 * <pre>
078 * typedef struct s1_t {
079 *         int    a;
080 *         float  b;
081 *         double c;
082 *         } s1_t;
083 *     s1_t       s1[LENGTH];
084 *     ...
085 *     H5Dwrite(..., s1);
086 *     H5Dread(..., s1);
087 * </pre>
088 *
089 * Values of compound data fields are stored in java.util.Vector object. We read and write compound data by
090 * fields instead of compound structure. As for the example above, the java.util.Vector object has three
091 * elements: int[LENGTH], float[LENGTH] and double[LENGTH]. Since Java understands the primitive datatypes of
092 * int, float and double, we will be able to read/write the compound data by field.
093 *
094 * @version 1.1 9/4/2007
095 * @author Peter X. Cao
096 */
097public class H5CompoundDS extends CompoundDS implements MetaDataContainer {
098    private static final long serialVersionUID = -5968625125574032736L;
099
100    private static final Logger log = LoggerFactory.getLogger(H5CompoundDS.class);
101
102    /**
103     * The metadata object for this data object. Members of the metadata are instances of Attribute.
104     */
105    private H5MetaDataContainer objMetadata;
106
107    /** the object properties */
108    private H5O_info_t objInfo;
109
110    /** flag to indicate if the dataset is an external dataset */
111    private boolean isExternal = false;
112
113    /** flag to indicate if the dataset is a virtual dataset */
114    private boolean isVirtual = false;
115    /** the list of virtual names */
116    private List<String> virtualNameList;
117
118    /**
119     * Constructs an instance of a HDF5 compound dataset with given file, dataset name and path.
120     *
121     * The dataset object represents an existing dataset in the file. For example, new
122     * H5CompoundDS(file, "dset1", "/g0/") constructs a dataset object that corresponds to the
123     * dataset,"dset1", at group "/g0/".
124     *
125     * This object is usually constructed at FileFormat.open(), which loads the file structure and
126     * object information into memory. It is rarely used elsewhere.
127     *
128     * @param theFile
129     *            the file that contains the data object.
130     * @param theName
131     *            the name of the data object, e.g. "dset".
132     * @param thePath
133     *            the full path of the data object, e.g. "/arrays/".
134     */
135    public H5CompoundDS(FileFormat theFile, String theName, String thePath)
136    {
137        this(theFile, theName, thePath, null);
138    }
139
140    /**
141     * @deprecated Not for public use in the future.<br>
142     *             Using {@link #H5CompoundDS(FileFormat, String, String)}
143     *
144     * @param theFile
145     *            the file that contains the data object.
146     * @param theName
147     *            the name of the data object, e.g. "dset".
148     * @param thePath
149     *            the full path of the data object, e.g. "/arrays/".
150     * @param oid
151     *            the oid of the data object.
152     */
153    @Deprecated
154    public H5CompoundDS(FileFormat theFile, String theName, String thePath, long[] oid)
155    {
156        super(theFile, theName, thePath, oid);
157        objMetadata = new H5MetaDataContainer(theFile, theName, thePath, this);
158
159        if (theFile != null) {
160            if (oid == null) {
161                // retrieve the object ID
162                byte[] refBuf = null;
163                try {
164                    refBuf =
165                        H5.H5Rcreate_object(theFile.getFID(), this.getFullName(), HDF5Constants.H5P_DEFAULT);
166                    this.oid = HDFNativeData.byteToLong(refBuf);
167                    log.trace("constructor REF {} to OID {}", refBuf, this.oid);
168                }
169                catch (Exception ex) {
170                    log.debug("constructor ID {} for {} failed H5Rcreate_object", theFile.getFID(),
171                              this.getFullName());
172                }
173                finally {
174                    if (refBuf != null)
175                        H5.H5Rdestroy(refBuf);
176                }
177            }
178            log.trace("constructor OID {}", this.oid);
179            try {
180                objInfo = H5.H5Oget_info_by_name(theFile.getFID(), this.getFullName(),
181                                                 HDF5Constants.H5O_INFO_BASIC, HDF5Constants.H5P_DEFAULT);
182            }
183            catch (Exception ex) {
184                objInfo = new H5O_info_t(-1L, null, 0, 0, 0L, 0L, 0L, 0L, 0L);
185            }
186        }
187        else {
188            this.oid = null;
189            objInfo  = new H5O_info_t(-1L, null, 0, 0, 0L, 0L, 0L, 0L, 0L);
190        }
191    }
192
193    /*
194     * (non-Javadoc)
195     *
196     * @see hdf.object.HObject#open()
197     */
198    @Override
199    public long open()
200    {
201        long did = HDF5Constants.H5I_INVALID_HID;
202
203        if (getFID() < 0)
204            log.trace("open(): file id for:{} is invalid", getPath() + getName());
205        else {
206            try {
207                did = H5.H5Dopen(getFID(), getPath() + getName(), HDF5Constants.H5P_DEFAULT);
208                log.trace("open(): did={}", did);
209            }
210            catch (HDF5Exception ex) {
211                log.debug("open(): Failed to open dataset {}", getPath() + getName(), ex);
212                did = HDF5Constants.H5I_INVALID_HID;
213            }
214        }
215
216        return did;
217    }
218
219    /*
220     * (non-Javadoc)
221     *
222     * @see hdf.object.HObject#close(int)
223     */
224    @Override
225    public void close(long did)
226    {
227        if (did >= 0) {
228            try {
229                H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_LOCAL);
230            }
231            catch (Exception ex) {
232                log.debug("close(): H5Fflush(did {}) failure: ", did, ex);
233            }
234            try {
235                H5.H5Dclose(did);
236            }
237            catch (HDF5Exception ex) {
238                log.debug("close(): H5Dclose(did {}) failure: ", did, ex);
239            }
240        }
241    }
242
243    /**
244     * Retrieves datatype and dataspace information from file and sets the dataset
245     * in memory.
246     *
247     * The init() is designed to support lazy operation in a dataset object. When a
248     * data object is retrieved from file, the datatype, dataspace and raw data are
249     * not loaded into memory. When it is asked to read the raw data from file,
250     * init() is first called to get the datatype and dataspace information, then
251     * load the raw data from file.
252     *
253     * init() is also used to reset the selection of a dataset (start, stride and
254     * count) to the default, which is the entire dataset for 1D or 2D datasets. In
255     * the following example, init() at step 1) retrieves datatype and dataspace
256     * information from file. getData() at step 3) reads only one data point. init()
257     * at step 4) resets the selection to the whole dataset. getData() at step 4)
258     * reads the values of whole dataset into memory.
259     *
260     * <pre>
261     * dset = (Dataset) file.get(NAME_DATASET);
262     *
263     * // 1) get datatype and dataspace information from file
264     * dset.init();
265     * rank = dset.getRank(); // rank = 2, a 2D dataset
266     * count = dset.getSelectedDims();
267     * start = dset.getStartDims();
268     * dims = dset.getDims();
269     *
270     * // 2) select only one data point
271     * for (int i = 0; i &lt; rank; i++) {
272     *     start[0] = 0;
273     *     count[i] = 1;
274     * }
275     *
276     * // 3) read one data point
277     * data = dset.getData();
278     *
279     * // 4) reset selection to the whole dataset
280     * dset.init();
281     *
282     * // 5) clean the memory data buffer
283     * dset.clearData();
284     *
285     * // 6) Read the whole dataset
286     * data = dset.getData();
287     * </pre>
288     */
289    @Override
290    public void init()
291    {
292        if (inited) {
293            resetSelection();
294            log.trace("init(): Dataset already initialized");
295            return; // already called. Initialize only once
296        }
297
298        long did     = HDF5Constants.H5I_INVALID_HID;
299        long tid     = HDF5Constants.H5I_INVALID_HID;
300        long sid     = HDF5Constants.H5I_INVALID_HID;
301        flatNameList = new Vector<>();
302        flatTypeList = new Vector<>();
303
304        did = open();
305        if (did >= 0) {
306            // check if it is an external or virtual dataset
307            long pid = HDF5Constants.H5I_INVALID_HID;
308            try {
309                pid = H5.H5Dget_create_plist(did);
310                try {
311                    int nfiles     = H5.H5Pget_external_count(pid);
312                    isExternal     = (nfiles > 0);
313                    int layoutType = H5.H5Pget_layout(pid);
314                    if (isVirtual = (layoutType == HDF5Constants.H5D_VIRTUAL)) {
315                        try {
316                            long vmaps = H5.H5Pget_virtual_count(pid);
317                            if (vmaps > 0) {
318                                virtualNameList = new Vector<>();
319                                for (long next = 0; next < vmaps; next++) {
320                                    try {
321                                        String fname = H5.H5Pget_virtual_filename(pid, next);
322                                        virtualNameList.add(fname);
323                                        log.trace("init(): virtualNameList[{}]={}", next, fname);
324                                    }
325                                    catch (Exception err) {
326                                        log.trace("init(): vds[{}] continue", next);
327                                    }
328                                }
329                            }
330                        }
331                        catch (Exception err) {
332                            log.debug("init(): vds count error: ", err);
333                        }
334                    }
335                    log.trace("init(): pid={} nfiles={} isExternal={} isVirtual={}", pid, nfiles, isExternal,
336                              isVirtual);
337                }
338                catch (Exception ex) {
339                    log.debug("init(): check if it is an external or virtual dataset: ", ex);
340                }
341            }
342            catch (Exception ex) {
343                log.debug("init(): H5Dget_create_plist() failure: ", ex);
344            }
345            finally {
346                try {
347                    H5.H5Pclose(pid);
348                }
349                catch (Exception ex) {
350                    log.debug("init(): H5Pclose(pid {}) failure: ", pid, ex);
351                }
352            }
353
354            try {
355                sid        = H5.H5Dget_space(did);
356                rank       = H5.H5Sget_simple_extent_ndims(sid);
357                space_type = H5.H5Sget_simple_extent_type(sid);
358                if (space_type == HDF5Constants.H5S_NULL)
359                    isNULL = true;
360                else
361                    isNULL = false;
362                tid = H5.H5Dget_type(did);
363                log.trace("init(): tid={} sid={} rank={}", tid, sid, rank);
364
365                if (rank == 0) {
366                    // a scalar data point
367                    isScalar = true;
368                    rank     = 1;
369                    dims     = new long[] {1};
370                    log.trace("init(): rank is a scalar data point");
371                }
372                else {
373                    isScalar = false;
374                    dims     = new long[rank];
375                    maxDims  = new long[rank];
376                    H5.H5Sget_simple_extent_dims(sid, dims, maxDims);
377                    log.trace("init(): rank={}, dims={}, maxDims={}", rank, dims, maxDims);
378                }
379
380                try {
381                    int nativeClass = H5.H5Tget_class(tid);
382                    if (nativeClass == HDF5Constants.H5T_REFERENCE) {
383                        long lsize = 1;
384                        if (rank > 0) {
385                            log.trace("init():rank={}, dims={}", rank, dims);
386                            for (int j = 0; j < dims.length; j++) {
387                                lsize *= dims[j];
388                            }
389                        }
390                        datatype = new H5ReferenceType(getFileFormat(), lsize, tid);
391                    }
392                    else
393                        datatype = new H5Datatype(getFileFormat(), tid);
394
395                    log.trace(
396                        "init(): tid={} has isText={} : isVLEN={} : isEnum={} : isUnsigned={} : isStdRef={} : isRegRef={}",
397                        tid, datatype.isText(), datatype.isVLEN(), ((H5Datatype)datatype).isEnum(),
398                        datatype.isUnsigned(), ((H5Datatype)datatype).isStdRef(),
399                        ((H5Datatype)datatype).isRegRef());
400
401                    H5Datatype.extractCompoundInfo((H5Datatype)datatype, "", flatNameList, flatTypeList);
402                }
403                catch (Exception ex) {
404                    log.debug("init(): failed to create datatype for dataset: ", ex);
405                    datatype = null;
406                }
407
408                // initialize member information
409                numberOfMembers = flatNameList.size();
410                log.trace("init(): numberOfMembers={}", numberOfMembers);
411
412                memberNames      = new String[numberOfMembers];
413                memberTypes      = new Datatype[numberOfMembers];
414                memberOrders     = new int[numberOfMembers];
415                isMemberSelected = new boolean[numberOfMembers];
416                memberDims       = new Object[numberOfMembers];
417
418                for (int i = 0; i < numberOfMembers; i++) {
419                    isMemberSelected[i] = true;
420                    memberOrders[i]     = 1;
421                    memberDims[i]       = null;
422
423                    try {
424                        memberTypes[i] = flatTypeList.get(i);
425                        log.trace("init()[{}]: memberTypes[{}]={}", i, i, memberTypes[i].getDescription());
426
427                        if (memberTypes[i].isArray()) {
428                            long mdim[]      = memberTypes[i].getArrayDims();
429                            int idim[]       = new int[mdim.length];
430                            int arrayNpoints = 1;
431
432                            for (int j = 0; j < idim.length; j++) {
433                                idim[j] = (int)mdim[j];
434                                arrayNpoints *= idim[j];
435                            }
436
437                            memberDims[i]   = idim;
438                            memberOrders[i] = arrayNpoints;
439                        }
440                    }
441                    catch (Exception ex) {
442                        log.debug("init()[{}]: memberTypes[{}] get failure: ", i, i, ex);
443                        memberTypes[i] = null;
444                    }
445
446                    try {
447                        memberNames[i] = flatNameList.get(i);
448                        log.trace("init()[{}]: memberNames[{}]={}", i, i, memberNames[i]);
449                    }
450                    catch (Exception ex) {
451                        log.debug("init()[{}]: memberNames[{}] get failure: ", i, i, ex);
452                        memberNames[i] = "null";
453                    }
454                } //  (int i=0; i<numberOfMembers; i++)
455
456                inited = true;
457            }
458            catch (HDF5Exception ex) {
459                numberOfMembers = 0;
460                memberNames     = null;
461                memberTypes     = null;
462                memberOrders    = null;
463                log.debug("init(): ", ex);
464            }
465            finally {
466                if (datatype != null)
467                    datatype.close(tid);
468
469                try {
470                    H5.H5Sclose(sid);
471                }
472                catch (HDF5Exception ex2) {
473                    log.debug("init(): H5Sclose(sid {}) failure: ", sid, ex2);
474                }
475            }
476
477            close(did);
478
479            startDims    = new long[rank];
480            selectedDims = new long[rank];
481
482            resetSelection();
483        }
484        else {
485            log.debug("init(): failed to open dataset");
486        }
487    }
488
489    /**
490     * Get the token for this object.
491     *
492     * @return true if it has any attributes, false otherwise.
493     */
494    public long[] getToken()
495    {
496        H5O_token_t token = objInfo.token;
497        return HDFNativeData.byteToLong(token.data);
498    }
499
500    /**
501     * Check if the object has any attributes attached.
502     *
503     * @return true if it has any attributes, false otherwise.
504     */
505    @Override
506    public boolean hasAttribute()
507    {
508        objInfo.num_attrs = objMetadata.getObjectAttributeSize();
509
510        if (objInfo.num_attrs < 0) {
511            long did = open();
512            if (did >= 0) {
513                objInfo.num_attrs = 0;
514
515                try {
516                    objInfo = H5.H5Oget_info(did);
517                }
518                catch (Exception ex) {
519                    objInfo.num_attrs = 0;
520                    log.debug("hasAttribute(): get object info failure: ", ex);
521                }
522                finally {
523                    close(did);
524                }
525                objMetadata.setObjectAttributeSize((int)objInfo.num_attrs);
526            }
527            else {
528                log.debug("hasAttribute(): could not open dataset");
529            }
530        }
531
532        log.trace("hasAttribute(): nAttributes={}", objInfo.num_attrs);
533        return (objInfo.num_attrs > 0);
534    }
535
536    /**
537     * Returns the datatype of the data object.
538     *
539     * @return the datatype of the data object.
540     */
541    @Override
542    public Datatype getDatatype()
543    {
544        if (!inited)
545            init();
546
547        if (datatype == null) {
548            long did = HDF5Constants.H5I_INVALID_HID;
549            long tid = HDF5Constants.H5I_INVALID_HID;
550
551            did = open();
552            if (did >= 0) {
553                try {
554                    tid             = H5.H5Dget_type(did);
555                    int nativeClass = H5.H5Tget_class(tid);
556                    if (nativeClass == HDF5Constants.H5T_REFERENCE) {
557                        long lsize = 1;
558                        if (rank > 0) {
559                            log.trace("getDatatype(): rank={}, dims={}", rank, dims);
560                            for (int j = 0; j < dims.length; j++) {
561                                lsize *= dims[j];
562                            }
563                        }
564                        datatype = new H5ReferenceType(getFileFormat(), lsize, tid);
565                    }
566                    else
567                        datatype = new H5Datatype(getFileFormat(), tid);
568                }
569                catch (Exception ex) {
570                    log.debug("getDatatype(): ", ex);
571                }
572                finally {
573                    try {
574                        H5.H5Tclose(tid);
575                    }
576                    catch (HDF5Exception ex) {
577                        log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex);
578                    }
579                    try {
580                        H5.H5Dclose(did);
581                    }
582                    catch (HDF5Exception ex) {
583                        log.debug("getDatatype(): H5Dclose(did {}) failure: ", did, ex);
584                    }
585                }
586            }
587        }
588
589        if (isExternal) {
590            String pdir = this.getFileFormat().getAbsoluteFile().getParent();
591
592            if (pdir == null) {
593                pdir = ".";
594            }
595            System.setProperty("user.dir", pdir);
596            log.trace("getDatatype(): External dataset: user.dir={}", pdir);
597        }
598
599        return datatype;
600    }
601
602    /**
603     * Removes all of the elements from metadata list.
604     * The list should be empty after this call returns.
605     */
606    @Override
607    public void clear()
608    {
609        super.clear();
610        objMetadata.clear();
611    }
612
613    /*
614     * (non-Javadoc)
615     *
616     * @see hdf.object.Dataset#readBytes()
617     */
618    @Override
619    public byte[] readBytes() throws HDF5Exception
620    {
621        byte[] theData = null;
622
623        if (!isInited())
624            init();
625
626        long did = open();
627        if (did >= 0) {
628            long fspace = HDF5Constants.H5I_INVALID_HID;
629            long mspace = HDF5Constants.H5I_INVALID_HID;
630            long tid    = HDF5Constants.H5I_INVALID_HID;
631
632            try {
633                long[] lsize = {1};
634                for (int j = 0; j < selectedDims.length; j++)
635                    lsize[0] *= selectedDims[j];
636
637                fspace = H5.H5Dget_space(did);
638                mspace = H5.H5Screate_simple(rank, selectedDims, null);
639
640                // set the rectangle selection
641                // HDF5 bug: for scalar dataset, H5Sselect_hyperslab gives core dump
642                if (rank * dims[0] > 1)
643                    H5.H5Sselect_hyperslab(fspace, HDF5Constants.H5S_SELECT_SET, startDims, selectedStride,
644                                           selectedDims, null); // set block to 1
645
646                tid       = H5.H5Dget_type(did);
647                long size = H5.H5Tget_size(tid) * lsize[0];
648                log.trace("readBytes(): size = {}", size);
649
650                if (size < Integer.MIN_VALUE || size > Integer.MAX_VALUE)
651                    throw new Exception("Invalid int size");
652
653                theData = new byte[(int)size];
654
655                log.trace("readBytes(): H5Dread: did={} tid={} fspace={} mspace={}", did, tid, fspace,
656                          mspace);
657                H5.H5Dread(did, tid, mspace, fspace, HDF5Constants.H5P_DEFAULT, theData);
658            }
659            catch (Exception ex) {
660                log.debug("readBytes(): failed to read data: ", ex);
661            }
662            finally {
663                try {
664                    H5.H5Sclose(fspace);
665                }
666                catch (Exception ex2) {
667                    log.debug("readBytes(): H5Sclose(fspace {}) failure: ", fspace, ex2);
668                }
669                try {
670                    H5.H5Sclose(mspace);
671                }
672                catch (Exception ex2) {
673                    log.debug("readBytes(): H5Sclose(mspace {}) failure: ", mspace, ex2);
674                }
675                try {
676                    H5.H5Tclose(tid);
677                }
678                catch (HDF5Exception ex2) {
679                    log.debug("readBytes(): H5Tclose(tid {}) failure: ", tid, ex2);
680                }
681                close(did);
682            }
683        }
684
685        return theData;
686    }
687
688    /**
689     * Reads the data from file.
690     *
691     * read() reads the data from file to a memory buffer and returns the memory
692     * buffer. The dataset object does not hold the memory buffer. To store the
693     * memory buffer in the dataset object, one must call getData().
694     *
695     * By default, the whole dataset is read into memory. Users can also select
696     * a subset to read. Subsetting is done in an implicit way.
697     *
698     * <b>How to Select a Subset</b>
699     *
700     * A selection is specified by three arrays: start, stride and count.
701     * <ol>
702     * <li>start: offset of a selection
703     * <li>stride: determines how many elements to move in each dimension
704     * <li>count: number of elements to select in each dimension
705     * </ol>
706     * getStartDims(), getStride() and getSelectedDims() returns the start,
707     * stride and count arrays respectively. Applications can make a selection
708     * by changing the values of the arrays.
709     *
710     * The following example shows how to make a subset. In the example, the
711     * dataset is a 4-dimensional array of [200][100][50][10], i.e. dims[0]=200;
712     * dims[1]=100; dims[2]=50; dims[3]=10; <br>
713     * We want to select every other data point in dims[1] and dims[2]
714     *
715     * <pre>
716     * int rank = dataset.getRank(); // number of dimensions of the dataset
717     * long[] dims = dataset.getDims(); // the dimension sizes of the dataset
718     * long[] selected = dataset.getSelectedDims(); // the selected size of the
719     *                                              // dataset
720     * long[] start = dataset.getStartDims(); // the offset of the selection
721     * long[] stride = dataset.getStride(); // the stride of the dataset
722     * int[] selectedIndex = dataset.getSelectedIndex(); // the selected
723     *                                                   // dimensions for
724     *                                                   // display
725     *
726     * // select dim1 and dim2 as 2D data for display, and slice through dim0
727     * selectedIndex[0] = 1;
728     * selectedIndex[1] = 2;
729     * selectedIndex[1] = 0;
730     *
731     * // reset the selection arrays
732     * for (int i = 0; i &lt; rank; i++) {
733     *     start[i] = 0;
734     *     selected[i] = 1;
735     *     stride[i] = 1;
736     * }
737     *
738     * // set stride to 2 on dim1 and dim2 so that every other data point is
739     * // selected.
740     * stride[1] = 2;
741     * stride[2] = 2;
742     *
743     * // set the selection size of dim1 and dim2
744     * selected[1] = dims[1] / stride[1];
745     * selected[2] = dims[1] / stride[2];
746     *
747     * // when dataset.getData() is called, the selection above will be used
748     * // since
749     * // the dimension arrays are passed by reference. Changes of these arrays
750     * // outside the dataset object directly change the values of these array
751     * // in the dataset object.
752     * </pre>
753     *
754     * For CompoundDS, the memory data object is an java.util.List object. Each
755     * element of the list is a data array that corresponds to a compound field.
756     *
757     * For example, if compound dataset "comp" has the following nested
758     * structure, and member datatypes
759     *
760     * <pre>
761     * comp --&gt; m01 (int)
762     * comp --&gt; m02 (float)
763     * comp --&gt; nest1 --&gt; m11 (char)
764     * comp --&gt; nest1 --&gt; m12 (String)
765     * comp --&gt; nest1 --&gt; nest2 --&gt; m21 (long)
766     * comp --&gt; nest1 --&gt; nest2 --&gt; m22 (double)
767     * </pre>
768     *
769     * getData() returns a list of six arrays: {int[], float[], char[],
770     * String[], long[] and double[]}.
771     *
772     * @return the data read from file.
773     *
774     * @see #getData()
775     * @see hdf.object.DataFormat#read()
776     *
777     * @throws Exception
778     *             if object can not be read
779     */
780    @Override
781    public Object read() throws Exception
782    {
783        Object readData = null;
784
785        if (!isInited())
786            init();
787
788        try {
789            readData = compoundDatasetCommonIO(H5File.IO_TYPE.READ, null);
790        }
791        catch (Exception ex) {
792            log.debug("read(): failed to read compound dataset: ", ex);
793            throw new Exception("failed to read compound dataset: " + ex.getMessage(), ex);
794        }
795
796        return readData;
797    }
798
799    /**
800     * Writes the given data buffer into this dataset in a file.
801     *
802     * The data buffer is a vector that contains the data values of compound fields. The data is written
803     * into file field by field.
804     *
805     * @param buf
806     *            The vector that contains the data values of compound fields.
807     *
808     * @throws Exception
809     *             If there is an error at the HDF5 library level.
810     */
811    @Override
812    public void write(Object buf) throws Exception
813    {
814        if (this.getFileFormat().isReadOnly())
815            throw new Exception("cannot write to compound dataset in file opened as read-only");
816
817        if (!isInited())
818            init();
819
820        try {
821            compoundDatasetCommonIO(H5File.IO_TYPE.WRITE, buf);
822        }
823        catch (Exception ex) {
824            log.debug("write(Object): failed to write compound dataset: ", ex);
825            throw new Exception("failed to write compound dataset: " + ex.getMessage(), ex);
826        }
827    }
828
829    /*
830     * Routine to convert datatypes that are read in as byte arrays to
831     * regular types.
832     */
833    @Override
834    protected Object convertByteMember(final Datatype dtype, byte[] byteData)
835    {
836        Object theObj = null;
837        log.debug("convertByteMember(): dtype={} byteData={}", dtype, byteData);
838
839        if (dtype.isFloat() && dtype.getDatatypeSize() == 16)
840            theObj = ((H5Datatype)dtype).byteToBigDecimal(byteData, 0);
841        else
842            theObj = super.convertByteMember(dtype, byteData);
843
844        return theObj;
845    }
846
847    private Object compoundDatasetCommonIO(H5File.IO_TYPE ioType, Object writeBuf) throws Exception
848    {
849        H5Datatype dsDatatype = (H5Datatype)getDatatype();
850        Object theData        = null;
851
852        if (numberOfMembers <= 0) {
853            log.debug("compoundDatasetCommonIO(): Dataset contains no members");
854            throw new Exception("dataset contains no members");
855        }
856
857        /*
858         * I/O type-specific pre-initialization.
859         */
860        if (ioType == H5File.IO_TYPE.WRITE) {
861            if ((writeBuf == null) || !(writeBuf instanceof List)) {
862                log.debug("compoundDatasetCommonIO(): writeBuf is null or invalid");
863                throw new Exception("write buffer is null or invalid");
864            }
865
866            /*
867             * Check for any unsupported datatypes and fail early before
868             * attempting to write to the dataset.
869             */
870            if (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isCompound()) {
871                log.debug("compoundDatasetCommonIO(): cannot write dataset of type ARRAY of COMPOUND");
872                throw new HDF5Exception("Unsupported dataset of type ARRAY of COMPOUND");
873            }
874
875            if (dsDatatype.isVLEN() && !dsDatatype.isVarStr() && dsDatatype.getDatatypeBase().isCompound()) {
876                log.debug("compoundDatasetCommonIO(): cannot write dataset of type VLEN of COMPOUND");
877                throw new HDF5Exception("Unsupported dataset of type VLEN of COMPOUND");
878            }
879        }
880
881        long did = open();
882        if (did >= 0) {
883            long[] spaceIDs = {HDF5Constants.H5I_INVALID_HID,
884                               HDF5Constants.H5I_INVALID_HID}; // spaceIDs[0]=mspace, spaceIDs[1]=fspace
885
886            try {
887                /*
888                 * NOTE: this call sets up a hyperslab selection in the file according to the
889                 * current selection in the dataset object.
890                 */
891                long totalSelectedSpacePoints = H5Utils.getTotalSelectedSpacePoints(
892                    did, dims, startDims, selectedStride, selectedDims, spaceIDs);
893
894                theData = compoundTypeIO(ioType, did, spaceIDs, (int)totalSelectedSpacePoints, dsDatatype,
895                                         writeBuf, new int[] {0});
896            }
897            finally {
898                if (HDF5Constants.H5S_ALL != spaceIDs[0]) {
899                    try {
900                        H5.H5Sclose(spaceIDs[0]);
901                    }
902                    catch (Exception ex) {
903                        log.debug(
904                            "compoundDatasetCommonIO(): H5Sclose(spaceIDs[0] {}) failure: ", spaceIDs[0], ex);
905                    }
906                }
907
908                if (HDF5Constants.H5S_ALL != spaceIDs[1]) {
909                    try {
910                        H5.H5Sclose(spaceIDs[1]);
911                    }
912                    catch (Exception ex) {
913                        log.debug(
914                            "compoundDatasetCommonIO(): H5Sclose(spaceIDs[1] {}) failure: ", spaceIDs[1], ex);
915                    }
916                }
917
918                close(did);
919            }
920        }
921        else
922            log.debug("compoundDatasetCommonIO(): failed to open dataset");
923
924        return theData;
925    }
926
927    /*
928     * Private recursive routine to read/write an entire compound datatype field by
929     * field. This routine is called recursively for ARRAY of COMPOUND and VLEN of
930     * COMPOUND datatypes.
931     *
932     * NOTE: the globalMemberIndex hack is ugly, but we need to keep track of a
933     * running counter so that we can index properly into the flattened name list
934     * generated from H5Datatype.extractCompoundInfo() at dataset init time.
935     */
936    private Object compoundTypeIO(H5File.IO_TYPE ioType, long did, long[] spaceIDs, int nSelPoints,
937                                  final H5Datatype cmpdType, Object writeBuf, int[] globalMemberIndex)
938    {
939        Object theData = null;
940
941        if (cmpdType.isArray()) {
942            log.trace("compoundTypeIO(): ARRAY type");
943
944            long[] arrayDims = cmpdType.getArrayDims();
945            int arrSize      = nSelPoints;
946            for (int i = 0; i < arrayDims.length; i++)
947                arrSize *= arrayDims[i];
948            theData = compoundTypeIO(ioType, did, spaceIDs, arrSize, (H5Datatype)cmpdType.getDatatypeBase(),
949                                     writeBuf, globalMemberIndex);
950        }
951        else if (cmpdType.isVLEN() && !cmpdType.isVarStr()) {
952            /*
953             * TODO: true variable-length support.
954             */
955            String[] errVal = new String[nSelPoints];
956            String errStr   = "*UNSUPPORTED*";
957
958            for (int j = 0; j < nSelPoints; j++)
959                errVal[j] = errStr;
960
961            /*
962             * Setup a fake data list.
963             */
964            Datatype baseType = cmpdType.getDatatypeBase();
965            while (baseType != null && !baseType.isCompound()) {
966                baseType = baseType.getDatatypeBase();
967            }
968
969            List<Object> fakeVlenData =
970                (List<Object>)H5Datatype.allocateArray((H5Datatype)baseType, nSelPoints);
971            fakeVlenData.add(errVal);
972
973            theData = fakeVlenData;
974        }
975        else if (cmpdType.isCompound()) {
976            List<Object> memberDataList = null;
977            List<Datatype> typeList     = cmpdType.getCompoundMemberTypes();
978
979            log.trace("compoundTypeIO(): {} {} members:", (ioType == H5File.IO_TYPE.READ) ? "read" : "write",
980                      typeList.size());
981
982            if (ioType == H5File.IO_TYPE.READ)
983                memberDataList = (List<Object>)H5Datatype.allocateArray(cmpdType, nSelPoints);
984
985            try {
986                for (int i = 0, writeListIndex = 0; i < typeList.size(); i++) {
987                    H5Datatype memberType = null;
988                    String memberName     = null;
989                    Object memberData     = null;
990
991                    try {
992                        memberType = (H5Datatype)typeList.get(i);
993                    }
994                    catch (Exception ex) {
995                        log.debug("compoundTypeIO(): get member {} failure: ", i, ex);
996                        globalMemberIndex[0]++;
997                        continue;
998                    }
999
1000                    /*
1001                     * Since the type list used here is not a flattened structure, we need to skip
1002                     * the member selection check for compound types, as otherwise having a single
1003                     * member not selected would skip the reading/writing for the entire compound
1004                     * type. The member selection check will be deferred to the recursive compound
1005                     * read/write below.
1006                     */
1007                    if (!memberType.isCompound()) {
1008                        if (!isMemberSelected[globalMemberIndex[0] % this.getMemberCount()]) {
1009                            log.debug("compoundTypeIO(): member[{}] is not selected", i);
1010                            globalMemberIndex[0]++;
1011                            continue; // the field is not selected
1012                        }
1013                    }
1014
1015                    if (!memberType.isCompound()) {
1016                        try {
1017                            memberName = new String(flatNameList.get(globalMemberIndex[0]));
1018                        }
1019                        catch (Exception ex) {
1020                            log.debug("compoundTypeIO(): get member {} name failure: ", i, ex);
1021                            memberName = "null";
1022                        }
1023                    }
1024
1025                    log.trace("compoundTypeIO(): member[{}]({}) is type {}", i, memberName,
1026                              memberType.getDescription());
1027
1028                    if (ioType == H5File.IO_TYPE.READ) {
1029                        try {
1030                            if (memberType.isCompound())
1031                                memberData = compoundTypeIO(ioType, did, spaceIDs, nSelPoints, memberType,
1032                                                            writeBuf, globalMemberIndex);
1033                            else if (
1034                                memberType
1035                                    .isArray() /* || (memberType.isVLEN() && !memberType.isVarStr()) */) {
1036                                /*
1037                                 * Recursively detect any nested array/vlen of compound types.
1038                                 */
1039                                boolean compoundFound = false;
1040
1041                                Datatype base = memberType.getDatatypeBase();
1042                                while (base != null) {
1043                                    if (base.isCompound())
1044                                        compoundFound = true;
1045
1046                                    base = base.getDatatypeBase();
1047                                }
1048
1049                                if (compoundFound) {
1050                                    /*
1051                                     * Skip the top-level array/vlen type.
1052                                     */
1053                                    globalMemberIndex[0]++;
1054
1055                                    memberData = compoundTypeIO(ioType, did, spaceIDs, nSelPoints, memberType,
1056                                                                writeBuf, globalMemberIndex);
1057                                }
1058                                else {
1059                                    memberData = readSingleCompoundMember(did, spaceIDs, nSelPoints,
1060                                                                          memberType, memberName);
1061                                    globalMemberIndex[0]++;
1062                                }
1063                            }
1064                            else {
1065                                memberData = readSingleCompoundMember(did, spaceIDs, nSelPoints, memberType,
1066                                                                      memberName);
1067                                globalMemberIndex[0]++;
1068                            }
1069                        }
1070                        catch (Exception ex) {
1071                            log.debug("compoundTypeIO(): failed to read member {}: ", i, ex);
1072                            globalMemberIndex[0]++;
1073                            memberData = null;
1074                        }
1075
1076                        if (memberData == null) {
1077                            String[] errVal = new String[nSelPoints];
1078                            String errStr   = "*ERROR*";
1079
1080                            for (int j = 0; j < nSelPoints; j++)
1081                                errVal[j] = errStr;
1082
1083                            memberData = errVal;
1084                        }
1085
1086                        memberDataList.add(memberData);
1087                    }
1088                    else {
1089                        try {
1090                            /*
1091                             * TODO: currently doesn't correctly handle non-selected compound members.
1092                             */
1093                            memberData = ((List<?>)writeBuf).get(writeListIndex++);
1094                        }
1095                        catch (Exception ex) {
1096                            log.debug("compoundTypeIO(): get member[{}] data failure: ", i, ex);
1097                            globalMemberIndex[0]++;
1098                            continue;
1099                        }
1100
1101                        if (memberData == null) {
1102                            log.debug("compoundTypeIO(): member[{}] data is null", i);
1103                            globalMemberIndex[0]++;
1104                            continue;
1105                        }
1106
1107                        try {
1108                            if (memberType.isCompound()) {
1109                                List<?> nestedList = (List<?>)((List<?>)writeBuf).get(writeListIndex++);
1110                                compoundTypeIO(ioType, did, spaceIDs, nSelPoints, memberType, nestedList,
1111                                               globalMemberIndex);
1112                            }
1113                            else {
1114                                writeSingleCompoundMember(did, spaceIDs, nSelPoints, memberType, memberName,
1115                                                          memberData);
1116                                globalMemberIndex[0]++;
1117                            }
1118                        }
1119                        catch (Exception ex) {
1120                            log.debug("compoundTypeIO(): failed to write member[{}]: ", i, ex);
1121                            globalMemberIndex[0]++;
1122                        }
1123                    }
1124                } //  (i = 0, writeListIndex = 0; i < atomicTypeList.size(); i++)
1125            }
1126            catch (Exception ex) {
1127                log.debug("compoundTypeIO(): failure: ", ex);
1128                memberDataList = null;
1129            }
1130
1131            theData = memberDataList;
1132        }
1133
1134        return theData;
1135    }
1136
1137    /*
1138     * Private routine to read a single field of a compound datatype by creating a
1139     * compound datatype and inserting the single field into that datatype.
1140     */
1141    private Object readSingleCompoundMember(long dsetID, long[] spaceIDs, int nSelPoints,
1142                                            final H5Datatype memberType, String memberName) throws Exception
1143    {
1144        H5Datatype dsDatatype = (H5Datatype)this.getDatatype();
1145        Object memberData     = null;
1146
1147        try {
1148            memberData = H5Datatype.allocateArray(memberType, nSelPoints);
1149            log.trace("readSingleCompoundMember(): allocateArray {} points ", nSelPoints);
1150        }
1151        catch (OutOfMemoryError err) {
1152            memberData = null;
1153            throw new Exception("Out of memory");
1154        }
1155        catch (Exception ex) {
1156            log.debug("readSingleCompoundMember(): ", ex);
1157            memberData = null;
1158        }
1159
1160        if (memberData != null) {
1161            /*
1162             * Create a compound datatype containing just a single field (the one which we
1163             * want to read).
1164             */
1165            long compTid = -1;
1166            try {
1167                compTid = dsDatatype.createCompoundFieldType(memberName);
1168            }
1169            catch (HDF5Exception ex) {
1170                log.debug(
1171                    "readSingleCompoundMember(): unable to create compound field type for member of type {}: ",
1172                    memberType.getDescription(), ex);
1173                memberData = null;
1174            }
1175
1176            /*
1177             * Actually read the data for this member now that everything has been setup.
1178             */
1179            try {
1180                if (memberType.isVarStr()) {
1181                    log.trace(
1182                        "readSingleCompoundMember(): H5DreadVL did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}",
1183                        dsetID, compTid,
1184                        (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
1185                        (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
1186
1187                    H5.H5Dread_VLStrings(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT,
1188                                         (Object[])memberData);
1189                }
1190                else if (memberType.isVLEN() ||
1191                         (memberType.isArray() && memberType.getDatatypeBase().isVLEN())) {
1192                    log.trace(
1193                        "readSingleCompoundMember(): H5DreadVL did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}",
1194                        dsetID, compTid,
1195                        (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
1196                        (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
1197
1198                    H5.H5DreadVL(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT,
1199                                 (Object[])memberData);
1200                }
1201                else {
1202                    log.trace(
1203                        "readSingleCompoundMember(): H5Dread did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}",
1204                        dsetID, compTid,
1205                        (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
1206                        (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
1207
1208                    H5.H5Dread(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT,
1209                               memberData);
1210                }
1211            }
1212            catch (HDF5DataFiltersException exfltr) {
1213                log.debug("readSingleCompoundMember(): read failure: ", exfltr);
1214                throw new Exception("Filter not available exception: " + exfltr.getMessage(), exfltr);
1215            }
1216            catch (Exception ex) {
1217                log.debug("readSingleCompoundMember(): read failure: ", ex);
1218                throw new Exception("failed to read compound member: " + ex.getMessage(), ex);
1219            }
1220            finally {
1221                dsDatatype.close(compTid);
1222            }
1223
1224            /*
1225             * Perform any necessary data conversions.
1226             */
1227            if (memberType.isUnsigned()) {
1228                log.trace("readSingleCompoundMember(): converting from unsigned C-type integers");
1229                memberData = Dataset.convertFromUnsignedC(memberData, null);
1230            }
1231            else if (Utils.getJavaObjectRuntimeClass(memberData) == 'B') {
1232                log.trace("readSingleCompoundMember(): converting byte array member into Object");
1233
1234                /*
1235                 * For all other types that get read into memory as a byte[] (such as nested
1236                 * compounds and arrays of compounds), we must manually convert the byte[] into
1237                 * something usable.
1238                 */
1239                memberData = convertByteMember(memberType, (byte[])memberData);
1240            }
1241        }
1242
1243        return memberData;
1244    }
1245
1246    /*
1247     * Private routine to write a single field of a compound datatype by creating a
1248     * compound datatype and inserting the single field into that datatype.
1249     */
1250    private void writeSingleCompoundMember(long dsetID, long[] spaceIDs, int nSelPoints,
1251                                           final H5Datatype memberType, String memberName, Object theData)
1252        throws Exception
1253    {
1254        H5Datatype dsDatatype = (H5Datatype)this.getDatatype();
1255
1256        /*
1257         * Check for any unsupported datatypes before attempting to write this compound
1258         * member.
1259         */
1260        if (memberType.isVLEN() && !memberType.isVarStr()) {
1261            log.debug("writeSingleCompoundMember(): writing of VL non-strings is not currently supported");
1262            throw new Exception("writing of VL non-strings is not currently supported");
1263        }
1264
1265        /*
1266         * Perform any necessary data conversions before writing the data.
1267         */
1268        Object tmpData = theData;
1269        try {
1270            if (memberType.isUnsigned()) {
1271                // Check if we need to convert unsigned integer data from Java-style
1272                // to C-style integers
1273                long tsize   = memberType.getDatatypeSize();
1274                String cname = theData.getClass().getName();
1275                char dname   = cname.charAt(cname.lastIndexOf('[') + 1);
1276                boolean doIntConversion =
1277                    (((tsize == 1) && (dname == 'S')) || ((tsize == 2) && (dname == 'I')) ||
1278                     ((tsize == 4) && (dname == 'J')));
1279
1280                if (doIntConversion) {
1281                    log.trace(
1282                        "writeSingleCompoundMember(): converting integer data to unsigned C-type integers");
1283                    tmpData = convertToUnsignedC(theData, null);
1284                }
1285            }
1286            else if (memberType.isString() && (Array.get(theData, 0) instanceof String)) {
1287                log.trace("writeSingleCompoundMember(): converting string array to byte array");
1288                tmpData = stringToByte((String[])theData, (int)memberType.getDatatypeSize());
1289            }
1290            else if (memberType.isEnum() && (Array.get(theData, 0) instanceof String)) {
1291                log.trace("writeSingleCompoundMember(): converting enum names to values");
1292                tmpData = memberType.convertEnumNameToValue((String[])theData);
1293            }
1294        }
1295        catch (Exception ex) {
1296            log.debug("writeSingleCompoundMember(): data conversion failure: ", ex);
1297            tmpData = null;
1298        }
1299
1300        if (tmpData == null) {
1301            log.debug("writeSingleCompoundMember(): data is null");
1302            return;
1303        }
1304
1305        /*
1306         * Create a compound datatype containing just a single field (the one which we
1307         * want to write).
1308         */
1309        long compTid = -1;
1310        try {
1311            compTid = dsDatatype.createCompoundFieldType(memberName);
1312        }
1313        catch (HDF5Exception ex) {
1314            log.debug(
1315                "writeSingleCompoundMember(): unable to create compound field type for member of type {}: ",
1316                memberType.getDescription(), ex);
1317        }
1318
1319        /*
1320         * Actually write the data now that everything has been setup.
1321         */
1322        try {
1323            if (memberType.isVarStr()) {
1324                log.trace(
1325                    "writeSingleCompoundMember(): H5Dwrite_string did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}",
1326                    dsetID, compTid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
1327                    (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
1328
1329                H5.H5Dwrite_string(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT,
1330                                   (String[])tmpData);
1331            }
1332            else {
1333                log.trace(
1334                    "writeSingleCompoundMember(): H5Dwrite did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}",
1335                    dsetID, compTid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
1336                    (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
1337
1338                // BUG!!! does not write nested compound data and no
1339                // exception was caught. Need to check if it is a java
1340                // error or C library error.
1341                H5.H5Dwrite(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, tmpData);
1342            }
1343        }
1344        catch (Exception ex) {
1345            log.debug("writeSingleCompoundMember(): write failure: ", ex);
1346            throw new Exception("failed to write compound member: " + ex.getMessage(), ex);
1347        }
1348        finally {
1349            dsDatatype.close(compTid);
1350        }
1351    }
1352
1353    /**
1354     * Converts the data values of this data object to appropriate Java integers if
1355     * they are unsigned integers.
1356     *
1357     * @see hdf.object.Dataset#convertToUnsignedC(Object)
1358     * @see hdf.object.Dataset#convertFromUnsignedC(Object, Object)
1359     *
1360     * @return the converted data buffer.
1361     */
1362    @Override
1363    public Object convertFromUnsignedC()
1364    {
1365        throw new UnsupportedOperationException("H5CompoundDS:convertFromUnsignedC Unsupported operation.");
1366    }
1367
1368    /**
1369     * Converts Java integer data values of this data object back to unsigned C-type
1370     * integer data if they are unsigned integers.
1371     *
1372     * @see hdf.object.Dataset#convertToUnsignedC(Object)
1373     * @see hdf.object.Dataset#convertToUnsignedC(Object, Object)
1374     *
1375     * @return the converted data buffer.
1376     */
1377    @Override
1378    public Object convertToUnsignedC()
1379    {
1380        throw new UnsupportedOperationException("H5CompoundDS:convertToUnsignedC Unsupported operation.");
1381    }
1382
1383    /**
1384     * Retrieves the object's metadata, such as attributes, from the file.
1385     *
1386     * Metadata, such as attributes, is stored in a List.
1387     *
1388     * @return the list of metadata objects.
1389     *
1390     * @throws HDF5Exception
1391     *             if the metadata can not be retrieved
1392     */
1393    @Override
1394    public List<Attribute> getMetadata() throws HDF5Exception
1395    {
1396        int gmIndexType  = 0;
1397        int gmIndexOrder = 0;
1398
1399        try {
1400            gmIndexType = fileFormat.getIndexType(null);
1401        }
1402        catch (Exception ex) {
1403            log.debug("getMetadata(): getIndexType failed: ", ex);
1404        }
1405        try {
1406            gmIndexOrder = fileFormat.getIndexOrder(null);
1407        }
1408        catch (Exception ex) {
1409            log.debug("getMetadata(): getIndexOrder failed: ", ex);
1410        }
1411        return this.getMetadata(gmIndexType, gmIndexOrder);
1412    }
1413
1414    /**
1415     * Retrieves the object's metadata, such as attributes, from the file.
1416     *
1417     * Metadata, such as attributes, is stored in a List.
1418     *
1419     * @param attrPropList
1420     *             the list of properties to get
1421     *
1422     * @return the list of metadata objects.
1423     *
1424     * @throws HDF5Exception
1425     *             if the metadata can not be retrieved
1426     */
1427    public List<Attribute> getMetadata(int... attrPropList) throws HDF5Exception
1428    {
1429        if (!isInited())
1430            init();
1431
1432        try {
1433            this.linkTargetObjName = H5File.getLinkTargetName(this);
1434        }
1435        catch (Exception ex) {
1436            log.debug("getMetadata(): getLinkTargetName failed: ", ex);
1437        }
1438
1439        if (objMetadata.getAttributeList() == null) {
1440            long did  = HDF5Constants.H5I_INVALID_HID;
1441            long pcid = HDF5Constants.H5I_INVALID_HID;
1442            long paid = HDF5Constants.H5I_INVALID_HID;
1443
1444            did = open();
1445            if (did >= 0) {
1446                try {
1447                    // get the compression and chunk information
1448                    pcid             = H5.H5Dget_create_plist(did);
1449                    paid             = H5.H5Dget_access_plist(did);
1450                    long storageSize = H5.H5Dget_storage_size(did);
1451                    int nfilt        = H5.H5Pget_nfilters(pcid);
1452                    int layoutType   = H5.H5Pget_layout(pcid);
1453
1454                    storageLayout.setLength(0);
1455                    compression.setLength(0);
1456
1457                    if (layoutType == HDF5Constants.H5D_CHUNKED) {
1458                        chunkSize = new long[rank];
1459                        H5.H5Pget_chunk(pcid, rank, chunkSize);
1460                        int n = chunkSize.length;
1461                        storageLayout.append("CHUNKED: ").append(chunkSize[0]);
1462                        for (int i = 1; i < n; i++)
1463                            storageLayout.append(" X ").append(chunkSize[i]);
1464
1465                        if (nfilt > 0) {
1466                            long nelmts = 1;
1467                            long uncompSize;
1468                            long datumSize = getDatatype().getDatatypeSize();
1469
1470                            if (datumSize < 0) {
1471                                long tmptid = HDF5Constants.H5I_INVALID_HID;
1472                                try {
1473                                    tmptid    = H5.H5Dget_type(did);
1474                                    datumSize = H5.H5Tget_size(tmptid);
1475                                }
1476                                finally {
1477                                    try {
1478                                        H5.H5Tclose(tmptid);
1479                                    }
1480                                    catch (Exception ex2) {
1481                                        log.debug("getMetadata(): H5Tclose(tmptid {}) failure: ", tmptid,
1482                                                  ex2);
1483                                    }
1484                                }
1485                            }
1486
1487                            for (int i = 0; i < rank; i++)
1488                                nelmts *= dims[i];
1489                            uncompSize = nelmts * datumSize;
1490
1491                            /* compression ratio = uncompressed size / compressed size */
1492
1493                            if (storageSize != 0) {
1494                                double ratio     = (double)uncompSize / (double)storageSize;
1495                                DecimalFormat df = new DecimalFormat();
1496                                df.setMinimumFractionDigits(3);
1497                                df.setMaximumFractionDigits(3);
1498                                compression.append(df.format(ratio)).append(":1");
1499                            }
1500                        }
1501                    }
1502                    else if (layoutType == HDF5Constants.H5D_COMPACT) {
1503                        storageLayout.append("COMPACT");
1504                    }
1505                    else if (layoutType == HDF5Constants.H5D_CONTIGUOUS) {
1506                        storageLayout.append("CONTIGUOUS");
1507                        if (H5.H5Pget_external_count(pcid) > 0)
1508                            storageLayout.append(" - EXTERNAL ");
1509                    }
1510                    else if (layoutType == HDF5Constants.H5D_VIRTUAL) {
1511                        storageLayout.append("VIRTUAL - ");
1512                        try {
1513                            long vmaps = H5.H5Pget_virtual_count(pcid);
1514                            try {
1515                                int virtView = H5.H5Pget_virtual_view(paid);
1516                                long virtGap = H5.H5Pget_virtual_printf_gap(paid);
1517                                if (virtView == HDF5Constants.H5D_VDS_FIRST_MISSING)
1518                                    storageLayout.append("First Missing");
1519                                else
1520                                    storageLayout.append("Last Available");
1521                                storageLayout.append("\nGAP : ").append(virtGap);
1522                            }
1523                            catch (Exception err) {
1524                                log.debug("getMetadata(): vds error: ", err);
1525                                storageLayout.append("ERROR");
1526                            }
1527                            storageLayout.append("\nMAPS : ").append(vmaps);
1528                            if (vmaps > 0) {
1529                                for (long next = 0; next < vmaps; next++) {
1530                                    try {
1531                                        H5.H5Pget_virtual_vspace(pcid, next);
1532                                        H5.H5Pget_virtual_srcspace(pcid, next);
1533                                        String fname    = H5.H5Pget_virtual_filename(pcid, next);
1534                                        String dsetname = H5.H5Pget_virtual_dsetname(pcid, next);
1535                                        storageLayout.append("\n").append(fname).append(" : ").append(
1536                                            dsetname);
1537                                    }
1538                                    catch (Exception err) {
1539                                        log.debug("getMetadata(): vds space[{}] error: ", next, err);
1540                                        storageLayout.append("ERROR");
1541                                    }
1542                                }
1543                            }
1544                        }
1545                        catch (Exception err) {
1546                            log.debug("getMetadata(): vds count error: ", err);
1547                            storageLayout.append("ERROR");
1548                        }
1549                    }
1550                    else {
1551                        chunkSize = null;
1552                        storageLayout.append("NONE");
1553                    }
1554
1555                    int[] flags     = {0, 0};
1556                    long[] cdNelmts = {20};
1557                    int[] cdValues  = new int[(int)cdNelmts[0]];
1558                    String[] cdName = {"", ""};
1559                    log.trace("getMetadata(): {} filters in pipeline", nfilt);
1560                    int filter         = -1;
1561                    int[] filterConfig = {1};
1562
1563                    filters.setLength(0);
1564
1565                    if (nfilt == 0) {
1566                        filters.append("NONE");
1567                    }
1568                    else {
1569                        for (int i = 0, k = 0; i < nfilt; i++) {
1570                            log.trace("getMetadata(): filter[{}]", i);
1571                            if (i > 0)
1572                                filters.append(", ");
1573                            if (k > 0)
1574                                compression.append(", ");
1575
1576                            try {
1577                                cdNelmts[0] = 20;
1578                                cdValues    = new int[(int)cdNelmts[0]];
1579                                cdValues    = new int[(int)cdNelmts[0]];
1580                                filter = H5.H5Pget_filter(pcid, i, flags, cdNelmts, cdValues, 120, cdName,
1581                                                          filterConfig);
1582                                log.trace("getMetadata(): filter[{}] is {} has {} elements ", i, cdName[0],
1583                                          cdNelmts[0]);
1584                                for (int j = 0; j < cdNelmts[0]; j++)
1585                                    log.trace("getMetadata(): filter[{}] element {} = {}", i, j, cdValues[j]);
1586                            }
1587                            catch (Exception err) {
1588                                log.debug("getMetadata(): filter[{}] error: ", i, err);
1589                                filters.append("ERROR");
1590                                continue;
1591                            }
1592
1593                            if (filter == HDF5Constants.H5Z_FILTER_NONE) {
1594                                filters.append("NONE");
1595                            }
1596                            else if (filter == HDF5Constants.H5Z_FILTER_DEFLATE) {
1597                                filters.append("GZIP");
1598                                compression.append(COMPRESSION_GZIP_TXT).append(cdValues[0]);
1599                                k++;
1600                            }
1601                            else if (filter == HDF5Constants.H5Z_FILTER_FLETCHER32) {
1602                                filters.append("Error detection filter");
1603                            }
1604                            else if (filter == HDF5Constants.H5Z_FILTER_SHUFFLE) {
1605                                filters.append("SHUFFLE: Nbytes = ").append(cdValues[0]);
1606                            }
1607                            else if (filter == HDF5Constants.H5Z_FILTER_NBIT) {
1608                                filters.append("NBIT");
1609                            }
1610                            else if (filter == HDF5Constants.H5Z_FILTER_SCALEOFFSET) {
1611                                filters.append("SCALEOFFSET: MIN BITS = ").append(cdValues[0]);
1612                            }
1613                            else if (filter == HDF5Constants.H5Z_FILTER_SZIP) {
1614                                filters.append("SZIP");
1615                                compression.append("SZIP: Pixels per block = ").append(cdValues[1]);
1616                                k++;
1617                                int flag = -1;
1618                                try {
1619                                    flag = H5.H5Zget_filter_info(filter);
1620                                }
1621                                catch (Exception ex) {
1622                                    log.debug("getMetadata(): H5Zget_filter_info failure: ", ex);
1623                                    flag = -1;
1624                                }
1625                                if (flag == HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED)
1626                                    compression.append(": H5Z_FILTER_CONFIG_DECODE_ENABLED");
1627                                else if ((flag == HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) ||
1628                                         (flag >= (HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED +
1629                                                   HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED)))
1630                                    compression.append(": H5Z_FILTER_CONFIG_ENCODE_ENABLED");
1631                            }
1632                            else {
1633                                filters.append("USERDEFINED ")
1634                                    .append(cdName[0])
1635                                    .append("(")
1636                                    .append(filter)
1637                                    .append("): ");
1638                                for (int j = 0; j < cdNelmts[0]; j++) {
1639                                    if (j > 0)
1640                                        filters.append(", ");
1641                                    filters.append(cdValues[j]);
1642                                }
1643                                log.debug("getMetadata(): filter[{}] is user defined compression", i);
1644                            }
1645                        } //  (int i=0; i<nfilt; i++)
1646                    }
1647
1648                    if (compression.length() == 0)
1649                        compression.append("NONE");
1650                    log.trace("getMetadata(): filter compression={}", compression);
1651                    log.trace("getMetadata(): filter information={}", filters);
1652
1653                    storage.setLength(0);
1654                    storage.append("SIZE: ").append(storageSize);
1655
1656                    try {
1657                        int[] at = {0};
1658                        H5.H5Pget_alloc_time(pcid, at);
1659                        storage.append(", allocation time: ");
1660                        if (at[0] == HDF5Constants.H5D_ALLOC_TIME_EARLY)
1661                            storage.append("Early");
1662                        else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_INCR)
1663                            storage.append("Incremental");
1664                        else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_LATE)
1665                            storage.append("Late");
1666                        else
1667                            storage.append("Default");
1668                    }
1669                    catch (Exception ex) {
1670                        log.debug("getMetadata(): Storage allocation time:", ex);
1671                    }
1672                    log.trace("getMetadata(): storage={}", storage);
1673                }
1674                finally {
1675                    try {
1676                        H5.H5Pclose(paid);
1677                    }
1678                    catch (Exception ex) {
1679                        log.debug("getMetadata(): H5Pclose(paid {}) failure: ", paid, ex);
1680                    }
1681                    try {
1682                        H5.H5Pclose(pcid);
1683                    }
1684                    catch (Exception ex) {
1685                        log.debug("getMetadata(): H5Pclose(pcid {}) failure: ", pcid, ex);
1686                    }
1687                    close(did);
1688                }
1689            }
1690        }
1691
1692        List<Attribute> attrlist = null;
1693        try {
1694            attrlist = objMetadata.getMetadata(attrPropList);
1695        }
1696        catch (Exception ex) {
1697            log.debug("getMetadata(): getMetadata failed: ", ex);
1698        }
1699        return attrlist;
1700    }
1701
1702    /**
1703     * Writes a specific piece of metadata (such as an attribute) into the file.
1704     *
1705     * If an HDF(4&amp;5) attribute exists in the file, this method updates its
1706     * value. If the attribute does not exist in the file, it creates the
1707     * attribute in the file and attaches it to the object. It will fail to
1708     * write a new attribute to the object where an attribute with the same name
1709     * already exists. To update the value of an existing attribute in the file,
1710     * one needs to get the instance of the attribute by getMetadata(), change
1711     * its values, then use writeMetadata() to write the value.
1712     *
1713     * @param info
1714     *            the metadata to write.
1715     *
1716     * @throws Exception
1717     *             if the metadata can not be written
1718     */
1719    @Override
1720    public void writeMetadata(Object info) throws Exception
1721    {
1722        try {
1723            objMetadata.writeMetadata(info);
1724        }
1725        catch (Exception ex) {
1726            log.debug("writeMetadata(): Object not an Attribute");
1727            return;
1728        }
1729    }
1730
1731    /**
1732     * Deletes an existing piece of metadata from this object.
1733     *
1734     * @param info
1735     *            the metadata to delete.
1736     *
1737     * @throws HDF5Exception
1738     *             if the metadata can not be removed
1739     */
1740    @Override
1741    public void removeMetadata(Object info) throws HDF5Exception
1742    {
1743        try {
1744            objMetadata.removeMetadata(info);
1745        }
1746        catch (Exception ex) {
1747            log.debug("removeMetadata(): Object not an Attribute");
1748            return;
1749        }
1750
1751        Attribute attr = (Attribute)info;
1752        log.trace("removeMetadata(): {}", attr.getAttributeName());
1753        long did = open();
1754        if (did >= 0) {
1755            try {
1756                H5.H5Adelete(did, attr.getAttributeName());
1757            }
1758            finally {
1759                close(did);
1760            }
1761        }
1762        else {
1763            log.debug("removeMetadata(): failed to open compound dataset");
1764        }
1765    }
1766
1767    /**
1768     * Updates an existing piece of metadata attached to this object.
1769     *
1770     * @param info
1771     *            the metadata to update.
1772     *
1773     * @throws HDF5Exception
1774     *             if the metadata can not be updated
1775     */
1776    @Override
1777    public void updateMetadata(Object info) throws HDF5Exception
1778    {
1779        try {
1780            objMetadata.updateMetadata(info);
1781        }
1782        catch (Exception ex) {
1783            log.debug("updateMetadata(): Object not an Attribute");
1784            return;
1785        }
1786    }
1787
1788    /*
1789     * (non-Javadoc)
1790     *
1791     * @see hdf.object.HObject#setName(java.lang.String)
1792     */
1793    @Override
1794    public void setName(String newName) throws Exception
1795    {
1796        if (newName == null)
1797            throw new IllegalArgumentException("The new name is NULL");
1798
1799        H5File.renameObject(this, newName);
1800        super.setName(newName);
1801    }
1802
1803    /**
1804     * @deprecated Not for public use in the future. <br>
1805     *             Using
1806     *             {@link #create(String, Group, long[], long[], long[], int, String[], Datatype[], int[],
1807     * long[][], Object)}
1808     *
1809     * @param name
1810     *            the name of the dataset to create.
1811     * @param pgroup
1812     *            parent group where the new dataset is created.
1813     * @param dims
1814     *            the dimension size of the dataset.
1815     * @param memberNames
1816     *            the names of compound datatype
1817     * @param memberDatatypes
1818     *            the datatypes of the compound datatype
1819     * @param memberSizes
1820     *            the dim sizes of the members
1821     * @param data
1822     *            list of data arrays written to the new dataset, null if no data is written to the new
1823     *            dataset.
1824     *
1825     * @return the new compound dataset if successful; otherwise returns null.
1826     *
1827     * @throws Exception
1828     *             if there is a failure.
1829     */
1830    @Deprecated
1831    public static Dataset create(String name, Group pgroup, long[] dims, String[] memberNames,
1832                                 Datatype[] memberDatatypes, int[] memberSizes, Object data) throws Exception
1833    {
1834        if ((pgroup == null) || (name == null) || (dims == null) || (memberNames == null) ||
1835            (memberDatatypes == null) || (memberSizes == null)) {
1836            return null;
1837        }
1838
1839        int nMembers        = memberNames.length;
1840        int memberRanks[]   = new int[nMembers];
1841        long memberDims[][] = new long[nMembers][1];
1842        for (int i = 0; i < nMembers; i++) {
1843            memberRanks[i]   = 1;
1844            memberDims[i][0] = memberSizes[i];
1845        }
1846
1847        return H5CompoundDS.create(name, pgroup, dims, memberNames, memberDatatypes, memberRanks, memberDims,
1848                                   data);
1849    }
1850
1851    /**
1852     * @deprecated Not for public use in the future. <br>
1853     *             Using
1854     *             {@link #create(String, Group, long[], long[], long[], int, String[], Datatype[], int[],
1855     * long[][], Object)}
1856     *
1857     * @param name
1858     *            the name of the dataset to create.
1859     * @param pgroup
1860     *            parent group where the new dataset is created.
1861     * @param dims
1862     *            the dimension size of the dataset.
1863     * @param memberNames
1864     *            the names of compound datatype
1865     * @param memberDatatypes
1866     *            the datatypes of the compound datatype
1867     * @param memberRanks
1868     *            the ranks of the members
1869     * @param memberDims
1870     *            the dim sizes of the members
1871     * @param data
1872     *            list of data arrays written to the new dataset, null if no data is written to the new
1873     *            dataset.
1874     *
1875     * @return the new compound dataset if successful; otherwise returns null.
1876     *
1877     * @throws Exception
1878     *             if the dataset can not be created.
1879     */
1880    @Deprecated
1881    public static Dataset create(String name, Group pgroup, long[] dims, String[] memberNames,
1882                                 Datatype[] memberDatatypes, int[] memberRanks, long[][] memberDims,
1883                                 Object data) throws Exception
1884    {
1885        return H5CompoundDS.create(name, pgroup, dims, null, null, -1, memberNames, memberDatatypes,
1886                                   memberRanks, memberDims, data);
1887    }
1888
1889    /**
1890     * Creates a simple compound dataset in a file with/without chunking and compression.
1891     *
1892     * This function provides an easy way to create a simple compound dataset in file by hiding tedious
1893     * details of creating a compound dataset from users.
1894     *
1895     * This function calls H5.H5Dcreate() to create a simple compound dataset in file. Nested compound
1896     * dataset is not supported. The required information to create a compound dataset includes the
1897     * name, the parent group and data space of the dataset, the names, datatypes and data spaces of the
1898     * compound fields. Other information such as chunks, compression and the data buffer is optional.
1899     *
1900     * The following example shows how to use this function to create a compound dataset in file.
1901     *
1902     * <pre>
1903     * H5File file = null;
1904     * String message = &quot;&quot;;
1905     * Group pgroup = null;
1906     * int[] DATA_INT = new int[DIM_SIZE];
1907     * float[] DATA_FLOAT = new float[DIM_SIZE];
1908     * String[] DATA_STR = new String[DIM_SIZE];
1909     * long[] DIMs = { 50, 10 };
1910     * long[] CHUNKs = { 25, 5 };
1911     *
1912     * try {
1913     *     file = (H5File) H5FILE.open(fname, H5File.CREATE);
1914     *     file.open();
1915     *     pgroup = (Group) file.get(&quot;/&quot;);
1916     * }
1917     * catch (Exception ex) {
1918     * }
1919     *
1920     * Vector data = new Vector();
1921     * data.add(0, DATA_INT);
1922     * data.add(1, DATA_FLOAT);
1923     * data.add(2, DATA_STR);
1924     *
1925     * // create groups
1926     * Datatype[] mdtypes = new H5Datatype[3];
1927     * String[] mnames = { &quot;int&quot;, &quot;float&quot;, &quot;string&quot; };
1928     * Dataset dset = null;
1929     * try {
1930     *     mdtypes[0] = new H5Datatype(Datatype.CLASS_INTEGER, 4, Datatype.NATIVE, Datatype.NATIVE);
1931     *     mdtypes[1] = new H5Datatype(Datatype.CLASS_FLOAT, 4, Datatype.NATIVE, Datatype.NATIVE);
1932     *     mdtypes[2] = new H5Datatype(Datatype.CLASS_STRING, STR_LEN, Datatype.NATIVE, Datatype.NATIVE);
1933     *     dset = file.createCompoundDS(&quot;/CompoundDS&quot;, pgroup, DIMs, null, CHUNKs, 9, mnames,
1934     * mdtypes, null, data);
1935     * }
1936     * catch (Exception ex) {
1937     *     failed(message, ex, file);
1938     *     return 1;
1939     * }
1940     * </pre>
1941     *
1942     * @param name
1943     *            the name of the dataset to create.
1944     * @param pgroup
1945     *            parent group where the new dataset is created.
1946     * @param dims
1947     *            the dimension size of the dataset.
1948     * @param maxdims
1949     *            the max dimension size of the dataset. maxdims is set to dims if maxdims = null.
1950     * @param chunks
1951     *            the chunk size of the dataset. No chunking if chunk = null.
1952     * @param gzip
1953     *            GZIP compression level (1 to 9). 0 or negative values if no compression.
1954     * @param memberNames
1955     *            the names of compound datatype
1956     * @param memberDatatypes
1957     *            the datatypes of the compound datatype
1958     * @param memberRanks
1959     *            the ranks of the members
1960     * @param memberDims
1961     *            the dim sizes of the members
1962     * @param data
1963     *            list of data arrays written to the new dataset, null if no data is written to the new
1964     *            dataset.
1965     *
1966     * @return the new compound dataset if successful; otherwise returns null.
1967     *
1968     * @throws Exception
1969     *             if there is a failure.
1970     */
1971    public static Dataset create(String name, Group pgroup, long[] dims, long[] maxdims, long[] chunks,
1972                                 int gzip, String[] memberNames, Datatype[] memberDatatypes,
1973                                 int[] memberRanks, long[][] memberDims, Object data) throws Exception
1974    {
1975        H5CompoundDS dataset = null;
1976        String fullPath      = null;
1977        long did             = HDF5Constants.H5I_INVALID_HID;
1978        long plist           = HDF5Constants.H5I_INVALID_HID;
1979        long sid             = HDF5Constants.H5I_INVALID_HID;
1980        long tid             = HDF5Constants.H5I_INVALID_HID;
1981
1982        if ((pgroup == null) || (name == null) || (dims == null) || ((gzip > 0) && (chunks == null)) ||
1983            (memberNames == null) || (memberDatatypes == null) || (memberRanks == null) ||
1984            (memberDims == null)) {
1985            log.debug("create(): one or more parameters are null");
1986            return null;
1987        }
1988
1989        H5File file = (H5File)pgroup.getFileFormat();
1990        if (file == null) {
1991            log.debug("create(): parent group FileFormat is null");
1992            return null;
1993        }
1994
1995        String path = HObject.SEPARATOR;
1996        if (!pgroup.isRoot()) {
1997            path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR;
1998            if (name.endsWith("/"))
1999                name = name.substring(0, name.length() - 1);
2000            int idx = name.lastIndexOf('/');
2001            if (idx >= 0)
2002                name = name.substring(idx + 1);
2003        }
2004
2005        fullPath = path + name;
2006
2007        int typeSize   = 0;
2008        int nMembers   = memberNames.length;
2009        long[] mTypes  = new long[nMembers];
2010        int memberSize = 1;
2011        for (int i = 0; i < nMembers; i++) {
2012            memberSize = 1;
2013            for (int j = 0; j < memberRanks[i]; j++)
2014                memberSize *= memberDims[i][j];
2015
2016            mTypes[i] = -1;
2017            // the member is an array
2018            if ((memberSize > 1) && (!memberDatatypes[i].isString())) {
2019                long tmptid = -1;
2020                if ((tmptid = memberDatatypes[i].createNative()) >= 0) {
2021                    try {
2022                        mTypes[i] = H5.H5Tarray_create(tmptid, memberRanks[i], memberDims[i]);
2023                    }
2024                    finally {
2025                        try {
2026                            H5.H5Tclose(tmptid);
2027                        }
2028                        catch (Exception ex) {
2029                            log.debug("create(): H5Tclose(tmptid {}) failure: ", tmptid, ex);
2030                        }
2031                    }
2032                }
2033            }
2034            else {
2035                mTypes[i] = memberDatatypes[i].createNative();
2036            }
2037            try {
2038                typeSize += H5.H5Tget_size(mTypes[i]);
2039            }
2040            catch (Exception ex) {
2041                log.debug("create(): array create H5Tget_size:", ex);
2042
2043                while (i > 0) {
2044                    try {
2045                        H5.H5Tclose(mTypes[i]);
2046                    }
2047                    catch (HDF5Exception ex2) {
2048                        log.debug("create(): H5Tclose(mTypes[{}] {}) failure: ", i, mTypes[i], ex2);
2049                    }
2050                    i--;
2051                }
2052                throw ex;
2053            }
2054        } //  (int i = 0; i < nMembers; i++) {
2055
2056        // setup chunking and compression
2057        boolean isExtentable = false;
2058        if (maxdims != null) {
2059            for (int i = 0; i < maxdims.length; i++) {
2060                if (maxdims[i] == 0)
2061                    maxdims[i] = dims[i];
2062                else if (maxdims[i] < 0)
2063                    maxdims[i] = HDF5Constants.H5S_UNLIMITED;
2064
2065                if (maxdims[i] != dims[i])
2066                    isExtentable = true;
2067            }
2068        }
2069
2070        // HDF5 requires you to use chunking in order to define extendible
2071        // datasets. Chunking makes it possible to extend datasets efficiently,
2072        // without having to reorganize storage excessively. Using default size
2073        // of 64x...which has good performance
2074        if ((chunks == null) && isExtentable) {
2075            chunks = new long[dims.length];
2076            for (int i = 0; i < dims.length; i++)
2077                chunks[i] = Math.min(dims[i], 64);
2078        }
2079
2080        // prepare the dataspace and datatype
2081        int rank = dims.length;
2082
2083        try {
2084            sid = H5.H5Screate_simple(rank, dims, maxdims);
2085
2086            // figure out creation properties
2087            plist = HDF5Constants.H5P_DEFAULT;
2088
2089            tid        = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, typeSize);
2090            int offset = 0;
2091            for (int i = 0; i < nMembers; i++) {
2092                H5.H5Tinsert(tid, memberNames[i], offset, mTypes[i]);
2093                offset += H5.H5Tget_size(mTypes[i]);
2094            }
2095
2096            if (chunks != null) {
2097                plist = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
2098
2099                H5.H5Pset_layout(plist, HDF5Constants.H5D_CHUNKED);
2100                H5.H5Pset_chunk(plist, rank, chunks);
2101
2102                // compression requires chunking
2103                if (gzip > 0) {
2104                    H5.H5Pset_deflate(plist, gzip);
2105                }
2106            }
2107
2108            long fid = file.getFID();
2109
2110            did     = H5.H5Dcreate(fid, fullPath, tid, sid, HDF5Constants.H5P_DEFAULT, plist,
2111                                   HDF5Constants.H5P_DEFAULT);
2112            dataset = new H5CompoundDS(file, name, path);
2113        }
2114        finally {
2115            try {
2116                H5.H5Pclose(plist);
2117            }
2118            catch (HDF5Exception ex) {
2119                log.debug("create(): H5Pclose(plist {}) failure: ", plist, ex);
2120            }
2121            try {
2122                H5.H5Sclose(sid);
2123            }
2124            catch (HDF5Exception ex) {
2125                log.debug("create(): H5Sclose(sid {}) failure: ", sid, ex);
2126            }
2127            try {
2128                H5.H5Tclose(tid);
2129            }
2130            catch (HDF5Exception ex) {
2131                log.debug("create(): H5Tclose(tid {}) failure: ", tid, ex);
2132            }
2133            try {
2134                H5.H5Dclose(did);
2135            }
2136            catch (HDF5Exception ex) {
2137                log.debug("create(): H5Dclose(did {}) failure: ", did, ex);
2138            }
2139
2140            for (int i = 0; i < nMembers; i++) {
2141                try {
2142                    H5.H5Tclose(mTypes[i]);
2143                }
2144                catch (HDF5Exception ex) {
2145                    log.debug("create(): H5Tclose(mTypes[{}] {}) failure: ", i, mTypes[i], ex);
2146                }
2147            }
2148        }
2149
2150        if (dataset != null) {
2151            pgroup.addToMemberList(dataset);
2152            if (data != null) {
2153                dataset.init();
2154                long selected[] = dataset.getSelectedDims();
2155                for (int i = 0; i < rank; i++)
2156                    selected[i] = dims[i];
2157                dataset.write(data);
2158            }
2159        }
2160
2161        return dataset;
2162    }
2163
2164    /*
2165     * (non-Javadoc)
2166     *
2167     * @see hdf.object.Dataset#isString(long)
2168     */
2169    @Override
2170    public boolean isString(long tid)
2171    {
2172        boolean b = false;
2173        try {
2174            b = (HDF5Constants.H5T_STRING == H5.H5Tget_class(tid));
2175        }
2176        catch (Exception ex) {
2177            b = false;
2178        }
2179
2180        return b;
2181    }
2182
2183    /*
2184     * (non-Javadoc)
2185     *
2186     * @see hdf.object.Dataset#getSize(long)
2187     */
2188    @Override
2189    public long getSize(long tid)
2190    {
2191        return H5Datatype.getDatatypeSize(tid);
2192    }
2193
2194    /*
2195     * (non-Javadoc)
2196     *
2197     * @see hdf.object.Dataset#isVirtual()
2198     */
2199    @Override
2200    public boolean isVirtual()
2201    {
2202        return isVirtual;
2203    }
2204
2205    /*
2206     * (non-Javadoc)
2207     *
2208     * @see hdf.object.Dataset#getVirtualFilename(int)
2209     */
2210    @Override
2211    public String getVirtualFilename(int index)
2212    {
2213        if (isVirtual)
2214            return virtualNameList.get(index);
2215        else
2216            return null;
2217    }
2218
2219    /*
2220     * (non-Javadoc)
2221     *
2222     * @see hdf.object.Dataset#getVirtualMaps()
2223     */
2224    @Override
2225    public int getVirtualMaps()
2226    {
2227        if (isVirtual)
2228            return virtualNameList.size();
2229        else
2230            return -1;
2231    }
2232
2233    /*
2234     * (non-Javadoc)
2235     *
2236     * @see hdf.object.Dataset#toString(String delimiter, int maxItems)
2237     */
2238    @Override
2239    public String toString(String delimiter, int maxItems)
2240    {
2241        Object theData = originalBuf;
2242        if (theData == null) {
2243            log.debug("toString: value is null");
2244            return null;
2245        }
2246
2247        if (theData instanceof List<?>) {
2248            log.trace("toString: value is list");
2249            return null;
2250        }
2251
2252        Class<? extends Object> valClass = theData.getClass();
2253
2254        if (!valClass.isArray()) {
2255            log.trace("toString: finish - not array");
2256            String strValue = theData.toString();
2257            if (maxItems > 0 && strValue.length() > maxItems)
2258                // truncate the extra characters
2259                strValue = strValue.substring(0, maxItems);
2260            return strValue;
2261        }
2262
2263        // value is an array
2264        StringBuilder sb = new StringBuilder();
2265        int n            = Array.getLength(theData);
2266        if ((maxItems > 0) && (n > maxItems))
2267            n = maxItems;
2268
2269        log.trace("toString: isStdRef={} Array.getLength={}", ((H5Datatype)getDatatype()).isStdRef(), n);
2270        if (((H5Datatype)getDatatype()).isStdRef()) {
2271            String cname = valClass.getName();
2272            char dname   = cname.charAt(cname.lastIndexOf('[') + 1);
2273            log.trace("toString: isStdRef with cname={} dname={}", cname, dname);
2274            String ref_str = ((H5ReferenceType)getDatatype()).getObjectReferenceName((byte[])theData);
2275            log.trace("toString: ref_str={}", ref_str);
2276            return ref_str;
2277        }
2278        else {
2279            return super.toString(delimiter, maxItems);
2280        }
2281    }
2282}