/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the files COPYING and Copyright.html. *
 * COPYING can be found at the root of the source code distribution tree.    *
 * Or, see https://support.hdfgroup.org/products/licenses.html               *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.h5;

import java.lang.reflect.Array;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Vector;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
import hdf.hdf5lib.HDFArray;
import hdf.hdf5lib.HDFNativeData;
import hdf.hdf5lib.exceptions.HDF5DataFiltersException;
import hdf.hdf5lib.exceptions.HDF5Exception;
import hdf.hdf5lib.exceptions.HDF5LibraryException;
import hdf.hdf5lib.structs.H5O_info_t;
import hdf.hdf5lib.structs.H5O_token_t;

import hdf.object.Attribute;
import hdf.object.CompoundDS;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.MetaDataContainer;
import hdf.object.Utils;

import hdf.object.h5.H5Datatype;
import hdf.object.h5.H5MetaDataContainer;
import hdf.object.h5.H5ReferenceType;

/**
 * The H5CompoundDS class defines an HDF5 dataset of compound datatypes.
 *
 * An HDF5 dataset is an object composed of a collection of data elements, or raw data, and metadata
 * that stores a description of the data elements, data layout, and all other information necessary
 * to write, read, and interpret the stored data.
 *
 * An HDF5 compound datatype is similar to a struct in C or a common block in Fortran: it is a
 * collection of one or more atomic types or small arrays of such types. Each member of a compound
 * type has a name which is unique within that type, and a byte offset that determines the first
 * byte (smallest byte address) of that member in a compound datum.
 *
 * For more information on HDF5 datasets and datatypes, read the <a href=
 * "https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5
 * User's Guide</a>.
 *
 * There are two basic types of compound datasets: simple compound data and nested compound data.
 * Members of a simple compound dataset have atomic datatypes. Members of a nested compound dataset
 * are compound or array of compound data.
 *
 * Since Java does not understand C structures, we cannot directly read/write compound data values
 * as in the following C example.
 *
 * <pre>
 * typedef struct s1_t {
 *         int    a;
 *         float  b;
 *         double c;
 *         } s1_t;
 *     s1_t       s1[LENGTH];
 *     ...
 *     H5Dwrite(..., s1);
 *     H5Dread(..., s1);
 * </pre>
 *
 * Values of compound data fields are stored in a java.util.Vector object. We read and write compound
 * data by fields instead of by compound structure. For the example above, the java.util.Vector
 * object has three elements: int[LENGTH], float[LENGTH] and double[LENGTH]. Since Java understands
 * the primitive datatypes int, float and double, we are able to read/write the compound data
 * by field.
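 *
 * For example, assuming the compound type above is stored in a dataset "/s1" of a
 * file "test.h5" (hypothetical names used purely for illustration), a minimal
 * sketch of reading it by field is:
 *
 * <pre>
 * H5File file = new H5File("test.h5", FileFormat.READ);
 * Dataset dset = (Dataset) file.get("/s1");
 * dset.init();
 * List data = (List) dset.getData();
 * int[] a = (int[]) data.get(0);
 * float[] b = (float[]) data.get(1);
 * double[] c = (double[]) data.get(2);
 * </pre>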
 *
 * @version 1.1 9/4/2007
 * @author Peter X. Cao
 */
public class H5CompoundDS extends CompoundDS implements MetaDataContainer
{
    private static final long serialVersionUID = -5968625125574032736L;

    private static final Logger log = LoggerFactory.getLogger(H5CompoundDS.class);

    /**
     * The metadata object for this data object. Members of the metadata are instances of Attribute.
     */
    private H5MetaDataContainer objMetadata;

    /** the object properties */
    private H5O_info_t objInfo;

    /** flag to indicate if the dataset is an external dataset */
    private boolean isExternal = false;

    /** flag to indicate if the dataset is a virtual dataset */
    private boolean isVirtual = false;
    /** the list of virtual names */
    private List<String> virtualNameList;

    /**
     * Constructs an instance of an HDF5 compound dataset with a given file, dataset name and path.
     *
     * The dataset object represents an existing dataset in the file. For example, new
     * H5CompoundDS(file, "dset1", "/g0/") constructs a dataset object that corresponds to the
     * dataset "dset1" at group "/g0/".
     *
     * This object is usually constructed at FileFormat.open(), which loads the file structure and
     * object information into memory. It is rarely used elsewhere.
     *
     * @param theFile
     *            the file that contains the data object.
     * @param theName
     *            the name of the data object, e.g. "dset".
     * @param thePath
     *            the full path of the data object, e.g. "/arrays/".
     */
    public H5CompoundDS(FileFormat theFile, String theName, String thePath) {
        this(theFile, theName, thePath, null);
    }

    /**
     * @deprecated Not for public use in the future.<br>
     *             Use {@link #H5CompoundDS(FileFormat, String, String)} instead.
     *
     * @param theFile
     *            the file that contains the data object.
     * @param theName
     *            the name of the data object, e.g. "dset".
     * @param thePath
     *            the full path of the data object, e.g. "/arrays/".
     * @param oid
     *            the oid of the data object.
     */
    @Deprecated
    public H5CompoundDS(FileFormat theFile, String theName, String thePath, long[] oid) {
        super(theFile, theName, thePath, oid);
        objMetadata = new H5MetaDataContainer(theFile, theName, thePath, this);

        if (theFile != null) {
            if (oid == null) {
                // retrieve the object ID
                byte[] refBuf = null;
                try {
                    refBuf = H5.H5Rcreate_object(theFile.getFID(), this.getFullName(), HDF5Constants.H5P_DEFAULT);
                    this.oid = HDFNativeData.byteToLong(refBuf);
                    log.trace("constructor REF {} to OID {}", refBuf, this.oid);
                }
                catch (Exception ex) {
                    log.debug("constructor ID {} for {} failed H5Rcreate_object", theFile.getFID(), this.getFullName());
                }
                finally {
                    if (refBuf != null)
                        H5.H5Rdestroy(refBuf);
                }
            }
            log.trace("constructor OID {}", this.oid);
            try {
                objInfo = H5.H5Oget_info_by_name(theFile.getFID(), this.getFullName(), HDF5Constants.H5O_INFO_BASIC, HDF5Constants.H5P_DEFAULT);
            }
            catch (Exception ex) {
                objInfo = new H5O_info_t(-1L, null, 0, 0, 0L, 0L, 0L, 0L, 0L);
            }
        }
        else {
            this.oid = null;
            objInfo = new H5O_info_t(-1L, null, 0, 0, 0L, 0L, 0L, 0L, 0L);
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#open()
     */
    @Override
    public long open() {
        long did = HDF5Constants.H5I_INVALID_HID;

        if (getFID() < 0)
            log.trace("open(): file id for:{} is invalid", getPath() + getName());
        else {
            try {
                did = H5.H5Dopen(getFID(), getPath() + getName(), HDF5Constants.H5P_DEFAULT);
                log.trace("open(): did={}", did);
            }
            catch (HDF5Exception ex) {
                log.debug("open(): Failed to open dataset {}", getPath() + getName(), ex);
                did = HDF5Constants.H5I_INVALID_HID;
            }
        }

        return did;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#close(int)
     */
    @Override
    public void close(long did) {
        if (did >= 0) {
            try {
                H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_LOCAL);
            }
            catch (Exception ex) {
                log.debug("close(): H5Fflush(did {}) failure: ", did, ex);
            }
            try {
                H5.H5Dclose(did);
            }
            catch (HDF5Exception ex) {
                log.debug("close(): H5Dclose(did {}) failure: ", did, ex);
            }
        }
    }

    /**
     * Retrieves datatype and dataspace information from the file and sets up the
     * dataset in memory.
     *
     * init() is designed to support lazy operation in a dataset object. When a
     * data object is retrieved from file, the datatype, dataspace and raw data are
     * not loaded into memory. When it is asked to read the raw data from file,
     * init() is first called to get the datatype and dataspace information, then
     * load the raw data from file.
     *
     * init() is also used to reset the selection of a dataset (start, stride and
     * count) to the default, which is the entire dataset for 1D or 2D datasets. In
     * the following example, init() at step 1) retrieves datatype and dataspace
     * information from the file. getData() at step 3) reads only one data point.
     * init() at step 4) resets the selection to the whole dataset. getData() at
     * step 6) reads the values of the whole dataset into memory.
     *
     * <pre>
     * dset = (Dataset) file.get(NAME_DATASET);
     *
     * // 1) get datatype and dataspace information from file
     * dset.init();
     * rank = dset.getRank(); // rank = 2, a 2D dataset
     * count = dset.getSelectedDims();
     * start = dset.getStartDims();
     * dims = dset.getDims();
     *
     * // 2) select only one data point
     * for (int i = 0; i &lt; rank; i++) {
     *     start[i] = 0;
     *     count[i] = 1;
     * }
     *
     * // 3) read one data point
     * data = dset.getData();
     *
     * // 4) reset selection to the whole dataset
     * dset.init();
     *
     * // 5) clean the memory data buffer
     * dset.clearData();
     *
     * // 6) Read the whole dataset
     * data = dset.getData();
     * </pre>
     */
    @Override
    public void init() {
        if (inited) {
            resetSelection();
            log.trace("init(): Dataset already initialized");
            return; // already called. Initialize only once
        }

        long did = HDF5Constants.H5I_INVALID_HID;
        long tid = HDF5Constants.H5I_INVALID_HID;
        long sid = HDF5Constants.H5I_INVALID_HID;
        flatNameList = new Vector<>();
        flatTypeList = new Vector<>();

        did = open();
        if (did >= 0) {
            // check if it is an external or virtual dataset
            long pid = HDF5Constants.H5I_INVALID_HID;
            try {
                pid = H5.H5Dget_create_plist(did);
                try {
                    int nfiles = H5.H5Pget_external_count(pid);
                    isExternal = (nfiles > 0);
                    int layoutType = H5.H5Pget_layout(pid);
                    isVirtual = (layoutType == HDF5Constants.H5D_VIRTUAL);
                    if (isVirtual) {
                        try {
                            long vmaps = H5.H5Pget_virtual_count(pid);
                            if (vmaps > 0) {
                                virtualNameList = new Vector<>();
                                for (long next = 0; next < vmaps; next++) {
                                    try {
                                        String fname = H5.H5Pget_virtual_filename(pid, next);
                                        virtualNameList.add(fname);
                                        log.trace("init(): virtualNameList[{}]={}", next, fname);
                                    }
                                    catch (Exception err) {
                                        log.trace("init(): vds[{}] continue", next);
                                    }
                                }
                            }
                        }
                        catch (Exception err) {
                            log.debug("init(): vds count error: ", err);
                        }
                    }
                    log.trace("init(): pid={} nfiles={} isExternal={} isVirtual={}", pid, nfiles, isExternal, isVirtual);
                }
                catch (Exception ex) {
                    log.debug("init(): check if it is an external or virtual dataset: ", ex);
                }
            }
            catch (Exception ex) {
                log.debug("init(): H5Dget_create_plist() failure: ", ex);
            }
            finally {
                try {
                    H5.H5Pclose(pid);
                }
                catch (Exception ex) {
                    log.debug("init(): H5Pclose(pid {}) failure: ", pid, ex);
                }
            }

            try {
                sid = H5.H5Dget_space(did);
                rank = H5.H5Sget_simple_extent_ndims(sid);
                tid = H5.H5Dget_type(did);
                log.trace("init(): tid={} sid={} rank={}", tid, sid, rank);

                if (rank == 0) {
                    // a scalar data point
                    isScalar = true;
                    rank = 1;
                    dims = new long[] { 1 };
                    log.trace("init(): rank is a scalar data point");
                }
                else {
                    isScalar = false;
                    dims = new long[rank];
                    maxDims = new long[rank];
                    H5.H5Sget_simple_extent_dims(sid, dims, maxDims);
                    log.trace("init(): rank={}, dims={}, maxDims={}", rank, dims, maxDims);
                }

                try {
                    int nativeClass = H5.H5Tget_class(tid);
                    if (nativeClass == HDF5Constants.H5T_REFERENCE) {
                        long lsize = 1;
                        if (rank > 0) {
                            log.trace("init(): rank={}, dims={}", rank, dims);
                            for (int j = 0; j < dims.length; j++) {
                                lsize *= dims[j];
                            }
                        }
                        datatype = new H5ReferenceType(getFileFormat(), lsize, tid);
                    }
                    else
                        datatype = new H5Datatype(getFileFormat(), tid);

                    log.trace("init(): tid={} has isText={} : isVLEN={} : isEnum={} : isUnsigned={} : isStdRef={} : isRegRef={}", tid,
                            datatype.isText(), datatype.isVLEN(), ((H5Datatype) datatype).isEnum(), datatype.isUnsigned(),
                            ((H5Datatype) datatype).isStdRef(), ((H5Datatype) datatype).isRegRef());

                    H5Datatype.extractCompoundInfo((H5Datatype) datatype, "", flatNameList, flatTypeList);
                }
                catch (Exception ex) {
                    log.debug("init(): failed to create datatype for dataset: ", ex);
                    datatype = null;
                }

                // initialize member information
                numberOfMembers = flatNameList.size();
                log.trace("init(): numberOfMembers={}", numberOfMembers);

                memberNames = new String[numberOfMembers];
                memberTypes = new Datatype[numberOfMembers];
                memberOrders = new int[numberOfMembers];
                isMemberSelected = new boolean[numberOfMembers];
                memberDims = new Object[numberOfMembers];

                for (int i = 0; i < numberOfMembers; i++) {
                    isMemberSelected[i] = true;
                    memberOrders[i] = 1;
                    memberDims[i] = null;

                    try {
                        memberTypes[i] = flatTypeList.get(i);
                        log.trace("init()[{}]: memberTypes[{}]={}", i, i, memberTypes[i].getDescription());

                        if (memberTypes[i].isArray()) {
                            long mdim[] = memberTypes[i].getArrayDims();
                            int idim[] = new int[mdim.length];
                            int arrayNpoints = 1;

                            for (int j = 0; j < idim.length; j++) {
                                idim[j] = (int) mdim[j];
                                arrayNpoints *= idim[j];
                            }

                            memberDims[i] = idim;
                            memberOrders[i] = arrayNpoints;
                        }
                    }
                    catch (Exception ex) {
                        log.debug("init()[{}]: memberTypes[{}] get failure: ", i, i, ex);
                        memberTypes[i] = null;
                    }

                    try {
                        memberNames[i] = flatNameList.get(i);
                        log.trace("init()[{}]: memberNames[{}]={}", i, i, memberNames[i]);
                    }
                    catch (Exception ex) {
                        log.debug("init()[{}]: memberNames[{}] get failure: ", i, i, ex);
                        memberNames[i] = "null";
                    }
                } //  (int i=0; i<numberOfMembers; i++)

                inited = true;
            }
            catch (HDF5Exception ex) {
                numberOfMembers = 0;
                memberNames = null;
                memberTypes = null;
                memberOrders = null;
                log.debug("init(): ", ex);
            }
            finally {
                if (datatype != null)
                    datatype.close(tid);

                try {
                    H5.H5Sclose(sid);
                }
                catch (HDF5Exception ex2) {
                    log.debug("init(): H5Sclose(sid {}) failure: ", sid, ex2);
                }
            }

            close(did);

            startDims = new long[rank];
            selectedDims = new long[rank];

            resetSelection();
        }
        else {
            log.debug("init(): failed to open dataset");
        }
    }

    /**
     * Get the token for this object.
     *
     * @return the object token, as an array of longs.
     */
    public long[] getToken() {
        H5O_token_t token = objInfo.token;
        return HDFNativeData.byteToLong(token.data);
    }

    /**
     * Check if the object has any attributes attached.
     *
     * @return true if it has any attributes, false otherwise.
     */
    @Override
    public boolean hasAttribute() {
        objInfo.num_attrs = objMetadata.getObjectAttributeSize();

        if (objInfo.num_attrs < 0) {
            long did = open();
            if (did >= 0) {
                objInfo.num_attrs = 0;

                try {
                    objInfo = H5.H5Oget_info(did);
                }
                catch (Exception ex) {
                    objInfo.num_attrs = 0;
                    log.debug("hasAttribute(): get object info failure: ", ex);
                }
                finally {
                    close(did);
                }
                objMetadata.setObjectAttributeSize((int) objInfo.num_attrs);
            }
            else {
                log.debug("hasAttribute(): could not open dataset");
            }
        }

        log.trace("hasAttribute(): nAttributes={}", objInfo.num_attrs);
        return (objInfo.num_attrs > 0);
    }

    /**
     * Returns the datatype of the data object.
     *
     * @return the datatype of the data object.
     */
    @Override
    public Datatype getDatatype() {
        if (!inited)
            init();

        if (datatype == null) {
            long did = HDF5Constants.H5I_INVALID_HID;
            long tid = HDF5Constants.H5I_INVALID_HID;

            did = open();
            if (did >= 0) {
                try {
                    tid = H5.H5Dget_type(did);
                    int nativeClass = H5.H5Tget_class(tid);
                    if (nativeClass == HDF5Constants.H5T_REFERENCE) {
                        long lsize = 1;
                        if (rank > 0) {
                            log.trace("getDatatype(): rank={}, dims={}", rank, dims);
                            for (int j = 0; j < dims.length; j++) {
                                lsize *= dims[j];
                            }
                        }
                        datatype = new H5ReferenceType(getFileFormat(), lsize, tid);
                    }
                    else
                        datatype = new H5Datatype(getFileFormat(), tid);
                }
                catch (Exception ex) {
                    log.debug("getDatatype(): ", ex);
                }
                finally {
                    try {
                        H5.H5Tclose(tid);
                    }
                    catch (HDF5Exception ex) {
                        log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex);
                    }
                    try {
                        H5.H5Dclose(did);
                    }
                    catch (HDF5Exception ex) {
                        log.debug("getDatatype(): H5Dclose(did {}) failure: ", did, ex);
                    }
                }
            }
        }

        if (isExternal) {
            String pdir = this.getFileFormat().getAbsoluteFile().getParent();

            if (pdir == null) {
                pdir = ".";
            }
            System.setProperty("user.dir", pdir);
            log.trace("getDatatype(): External dataset: user.dir={}", pdir);
        }

        return datatype;
    }

    /**
     * Removes all of the elements from the metadata list.
     * The list should be empty after this call returns.
     */
    @Override
    public void clear() {
        super.clear();
        objMetadata.clear();
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#readBytes()
     */
    @Override
    public byte[] readBytes() throws HDF5Exception {
        byte[] theData = null;

        if (!isInited())
            init();

        long did = open();
        if (did >= 0) {
            long fspace = HDF5Constants.H5I_INVALID_HID;
            long mspace = HDF5Constants.H5I_INVALID_HID;
            long tid = HDF5Constants.H5I_INVALID_HID;

            try {
                long[] lsize = { 1 };
                for (int j = 0; j < selectedDims.length; j++)
                    lsize[0] *= selectedDims[j];

                fspace = H5.H5Dget_space(did);
                mspace = H5.H5Screate_simple(rank, selectedDims, null);

                // set the rectangle selection
                // HDF5 bug: for scalar dataset, H5Sselect_hyperslab gives core dump
                if (rank * dims[0] > 1)
                    H5.H5Sselect_hyperslab(fspace, HDF5Constants.H5S_SELECT_SET, startDims, selectedStride, selectedDims, null); // set block to 1

                tid = H5.H5Dget_type(did);
                long size = H5.H5Tget_size(tid) * lsize[0];
                log.trace("readBytes(): size = {}", size);

                if (size < Integer.MIN_VALUE || size > Integer.MAX_VALUE)
                    throw new Exception("Invalid int size");

                theData = new byte[(int) size];

                log.trace("readBytes(): H5Dread: did={} tid={} fspace={} mspace={}", did, tid, fspace, mspace);
                H5.H5Dread(did, tid, mspace, fspace, HDF5Constants.H5P_DEFAULT, theData);
            }
            catch (Exception ex) {
                log.debug("readBytes(): failed to read data: ", ex);
            }
            finally {
                try {
                    H5.H5Sclose(fspace);
                }
                catch (Exception ex2) {
                    log.debug("readBytes(): H5Sclose(fspace {}) failure: ", fspace, ex2);
                }
                try {
                    H5.H5Sclose(mspace);
                }
                catch (Exception ex2) {
                    log.debug("readBytes(): H5Sclose(mspace {}) failure: ", mspace, ex2);
                }
                try {
                    H5.H5Tclose(tid);
                }
                catch (HDF5Exception ex2) {
                    log.debug("readBytes(): H5Tclose(tid {}) failure: ", tid, ex2);
                }
                close(did);
            }
        }

        return theData;
    }

    /**
     * Reads the data from file.
     *
     * read() reads the data from file to a memory buffer and returns the memory
     * buffer. The dataset object does not hold the memory buffer. To store the
     * memory buffer in the dataset object, one must call getData().
     *
     * By default, the whole dataset is read into memory. Users can also select
     * a subset to read. Subsetting is done in an implicit way.
     *
     * <b>How to Select a Subset</b>
     *
     * A selection is specified by three arrays: start, stride and count.
     * <ol>
     * <li>start: offset of a selection
     * <li>stride: determines how many elements to move in each dimension
     * <li>count: number of elements to select in each dimension
     * </ol>
     * getStartDims(), getStride() and getSelectedDims() return the start,
     * stride and count arrays respectively. Applications can make a selection
     * by changing the values of the arrays.
     *
     * The following example shows how to make a subset. In the example, the
     * dataset is a 4-dimensional array of [200][100][50][10], i.e. dims[0]=200;
     * dims[1]=100; dims[2]=50; dims[3]=10; <br>
     * We want to select every other data point in dims[1] and dims[2]
     *
     * <pre>
     * int rank = dataset.getRank(); // number of dimensions of the dataset
     * long[] dims = dataset.getDims(); // the dimension sizes of the dataset
     * long[] selected = dataset.getSelectedDims(); // the selected size of the
     *                                              // dataset
     * long[] start = dataset.getStartDims(); // the offset of the selection
     * long[] stride = dataset.getStride(); // the stride of the dataset
     * int[] selectedIndex = dataset.getSelectedIndex(); // the selected
     *                                                   // dimensions for
     *                                                   // display
     *
     * // select dim1 and dim2 as 2D data for display, and slice through dim0
     * selectedIndex[0] = 1;
     * selectedIndex[1] = 2;
     * selectedIndex[2] = 0;
     *
     * // reset the selection arrays
     * for (int i = 0; i &lt; rank; i++) {
     *     start[i] = 0;
     *     selected[i] = 1;
     *     stride[i] = 1;
     * }
     *
     * // set stride to 2 on dim1 and dim2 so that every other data point is
     * // selected.
     * stride[1] = 2;
     * stride[2] = 2;
     *
     * // set the selection size of dim1 and dim2
     * selected[1] = dims[1] / stride[1];
     * selected[2] = dims[2] / stride[2];
     *
     * // when dataset.getData() is called, the selection above will be used
     * // since the dimension arrays are passed by reference. Changes of these
     * // arrays outside the dataset object directly change the values of these
     * // arrays in the dataset object.
     * </pre>
     *
     * For CompoundDS, the memory data object is a java.util.List object. Each
     * element of the list is a data array that corresponds to a compound field.
     *
     * For example, if compound dataset "comp" has the following nested
     * structure, and member datatypes
     *
     * <pre>
     * comp --&gt; m01 (int)
     * comp --&gt; m02 (float)
     * comp --&gt; nest1 --&gt; m11 (char)
     * comp --&gt; nest1 --&gt; m12 (String)
     * comp --&gt; nest1 --&gt; nest2 --&gt; m21 (long)
     * comp --&gt; nest1 --&gt; nest2 --&gt; m22 (double)
     * </pre>
     *
     * getData() returns a list of six arrays: {int[], float[], char[],
     * String[], long[] and double[]}.
     *
     * @return the data read from file.
     *
     * @see #getData()
     * @see hdf.object.DataFormat#read()
     *
     * @throws Exception
     *             if the object cannot be read
     */
    @Override
    public Object read() throws Exception {
        Object readData = null;

        if (!isInited())
            init();

        try {
            readData = compoundDatasetCommonIO(H5File.IO_TYPE.READ, null);
        }
        catch (Exception ex) {
            log.debug("read(): failed to read compound dataset: ", ex);
            throw new Exception("failed to read compound dataset: " + ex.getMessage(), ex);
        }

        return readData;
    }

    /**
     * Writes the given data buffer into this dataset in a file.
     *
     * The data buffer is a vector that contains the data values of compound fields. The data is
     * written to the file field by field.
     *
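     * For example, writing a two-field compound of {int, float} might look like the
     * following minimal sketch (the field layout and element count are assumed
     * purely for illustration):
     *
     * <pre>
     * Vector&lt;Object&gt; buf = new Vector&lt;&gt;();
     * buf.add(new int[] { 1, 2, 3 });      // first compound field
     * buf.add(new float[] { 1f, 2f, 3f }); // second compound field
     * dset.write(buf);
     * </pre>
     *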
     * @param buf
     *            The vector that contains the data values of compound fields.
     *
     * @throws Exception
     *             If there is an error at the HDF5 library level.
     */
    @Override
    public void write(Object buf) throws Exception {
        if (this.getFileFormat().isReadOnly())
            throw new Exception("cannot write to compound dataset in file opened as read-only");

        if (!isInited())
            init();

        try {
            compoundDatasetCommonIO(H5File.IO_TYPE.WRITE, buf);
        }
        catch (Exception ex) {
            log.debug("write(Object): failed to write compound dataset: ", ex);
            throw new Exception("failed to write compound dataset: " + ex.getMessage(), ex);
        }
    }

    /*
     * Routine to convert datatypes that are read in as byte arrays to
     * regular types.
     */
    @Override
    protected Object convertByteMember(final Datatype dtype, byte[] byteData) {
        Object theObj = null;

        if (dtype.isFloat() && dtype.getDatatypeSize() == 16)
            theObj = ((H5Datatype) dtype).byteToBigDecimal(byteData, 0);
        else
            theObj = super.convertByteMember(dtype, byteData);

        return theObj;
    }

    private Object compoundDatasetCommonIO(H5File.IO_TYPE ioType, Object writeBuf) throws Exception {
        H5Datatype dsDatatype = (H5Datatype) getDatatype();
        Object theData = null;

        if (numberOfMembers <= 0) {
            log.debug("compoundDatasetCommonIO(): Dataset contains no members");
            throw new Exception("dataset contains no members");
        }

        /*
         * I/O type-specific pre-initialization.
         */
        if (ioType == H5File.IO_TYPE.WRITE) {
            if ((writeBuf == null) || !(writeBuf instanceof List)) {
                log.debug("compoundDatasetCommonIO(): writeBuf is null or invalid");
                throw new Exception("write buffer is null or invalid");
            }

            /*
             * Check for any unsupported datatypes and fail early before
             * attempting to write to the dataset.
             */
            if (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isCompound()) {
                log.debug("compoundDatasetCommonIO(): cannot write dataset of type ARRAY of COMPOUND");
                throw new HDF5Exception("Unsupported dataset of type ARRAY of COMPOUND");
            }

            if (dsDatatype.isVLEN() && !dsDatatype.isVarStr() && dsDatatype.getDatatypeBase().isCompound()) {
                log.debug("compoundDatasetCommonIO(): cannot write dataset of type VLEN of COMPOUND");
                throw new HDF5Exception("Unsupported dataset of type VLEN of COMPOUND");
            }
        }

        long did = open();
        if (did >= 0) {
            long[] spaceIDs = { HDF5Constants.H5I_INVALID_HID, HDF5Constants.H5I_INVALID_HID }; // spaceIDs[0]=mspace, spaceIDs[1]=fspace

            try {
                /*
                 * NOTE: this call sets up a hyperslab selection in the file according to the
                 * current selection in the dataset object.
                 */
                long totalSelectedSpacePoints = H5Utils.getTotalSelectedSpacePoints(did, dims, startDims,
                        selectedStride, selectedDims, spaceIDs);

                theData = compoundTypeIO(ioType, did, spaceIDs, (int) totalSelectedSpacePoints, dsDatatype, writeBuf, new int[] { 0 });
            }
            finally {
                if (HDF5Constants.H5S_ALL != spaceIDs[0]) {
                    try {
                        H5.H5Sclose(spaceIDs[0]);
                    }
                    catch (Exception ex) {
                        log.debug("compoundDatasetCommonIO(): H5Sclose(spaceIDs[0] {}) failure: ", spaceIDs[0], ex);
                    }
                }

                if (HDF5Constants.H5S_ALL != spaceIDs[1]) {
                    try {
                        H5.H5Sclose(spaceIDs[1]);
                    }
                    catch (Exception ex) {
                        log.debug("compoundDatasetCommonIO(): H5Sclose(spaceIDs[1] {}) failure: ", spaceIDs[1], ex);
                    }
                }

                close(did);
            }
        }
        else
            log.debug("compoundDatasetCommonIO(): failed to open dataset");

        return theData;
    }

    /*
     * Private recursive routine to read/write an entire compound datatype field by
     * field. This routine is called recursively for ARRAY of COMPOUND and VLEN of
     * COMPOUND datatypes.
     *
     * NOTE: the globalMemberIndex hack is ugly, but we need to keep track of a
     * running counter so that we can index properly into the flattened name list
     * generated from H5Datatype.extractCompoundInfo() at dataset init time.
     */
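    /*
     * Illustration (a sketch; the actual member-name separator is whatever
     * extractCompoundInfo() produces, shown here as "." purely for readability):
     * a compound type { a; nest { b; c; } } is flattened into a name list of
     * leaf members such as { "a", "nest.b", "nest.c" }, and globalMemberIndex[0]
     * advances by one each time the recursion finishes with a flattened entry.
     */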
    private Object compoundTypeIO(H5File.IO_TYPE ioType, long did, long[] spaceIDs, int nSelPoints,
            final H5Datatype cmpdType, Object writeBuf, int[] globalMemberIndex) {
        Object theData = null;

        if (cmpdType.isArray()) {
            log.trace("compoundTypeIO(): ARRAY type");

            long[] arrayDims = cmpdType.getArrayDims();
            int arrSize = nSelPoints;
            for (int i = 0; i < arrayDims.length; i++)
                arrSize *= arrayDims[i];
            theData = compoundTypeIO(ioType, did, spaceIDs, arrSize, (H5Datatype) cmpdType.getDatatypeBase(), writeBuf, globalMemberIndex);
        }
        else if (cmpdType.isVLEN() && !cmpdType.isVarStr()) {
            /*
             * TODO: true variable-length support.
             */
            String[] errVal = new String[nSelPoints];
            String errStr = "*UNSUPPORTED*";

            for (int j = 0; j < nSelPoints; j++)
                errVal[j] = errStr;

            /*
             * Setup a fake data list.
             */
            Datatype baseType = cmpdType.getDatatypeBase();
            while (baseType != null && !baseType.isCompound()) {
                baseType = baseType.getDatatypeBase();
            }

            List<Object> fakeVlenData = (List<Object>) H5Datatype.allocateArray((H5Datatype) baseType, nSelPoints);
            fakeVlenData.add(errVal);

            theData = fakeVlenData;
        }
        else if (cmpdType.isCompound()) {
            List<Object> memberDataList = null;
            List<Datatype> typeList = cmpdType.getCompoundMemberTypes();

            log.trace("compoundTypeIO(): {} {} members:", (ioType == H5File.IO_TYPE.READ) ? "read" : "write", typeList.size());

            if (ioType == H5File.IO_TYPE.READ)
                memberDataList = (List<Object>) H5Datatype.allocateArray(cmpdType, nSelPoints);

            try {
                for (int i = 0, writeListIndex = 0; i < typeList.size(); i++) {
                    H5Datatype memberType = null;
                    String memberName = null;
                    Object memberData = null;

                    try {
                        memberType = (H5Datatype) typeList.get(i);
                    }
                    catch (Exception ex) {
                        log.debug("compoundTypeIO(): get member {} failure: ", i, ex);
                        globalMemberIndex[0]++;
                        continue;
                    }

                    /*
                     * Since the type list used here is not a flattened structure, we need to skip
                     * the member selection check for compound types, as otherwise having a single
                     * member not selected would skip the reading/writing for the entire compound
                     * type. The member selection check will be deferred to the recursive compound
                     * read/write below.
                     */
                    if (!memberType.isCompound()) {
                        if (!isMemberSelected[globalMemberIndex[0] % this.getMemberCount()]) {
                            log.debug("compoundTypeIO(): member[{}] is not selected", i);
                            globalMemberIndex[0]++;
                            continue; // the field is not selected
                        }
                    }

                    if (!memberType.isCompound()) {
                        try {
                            memberName = new String(flatNameList.get(globalMemberIndex[0]));
                        }
                        catch (Exception ex) {
                            log.debug("compoundTypeIO(): get member {} name failure: ", i, ex);
                            memberName = "null";
                        }
                    }

                    log.trace("compoundTypeIO(): member[{}]({}) is type {}", i, memberName, memberType.getDescription());

                    if (ioType == H5File.IO_TYPE.READ) {
                        try {
                            if (memberType.isCompound())
                                memberData = compoundTypeIO(ioType, did, spaceIDs, nSelPoints, memberType, writeBuf, globalMemberIndex);
                            else if (memberType.isArray() /* || (memberType.isVLEN() && !memberType.isVarStr()) */) {
                                /*
                                 * Recursively detect any nested array/vlen of compound types.
                                 */
                                boolean compoundFound = false;

                                Datatype base = memberType.getDatatypeBase();
                                while (base != null) {
                                    if (base.isCompound())
                                        compoundFound = true;

                                    base = base.getDatatypeBase();
                                }

                                if (compoundFound) {
                                    /*
                                     * Skip the top-level array/vlen type.
                                     */
                                    globalMemberIndex[0]++;

                                    memberData = compoundTypeIO(ioType, did, spaceIDs, nSelPoints, memberType, writeBuf, globalMemberIndex);
                                }
                                else {
                                    memberData = readSingleCompoundMember(did, spaceIDs, nSelPoints, memberType, memberName);
                                    globalMemberIndex[0]++;
                                }
                            }
                            else {
                                memberData = readSingleCompoundMember(did, spaceIDs, nSelPoints, memberType, memberName);
                                globalMemberIndex[0]++;
                            }
                        }
                        catch (Exception ex) {
                            log.debug("compoundTypeIO(): failed to read member {}: ", i, ex);
                            globalMemberIndex[0]++;
                            memberData = null;
                        }

                        if (memberData == null) {
                            String[] errVal = new String[nSelPoints];
                            String errStr = "*ERROR*";

                            for (int j = 0; j < nSelPoints; j++)
                                errVal[j] = errStr;

                            memberData = errVal;
                        }

                        memberDataList.add(memberData);
                    }
                    else {
                        try {
                            /*
                             * TODO: currently doesn't correctly handle non-selected compound members.
                             */
                            memberData = ((List<?>) writeBuf).get(writeListIndex++);
                        }
                        catch (Exception ex) {
                            log.debug("compoundTypeIO(): get member[{}] data failure: ", i, ex);
                            globalMemberIndex[0]++;
                            continue;
                        }

                        if (memberData == null) {
                            log.debug("compoundTypeIO(): member[{}] data is null", i);
                            globalMemberIndex[0]++;
                            continue;
                        }

                        try {
                            if (memberType.isCompound()) {
                                List<?> nestedList = (List<?>) ((List<?>) writeBuf).get(writeListIndex++);
                                compoundTypeIO(ioType, did, spaceIDs, nSelPoints, memberType, nestedList, globalMemberIndex);
                            }
                            else {
                                writeSingleCompoundMember(did, spaceIDs, nSelPoints, memberType, memberName, memberData);
                                globalMemberIndex[0]++;
                            }
                        }
                        catch (Exception ex) {
                            log.debug("compoundTypeIO(): failed to write member[{}]: ", i, ex);
                            globalMemberIndex[0]++;
                        }
                    }
                } //  (int i = 0, writeListIndex = 0; i < typeList.size(); i++)
            }
            catch (Exception ex) {
                log.debug("compoundTypeIO(): failure: ", ex);
                memberDataList = null;
            }

            theData = memberDataList;
        }

        return theData;
    }

    /*
     * Private routine to read a single field of a compound datatype by creating a
     * compound datatype and inserting the single field into that datatype.
     */
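    /*
     * In raw HDF5 terms the pattern is roughly the following sketch (the actual
     * type construction happens inside H5Datatype.createCompoundFieldType(), so
     * the identifiers below are illustrative only):
     *
     *   long fieldTid = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, fieldSize);
     *   H5.H5Tinsert(fieldTid, memberName, 0, memberTid);
     *   H5.H5Dread(did, fieldTid, mspace, fspace, HDF5Constants.H5P_DEFAULT, buf);
     */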
    private Object readSingleCompoundMember(long dsetID, long[] spaceIDs, int nSelPoints,
            final H5Datatype memberType, String memberName) throws Exception {
        H5Datatype dsDatatype = (H5Datatype) this.getDatatype();
        Object memberData = null;

        try {
            memberData = H5Datatype.allocateArray(memberType, nSelPoints);
            log.trace("readSingleCompoundMember(): allocateArray {} points ", nSelPoints);
        }
        catch (OutOfMemoryError err) {
            memberData = null;
            throw new Exception("Out of memory");
        }
        catch (Exception ex) {
            log.debug("readSingleCompoundMember(): ", ex);
            memberData = null;
        }

        if (memberData != null) {
            /*
             * Create a compound datatype containing just a single field (the one which we
             * want to read).
             */
            long compTid = -1;
            try {
                compTid = dsDatatype.createCompoundFieldType(memberName);
            }
            catch (HDF5Exception ex) {
                log.debug("readSingleCompoundMember(): unable to create compound field type for member of type {}: ",
                        memberType.getDescription(), ex);
                memberData = null;
            }

            /*
             * Actually read the data for this member now that everything has been setup.
             */
            try {
                if (memberType.isVarStr()) {
                    log.trace("readSingleCompoundMember(): H5DreadVL did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}",
                            dsetID, compTid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
                                    (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);

                    H5.H5Dread_VLStrings(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, (Object[]) memberData);
                }
                else if (memberType.isVLEN() || (memberType.isArray() && memberType.getDatatypeBase().isVLEN())) {
                    log.trace("readSingleCompoundMember(): H5DreadVL did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}",
                            dsetID, compTid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
                                    (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);

                    H5.H5DreadVL(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, (Object[]) memberData);
                }
                else {
                    log.trace("readSingleCompoundMember(): H5Dread did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}",
                            dsetID, compTid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
                                    (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);

                    H5.H5Dread(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, memberData);
                }
            }
            catch (HDF5DataFiltersException exfltr) {
                log.debug("readSingleCompoundMember(): read failure: ", exfltr);
                throw new Exception("Filter not available exception: " + exfltr.getMessage(), exfltr);
            }
            catch (Exception ex) {
                log.debug("readSingleCompoundMember(): read failure: ", ex);
                throw new Exception("failed to read compound member: " + ex.getMessage(), ex);
            }
            finally {
                dsDatatype.close(compTid);
            }

            /*
             * Perform any necessary data conversions.
             */
            if (memberType.isUnsigned()) {
                log.trace("readSingleCompoundMember(): converting from unsigned C-type integers");
                memberData = Dataset.convertFromUnsignedC(memberData, null);
            }
            else if (Utils.getJavaObjectRuntimeClass(memberData) == 'B') {
                log.trace("readSingleCompoundMember(): converting byte array member into Object");

                /*
                 * For all other types that get read into memory as a byte[] (such as nested
                 * compounds and arrays of compounds), we must manually convert the byte[] into
                 * something usable.
                 */
                memberData = convertByteMember(memberType, (byte[]) memberData);
            }
        }

        return memberData;
    }

    /*
     * Private routine to write a single field of a compound datatype by creating a
     * compound datatype and inserting the single field into that datatype.
     */
    private void writeSingleCompoundMember(long dsetID, long[] spaceIDs, int nSelPoints,
            final H5Datatype memberType, String memberName, Object theData) throws Exception {
        H5Datatype dsDatatype = (H5Datatype) this.getDatatype();

        /*
         * Check for any unsupported datatypes before attempting to write this compound
         * member.
         */
        if (memberType.isVLEN() && !memberType.isVarStr()) {
            log.debug("writeSingleCompoundMember(): writing of VL non-strings is not currently supported");
            throw new Exception("writing of VL non-strings is not currently supported");
        }

        /*
         * Perform any necessary data conversions before writing the data.
         */
        Object tmpData = theData;
        try {
            if (memberType.isUnsigned()) {
                // Check if we need to convert unsigned integer data from Java-style
                // to C-style integers
                long tsize = memberType.getDatatypeSize();
                String cname = theData.getClass().getName();
                char dname = cname.charAt(cname.lastIndexOf('[') + 1);
                boolean doIntConversion = (((tsize == 1) && (dname == 'S'))
                        || ((tsize == 2) && (dname == 'I')) || ((tsize == 4) && (dname == 'J')));

                if (doIntConversion) {
                    log.trace("writeSingleCompoundMember(): converting integer data to unsigned C-type integers");
                    tmpData = convertToUnsignedC(theData, null);
                }
            }
            else if (memberType.isString() && (Array.get(theData, 0) instanceof String)) {
                log.trace("writeSingleCompoundMember(): converting string array to byte array");
                tmpData = stringToByte((String[]) theData, (int) memberType.getDatatypeSize());
            }
            else if (memberType.isEnum() && (Array.get(theData, 0) instanceof String)) {
                log.trace("writeSingleCompoundMember(): converting enum names to values");
                tmpData = memberType.convertEnumNameToValue((String[]) theData);
            }
        }
        catch (Exception ex) {
            log.debug("writeSingleCompoundMember(): data conversion failure: ", ex);
            tmpData = null;
        }

        if (tmpData == null) {
            log.debug("writeSingleCompoundMember(): data is null");
            return;
        }

        /*
         * Create a compound datatype containing just a single field (the one which we
         * want to write).
         */
        long compTid = -1;
        try {
            compTid = dsDatatype.createCompoundFieldType(memberName);
        }
        catch (HDF5Exception ex) {
            log.debug("writeSingleCompoundMember(): unable to create compound field type for member of type {}: ",
                    memberType.getDescription(), ex);
        }

        /*
         * Actually write the data now that everything has been setup.
         */
        try {
            if (memberType.isVarStr()) {
                log.trace("writeSingleCompoundMember(): H5Dwrite_string did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}",
                        dsetID, compTid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
                                (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);

                H5.H5Dwrite_string(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, (String[]) tmpData);
            }
            else {
                log.trace("writeSingleCompoundMember(): H5Dwrite did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}",
                        dsetID, compTid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
                                (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);

                // BUG!!! does not write nested compound data and no
                // exception was caught. Need to check if it is a java
                // error or C library error.
                H5.H5Dwrite(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, tmpData);
            }
        }
        catch (Exception ex) {
            log.debug("writeSingleCompoundMember(): write failure: ", ex);
            throw new Exception("failed to write compound member: " + ex.getMessage(), ex);
        }
        finally {
            dsDatatype.close(compTid);
        }
    }

1290    /**
1291     * Converts the data values of this data object to appropriate Java integers if
1292     * they are unsigned integers.
1293     *
1294     * @see hdf.object.Dataset#convertToUnsignedC(Object)
1295     * @see hdf.object.Dataset#convertFromUnsignedC(Object, Object)
1296     *
1297     * @return the converted data buffer.
1298     */
    @Override
    public Object convertFromUnsignedC() {
        throw new UnsupportedOperationException("H5CompoundDS:convertFromUnsignedC Unsupported operation.");
    }

    /**
     * Converts Java integer data values of this data object back to unsigned C-type
     * integer data if they are unsigned integers. This operation is not supported for
     * compound datasets; this implementation always throws an UnsupportedOperationException.
     *
     * @see hdf.object.Dataset#convertToUnsignedC(Object)
     * @see hdf.object.Dataset#convertToUnsignedC(Object, Object)
     *
     * @return the converted data buffer.
     */
    @Override
    public Object convertToUnsignedC() {
        throw new UnsupportedOperationException("H5CompoundDS:convertToUnsignedC Unsupported operation.");
    }

    /**
     * Retrieves the object's metadata, such as attributes, from the file.
     *
     * Metadata, such as attributes, is stored in a List.
     *
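     * For example, a minimal sketch of listing this object's attribute names
     * (the variable {@code dset} is illustrative, not part of this API):
     *
     * <pre>
     * List&lt;Attribute&gt; attrs = dset.getMetadata();
     * for (Attribute attr : attrs)
     *     System.out.println(attr.getAttributeName());
     * </pre>
     *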
     * @return the list of metadata objects.
     *
     * @throws HDF5Exception
     *             if the metadata can not be retrieved
     */
    @Override
    public List<Attribute> getMetadata() throws HDF5Exception {
        int gmIndexType = 0;
        int gmIndexOrder = 0;

        try {
            gmIndexType = fileFormat.getIndexType(null);
        }
        catch (Exception ex) {
            log.debug("getMetadata(): getIndexType failed: ", ex);
        }
        try {
            gmIndexOrder = fileFormat.getIndexOrder(null);
        }
        catch (Exception ex) {
            log.debug("getMetadata(): getIndexOrder failed: ", ex);
        }
        return this.getMetadata(gmIndexType, gmIndexOrder);
    }

    /**
     * Retrieves the object's metadata, such as attributes, from the file.
     *
     * Metadata, such as attributes, is stored in a List.
     *
     * @param attrPropList
     *             the list of attribute retrieval properties: the index type
     *             followed by the index order
     *
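     * For example, a minimal sketch assuming the standard index constants from
     * HDF5Constants ({@code dset} is illustrative):
     *
     * <pre>
     * List&lt;Attribute&gt; attrs = dset.getMetadata(HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC);
     * </pre>
     *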
     * @return the list of metadata objects.
     *
     * @throws HDF5Exception
     *             if the metadata can not be retrieved
     */
    public List<Attribute> getMetadata(int... attrPropList) throws HDF5Exception {
        if (!isInited())
            init();

        try {
            this.linkTargetObjName = H5File.getLinkTargetName(this);
        }
        catch (Exception ex) {
            log.debug("getMetadata(): getLinkTargetName failed: ", ex);
        }

        if (objMetadata.getAttributeList() == null) {
            long did = HDF5Constants.H5I_INVALID_HID;
            long pcid = HDF5Constants.H5I_INVALID_HID;
            long paid = HDF5Constants.H5I_INVALID_HID;

            did = open();
            if (did >= 0) {
                try {
                    // get the compression and chunk information
                    pcid = H5.H5Dget_create_plist(did);
                    paid = H5.H5Dget_access_plist(did);
                    long storageSize = H5.H5Dget_storage_size(did);
                    int nfilt = H5.H5Pget_nfilters(pcid);
                    int layoutType = H5.H5Pget_layout(pcid);

                    storageLayout.setLength(0);
                    compression.setLength(0);

                    if (layoutType == HDF5Constants.H5D_CHUNKED) {
                        chunkSize = new long[rank];
                        H5.H5Pget_chunk(pcid, rank, chunkSize);
                        int n = chunkSize.length;
                        storageLayout.append("CHUNKED: ").append(chunkSize[0]);
                        for (int i = 1; i < n; i++)
                            storageLayout.append(" X ").append(chunkSize[i]);

                        if (nfilt > 0) {
                            long nelmts = 1;
                            long uncompSize;
                            long datumSize = getDatatype().getDatatypeSize();

                            if (datumSize < 0) {
                                long tmptid = HDF5Constants.H5I_INVALID_HID;
                                try {
                                    tmptid = H5.H5Dget_type(did);
                                    datumSize = H5.H5Tget_size(tmptid);
                                }
                                finally {
                                    try {
                                        H5.H5Tclose(tmptid);
                                    }
                                    catch (Exception ex2) {
                                        log.debug("getMetadata(): H5Tclose(tmptid {}) failure: ", tmptid, ex2);
                                    }
                                }
                            }

                            for (int i = 0; i < rank; i++)
                                nelmts *= dims[i];
                            uncompSize = nelmts * datumSize;

                            /* compression ratio = uncompressed size / compressed size */

                            if (storageSize != 0) {
                                double ratio = (double) uncompSize / (double) storageSize;
                                DecimalFormat df = new DecimalFormat();
                                df.setMinimumFractionDigits(3);
                                df.setMaximumFractionDigits(3);
                                compression.append(df.format(ratio)).append(":1");
                            }
                        }
                    }
                    else if (layoutType == HDF5Constants.H5D_COMPACT) {
                        storageLayout.append("COMPACT");
                    }
                    else if (layoutType == HDF5Constants.H5D_CONTIGUOUS) {
                        storageLayout.append("CONTIGUOUS");
                        if (H5.H5Pget_external_count(pcid) > 0)
                            storageLayout.append(" - EXTERNAL ");
                    }
                    else if (layoutType == HDF5Constants.H5D_VIRTUAL) {
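                        // query the VDS view mode, the printf gap, and each
                        // source file/dataset mapping for display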
                        storageLayout.append("VIRTUAL - ");
                        try {
                            long vmaps = H5.H5Pget_virtual_count(pcid);
                            try {
                                int virtView = H5.H5Pget_virtual_view(paid);
                                long virtGap = H5.H5Pget_virtual_printf_gap(paid);
                                if (virtView == HDF5Constants.H5D_VDS_FIRST_MISSING)
                                    storageLayout.append("First Missing");
                                else
                                    storageLayout.append("Last Available");
                                storageLayout.append("\nGAP : ").append(virtGap);
                            }
                            catch (Exception err) {
                                log.debug("getMetadata(): vds error: ", err);
                                storageLayout.append("ERROR");
                            }
                            storageLayout.append("\nMAPS : ").append(vmaps);
                            if (vmaps > 0) {
                                for (long next = 0; next < vmaps; next++) {
                                    try {
                                        H5.H5Pget_virtual_vspace(pcid, next);
                                        H5.H5Pget_virtual_srcspace(pcid, next);
                                        String fname = H5.H5Pget_virtual_filename(pcid, next);
                                        String dsetname = H5.H5Pget_virtual_dsetname(pcid, next);
                                        storageLayout.append("\n").append(fname).append(" : ").append(dsetname);
                                    }
                                    catch (Exception err) {
                                        log.debug("getMetadata(): vds space[{}] error: ", next, err);
                                        storageLayout.append("ERROR");
                                    }
                                }
                            }
                        }
                        catch (Exception err) {
                            log.debug("getMetadata(): vds count error: ", err);
                            storageLayout.append("ERROR");
                        }
                    }
                    else {
                        chunkSize = null;
                        storageLayout.append("NONE");
                    }

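                    // out parameters for H5Pget_filter: the filter flags, the
                    // number of client-data values, the values themselves, and
                    // the filter name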
                    int[] flags = { 0, 0 };
                    long[] cdNelmts = { 20 };
                    int[] cdValues = new int[(int) cdNelmts[0]];
                    String[] cdName = { "", "" };
                    log.trace("getMetadata(): {} filters in pipeline", nfilt);
                    int filter = -1;
                    int[] filterConfig = { 1 };

                    filters.setLength(0);

                    if (nfilt == 0) {
                        filters.append("NONE");
                    }
                    else {
                        for (int i = 0, k = 0; i < nfilt; i++) {
                            log.trace("getMetadata(): filter[{}]", i);
                            if (i > 0)
                                filters.append(", ");
                            if (k > 0)
                                compression.append(", ");

                            try {
                                cdNelmts[0] = 20;
                                cdValues = new int[(int) cdNelmts[0]];
                                filter = H5.H5Pget_filter(pcid, i, flags, cdNelmts, cdValues, 120, cdName, filterConfig);
                                log.trace("getMetadata(): filter[{}] is {} has {} elements ", i, cdName[0], cdNelmts[0]);
                                for (int j = 0; j < cdNelmts[0]; j++)
                                    log.trace("getMetadata(): filter[{}] element {} = {}", i, j, cdValues[j]);
                            }
                            catch (Exception err) {
                                log.debug("getMetadata(): filter[{}] error: ", i, err);
                                filters.append("ERROR");
                                continue;
                            }

                            if (filter == HDF5Constants.H5Z_FILTER_NONE) {
                                filters.append("NONE");
                            }
                            else if (filter == HDF5Constants.H5Z_FILTER_DEFLATE) {
                                filters.append("GZIP");
                                compression.append(COMPRESSION_GZIP_TXT).append(cdValues[0]);
                                k++;
                            }
                            else if (filter == HDF5Constants.H5Z_FILTER_FLETCHER32) {
                                filters.append("Error detection filter");
                            }
                            else if (filter == HDF5Constants.H5Z_FILTER_SHUFFLE) {
                                filters.append("SHUFFLE: Nbytes = ").append(cdValues[0]);
                            }
                            else if (filter == HDF5Constants.H5Z_FILTER_NBIT) {
                                filters.append("NBIT");
                            }
                            else if (filter == HDF5Constants.H5Z_FILTER_SCALEOFFSET) {
                                filters.append("SCALEOFFSET: MIN BITS = ").append(cdValues[0]);
                            }
                            else if (filter == HDF5Constants.H5Z_FILTER_SZIP) {
                                filters.append("SZIP");
                                compression.append("SZIP: Pixels per block = ").append(cdValues[1]);
                                k++;
                                int flag = -1;
                                try {
                                    flag = H5.H5Zget_filter_info(filter);
                                }
                                catch (Exception ex) {
                                    log.debug("getMetadata(): H5Zget_filter_info failure: ", ex);
                                    flag = -1;
                                }
                                if (flag == HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED)
                                    compression.append(": H5Z_FILTER_CONFIG_DECODE_ENABLED");
                                else if ((flag == HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED)
                                        || (flag >= (HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED
                                                + HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED)))
                                    compression.append(": H5Z_FILTER_CONFIG_ENCODE_ENABLED");
                            }
                            else {
                                filters.append("USERDEFINED ").append(cdName[0]).append("(").append(filter).append("): ");
                                for (int j = 0; j < cdNelmts[0]; j++) {
                                    if (j > 0)
                                        filters.append(", ");
                                    filters.append(cdValues[j]);
                                }
                                log.debug("getMetadata(): filter[{}] is user defined compression", i);
                            }
                        } //  (int i=0; i<nfilt; i++)
                    }

                    if (compression.length() == 0)
                        compression.append("NONE");
                    log.trace("getMetadata(): filter compression={}", compression);
                    log.trace("getMetadata(): filter information={}", filters);

                    storage.setLength(0);
                    storage.append("SIZE: ").append(storageSize);

                    try {
                        int[] at = { 0 };
                        H5.H5Pget_alloc_time(pcid, at);
                        storage.append(", allocation time: ");
                        if (at[0] == HDF5Constants.H5D_ALLOC_TIME_EARLY)
                            storage.append("Early");
                        else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_INCR)
                            storage.append("Incremental");
                        else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_LATE)
                            storage.append("Late");
                        else
                            storage.append("Default");
                    }
                    catch (Exception ex) {
                        log.debug("getMetadata(): storage allocation time query failure: ", ex);
                    }
                    log.trace("getMetadata(): storage={}", storage);
                }
                finally {
                    try {
                        H5.H5Pclose(paid);
                    }
                    catch (Exception ex) {
                        log.debug("getMetadata(): H5Pclose(paid {}) failure: ", paid, ex);
                    }
                    try {
                        H5.H5Pclose(pcid);
                    }
                    catch (Exception ex) {
                        log.debug("getMetadata(): H5Pclose(pcid {}) failure: ", pcid, ex);
                    }
                    close(did);
                }
            }
        }

        List<Attribute> attrlist = null;
        try {
            attrlist = objMetadata.getMetadata(attrPropList);
        }
        catch (Exception ex) {
            log.debug("getMetadata(): getMetadata failed: ", ex);
        }
        return attrlist;
    }

    /**
     * Writes a specific piece of metadata (such as an attribute) into the file.
     *
     * If an HDF(4&amp;5) attribute exists in the file, this method updates its
     * value. If the attribute does not exist in the file, it creates the
     * attribute in the file and attaches it to the object. Writing a new
     * attribute fails if an attribute with the same name already exists on the
     * object. To update the value of an existing attribute in the file, get the
     * instance of the attribute with getMetadata(), change its values, then use
     * writeMetadata() to write the value back.
     *
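     * For example, a minimal sketch of the update workflow described above
     * ({@code dset} and the attribute name are illustrative):
     *
     * <pre>
     * List&lt;Attribute&gt; attrs = dset.getMetadata();
     * for (Attribute attr : attrs) {
     *     if (attr.getAttributeName().equals(&quot;units&quot;)) {
     *         // ... modify the attribute's data value here ...
     *         dset.writeMetadata(attr);
     *     }
     * }
     * </pre>
     *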
     * @param info
     *            the metadata to write.
     *
     * @throws Exception
     *             if the metadata can not be written
     */
    @Override
    public void writeMetadata(Object info) throws Exception {
        try {
            objMetadata.writeMetadata(info);
        }
        catch (Exception ex) {
            log.debug("writeMetadata(): Object not an Attribute");
            return;
        }
    }

    /**
     * Deletes an existing piece of metadata from this object.
     *
     * @param info
     *            the metadata to delete.
     *
     * @throws HDF5Exception
     *             if the metadata can not be removed
     */
    @Override
    public void removeMetadata(Object info) throws HDF5Exception {
        try {
            objMetadata.removeMetadata(info);
        }
        catch (Exception ex) {
            log.debug("removeMetadata(): Object not an Attribute");
            return;
        }

        Attribute attr = (Attribute) info;
        log.trace("removeMetadata(): {}", attr.getAttributeName());
        long did = open();
        if (did >= 0) {
            try {
                H5.H5Adelete(did, attr.getAttributeName());
            }
            finally {
                close(did);
            }
        }
        else {
            log.debug("removeMetadata(): failed to open compound dataset");
        }
    }

    /**
     * Updates an existing piece of metadata attached to this object.
     *
     * @param info
     *            the metadata to update.
     *
     * @throws HDF5Exception
     *             if the metadata can not be updated
     */
    @Override
    public void updateMetadata(Object info) throws HDF5Exception {
        try {
            objMetadata.updateMetadata(info);
        }
        catch (Exception ex) {
            log.debug("updateMetadata(): Object not an Attribute");
            return;
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#setName(java.lang.String)
     */
    @Override
    public void setName(String newName) throws Exception {
        if (newName == null)
            throw new IllegalArgumentException("The new name is NULL");

        H5File.renameObject(this, newName);
        super.setName(newName);
    }

    /**
     * @deprecated Not for public use in the future. <br>
     *             Use
     *             {@link #create(String, Group, long[], long[], long[], int, String[], Datatype[], int[], long[][], Object)}
     *             instead.
     *
     * @param name
     *            the name of the dataset to create.
     * @param pgroup
     *            parent group where the new dataset is created.
     * @param dims
     *            the dimension size of the dataset.
     * @param memberNames
     *            the names of the compound datatype members
     * @param memberDatatypes
     *            the datatypes of the compound datatype members
     * @param memberSizes
     *            the dimension sizes of the members
     * @param data
     *            list of data arrays written to the new dataset, null if no data is written to the new
     *            dataset.
     *
     * @return the new compound dataset if successful; otherwise returns null.
     *
     * @throws Exception
     *             if there is a failure.
     */
    @Deprecated
    public static Dataset create(String name, Group pgroup, long[] dims, String[] memberNames,
            Datatype[] memberDatatypes, int[] memberSizes, Object data) throws Exception {
        if ((pgroup == null) || (name == null) || (dims == null) || (memberNames == null)
                || (memberDatatypes == null) || (memberSizes == null)) {
            return null;
        }

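        // each memberSizes[i] describes a rank-1 member; expand it into the
        // rank/dims form expected by the full create() method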
        int nMembers = memberNames.length;
        int memberRanks[] = new int[nMembers];
        long memberDims[][] = new long[nMembers][1];
        for (int i = 0; i < nMembers; i++) {
            memberRanks[i] = 1;
            memberDims[i][0] = memberSizes[i];
        }

        return H5CompoundDS.create(name, pgroup, dims, memberNames, memberDatatypes, memberRanks, memberDims, data);
    }

    /**
     * @deprecated Not for public use in the future. <br>
     *             Use
     *             {@link #create(String, Group, long[], long[], long[], int, String[], Datatype[], int[], long[][], Object)}
     *             instead.
     *
     * @param name
     *            the name of the dataset to create.
     * @param pgroup
     *            parent group where the new dataset is created.
     * @param dims
     *            the dimension size of the dataset.
     * @param memberNames
     *            the names of the compound datatype members
     * @param memberDatatypes
     *            the datatypes of the compound datatype members
     * @param memberRanks
     *            the ranks of the members
     * @param memberDims
     *            the dimension sizes of the members
     * @param data
     *            list of data arrays written to the new dataset, null if no data is written to the new
     *            dataset.
     *
     * @return the new compound dataset if successful; otherwise returns null.
     *
     * @throws Exception
     *             if the dataset can not be created.
     */
    @Deprecated
    public static Dataset create(String name, Group pgroup, long[] dims, String[] memberNames,
            Datatype[] memberDatatypes, int[] memberRanks, long[][] memberDims, Object data) throws Exception {
        return H5CompoundDS.create(name, pgroup, dims, null, null, -1, memberNames, memberDatatypes, memberRanks,
                memberDims, data);
    }

    /**
     * Creates a simple compound dataset in a file, with or without chunking and compression.
     *
     * This function provides an easy way to create a simple compound dataset in a file by hiding
     * the tedious details of creating a compound dataset from users.
     *
     * This function calls H5.H5Dcreate() to create a simple compound dataset in a file. Nested
     * compound datasets are not supported. The required information to create a compound dataset
     * includes the name, the parent group and data space of the dataset, and the names, datatypes
     * and data spaces of the compound fields. Other information, such as chunks, compression and
     * the data buffer, is optional.
     *
     * The following example shows how to use this function to create a compound dataset in a file.
     *
     * <pre>
     * H5File file = null;
     * String message = &quot;&quot;;
     * Group pgroup = null;
     * int[] DATA_INT = new int[DIM_SIZE];
     * float[] DATA_FLOAT = new float[DIM_SIZE];
     * String[] DATA_STR = new String[DIM_SIZE];
     * long[] DIMs = { 50, 10 };
     * long[] CHUNKs = { 25, 5 };
     *
     * try {
     *     file = (H5File) H5FILE.open(fname, H5File.CREATE);
     *     file.open();
     *     pgroup = (Group) file.get(&quot;/&quot;);
     * }
     * catch (Exception ex) {
     * }
     *
     * Vector data = new Vector();
     * data.add(0, DATA_INT);
     * data.add(1, DATA_FLOAT);
     * data.add(2, DATA_STR);
     *
     * // create the member datatypes and the compound dataset
     * Datatype[] mdtypes = new H5Datatype[3];
     * String[] mnames = { &quot;int&quot;, &quot;float&quot;, &quot;string&quot; };
     * Dataset dset = null;
     * try {
     *     mdtypes[0] = new H5Datatype(Datatype.CLASS_INTEGER, 4, Datatype.NATIVE, Datatype.NATIVE);
     *     mdtypes[1] = new H5Datatype(Datatype.CLASS_FLOAT, 4, Datatype.NATIVE, Datatype.NATIVE);
     *     mdtypes[2] = new H5Datatype(Datatype.CLASS_STRING, STR_LEN, Datatype.NATIVE, Datatype.NATIVE);
     *     dset = file.createCompoundDS(&quot;/CompoundDS&quot;, pgroup, DIMs, null, CHUNKs, 9, mnames, mdtypes, null, data);
     * }
     * catch (Exception ex) {
     *     failed(message, ex, file);
     *     return 1;
     * }
     * </pre>
     *
     * @param name
     *            the name of the dataset to create.
     * @param pgroup
     *            parent group where the new dataset is created.
     * @param dims
     *            the dimension size of the dataset.
     * @param maxdims
     *            the max dimension size of the dataset. maxdims is set to dims if maxdims = null.
     * @param chunks
     *            the chunk size of the dataset. No chunking if chunks = null.
     * @param gzip
     *            GZIP compression level (1 to 9); 0 or a negative value means no compression.
     * @param memberNames
     *            the names of the compound datatype members
     * @param memberDatatypes
     *            the datatypes of the compound datatype members
     * @param memberRanks
     *            the ranks of the members
     * @param memberDims
     *            the dimension sizes of the members
     * @param data
     *            list of data arrays written to the new dataset, null if no data is written to the new
     *            dataset.
     *
     * @return the new compound dataset if successful; otherwise returns null.
     *
     * @throws Exception
     *             if there is a failure.
     */
    public static Dataset create(String name, Group pgroup, long[] dims, long[] maxdims, long[] chunks, int gzip,
            String[] memberNames, Datatype[] memberDatatypes, int[] memberRanks, long[][] memberDims, Object data) throws Exception {
        H5CompoundDS dataset = null;
        String fullPath = null;
        long did = HDF5Constants.H5I_INVALID_HID;
        long plist = HDF5Constants.H5I_INVALID_HID;
        long sid = HDF5Constants.H5I_INVALID_HID;
        long tid = HDF5Constants.H5I_INVALID_HID;

        if ((pgroup == null) || (name == null) || (dims == null) || ((gzip > 0) && (chunks == null))
                || (memberNames == null) || (memberDatatypes == null) || (memberRanks == null)
                || (memberDims == null)) {
            log.debug("create(): one or more parameters are null");
            return null;
        }

        H5File file = (H5File) pgroup.getFileFormat();
        if (file == null) {
            log.debug("create(): parent group FileFormat is null");
            return null;
        }

        String path = HObject.SEPARATOR;
        if (!pgroup.isRoot()) {
            path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR;
            if (name.endsWith("/"))
                name = name.substring(0, name.length() - 1);
            int idx = name.lastIndexOf('/');
            if (idx >= 0)
                name = name.substring(idx + 1);
        }

        fullPath = path + name;

        int typeSize = 0;
        int nMembers = memberNames.length;
        long[] mTypes = new long[nMembers];
        int memberSize = 1;
        for (int i = 0; i < nMembers; i++) {
            memberSize = 1;
            for (int j = 0; j < memberRanks[i]; j++)
                memberSize *= memberDims[i][j];

            mTypes[i] = -1;
            // the member is an array
            if ((memberSize > 1) && (!memberDatatypes[i].isString())) {
                long tmptid = -1;
                if ((tmptid = memberDatatypes[i].createNative()) >= 0) {
                    try {
                        mTypes[i] = H5.H5Tarray_create(tmptid, memberRanks[i], memberDims[i]);
                    }
                    finally {
                        try {
                            H5.H5Tclose(tmptid);
                        }
                        catch (Exception ex) {
                            log.debug("create(): H5Tclose(tmptid {}) failure: ", tmptid, ex);
                        }
                    }
                }
            }
            else {
                mTypes[i] = memberDatatypes[i].createNative();
            }
            try {
                typeSize += H5.H5Tget_size(mTypes[i]);
            }
            catch (Exception ex) {
                log.debug("create(): array create H5Tget_size:", ex);

                while (i >= 0) {
                    try {
                        H5.H5Tclose(mTypes[i]);
                    }
                    catch (HDF5Exception ex2) {
                        log.debug("create(): H5Tclose(mTypes[{}] {}) failure: ", i, mTypes[i], ex2);
                    }
                    i--;
                }
                throw ex;
            }
        } //  (int i = 0; i < nMembers; i++) {

        // set up chunking and compression
        boolean isExtendable = false;
        if (maxdims != null) {
            for (int i = 0; i < maxdims.length; i++) {
                if (maxdims[i] == 0)
                    maxdims[i] = dims[i];
                else if (maxdims[i] < 0)
                    maxdims[i] = HDF5Constants.H5S_UNLIMITED;

                if (maxdims[i] != dims[i])
                    isExtendable = true;
            }
        }

        // HDF5 requires chunking in order to define extendible datasets.
        // Chunking makes it possible to extend datasets efficiently, without
        // having to reorganize storage excessively. Use a default chunk size
        // of min(dim, 64) in each dimension, which gives reasonable performance.
        if ((chunks == null) && isExtendable) {
            chunks = new long[dims.length];
            for (int i = 0; i < dims.length; i++)
                chunks[i] = Math.min(dims[i], 64);
        }

        // prepare the dataspace and datatype
        int rank = dims.length;

        try {
            sid = H5.H5Screate_simple(rank, dims, maxdims);

            // figure out creation properties
            plist = HDF5Constants.H5P_DEFAULT;

            tid = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, typeSize);
            int offset = 0;
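            // insert each member at its running byte offset within the compound type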
            for (int i = 0; i < nMembers; i++) {
                H5.H5Tinsert(tid, memberNames[i], offset, mTypes[i]);
                offset += H5.H5Tget_size(mTypes[i]);
            }

            if (chunks != null) {
                plist = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);

                H5.H5Pset_layout(plist, HDF5Constants.H5D_CHUNKED);
                H5.H5Pset_chunk(plist, rank, chunks);

                // compression requires chunking
                if (gzip > 0) {
                    H5.H5Pset_deflate(plist, gzip);
                }
            }

            long fid = file.getFID();

            did = H5.H5Dcreate(fid, fullPath, tid, sid, HDF5Constants.H5P_DEFAULT, plist, HDF5Constants.H5P_DEFAULT);
            dataset = new H5CompoundDS(file, name, path);
        }
        finally {
            try {
                H5.H5Pclose(plist);
            }
            catch (HDF5Exception ex) {
                log.debug("create(): H5Pclose(plist {}) failure: ", plist, ex);
            }
            try {
                H5.H5Sclose(sid);
            }
            catch (HDF5Exception ex) {
                log.debug("create(): H5Sclose(sid {}) failure: ", sid, ex);
            }
            try {
                H5.H5Tclose(tid);
            }
            catch (HDF5Exception ex) {
                log.debug("create(): H5Tclose(tid {}) failure: ", tid, ex);
            }
            try {
                H5.H5Dclose(did);
            }
            catch (HDF5Exception ex) {
                log.debug("create(): H5Dclose(did {}) failure: ", did, ex);
            }

            for (int i = 0; i < nMembers; i++) {
                try {
                    H5.H5Tclose(mTypes[i]);
                }
                catch (HDF5Exception ex) {
                    log.debug("create(): H5Tclose(mTypes[{}] {}) failure: ", i, mTypes[i], ex);
                }
            }
        }

        if (dataset != null) {
            pgroup.addToMemberList(dataset);
            if (data != null) {
                dataset.init();
                long selected[] = dataset.getSelectedDims();
                for (int i = 0; i < rank; i++)
                    selected[i] = dims[i];
                dataset.write(data);
            }
        }

        return dataset;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#isString(long)
     */
    @Override
    public boolean isString(long tid) {
        boolean b = false;
        try {
            b = (HDF5Constants.H5T_STRING == H5.H5Tget_class(tid));
        }
        catch (Exception ex) {
            b = false;
        }

        return b;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#getSize(long)
     */
    @Override
    public long getSize(long tid) {
        return H5Datatype.getDatatypeSize(tid);
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#isVirtual()
     */
    @Override
    public boolean isVirtual() {
        return isVirtual;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#getVirtualFilename(int)
     */
    @Override
    public String getVirtualFilename(int index) {
        if (isVirtual)
            return virtualNameList.get(index);
        else
            return null;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#getVirtualMaps()
     */
    @Override
    public int getVirtualMaps() {
        if (isVirtual)
            return virtualNameList.size();
        else
            return -1;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#toString(String delimiter, int maxItems)
     */
    @Override
    public String toString(String delimiter, int maxItems) {
        Object theData = originalBuf;
        if (theData == null) {
            log.debug("toString: value is null");
            return null;
        }

        if (theData instanceof List<?>) {
            log.trace("toString: value is list");
            return null;
        }

        Class<? extends Object> valClass = theData.getClass();

        if (!valClass.isArray()) {
            log.trace("toString: finish - not array");
            String strValue = theData.toString();
            if (maxItems > 0 && strValue.length() > maxItems)
                // truncate the extra characters
                strValue = strValue.substring(0, maxItems);
            return strValue;
        }

        // value is an array
        StringBuilder sb = new StringBuilder();
        int n = Array.getLength(theData);
        if ((maxItems > 0) && (n > maxItems))
            n = maxItems;

        log.trace("toString: isStdRef={} Array.getLength={}", ((H5Datatype) getDatatype()).isStdRef(), n);
        if (((H5Datatype) getDatatype()).isStdRef()) {
            String cname = valClass.getName();
            char dname = cname.charAt(cname.lastIndexOf('[') + 1);
            log.trace("toString: isStdRef with cname={} dname={}", cname, dname);
            String ref_str = ((H5ReferenceType) getDatatype()).getObjectReferenceName((byte[]) theData);
            log.trace("toString: ref_str={}", ref_str);
            return ref_str;
        }
        else {
            return super.toString(delimiter, maxItems);
        }
    }

}