001/*****************************************************************************
002 * Copyright by The HDF Group.                                               *
003 * Copyright by the Board of Trustees of the University of Illinois.         *
004 * All rights reserved.                                                      *
005 *                                                                           *
006 * This file is part of the HDF Java Products distribution.                  *
007 * The full copyright notice, including terms governing use, modification,   *
008 * and redistribution, is contained in the files COPYING and Copyright.html. *
009 * COPYING can be found at the root of the source code distribution tree.    *
010 * Or, see https://support.hdfgroup.org/products/licenses.html               *
011 * If you do not have access to either file, you may request a copy from     *
012 * help@hdfgroup.org.                                                        *
013 ****************************************************************************/
014
015package hdf.object.h5;
016
017import java.lang.reflect.Array;
018import java.text.DecimalFormat;
019import java.util.ArrayList;
020import java.util.Arrays;
021import java.util.Iterator;
022import java.util.List;
023import java.util.Vector;
024
025import hdf.hdf5lib.H5;
026import hdf.hdf5lib.HDF5Constants;
027import hdf.hdf5lib.HDFNativeData;
028import hdf.hdf5lib.exceptions.HDF5DataFiltersException;
029import hdf.hdf5lib.exceptions.HDF5Exception;
030import hdf.hdf5lib.structs.H5O_info_t;
031import hdf.object.Attribute;
032import hdf.object.CompoundDS;
033import hdf.object.Dataset;
034import hdf.object.Datatype;
035import hdf.object.FileFormat;
036import hdf.object.Group;
037import hdf.object.HObject;
038import hdf.object.Utils;
039
040/**
041 * The H5CompoundDS class defines an HDF5 dataset of compound datatypes.
042 * <p>
043 * An HDF5 dataset is an object composed of a collection of data elements, or raw data, and metadata
044 * that stores a description of the data elements, data layout, and all other information necessary
045 * to write, read, and interpret the stored data.
046 * <p>
047 * A HDF5 compound datatype is similar to a struct in C or a common block in Fortran: it is a
048 * collection of one or more atomic types or small arrays of such types. Each member of a compound
049 * type has a name which is unique within that type, and a byte offset that determines the first
050 * byte (smallest byte address) of that member in a compound datum.
051 * <p>
052 * For more information on HDF5 datasets and datatypes, read the <a href=
053 * "https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5
054 * User's Guide</a>.
055 * <p>
056 * There are two basic types of compound datasets: simple compound data and nested compound data.
057 * Members of a simple compound dataset have atomic datatypes. Members of a nested compound dataset
058 * are compound or array of compound data.
059 * <p>
060 * Since Java does not understand C structures, we cannot directly read/write compound data values
061 * as in the following C example.
062 *
063 * <pre>
064 * typedef struct s1_t {
065 *         int    a;
066 *         float  b;
067 *         double c;
068 *         } s1_t;
069 *     s1_t       s1[LENGTH];
070 *     ...
071 *     H5Dwrite(..., s1);
072 *     H5Dread(..., s1);
073 * </pre>
074 *
075 * Values of compound data fields are stored in java.util.Vector object. We read and write compound
076 * data by fields instead of compound structure. As for the example above, the java.util.Vector
077 * object has three elements: int[LENGTH], float[LENGTH] and double[LENGTH]. Since Java understands
078 * the primitive datatypes of int, float and double, we will be able to read/write the compound data
079 * by field.
080 *
081 * @version 1.1 9/4/2007
082 * @author Peter X. Cao
083 */
084public class H5CompoundDS extends CompoundDS {
085    private static final long serialVersionUID = -5968625125574032736L;
086
087    private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5CompoundDS.class);
088
089    /**
090     * The list of attributes of this data object. Members of the list are instance of Attribute.
091     */
092    private List<Attribute> attributeList;
093
094    private int nAttributes = -1;
095
096    private H5O_info_t objInfo;
097
098    /**
099     * A list of names of all fields including nested fields.
100     * <p>
101     * The nested names are separated by CompoundDS.SEPARATOR. For example, if compound dataset "A" has
102     * the following nested structure,
103     *
104     * <pre>
105     * A --&gt; m01
106     * A --&gt; m02
107     * A --&gt; nest1 --&gt; m11
108     * A --&gt; nest1 --&gt; m12
109     * A --&gt; nest1 --&gt; nest2 --&gt; m21
110     * A --&gt; nest1 --&gt; nest2 --&gt; m22
111     * i.e.
112     * A = { m01, m02, nest1{m11, m12, nest2{ m21, m22}}}
113     * </pre>
114     *
115     * The flatNameList of compound dataset "A" will be {m01, m02, nest1[m11, nest1[m12,
116     * nest1[nest2[m21, nest1[nest2[m22}
117     *
118     */
119    private List<String> flatNameList;
120
121    /**
122     * A list of datatypes of all fields including nested fields.
123     */
124    private List<Datatype> flatTypeList;
125
126    /** flag to indicate if the dataset is an external dataset */
127    private boolean isExternal = false;
128
129    /** flag to indicate if the dataset is a virtual dataset */
130    private boolean isVirtual = false;
131    private List<String> virtualNameList;
132
133    /*
134     * Enum to indicate the type of I/O to perform inside of the common I/O
135     * function.
136     */
137    protected static enum IO_TYPE {
138        READ, WRITE
139    };
140
141    /**
142     * Constructs an instance of a HDF5 compound dataset with given file, dataset name and path.
143     * <p>
144     * The dataset object represents an existing dataset in the file. For example, new
145     * H5CompoundDS(file, "dset1", "/g0/") constructs a dataset object that corresponds to the
146     * dataset,"dset1", at group "/g0/".
147     * <p>
148     * This object is usually constructed at FileFormat.open(), which loads the file structure and
149     * object information into memory. It is rarely used elsewhere.
150     *
151     * @param theFile
152     *            the file that contains the data object.
153     * @param theName
154     *            the name of the data object, e.g. "dset".
155     * @param thePath
156     *            the full path of the data object, e.g. "/arrays/".
157     */
158    public H5CompoundDS(FileFormat theFile, String theName, String thePath) {
159        this(theFile, theName, thePath, null);
160    }
161
162    /**
163     * @deprecated Not for public use in the future.<br>
164     *             Using {@link #H5CompoundDS(FileFormat, String, String)}
165     *
166     * @param theFile
167     *            the file that contains the data object.
168     * @param theName
169     *            the name of the data object, e.g. "dset".
170     * @param thePath
171     *            the full path of the data object, e.g. "/arrays/".
172     * @param oid
173     *            the oid of the data object.
174     */
175    @Deprecated
176    public H5CompoundDS(FileFormat theFile, String theName, String thePath, long[] oid) {
177        super(theFile, theName, thePath, oid);
178        objInfo = new H5O_info_t(-1L, -1L, 0, 0, -1L, 0L, 0L, 0L, 0L, null, null, null);
179
180        if ((oid == null) && (theFile != null)) {
181            // retrieve the object ID
182            try {
183                byte[] refBuf = H5.H5Rcreate(theFile.getFID(), this.getFullName(), HDF5Constants.H5R_OBJECT, -1);
184                this.oid = new long[1];
185                this.oid[0] = HDFNativeData.byteToLong(refBuf, 0);
186            }
187            catch (Exception ex) {
188                log.debug("constructor ID {} for {} failed H5Rcreate", theFile.getFID(), this.getFullName());
189            }
190        }
191    }
192
193    /*
194     * (non-Javadoc)
195     *
196     * @see hdf.object.HObject#open()
197     */
198    @Override
199    public long open() {
200        long did = -1;
201
202        try {
203            did = H5.H5Dopen(getFID(), getPath() + getName(), HDF5Constants.H5P_DEFAULT);
204            log.trace("open(): did={}", did);
205        }
206        catch (HDF5Exception ex) {
207            log.debug("open(): Failed to open dataset {}: ", getPath() + getName(), ex);
208            did = -1;
209        }
210
211        return did;
212    }
213
214    /*
215     * (non-Javadoc)
216     *
217     * @see hdf.object.HObject#close(int)
218     */
219    @Override
220    public void close(long did) {
221        if (did >= 0) {
222            try {
223                H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_LOCAL);
224            }
225            catch (Exception ex) {
226                log.debug("close(): H5Fflush(did {}) failure: ", did, ex);
227            }
228            try {
229                H5.H5Dclose(did);
230            }
231            catch (HDF5Exception ex) {
232                log.debug("close(): H5Dclose(did {}) failure: ", did, ex);
233            }
234        }
235    }
236
237    /**
238     * Retrieves datatype and dataspace information from file and sets the dataset
239     * in memory.
240     * <p>
241     * The init() is designed to support lazy operation in a dataset object. When a
242     * data object is retrieved from file, the datatype, dataspace and raw data are
243     * not loaded into memory. When it is asked to read the raw data from file,
244     * init() is first called to get the datatype and dataspace information, then
245     * load the raw data from file.
246     * <p>
247     * init() is also used to reset the selection of a dataset (start, stride and
248     * count) to the default, which is the entire dataset for 1D or 2D datasets. In
249     * the following example, init() at step 1) retrieves datatype and dataspace
250     * information from file. getData() at step 3) reads only one data point. init()
251     * at step 4) resets the selection to the whole dataset. getData() at step 4)
252     * reads the values of whole dataset into memory.
253     *
254     * <pre>
255     * dset = (Dataset) file.get(NAME_DATASET);
256     *
257     * // 1) get datatype and dataspace information from file
258     * dset.init();
259     * rank = dset.getRank(); // rank = 2, a 2D dataset
260     * count = dset.getSelectedDims();
261     * start = dset.getStartDims();
262     * dims = dset.getDims();
263     *
264     * // 2) select only one data point
265     * for (int i = 0; i &lt; rank; i++) {
266     *     start[0] = 0;
267     *     count[i] = 1;
268     * }
269     *
270     * // 3) read one data point
271     * data = dset.getData();
272     *
273     * // 4) reset selection to the whole dataset
274     * dset.init();
275     *
276     * // 5) clean the memory data buffer
277     * dset.clearData();
278     *
279     * // 6) Read the whole dataset
280     * data = dset.getData();
281     * </pre>
282     */
283    @Override
284    public void init() {
285        if (inited) {
286            resetSelection();
287            log.trace("init(): Dataset already initialized");
288            return; // already called. Initialize only once
289        }
290
291        long did = -1;
292        long tid = -1;
293        long sid = -1;
294        flatNameList = new Vector<>();
295        flatTypeList = new Vector<>();
296
297        did = open();
298        if (did >= 0) {
299            // check if it is an external or virtual dataset
300            long pid = -1;
301            try {
302                pid = H5.H5Dget_create_plist(did);
303                try {
304                    int nfiles = H5.H5Pget_external_count(pid);
305                    isExternal = (nfiles > 0);
306                    int layoutType = H5.H5Pget_layout(pid);
307                    if (isVirtual = (layoutType == HDF5Constants.H5D_VIRTUAL)) {
308                        try {
309                            long vmaps = H5.H5Pget_virtual_count(pid);
310                            if (vmaps > 0) {
311                                virtualNameList = new Vector<>();
312                                for (long next = 0; next < vmaps; next++) {
313                                    try {
314                                        String fname = H5.H5Pget_virtual_filename(pid, next);
315                                        virtualNameList.add(fname);
316                                        log.trace("init(): virtualNameList[{}]={}", next, fname);
317                                    }
318                                    catch (Exception err) {
319                                        log.trace("init(): vds[{}] continue", next);
320                                    }
321                                }
322                            }
323                        }
324                        catch (Exception err) {
325                            log.debug("init(): vds count error: ", err);
326                        }
327                    }
328                    log.trace("init(): pid={} nfiles={} isExternal={} isVirtual={}", pid, nfiles, isExternal, isVirtual);
329                }
330                catch (Exception ex) {
331                    log.debug("init(): check if it is an external or virtual dataset:", ex);
332                }
333            }
334            catch (Exception ex) {
335                log.debug("init(): H5Dget_create_plist() failure: ", ex);
336            }
337            finally {
338                try {
339                    H5.H5Pclose(pid);
340                }
341                catch (Exception ex) {
342                    log.debug("init(): H5Pclose(pid {}) failure: ", pid, ex);
343                }
344            }
345
346            try {
347                sid = H5.H5Dget_space(did);
348                rank = H5.H5Sget_simple_extent_ndims(sid);
349                tid = H5.H5Dget_type(did);
350                log.trace("init(): tid={} sid={} rank={}", tid, sid, rank);
351
352                if (rank == 0) {
353                    // a scalar data point
354                    rank = 1;
355                    dims = new long[1];
356                    dims[0] = 1;
357                    log.trace("init(): rank is a scalar data point");
358                }
359                else {
360                    dims = new long[rank];
361                    maxDims = new long[rank];
362                    H5.H5Sget_simple_extent_dims(sid, dims, maxDims);
363                    log.trace("init(): rank={}, dims={}, maxDims={}", rank, dims, maxDims);
364                }
365
366                startDims = new long[rank];
367                selectedDims = new long[rank];
368
369                try {
370                    datatype = new H5Datatype(getFileFormat(), tid);
371
372                    log.trace("init(): tid={} has isText={} : isVLEN={} : isEnum={} : isUnsigned={} : isRegRef={}", tid,
373                            datatype.isText(), datatype.isVLEN(), ((H5Datatype) datatype).isEnum(), datatype.isUnsigned(), ((H5Datatype) datatype).isRegRef());
374
375                    H5Datatype.extractCompoundInfo((H5Datatype) datatype, "", flatNameList, flatTypeList);
376                }
377                catch (Exception ex) {
378                    log.debug("init(): failed to create datatype for dataset: ", ex);
379                    datatype = null;
380                }
381
382                // initialize member information
383                numberOfMembers = flatNameList.size();
384                log.trace("init(): numberOfMembers={}", numberOfMembers);
385
386                memberNames = new String[numberOfMembers];
387                memberTypes = new Datatype[numberOfMembers];
388                memberOrders = new int[numberOfMembers];
389                isMemberSelected = new boolean[numberOfMembers];
390                memberDims = new Object[numberOfMembers];
391
392                for (int i = 0; i < numberOfMembers; i++) {
393                    isMemberSelected[i] = true;
394                    memberOrders[i] = 1;
395                    memberDims[i] = null;
396
397                    try {
398                        memberTypes[i] = flatTypeList.get(i);
399                        log.trace("init()[{}]: memberTypes[{}]={}", i, i, memberTypes[i].getDescription());
400
401                        if (memberTypes[i].isArray()) {
402                            long mdim[] = memberTypes[i].getArrayDims();
403                            int idim[] = new int[mdim.length];
404                            int arrayNpoints = 1;
405
406                            for (int j = 0; j < idim.length; j++) {
407                                idim[j] = (int) mdim[j];
408                                arrayNpoints *= idim[j];
409                            }
410
411                            memberDims[i] = idim;
412                            memberOrders[i] = arrayNpoints;
413                        }
414                    }
415                    catch (Exception ex) {
416                        log.debug("init()[{}]: memberTypes[{}] get failure: ", i, i, ex);
417                        memberTypes[i] = null;
418                    }
419
420                    try {
421                        memberNames[i] = flatNameList.get(i);
422                        log.trace("init()[{}]: memberNames[{}]={}", i, i, memberNames[i]);
423                    }
424                    catch (Exception ex) {
425                        log.debug("init()[{}]: memberNames[{}] get failure: ", i, i, ex);
426                        memberNames[i] = "null";
427                    }
428                } //  (int i=0; i<numberOfMembers; i++)
429
430                inited = true;
431            }
432            catch (HDF5Exception ex) {
433                numberOfMembers = 0;
434                memberNames = null;
435                memberTypes = null;
436                memberOrders = null;
437                log.debug("init(): ", ex);
438            }
439            finally {
440                if (datatype != null)
441                    datatype.close(tid);
442
443                try {
444                    H5.H5Sclose(sid);
445                }
446                catch (HDF5Exception ex2) {
447                    log.debug("init(): H5Sclose(sid {}) failure: ", sid, ex2);
448                }
449            }
450
451            close(did);
452        }
453        else {
454            log.debug("init(): failed to open dataset");
455        }
456
457        resetSelection();
458    }
459
460    /*
461     * (non-Javadoc)
462     *
463     * @see hdf.object.DataFormat#hasAttribute()
464     */
465    @Override
466    public boolean hasAttribute() {
467        objInfo.num_attrs = nAttributes;
468
469        if (objInfo.num_attrs < 0) {
470            long did = open();
471            if (did >= 0) {
472                try {
473                    objInfo = H5.H5Oget_info(did);
474                    nAttributes = (int) objInfo.num_attrs;
475                }
476                catch (Exception ex) {
477                    objInfo.num_attrs = 0;
478                    log.debug("hasAttribute(): get object info failure: ", ex);
479                }
480                close(did);
481            }
482            else {
483                log.debug("hasAttribute(): could not open dataset");
484            }
485        }
486
487        log.trace("hasAttribute(): nAttributes={}", objInfo.num_attrs);
488        return (objInfo.num_attrs > 0);
489    }
490
491    /*
492     * (non-Javadoc)
493     *
494     * @see hdf.object.Dataset#getDatatype()
495     */
496    @Override
497    public Datatype getDatatype() {
498        if (!inited)
499            init();
500
501        if (datatype == null) {
502            long did = -1;
503            long tid = -1;
504
505            did = open();
506            if (did >= 0) {
507                try {
508                    tid = H5.H5Dget_type(did);
509                    datatype = new H5Datatype(getFileFormat(), tid);
510                }
511                catch (Exception ex) {
512                    log.debug("getDatatype(): ", ex);
513                }
514                finally {
515                    try {
516                        H5.H5Tclose(tid);
517                    }
518                    catch (HDF5Exception ex) {
519                        log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex);
520                    }
521                    try {
522                        H5.H5Dclose(did);
523                    }
524                    catch (HDF5Exception ex) {
525                        log.debug("getDatatype(): H5Dclose(did {}) failure: ", did, ex);
526                    }
527                }
528            }
529        }
530
531        if (isExternal) {
532            String pdir = this.getFileFormat().getAbsoluteFile().getParent();
533
534            if (pdir == null) {
535                pdir = ".";
536            }
537            System.setProperty("user.dir", pdir);
538            log.trace("getDatatype(): External dataset: user.dir={}", pdir);
539        }
540
541        return datatype;
542    }
543
544    @Override
545    public Object getFillValue() {
546        return null;
547    }
548
549    /*
550     * (non-Javadoc)
551     *
552     * @see hdf.object.Dataset#clear()
553     */
554    @Override
555    public void clear() {
556        super.clear();
557
558        if (attributeList != null) {
559            ((Vector<Attribute>) attributeList).setSize(0);
560        }
561    }
562
563    /*
564     * (non-Javadoc)
565     *
566     * @see hdf.object.Dataset#readBytes()
567     */
568    @Override
569    public byte[] readBytes() throws HDF5Exception {
570        byte[] theData = null;
571
572        if (!isInited())
573            init();
574
575        long did = open();
576        if (did >= 0) {
577            long fspace = -1;
578            long mspace = -1;
579            long tid = -1;
580
581            try {
582                long[] lsize = { 1 };
583                for (int j = 0; j < selectedDims.length; j++) {
584                    lsize[0] *= selectedDims[j];
585                }
586
587                fspace = H5.H5Dget_space(did);
588                mspace = H5.H5Screate_simple(rank, selectedDims, null);
589
590                // set the rectangle selection
591                // HDF5 bug: for scalar dataset, H5Sselect_hyperslab gives core dump
592                if (rank * dims[0] > 1) {
593                    H5.H5Sselect_hyperslab(fspace, HDF5Constants.H5S_SELECT_SET, startDims, selectedStride,
594                            selectedDims, null); // set block to 1
595                }
596
597                tid = H5.H5Dget_type(did);
598                long size = H5.H5Tget_size(tid) * lsize[0];
599                log.trace("readBytes(): size = {}", size);
600
601                if (size < Integer.MIN_VALUE || size > Integer.MAX_VALUE) throw new Exception("Invalid int size");
602
603                theData = new byte[(int) size];
604
605                log.trace("readBytes(): H5Dread: did={} tid={} fspace={} mspace={}", did, tid, fspace, mspace);
606                H5.H5Dread(did, tid, mspace, fspace, HDF5Constants.H5P_DEFAULT, theData);
607            }
608            catch (Exception ex) {
609                log.debug("readBytes(): failed to read data: ", ex);
610            }
611            finally {
612                try {
613                    H5.H5Sclose(fspace);
614                }
615                catch (Exception ex2) {
616                    log.debug("readBytes(): H5Sclose(fspace {}) failure: ", fspace, ex2);
617                }
618                try {
619                    H5.H5Sclose(mspace);
620                }
621                catch (Exception ex2) {
622                    log.debug("readBytes(): H5Sclose(mspace {}) failure: ", mspace, ex2);
623                }
624                try {
625                    H5.H5Tclose(tid);
626                }
627                catch (HDF5Exception ex2) {
628                    log.debug("readBytes(): H5Tclose(tid {}) failure: ", tid, ex2);
629                }
630                close(did);
631            }
632        }
633
634        return theData;
635    }
636
637    /*
638     * (non-Javadoc)
639     *
640     * @see hdf.object.Dataset#read()
641     */
642    @Override
643    public Object read() throws Exception {
644        Object readData = null;
645
646        if (!isInited())
647            init();
648
649        try {
650            readData = compoundDatasetCommonIO(IO_TYPE.READ, null);
651        }
652        catch (Exception ex) {
653            log.debug("read(): failed to read compound dataset: ", ex);
654            throw new Exception("failed to read compound dataset: " + ex.getMessage(), ex);
655        }
656
657        return readData;
658    }
659
660    /**
661     * Writes the given data buffer into this dataset in a file.
662     * <p>
663     * The data buffer is a vector that contains the data values of compound fields. The data is written
664     * into file field by field.
665     *
666     * @param buf
667     *            The vector that contains the data values of compound fields.
668     *
669     * @throws Exception
670     *             If there is an error at the HDF5 library level.
671     */
672    @Override
673    public void write(Object buf) throws Exception {
674        if (this.getFileFormat().isReadOnly())
675            throw new Exception("cannot write to compound dataset in file opened as read-only");
676
677        if (!isInited())
678            init();
679
680        try {
681            compoundDatasetCommonIO(IO_TYPE.WRITE, buf);
682        }
683        catch (Exception ex) {
684            log.debug("write(): failed to write compound dataset: ", ex);
685            throw new Exception("failed to write compound dataset: " + ex.getMessage(), ex);
686        }
687    }
688
689    private Object compoundDatasetCommonIO(IO_TYPE ioType, Object writeBuf) throws Exception {
690        H5Datatype dsDatatype = (H5Datatype) getDatatype();
691        Object data = null;
692
693        if (numberOfMembers <= 0) {
694            log.debug("compoundDatasetCommonIO(): Dataset contains no members");
695            throw new Exception("dataset contains no members");
696        }
697
698        /*
699         * I/O type-specific pre-initialization.
700         */
701        if (ioType == IO_TYPE.WRITE) {
702            if ((writeBuf == null) || !(writeBuf instanceof List)) {
703                log.debug("compoundDatasetCommonIO(): writeBuf is null or invalid");
704                throw new Exception("write buffer is null or invalid");
705            }
706
707            /*
708             * Check for any unsupported datatypes and fail early before
709             * attempting to write to the dataset.
710             */
711            if (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isCompound()) {
712                log.debug("compoundDatasetCommonIO(): cannot write dataset of type ARRAY of COMPOUND");
713                throw new HDF5Exception("Unsupported dataset of type ARRAY of COMPOUND");
714            }
715
716            if (dsDatatype.isVLEN() && dsDatatype.getDatatypeBase().isCompound()) {
717                log.debug("compoundDatasetCommonIO(): cannot write dataset of type VLEN of COMPOUND");
718                throw new HDF5Exception("Unsupported dataset of type VLEN of COMPOUND");
719            }
720        }
721
722        long did = open();
723        if (did >= 0) {
724            long[] spaceIDs = { -1, -1 }; // spaceIDs[0]=mspace, spaceIDs[1]=fspace
725
726            try {
727                /*
728                 * NOTE: this call sets up a hyperslab selection in the file according to the
729                 * current selection in the dataset object.
730                 */
731                long totalSelectedSpacePoints = H5Utils.getTotalSelectedSpacePoints(did, dims, startDims,
732                        selectedStride, selectedDims, spaceIDs);
733
734                data = compoundTypeIO(ioType, did, spaceIDs, (int) totalSelectedSpacePoints, dsDatatype, writeBuf, new int[]{0});
735            }
736            finally {
737                if (HDF5Constants.H5S_ALL != spaceIDs[0]) {
738                    try {
739                        H5.H5Sclose(spaceIDs[0]);
740                    }
741                    catch (Exception ex) {
742                        log.debug("compoundDatasetCommonIO(): H5Sclose(spaceIDs[0] {}) failure: ", spaceIDs[0], ex);
743                    }
744                }
745
746                if (HDF5Constants.H5S_ALL != spaceIDs[1]) {
747                    try {
748                        H5.H5Sclose(spaceIDs[1]);
749                    }
750                    catch (Exception ex) {
751                        log.debug("compoundDatasetCommonIO(): H5Sclose(spaceIDs[1] {}) failure: ", spaceIDs[1], ex);
752                    }
753                }
754
755                close(did);
756            }
757        }
758        else
759            log.debug("compoundDatasetCommonIO(): failed to open dataset");
760
761        return data;
762    }
763
764    /*
765     * Private recursive routine to read/write an entire compound datatype field by
766     * field. This routine is called recursively for ARRAY of COMPOUND and VLEN of
767     * COMPOUND datatypes.
768     *
769     * NOTE: the globalMemberIndex hack is ugly, but we need to keep track of a
770     * running counter so that we can index properly into the flattened name list
771     * generated from H5Datatype.extractCompoundInfo() at dataset init time.
772     */
773    private Object compoundTypeIO(IO_TYPE ioType, long did, long[] spaceIDs, int nSelPoints, final H5Datatype cmpdType,
774            Object writeBuf, int[] globalMemberIndex) {
775        Object theData = null;
776
777        if (cmpdType.isArray()) {
778            log.trace("compoundTypeIO(): ARRAY type");
779
780            long[] arrayDims = cmpdType.getArrayDims();
781            int arrSize = nSelPoints;
782            for (int i = 0; i < arrayDims.length; i++) {
783                arrSize *= arrayDims[i];
784            }
785
786            theData = compoundTypeIO(ioType, did, spaceIDs, arrSize, (H5Datatype) cmpdType.getDatatypeBase(), writeBuf, globalMemberIndex);
787        }
788        else if (cmpdType.isVLEN() && !cmpdType.isVarStr()) {
789            /*
790             * TODO: true variable-length support.
791             */
792            String[] errVal = new String[nSelPoints];
793            String errStr = "*UNSUPPORTED*";
794
795            for (int j = 0; j < nSelPoints; j++)
796                errVal[j] = errStr;
797
798            /*
799             * Setup a fake data list.
800             */
801            Datatype baseType = cmpdType.getDatatypeBase();
802            while (baseType != null && !baseType.isCompound()) {
803                baseType = baseType.getDatatypeBase();
804            }
805
806            List<Object> fakeVlenData = (List<Object>) H5Datatype.allocateArray((H5Datatype) baseType, nSelPoints);
807            fakeVlenData.add(errVal);
808
809            theData = fakeVlenData;
810        }
811        else if (cmpdType.isCompound()) {
812            List<Object> memberDataList = null;
813            List<Datatype> typeList = cmpdType.getCompoundMemberTypes();
814
815            log.trace("compoundTypeIO(): {} {} members:", (ioType == IO_TYPE.READ) ? "read" : "write",
816                    typeList.size());
817
818            if (ioType == IO_TYPE.READ) {
819                memberDataList = (List<Object>) H5Datatype.allocateArray(cmpdType, nSelPoints);
820            }
821
822            try {
823                for (int i = 0, writeListIndex = 0; i < typeList.size(); i++) {
824                    H5Datatype memberType = null;
825                    String memberName = null;
826                    Object memberData = null;
827
828                    try {
829                        memberType = (H5Datatype) typeList.get(i);
830                    }
831                    catch (Exception ex) {
832                        log.debug("compoundTypeIO(): get member {} failure: ", i, ex);
833                        globalMemberIndex[0]++;
834                        continue;
835                    }
836
837                    /*
838                     * Since the type list used here is not a flattened structure, we need to skip
839                     * the member selection check for compound types, as otherwise having a single
840                     * member not selected would skip the reading/writing for the entire compound
841                     * type. The member selection check will be deferred to the recursive compound
842                     * read/write below.
843                     */
844                    if (!memberType.isCompound()) {
845                        if (!isMemberSelected[globalMemberIndex[0] % this.getMemberCount()]) {
846                            log.debug("compoundTypeIO(): member[{}] is not selected", i);
847                            globalMemberIndex[0]++;
848                            continue; // the field is not selected
849                        }
850                    }
851
852                    if (!memberType.isCompound()) {
853                        try {
854                            memberName = new String(flatNameList.get(globalMemberIndex[0]));
855                        }
856                        catch (Exception ex) {
857                            log.debug("compoundTypeIO(): get member {} name failure: ", i, ex);
858                            memberName = "null";
859                        }
860                    }
861
862                    log.trace("compoundTypeIO(): member[{}]({}) is type {}", i, memberName, memberType.getDescription());
863
864                    if (ioType == IO_TYPE.READ) {
865                        try {
866                            if (memberType.isCompound())
867                                memberData = compoundTypeIO(ioType, did, spaceIDs, nSelPoints, memberType, writeBuf, globalMemberIndex);
868                            else if (memberType.isArray() /* || (memberType.isVLEN() && !memberType.isVarStr()) */) {
869                                /*
870                                 * Recursively detect any nested array/vlen of compound types.
871                                 */
872                                boolean compoundFound = false;
873
874                                Datatype base = memberType.getDatatypeBase();
875                                while (base != null) {
876                                    if (base.isCompound())
877                                        compoundFound = true;
878
879                                    base = base.getDatatypeBase();
880                                }
881
882                                if (compoundFound) {
883                                    /*
884                                     * Skip the top-level array/vlen type.
885                                     */
886                                    globalMemberIndex[0]++;
887
888                                    memberData = compoundTypeIO(ioType, did, spaceIDs, nSelPoints, memberType, writeBuf, globalMemberIndex);
889                                }
890                                else {
891                                    memberData = readSingleCompoundMember(did, spaceIDs, nSelPoints, memberType, memberName);
892                                    globalMemberIndex[0]++;
893                                }
894                            }
895                            else {
896                                memberData = readSingleCompoundMember(did, spaceIDs, nSelPoints, memberType, memberName);
897                                globalMemberIndex[0]++;
898                            }
899                        }
900                        catch (Exception ex) {
901                            log.debug("compoundTypeIO(): failed to read member {}: ", i, ex);
902                            globalMemberIndex[0]++;
903                            memberData = null;
904                        }
905
906                        if (memberData == null) {
907                            String[] errVal = new String[nSelPoints];
908                            String errStr = "*ERROR*";
909
910                            for (int j = 0; j < nSelPoints; j++)
911                                errVal[j] = errStr;
912
913                            memberData = errVal;
914                        }
915
916                        memberDataList.add(memberData);
917                    }
918                    else {
919                        try {
920                            /*
921                             * TODO: currently doesn't correctly handle non-selected compound members.
922                             */
923                            memberData = ((List<?>) writeBuf).get(writeListIndex++);
924                        }
925                        catch (Exception ex) {
926                            log.debug("compoundTypeIO(): get member[{}] data failure: ", i, ex);
927                            globalMemberIndex[0]++;
928                            continue;
929                        }
930
931                        if (memberData == null) {
932                            log.debug("compoundTypeIO(): member[{}] data is null", i);
933                            globalMemberIndex[0]++;
934                            continue;
935                        }
936
937                        try {
938                            if (memberType.isCompound()) {
939                                List<?> nestedList = (List<?>) ((List<?>) writeBuf).get(writeListIndex++);
940                                compoundTypeIO(ioType, did, spaceIDs, nSelPoints, memberType, nestedList, globalMemberIndex);
941                            }
942                            else {
943                                writeSingleCompoundMember(did, spaceIDs, nSelPoints, memberType, memberName, memberData);
944                                globalMemberIndex[0]++;
945                            }
946                        }
947                        catch (Exception ex) {
948                            log.debug("compoundTypeIO(): failed to write member[{}]: ", i, ex);
949                            globalMemberIndex[0]++;
950                        }
951                    }
952                } //  (i = 0, writeListIndex = 0; i < atomicTypeList.size(); i++)
953            }
954            catch (Exception ex) {
955                log.debug("compoundTypeIO(): failure: ", ex);
956                memberDataList = null;
957            }
958
959            theData = memberDataList;
960        }
961
962        return theData;
963    }
964
965    /*
966     * Private routine to read a single field of a compound datatype by creating a
967     * compound datatype and inserting the single field into that datatype.
968     */
969    private Object readSingleCompoundMember(long dsetID, long[] spaceIDs, int nSelPoints, final H5Datatype memberType,
970            String memberName) throws Exception {
971        H5Datatype dsDatatype = (H5Datatype) this.getDatatype();
972        Object memberData = null;
973
974        try {
975            memberData = H5Datatype.allocateArray(memberType, nSelPoints);
976            log.trace("readSingleCompoundMember(): allocateArray {} points ", nSelPoints);
977        }
978        catch (OutOfMemoryError err) {
979            memberData = null;
980            throw new Exception("Out of memory");
981        }
982        catch (Exception ex) {
983            log.debug("readSingleCompoundMember(): ", ex);
984            memberData = null;
985        }
986
987        if (memberData != null) {
988            /*
989             * Create a compound datatype containing just a single field (the one which we
990             * want to read).
991             */
992            long compTid = -1;
993            try {
994                compTid = dsDatatype.createCompoundFieldType(memberName);
995            }
996            catch (HDF5Exception ex) {
997                log.debug("readSingleCompoundMember(): unable to create compound field type for member of type {}: ",
998                        memberType.getDescription(), ex);
999                memberData = null;
1000            }
1001
1002            /*
1003             * Actually read the data for this member now that everything has been setup.
1004             */
1005            try {
1006                if (memberType.isVLEN() || (memberType.isArray() && memberType.getDatatypeBase().isVLEN())) {
1007                    log.trace("readSingleCompoundMember(): H5DreadVL did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}",
1008                            dsetID, compTid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
1009                            (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
1010
1011                    H5.H5DreadVL(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, (Object[]) memberData);
1012                }
1013                else {
1014                    log.trace("readSingleCompoundMember(): H5Dread did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}",
1015                            dsetID, compTid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
1016                            (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
1017
1018                    H5.H5Dread(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, memberData);
1019                }
1020            }
1021            catch (HDF5DataFiltersException exfltr) {
1022                log.debug("readSingleCompoundMember(): read failure: ", exfltr);
1023                throw new Exception("Filter not available exception: " + exfltr.getMessage(), exfltr);
1024            }
1025            catch (Exception ex) {
1026                log.debug("readSingleCompoundMember(): read failure: ", ex);
1027                throw new Exception("failed to read compound member: " + ex.getMessage(), ex);
1028            }
1029            finally {
1030                dsDatatype.close(compTid);
1031            }
1032
1033            /*
1034             * Perform any necessary data conversions.
1035             */
1036            if (memberType.isUnsigned()) {
1037                log.trace("readSingleCompoundMember(): converting from unsigned C-type integers");
1038                memberData = Dataset.convertFromUnsignedC(memberData, null);
1039            }
1040            else if (Utils.getJavaObjectRuntimeClass(memberData) == 'B') {
1041                log.trace("readSingleCompoundMember(): converting byte array member into Object");
1042
1043                /*
1044                 * For all other types that get read into memory as a byte[] (such as nested
1045                 * compounds and arrays of compounds), we must manually convert the byte[] into
1046                 * something usable.
1047                 */
1048                memberData = convertByteMember(memberType, (byte[]) memberData);
1049            }
1050        }
1051
1052        return memberData;
1053    }
1054
1055    /*
1056     * Private routine to write a single field of a compound datatype by creating a
1057     * compound datatype and inserting the single field into that datatype.
1058     */
1059    private void writeSingleCompoundMember(long dsetID, long[] spaceIDs, int nSelPoints, final H5Datatype memberType,
1060            String memberName, Object theData) throws Exception {
1061        H5Datatype dsDatatype = (H5Datatype) this.getDatatype();
1062
1063        /*
1064         * Check for any unsupported datatypes before attempting to write this compound
1065         * member.
1066         */
1067        if (memberType.isVLEN() && !memberType.isVarStr()) {
1068            log.debug("writeSingleCompoundMember(): writing of VL non-strings is not currently supported");
1069            throw new Exception("writing of VL non-strings is not currently supported");
1070        }
1071
1072        /*
1073         * Perform any necessary data conversions before writing the data.
1074         */
1075        Object tmpData = theData;
1076        try {
1077            if (memberType.isUnsigned()) {
1078                // Check if we need to convert unsigned integer data from Java-style
1079                // to C-style integers
1080                long tsize = memberType.getDatatypeSize();
1081                String cname = theData.getClass().getName();
1082                char dname = cname.charAt(cname.lastIndexOf('[') + 1);
1083                boolean doIntConversion = (((tsize == 1) && (dname == 'S'))
1084                        || ((tsize == 2) && (dname == 'I')) || ((tsize == 4) && (dname == 'J')));
1085
1086                if (doIntConversion) {
1087                    log.trace("writeSingleCompoundMember(): converting integer data to unsigned C-type integers");
1088                    tmpData = convertToUnsignedC(theData, null);
1089                }
1090            }
1091            else if (memberType.isString() && (Array.get(theData, 0) instanceof String)) {
1092                log.trace("writeSingleCompoundMember(): converting string array to byte array");
1093                tmpData = stringToByte((String[]) theData, (int) memberType.getDatatypeSize());
1094            }
1095            else if (memberType.isEnum() && (Array.get(theData, 0) instanceof String)) {
1096                log.trace("writeSingleCompoundMember(): converting enum names to values");
1097                tmpData = memberType.convertEnumNameToValue((String[]) theData);
1098            }
1099        }
1100        catch (Exception ex) {
1101            log.debug("writeSingleCompoundMember(): data conversion failure: ", ex);
1102            tmpData = null;
1103        }
1104
1105        if (tmpData == null) {
1106            log.debug("writeSingleCompoundMember(): data is null");
1107            return;
1108        }
1109
1110        /*
1111         * Create a compound datatype containing just a single field (the one which we
1112         * want to write).
1113         */
1114        long compTid = -1;
1115        try {
1116            compTid = dsDatatype.createCompoundFieldType(memberName);
1117        }
1118        catch (HDF5Exception ex) {
1119            log.debug("writeSingleCompoundMember(): unable to create compound field type for member of type {}: ",
1120                    memberType.getDescription(), ex);
1121        }
1122
1123        /*
1124         * Actually write the data now that everything has been setup.
1125         */
1126        try {
1127            if (memberType.isVarStr()) {
1128                log.trace("writeSingleCompoundMember(): H5Dwrite_string did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}",
1129                        dsetID, compTid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
1130                        (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
1131
1132                H5.H5Dwrite_string(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, (String[]) tmpData);
1133            }
1134            else {
1135                log.trace("writeSingleCompoundMember(): H5Dwrite did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}",
1136                        dsetID, compTid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
1137                        (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
1138
1139                // BUG!!! does not write nested compound data and no
1140                // exception was caught. Need to check if it is a java
1141                // error or C library error.
1142                H5.H5Dwrite(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, tmpData);
1143            }
1144        }
1145        catch (Exception ex) {
1146            log.debug("writeSingleCompoundMember(): write failure: ", ex);
1147            throw new Exception("failed to write compound member: " + ex.getMessage(), ex);
1148        }
1149        finally {
1150            dsDatatype.close(compTid);
1151        }
1152    }
1153
1154    /*
1155     * Private routine to convert datatypes that are read in as byte arrays to
1156     * regular types.
1157     */
1158    private Object convertByteMember(final H5Datatype dtype, byte[] byteData) {
1159        Object theObj = null;
1160
1161        if (dtype.getDatatypeSize() == 1) {
1162            /*
1163             * Normal byte[] type, such as an integer datatype of size 1.
1164             */
1165            theObj = byteData;
1166        }
1167        else if (dtype.isString() && !dtype.isVarStr() && convertByteToString) {
1168            log.trace("convertByteMember(): converting byte array to string array");
1169
1170            theObj = byteToString(byteData, (int) dtype.getDatatypeSize());
1171        }
1172        else if (dtype.isInteger()) {
1173            log.trace("convertByteMember(): converting byte array to integer array");
1174
1175            theObj = HDFNativeData.byteToInt(byteData);
1176        }
1177        else if (dtype.isFloat()) {
1178            log.trace("convertByteMember(): converting byte array to float array");
1179
1180            theObj = HDFNativeData.byteToFloat(byteData);
1181        }
1182        else if (dtype.isRef()) {
1183            log.trace("convertByteMember(): reference type - converting byte array to long array");
1184
1185            theObj = HDFNativeData.byteToLong(byteData);
1186        }
1187        else if (dtype.isArray()) {
1188            H5Datatype baseType = (H5Datatype) dtype.getDatatypeBase();
1189
1190            /*
1191             * Retrieve the real base datatype in the case of ARRAY of ARRAY datatypes.
1192             */
1193            while (baseType.isArray()) baseType = (H5Datatype) baseType.getDatatypeBase();
1194
1195            /*
1196             * Optimize for the common cases of Arrays.
1197             */
1198            switch (baseType.getDatatypeClass()) {
1199                case Datatype.CLASS_INTEGER:
1200                case Datatype.CLASS_FLOAT:
1201                case Datatype.CLASS_CHAR:
1202                case Datatype.CLASS_STRING:
1203                case Datatype.CLASS_BITFIELD:
1204                case Datatype.CLASS_OPAQUE:
1205                case Datatype.CLASS_COMPOUND:
1206                case Datatype.CLASS_REFERENCE:
1207                case Datatype.CLASS_ENUM:
1208                case Datatype.CLASS_VLEN:
1209                case Datatype.CLASS_TIME:
1210                    theObj = convertByteMember(baseType, byteData);
1211                    break;
1212
1213                case Datatype.CLASS_ARRAY:
1214                {
1215                    H5Datatype arrayType = (H5Datatype) dtype.getDatatypeBase();
1216
1217                    long[] arrayDims = dtype.getArrayDims();
1218                    int arrSize = 1;
1219                    for (int i = 0; i < arrayDims.length; i++) {
1220                        arrSize *= arrayDims[i];
1221                    }
1222
1223                    theObj = new Object[arrSize];
1224
1225                    for (int i = 0; i < arrSize; i++) {
1226                        byte[] indexedBytes = Arrays.copyOfRange(byteData, (int) (i * arrayType.getDatatypeSize()),
1227                                (int) ((i + 1) * arrayType.getDatatypeSize()));
1228                        ((Object[]) theObj)[i] = convertByteMember(arrayType, indexedBytes);
1229                    }
1230
1231                    break;
1232                }
1233
1234                case Datatype.CLASS_NO_CLASS:
1235                default:
1236                    log.debug("convertByteMember(): invalid datatype class");
1237                    theObj = new String("*ERROR*");
1238            }
1239        }
1240        else if (dtype.isCompound()) {
1241            /*
1242             * TODO: still valid after reading change?
1243             */
1244            theObj = convertCompoundByteMembers(dtype, byteData);
1245        }
1246        else {
1247            theObj = byteData;
1248        }
1249
1250        return theObj;
1251    }
1252
1253    /**
1254     * Given an array of bytes representing a compound Datatype, converts each of
1255     * its members into Objects and returns the results.
1256     *
1257     * @param dtype
1258     *            The compound datatype to convert
1259     * @param data
1260     *            The byte array representing the data of the compound Datatype
1261     * @return The converted types of the bytes
1262     */
1263    private Object convertCompoundByteMembers(final H5Datatype dtype, byte[] data) {
1264        List<Object> theData = null;
1265
1266        List<Datatype> allSelectedTypes = Arrays.asList(this.getSelectedMemberTypes());
1267        List<Datatype> localTypes = new ArrayList<>(dtype.getCompoundMemberTypes());
1268        Iterator<Datatype> localIt = localTypes.iterator();
1269        while (localIt.hasNext()) {
1270            Datatype curType = localIt.next();
1271
1272            if (curType.isCompound())
1273                continue;
1274
1275            if (!allSelectedTypes.contains(curType))
1276                localIt.remove();
1277        }
1278
1279        theData = new ArrayList<>(localTypes.size());
1280        for (int i = 0, index = 0; i < localTypes.size(); i++) {
1281            Datatype curType = localTypes.get(i);
1282
1283            if (curType.isCompound())
1284                theData.add(convertCompoundByteMembers((H5Datatype) curType,
1285                        Arrays.copyOfRange(data, index, index + (int) curType.getDatatypeSize())));
1286            else
1287                theData.add(convertByteMember((H5Datatype) curType,
1288                        Arrays.copyOfRange(data, index, index + (int) curType.getDatatypeSize())));
1289
1290            index += curType.getDatatypeSize();
1291        }
1292
1293        return theData;
1294    }
1295
1296    @Override
1297    public Object convertFromUnsignedC() {
1298        throw new UnsupportedOperationException("H5CompoundDS:convertFromUnsignedC Unsupported operation.");
1299    }
1300
1301    @Override
1302    public Object convertToUnsignedC() {
1303        throw new UnsupportedOperationException("H5CompoundDS:convertToUnsignedC Unsupported operation.");
1304    }
1305
1306    /*
1307     * (non-Javadoc)
1308     *
1309     * @see hdf.object.DataFormat#getMetadata()
1310     */
1311    @Override
1312    public List<Attribute> getMetadata() throws HDF5Exception {
1313        return this.getMetadata(fileFormat.getIndexType(null), fileFormat.getIndexOrder(null));
1314    }
1315
1316    /*
1317     * (non-Javadoc)
1318     *
1319     * @see hdf.object.DataFormat#getMetadata(int...)
1320     */
1321    public List<Attribute> getMetadata(int... attrPropList) throws HDF5Exception {
1322
1323        if (!isInited()) {
1324            init();
1325        }
1326
1327        try {
1328            this.linkTargetObjName = H5File.getLinkTargetName(this);
1329        }
1330        catch (Exception ex) {
1331            log.debug("getMetadata(): getLinkTargetName failed: ", ex);
1332        }
1333
1334        if (attributeList != null) {
1335            log.trace("getMetadata(): attributeList != null");
1336            return attributeList;
1337        }
1338
1339        long did = -1;
1340        long pcid = -1;
1341        long paid = -1;
1342        int indxType = fileFormat.getIndexType(null);
1343        int order = fileFormat.getIndexOrder(null);
1344
1345        // load attributes first
1346        if (attrPropList.length > 0) {
1347            indxType = attrPropList[0];
1348            if (attrPropList.length > 1) {
1349                order = attrPropList[1];
1350            }
1351        }
1352
1353        attributeList = H5File.getAttribute(this, indxType, order);
1354
1355        did = open();
1356        if (did >= 0) {
1357            try {
1358                // get the compression and chunk information
1359                pcid = H5.H5Dget_create_plist(did);
1360                paid = H5.H5Dget_access_plist(did);
1361                long storageSize = H5.H5Dget_storage_size(did);
1362                int nfilt = H5.H5Pget_nfilters(pcid);
1363                int layoutType = H5.H5Pget_layout(pcid);
1364
1365                storageLayout.setLength(0);
1366                compression.setLength(0);
1367
1368                if (layoutType == HDF5Constants.H5D_CHUNKED) {
1369                    chunkSize = new long[rank];
1370                    H5.H5Pget_chunk(pcid, rank, chunkSize);
1371                    int n = chunkSize.length;
1372
1373                    storageLayout.append("CHUNKED: ").append(chunkSize[0]);
1374                    for (int i = 1; i < n; i++) {
1375                        storageLayout.append(" X ").append(chunkSize[i]);
1376                    }
1377
1378                    if (nfilt > 0) {
1379                        long nelmts = 1;
1380                        long uncompSize;
1381                        long datumSize = getDatatype().getDatatypeSize();
1382                        if (datumSize < 0) {
1383                            long tmptid = -1;
1384                            try {
1385                                tmptid = H5.H5Dget_type(did);
1386                                datumSize = H5.H5Tget_size(tmptid);
1387                            }
1388                            finally {
1389                                try {
1390                                    H5.H5Tclose(tmptid);
1391                                }
1392                                catch (Exception ex2) {
1393                                    log.debug("getMetadata(): H5Tclose(tmptid {}) failure: ", tmptid, ex2);
1394                                }
1395                            }
1396                        }
1397
1398                        for (int i = 0; i < rank; i++) {
1399                            nelmts *= dims[i];
1400                        }
1401                        uncompSize = nelmts * datumSize;
1402
1403                        /* compression ratio = uncompressed size / compressed size */
1404
1405                        if (storageSize != 0) {
1406                            double ratio = (double) uncompSize / (double) storageSize;
1407                            DecimalFormat df = new DecimalFormat();
1408                            df.setMinimumFractionDigits(3);
1409                            df.setMaximumFractionDigits(3);
1410                            compression.append(df.format(ratio)).append(":1");
1411                        }
1412                    }
1413                }
1414                else if (layoutType == HDF5Constants.H5D_COMPACT) {
1415                    storageLayout.append("COMPACT");
1416                }
1417                else if (layoutType == HDF5Constants.H5D_CONTIGUOUS) {
1418                    storageLayout.append("CONTIGUOUS");
1419                    if (H5.H5Pget_external_count(pcid) > 0)
1420                        storageLayout.append(" - EXTERNAL ");
1421                }
1422                else if (layoutType == HDF5Constants.H5D_VIRTUAL) {
1423                    storageLayout.append("VIRTUAL - ");
1424                    try {
1425                        long vmaps = H5.H5Pget_virtual_count(pcid);
1426                        try {
1427                            int virtView = H5.H5Pget_virtual_view(paid);
1428                            long virtGap = H5.H5Pget_virtual_printf_gap(paid);
1429                            if (virtView == HDF5Constants.H5D_VDS_FIRST_MISSING)
1430                                storageLayout.append("First Missing");
1431                            else
1432                                storageLayout.append("Last Available");
1433                            storageLayout.append("\nGAP : ").append(virtGap);
1434                        }
1435                        catch (Exception err) {
1436                            log.debug("getMetadata(): vds error: ", err);
1437                            storageLayout.append("ERROR");
1438                        }
1439                        storageLayout.append("\nMAPS : ").append(vmaps);
1440                        if (vmaps > 0) {
1441                            for (long next = 0; next < vmaps; next++) {
1442                                try {
1443                                    H5.H5Pget_virtual_vspace(pcid, next);
1444                                    H5.H5Pget_virtual_srcspace(pcid, next);
1445                                    String fname = H5.H5Pget_virtual_filename(pcid, next);
1446                                    String dsetname = H5.H5Pget_virtual_dsetname(pcid, next);
1447                                    storageLayout.append("\n").append(fname).append(" : ").append(dsetname);
1448                                }
1449                                catch (Exception err) {
1450                                    log.debug("getMetadata(): vds space[{}] error: ", next, err);
1451                                    storageLayout.append("ERROR");
1452                                }
1453                            }
1454                        }
1455                    }
1456                    catch (Exception err) {
1457                        log.debug("getMetadata(): vds count error: ", err);
1458                        storageLayout.append("ERROR");
1459                    }
1460                }
1461                else {
1462                    chunkSize = null;
1463                    storageLayout.append("NONE");
1464                }
1465
1466                int[] flags = { 0, 0 };
1467                long[] cdNelmts = { 20 };
1468                int[] cdValues = new int[(int) cdNelmts[0]];
1469                String[] cdName = { "", "" };
1470                log.trace("getMetadata(): {} filters in pipeline", nfilt);
1471                int filter = -1;
1472                int[] filterConfig = { 1 };
1473
1474                filters.setLength(0);
1475
1476                if (nfilt == 0) {
1477                    filters.append("NONE");
1478                }
1479                else {
1480                    for (int i = 0, k = 0; i < nfilt; i++) {
1481                        log.trace("getMetadata(): filter[{}]", i);
1482                        if (i > 0) {
1483                            filters.append(", ");
1484                        }
1485                        if (k > 0) {
1486                            compression.append(", ");
1487                        }
1488
1489                        try {
1490                            cdNelmts[0] = 20;
1491                            cdValues = new int[(int) cdNelmts[0]];
1492                            cdValues = new int[(int) cdNelmts[0]];
1493                            filter = H5.H5Pget_filter(pcid, i, flags, cdNelmts, cdValues, 120, cdName, filterConfig);
1494                            log.trace("getMetadata(): filter[{}] is {} has {} elements ", i, cdName[0], cdNelmts[0]);
1495                            for (int j = 0; j < cdNelmts[0]; j++) {
1496                                log.trace("getMetadata(): filter[{}] element {} = {}", i, j, cdValues[j]);
1497                            }
1498                        }
1499                        catch (Exception err) {
1500                            log.debug("getMetadata(): filter[{}] error: ", i, err);
1501                            filters.append("ERROR");
1502                            continue;
1503                        }
1504
1505                        if (filter == HDF5Constants.H5Z_FILTER_NONE) {
1506                            filters.append("NONE");
1507                        }
1508                        else if (filter == HDF5Constants.H5Z_FILTER_DEFLATE) {
1509                            filters.append("GZIP");
1510                            compression.append(COMPRESSION_GZIP_TXT).append(cdValues[0]);
1511                            k++;
1512                        }
1513                        else if (filter == HDF5Constants.H5Z_FILTER_FLETCHER32) {
1514                            filters.append("Error detection filter");
1515                        }
1516                        else if (filter == HDF5Constants.H5Z_FILTER_SHUFFLE) {
1517                            filters.append("SHUFFLE: Nbytes = ").append(cdValues[0]);
1518                        }
1519                        else if (filter == HDF5Constants.H5Z_FILTER_NBIT) {
1520                            filters.append("NBIT");
1521                        }
1522                        else if (filter == HDF5Constants.H5Z_FILTER_SCALEOFFSET) {
1523                            filters.append("SCALEOFFSET: MIN BITS = ").append(cdValues[0]);
1524                        }
1525                        else if (filter == HDF5Constants.H5Z_FILTER_SZIP) {
1526                            filters.append("SZIP");
1527                            compression.append("SZIP: Pixels per block = ").append(cdValues[1]);
1528                            k++;
1529                            int flag = -1;
1530                            try {
1531                                flag = H5.H5Zget_filter_info(filter);
1532                            }
1533                            catch (Exception ex) {
1534                                log.debug("getMetadata(): H5Zget_filter_info failure: ", ex);
1535                                flag = -1;
1536                            }
1537                            if (flag == HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) {
1538                                compression.append(": H5Z_FILTER_CONFIG_DECODE_ENABLED");
1539                            }
1540                            else if ((flag == HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED)
1541                                    || (flag >= (HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED
1542                                            + HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED))) {
1543                                compression.append(": H5Z_FILTER_CONFIG_ENCODE_ENABLED");
1544                            }
1545                        }
1546                        else {
1547                            filters.append("USERDEFINED ").append(cdName[0]).append("(").append(filter).append("): ");
1548                            for (int j = 0; j < cdNelmts[0]; j++) {
1549                                if (j > 0)
1550                                    filters.append(", ");
1551                                filters.append(cdValues[j]);
1552                            }
1553                            log.debug("getMetadata(): filter[{}] is user defined compression", i);
1554                        }
1555                    } //  (int i=0; i<nfilt; i++)
1556                }
1557
1558                if (compression.length() == 0) {
1559                    compression.append("NONE");
1560                }
1561                log.trace("getMetadata(): filter compression={}", compression);
1562
1563                log.trace("getMetadata(): filter information={}", filters);
1564
1565                storage.setLength(0);
1566                storage.append("SIZE: ").append(storageSize);
1567
1568                try {
1569                    int[] at = { 0 };
1570                    H5.H5Pget_alloc_time(pcid, at);
1571                    storage.append(", allocation time: ");
1572                    if (at[0] == HDF5Constants.H5D_ALLOC_TIME_EARLY) {
1573                        storage.append("Early");
1574                    }
1575                    else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_INCR) {
1576                        storage.append("Incremental");
1577                    }
1578                    else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_LATE) {
1579                        storage.append("Late");
1580                    }
1581                    else
1582                        storage.append("Default");
1583                }
1584                catch (Exception ex) {
1585                    log.debug("getMetadata(): Storage allocation time:", ex);
1586                }
1587                log.trace("getMetadata(): storage={}", storage);
1588            }
1589            finally {
1590                try {
1591                    H5.H5Pclose(paid);
1592                }
1593                catch (Exception ex) {
1594                    log.debug("getMetadata(): H5Pclose(paid {}) failure: ", paid, ex);
1595                }
1596                try {
1597                    H5.H5Pclose(pcid);
1598                }
1599                catch (Exception ex) {
1600                    log.debug("getMetadata(): H5Pclose(pcid {}) failure: ", pcid, ex);
1601                }
1602                close(did);
1603            }
1604        }
1605
1606        return attributeList;
1607    }
1608
1609    /*
1610     * (non-Javadoc)
1611     *
1612     * @see hdf.object.DataFormat#writeMetadata(java.lang.Object)
1613     */
1614    @Override
1615    public void writeMetadata(Object info) throws Exception {
1616        // only attribute metadata is supported.
1617        if (!(info instanceof Attribute)) {
1618            log.debug("writeMetadata(): Object not an Attribute");
1619            return;
1620        }
1621
1622        boolean attrExisted = false;
1623        Attribute attr = (Attribute) info;
1624        log.trace("writeMetadata(): {}", attr.getName());
1625
1626        if (attributeList == null) {
1627            this.getMetadata();
1628        }
1629
1630        if (attributeList != null)
1631            attrExisted = attributeList.contains(attr);
1632
1633        getFileFormat().writeAttribute(this, attr, attrExisted);
1634        // add the new attribute into attribute list
1635        if (!attrExisted) {
1636            attributeList.add(attr);
1637            nAttributes = attributeList.size();
1638        }
1639    }
1640
1641    /*
1642     * (non-Javadoc)
1643     *
1644     * @see hdf.object.DataFormat#removeMetadata(java.lang.Object)
1645     */
1646    @Override
1647    public void removeMetadata(Object info) throws HDF5Exception {
1648        // only attribute metadata is supported.
1649        if (!(info instanceof Attribute)) {
1650            log.debug("removeMetadata(): Object not an Attribute");
1651            return;
1652        }
1653
1654        Attribute attr = (Attribute) info;
1655        log.trace("removeMetadata(): {}", attr.getName());
1656        long did = open();
1657        if (did >= 0) {
1658            try {
1659                H5.H5Adelete(did, attr.getName());
1660                List<Attribute> attrList = getMetadata();
1661                attrList.remove(attr);
1662                nAttributes = attrList.size();
1663            }
1664            finally {
1665                close(did);
1666            }
1667        }
1668    }
1669
1670    /*
1671     * (non-Javadoc)
1672     *
1673     * @see hdf.object.DataFormat#updateMetadata(java.lang.Object)
1674     */
1675    @Override
1676    public void updateMetadata(Object info) throws HDF5Exception {
1677        // only attribute metadata is supported.
1678        if (!(info instanceof Attribute)) {
1679            log.debug("updateMetadata(): Object not an Attribute");
1680            return;
1681        }
1682
1683        nAttributes = -1;
1684    }
1685
1686    /*
1687     * (non-Javadoc)
1688     *
1689     * @see hdf.object.HObject#setName(java.lang.String)
1690     */
1691    @Override
1692    public void setName(String newName) throws Exception {
1693        if (newName == null)
1694            throw new IllegalArgumentException("The new name is NULL");
1695
1696        H5File.renameObject(this, newName);
1697        super.setName(newName);
1698    }
1699
1700    /**
1701     * Resets selection of dataspace
1702     */
1703    private void resetSelection() {
1704        for (int i = 0; i < rank; i++) {
1705            startDims[i] = 0;
1706            selectedDims[i] = 1;
1707            if (selectedStride != null) {
1708                selectedStride[i] = 1;
1709            }
1710        }
1711
1712        if (rank == 1) {
1713            selectedIndex[0] = 0;
1714            selectedDims[0] = dims[0];
1715        }
1716        else if (rank == 2) {
1717            selectedIndex[0] = 0;
1718            selectedIndex[1] = 1;
1719            selectedDims[0] = dims[0];
1720            selectedDims[1] = dims[1];
1721        }
1722        else if (rank > 2) {
1723            // selectedIndex[0] = rank - 2; // columns
1724            // selectedIndex[1] = rank - 1; // rows
1725            // selectedIndex[2] = rank - 3;
1726            selectedIndex[0] = 0; // width, the fastest dimension
1727            selectedIndex[1] = 1; // height
1728            selectedIndex[2] = 2; // frames
1729            // selectedDims[rank - 1] = dims[rank - 1];
1730            // selectedDims[rank - 2] = dims[rank - 2];
1731            selectedDims[selectedIndex[0]] = dims[selectedIndex[0]];
1732            selectedDims[selectedIndex[1]] = dims[selectedIndex[1]];
1733        }
1734
1735        isDataLoaded = false;
1736        setAllMemberSelection(true);
1737    }
1738
1739    /**
1740     * @deprecated Not for public use in the future. <br>
1741     *             Using
1742     *             {@link #create(String, Group, long[], long[], long[], int, String[], Datatype[], int[], long[][], Object)}
1743     *
1744     * @param name
1745     *            the name of the dataset to create.
1746     * @param pgroup
1747     *            parent group where the new dataset is created.
1748     * @param dims
1749     *            the dimension size of the dataset.
1750     * @param memberNames
1751     *            the names of compound datatype
1752     * @param memberDatatypes
1753     *            the datatypes of the compound datatype
1754     * @param memberSizes
1755     *            the dim sizes of the members
1756     * @param data
1757     *            list of data arrays written to the new dataset, null if no data is written to the new
1758     *            dataset.
1759     *
1760     * @return the new compound dataset if successful; otherwise returns null.
1761     *
1762     * @throws Exception
1763     *             if there is a failure.
1764     */
1765    @Deprecated
1766    public static Dataset create(String name, Group pgroup, long[] dims, String[] memberNames,
1767            Datatype[] memberDatatypes, int[] memberSizes, Object data) throws Exception {
1768        if ((pgroup == null) || (name == null) || (dims == null) || (memberNames == null) || (memberDatatypes == null)
1769                || (memberSizes == null)) {
1770            return null;
1771        }
1772
1773        int nMembers = memberNames.length;
1774        int memberRanks[] = new int[nMembers];
1775        long memberDims[][] = new long[nMembers][1];
1776        for (int i = 0; i < nMembers; i++) {
1777            memberRanks[i] = 1;
1778            memberDims[i][0] = memberSizes[i];
1779        }
1780
1781        return H5CompoundDS.create(name, pgroup, dims, memberNames, memberDatatypes, memberRanks, memberDims, data);
1782    }
1783
1784    /**
1785     * @deprecated Not for public use in the future. <br>
1786     *             Using
1787     *             {@link #create(String, Group, long[], long[], long[], int, String[], Datatype[], int[], long[][], Object)}
1788     *
1789     * @param name
1790     *            the name of the dataset to create.
1791     * @param pgroup
1792     *            parent group where the new dataset is created.
1793     * @param dims
1794     *            the dimension size of the dataset.
1795     * @param memberNames
1796     *            the names of compound datatype
1797     * @param memberDatatypes
1798     *            the datatypes of the compound datatype
1799     * @param memberRanks
1800     *            the ranks of the members
1801     * @param memberDims
1802     *            the dim sizes of the members
1803     * @param data
1804     *            list of data arrays written to the new dataset, null if no data is written to the new
1805     *            dataset.
1806     *
1807     * @return the new compound dataset if successful; otherwise returns null.
1808     *
1809     * @throws Exception
1810     *             if the dataset can not be created.
1811     */
1812    @Deprecated
1813    public static Dataset create(String name, Group pgroup, long[] dims, String[] memberNames,
1814            Datatype[] memberDatatypes, int[] memberRanks, long[][] memberDims, Object data) throws Exception {
1815        return H5CompoundDS.create(name, pgroup, dims, null, null, -1, memberNames, memberDatatypes, memberRanks,
1816                memberDims, data);
1817    }
1818
1819    /**
1820     * Creates a simple compound dataset in a file with/without chunking and compression.
1821     * <p>
1822     * This function provides an easy way to create a simple compound dataset in file by hiding tedious
1823     * details of creating a compound dataset from users.
1824     * <p>
1825     * This function calls H5.H5Dcreate() to create a simple compound dataset in file. Nested compound
1826     * dataset is not supported. The required information to create a compound dataset includes the
1827     * name, the parent group and data space of the dataset, the names, datatypes and data spaces of the
1828     * compound fields. Other information such as chunks, compression and the data buffer is optional.
1829     * <p>
1830     * The following example shows how to use this function to create a compound dataset in file.
1831     *
1832     * <pre>
1833     * H5File file = null;
1834     * String message = &quot;&quot;;
1835     * Group pgroup = null;
1836     * int[] DATA_INT = new int[DIM_SIZE];
1837     * float[] DATA_FLOAT = new float[DIM_SIZE];
1838     * String[] DATA_STR = new String[DIM_SIZE];
1839     * long[] DIMs = { 50, 10 };
1840     * long[] CHUNKs = { 25, 5 };
1841     *
1842     * try {
1843     *     file = (H5File) H5FILE.open(fname, H5File.CREATE);
1844     *     file.open();
1845     *     pgroup = (Group) file.get(&quot;/&quot;);
1846     * }
1847     * catch (Exception ex) {
1848     * }
1849     *
1850     * Vector data = new Vector();
1851     * data.add(0, DATA_INT);
1852     * data.add(1, DATA_FLOAT);
1853     * data.add(2, DATA_STR);
1854     *
1855     * // create groups
1856     * Datatype[] mdtypes = new H5Datatype[3];
1857     * String[] mnames = { &quot;int&quot;, &quot;float&quot;, &quot;string&quot; };
1858     * Dataset dset = null;
1859     * try {
1860     *     mdtypes[0] = new H5Datatype(Datatype.CLASS_INTEGER, 4, Datatype.NATIVE, Datatype.NATIVE);
1861     *     mdtypes[1] = new H5Datatype(Datatype.CLASS_FLOAT, 4, Datatype.NATIVE, Datatype.NATIVE);
1862     *     mdtypes[2] = new H5Datatype(Datatype.CLASS_STRING, STR_LEN, Datatype.NATIVE, Datatype.NATIVE);
1863     *     dset = file.createCompoundDS(&quot;/CompoundDS&quot;, pgroup, DIMs, null, CHUNKs, 9, mnames, mdtypes, null, data);
1864     * }
1865     * catch (Exception ex) {
1866     *     failed(message, ex, file);
1867     *     return 1;
1868     * }
1869     * </pre>
1870     *
1871     * @param name
1872     *            the name of the dataset to create.
1873     * @param pgroup
1874     *            parent group where the new dataset is created.
1875     * @param dims
1876     *            the dimension size of the dataset.
1877     * @param maxdims
1878     *            the max dimension size of the dataset. maxdims is set to dims if maxdims = null.
1879     * @param chunks
1880     *            the chunk size of the dataset. No chunking if chunk = null.
1881     * @param gzip
1882     *            GZIP compression level (1 to 9). 0 or negative values if no compression.
1883     * @param memberNames
1884     *            the names of compound datatype
1885     * @param memberDatatypes
1886     *            the datatypes of the compound datatype
1887     * @param memberRanks
1888     *            the ranks of the members
1889     * @param memberDims
1890     *            the dim sizes of the members
1891     * @param data
1892     *            list of data arrays written to the new dataset, null if no data is written to the new
1893     *            dataset.
1894     *
1895     * @return the new compound dataset if successful; otherwise returns null.
1896     *
1897     * @throws Exception
1898     *             if there is a failure.
1899     */
1900    public static Dataset create(String name, Group pgroup, long[] dims, long[] maxdims, long[] chunks, int gzip,
1901            String[] memberNames, Datatype[] memberDatatypes, int[] memberRanks, long[][] memberDims, Object data)
1902                    throws Exception {
1903        H5CompoundDS dataset = null;
1904        String fullPath = null;
1905        long did = -1;
1906        long tid = -1;
1907        long plist = -1;
1908        long sid = -1;
1909
1910        if ((pgroup == null) || (name == null) || (dims == null) || ((gzip > 0) && (chunks == null))
1911                || (memberNames == null) || (memberDatatypes == null) || (memberRanks == null)
1912                || (memberDims == null)) {
1913            log.debug("create(): one or more parameters are null");
1914            return null;
1915        }
1916
1917        H5File file = (H5File) pgroup.getFileFormat();
1918        if (file == null) {
1919            log.debug("create(): parent group FileFormat is null");
1920            return null;
1921        }
1922
1923        String path = HObject.SEPARATOR;
1924        if (!pgroup.isRoot()) {
1925            path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR;
1926            if (name.endsWith("/")) {
1927                name = name.substring(0, name.length() - 1);
1928            }
1929            int idx = name.lastIndexOf('/');
1930            if (idx >= 0) {
1931                name = name.substring(idx + 1);
1932            }
1933        }
1934
1935        fullPath = path + name;
1936
1937        int typeSize = 0;
1938        int nMembers = memberNames.length;
1939        long[] mTypes = new long[nMembers];
1940        int memberSize = 1;
1941        for (int i = 0; i < nMembers; i++) {
1942            memberSize = 1;
1943            for (int j = 0; j < memberRanks[i]; j++) {
1944                memberSize *= memberDims[i][j];
1945            }
1946
1947            mTypes[i] = -1;
1948            // the member is an array
1949            if ((memberSize > 1) && (!memberDatatypes[i].isString())) {
1950                long tmptid = -1;
1951                if ((tmptid = memberDatatypes[i].createNative()) >= 0) {
1952                    try {
1953                        mTypes[i] = H5.H5Tarray_create(tmptid, memberRanks[i], memberDims[i]);
1954                    }
1955                    finally {
1956                        try {
1957                            H5.H5Tclose(tmptid);
1958                        }
1959                        catch (Exception ex) {
1960                            log.debug("create(): H5Tclose(tmptid {}) failure: ", tmptid, ex);
1961                        }
1962                    }
1963                }
1964            }
1965            else {
1966                mTypes[i] = memberDatatypes[i].createNative();
1967            }
1968            try {
1969                typeSize += H5.H5Tget_size(mTypes[i]);
1970            }
1971            catch (Exception ex) {
1972                log.debug("create(): array create H5Tget_size:", ex);
1973
1974                while (i > 0) {
1975                    try {
1976                        H5.H5Tclose(mTypes[i]);
1977                    }
1978                    catch (HDF5Exception ex2) {
1979                        log.debug("create(): H5Tclose(mTypes[{}] {}) failure: ", i, mTypes[i], ex2);
1980                    }
1981                    i--;
1982                }
1983                throw ex;
1984            }
1985        } //  (int i = 0; i < nMembers; i++) {
1986
1987        // setup chunking and compression
1988        boolean isExtentable = false;
1989        if (maxdims != null) {
1990            for (int i = 0; i < maxdims.length; i++) {
1991                if (maxdims[i] == 0) {
1992                    maxdims[i] = dims[i];
1993                }
1994                else if (maxdims[i] < 0) {
1995                    maxdims[i] = HDF5Constants.H5S_UNLIMITED;
1996                }
1997
1998                if (maxdims[i] != dims[i]) {
1999                    isExtentable = true;
2000                }
2001            }
2002        }
2003
2004        // HDF5 requires you to use chunking in order to define extendible
2005        // datasets. Chunking makes it possible to extend datasets efficiently,
2006        // without having to reorganize storage excessively. Using default size
2007        // of 64x...which has good performance
2008        if ((chunks == null) && isExtentable) {
2009            chunks = new long[dims.length];
2010            for (int i = 0; i < dims.length; i++)
2011                chunks[i] = Math.min(dims[i], 64);
2012        }
2013
2014        // prepare the dataspace and datatype
2015        int rank = dims.length;
2016
2017        try {
2018            sid = H5.H5Screate_simple(rank, dims, maxdims);
2019
2020            // figure out creation properties
2021            plist = HDF5Constants.H5P_DEFAULT;
2022
2023            tid = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, typeSize);
2024            int offset = 0;
2025            for (int i = 0; i < nMembers; i++) {
2026                H5.H5Tinsert(tid, memberNames[i], offset, mTypes[i]);
2027                offset += H5.H5Tget_size(mTypes[i]);
2028            }
2029
2030            if (chunks != null) {
2031                plist = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
2032
2033                H5.H5Pset_layout(plist, HDF5Constants.H5D_CHUNKED);
2034                H5.H5Pset_chunk(plist, rank, chunks);
2035
2036                // compression requires chunking
2037                if (gzip > 0) {
2038                    H5.H5Pset_deflate(plist, gzip);
2039                }
2040            }
2041
2042            long fid = file.getFID();
2043
2044            did = H5.H5Dcreate(fid, fullPath, tid, sid, HDF5Constants.H5P_DEFAULT, plist, HDF5Constants.H5P_DEFAULT);
2045            dataset = new H5CompoundDS(file, name, path);
2046        }
2047        finally {
2048            try {
2049                H5.H5Pclose(plist);
2050            }
2051            catch (HDF5Exception ex) {
2052                log.debug("create(): H5Pclose(plist {}) failure: ", plist, ex);
2053            }
2054            try {
2055                H5.H5Sclose(sid);
2056            }
2057            catch (HDF5Exception ex) {
2058                log.debug("create(): H5Sclose(sid {}) failure: ", sid, ex);
2059            }
2060            try {
2061                H5.H5Tclose(tid);
2062            }
2063            catch (HDF5Exception ex) {
2064                log.debug("create(): H5Tclose(tid {}) failure: ", tid, ex);
2065            }
2066            try {
2067                H5.H5Dclose(did);
2068            }
2069            catch (HDF5Exception ex) {
2070                log.debug("create(): H5Dclose(did {}) failure: ", did, ex);
2071            }
2072
2073            for (int i = 0; i < nMembers; i++) {
2074                try {
2075                    H5.H5Tclose(mTypes[i]);
2076                }
2077                catch (HDF5Exception ex) {
2078                    log.debug("create(): H5Tclose(mTypes[{}] {}) failure: ", i, mTypes[i], ex);
2079                }
2080            }
2081        }
2082
2083        if (dataset != null) {
2084            pgroup.addToMemberList(dataset);
2085            if (data != null) {
2086                dataset.init();
2087                long selected[] = dataset.getSelectedDims();
2088                for (int i = 0; i < rank; i++) {
2089                    selected[i] = dims[i];
2090                }
2091                dataset.write(data);
2092            }
2093        }
2094
2095        return dataset;
2096    }
2097
2098    /*
2099     * (non-Javadoc)
2100     *
2101     * @see hdf.object.Dataset#isString(long)
2102     */
2103    @Override
2104    public boolean isString(long tid) {
2105        boolean b = false;
2106        try {
2107            b = (HDF5Constants.H5T_STRING == H5.H5Tget_class(tid));
2108        }
2109        catch (Exception ex) {
2110            b = false;
2111        }
2112
2113        return b;
2114    }
2115
2116    /*
2117     * (non-Javadoc)
2118     *
2119     * @see hdf.object.Dataset#getSize(long)
2120     */
2121    @Override
2122    public long getSize(long tid) {
2123        long tsize = -1;
2124
2125        try {
2126            tsize = H5.H5Tget_size(tid);
2127        }
2128        catch (Exception ex) {
2129            tsize = -1;
2130        }
2131
2132        return tsize;
2133    }
2134
2135    /*
2136     * (non-Javadoc)
2137     *
2138     * @see hdf.object.Dataset#isVirtual()
2139     */
2140    @Override
2141    public boolean isVirtual() {
2142        return isVirtual;
2143    }
2144
2145    /*
2146     * (non-Javadoc)
2147     *
2148     * @see hdf.object.Dataset#getVirtualFilename(int)
2149     */
2150    @Override
2151    public String getVirtualFilename(int index) {
2152        return (isVirtual) ? virtualNameList.get(index) : null;
2153    }
2154
2155    /*
2156     * (non-Javadoc)
2157     *
2158     * @see hdf.object.Dataset#getVirtualMaps()
2159     */
2160    @Override
2161    public int getVirtualMaps() {
2162        return (isVirtual) ? virtualNameList.size() : -1;
2163    }
2164
2165}