/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the files COPYING and Copyright.html. *
 * COPYING can be found at the root of the source code distribution tree.    *
 * Or, see https://support.hdfgroup.org/products/licenses.html               *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.h5;

import java.lang.reflect.Array;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Vector;

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
import hdf.hdf5lib.HDFNativeData;
import hdf.hdf5lib.exceptions.HDF5DataFiltersException;
import hdf.hdf5lib.exceptions.HDF5Exception;
import hdf.hdf5lib.structs.H5O_info_t;
import hdf.object.Attribute;
import hdf.object.CompoundDS;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.Utils;

/**
 * The H5CompoundDS class defines an HDF5 dataset of compound datatypes.
 * <p>
 * An HDF5 dataset is an object composed of a collection of data elements, or raw data, and metadata
 * that stores a description of the data elements, data layout, and all other information necessary
 * to write, read, and interpret the stored data.
 * <p>
 * An HDF5 compound datatype is similar to a struct in C or a common block in Fortran: it is a
 * collection of one or more atomic types or small arrays of such types. Each member of a compound
 * type has a name which is unique within that type, and a byte offset that determines the first
 * byte (smallest byte address) of that member in a compound datum.
 * <p>
 * For more information on HDF5 datasets and datatypes, read the <a href=
 * "https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5
 * User's Guide</a>.
 * <p>
 * There are two basic types of compound datasets: simple compound data and nested compound data.
 * Members of a simple compound dataset have atomic datatypes. Members of a nested compound dataset
 * are compound or arrays of compound data.
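 * <p>
 * For example, a nested compound corresponds to a C struct that itself contains a struct member
 * (an illustrative sketch, reusing the s1_t type defined below):
 *
 * <pre>
 * typedef struct s2_t {
 *         int    id;
 *         s1_t   nested;
 *         } s2_t;
 * </pre>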
 * <p>
 * Since Java does not understand C structures, we cannot directly read/write compound data values
 * as in the following C example.
 *
 * <pre>
 * typedef struct s1_t {
 *         int    a;
 *         float  b;
 *         double c;
 *         } s1_t;
 *     s1_t       s1[LENGTH];
 *     ...
 *     H5Dwrite(..., s1);
 *     H5Dread(..., s1);
 * </pre>
 *
 * Values of compound data fields are stored in a java.util.Vector object. We read and write
 * compound data by field instead of by whole structure. For the example above, the java.util.Vector
 * object has three elements: int[LENGTH], float[LENGTH] and double[LENGTH]. Since Java understands
 * the primitive datatypes int, float and double, we are able to read/write the compound data by
 * field.
 *
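 * For example, the dataset above can be read field by field (an illustrative sketch, assuming
 * "dset" is an H5CompoundDS opened on s1[LENGTH]):
 *
 * <pre>
 * List&lt;?&gt; data = (List&lt;?&gt;) dset.getData();
 * int[]    a = (int[]) data.get(0);    // field "a"
 * float[]  b = (float[]) data.get(1);  // field "b"
 * double[] c = (double[]) data.get(2); // field "c"
 * </pre>
 *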
 * @version 1.1 9/4/2007
 * @author Peter X. Cao
 */
public class H5CompoundDS extends CompoundDS {
    private static final long serialVersionUID = -5968625125574032736L;

    private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5CompoundDS.class);

    /**
     * The list of attributes of this data object. Members of the list are instances of Attribute.
     */
    private List<Attribute> attributeList;

    private int nAttributes = -1;

    private H5O_info_t objInfo;

    /**
     * A list of names of all fields including nested fields.
     * <p>
     * The nested names are separated by CompoundDS.SEPARATOR. For example, if compound dataset "A" has
     * the following nested structure,
     *
     * <pre>
     * A --&gt; m01
     * A --&gt; m02
     * A --&gt; nest1 --&gt; m11
     * A --&gt; nest1 --&gt; m12
     * A --&gt; nest1 --&gt; nest2 --&gt; m21
     * A --&gt; nest1 --&gt; nest2 --&gt; m22
     * i.e.
     * A = { m01, m02, nest1{m11, m12, nest2{ m21, m22}}}
     * </pre>
     *
     * the flatNameList of compound dataset "A" will be {m01, m02, nest1[m11, nest1[m12,
     * nest1[nest2[m21, nest1[nest2[m22}
     */
    private List<String> flatNameList;

    /**
     * A list of datatypes of all fields including nested fields.
     */
    private List<Datatype> flatTypeList;

    /** flag to indicate if the dataset is an external dataset */
    private boolean isExternal = false;

    /** flag to indicate if the dataset is a virtual dataset */
    private boolean isVirtual = false;
    private List<String> virtualNameList;

    /*
     * Enum to indicate the type of I/O to perform inside the common I/O
     * function.
     */
    protected static enum IO_TYPE {
        READ, WRITE
    }

    /**
     * Constructs an instance of an HDF5 compound dataset with a given file, dataset name and path.
     * <p>
     * The dataset object represents an existing dataset in the file. For example, new
     * H5CompoundDS(file, "dset1", "/g0/") constructs a dataset object that corresponds to the
     * dataset "dset1" at group "/g0/".
     * <p>
     * This object is usually constructed during FileFormat.open(), which loads the file structure
     * and object information into memory. It is rarely used elsewhere.
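     * <p>
     * An illustrative sketch, assuming "file" is an open H5File that contains /g0/dset1:
     *
     * <pre>
     * H5CompoundDS dset = new H5CompoundDS(file, "dset1", "/g0/");
     * dset.init();
     * </pre>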
     *
     * @param theFile
     *            the file that contains the data object.
     * @param theName
     *            the name of the data object, e.g. "dset".
     * @param thePath
     *            the full path of the data object, e.g. "/arrays/".
     */
    public H5CompoundDS(FileFormat theFile, String theName, String thePath) {
        this(theFile, theName, thePath, null);
    }

    /**
     * @deprecated Not for public use in the future.<br>
     *             Use {@link #H5CompoundDS(FileFormat, String, String)} instead.
     *
     * @param theFile
     *            the file that contains the data object.
     * @param theName
     *            the name of the data object, e.g. "dset".
     * @param thePath
     *            the full path of the data object, e.g. "/arrays/".
     * @param oid
     *            the oid of the data object.
     */
    @Deprecated
    public H5CompoundDS(FileFormat theFile, String theName, String thePath, long[] oid) {
        super(theFile, theName, thePath, oid);
        objInfo = new H5O_info_t(-1L, -1L, 0, 0, -1L, 0L, 0L, 0L, 0L, null, null, null);

        if ((oid == null) && (theFile != null)) {
            // retrieve the object ID
            try {
                byte[] refBuf = H5.H5Rcreate(theFile.getFID(), this.getFullName(), HDF5Constants.H5R_OBJECT, -1);
                this.oid = new long[1];
                this.oid[0] = HDFNativeData.byteToLong(refBuf, 0);
            }
            catch (Exception ex) {
                log.debug("constructor: H5Rcreate failed for {} in file ID {}: ", this.getFullName(), theFile.getFID(), ex);
            }
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#open()
     */
    @Override
    public long open() {
        long did = -1;

        try {
            did = H5.H5Dopen(getFID(), getPath() + getName(), HDF5Constants.H5P_DEFAULT);
            log.trace("open(): did={}", did);
        }
        catch (HDF5Exception ex) {
            log.debug("open(): Failed to open dataset {}: ", getPath() + getName(), ex);
            did = -1;
        }

        return did;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#close(int)
     */
    @Override
    public void close(long did) {
        if (did >= 0) {
            try {
                H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_LOCAL);
            }
            catch (Exception ex) {
                log.debug("close(): H5Fflush(did {}) failure: ", did, ex);
            }
            try {
                H5.H5Dclose(did);
            }
            catch (HDF5Exception ex) {
                log.debug("close(): H5Dclose(did {}) failure: ", did, ex);
            }
        }
    }

    /**
     * Retrieves datatype and dataspace information from the file and initializes the dataset
     * in memory.
     * <p>
     * init() is designed to support lazy operation in a dataset object. When a
     * data object is retrieved from file, the datatype, dataspace and raw data are
     * not loaded into memory. When it is asked to read the raw data from file,
     * init() is first called to get the datatype and dataspace information, then
     * load the raw data from file.
     * <p>
     * init() is also used to reset the selection of a dataset (start, stride and
     * count) to the default, which is the entire dataset for 1D or 2D datasets. In
     * the following example, init() at step 1) retrieves datatype and dataspace
     * information from the file. getData() at step 3) reads only one data point.
     * init() at step 4) resets the selection to the whole dataset. getData() at
     * step 6) reads the values of the whole dataset into memory.
     *
     * <pre>
     * dset = (Dataset) file.get(NAME_DATASET);
     *
     * // 1) get datatype and dataspace information from file
     * dset.init();
     * rank = dset.getRank(); // rank = 2, a 2D dataset
     * count = dset.getSelectedDims();
     * start = dset.getStartDims();
     * dims = dset.getDims();
     *
     * // 2) select only one data point
     * for (int i = 0; i &lt; rank; i++) {
     *     start[i] = 0;
     *     count[i] = 1;
     * }
     *
     * // 3) read one data point
     * data = dset.getData();
     *
     * // 4) reset selection to the whole dataset
     * dset.init();
     *
     * // 5) clear the in-memory data buffer
     * dset.clearData();
     *
     * // 6) read the whole dataset
     * data = dset.getData();
     * </pre>
     */
    @Override
    public void init() {
        if (inited) {
            resetSelection();
            log.trace("init(): Dataset already initialized");
            return; // already called. Initialize only once
        }

        long did = -1;
        long tid = -1;
        long sid = -1;
        flatNameList = new Vector<>();
        flatTypeList = new Vector<>();

        did = open();
        if (did >= 0) {
            // check if it is an external or virtual dataset
            long pid = -1;
            try {
                pid = H5.H5Dget_create_plist(did);
                try {
                    int nfiles = H5.H5Pget_external_count(pid);
                    isExternal = (nfiles > 0);
                    int layoutType = H5.H5Pget_layout(pid);
                    isVirtual = (layoutType == HDF5Constants.H5D_VIRTUAL);
                    if (isVirtual) {
                        try {
                            long vmaps = H5.H5Pget_virtual_count(pid);
                            if (vmaps > 0) {
                                virtualNameList = new Vector<>();
                                for (long next = 0; next < vmaps; next++) {
                                    try {
                                        String fname = H5.H5Pget_virtual_filename(pid, next);
                                        virtualNameList.add(fname);
                                        log.trace("init(): virtualNameList[{}]={}", next, fname);
                                    }
                                    catch (Exception err) {
                                        log.trace("init(): vds[{}] continue", next);
                                    }
                                }
                            }
                        }
                        catch (Exception err) {
                            log.debug("init(): vds count error: ", err);
                        }
                    }
                    log.trace("init(): pid={} nfiles={} isExternal={} isVirtual={}", pid, nfiles, isExternal, isVirtual);
                }
                catch (Exception ex) {
                    log.debug("init(): check if it is an external or virtual dataset: ", ex);
                }
            }
            catch (Exception ex) {
                log.debug("init(): H5Dget_create_plist() failure: ", ex);
            }
            finally {
                try {
                    H5.H5Pclose(pid);
                }
                catch (Exception ex) {
                    log.debug("init(): H5Pclose(pid {}) failure: ", pid, ex);
                }
            }

            try {
                sid = H5.H5Dget_space(did);
                rank = H5.H5Sget_simple_extent_ndims(sid);
                tid = H5.H5Dget_type(did);
                log.trace("init(): tid={} sid={} rank={}", tid, sid, rank);

                if (rank == 0) {
                    // a scalar data point
                    rank = 1;
                    dims = new long[1];
                    dims[0] = 1;
                    log.trace("init(): dataset is a scalar data point");
                }
                else {
                    dims = new long[rank];
                    maxDims = new long[rank];
                    H5.H5Sget_simple_extent_dims(sid, dims, maxDims);
                    log.trace("init(): rank={}, dims={}, maxDims={}", rank, dims, maxDims);
                }

                startDims = new long[rank];
                selectedDims = new long[rank];

                try {
                    datatype = new H5Datatype(getFileFormat(), tid);

                    log.trace("init(): tid={} has isText={} : isVLEN={} : isEnum={} : isUnsigned={} : isRegRef={}", tid,
                            datatype.isText(), datatype.isVLEN(), ((H5Datatype) datatype).isEnum(), datatype.isUnsigned(), ((H5Datatype) datatype).isRegRef());

                    H5Datatype.extractCompoundInfo((H5Datatype) datatype, "", flatNameList, flatTypeList);
                }
                catch (Exception ex) {
                    log.debug("init(): failed to create datatype for dataset: ", ex);
                    datatype = null;
                }

                // initialize member information
                numberOfMembers = flatNameList.size();
                log.trace("init(): numberOfMembers={}", numberOfMembers);

                memberNames = new String[numberOfMembers];
                memberTypes = new Datatype[numberOfMembers];
                memberOrders = new int[numberOfMembers];
                isMemberSelected = new boolean[numberOfMembers];
                memberDims = new Object[numberOfMembers];

                for (int i = 0; i < numberOfMembers; i++) {
                    isMemberSelected[i] = true;
                    memberOrders[i] = 1;
                    memberDims[i] = null;

                    try {
                        memberTypes[i] = flatTypeList.get(i);
                        log.trace("init()[{}]: memberTypes[{}]={}", i, i, memberTypes[i].getDescription());

                        if (memberTypes[i].isArray()) {
                            long mdim[] = memberTypes[i].getArrayDims();
                            int idim[] = new int[mdim.length];
                            int arrayNpoints = 1;

                            for (int j = 0; j < idim.length; j++) {
                                idim[j] = (int) mdim[j];
                                arrayNpoints *= idim[j];
                            }

                            memberDims[i] = idim;
                            memberOrders[i] = arrayNpoints;
                        }
                    }
                    catch (Exception ex) {
                        log.debug("init()[{}]: memberTypes[{}] get failure: ", i, i, ex);
                        memberTypes[i] = null;
                    }

                    try {
                        memberNames[i] = flatNameList.get(i);
                        log.trace("init()[{}]: memberNames[{}]={}", i, i, memberNames[i]);
                    }
                    catch (Exception ex) {
                        log.debug("init()[{}]: memberNames[{}] get failure: ", i, i, ex);
                        memberNames[i] = "null";
                    }
                } //  (int i=0; i<numberOfMembers; i++)

                inited = true;
            }
            catch (HDF5Exception ex) {
                numberOfMembers = 0;
                memberNames = null;
                memberTypes = null;
                memberOrders = null;
                log.debug("init(): ", ex);
            }
            finally {
                if (datatype != null) {
                    datatype.close(tid);
                }
                else {
                    // close the raw type ID ourselves if no H5Datatype took ownership of it
                    try {
                        H5.H5Tclose(tid);
                    }
                    catch (HDF5Exception ex2) {
                        log.debug("init(): H5Tclose(tid {}) failure: ", tid, ex2);
                    }
                }

                try {
                    H5.H5Sclose(sid);
                }
                catch (HDF5Exception ex2) {
                    log.debug("init(): H5Sclose(sid {}) failure: ", sid, ex2);
                }
            }

            close(did);
        }
        else {
            log.debug("init(): failed to open dataset");
        }

        resetSelection();
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.DataFormat#hasAttribute()
     */
    @Override
    public boolean hasAttribute() {
        objInfo.num_attrs = nAttributes;

        if (objInfo.num_attrs < 0) {
            long did = open();
            if (did >= 0) {
                try {
                    objInfo = H5.H5Oget_info(did);
                    nAttributes = (int) objInfo.num_attrs;
                }
                catch (Exception ex) {
                    objInfo.num_attrs = 0;
                    log.debug("hasAttribute(): get object info failure: ", ex);
                }
                close(did);
            }
            else {
                log.debug("hasAttribute(): could not open dataset");
            }
        }

        log.trace("hasAttribute(): nAttributes={}", objInfo.num_attrs);
        return (objInfo.num_attrs > 0);
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#getDatatype()
     */
    @Override
    public Datatype getDatatype() {
        if (!inited)
            init();

        if (datatype == null) {
            long did = -1;
            long tid = -1;

            did = open();
            if (did >= 0) {
                try {
                    tid = H5.H5Dget_type(did);
                    datatype = new H5Datatype(getFileFormat(), tid);
                }
                catch (Exception ex) {
                    log.debug("getDatatype(): ", ex);
                }
                finally {
                    try {
                        H5.H5Tclose(tid);
                    }
                    catch (HDF5Exception ex) {
                        log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex);
                    }
                    try {
                        H5.H5Dclose(did);
                    }
                    catch (HDF5Exception ex) {
                        log.debug("getDatatype(): H5Dclose(did {}) failure: ", did, ex);
                    }
                }
            }
        }

        if (isExternal) {
            String pdir = this.getFileFormat().getAbsoluteFile().getParent();

            if (pdir == null) {
                pdir = ".";
            }
            System.setProperty("user.dir", pdir);
            log.trace("getDatatype(): External dataset: user.dir={}", pdir);
        }

        return datatype;
    }

    @Override
    public Object getFillValue() {
        return null;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#clear()
     */
    @Override
    public void clear() {
        super.clear();

        if (attributeList != null) {
            ((Vector<Attribute>) attributeList).setSize(0);
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#readBytes()
     */
    @Override
    public byte[] readBytes() throws HDF5Exception {
        byte[] theData = null;

        if (!isInited())
            init();

        long did = open();
        if (did >= 0) {
            long fspace = -1;
            long mspace = -1;
            long tid = -1;

            try {
                long[] lsize = { 1 };
                for (int j = 0; j < selectedDims.length; j++) {
                    lsize[0] *= selectedDims[j];
                }

                fspace = H5.H5Dget_space(did);
                mspace = H5.H5Screate_simple(rank, selectedDims, null);

                // set the rectangle selection
                // HDF5 bug: for scalar dataset, H5Sselect_hyperslab gives core dump
                if (rank * dims[0] > 1) {
                    H5.H5Sselect_hyperslab(fspace, HDF5Constants.H5S_SELECT_SET, startDims, selectedStride,
                            selectedDims, null); // set block to 1
                }

                tid = H5.H5Dget_type(did);
                long size = H5.H5Tget_size(tid) * lsize[0];
                log.trace("readBytes(): size = {}", size);

                if (size < Integer.MIN_VALUE || size > Integer.MAX_VALUE)
                    throw new Exception("Invalid int size");

                theData = new byte[(int) size];

                log.trace("readBytes(): H5Dread: did={} tid={} fspace={} mspace={}", did, tid, fspace, mspace);
                H5.H5Dread(did, tid, mspace, fspace, HDF5Constants.H5P_DEFAULT, theData);
            }
            catch (Exception ex) {
                log.debug("readBytes(): failed to read data: ", ex);
            }
            finally {
                try {
                    H5.H5Sclose(fspace);
                }
                catch (Exception ex2) {
                    log.debug("readBytes(): H5Sclose(fspace {}) failure: ", fspace, ex2);
                }
                try {
                    H5.H5Sclose(mspace);
                }
                catch (Exception ex2) {
                    log.debug("readBytes(): H5Sclose(mspace {}) failure: ", mspace, ex2);
                }
                try {
                    H5.H5Tclose(tid);
                }
                catch (HDF5Exception ex2) {
                    log.debug("readBytes(): H5Tclose(tid {}) failure: ", tid, ex2);
                }
                close(did);
            }
        }

        return theData;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#read()
     */
    @Override
    public Object read() throws Exception {
        Object readData = null;

        if (!isInited())
            init();

        try {
            readData = compoundDatasetCommonIO(IO_TYPE.READ, null);
        }
        catch (Exception ex) {
            log.debug("read(): failed to read compound dataset: ", ex);
            throw new Exception("failed to read compound dataset: " + ex.getMessage(), ex);
        }

        return readData;
    }

    /**
     * Writes the given data buffer into this dataset in the file.
     * <p>
     * The data buffer is a vector that contains the data values of the compound fields. The data is
     * written into the file field by field.
     *
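     * For example (an illustrative sketch for the s1_t compound described in the class comment;
     * "LENGTH" and "dset" are assumptions):
     *
     * <pre>
     * Vector&lt;Object&gt; buf = new Vector&lt;&gt;();
     * buf.add(new int[LENGTH]);    // field "a"
     * buf.add(new float[LENGTH]);  // field "b"
     * buf.add(new double[LENGTH]); // field "c"
     * dset.write(buf);
     * </pre>
     *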
     * @param buf
     *            The vector that contains the data values of compound fields.
     *
     * @throws Exception
     *             If there is an error at the HDF5 library level.
     */
    @Override
    public void write(Object buf) throws Exception {
        if (this.getFileFormat().isReadOnly())
            throw new Exception("cannot write to compound dataset in file opened as read-only");

        if (!isInited())
            init();

        try {
            compoundDatasetCommonIO(IO_TYPE.WRITE, buf);
        }
        catch (Exception ex) {
            log.debug("write(): failed to write compound dataset: ", ex);
            throw new Exception("failed to write compound dataset: " + ex.getMessage(), ex);
        }
    }

    private Object compoundDatasetCommonIO(IO_TYPE ioType, Object writeBuf) throws Exception {
        H5Datatype dsDatatype = (H5Datatype) getDatatype();
        Object data = null;

        if (numberOfMembers <= 0) {
            log.debug("compoundDatasetCommonIO(): Dataset contains no members");
            throw new Exception("dataset contains no members");
        }

        /*
         * I/O type-specific pre-initialization.
         */
        if (ioType == IO_TYPE.WRITE) {
            if ((writeBuf == null) || !(writeBuf instanceof List)) {
                log.debug("compoundDatasetCommonIO(): writeBuf is null or invalid");
                throw new Exception("write buffer is null or invalid");
            }

            /*
             * Check for any unsupported datatypes and fail early before
             * attempting to write to the dataset.
             */
            if (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isCompound()) {
                log.debug("compoundDatasetCommonIO(): cannot write dataset of type ARRAY of COMPOUND");
                throw new HDF5Exception("Unsupported dataset of type ARRAY of COMPOUND");
            }

            if (dsDatatype.isVLEN() && dsDatatype.getDatatypeBase().isCompound()) {
                log.debug("compoundDatasetCommonIO(): cannot write dataset of type VLEN of COMPOUND");
                throw new HDF5Exception("Unsupported dataset of type VLEN of COMPOUND");
            }
        }

        long did = open();
        if (did >= 0) {
            long[] spaceIDs = { -1, -1 }; // spaceIDs[0]=mspace, spaceIDs[1]=fspace

            try {
                /*
                 * NOTE: this call sets up a hyperslab selection in the file according to the
                 * current selection in the dataset object.
                 */
                long totalSelectedSpacePoints = H5Utils.getTotalSelectedSpacePoints(did, dims, startDims,
                        selectedStride, selectedDims, spaceIDs);

                data = compoundTypeIO(ioType, did, spaceIDs, (int) totalSelectedSpacePoints, dsDatatype, writeBuf, new int[]{0});
            }
            finally {
                if (HDF5Constants.H5S_ALL != spaceIDs[0]) {
                    try {
                        H5.H5Sclose(spaceIDs[0]);
                    }
                    catch (Exception ex) {
                        log.debug("compoundDatasetCommonIO(): H5Sclose(spaceIDs[0] {}) failure: ", spaceIDs[0], ex);
                    }
                }

                if (HDF5Constants.H5S_ALL != spaceIDs[1]) {
                    try {
                        H5.H5Sclose(spaceIDs[1]);
                    }
                    catch (Exception ex) {
                        log.debug("compoundDatasetCommonIO(): H5Sclose(spaceIDs[1] {}) failure: ", spaceIDs[1], ex);
                    }
                }

                close(did);
            }
        }
        else
            log.debug("compoundDatasetCommonIO(): failed to open dataset");

        return data;
    }

    /*
     * Private recursive routine to read/write an entire compound datatype field by
     * field. This routine is called recursively for ARRAY of COMPOUND and VLEN of
     * COMPOUND datatypes.
     *
     * NOTE: the globalMemberIndex hack is ugly, but we need to keep track of a
     * running counter so that we can index properly into the flattened name list
     * generated from H5Datatype.extractCompoundInfo() at dataset init time.
     */
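    /*
     * For example (illustrative, using the nested structure documented for
     * flatNameList above): given A = { m01, m02, nest1{ m11, m12 } }, the flattened
     * name list is { m01, m02, nest1[m11, nest1[m12 } and globalMemberIndex[0]
     * advances 0 -> 1 -> 2 -> 3 as each leaf member is processed; the recursion
     * descends into nest1 without consuming an index of its own.
     */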
    private Object compoundTypeIO(IO_TYPE ioType, long did, long[] spaceIDs, int nSelPoints, final H5Datatype cmpdType,
            Object writeBuf, int[] globalMemberIndex) {
        Object theData = null;

        if (cmpdType.isArray()) {
            log.trace("compoundTypeIO(): ARRAY type");

            long[] arrayDims = cmpdType.getArrayDims();
            int arrSize = nSelPoints;
            for (int i = 0; i < arrayDims.length; i++) {
                arrSize *= arrayDims[i];
            }

            theData = compoundTypeIO(ioType, did, spaceIDs, arrSize, (H5Datatype) cmpdType.getDatatypeBase(), writeBuf, globalMemberIndex);
        }
        else if (cmpdType.isVLEN() && !cmpdType.isVarStr()) {
            /*
             * TODO: true variable-length support.
             */
            String[] errVal = new String[nSelPoints];
            String errStr = "*UNSUPPORTED*";

            for (int j = 0; j < nSelPoints; j++)
                errVal[j] = errStr;

            /*
             * Setup a fake data list.
             */
            Datatype baseType = cmpdType.getDatatypeBase();
            while (baseType != null && !baseType.isCompound()) {
                baseType = baseType.getDatatypeBase();
            }

            List<Object> fakeVlenData = (List<Object>) H5Datatype.allocateArray((H5Datatype) baseType, nSelPoints);
            fakeVlenData.add(errVal);

            theData = fakeVlenData;
        }
        else if (cmpdType.isCompound()) {
            List<Object> memberDataList = null;
            List<Datatype> typeList = cmpdType.getCompoundMemberTypes();

            log.trace("compoundTypeIO(): {} {} members:", (ioType == IO_TYPE.READ) ? "read" : "write",
                    typeList.size());

            if (ioType == IO_TYPE.READ) {
                memberDataList = (List<Object>) H5Datatype.allocateArray(cmpdType, nSelPoints);
            }

            try {
                for (int i = 0, writeListIndex = 0; i < typeList.size(); i++) {
                    H5Datatype memberType = null;
                    String memberName = null;
                    Object memberData = null;

                    try {
                        memberType = (H5Datatype) typeList.get(i);
                    }
                    catch (Exception ex) {
                        log.debug("compoundTypeIO(): get member {} failure: ", i, ex);
                        globalMemberIndex[0]++;
                        continue;
                    }

                    /*
                     * Since the type list used here is not a flattened structure, we need to skip
                     * the member selection check for compound types, as otherwise having a single
                     * member not selected would skip the reading/writing for the entire compound
                     * type. The member selection check will be deferred to the recursive compound
                     * read/write below.
                     */
                    if (!memberType.isCompound()) {
                        if (!isMemberSelected[globalMemberIndex[0] % this.getMemberCount()]) {
                            log.debug("compoundTypeIO(): member[{}] is not selected", i);
                            globalMemberIndex[0]++;
                            continue; // the field is not selected
                        }
                    }

                    if (!memberType.isCompound()) {
                        try {
                            memberName = flatNameList.get(globalMemberIndex[0]);
                        }
                        catch (Exception ex) {
                            log.debug("compoundTypeIO(): get member {} name failure: ", i, ex);
                            memberName = "null";
                        }
                    }

                    log.trace("compoundTypeIO(): member[{}]({}) is type {}", i, memberName, memberType.getDescription());

                    if (ioType == IO_TYPE.READ) {
                        try {
                            if (memberType.isCompound())
                                memberData = compoundTypeIO(ioType, did, spaceIDs, nSelPoints, memberType, writeBuf, globalMemberIndex);
                            else if (memberType.isArray() /* || (memberType.isVLEN() && !memberType.isVarStr()) */) {
                                /*
                                 * Recursively detect any nested array/vlen of compound types.
                                 */
                                boolean compoundFound = false;

                                Datatype base = memberType.getDatatypeBase();
                                while (base != null) {
                                    if (base.isCompound())
                                        compoundFound = true;

                                    base = base.getDatatypeBase();
                                }

                                if (compoundFound) {
                                    /*
                                     * Skip the top-level array/vlen type.
                                     */
                                    globalMemberIndex[0]++;

                                    memberData = compoundTypeIO(ioType, did, spaceIDs, nSelPoints, memberType, writeBuf, globalMemberIndex);
                                }
                                else {
                                    memberData = readSingleCompoundMember(did, spaceIDs, nSelPoints, memberType, memberName);
                                    globalMemberIndex[0]++;
                                }
                            }
                            else {
                                memberData = readSingleCompoundMember(did, spaceIDs, nSelPoints, memberType, memberName);
                                globalMemberIndex[0]++;
                            }
                        }
                        catch (Exception ex) {
                            log.debug("compoundTypeIO(): failed to read member {}: ", i, ex);
                            globalMemberIndex[0]++;
                            memberData = null;
                        }

                        if (memberData == null) {
                            String[] errVal = new String[nSelPoints];
                            String errStr = "*ERROR*";

                            for (int j = 0; j < nSelPoints; j++)
                                errVal[j] = errStr;

                            memberData = errVal;
                        }

                        memberDataList.add(memberData);
                    }
                    else {
                        try {
                            /*
                             * TODO: currently doesn't correctly handle non-selected compound members.
                             */
                            memberData = ((List<?>) writeBuf).get(writeListIndex++);
                        }
                        catch (Exception ex) {
                            log.debug("compoundTypeIO(): get member[{}] data failure: ", i, ex);
                            globalMemberIndex[0]++;
                            continue;
                        }

                        if (memberData == null) {
                            log.debug("compoundTypeIO(): member[{}] data is null", i);
                            globalMemberIndex[0]++;
                            continue;
                        }

                        try {
                            if (memberType.isCompound()) {
                                /*
                                 * memberData already holds the nested field list fetched above;
                                 * fetching from writeBuf again here would skip an element and
                                 * misalign every following member.
                                 */
                                compoundTypeIO(ioType, did, spaceIDs, nSelPoints, memberType, memberData, globalMemberIndex);
                            }
                            else {
                                writeSingleCompoundMember(did, spaceIDs, nSelPoints, memberType, memberName, memberData);
                                globalMemberIndex[0]++;
                            }
                        }
                        catch (Exception ex) {
                            log.debug("compoundTypeIO(): failed to write member[{}]: ", i, ex);
                            globalMemberIndex[0]++;
                        }
                    }
                } //  (i = 0, writeListIndex = 0; i < typeList.size(); i++)
            }
            catch (Exception ex) {
                log.debug("compoundTypeIO(): failure: ", ex);
                memberDataList = null;
            }

            theData = memberDataList;
        }

        return theData;
    }

    /*
     * Private routine to read a single field of a compound datatype by creating a
     * compound datatype and inserting the single field into that datatype.
     */
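    /*
     * For example (an illustrative sketch): reading just member "b" of the s1_t
     * compound from the class comment amounts to building a one-field compound
     * type and handing it to H5Dread:
     *
     *   long compTid = dsDatatype.createCompoundFieldType("b");
     *   float[] b = new float[nSelPoints];
     *   H5.H5Dread(did, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, b);
     */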
    private Object readSingleCompoundMember(long dsetID, long[] spaceIDs, int nSelPoints, final H5Datatype memberType,
            String memberName) throws Exception {
        H5Datatype dsDatatype = (H5Datatype) this.getDatatype();
        Object memberData = null;

        try {
            memberData = H5Datatype.allocateArray(memberType, nSelPoints);
            log.trace("readSingleCompoundMember(): allocateArray {} points", nSelPoints);
        }
        catch (OutOfMemoryError err) {
            memberData = null;
            throw new Exception("Out of memory", err);
        }
        catch (Exception ex) {
            log.debug("readSingleCompoundMember(): ", ex);
            memberData = null;
        }

        if (memberData != null) {
            /*
             * Create a compound datatype containing just a single field (the one which we
             * want to read).
             */
            long compTid = -1;
            try {
                compTid = dsDatatype.createCompoundFieldType(memberName);
            }
            catch (HDF5Exception ex) {
                log.debug("readSingleCompoundMember(): unable to create compound field type for member of type {}: ",
                        memberType.getDescription(), ex);
                memberData = null;
            }

            /*
             * Actually read the data for this member now that everything has been set up.
             */
            try {
                if (memberType.isVLEN() || (memberType.isArray() && memberType.getDatatypeBase().isVLEN())) {
                    log.trace("readSingleCompoundMember(): H5DreadVL did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}",
                            dsetID, compTid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
                            (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);

                    H5.H5DreadVL(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, (Object[]) memberData);
                }
                else {
                    log.trace("readSingleCompoundMember(): H5Dread did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}",
                            dsetID, compTid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
                            (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);

                    H5.H5Dread(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, memberData);
                }
            }
            catch (HDF5DataFiltersException exfltr) {
                log.debug("readSingleCompoundMember(): read failure: ", exfltr);
                throw new Exception("Filter not available exception: " + exfltr.getMessage(), exfltr);
            }
            catch (Exception ex) {
                log.debug("readSingleCompoundMember(): read failure: ", ex);
                throw new Exception("failed to read compound member: " + ex.getMessage(), ex);
            }
            finally {
                dsDatatype.close(compTid);
            }

            /*
             * Perform any necessary data conversions.
             */
            if (memberType.isUnsigned()) {
                log.trace("readSingleCompoundMember(): converting from unsigned C-type integers");
                memberData = Dataset.convertFromUnsignedC(memberData, null);
            }
            else if (Utils.getJavaObjectRuntimeClass(memberData) == 'B') {
                log.trace("readSingleCompoundMember(): converting byte array member into Object");

                /*
                 * For all other types that get read into memory as a byte[] (such as nested
                 * compounds and arrays of compounds), we must manually convert the byte[] into
                 * something usable.
                 */
                memberData = convertByteMember(memberType, (byte[]) memberData);
            }
        }

        return memberData;
    }

    /*
     * Private routine to write a single field of a compound datatype by creating a
     * compound datatype and inserting the single field into that datatype.
     */
    private void writeSingleCompoundMember(long dsetID, long[] spaceIDs, int nSelPoints, final H5Datatype memberType,
            String memberName, Object theData) throws Exception {
        H5Datatype dsDatatype = (H5Datatype) this.getDatatype();

        /*
         * Check for any unsupported datatypes before attempting to write this compound
         * member.
         */
        if (memberType.isVLEN() && !memberType.isVarStr()) {
            log.debug("writeSingleCompoundMember(): writing of VL non-strings is not currently supported");
            throw new Exception("writing of VL non-strings is not currently supported");
        }

        /*
         * Perform any necessary data conversions before writing the data.
         */
        Object tmpData = theData;
        try {
            if (memberType.isUnsigned()) {
                // Check if we need to convert unsigned integer data from Java-style
                // to C-style integers
                long tsize = memberType.getDatatypeSize();
                String cname = theData.getClass().getName();
                char dname = cname.charAt(cname.lastIndexOf('[') + 1);
                boolean doIntConversion = (((tsize == 1) && (dname == 'S'))
                        || ((tsize == 2) && (dname == 'I')) || ((tsize == 4) && (dname == 'J')));

                if (doIntConversion) {
                    log.trace("writeSingleCompoundMember(): converting integer data to unsigned C-type integers");
                    tmpData = convertToUnsignedC(theData, null);
                }
            }
            else if (memberType.isString() && (Array.get(theData, 0) instanceof String)) {
                log.trace("writeSingleCompoundMember(): converting string array to byte array");
                tmpData = stringToByte((String[]) theData, (int) memberType.getDatatypeSize());
            }
            else if (memberType.isEnum() && (Array.get(theData, 0) instanceof String)) {
                log.trace("writeSingleCompoundMember(): converting enum names to values");
                tmpData = memberType.convertEnumNameToValue((String[]) theData);
            }
        }
        catch (Exception ex) {
            log.debug("writeSingleCompoundMember(): data conversion failure: ", ex);
            tmpData = null;
        }

        if (tmpData == null) {
            log.debug("writeSingleCompoundMember(): data is null");
            return;
        }

        /*
         * Create a compound datatype containing just a single field (the one which we
         * want to write).
         */
        long compTid = -1;
        try {
            compTid = dsDatatype.createCompoundFieldType(memberName);
        }
        catch (HDF5Exception ex) {
            log.debug("writeSingleCompoundMember(): unable to create compound field type for member of type {}: ",
                    memberType.getDescription(), ex);
        }

        /*
         * Actually write the data now that everything has been set up.
         */
        try {
            if (memberType.isVarStr()) {
                log.trace("writeSingleCompoundMember(): H5Dwrite_string did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}",
                        dsetID, compTid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
                        (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);

                H5.H5Dwrite_string(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, (String[]) tmpData);
            }
            else {
                log.trace("writeSingleCompoundMember(): H5Dwrite did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}",
                        dsetID, compTid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
                        (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);

                // BUG!!! Nested compound data is not written and no exception is
                // thrown. Need to check whether this is a Java wrapper error or a
                // C library error.
                H5.H5Dwrite(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, tmpData);
            }
        }
        catch (Exception ex) {
            log.debug("writeSingleCompoundMember(): write failure: ", ex);
            throw new Exception("failed to write compound member: " + ex.getMessage(), ex);
        }
        finally {
            dsDatatype.close(compTid);
        }
    }

    /*
     * Private routine to convert datatypes that are read in as byte arrays to
     * regular types.
     */
    private Object convertByteMember(final H5Datatype dtype, byte[] byteData) {
        Object theObj = null;

        if (dtype.getDatatypeSize() == 1) {
            /*
             * Normal byte[] type, such as an integer datatype of size 1.
             */
            theObj = byteData;
        }
        else if (dtype.isString() && !dtype.isVarStr() && convertByteToString) {
            log.trace("convertByteMember(): converting byte array to string array");

            theObj = byteToString(byteData, (int) dtype.getDatatypeSize());
        }
        else if (dtype.isInteger()) {
            log.trace("convertByteMember(): converting byte array to integer array");

            // dispatch on the datatype size so that 2-byte and 8-byte integers
            // are not mis-converted through byteToInt()
            switch ((int) dtype.getDatatypeSize()) {
                case 2:
                    theObj = HDFNativeData.byteToShort(byteData);
                    break;
                case 8:
                    theObj = HDFNativeData.byteToLong(byteData);
                    break;
                default:
                    theObj = HDFNativeData.byteToInt(byteData);
            }
        }
        else if (dtype.isFloat()) {
            log.trace("convertByteMember(): converting byte array to float array");

            if (dtype.getDatatypeSize() == 16)
                theObj = dtype.byteToBigDecimal(byteData, 0);
            else if (dtype.getDatatypeSize() == 8)
                theObj = HDFNativeData.byteToDouble(byteData);
            else
                theObj = HDFNativeData.byteToFloat(byteData);
        }
        else if (dtype.isRef()) {
            log.trace("convertByteMember(): reference type - converting byte array to long array");

            theObj = HDFNativeData.byteToLong(byteData);
        }
        else if (dtype.isArray()) {
            H5Datatype baseType = (H5Datatype) dtype.getDatatypeBase();

            /*
             * Retrieve the real base datatype in the case of ARRAY of ARRAY datatypes.
             */
            while (baseType.isArray())
                baseType = (H5Datatype) baseType.getDatatypeBase();

            /*
             * Optimize for the common cases of arrays.
             */
            switch (baseType.getDatatypeClass()) {
                case Datatype.CLASS_INTEGER:
                case Datatype.CLASS_FLOAT:
                case Datatype.CLASS_CHAR:
                case Datatype.CLASS_STRING:
                case Datatype.CLASS_BITFIELD:
                case Datatype.CLASS_OPAQUE:
                case Datatype.CLASS_COMPOUND:
                case Datatype.CLASS_REFERENCE:
                case Datatype.CLASS_ENUM:
                case Datatype.CLASS_VLEN:
                case Datatype.CLASS_TIME:
                    theObj = convertByteMember(baseType, byteData);
                    break;

                case Datatype.CLASS_ARRAY:
                {
                    H5Datatype arrayType = (H5Datatype) dtype.getDatatypeBase();

                    long[] arrayDims = dtype.getArrayDims();
                    int arrSize = 1;
                    for (int i = 0; i < arrayDims.length; i++) {
                        arrSize *= arrayDims[i];
                    }

                    theObj = new Object[arrSize];

                    for (int i = 0; i < arrSize; i++) {
                        byte[] indexedBytes = Arrays.copyOfRange(byteData, (int) (i * arrayType.getDatatypeSize()),
                                (int) ((i + 1) * arrayType.getDatatypeSize()));
                        ((Object[]) theObj)[i] = convertByteMember(arrayType, indexedBytes);
                    }

                    break;
                }

                case Datatype.CLASS_NO_CLASS:
                default:
                    log.debug("convertByteMember(): invalid datatype class");
                    theObj = "*ERROR*";
            }
        }
        else if (dtype.isCompound()) {
            /*
             * TODO: still valid after reading change?
             */
            theObj = convertCompoundByteMembers(dtype, byteData);
        }
        else {
            theObj = byteData;
        }

        return theObj;
    }

    /**
     * Given an array of bytes representing a compound Datatype, converts each of
     * its members into Objects and returns the results.
     *
     * @param dtype
     *            The compound datatype to convert
     * @param data
     *            The byte array representing the data of the compound Datatype
     * @return The converted types of the bytes
     */
    private Object convertCompoundByteMembers(final H5Datatype dtype, byte[] data) {
        List<Object> theData = null;

        List<Datatype> allSelectedTypes = Arrays.asList(this.getSelectedMemberTypes());
        List<Datatype> localTypes = new ArrayList<>(dtype.getCompoundMemberTypes());
        Iterator<Datatype> localIt = localTypes.iterator();
        while (localIt.hasNext()) {
            Datatype curType = localIt.next();

            if (curType.isCompound())
                continue;

            if (!allSelectedTypes.contains(curType))
                localIt.remove();
        }

        theData = new ArrayList<>(localTypes.size());
        for (int i = 0, index = 0; i < localTypes.size(); i++) {
            Datatype curType = localTypes.get(i);

            if (curType.isCompound())
                theData.add(convertCompoundByteMembers((H5Datatype) curType,
                        Arrays.copyOfRange(data, index, index + (int) curType.getDatatypeSize())));
            else
                theData.add(convertByteMember((H5Datatype) curType,
                        Arrays.copyOfRange(data, index, index + (int) curType.getDatatypeSize())));

            index += curType.getDatatypeSize();
        }

        return theData;
    }

    @Override
    public Object convertFromUnsignedC() {
        throw new UnsupportedOperationException("H5CompoundDS:convertFromUnsignedC Unsupported operation.");
    }

    @Override
    public Object convertToUnsignedC() {
        throw new UnsupportedOperationException("H5CompoundDS:convertToUnsignedC Unsupported operation.");
    }
1308
1309    /*
1310     * (non-Javadoc)
1311     *
1312     * @see hdf.object.DataFormat#getMetadata()
1313     */
1314    @Override
1315    public List<Attribute> getMetadata() throws HDF5Exception {
1316        return this.getMetadata(fileFormat.getIndexType(null), fileFormat.getIndexOrder(null));
1317    }
1318
1319    /*
1320     * (non-Javadoc)
1321     *
1322     * @see hdf.object.DataFormat#getMetadata(int...)
1323     */
1324    public List<Attribute> getMetadata(int... attrPropList) throws HDF5Exception {
1325
1326        if (!isInited()) {
1327            init();
1328        }
1329
1330        try {
1331            this.linkTargetObjName = H5File.getLinkTargetName(this);
1332        }
1333        catch (Exception ex) {
1334            log.debug("getMetadata(): getLinkTargetName failed: ", ex);
1335        }
1336
1337        if (attributeList != null) {
1338            log.trace("getMetadata(): attributeList != null");
1339            return attributeList;
1340        }
1341
1342        long did = -1;
1343        long pcid = -1;
1344        long paid = -1;
1345        int indxType = fileFormat.getIndexType(null);
1346        int order = fileFormat.getIndexOrder(null);
1347
1348        // load attributes first
1349        if (attrPropList.length > 0) {
1350            indxType = attrPropList[0];
1351            if (attrPropList.length > 1) {
1352                order = attrPropList[1];
1353            }
1354        }

        attributeList = H5File.getAttribute(this, indxType, order);

        did = open();
        if (did >= 0) {
            try {
                // get the compression and chunk information
                pcid = H5.H5Dget_create_plist(did);
                paid = H5.H5Dget_access_plist(did);
                long storageSize = H5.H5Dget_storage_size(did);
                int nfilt = H5.H5Pget_nfilters(pcid);
                int layoutType = H5.H5Pget_layout(pcid);

                storageLayout.setLength(0);
                compression.setLength(0);

                if (layoutType == HDF5Constants.H5D_CHUNKED) {
                    chunkSize = new long[rank];
                    H5.H5Pget_chunk(pcid, rank, chunkSize);
                    int n = chunkSize.length;

                    storageLayout.append("CHUNKED: ").append(chunkSize[0]);
                    for (int i = 1; i < n; i++) {
                        storageLayout.append(" X ").append(chunkSize[i]);
                    }
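                    // e.g. chunkSize = {25, 5} renders as "CHUNKED: 25 X 5"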

                    if (nfilt > 0) {
                        long nelmts = 1;
                        long uncompSize;
                        long datumSize = getDatatype().getDatatypeSize();
                        if (datumSize < 0) {
                            long tmptid = -1;
                            try {
                                tmptid = H5.H5Dget_type(did);
                                datumSize = H5.H5Tget_size(tmptid);
                            }
                            finally {
                                try {
                                    H5.H5Tclose(tmptid);
                                }
                                catch (Exception ex2) {
                                    log.debug("getMetadata(): H5Tclose(tmptid {}) failure: ", tmptid, ex2);
                                }
                            }
                        }

                        for (int i = 0; i < rank; i++) {
                            nelmts *= dims[i];
                        }
                        uncompSize = nelmts * datumSize;

                        /* compression ratio = uncompressed size / compressed size */
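                        /* e.g. 1000000 raw bytes stored in 250000 bytes gives a ratio of 4.000:1 */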

                        if (storageSize != 0) {
                            double ratio = (double) uncompSize / (double) storageSize;
                            DecimalFormat df = new DecimalFormat();
                            df.setMinimumFractionDigits(3);
                            df.setMaximumFractionDigits(3);
                            compression.append(df.format(ratio)).append(":1");
                        }
                    }
                }
                else if (layoutType == HDF5Constants.H5D_COMPACT) {
                    storageLayout.append("COMPACT");
                }
                else if (layoutType == HDF5Constants.H5D_CONTIGUOUS) {
                    storageLayout.append("CONTIGUOUS");
                    if (H5.H5Pget_external_count(pcid) > 0)
                        storageLayout.append(" - EXTERNAL ");
                }
                else if (layoutType == HDF5Constants.H5D_VIRTUAL) {
                    storageLayout.append("VIRTUAL - ");
                    try {
                        long vmaps = H5.H5Pget_virtual_count(pcid);
                        try {
                            int virtView = H5.H5Pget_virtual_view(paid);
                            long virtGap = H5.H5Pget_virtual_printf_gap(paid);
                            if (virtView == HDF5Constants.H5D_VDS_FIRST_MISSING)
                                storageLayout.append("First Missing");
                            else
                                storageLayout.append("Last Available");
                            storageLayout.append("\nGAP : ").append(virtGap);
                        }
                        catch (Exception err) {
                            log.debug("getMetadata(): vds error: ", err);
                            storageLayout.append("ERROR");
                        }
                        storageLayout.append("\nMAPS : ").append(vmaps);
                        if (vmaps > 0) {
                            for (long next = 0; next < vmaps; next++) {
                                try {
                                    H5.H5Pget_virtual_vspace(pcid, next);
                                    H5.H5Pget_virtual_srcspace(pcid, next);
                                    String fname = H5.H5Pget_virtual_filename(pcid, next);
                                    String dsetname = H5.H5Pget_virtual_dsetname(pcid, next);
                                    storageLayout.append("\n").append(fname).append(" : ").append(dsetname);
                                }
                                catch (Exception err) {
                                    log.debug("getMetadata(): vds space[{}] error: ", next, err);
                                    storageLayout.append("ERROR");
                                }
                            }
                        }
                    }
                    catch (Exception err) {
                        log.debug("getMetadata(): vds count error: ", err);
                        storageLayout.append("ERROR");
                    }
                }
                else {
                    chunkSize = null;
                    storageLayout.append("NONE");
                }

                int[] flags = { 0, 0 };
                long[] cdNelmts = { 20 };
                int[] cdValues = new int[(int) cdNelmts[0]];
                String[] cdName = { "", "" };
                log.trace("getMetadata(): {} filters in pipeline", nfilt);
                int filter = -1;
                int[] filterConfig = { 1 };

                filters.setLength(0);

                if (nfilt == 0) {
                    filters.append("NONE");
                }
                else {
                    for (int i = 0, k = 0; i < nfilt; i++) {
                        log.trace("getMetadata(): filter[{}]", i);
                        if (i > 0) {
                            filters.append(", ");
                        }
                        if (k > 0) {
                            compression.append(", ");
                        }

                        try {
                            cdNelmts[0] = 20;
                            cdValues = new int[(int) cdNelmts[0]];
                            filter = H5.H5Pget_filter(pcid, i, flags, cdNelmts, cdValues, 120, cdName, filterConfig);
                            log.trace("getMetadata(): filter[{}] is {} has {} elements ", i, cdName[0], cdNelmts[0]);
                            for (int j = 0; j < cdNelmts[0]; j++) {
                                log.trace("getMetadata(): filter[{}] element {} = {}", i, j, cdValues[j]);
                            }
                        }
                        catch (Exception err) {
                            log.debug("getMetadata(): filter[{}] error: ", i, err);
                            filters.append("ERROR");
                            continue;
                        }

                        if (filter == HDF5Constants.H5Z_FILTER_NONE) {
                            filters.append("NONE");
                        }
                        else if (filter == HDF5Constants.H5Z_FILTER_DEFLATE) {
                            filters.append("GZIP");
                            compression.append(COMPRESSION_GZIP_TXT).append(cdValues[0]);
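                            // for deflate, cdValues[0] is the compression level, so with the
                            // COMPRESSION_GZIP_TXT prefix this reads like "GZIP: level = 6"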
                            k++;
                        }
                        else if (filter == HDF5Constants.H5Z_FILTER_FLETCHER32) {
                            filters.append("Error detection filter");
                        }
                        else if (filter == HDF5Constants.H5Z_FILTER_SHUFFLE) {
                            filters.append("SHUFFLE: Nbytes = ").append(cdValues[0]);
                        }
                        else if (filter == HDF5Constants.H5Z_FILTER_NBIT) {
                            filters.append("NBIT");
                        }
                        else if (filter == HDF5Constants.H5Z_FILTER_SCALEOFFSET) {
                            filters.append("SCALEOFFSET: MIN BITS = ").append(cdValues[0]);
                        }
                        else if (filter == HDF5Constants.H5Z_FILTER_SZIP) {
                            filters.append("SZIP");
                            compression.append("SZIP: Pixels per block = ").append(cdValues[1]);
                            k++;
                            int flag = -1;
                            try {
                                flag = H5.H5Zget_filter_info(filter);
                            }
                            catch (Exception ex) {
                                log.debug("getMetadata(): H5Zget_filter_info failure: ", ex);
                                flag = -1;
                            }
                            if (flag == HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) {
                                compression.append(": H5Z_FILTER_CONFIG_DECODE_ENABLED");
                            }
                            else if ((flag == HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED)
                                    || (flag >= (HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED
                                            + HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED))) {
                                compression.append(": H5Z_FILTER_CONFIG_ENCODE_ENABLED");
                            }
                        }
                        else {
                            filters.append("USERDEFINED ").append(cdName[0]).append("(").append(filter).append("): ");
                            for (int j = 0; j < cdNelmts[0]; j++) {
                                if (j > 0)
                                    filters.append(", ");
                                filters.append(cdValues[j]);
                            }
                            log.debug("getMetadata(): filter[{}] is user defined compression", i);
                        }
                    } //  (int i=0; i<nfilt; i++)
                }

                if (compression.length() == 0) {
                    compression.append("NONE");
                }
                log.trace("getMetadata(): filter compression={}", compression);

                log.trace("getMetadata(): filter information={}", filters);

                storage.setLength(0);
                storage.append("SIZE: ").append(storageSize);

                try {
                    int[] at = { 0 };
                    H5.H5Pget_alloc_time(pcid, at);
                    storage.append(", allocation time: ");
                    if (at[0] == HDF5Constants.H5D_ALLOC_TIME_EARLY) {
                        storage.append("Early");
                    }
                    else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_INCR) {
                        storage.append("Incremental");
                    }
                    else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_LATE) {
                        storage.append("Late");
                    }
                    else
                        storage.append("Default");
                }
                catch (Exception ex) {
                    log.debug("getMetadata(): Storage allocation time:", ex);
                }
                log.trace("getMetadata(): storage={}", storage);
            }
            finally {
                try {
                    H5.H5Pclose(paid);
                }
                catch (Exception ex) {
                    log.debug("getMetadata(): H5Pclose(paid {}) failure: ", paid, ex);
                }
                try {
                    H5.H5Pclose(pcid);
                }
                catch (Exception ex) {
                    log.debug("getMetadata(): H5Pclose(pcid {}) failure: ", pcid, ex);
                }
                close(did);
            }
        }

        return attributeList;
    }
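
    /*
     * A minimal usage sketch for getMetadata(int...) above (hypothetical object name;
     * the index constants come from HDF5Constants):
     *
     *     List<Attribute> attrs = dset.getMetadata(HDF5Constants.H5_INDEX_NAME,
     *             HDF5Constants.H5_ITER_INC);
     */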

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.DataFormat#writeMetadata(java.lang.Object)
     */
    @Override
    public void writeMetadata(Object info) throws Exception {
        // only attribute metadata is supported.
        if (!(info instanceof Attribute)) {
            log.debug("writeMetadata(): Object not an Attribute");
            return;
        }

        boolean attrExisted = false;
        Attribute attr = (Attribute) info;
        log.trace("writeMetadata(): {}", attr.getName());

        if (attributeList == null) {
            this.getMetadata();
        }

        if (attributeList != null)
            attrExisted = attributeList.contains(attr);

        getFileFormat().writeAttribute(this, attr, attrExisted);
        // add the new attribute into attribute list
        if (!attrExisted) {
            attributeList.add(attr);
            nAttributes = attributeList.size();
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.DataFormat#removeMetadata(java.lang.Object)
     */
    @Override
    public void removeMetadata(Object info) throws HDF5Exception {
        // only attribute metadata is supported.
        if (!(info instanceof Attribute)) {
            log.debug("removeMetadata(): Object not an Attribute");
            return;
        }

        Attribute attr = (Attribute) info;
        log.trace("removeMetadata(): {}", attr.getName());
        long did = open();
        if (did >= 0) {
            try {
                H5.H5Adelete(did, attr.getName());
                List<Attribute> attrList = getMetadata();
                attrList.remove(attr);
                nAttributes = attrList.size();
            }
            finally {
                close(did);
            }
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.DataFormat#updateMetadata(java.lang.Object)
     */
    @Override
    public void updateMetadata(Object info) throws HDF5Exception {
        // only attribute metadata is supported.
        if (!(info instanceof Attribute)) {
            log.debug("updateMetadata(): Object not an Attribute");
            return;
        }

        nAttributes = -1;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#setName(java.lang.String)
     */
    @Override
    public void setName(String newName) throws Exception {
        if (newName == null)
            throw new IllegalArgumentException("The new name is NULL");

        H5File.renameObject(this, newName);
        super.setName(newName);
    }

    /**
     * Resets the dataspace selection to its default: the first dimension (rank 1) or
     * first two dimensions (rank 2 and above) are selected in full, any remaining
     * dimensions are reduced to a single element, and all compound members are selected.
     */
    private void resetSelection() {
        for (int i = 0; i < rank; i++) {
            startDims[i] = 0;
            selectedDims[i] = 1;
            if (selectedStride != null) {
                selectedStride[i] = 1;
            }
        }

        if (rank == 1) {
            selectedIndex[0] = 0;
            selectedDims[0] = dims[0];
        }
        else if (rank == 2) {
            selectedIndex[0] = 0;
            selectedIndex[1] = 1;
            selectedDims[0] = dims[0];
            selectedDims[1] = dims[1];
        }
        else if (rank > 2) {
            // selectedIndex[0] = rank - 2; // columns
            // selectedIndex[1] = rank - 1; // rows
            // selectedIndex[2] = rank - 3;
            selectedIndex[0] = 0; // width, the fastest dimension
            selectedIndex[1] = 1; // height
            selectedIndex[2] = 2; // frames
            // selectedDims[rank - 1] = dims[rank - 1];
            // selectedDims[rank - 2] = dims[rank - 2];
            selectedDims[selectedIndex[0]] = dims[selectedIndex[0]];
            selectedDims[selectedIndex[1]] = dims[selectedIndex[1]];
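            // e.g. for dims = {64, 48, 10}: startDims = {0, 0, 0} and
            // selectedDims = {64, 48, 1}, i.e. the first full 64 x 48 plane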
        }

        isDataLoaded = false;
        setAllMemberSelection(true);
    }

    /**
     * @deprecated Not for public use in the future. <br>
     *             Use
     *             {@link #create(String, Group, long[], long[], long[], int, String[], Datatype[], int[], long[][], Object)}
     *             instead.
     *
     * @param name
     *            the name of the dataset to create.
     * @param pgroup
     *            parent group where the new dataset is created.
     * @param dims
     *            the dimension size of the dataset.
     * @param memberNames
     *            the names of the compound datatype members
     * @param memberDatatypes
     *            the datatypes of the compound datatype members
     * @param memberSizes
     *            the dimension sizes of the members
     * @param data
     *            list of data arrays written to the new dataset, null if no data is written to the new
     *            dataset.
     *
     * @return the new compound dataset if successful; otherwise returns null.
     *
     * @throws Exception
     *             if there is a failure.
     */
    @Deprecated
    public static Dataset create(String name, Group pgroup, long[] dims, String[] memberNames,
            Datatype[] memberDatatypes, int[] memberSizes, Object data) throws Exception {
        if ((pgroup == null) || (name == null) || (dims == null) || (memberNames == null) || (memberDatatypes == null)
                || (memberSizes == null)) {
            return null;
        }

        int nMembers = memberNames.length;
        int memberRanks[] = new int[nMembers];
        long memberDims[][] = new long[nMembers][1];
        for (int i = 0; i < nMembers; i++) {
            memberRanks[i] = 1;
            memberDims[i][0] = memberSizes[i];
        }
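        // e.g. memberSizes = {1, 1, 10} becomes memberRanks = {1, 1, 1} and
        // memberDims = {{1}, {1}, {10}}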

        return H5CompoundDS.create(name, pgroup, dims, memberNames, memberDatatypes, memberRanks, memberDims, data);
    }

    /**
     * @deprecated Not for public use in the future. <br>
     *             Use
     *             {@link #create(String, Group, long[], long[], long[], int, String[], Datatype[], int[], long[][], Object)}
     *             instead.
     *
     * @param name
     *            the name of the dataset to create.
     * @param pgroup
     *            parent group where the new dataset is created.
     * @param dims
     *            the dimension size of the dataset.
     * @param memberNames
     *            the names of the compound datatype members
     * @param memberDatatypes
     *            the datatypes of the compound datatype members
     * @param memberRanks
     *            the ranks of the members
     * @param memberDims
     *            the dimension sizes of the members
     * @param data
     *            list of data arrays written to the new dataset, null if no data is written to the new
     *            dataset.
     *
     * @return the new compound dataset if successful; otherwise returns null.
     *
     * @throws Exception
     *             if the dataset cannot be created.
     */
    @Deprecated
    public static Dataset create(String name, Group pgroup, long[] dims, String[] memberNames,
            Datatype[] memberDatatypes, int[] memberRanks, long[][] memberDims, Object data) throws Exception {
        return H5CompoundDS.create(name, pgroup, dims, null, null, -1, memberNames, memberDatatypes, memberRanks,
                memberDims, data);
    }

    /**
     * Creates a simple compound dataset in a file with/without chunking and compression.
     * <p>
     * This function provides an easy way to create a simple compound dataset in a file by hiding the
     * tedious details of creating a compound dataset from users.
     * <p>
     * This function calls H5.H5Dcreate() to create a simple compound dataset in the file. Nested
     * compound datasets are not supported. The required information to create a compound dataset
     * includes the name, the parent group and data space of the dataset, and the names, datatypes
     * and data spaces of the compound fields. Other information such as chunks, compression and the
     * data buffer is optional.
     * <p>
     * The following example shows how to use this function to create a compound dataset in a file.
     *
     * <pre>
     * H5File file = null;
     * String message = &quot;&quot;;
     * Group pgroup = null;
     * int[] DATA_INT = new int[DIM_SIZE];
     * float[] DATA_FLOAT = new float[DIM_SIZE];
     * String[] DATA_STR = new String[DIM_SIZE];
     * long[] DIMs = { 50, 10 };
     * long[] CHUNKs = { 25, 5 };
     *
     * try {
     *     file = (H5File) H5FILE.open(fname, H5File.CREATE);
     *     file.open();
     *     pgroup = (Group) file.get(&quot;/&quot;);
     * }
     * catch (Exception ex) {
     * }
     *
     * Vector data = new Vector();
     * data.add(0, DATA_INT);
     * data.add(1, DATA_FLOAT);
     * data.add(2, DATA_STR);
     *
     * // create the member datatypes
     * Datatype[] mdtypes = new H5Datatype[3];
     * String[] mnames = { &quot;int&quot;, &quot;float&quot;, &quot;string&quot; };
     * Dataset dset = null;
     * try {
     *     mdtypes[0] = new H5Datatype(Datatype.CLASS_INTEGER, 4, Datatype.NATIVE, Datatype.NATIVE);
     *     mdtypes[1] = new H5Datatype(Datatype.CLASS_FLOAT, 4, Datatype.NATIVE, Datatype.NATIVE);
     *     mdtypes[2] = new H5Datatype(Datatype.CLASS_STRING, STR_LEN, Datatype.NATIVE, Datatype.NATIVE);
     *     dset = file.createCompoundDS(&quot;/CompoundDS&quot;, pgroup, DIMs, null, CHUNKs, 9, mnames, mdtypes, null, data);
     * }
     * catch (Exception ex) {
     *     failed(message, ex, file);
     *     return 1;
     * }
     * </pre>
     *
     * @param name
     *            the name of the dataset to create.
     * @param pgroup
     *            parent group where the new dataset is created.
     * @param dims
     *            the dimension size of the dataset.
     * @param maxdims
     *            the max dimension size of the dataset. maxdims is set to dims if maxdims is null.
     * @param chunks
     *            the chunk size of the dataset. No chunking if chunks is null.
     * @param gzip
     *            GZIP compression level (1 to 9); 0 or a negative value means no compression.
     * @param memberNames
     *            the names of the compound datatype members
     * @param memberDatatypes
     *            the datatypes of the compound datatype members
     * @param memberRanks
     *            the ranks of the members
     * @param memberDims
     *            the dimension sizes of the members
     * @param data
     *            list of data arrays written to the new dataset, null if no data is written to the new
     *            dataset.
     *
     * @return the new compound dataset if successful; otherwise returns null.
     *
     * @throws Exception
     *             if there is a failure.
     */
    public static Dataset create(String name, Group pgroup, long[] dims, long[] maxdims, long[] chunks, int gzip,
            String[] memberNames, Datatype[] memberDatatypes, int[] memberRanks, long[][] memberDims, Object data)
                    throws Exception {
        H5CompoundDS dataset = null;
        String fullPath = null;
        long did = -1;
        long tid = -1;
        long plist = -1;
        long sid = -1;

        if ((pgroup == null) || (name == null) || (dims == null) || ((gzip > 0) && (chunks == null))
                || (memberNames == null) || (memberDatatypes == null) || (memberRanks == null)
                || (memberDims == null)) {
            log.debug("create(): one or more parameters are null");
            return null;
        }

        H5File file = (H5File) pgroup.getFileFormat();
        if (file == null) {
            log.debug("create(): parent group FileFormat is null");
            return null;
        }

        String path = HObject.SEPARATOR;
        if (!pgroup.isRoot()) {
            path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR;
            if (name.endsWith("/")) {
                name = name.substring(0, name.length() - 1);
            }
            int idx = name.lastIndexOf('/');
            if (idx >= 0) {
                name = name.substring(idx + 1);
            }
        }

        fullPath = path + name;

        int typeSize = 0;
        int nMembers = memberNames.length;
        long[] mTypes = new long[nMembers];
        int memberSize = 1;
        for (int i = 0; i < nMembers; i++) {
            memberSize = 1;
            for (int j = 0; j < memberRanks[i]; j++) {
                memberSize *= memberDims[i][j];
            }
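            // e.g. memberDims[i] = {2, 3} gives memberSize = 6, so the member is
            // created as a 2 x 3 array type below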

            mTypes[i] = -1;
            // the member is an array
            if ((memberSize > 1) && (!memberDatatypes[i].isString())) {
                long tmptid = -1;
                if ((tmptid = memberDatatypes[i].createNative()) >= 0) {
                    try {
                        mTypes[i] = H5.H5Tarray_create(tmptid, memberRanks[i], memberDims[i]);
                    }
                    finally {
                        try {
                            H5.H5Tclose(tmptid);
                        }
                        catch (Exception ex) {
                            log.debug("create(): H5Tclose(tmptid {}) failure: ", tmptid, ex);
                        }
                    }
                }
            }
            else {
                mTypes[i] = memberDatatypes[i].createNative();
            }
            try {
                typeSize += H5.H5Tget_size(mTypes[i]);
            }
            catch (Exception ex) {
                log.debug("create(): array create H5Tget_size:", ex);

                // close all member types created so far, including mTypes[0]
                while (i >= 0) {
                    try {
                        H5.H5Tclose(mTypes[i]);
                    }
                    catch (HDF5Exception ex2) {
                        log.debug("create(): H5Tclose(mTypes[{}] {}) failure: ", i, mTypes[i], ex2);
                    }
                    i--;
                }
                throw ex;
            }
        } //  (int i = 0; i < nMembers; i++)

        // setup chunking and compression
        boolean isExtendable = false;
        if (maxdims != null) {
            for (int i = 0; i < maxdims.length; i++) {
                if (maxdims[i] == 0) {
                    maxdims[i] = dims[i];
                }
                else if (maxdims[i] < 0) {
                    maxdims[i] = HDF5Constants.H5S_UNLIMITED;
                }

                if (maxdims[i] != dims[i]) {
                    isExtendable = true;
                }
            }
        }

        // HDF5 requires chunking in order to define extendible datasets.
        // Chunking makes it possible to extend datasets efficiently without
        // having to reorganize storage excessively. Use a default chunk size
        // of 64 in each dimension, which generally performs well.
        if ((chunks == null) && isExtendable) {
            chunks = new long[dims.length];
            for (int i = 0; i < dims.length; i++)
                chunks[i] = Math.min(dims[i], 64);
        }
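        // e.g. dims = {500, 20} gives default chunks = {64, 20}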

        // prepare the dataspace and datatype
        int rank = dims.length;

        try {
            sid = H5.H5Screate_simple(rank, dims, maxdims);

            // figure out creation properties
            plist = HDF5Constants.H5P_DEFAULT;

            tid = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, typeSize);
            int offset = 0;
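            // members are packed back-to-back: each member's offset is the sum of
            // the sizes of the members before it (e.g. sizes 4, 4, 8 give offsets 0, 4, 8)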
            for (int i = 0; i < nMembers; i++) {
                H5.H5Tinsert(tid, memberNames[i], offset, mTypes[i]);
                offset += H5.H5Tget_size(mTypes[i]);
            }

            if (chunks != null) {
                plist = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);

                H5.H5Pset_layout(plist, HDF5Constants.H5D_CHUNKED);
                H5.H5Pset_chunk(plist, rank, chunks);

                // compression requires chunking
                if (gzip > 0) {
                    H5.H5Pset_deflate(plist, gzip);
                }
            }

            long fid = file.getFID();

            did = H5.H5Dcreate(fid, fullPath, tid, sid, HDF5Constants.H5P_DEFAULT, plist, HDF5Constants.H5P_DEFAULT);
            dataset = new H5CompoundDS(file, name, path);
        }
        finally {
            try {
                H5.H5Pclose(plist);
            }
            catch (HDF5Exception ex) {
                log.debug("create(): H5Pclose(plist {}) failure: ", plist, ex);
            }
            try {
                H5.H5Sclose(sid);
            }
            catch (HDF5Exception ex) {
                log.debug("create(): H5Sclose(sid {}) failure: ", sid, ex);
            }
            try {
                H5.H5Tclose(tid);
            }
            catch (HDF5Exception ex) {
                log.debug("create(): H5Tclose(tid {}) failure: ", tid, ex);
            }
            try {
                H5.H5Dclose(did);
            }
            catch (HDF5Exception ex) {
                log.debug("create(): H5Dclose(did {}) failure: ", did, ex);
            }

            for (int i = 0; i < nMembers; i++) {
                try {
                    H5.H5Tclose(mTypes[i]);
                }
                catch (HDF5Exception ex) {
                    log.debug("create(): H5Tclose(mTypes[{}] {}) failure: ", i, mTypes[i], ex);
                }
            }
        }

        if (dataset != null) {
            pgroup.addToMemberList(dataset);
            if (data != null) {
                dataset.init();
                long selected[] = dataset.getSelectedDims();
                for (int i = 0; i < rank; i++) {
                    selected[i] = dims[i];
                }
                dataset.write(data);
            }
        }

        return dataset;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#isString(long)
     */
    @Override
    public boolean isString(long tid) {
        boolean b = false;
        try {
            b = (HDF5Constants.H5T_STRING == H5.H5Tget_class(tid));
        }
        catch (Exception ex) {
            b = false;
        }

        return b;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#getSize(long)
     */
    @Override
    public long getSize(long tid) {
        long tsize = -1;

        try {
            tsize = H5.H5Tget_size(tid);
        }
        catch (Exception ex) {
            tsize = -1;
        }

        return tsize;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#isVirtual()
     */
    @Override
    public boolean isVirtual() {
        return isVirtual;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#getVirtualFilename(int)
     */
    @Override
    public String getVirtualFilename(int index) {
        return (isVirtual) ? virtualNameList.get(index) : null;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#getVirtualMaps()
     */
    @Override
    public int getVirtualMaps() {
        return (isVirtual) ? virtualNameList.size() : -1;
    }

}