001/*****************************************************************************
002 * Copyright by The HDF Group.                                               *
003 * Copyright by the Board of Trustees of the University of Illinois.         *
004 * All rights reserved.                                                      *
005 *                                                                           *
006 * This file is part of the HDF Java Products distribution.                  *
007 * The full copyright notice, including terms governing use, modification,   *
008 * and redistribution, is contained in the files COPYING and Copyright.html. *
009 * COPYING can be found at the root of the source code distribution tree.    *
010 * Or, see https://support.hdfgroup.org/products/licenses.html               *
011 * If you do not have access to either file, you may request a copy from     *
012 * help@hdfgroup.org.                                                        *
013 ****************************************************************************/
014
015package hdf.object.h5;
016
017import java.lang.reflect.Array;
018import java.text.DecimalFormat;
019import java.util.List;
020import java.util.Vector;
021
022import hdf.hdf5lib.H5;
023import hdf.hdf5lib.HDF5Constants;
024import hdf.hdf5lib.HDFNativeData;
025import hdf.hdf5lib.exceptions.HDF5DataFiltersException;
026import hdf.hdf5lib.exceptions.HDF5Exception;
027import hdf.hdf5lib.structs.H5O_info_t;
028import hdf.object.Attribute;
029import hdf.object.CompoundDS;
030import hdf.object.Dataset;
031import hdf.object.Datatype;
032import hdf.object.FileFormat;
033import hdf.object.Group;
034import hdf.object.HObject;
035import hdf.view.Tools;
036
037/**
038 * The H5CompoundDS class defines an HDF5 dataset of compound datatypes.
039 * <p>
040 * An HDF5 dataset is an object composed of a collection of data elements, or raw data, and metadata
041 * that stores a description of the data elements, data layout, and all other information necessary
042 * to write, read, and interpret the stored data.
043 * <p>
044 * A HDF5 compound datatype is similar to a struct in C or a common block in Fortran: it is a
045 * collection of one or more atomic types or small arrays of such types. Each member of a compound
046 * type has a name which is unique within that type, and a byte offset that determines the first
047 * byte (smallest byte address) of that member in a compound datum.
048 * <p>
049 * For more information on HDF5 datasets and datatypes, read the <a href=
050 * "https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5
051 * User's Guide</a>.
052 * <p>
053 * There are two basic types of compound datasets: simple compound data and nested compound data.
054 * Members of a simple compound dataset have atomic datatypes. Members of a nested compound dataset
055 * are compound or array of compound data.
056 * <p>
057 * Since Java does not understand C structures, we cannot directly read/write compound data values
058 * as in the following C example.
059 *
060 * <pre>
061 * typedef struct s1_t {
062 *         int    a;
063 *         float  b;
064 *         double c;
065 *         } s1_t;
066 *     s1_t       s1[LENGTH];
067 *     ...
068 *     H5Dwrite(..., s1);
069 *     H5Dread(..., s1);
070 * </pre>
071 *
072 * Values of compound data fields are stored in java.util.Vector object. We read and write compound
073 * data by fields instead of compound structure. As for the example above, the java.util.Vector
074 * object has three elements: int[LENGTH], float[LENGTH] and double[LENGTH]. Since Java understands
075 * the primitive datatypes of int, float and double, we will be able to read/write the compound data
076 * by field.
077 *
078 * @version 1.1 9/4/2007
079 * @author Peter X. Cao
080 */
081public class H5CompoundDS extends CompoundDS {
082    private static final long serialVersionUID = -5968625125574032736L;
083
084    private final static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5CompoundDS.class);
085
086    /**
087     * The list of attributes of this data object. Members of the list are instance of Attribute.
088     */
089    private List<Attribute> attributeList;
090
091    private int nAttributes = -1;
092
093    private H5O_info_t obj_info;
094
095    /**
096     * A list of names of all fields including nested fields.
097     * <p>
098     * The nested names are separated by CompoundDS.separator. For example, if compound dataset "A" has
099     * the following nested structure,
100     *
101     * <pre>
102     * A --&gt; m01
103     * A --&gt; m02
104     * A --&gt; nest1 --&gt; m11
105     * A --&gt; nest1 --&gt; m12
106     * A --&gt; nest1 --&gt; nest2 --&gt; m21
107     * A --&gt; nest1 --&gt; nest2 --&gt; m22
108     * i.e.
109     * A = { m01, m02, nest1{m11, m12, nest2{ m21, m22}}}
110     * </pre>
111     *
112     * The flatNameList of compound dataset "A" will be {m01, m02, nest1[m11, nest1[m12,
113     * nest1[nest2[m21, nest1[nest2[m22}
114     *
115     */
116    private List<String> flatNameList;
117
118    /**
119     * A list of datatypes of all fields including nested fields.
120     */
121    private List<Datatype> flatTypeList;
122
123    /** flag to indicate if the dataset is an external dataset */
124    private boolean isExternal = false;
125
126    /** flag to indicate if the dataset is a virtual dataset */
127    private boolean isVirtual = false;
128    private List<String> virtualNameList;
129
130    /**
131     * Constructs an instance of a HDF5 compound dataset with given file, dataset name and path.
132     * <p>
133     * The dataset object represents an existing dataset in the file. For example, new
134     * H5CompoundDS(file, "dset1", "/g0/") constructs a dataset object that corresponds to the
135     * dataset,"dset1", at group "/g0/".
136     * <p>
137     * This object is usually constructed at FileFormat.open(), which loads the file structure and
138     * object information into memory. It is rarely used elsewhere.
139     *
140     * @param theFile
141     *            the file that contains the data object.
142     * @param theName
143     *            the name of the data object, e.g. "dset".
144     * @param thePath
145     *            the full path of the data object, e.g. "/arrays/".
146     */
147    public H5CompoundDS(FileFormat theFile, String theName, String thePath) {
148        this(theFile, theName, thePath, null);
149    }
150
151    /**
152     * @deprecated Not for public use in the future.<br>
153     *             Using {@link #H5CompoundDS(FileFormat, String, String)}
154     *
155     * @param theFile
156     *            the file that contains the data object.
157     * @param theName
158     *            the name of the data object, e.g. "dset".
159     * @param thePath
160     *            the full path of the data object, e.g. "/arrays/".
161     * @param oid
162     *            the oid of the data object.
163     */
164    @Deprecated
165    public H5CompoundDS(FileFormat theFile, String theName, String thePath, long[] oid) {
166        super(theFile, theName, thePath, oid);
167        obj_info = new H5O_info_t(-1L, -1L, 0, 0, -1L, 0L, 0L, 0L, 0L, null, null, null);
168
169        if ((oid == null) && (theFile != null)) {
170            // retrieve the object ID
171            try {
172                byte[] ref_buf = H5.H5Rcreate(theFile.getFID(), this.getFullName(), HDF5Constants.H5R_OBJECT, -1);
173                this.oid = new long[1];
174                this.oid[0] = HDFNativeData.byteToLong(ref_buf, 0);
175            }
176            catch (Exception ex) {
177                log.debug("constructor ID {} for {} failed H5Rcreate", theFile.getFID(), this.getFullName());
178            }
179        }
180    }
181
182    /*
183     * (non-Javadoc)
184     *
185     * @see hdf.object.HObject#open()
186     */
187    @Override
188    public long open() {
189        log.trace("open(): start");
190
191        long did = -1;
192
193        try {
194            did = H5.H5Dopen(getFID(), getPath() + getName(), HDF5Constants.H5P_DEFAULT);
195            log.trace("open(): did={}", did);
196        }
197        catch (HDF5Exception ex) {
198            log.debug("open(): Failed to open dataset {}: ", getPath() + getName(), ex);
199            did = -1;
200        }
201
202        log.trace("open(): finish");
203        return did;
204    }
205
206    /*
207     * (non-Javadoc)
208     *
209     * @see hdf.object.HObject#close(int)
210     */
211    @Override
212    public void close(long did) {
213        log.trace("close(): start");
214
215        if (did >= 0) {
216            try {
217                H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_LOCAL);
218            }
219            catch (Exception ex) {
220                log.debug("close(): H5Fflush(did {}) failure: ", did, ex);
221            }
222            try {
223                H5.H5Dclose(did);
224            }
225            catch (HDF5Exception ex) {
226                log.debug("close(): H5Dclose(did {}) failure: ", did, ex);
227            }
228        }
229
230        log.trace("close(): finish");
231    }
232
233    /**
234     * Retrieves datatype and dataspace information from file and sets the dataset
235     * in memory.
236     * <p>
237     * The init() is designed to support lazy operation in a dataset object. When a
238     * data object is retrieved from file, the datatype, dataspace and raw data are
239     * not loaded into memory. When it is asked to read the raw data from file,
240     * init() is first called to get the datatype and dataspace information, then
241     * load the raw data from file.
242     * <p>
243     * init() is also used to reset the selection of a dataset (start, stride and
244     * count) to the default, which is the entire dataset for 1D or 2D datasets. In
245     * the following example, init() at step 1) retrieves datatype and dataspace
246     * information from file. getData() at step 3) reads only one data point. init()
247     * at step 4) resets the selection to the whole dataset. getData() at step 4)
248     * reads the values of whole dataset into memory.
249     *
250     * <pre>
251     * dset = (Dataset) file.get(NAME_DATASET);
252     *
253     * // 1) get datatype and dataspace information from file
254     * dset.init();
255     * rank = dset.getRank(); // rank = 2, a 2D dataset
256     * count = dset.getSelectedDims();
257     * start = dset.getStartDims();
258     * dims = dset.getDims();
259     *
260     * // 2) select only one data point
261     * for (int i = 0; i &lt; rank; i++) {
262     *     start[0] = 0;
263     *     count[i] = 1;
264     * }
265     *
266     * // 3) read one data point
267     * data = dset.getData();
268     *
269     * // 4) reset selection to the whole dataset
270     * dset.init();
271     *
272     * // 5) clean the memory data buffer
273     * dset.clearData();
274     *
275     * // 6) Read the whole dataset
276     * data = dset.getData();
277     * </pre>
278     */
279    @Override
280    public void init() {
281        log.trace("init(): start");
282
283        if (inited) {
284            resetSelection();
285            log.trace("init(): Dataset already initialized");
286            log.trace("init(): finish");
287            return; // already called. Initialize only once
288        }
289
290        long did = -1;
291        long tid = -1;
292        long sid = -1;
293        flatNameList = new Vector<>();
294        flatTypeList = new Vector<>();
295
296        did = open();
297        if (did >= 0) {
298            // check if it is an external or virtual dataset
299            long pid = -1;
300            try {
301                pid = H5.H5Dget_create_plist(did);
302                try {
303                    int nfiles = H5.H5Pget_external_count(pid);
304                    isExternal = (nfiles > 0);
305                    int layout_type = H5.H5Pget_layout(pid);
306                    if (isVirtual = (layout_type == HDF5Constants.H5D_VIRTUAL)) {
307                        try {
308                            long vmaps = H5.H5Pget_virtual_count(pid);
309                            if (vmaps > 0) {
310                                virtualNameList = new Vector<>();
311                                for (long next = 0; next < vmaps; next++) {
312                                    try {
313                                        String fname = H5.H5Pget_virtual_filename(pid, next);
314                                        virtualNameList.add(fname);
315                                        log.trace("init(): virtualNameList[{}]={}", next, fname);
316                                    }
317                                    catch (Throwable err) {
318                                        log.trace("init(): vds[{}] continue", next);
319                                        continue;
320                                    }
321                                }
322                            }
323                        }
324                        catch (Throwable err) {
325                            log.debug("init(): vds count error: ", err);
326                        }
327                    }
328                    log.trace("init(): pid={} nfiles={} isExternal={} isVirtual={}", pid, nfiles, isExternal, isVirtual);
329                }
330                catch (Exception ex) {
331                    log.debug("init(): check if it is an external or virtual dataset:", ex);
332                }
333            }
334            catch (Exception ex) {
335                log.debug("init(): H5Dget_create_plist() failure: ", ex);
336            }
337            finally {
338                try {
339                    H5.H5Pclose(pid);
340                }
341                catch (Exception ex) {
342                    log.debug("init(): H5Pclose(pid {}) failure: ", pid, ex);
343                }
344            }
345
346            try {
347                sid = H5.H5Dget_space(did);
348                rank = H5.H5Sget_simple_extent_ndims(sid);
349                tid = H5.H5Dget_type(did);
350                log.trace("init(): tid={} sid={} rank={}", tid, sid, rank);
351                datatype = new H5Datatype(tid);
352
353                log.trace("init(): tid={} has isText={} : isVLEN={} : isEnum={} : isUnsigned={} : isRegRef={}", tid,
354                        datatype.isText(), datatype.isVLEN(), ((H5Datatype) datatype).isEnum(), datatype.isUnsigned(), ((H5Datatype) datatype).isRegRef());
355
356                if (rank == 0) {
357                    // a scalar data point
358                    rank = 1;
359                    dims = new long[1];
360                    dims[0] = 1;
361                    log.trace("init(): rank is a scalar data point");
362                }
363                else {
364                    dims = new long[rank];
365                    maxDims = new long[rank];
366                    H5.H5Sget_simple_extent_dims(sid, dims, maxDims);
367                    log.trace("init(): rank={}, dims={}, maxDims={}", rank, dims, maxDims);
368                }
369
370                startDims = new long[rank];
371                selectedDims = new long[rank];
372
373                // initialize member information
374                ((H5Datatype) getDatatype()).extractCompoundInfo("", flatNameList, flatTypeList);
375                numberOfMembers = flatNameList.size();
376                log.trace("init(): numberOfMembers={}", numberOfMembers);
377
378                memberNames = new String[numberOfMembers];
379                memberTypes = new Datatype[numberOfMembers];
380                memberOrders = new int[numberOfMembers];
381                isMemberSelected = new boolean[numberOfMembers];
382                memberDims = new Object[numberOfMembers];
383
384                for (int i = 0; i < numberOfMembers; i++) {
385                    isMemberSelected[i] = true;
386                    memberOrders[i] = 1;
387                    memberDims[i] = null;
388
389                    try {
390                        memberTypes[i] = flatTypeList.get(i);
391                        log.trace("init()[{}]: memberTypes[{}]={}", i, i, memberTypes[i].getDescription());
392
393                        if (memberTypes[i].isArray()) {
394                            long mdim[] = memberTypes[i].getArrayDims();
395                            int idim[] = new int[mdim.length];
396                            int arrayNpoints = 1;
397
398                            for (int j = 0; j < idim.length; j++) {
399                                idim[j] = (int) mdim[j];
400                                arrayNpoints *= idim[j];
401                            }
402
403                            memberDims[i] = idim;
404                            memberOrders[i] = arrayNpoints;
405                        }
406                    }
407                    catch (Exception ex) {
408                        log.debug("init()[{}]: memberTypes[{}] get failure: ", i, i, ex);
409                        memberTypes[i] = null;
410                    }
411
412                    try {
413                        memberNames[i] = flatNameList.get(i);
414                        log.trace("init()[{}]: memberNames[{}]={}", i, i, memberNames[i]);
415                    }
416                    catch (Exception ex) {
417                        log.debug("init()[{}]: memberNames[{}] get failure: ", i, i, ex);
418                        memberNames[i] = "null";
419                    }
420                } // for (int i=0; i<numberOfMembers; i++)
421
422                inited = true;
423            }
424            catch (HDF5Exception ex) {
425                numberOfMembers = 0;
426                memberNames = null;
427                memberTypes = null;
428                memberOrders = null;
429                log.debug("init(): ", ex);
430            }
431            finally {
432                getDatatype().close(tid);
433
434                try {
435                    H5.H5Sclose(sid);
436                }
437                catch (HDF5Exception ex2) {
438                    log.debug("init(): H5Sclose(sid {}) failure: ", sid, ex2);
439                }
440            }
441
442            log.trace("init(): close dataset");
443            close(did);
444        }
445        else {
446            log.debug("init(): failed to open dataset");
447        }
448
449        resetSelection();
450        log.trace("init(): finish");
451    }
452
453    /*
454     * (non-Javadoc)
455     *
456     * @see hdf.object.DataFormat#hasAttribute()
457     */
458    @Override
459    public boolean hasAttribute() {
460        obj_info.num_attrs = nAttributes;
461
462        if (obj_info.num_attrs < 0) {
463            long did = open();
464            if (did >= 0) {
465                try {
466                    obj_info = H5.H5Oget_info(did);
467                    nAttributes = (int) obj_info.num_attrs;
468                }
469                catch (Exception ex) {
470                    obj_info.num_attrs = 0;
471                    log.debug("hasAttribute(): get object info failure: ", ex);
472                }
473                close(did);
474            }
475            else {
476                log.debug("hasAttribute(): could not open dataset");
477            }
478        }
479
480        log.trace("hasAttribute(): nAttributes={}", obj_info.num_attrs);
481        return (obj_info.num_attrs > 0);
482    }
483
484    /*
485     * (non-Javadoc)
486     *
487     * @see hdf.object.Dataset#getDatatype()
488     */
489    @Override
490    public Datatype getDatatype() {
491        log.trace("getDatatype(): start");
492
493        if (datatype == null) {
494            log.trace("getDatatype(): datatype == null");
495            long did = -1;
496            long tid = -1;
497
498            did = open();
499            if (did >= 0) {
500                try {
501                    tid = H5.H5Dget_type(did);
502                    datatype = new H5Datatype(tid);
503                }
504                catch (Exception ex) {
505                    log.debug("getDatatype(): ", ex);
506                }
507                finally {
508                    try {
509                        H5.H5Tclose(tid);
510                    }
511                    catch (HDF5Exception ex) {
512                        log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex);
513                    }
514                    try {
515                        H5.H5Dclose(did);
516                    }
517                    catch (HDF5Exception ex) {
518                        log.debug("getDatatype(): H5Dclose(did {}) failure: ", did, ex);
519                    }
520                }
521            }
522        }
523
524        log.trace("getDatatype(): finish");
525        return datatype;
526    }
527
528    @Override
529    public Object getFillValue() {
530        return null;
531    }
532
533    /*
534     * (non-Javadoc)
535     *
536     * @see hdf.object.Dataset#clear()
537     */
538    @Override
539    public void clear() {
540        super.clear();
541
542        if (attributeList != null) {
543            ((Vector<Attribute>) attributeList).setSize(0);
544        }
545    }
546
547    /*
548     * (non-Javadoc)
549     *
550     * @see hdf.object.Dataset#readBytes()
551     */
552    @Override
553    public byte[] readBytes() throws HDF5Exception {
554        log.trace("readBytes(): start");
555
556        byte[] theData = null;
557
558        if (!isInited())
559            init();
560
561        long did = open();
562        if (did >= 0) {
563            long fspace = -1;
564            long mspace = -1;
565            long tid = -1;
566
567            try {
568                long[] lsize = { 1 };
569                for (int j = 0; j < selectedDims.length; j++) {
570                    lsize[0] *= selectedDims[j];
571                }
572
573                fspace = H5.H5Dget_space(did);
574                mspace = H5.H5Screate_simple(rank, selectedDims, null);
575
576                // set the rectangle selection
577                // HDF5 bug: for scalar dataset, H5Sselect_hyperslab gives core dump
578                if (rank * dims[0] > 1) {
579                    H5.H5Sselect_hyperslab(fspace, HDF5Constants.H5S_SELECT_SET, startDims, selectedStride,
580                            selectedDims, null); // set block to 1
581                }
582
583                tid = H5.H5Dget_type(did);
584                long size = H5.H5Tget_size(tid) * lsize[0];
585                log.trace("readBytes(): size = {}", size);
586
587                if (size < Integer.MIN_VALUE || size > Integer.MAX_VALUE) throw new Exception("Invalid int size");
588
589                theData = new byte[(int) size];
590
591                log.trace("readBytes(): H5Dread: did={} tid={} fspace={} mspace={}", did, tid, fspace, mspace);
592                H5.H5Dread(did, tid, mspace, fspace, HDF5Constants.H5P_DEFAULT, theData);
593            }
594            catch (Exception ex) {
595                log.debug("readBytes(): failed to read data: ", ex);
596            }
597            finally {
598                try {
599                    H5.H5Sclose(fspace);
600                }
601                catch (Exception ex2) {
602                    log.debug("readBytes(): H5Sclose(fspace {}) failure: ", fspace, ex2);
603                }
604                try {
605                    H5.H5Sclose(mspace);
606                }
607                catch (Exception ex2) {
608                    log.debug("readBytes(): H5Sclose(mspace {}) failure: ", mspace, ex2);
609                }
610                try {
611                    H5.H5Tclose(tid);
612                }
613                catch (HDF5Exception ex2) {
614                    log.debug("readBytes(): H5Tclose(tid {}) failure: ", tid, ex2);
615                }
616                close(did);
617            }
618        }
619
620        log.trace("readBytes(): finish");
621        return theData;
622    }
623
624    /*
625     * (non-Javadoc)
626     *
627     * @see hdf.object.Dataset#read()
628     */
629    @Override
630    public Object read() throws Exception {
631        log.trace("read(): start");
632
633        List<Object> memberDataList = null;
634        H5Datatype DSdatatype = null;
635
636        if (!isInited())
637            init();
638
639        if (numberOfMembers <= 0) {
640            log.debug("read(): Dataset contains no members");
641            log.trace("read(): finish");
642            return null; // this compound dataset does not have any member
643        }
644
645        try {
646            DSdatatype = (H5Datatype) this.getDatatype();
647        }
648        catch (Exception ex) {
649            log.debug("read(): get datatype: ", ex);
650        }
651
652        if (isExternal) {
653            String pdir = this.getFileFormat().getAbsoluteFile().getParent();
654
655            if (pdir == null) {
656                pdir = ".";
657            }
658            System.setProperty("user.dir", pdir);// H5.H5Dchdir_ext(pdir);
659            log.trace("read(): External dataset: user.dir={}", pdir);
660        }
661
662        log.trace("read(): open dataset");
663
664        long did = open();
665        if (did >= 0) {
666            long[] spaceIDs = { -1, -1 }; // spaceIDs[0]=mspace, spaceIDs[1]=fspace
667
668            try {
669                long totalSelectedSpacePoints = selectHyperslab(did, spaceIDs);
670
671                log.trace("read(): selected {} points in dataset dataspace", totalSelectedSpacePoints);
672
673                if (totalSelectedSpacePoints == 0) {
674                    log.debug("read(): No data to read. Dataset or selected subset is empty.");
675                    log.trace("read(): finish");
676                    throw new HDF5Exception("No data to read.\nEither the dataset or the selected subset is empty.");
677                }
678
679                if (totalSelectedSpacePoints < Integer.MIN_VALUE || totalSelectedSpacePoints > Integer.MAX_VALUE) {
680                    log.debug("read(): totalSelectedSpacePoints outside valid Java int range; unsafe cast");
681                    log.trace("read(): finish");
682                    throw new HDF5Exception("Invalid int size");
683                }
684
685                if (log.isDebugEnabled()) {
686                    // check is storage space is allocated
687                    try {
688                        long ssize = H5.H5Dget_storage_size(did);
689                        log.trace("read(): Storage space allocated = {}.", ssize);
690                    }
691                    catch (Exception ex) {
692                        log.debug("read(): check if storage space is allocated:", ex);
693                    }
694                }
695
696                /*
697                 * Read each member of the compound datatype into a separate byte
698                 * array, then extract the data into its type, such as int, long,
699                 * float, etc.
700                 */
701                /*
702                 * TODO: Can potentially just re-use the global lists
703                 */
704                List<Datatype> atomicList = new Vector<>();
705                DSdatatype.extractCompoundInfo(null, null, atomicList);
706                memberDataList = new Vector<>(atomicList.size());
707
708                log.trace("read(): foreach nMembers={}", atomicList.size());
709
710                for (int i = 0; i < atomicList.size(); i++) {
711                    H5Datatype member_type = null;
712                    Datatype member_base = null;
713                    String member_name = null;
714                    Object member_data = null;
715                    int member_size = 0;
716
717                    if (!isMemberSelected[i]) {
718                        log.debug("read(): Member[{}] is not selected", i);
719                        continue; // the field is not selected
720                    }
721
722                    try {
723                        member_type = (H5Datatype) atomicList.get(i);
724                    }
725                    catch (Exception ex) {
726                        log.debug("read(): get member {} failure: ", i, ex);
727                        continue;
728                    }
729
730                    try {
731                        member_base = member_type.getDatatypeBase();
732                    }
733                    catch (Exception ex) {
734                        log.debug("read(): get member {} base type failure: ", i, ex);
735                        continue;
736                    }
737
738                    try {
739                        member_name = new String(memberNames[i]);
740                    }
741                    catch (Exception ex) {
742                        log.debug("read(): get member {} name failure: ", i, ex);
743                        member_name = "null";
744                    }
745
746                    try {
747                        member_size = (int) member_type.getDatatypeSize();
748                    }
749                    catch (Exception ex) {
750                        log.debug("read(): get member {} size failure: ", i, ex);
751                        continue;
752                    }
753
754                    try {
755                        member_data = member_type.allocateArray((int) totalSelectedSpacePoints);
756                    }
757                    catch (OutOfMemoryError err) {
758                        member_data = null;
759                        throw new HDF5Exception("Out Of Memory.");
760                    }
761                    catch (Exception ex) {
762                        log.debug("read(): Member[{}]: ", i, ex);
763                        member_data = null;
764                    }
765
766                    log.trace("read(): {} Member[{}] is type {} of size={}", member_name, i, member_type.getDescription(), member_size);
767
768                    if (member_data != null) {
769                        long comp_tid = -1;
770                        try {
771                            comp_tid = member_type.createCompoundFieldType(flatNameList.get(i));
772                        }
773                        catch (HDF5Exception ex) {
774                            log.debug("read(): unable to create compound field type for Member[{}] of type {}: ", i, member_type.getDescription(), ex);
775
776                            String[] nullValues = new String[(int) totalSelectedSpacePoints];
777                            for (int j = 0; j < totalSelectedSpacePoints; j++) {
778                                nullValues[j] = "NULL";
779                            }
780                            memberDataList.add(nullValues);
781                            log.debug("read(): {} Member[{}] createCompoundFieldType failure:", member_name, i, ex);
782                            continue;
783                        }
784
785                        /*
786                         * Actually read the data for this member now that everything has been setup
787                         */
788                        try {
789                            if (member_type.isVLEN() || (member_type.isArray() && member_base.isVLEN())) {
790                                log.trace("read(): Member[{}]: H5DreadVL did={} comp_tid={} spaceIDs[0]={} spaceIDs[1]={}", i, did, comp_tid, spaceIDs[0], spaceIDs[1]);
791                                H5.H5DreadVL(did, comp_tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, (Object[]) member_data);
792                            }
793                            else if ((member_base != null) && member_base.isCompound()) {
794                                log.trace("read(): Member[{}]: H5Dread did={} comp_tid={} spaceIDs[0]={} spaceIDs[1]={}", i, did, comp_tid, spaceIDs[0], spaceIDs[1]);
795                                H5.H5Dread(did, comp_tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, (byte[]) member_data, true);
796                            }
797                            else {
798                                log.trace("read(): Member[{}]: H5Dread did={} comp_tid={} spaceIDs[0]={} spaceIDs[1]={}", i, did, comp_tid, spaceIDs[0], spaceIDs[1]);
799                                H5.H5Dread(did, comp_tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, member_data);
800                            }
801                            log.trace("read(): member_data=***{}***", member_data);
802                        }
803                        catch (HDF5DataFiltersException exfltr) {
804                            log.debug("read(): {} Member[{}] read failure:", member_name, i, exfltr);
805                            log.trace("read(): finish");
806                            throw new Exception("Filter not available exception: " + exfltr.getMessage(), exfltr);
807                        }
808                        catch (Exception ex) {
809                            String[] errValues = new String[(int) totalSelectedSpacePoints];
810                            for (int j = 0; j < totalSelectedSpacePoints; j++) {
811                                errValues[j] = "*ERROR*";
812                            }
813                            memberDataList.add(errValues);
814                            log.debug("read(): {} Member[{}] read failure:", member_name, i, ex);
815                            continue;
816                        }
817                        finally {
818                            DSdatatype.close(comp_tid);
819                        }
820
821
822                        /*
823                         * Perform any necessary data conversions
824                         */
825                        if (member_type.isUnsigned()) {
826                            log.trace("read(): Member[{}]: converting from unsigned C-type integers", i);
827                            member_data = Dataset.convertFromUnsignedC(member_data, null);
828                        }
829                        else if ((member_type.isString()) && convertByteToString && !member_type.isVarStr()) {
830                            if (Tools.getJavaObjectRuntimeClass(member_data) == 'B') {
831                                log.trace("read(): Member[{}]: converting byte array to string array", i);
832                                member_data = byteToString((byte[]) member_data, member_size / memberOrders[i]);
833                            }
834                        }
835                        else if (member_type.isRef()) {
836                            if (Tools.getJavaObjectRuntimeClass(member_data) == 'B') {
837                                log.trace("read(): Member[{}]: converting byte array to long array", i);
838                                member_data = HDFNativeData.byteToLong((byte[]) member_data);
839                            }
840                        }
841                        else if (member_type.isArray() && member_base.isCompound()) {
842                            // Since compounds are read into memory as a byte array, discover each member
843                            // type and size and convert the byte array to the correct type before adding
844                            // it to the list
845                            long atom_tid = -1;
846                            try {
847                                atom_tid = member_type.createNative();
848
849                                int numDims = H5.H5Tget_array_ndims(atom_tid);
850                                long[] dims = new long[numDims];
851                                H5.H5Tget_array_dims(atom_tid, dims);
852                                int numberOfCompounds = (int) dims[0] * (int) totalSelectedSpacePoints;
853                                int compoundSize = (member_size * (int) totalSelectedSpacePoints) / numberOfCompounds;
854
855                                Object current_data = new Object[numberOfCompounds];
856
857                                long base_tid = -1;
858                                long memberOffsets[] = null;
859                                long memberLengths[] = null;
860                                long memberTypes[] = null;
861                                int numberOfMembers;
862
863                                try {
864                                    base_tid = H5.H5Tget_super(atom_tid);
865                                    numberOfMembers = H5.H5Tget_nmembers(base_tid);
866                                    memberOffsets = new long[numberOfMembers];
867                                    memberLengths = new long[numberOfMembers];
868                                    memberTypes = new long[numberOfMembers];
869
870                                    for (int j = 0; j < numberOfMembers; j++) {
871                                        memberOffsets[j] = H5.H5Tget_member_offset(base_tid, j);
872                                        memberTypes[j] = H5.H5Tget_member_type(base_tid, j);
873                                    }
874
875                                    for (int j = 0; j < numberOfMembers; j++) {
876                                        if (j < numberOfMembers - 1) {
877                                            memberLengths[j] = (memberOffsets[j + 1] - memberOffsets[j]);
878                                        }
879                                        else {
880                                            memberLengths[j] = (compoundSize - memberOffsets[j]);
881                                        }
882                                    }
883
884                                    for (int j = 0; j < numberOfCompounds; j++) {
885                                        Object field_data = new Object[numberOfMembers];
886
887                                        for (int k = 0; k < numberOfMembers; k++) {
888                                            Object converted = convertCompoundByteMember((byte[]) member_data, memberTypes[k], memberOffsets[k] + (compoundSize * j),
889                                                    memberLengths[k]);
890
891                                            ((Object[]) field_data)[k] = Array.get(converted, 0);
892                                        }
893
894                                        ((Object[]) current_data)[j] = field_data;
895                                    }
896                                }
897                                catch (Exception ex) {
898                                    log.debug("read(): Convert Array of Compounds failure: ", ex);
899                                    continue;
900                                }
901                                finally {
902                                    for (int j = 0; j < memberTypes.length; j++) {
903                                        member_type.close(memberTypes[j]);
904                                    }
905
906                                    member_type.close(base_tid);
907                                }
908
909                                memberDataList.add(current_data);
910                            }
911                            catch (Exception ex) {
912                                log.debug("read(): Member[{}]: list.add failure(): ", i, ex);
913                            }
914                            finally {
915                                member_type.close(atom_tid);
916                            }
917                        } // if (member_type.isArray() && member_base.isCompound())
918                    } // if (member_data != null) {
919                    else {
920                        String[] errValues = new String[(int) totalSelectedSpacePoints];
921                        String errStr = "ERROR";
922
923                        for (int j = 0; j < totalSelectedSpacePoints; j++)
924                            errValues[j] = errStr;
925
926                        memberDataList.add(errValues);
927
928                        log.debug("read(): {} Member[{}] of type {} member_data is null", member_name, i, member_type.getDescription());
929                    }
930
931                    memberDataList.add(member_data);
932                } // end of for (int i=0; i<num_members; i++)
933            }
934            finally {
935                if (HDF5Constants.H5S_ALL != spaceIDs[0]) {
936                    try {
937                        H5.H5Sclose(spaceIDs[0]);
938                    }
939                    catch (Exception ex) {
940                        log.debug("read(): H5Sclose(spaceIDs[0] {}) failure: ", spaceIDs[0], ex);
941                    }
942                }
943
944                if (HDF5Constants.H5S_ALL != spaceIDs[1]) {
945                    try {
946                        H5.H5Sclose(spaceIDs[1]);
947                    }
948                    catch (Exception ex) {
949                        log.debug("read(): H5Sclose(spaceIDs[1] {}) failure: ", spaceIDs[1], ex);
950                    }
951                }
952
953                close(did);
954            }
955        }
956
957        log.trace("read(): finish");
958        return memberDataList;
959    }
960
961    /**
962     * Writes the given data buffer into this dataset in a file.
963     * <p>
964     * The data buffer is a vector that contains the data values of compound fields. The data is written
965     * into file field by field.
966     *
967     * @param buf
968     *            The vector that contains the data values of compound fields.
969     *
970     * @throws HDF5Exception
971     *             If there is an error at the HDF5 library level.
972     */
973    @Override
974    public void write(Object buf) throws HDF5Exception {
975        log.trace("write(): start");
976
977        Object tmpData = null;
978        H5Datatype DSdatatype = null;
979
980        if ((buf == null) || (numberOfMembers <= 0) || !(buf instanceof List)) {
981            log.debug("write(): buf is null or invalid or contains no members");
982            log.trace("write(): finish");
983            return;
984        }
985
986        if (!isInited())
987            init();
988
989        try {
990            DSdatatype = (H5Datatype) this.getDatatype();
991        }
992        catch (Exception ex) {
993            log.debug("write(): get datatype: ", ex);
994        }
995
996        /*
997         * Check for any unsupported datatypes and fail early before
998         * attempting to write to the dataset
999         */
1000        if (DSdatatype.isArray() || DSdatatype.isVLEN()) {
1001            H5Datatype baseType = (H5Datatype) DSdatatype.getDatatypeBase();
1002
1003            if (baseType != null) {
1004                if (baseType.isCompound()) {
1005                    log.debug("write(): cannot write dataset of type ARRAY of COMPOUND");
1006                    log.trace("write(): finish");
1007                    throw new HDF5Exception("Unsupported dataset of type ARRAY of COMPOUND");
1008                }
1009
1010                if (baseType.isCompound()) {
1011                    log.debug("write(): cannot write dataset of type VLEN of COMPOUND");
1012                    log.trace("write(): finish");
1013                    throw new HDF5Exception("Unsupported dataset of type VLEN of COMPOUND");
1014                }
1015            }
1016            else {
1017                log.debug("write(): ARRAY or VLEN datatype has no base type");
1018                throw new HDF5Exception("Dataset's datatype (ARRAY or VLEN) has no base datatype");
1019            }
1020        }
1021
1022        log.trace("write(): open dataset");
1023
1024        long did = open();
1025        if (did >= 0) {
1026            long spaceIDs[] = { -1, -1 }; // spaceIDs[0]=mspace, spaceIDs[1]=fspace
1027
1028            try {
1029                long totalSelectedSpacePoints = selectHyperslab(did, spaceIDs);
1030
1031                log.trace("write(): selected {} points in dataset dataspace", totalSelectedSpacePoints);
1032
1033                List<Datatype> atomicList = new Vector<>();
1034                DSdatatype.extractCompoundInfo(null, null, atomicList);
1035
1036                log.trace("write(): foreach nMembers={}", atomicList.size());
1037
1038                int currentMemberIndex = 0;
1039                for (int i = 0; i < atomicList.size(); i++) {
1040                    H5Datatype member_type = null;
1041                    String member_name = null;
1042                    Object member_data = null;
1043
1044                    if (!isMemberSelected[i]) {
1045                        log.debug("write(): Member[{}] is not selected", i);
1046                        continue; // the field is not selected
1047                    }
1048
1049                    try {
1050                        member_type = (H5Datatype) atomicList.get(i);
1051                    }
1052                    catch (Exception ex) {
1053                        log.debug("write(): get member {} failure: ", i, ex);
1054                        continue;
1055                    }
1056
1057                    try {
1058                        member_name = new String(memberNames[i]);
1059                    }
1060                    catch (Exception ex) {
1061                        log.debug("write(): get member {} name failure: ", i, ex);
1062                        member_name = "null";
1063                    }
1064
1065                    try {
1066                        member_data = ((List<?>) buf).get(currentMemberIndex++);
1067                    }
1068                    catch (Exception ex) {
1069                        log.debug("write(): get member {} data failure: ", i, ex);
1070                        continue;
1071                    }
1072
1073                    if (member_data == null) {
1074                        log.debug("write(): Member[{}] data is null", i);
1075                        continue;
1076                    }
1077
1078                    log.trace("write(): {} Member[{}] is type {} of size={}", member_name, i, member_type.getDescription(), member_type.getDatatypeSize());
1079
1080                    /*
1081                     * Check for any unsupported datatypes before attempting to write
1082                     * this compound member
1083                     */
1084                    if (member_type.isVLEN() && !member_type.isVarStr()) {
1085                        log.debug("write(): Member[{}]: write of VL non-strings is not currently supported");
1086                        continue;
1087                    }
1088
1089                    /*
1090                     * Perform any necessary data conversions before writing the data.
1091                     */
1092                    try {
1093                        tmpData = member_data;
1094
1095                        if (member_type.isUnsigned()) {
1096                            // Check if we need to convert integer data
1097                            long tsize = member_type.getDatatypeSize();
1098                            String cname = member_data.getClass().getName();
1099                            char dname = cname.charAt(cname.lastIndexOf("[") + 1);
1100                            boolean doIntConversion = (((tsize == 1) && (dname == 'S'))
1101                                    || ((tsize == 2) && (dname == 'I')) || ((tsize == 4) && (dname == 'J')));
1102
1103                            if (doIntConversion) {
1104                                log.trace("write(): Member[{}]: converting integer data to unsigned C-type integers", i);
1105                                tmpData = convertToUnsignedC(member_data, null);
1106                            }
1107                        }
1108                        else if (member_type.isString() && (Array.get(member_data, 0) instanceof String)) {
1109                            log.trace("write(): Member[{}]: converting string array to byte array", i);
1110                            tmpData = stringToByte((String[]) member_data, (int) member_type.getDatatypeSize());
1111                        }
1112                        else if (member_type.isEnum() && (Array.get(member_data, 0) instanceof String)) {
1113                            log.trace("write(): Member[{}]: converting enum names to values", i);
1114                            tmpData = member_type.convertEnumNameToValue((String[]) member_data);
1115                        }
1116                    }
1117                    catch (Exception ex) {
1118                        log.debug("write(): data conversion failure: ", ex);
1119                        tmpData = null;
1120                    }
1121
1122                    /*
1123                     * Actually write the data now that everything has been setup
1124                     */
1125                    if (tmpData != null) {
1126                        long comp_tid = -1;
1127                        try {
1128                            comp_tid = member_type.createCompoundFieldType(flatNameList.get(i));
1129                        }
1130                        catch (HDF5Exception ex) {
1131                            log.debug("write(): unable to create compound field type for Member[{}]: ", i, ex);
1132                            continue;
1133                        }
1134
1135                        try {
1136                            if (member_type.isVarStr()) {
1137                                log.trace("write(): Member[{}]: H5Dwrite_string did={} comp_tid={} spaceIDs[0]={} spaceIDs[1]={}", i, did, comp_tid, spaceIDs[0], spaceIDs[1]);
1138                                H5.H5Dwrite_string(did, comp_tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, (String[]) tmpData);
1139                            }
1140                            else {
1141                                // BUG!!! does not write nested compound data and no
1142                                // exception was caught need to check if it is a java
1143                                // error or C library error
1144                                log.trace("write(): Member[{}]: H5Dwrite did={} comp_tid={} spaceIDs[0]={} spaceIDs[1]={}", i, did, comp_tid, spaceIDs[0], spaceIDs[1]);
1145                                H5.H5Dwrite(did, comp_tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, tmpData);
1146                            }
1147                        }
1148                        catch (Exception ex) {
1149                            log.debug("write(): write failure: ", ex);
1150                            log.trace("write(): finish");
1151                            throw new HDF5Exception(ex.getMessage());
1152                        }
1153                        finally {
1154                            DSdatatype.close(comp_tid);
1155                        }
1156                    }
1157                } // end of for (int i=0; i<num_members; i++)
1158            }
1159            finally {
1160                if (HDF5Constants.H5S_ALL != spaceIDs[0]) {
1161                    try {
1162                        H5.H5Sclose(spaceIDs[0]);
1163                    }
1164                    catch (Exception ex) {
1165                        log.debug("write(): H5Sclose(spaceIDs[0] {}) failure: ", spaceIDs[0], ex);
1166                    }
1167                }
1168
1169                if (HDF5Constants.H5S_ALL != spaceIDs[1]) {
1170                    try {
1171                        H5.H5Sclose(spaceIDs[1]);
1172                    }
1173                    catch (Exception ex) {
1174                        log.debug("write(): H5Sclose(spaceIDs[1] {}) failure: ", spaceIDs[1], ex);
1175                    }
1176                }
1177
1178                close(did);
1179            }
1180        }
1181
1182        log.trace("write(): finish");
1183    }
1184
1185    @Override
1186    public Object convertFromUnsignedC() {
1187        throw new UnsupportedOperationException("H5CompoundDS:convertFromUnsignedC Unsupported operation.");
1188    }
1189
1190    @Override
1191    public Object convertToUnsignedC() {
1192        throw new UnsupportedOperationException("H5CompoundDS:convertToUnsignedC Unsupported operation.");
1193    }
1194
1195    /**
1196     * Set up the selection of hyperslab
1197     *
1198     * @param did
1199     *            IN dataset ID
1200     * @param spaceIDs
1201     *            IN/OUT memory and file space IDs -- spaceIDs[0]=mspace, spaceIDs[1]=fspace
1202     *
1203     * @return total number of data point selected
1204     *
1205     * @throws HDF5Exception
1206     *             If there is an error at the HDF5 library level.
1207     */
1208    private long selectHyperslab(long did, long[] spaceIDs) throws HDF5Exception {
1209        log.trace("selectHyperslab(): start");
1210
1211        long lsize = 1;
1212
1213        boolean isAllSelected = true;
1214        for (int i = 0; i < rank; i++) {
1215            lsize *= selectedDims[i];
1216            if (selectedDims[i] < dims[i]) {
1217                isAllSelected = false;
1218            }
1219        }
1220
1221        log.trace("selectHyperslab(): isAllSelected={}", isAllSelected);
1222
1223        if (isAllSelected) {
1224            spaceIDs[0] = HDF5Constants.H5S_ALL;
1225            spaceIDs[1] = HDF5Constants.H5S_ALL;
1226        }
1227        else {
1228            spaceIDs[1] = H5.H5Dget_space(did);
1229
1230            // When 1D dataspace is used in chunked dataset, reading is very
1231            // slow.
1232            // It is a known problem on HDF5 library for chunked dataset.
1233            // mspace = H5.H5Screate_simple(1, lsize, null);
1234            spaceIDs[0] = H5.H5Screate_simple(rank, selectedDims, null);
1235            H5.H5Sselect_hyperslab(spaceIDs[1], HDF5Constants.H5S_SELECT_SET, startDims, selectedStride, selectedDims,
1236                    null);
1237        }
1238
1239        return lsize;
1240    }
1241
1242    /*
1243     * (non-Javadoc)
1244     *
1245     * @see hdf.object.DataFormat#getMetadata()
1246     */
1247    @Override
1248    public List<Attribute> getMetadata() throws HDF5Exception {
1249        return this.getMetadata(fileFormat.getIndexType(null), fileFormat.getIndexOrder(null));
1250    }
1251
1252    /*
1253     * (non-Javadoc)
1254     *
1255     * @see hdf.object.DataFormat#getMetadata(int...)
1256     */
1257    public List<Attribute> getMetadata(int... attrPropList) throws HDF5Exception {
1258        log.trace("getMetadata(): start");
1259
1260        if (!isInited()) {
1261            init();
1262            log.trace("getMetadata(): inited");
1263        }
1264
1265        try {
1266            this.linkTargetObjName = H5File.getLinkTargetName(this);
1267        }
1268        catch (Exception ex) {
1269            log.debug("getMetadata(): getLinkTargetName failed: ", ex);
1270        }
1271
1272        if (attributeList != null) {
1273            log.trace("getMetadata(): attributeList != null");
1274            log.trace("getMetadata(): finish");
1275            return attributeList;
1276        }
1277
1278        long did = -1;
1279        long pcid = -1;
1280        long paid = -1;
1281        int indxType = fileFormat.getIndexType(null);
1282        int order = fileFormat.getIndexOrder(null);
1283
1284        // load attributes first
1285        if (attrPropList.length > 0) {
1286            indxType = attrPropList[0];
1287            if (attrPropList.length > 1) {
1288                order = attrPropList[1];
1289            }
1290        }
1291
1292        attributeList = H5File.getAttribute(this, indxType, order);
1293        log.trace("getMetadata(): attributeList loaded");
1294
1295        log.trace("getMetadata(): open dataset");
1296        did = open();
1297        if (did >= 0) {
1298            log.trace("getMetadata(): dataset opened");
1299            try {
1300                compression = "";
1301
1302                // get the compression and chunk information
1303                pcid = H5.H5Dget_create_plist(did);
1304                paid = H5.H5Dget_access_plist(did);
1305                long storage_size = H5.H5Dget_storage_size(did);
1306                int nfilt = H5.H5Pget_nfilters(pcid);
1307                int layout_type = H5.H5Pget_layout(pcid);
1308                if (layout_type == HDF5Constants.H5D_CHUNKED) {
1309                    chunkSize = new long[rank];
1310                    H5.H5Pget_chunk(pcid, rank, chunkSize);
1311                    int n = chunkSize.length;
1312                    storage_layout = "CHUNKED: " + String.valueOf(chunkSize[0]);
1313                    for (int i = 1; i < n; i++) {
1314                        storage_layout += " X " + chunkSize[i];
1315                    }
1316
1317                    if (nfilt > 0) {
1318                        long nelmts = 1;
1319                        long uncomp_size;
1320                        long datum_size = getDatatype().getDatatypeSize();
1321                        if (datum_size < 0) {
1322                            long tmptid = -1;
1323                            try {
1324                                tmptid = H5.H5Dget_type(did);
1325                                datum_size = H5.H5Tget_size(tmptid);
1326                            }
1327                            finally {
1328                                try {H5.H5Tclose(tmptid);}
1329                                catch (Exception ex2) {log.debug("getMetadata(): H5Tclose(tmptid {}) failure: ", tmptid, ex2);}
1330                            }
1331                        }
1332
1333                        for (int i = 0; i < rank; i++) {
1334                            nelmts *= dims[i];
1335                        }
1336                        uncomp_size = nelmts * datum_size;
1337
1338                        /* compression ratio = uncompressed size / compressed size */
1339
1340                        if (storage_size != 0) {
1341                            double ratio = (double) uncomp_size / (double) storage_size;
1342                            DecimalFormat df = new DecimalFormat();
1343                            df.setMinimumFractionDigits(3);
1344                            df.setMaximumFractionDigits(3);
1345                            compression += df.format(ratio) + ":1";
1346                        }
1347                    }
1348                }
1349                else if (layout_type == HDF5Constants.H5D_COMPACT) {
1350                    storage_layout = "COMPACT";
1351                }
1352                else if (layout_type == HDF5Constants.H5D_CONTIGUOUS) {
1353                    storage_layout = "CONTIGUOUS";
1354                    if (H5.H5Pget_external_count(pcid) > 0)
1355                        storage_layout += " - EXTERNAL ";
1356                }
1357                else if (layout_type == HDF5Constants.H5D_VIRTUAL) {
1358                    storage_layout = "VIRTUAL - ";
1359                    try {
1360                        long vmaps = H5.H5Pget_virtual_count(pcid);
1361                        try {
1362                            int virt_view = H5.H5Pget_virtual_view(paid);
1363                            long virt_gap = H5.H5Pget_virtual_printf_gap(paid);
1364                            if (virt_view == HDF5Constants.H5D_VDS_FIRST_MISSING)
1365                                storage_layout += "First Missing";
1366                            else
1367                                storage_layout += "Last Available";
1368                            storage_layout += "\nGAP : " + String.valueOf(virt_gap);
1369                        }
1370                        catch (Throwable err) {
1371                            log.debug("getMetadata(): vds error: ", err);
1372                            storage_layout += "ERROR";
1373                        }
1374                        storage_layout += "\nMAPS : " + String.valueOf(vmaps);
1375                        if (vmaps > 0) {
1376                            for (long next = 0; next < vmaps; next++) {
1377                                try {
1378                                    H5.H5Pget_virtual_vspace(pcid, next);
1379                                    H5.H5Pget_virtual_srcspace(pcid, next);
1380                                    String fname = H5.H5Pget_virtual_filename(pcid, next);
1381                                    String dsetname = H5.H5Pget_virtual_dsetname(pcid, next);
1382                                    storage_layout += "\n" + fname + " : " + dsetname;
1383                                }
1384                                catch (Throwable err) {
1385                                    log.debug("getMetadata(): vds space[{}] error: ", next, err);
1386                                    log.trace("getMetadata(): vds[{}] continue", next);
1387                                    storage_layout += "ERROR";
1388                                    continue;
1389                                }
1390                            }
1391                        }
1392                    }
1393                    catch (Throwable err) {
1394                        log.debug("getMetadata(): vds count error: ", err);
1395                        storage_layout += "ERROR";
1396                    }
1397                }
1398                else {
1399                    chunkSize = null;
1400                    storage_layout = "NONE";
1401                }
1402
1403                int[] flags = { 0, 0 };
1404                long[] cd_nelmts = { 20 };
1405                int[] cd_values = new int[(int) cd_nelmts[0]];
1406                String[] cd_name = { "", "" };
1407                log.trace("getMetadata(): {} filters in pipeline", nfilt);
1408                int filter = -1;
1409                int[] filter_config = { 1 };
1410                filters = "";
1411
1412                for (int i = 0, k = 0; i < nfilt; i++) {
1413                    log.trace("getMetadata(): filter[{}]", i);
1414                    if (i > 0) {
1415                        filters += ", ";
1416                    }
1417                    if (k > 0) {
1418                        compression += ", ";
1419                    }
1420
1421                    try {
1422                        cd_nelmts[0] = 20;
1423                        cd_values = new int[(int) cd_nelmts[0]];
1424                        cd_values = new int[(int) cd_nelmts[0]];
1425                        filter = H5.H5Pget_filter(pcid, i, flags, cd_nelmts, cd_values, 120, cd_name, filter_config);
1426                        log.trace("getMetadata(): filter[{}] is {} has {} elements ", i, cd_name[0], cd_nelmts[0]);
1427                        for (int j = 0; j < cd_nelmts[0]; j++) {
1428                            log.trace("getMetadata(): filter[{}] element {} = {}", i, j, cd_values[j]);
1429                        }
1430                    }
1431                    catch (Throwable err) {
1432                        log.debug("getMetadata(): filter[{}] error: ", i, err);
1433                        log.trace("getMetadata(): filter[{}] continue", i);
1434                        filters += "ERROR";
1435                        continue;
1436                    }
1437
1438                    if (filter == HDF5Constants.H5Z_FILTER_NONE) {
1439                        filters += "NONE";
1440                    }
1441                    else if (filter == HDF5Constants.H5Z_FILTER_DEFLATE) {
1442                        filters += "GZIP";
1443                        compression += compression_gzip_txt + cd_values[0];
1444                        k++;
1445                    }
1446                    else if (filter == HDF5Constants.H5Z_FILTER_FLETCHER32) {
1447                        filters += "Error detection filter";
1448                    }
1449                    else if (filter == HDF5Constants.H5Z_FILTER_SHUFFLE) {
1450                        filters += "SHUFFLE: Nbytes = " + cd_values[0];
1451                    }
1452                    else if (filter == HDF5Constants.H5Z_FILTER_NBIT) {
1453                        filters += "NBIT";
1454                    }
1455                    else if (filter == HDF5Constants.H5Z_FILTER_SCALEOFFSET) {
1456                        filters += "SCALEOFFSET: MIN BITS = " + cd_values[0];
1457                    }
1458                    else if (filter == HDF5Constants.H5Z_FILTER_SZIP) {
1459                        filters += "SZIP";
1460                        compression += "SZIP: Pixels per block = " + cd_values[1];
1461                        k++;
1462                        int flag = -1;
1463                        try {
1464                            flag = H5.H5Zget_filter_info(filter);
1465                        }
1466                        catch (Exception ex) {
1467                            log.debug("getMetadata(): H5Zget_filter_info failure: ", ex);
1468                            flag = -1;
1469                        }
1470                        if (flag == HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) {
1471                            compression += ": H5Z_FILTER_CONFIG_DECODE_ENABLED";
1472                        }
1473                        else if ((flag == HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED)
1474                                || (flag >= (HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED
1475                                        + HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED))) {
1476                            compression += ": H5Z_FILTER_CONFIG_ENCODE_ENABLED";
1477                        }
1478                    }
1479                    else {
1480                        filters += "USERDEFINED " + cd_name[0] + "(" + filter + "): ";
1481                        for (int j = 0; j < cd_nelmts[0]; j++) {
1482                            if (j > 0)
1483                                filters += ", ";
1484                            filters += cd_values[j];
1485                        }
1486                        log.debug("getMetadata(): filter[{}] is user defined compression", i);
1487                    }
1488                } // for (int i=0; i<nfilt; i++)
1489
1490                if (compression.length() == 0) {
1491                    compression = "NONE";
1492                }
1493                log.trace("getMetadata(): filter compression={}", compression);
1494
1495                if (filters.length() == 0) {
1496                    filters = "NONE";
1497                }
1498                log.trace("getMetadata(): filter information={}", filters);
1499
1500                storage = "SIZE: " + storage_size;
1501                try {
1502                    int[] at = { 0 };
1503                    H5.H5Pget_alloc_time(pcid, at);
1504                    storage += ", allocation time: ";
1505                    if (at[0] == HDF5Constants.H5D_ALLOC_TIME_EARLY) {
1506                        storage += "Early";
1507                    }
1508                    else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_INCR) {
1509                        storage += "Incremental";
1510                    }
1511                    else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_LATE) {
1512                        storage += "Late";
1513                    }
1514                }
1515                catch (Exception ex) {
1516                    log.debug("getMetadata(): Storage allocation time:", ex);
1517                }
1518                if (storage.length() == 0) {
1519                    storage = "NONE";
1520                }
1521                log.trace("getMetadata(): storage={}", storage);
1522            }
1523            finally {
1524                try {
1525                    H5.H5Pclose(paid);
1526                }
1527                catch (Exception ex) {
1528                    log.debug("getMetadata(): H5Pclose(paid {}) failure: ", paid, ex);
1529                }
1530                try {
1531                    H5.H5Pclose(pcid);
1532                }
1533                catch (Exception ex) {
1534                    log.debug("getMetadata(): H5Pclose(pcid {}) failure: ", pcid, ex);
1535                }
1536                close(did);
1537            }
1538        }
1539
1540        log.trace("getMetadata(): finish");
1541        return attributeList;
1542    }
1543
1544    /*
1545     * (non-Javadoc)
1546     *
1547     * @see hdf.object.DataFormat#writeMetadata(java.lang.Object)
1548     */
1549    @Override
1550    public void writeMetadata(Object info) throws Exception {
1551        log.trace("writeMetadata(): start");
1552
1553        // only attribute metadata is supported.
1554        if (!(info instanceof Attribute)) {
1555            log.debug("writeMetadata(): Object not an Attribute");
1556            log.trace("writeMetadata(): finish");
1557            return;
1558        }
1559
1560        boolean attrExisted = false;
1561        Attribute attr = (Attribute) info;
1562        log.trace("writeMetadata(): {}", attr.getName());
1563
1564        if (attributeList == null) {
1565            this.getMetadata();
1566        }
1567
1568        if (attributeList != null)
1569            attrExisted = attributeList.contains(attr);
1570
1571        getFileFormat().writeAttribute(this, attr, attrExisted);
1572        // add the new attribute into attribute list
1573        if (!attrExisted) {
1574            attributeList.add(attr);
1575            nAttributes = attributeList.size();
1576        }
1577
1578        log.trace("writeMetadata(): finish");
1579    }
1580
1581    /*
1582     * (non-Javadoc)
1583     *
1584     * @see hdf.object.DataFormat#removeMetadata(java.lang.Object)
1585     */
1586    @Override
1587    public void removeMetadata(Object info) throws HDF5Exception {
1588        log.trace("removeMetadata(): start");
1589
1590        // only attribute metadata is supported.
1591        if (!(info instanceof Attribute)) {
1592            log.debug("removeMetadata(): Object not an Attribute");
1593            log.trace("removeMetadata(): finish");
1594            return;
1595        }
1596
1597        Attribute attr = (Attribute) info;
1598        log.trace("removeMetadata(): {}", attr.getName());
1599        long did = open();
1600        if (did >= 0) {
1601            try {
1602                H5.H5Adelete(did, attr.getName());
1603                List<Attribute> attrList = getMetadata();
1604                attrList.remove(attr);
1605                nAttributes = attrList.size();
1606            }
1607            finally {
1608                close(did);
1609            }
1610        }
1611
1612        log.trace("removeMetadata(): finish");
1613    }
1614
1615    /*
1616     * (non-Javadoc)
1617     *
1618     * @see hdf.object.DataFormat#updateMetadata(java.lang.Object)
1619     */
1620    @Override
1621    public void updateMetadata(Object info) throws HDF5Exception {
1622        log.trace("updateMetadata(): start");
1623
1624        // only attribute metadata is supported.
1625        if (!(info instanceof Attribute)) {
1626            log.debug("updateMetadata(): Object not an Attribute");
1627            log.trace("updateMetadata(): finish");
1628            return;
1629        }
1630
1631        nAttributes = -1;
1632
1633        log.trace("updateMetadata(): finish");
1634    }
1635
1636    /*
1637     * (non-Javadoc)
1638     *
1639     * @see hdf.object.HObject#setName(java.lang.String)
1640     */
1641    @Override
1642    public void setName(String newName) throws Exception {
1643        H5File.renameObject(this, newName);
1644        super.setName(newName);
1645    }
1646
1647    /**
1648     * Resets selection of dataspace
1649     */
1650    private void resetSelection() {
1651        log.trace("resetSelection(): start");
1652
1653        for (int i = 0; i < rank; i++) {
1654            startDims[i] = 0;
1655            selectedDims[i] = 1;
1656            if (selectedStride != null) {
1657                selectedStride[i] = 1;
1658            }
1659        }
1660
1661        if (rank == 1) {
1662            selectedIndex[0] = 0;
1663            selectedDims[0] = dims[0];
1664        }
1665        else if (rank == 2) {
1666            selectedIndex[0] = 0;
1667            selectedIndex[1] = 1;
1668            selectedDims[0] = dims[0];
1669            selectedDims[1] = dims[1];
1670        }
1671        else if (rank > 2) {
1672            // selectedIndex[0] = rank - 2; // columns
1673            // selectedIndex[1] = rank - 1; // rows
1674            // selectedIndex[2] = rank - 3;
1675            selectedIndex[0] = 0; // width, the fastest dimension
1676            selectedIndex[1] = 1; // height
1677            selectedIndex[2] = 2; // frames
1678            // selectedDims[rank - 1] = dims[rank - 1];
1679            // selectedDims[rank - 2] = dims[rank - 2];
1680            selectedDims[selectedIndex[0]] = dims[selectedIndex[0]];
1681            selectedDims[selectedIndex[1]] = dims[selectedIndex[1]];
1682        }
1683
1684        isDataLoaded = false;
1685        setAllMemberSelection(true);
1686        log.trace("resetSelection(): finish");
1687    }
1688
1689    /**
1690     * @deprecated Not for public use in the future. <br>
1691     *             Using
1692     *             {@link #create(String, Group, long[], long[], long[], int, String[], Datatype[], int[], long[][], Object)}
1693     *
1694     * @param name
1695     *            the name of the dataset to create.
1696     * @param pgroup
1697     *            parent group where the new dataset is created.
1698     * @param dims
1699     *            the dimension size of the dataset.
1700     * @param memberNames
1701     *            the names of compound datatype
1702     * @param memberDatatypes
1703     *            the datatypes of the compound datatype
1704     * @param memberSizes
1705     *            the dim sizes of the members
1706     * @param data
1707     *            list of data arrays written to the new dataset, null if no data is written to the new
1708     *            dataset.
1709     *
1710     * @return the new compound dataset if successful; otherwise returns null.
1711     *
1712     * @throws Exception
1713     *             if there is a failure.
1714     */
1715    @Deprecated
1716    public static Dataset create(String name, Group pgroup, long[] dims, String[] memberNames,
1717            Datatype[] memberDatatypes, int[] memberSizes, Object data) throws Exception {
1718        if ((pgroup == null) || (name == null) || (dims == null) || (memberNames == null) || (memberDatatypes == null)
1719                || (memberSizes == null)) {
1720            return null;
1721        }
1722
1723        int nMembers = memberNames.length;
1724        int memberRanks[] = new int[nMembers];
1725        long memberDims[][] = new long[nMembers][1];
1726        for (int i = 0; i < nMembers; i++) {
1727            memberRanks[i] = 1;
1728            memberDims[i][0] = memberSizes[i];
1729        }
1730
1731        return H5CompoundDS.create(name, pgroup, dims, memberNames, memberDatatypes, memberRanks, memberDims, data);
1732    }
1733
1734    /**
1735     * @deprecated Not for public use in the future. <br>
1736     *             Using
1737     *             {@link #create(String, Group, long[], long[], long[], int, String[], Datatype[], int[], long[][], Object)}
1738     *
1739     * @param name
1740     *            the name of the dataset to create.
1741     * @param pgroup
1742     *            parent group where the new dataset is created.
1743     * @param dims
1744     *            the dimension size of the dataset.
1745     * @param memberNames
1746     *            the names of compound datatype
1747     * @param memberDatatypes
1748     *            the datatypes of the compound datatype
1749     * @param memberRanks
1750     *            the ranks of the members
1751     * @param memberDims
1752     *            the dim sizes of the members
1753     * @param data
1754     *            list of data arrays written to the new dataset, null if no data is written to the new
1755     *            dataset.
1756     *
1757     * @return the new compound dataset if successful; otherwise returns null.
1758     *
1759     * @throws Exception
1760     *             if the dataset can not be created.
1761     */
1762    @Deprecated
1763    public static Dataset create(String name, Group pgroup, long[] dims, String[] memberNames,
1764            Datatype[] memberDatatypes, int[] memberRanks, long[][] memberDims, Object data) throws Exception {
1765        return H5CompoundDS.create(name, pgroup, dims, null, null, -1, memberNames, memberDatatypes, memberRanks,
1766                memberDims, data);
1767    }
1768
1769    /**
1770     * Creates a simple compound dataset in a file with/without chunking and compression.
1771     * <p>
1772     * This function provides an easy way to create a simple compound dataset in file by hiding tedious
1773     * details of creating a compound dataset from users.
1774     * <p>
1775     * This function calls H5.H5Dcreate() to create a simple compound dataset in file. Nested compound
1776     * dataset is not supported. The required information to create a compound dataset includes the
1777     * name, the parent group and data space of the dataset, the names, datatypes and data spaces of the
1778     * compound fields. Other information such as chunks, compression and the data buffer is optional.
1779     * <p>
1780     * The following example shows how to use this function to create a compound dataset in file.
1781     *
1782     * <pre>
1783     * H5File file = null;
1784     * String message = &quot;&quot;;
1785     * Group pgroup = null;
1786     * int[] DATA_INT = new int[DIM_SIZE];
1787     * float[] DATA_FLOAT = new float[DIM_SIZE];
1788     * String[] DATA_STR = new String[DIM_SIZE];
1789     * long[] DIMs = { 50, 10 };
1790     * long[] CHUNKs = { 25, 5 };
1791     *
1792     * try {
1793     *     file = (H5File) H5FILE.open(fname, H5File.CREATE);
1794     *     file.open();
1795     *     pgroup = (Group) file.get(&quot;/&quot;);
1796     * }
1797     * catch (Exception ex) {
1798     * }
1799     *
1800     * Vector data = new Vector();
1801     * data.add(0, DATA_INT);
1802     * data.add(1, DATA_FLOAT);
1803     * data.add(2, DATA_STR);
1804     *
1805     * // create groups
1806     * Datatype[] mdtypes = new H5Datatype[3];
1807     * String[] mnames = { &quot;int&quot;, &quot;float&quot;, &quot;string&quot; };
1808     * Dataset dset = null;
1809     * try {
1810     *     mdtypes[0] = new H5Datatype(Datatype.CLASS_INTEGER, 4, -1, -1);
1811     *     mdtypes[1] = new H5Datatype(Datatype.CLASS_FLOAT, 4, -1, -1);
1812     *     mdtypes[2] = new H5Datatype(Datatype.CLASS_STRING, STR_LEN, -1, -1);
1813     *     dset = file.createCompoundDS(&quot;/CompoundDS&quot;, pgroup, DIMs, null, CHUNKs, 9, mnames, mdtypes, null, data);
1814     * }
1815     * catch (Exception ex) {
1816     *     failed(message, ex, file);
1817     *     return 1;
1818     * }
1819     * </pre>
1820     *
1821     * @param name
1822     *            the name of the dataset to create.
1823     * @param pgroup
1824     *            parent group where the new dataset is created.
1825     * @param dims
1826     *            the dimension size of the dataset.
1827     * @param maxdims
1828     *            the max dimension size of the dataset. maxdims is set to dims if maxdims = null.
1829     * @param chunks
1830     *            the chunk size of the dataset. No chunking if chunk = null.
1831     * @param gzip
1832     *            GZIP compression level (1 to 9). 0 or negative values if no compression.
1833     * @param memberNames
1834     *            the names of compound datatype
1835     * @param memberDatatypes
1836     *            the datatypes of the compound datatype
1837     * @param memberRanks
1838     *            the ranks of the members
1839     * @param memberDims
1840     *            the dim sizes of the members
1841     * @param data
1842     *            list of data arrays written to the new dataset, null if no data is written to the new
1843     *            dataset.
1844     *
1845     * @return the new compound dataset if successful; otherwise returns null.
1846     *
1847     * @throws Exception
1848     *             if there is a failure.
1849     */
1850    public static Dataset create(String name, Group pgroup, long[] dims, long[] maxdims, long[] chunks, int gzip,
1851            String[] memberNames, Datatype[] memberDatatypes, int[] memberRanks, long[][] memberDims, Object data)
1852                    throws Exception {
1853        log.trace("create(): start");
1854
1855        H5CompoundDS dataset = null;
1856        String fullPath = null;
1857        long did = -1;
1858        long tid = -1;
1859        long plist = -1;
1860        long sid = -1;
1861
1862        if ((pgroup == null) || (name == null) || (dims == null) || ((gzip > 0) && (chunks == null))
1863                || (memberNames == null) || (memberDatatypes == null) || (memberRanks == null)
1864                || (memberDims == null)) {
1865            log.debug("create(): one or more parameters are null");
1866            log.trace("create(): finish");
1867            return null;
1868        }
1869
1870        H5File file = (H5File) pgroup.getFileFormat();
1871        if (file == null) {
1872            log.debug("create(): parent group FileFormat is null");
1873            log.trace("create(): finish");
1874            return null;
1875        }
1876
1877        String path = HObject.separator;
1878        if (!pgroup.isRoot()) {
1879            path = pgroup.getPath() + pgroup.getName() + HObject.separator;
1880            if (name.endsWith("/")) {
1881                name = name.substring(0, name.length() - 1);
1882            }
1883            int idx = name.lastIndexOf("/");
1884            if (idx >= 0) {
1885                name = name.substring(idx + 1);
1886            }
1887        }
1888
1889        fullPath = path + name;
1890
1891        int typeSize = 0;
1892        int nMembers = memberNames.length;
1893        long[] mTypes = new long[nMembers];
1894        int memberSize = 1;
1895        for (int i = 0; i < nMembers; i++) {
1896            memberSize = 1;
1897            for (int j = 0; j < memberRanks[i]; j++) {
1898                memberSize *= memberDims[i][j];
1899            }
1900
1901            mTypes[i] = -1;
1902            // the member is an array
1903            if ((memberSize > 1) && (!memberDatatypes[i].isString())) {
1904                long tmptid = -1;
1905                if ((tmptid = memberDatatypes[i].createNative()) >= 0) {
1906                    try {
1907                        mTypes[i] = H5.H5Tarray_create(tmptid, memberRanks[i], memberDims[i]);
1908                    }
1909                    finally {
1910                        try {
1911                            H5.H5Tclose(tmptid);
1912                        }
1913                        catch (Exception ex) {
1914                            log.debug("create(): H5Tclose(tmptid {}) failure: ", tmptid, ex);
1915                        }
1916                    }
1917                }
1918            }
1919            else {
1920                mTypes[i] = memberDatatypes[i].createNative();
1921            }
1922            try {
1923                typeSize += H5.H5Tget_size(mTypes[i]);
1924            }
1925            catch (Exception ex) {
1926                log.debug("create(): array create H5Tget_size:", ex);
1927
1928                while (i > 0) {
1929                    try {
1930                        H5.H5Tclose(mTypes[i]);
1931                    }
1932                    catch (HDF5Exception ex2) {
1933                        log.debug("create(): H5Tclose(mTypes[{}] {}) failure: ", i, mTypes[i], ex2);
1934                    }
1935                    i--;
1936                }
1937                throw ex;
1938            }
1939        } // for (int i = 0; i < nMembers; i++) {
1940
1941        // setup chunking and compression
1942        boolean isExtentable = false;
1943        if (maxdims != null) {
1944            for (int i = 0; i < maxdims.length; i++) {
1945                if (maxdims[i] == 0) {
1946                    maxdims[i] = dims[i];
1947                }
1948                else if (maxdims[i] < 0) {
1949                    maxdims[i] = HDF5Constants.H5S_UNLIMITED;
1950                }
1951
1952                if (maxdims[i] != dims[i]) {
1953                    isExtentable = true;
1954                }
1955            }
1956        }
1957
1958        // HDF5 requires you to use chunking in order to define extendible
1959        // datasets. Chunking makes it possible to extend datasets efficiently,
1960        // without having to reorganize storage excessively. Using default size
1961        // of 64x...which has good performance
1962        if ((chunks == null) && isExtentable) {
1963            chunks = new long[dims.length];
1964            for (int i = 0; i < dims.length; i++)
1965                chunks[i] = Math.min(dims[i], 64);
1966        }
1967
1968        // prepare the dataspace and datatype
1969        int rank = dims.length;
1970
1971        try {
1972            sid = H5.H5Screate_simple(rank, dims, maxdims);
1973
1974            // figure out creation properties
1975            plist = HDF5Constants.H5P_DEFAULT;
1976
1977            tid = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, typeSize);
1978            int offset = 0;
1979            for (int i = 0; i < nMembers; i++) {
1980                H5.H5Tinsert(tid, memberNames[i], offset, mTypes[i]);
1981                offset += H5.H5Tget_size(mTypes[i]);
1982            }
1983
1984            if (chunks != null) {
1985                plist = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
1986
1987                H5.H5Pset_layout(plist, HDF5Constants.H5D_CHUNKED);
1988                H5.H5Pset_chunk(plist, rank, chunks);
1989
1990                // compression requires chunking
1991                if (gzip > 0) {
1992                    H5.H5Pset_deflate(plist, gzip);
1993                }
1994            }
1995
1996            long fid = file.getFID();
1997
1998            log.trace("create(): create dataset");
1999            did = H5.H5Dcreate(fid, fullPath, tid, sid, HDF5Constants.H5P_DEFAULT, plist, HDF5Constants.H5P_DEFAULT);
2000            log.trace("create(): new H5CompoundDS");
2001            dataset = new H5CompoundDS(file, name, path);
2002        }
2003        finally {
2004            try {
2005                H5.H5Pclose(plist);
2006            }
2007            catch (HDF5Exception ex) {
2008                log.debug("create(): H5Pclose(plist {}) failure: ", plist, ex);
2009            }
2010            try {
2011                H5.H5Sclose(sid);
2012            }
2013            catch (HDF5Exception ex) {
2014                log.debug("create(): H5Sclose(sid {}) failure: ", sid, ex);
2015            }
2016            try {
2017                H5.H5Tclose(tid);
2018            }
2019            catch (HDF5Exception ex) {
2020                log.debug("create(): H5Tclose(tid {}) failure: ", tid, ex);
2021            }
2022            try {
2023                H5.H5Dclose(did);
2024            }
2025            catch (HDF5Exception ex) {
2026                log.debug("create(): H5Dclose(did {}) failure: ", did, ex);
2027            }
2028
2029            for (int i = 0; i < nMembers; i++) {
2030                try {
2031                    H5.H5Tclose(mTypes[i]);
2032                }
2033                catch (HDF5Exception ex) {
2034                    log.debug("create(): H5Tclose(mTypes[{}] {}) failure: ", i, mTypes[i], ex);
2035                }
2036            }
2037        }
2038
2039        if (dataset != null) {
2040            pgroup.addToMemberList(dataset);
2041            if (data != null) {
2042                dataset.init();
2043                long selected[] = dataset.getSelectedDims();
2044                for (int i = 0; i < rank; i++) {
2045                    selected[i] = dims[i];
2046                }
2047                dataset.write(data);
2048            }
2049        }
2050
2051        log.trace("create(): finish");
2052        return dataset;
2053    }
2054
2055    /*
2056     * (non-Javadoc)
2057     *
2058     * @see hdf.object.Dataset#isString(long)
2059     */
2060    @Override
2061    public boolean isString(long tid) {
2062        boolean b = false;
2063        try {
2064            b = (HDF5Constants.H5T_STRING == H5.H5Tget_class(tid));
2065        }
2066        catch (Exception ex) {
2067            b = false;
2068        }
2069
2070        return b;
2071    }
2072
2073    /*
2074     * (non-Javadoc)
2075     *
2076     * @see hdf.object.Dataset#getSize(long)
2077     */
2078    @Override
2079    public long getSize(long tid) {
2080        long tsize = -1;
2081
2082        try {
2083            tsize = H5.H5Tget_size(tid);
2084        }
2085        catch (Exception ex) {
2086            tsize = -1;
2087        }
2088
2089        return tsize;
2090    }
2091
2092    /*
2093     * (non-Javadoc)
2094     *
2095     * @see hdf.object.Dataset#isVirtual()
2096     */
2097    @Override
2098    public boolean isVirtual() {
2099        return isVirtual;
2100    }
2101
2102    /*
2103     * (non-Javadoc)
2104     *
2105     * @see hdf.object.Dataset#getVirtualFilename(int)
2106     */
2107    @Override
2108    public String getVirtualFilename(int index) {
2109        if (isVirtual)
2110            return virtualNameList.get(index);
2111        else
2112            return null;
2113    }
2114
2115    /*
2116     * (non-Javadoc)
2117     *
2118     * @see hdf.object.Dataset#getVirtualMaps()
2119     */
2120    @Override
2121    public int getVirtualMaps() {
2122        if (isVirtual)
2123            return virtualNameList.size();
2124        else
2125            return -1;
2126    }
2127
2128    /**
2129     * Given an array of bytes representing a compound Datatype and a start index and length, converts
2130     * len number of bytes into the correct Object type and returns it.
2131     *
2132     * @param data
2133     *            The byte array representing the data of the compound Datatype
2134     * @param data_type
2135     *            The type of data to convert the bytes to
2136     * @param start
2137     *            The start index of the bytes to get
2138     * @param len
2139     *            The number of bytes to convert
2140     * @return The converted type of the bytes
2141     */
2142    private Object convertCompoundByteMember(byte[] data, long data_type, long start, long len) {
2143        Object currentData = null;
2144
2145        try {
2146            long typeClass = H5.H5Tget_class(data_type);
2147
2148            if (typeClass == HDF5Constants.H5T_INTEGER) {
2149                long size = H5.H5Tget_size(data_type);
2150
2151                currentData = HDFNativeData.byteToInt((int) start, (int) (len / size), data);
2152            }
2153            else if (typeClass == HDF5Constants.H5T_FLOAT) {
2154                currentData = HDFNativeData.byteToDouble((int) start, 1, data);
2155            }
2156        }
2157        catch (Exception ex) {
2158            log.debug("convertCompoundByteMember(): conversion failure: ", ex);
2159        }
2160
2161        return currentData;
2162    }
2163}