/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the files COPYING and Copyright.html. *
 * COPYING can be found at the root of the source code distribution tree.    *
 * Or, see https://support.hdfgroup.org/products/licenses.html               *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.h5;

import java.lang.reflect.Array;
import java.text.DecimalFormat;
import java.util.List;
import java.util.Vector;

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
import hdf.hdf5lib.HDFNativeData;
import hdf.hdf5lib.exceptions.HDF5DataFiltersException;
import hdf.hdf5lib.exceptions.HDF5Exception;
import hdf.hdf5lib.structs.H5O_info_t;
import hdf.object.Attribute;
import hdf.object.CompoundDS;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.view.Tools;

/**
 * The H5CompoundDS class defines an HDF5 dataset of compound datatypes.
 * <p>
 * An HDF5 dataset is an object composed of a collection of data elements, or raw data, and metadata
 * that stores a description of the data elements, data layout, and all other information necessary
 * to write, read, and interpret the stored data.
 * <p>
 * An HDF5 compound datatype is similar to a struct in C or a common block in Fortran: it is a
 * collection of one or more atomic types or small arrays of such types. Each member of a compound
 * type has a name which is unique within that type, and a byte offset that determines the first
 * byte (smallest byte address) of that member in a compound datum.
 * <p>
 * For more information on HDF5 datasets and datatypes, read the <a href=
 * "https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5
 * User's Guide</a>.
 * <p>
 * There are two basic types of compound datasets: simple compound data and nested compound data.
 * Members of a simple compound dataset have atomic datatypes. Members of a nested compound dataset
 * are compound data or arrays of compound data, as sketched below.
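 *
 * <pre>
 * // A hypothetical sketch of a nested compound, in C terms; member "inner"
 * // is itself a compound:
 * typedef struct inner_t { int x; float y; } inner_t;
 * typedef struct outer_t { int a; inner_t inner; } outer_t;
 * </pre>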
 * <p>
 * Since Java does not understand C structures, we cannot directly read/write compound data values
 * as in the following C example.
 *
 * <pre>
 * typedef struct s1_t {
 *         int    a;
 *         float  b;
 *         double c;
 *         } s1_t;
 *     s1_t       s1[LENGTH];
 *     ...
 *     H5Dwrite(..., s1);
 *     H5Dread(..., s1);
 * </pre>
 *
 * Values of compound data fields are stored in a java.util.Vector object. We read and write
 * compound data by field instead of as a whole structure. For the example above, the
 * java.util.Vector object has three elements: int[LENGTH], float[LENGTH] and double[LENGTH].
 * Since Java understands the primitive datatypes int, float and double, we are able to
 * read/write the compound data by field, as sketched below.
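 *
 * <pre>
 * // A minimal read-by-field sketch for the compound above; the path
 * // "/g0/s1" and the field order are assumptions for illustration.
 * H5CompoundDS dset = (H5CompoundDS) file.get("/g0/s1");
 * dset.init();
 * java.util.List data = (java.util.List) dset.getData();
 * int[] a = (int[]) data.get(0);       // field "a"
 * float[] b = (float[]) data.get(1);   // field "b"
 * double[] c = (double[]) data.get(2); // field "c"
 * </pre>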
 *
 * @version 1.1 9/4/2007
 * @author Peter X. Cao
 */
public class H5CompoundDS extends CompoundDS {
    private static final long serialVersionUID = -5968625125574032736L;

    private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5CompoundDS.class);

    /**
     * The list of attributes of this data object. Members of the list are instances of Attribute.
     */
    private List<Attribute> attributeList;

    private int nAttributes = -1;

    private H5O_info_t obj_info;

    /**
     * A list of names of all fields including nested fields.
     * <p>
     * The nested names are separated by CompoundDS.separator. For example, if compound dataset "A" has
     * the following nested structure,
     *
     * <pre>
     * A --&gt; m01
     * A --&gt; m02
     * A --&gt; nest1 --&gt; m11
     * A --&gt; nest1 --&gt; m12
     * A --&gt; nest1 --&gt; nest2 --&gt; m21
     * A --&gt; nest1 --&gt; nest2 --&gt; m22
     * i.e.
     * A = { m01, m02, nest1{m11, m12, nest2{ m21, m22}}}
     * </pre>
     *
     * The flatNameList of compound dataset "A" will contain the six flattened names
     * {m01, m02, nest1[m11, nest1[m12, nest1[nest2[m21, nest1[nest2[m22}, where each
     * nesting level is joined by CompoundDS.separator (shown in this example as "[").
     */
    private List<String> flatNameList;

    /**
     * A list of datatypes of all fields including nested fields.
     */
    private List<Datatype> flatTypeList;

    /** flag to indicate if the dataset is an external dataset */
    private boolean isExternal = false;

    /** flag to indicate if the dataset is a virtual dataset */
    private boolean isVirtual = false;
    private List<String> virtualNameList;

    /**
     * Constructs an instance of an HDF5 compound dataset with the given file, dataset name and path.
     * <p>
     * The dataset object represents an existing dataset in the file. For example, new
     * H5CompoundDS(file, "dset1", "/g0/") constructs a dataset object that corresponds to the
     * dataset "dset1" at group "/g0/".
     * <p>
     * This object is usually constructed at FileFormat.open(), which loads the file structure and
     * object information into memory. It is rarely used elsewhere.
     *
     * @param theFile
     *            the file that contains the data object.
     * @param theName
     *            the name of the data object, e.g. "dset".
     * @param thePath
     *            the full path of the data object, e.g. "/arrays/".
     */
    public H5CompoundDS(FileFormat theFile, String theName, String thePath) {
        this(theFile, theName, thePath, null);
    }

    /**
     * @deprecated Not for public use in the future.<br>
     *             Use {@link #H5CompoundDS(FileFormat, String, String)} instead.
     *
     * @param theFile
     *            the file that contains the data object.
     * @param theName
     *            the name of the data object, e.g. "dset".
     * @param thePath
     *            the full path of the data object, e.g. "/arrays/".
     * @param oid
     *            the oid of the data object.
     */
    @Deprecated
    public H5CompoundDS(FileFormat theFile, String theName, String thePath, long[] oid) {
        super(theFile, theName, thePath, oid);
        obj_info = new H5O_info_t(-1L, -1L, 0, 0, -1L, 0L, 0L, 0L, 0L, null, null, null);

        if ((oid == null) && (theFile != null)) {
            // retrieve the object ID
            try {
                byte[] ref_buf = H5.H5Rcreate(theFile.getFID(), this.getFullName(), HDF5Constants.H5R_OBJECT, -1);
                this.oid = new long[1];
                this.oid[0] = HDFNativeData.byteToLong(ref_buf, 0);
            }
            catch (Exception ex) {
                log.debug("constructor ID {} for {} failed H5Rcreate", theFile.getFID(), this.getFullName());
            }
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#open()
     */
    @Override
    public long open() {
        log.trace("open(): start");

        long did = -1;

        try {
            did = H5.H5Dopen(getFID(), getPath() + getName(), HDF5Constants.H5P_DEFAULT);
            log.trace("open(): did={}", did);
        }
        catch (HDF5Exception ex) {
            log.debug("open(): Failed to open dataset {}: ", getPath() + getName(), ex);
            did = -1;
        }

        log.trace("open(): finish");
        return did;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#close(long)
     */
    @Override
    public void close(long did) {
        log.trace("close(): start");

        if (did >= 0) {
            try {
                H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_LOCAL);
            }
            catch (Exception ex) {
                log.debug("close(): H5Fflush(did {}) failure: ", did, ex);
            }
            try {
                H5.H5Dclose(did);
            }
            catch (HDF5Exception ex) {
                log.debug("close(): H5Dclose(did {}) failure: ", did, ex);
            }
        }

        log.trace("close(): finish");
    }

    /**
     * Retrieves the datatype and dataspace information from the file and sets up the dataset
     * in memory.
     * <p>
     * init() is designed to support lazy operation in a dataset object. When a
     * data object is retrieved from file, the datatype, dataspace and raw data are
     * not loaded into memory. When it is asked to read the raw data from file,
     * init() is first called to get the datatype and dataspace information, then
     * the raw data is loaded from the file.
     * <p>
     * init() is also used to reset the selection of a dataset (start, stride and
     * count) to the default, which is the entire dataset for 1D or 2D datasets. In
     * the following example, init() at step 1) retrieves datatype and dataspace
     * information from file. getData() at step 3) reads only one data point. init()
     * at step 4) resets the selection to the whole dataset. getData() at step 6)
     * reads the values of the whole dataset into memory.
     *
     * <pre>
     * dset = (Dataset) file.get(NAME_DATASET);
     *
     * // 1) get datatype and dataspace information from file
     * dset.init();
     * rank = dset.getRank(); // rank = 2, a 2D dataset
     * count = dset.getSelectedDims();
     * start = dset.getStartDims();
     * dims = dset.getDims();
     *
     * // 2) select only one data point
     * for (int i = 0; i &lt; rank; i++) {
     *     start[i] = 0;
     *     count[i] = 1;
     * }
     *
     * // 3) read one data point
     * data = dset.getData();
     *
     * // 4) reset selection to the whole dataset
     * dset.init();
     *
     * // 5) clean the memory data buffer
     * dset.clearData();
     *
     * // 6) Read the whole dataset
     * data = dset.getData();
     * </pre>
     */
    @Override
    public void init() {
        log.trace("init(): start");

        if (inited) {
            resetSelection();
            log.trace("init(): Dataset already initialized");
            log.trace("init(): finish");
            return; // already called. Initialize only once
        }

        long did = -1;
        long tid = -1;
        long sid = -1;
        flatNameList = new Vector<>();
        flatTypeList = new Vector<>();

        did = open();
        if (did >= 0) {
            // check if it is an external or virtual dataset
            long pid = -1;
            try {
                pid = H5.H5Dget_create_plist(did);
                try {
                    int nfiles = H5.H5Pget_external_count(pid);
                    isExternal = (nfiles > 0);
                    int layout_type = H5.H5Pget_layout(pid);
                    isVirtual = (layout_type == HDF5Constants.H5D_VIRTUAL);
                    if (isVirtual) {
                        try {
                            long vmaps = H5.H5Pget_virtual_count(pid);
                            if (vmaps > 0) {
                                virtualNameList = new Vector<>();
                                for (long next = 0; next < vmaps; next++) {
                                    try {
                                        String fname = H5.H5Pget_virtual_filename(pid, next);
                                        virtualNameList.add(fname);
                                        log.trace("init(): virtualNameList[{}]={}", next, fname);
                                    }
                                    catch (Throwable err) {
                                        log.trace("init(): vds[{}] continue", next);
                                        continue;
                                    }
                                }
                            }
                        }
                        catch (Throwable err) {
                            log.debug("init(): vds count error: ", err);
                        }
                    }
                    log.trace("init(): pid={} nfiles={} isExternal={} isVirtual={}", pid, nfiles, isExternal, isVirtual);
                }
                catch (Exception ex) {
                    log.debug("init(): check if it is an external or virtual dataset:", ex);
                }
            }
            catch (Exception ex) {
                log.debug("init(): H5Dget_create_plist() failure: ", ex);
            }
            finally {
                try {
                    H5.H5Pclose(pid);
                }
                catch (Exception ex) {
                    log.debug("init(): H5Pclose(pid {}) failure: ", pid, ex);
                }
            }

            try {
                sid = H5.H5Dget_space(did);
                rank = H5.H5Sget_simple_extent_ndims(sid);
                tid = H5.H5Dget_type(did);
                log.trace("init(): tid={} sid={} rank={}", tid, sid, rank);
                datatype = new H5Datatype(tid);

                log.trace("init(): tid={} has isText={} : isVLEN={} : isEnum={} : isUnsigned={} : isRegRef={}", tid,
                        datatype.isText(), datatype.isVLEN(), ((H5Datatype) datatype).isEnum(), datatype.isUnsigned(), ((H5Datatype) datatype).isRegRef());

                if (rank == 0) {
                    // a scalar data point
                    rank = 1;
                    dims = new long[1];
                    dims[0] = 1;
361                    log.trace("init(): rank is a scalar data point");
                }
                else {
                    dims = new long[rank];
                    maxDims = new long[rank];
                    H5.H5Sget_simple_extent_dims(sid, dims, maxDims);
                    log.trace("init(): rank={}, dims={}, maxDims={}", rank, dims, maxDims);
                }

                startDims = new long[rank];
                selectedDims = new long[rank];

                // initialize member information
                ((H5Datatype) getDatatype()).extractCompoundInfo("", flatNameList, flatTypeList);
                numberOfMembers = flatNameList.size();
                log.trace("init(): numberOfMembers={}", numberOfMembers);

                memberNames = new String[numberOfMembers];
                memberTypes = new Datatype[numberOfMembers];
                memberOrders = new int[numberOfMembers];
                isMemberSelected = new boolean[numberOfMembers];
                memberDims = new Object[numberOfMembers];

                for (int i = 0; i < numberOfMembers; i++) {
                    isMemberSelected[i] = true;
                    memberOrders[i] = 1;
                    memberDims[i] = null;

                    try {
                        memberTypes[i] = flatTypeList.get(i);
                        log.trace("init()[{}]: memberTypes[{}]={}", i, i, memberTypes[i].getDescription());

                        if (memberTypes[i].isArray()) {
                            long mdim[] = memberTypes[i].getArrayDims();
                            int idim[] = new int[mdim.length];
                            int arrayNpoints = 1;

                            for (int j = 0; j < idim.length; j++) {
                                idim[j] = (int) mdim[j];
                                arrayNpoints *= idim[j];
                            }

                            memberDims[i] = idim;
                            memberOrders[i] = arrayNpoints;
                        }
                    }
                    catch (Exception ex) {
                        log.debug("init()[{}]: memberTypes[{}] get failure: ", i, i, ex);
                        memberTypes[i] = null;
                    }

                    try {
                        memberNames[i] = flatNameList.get(i);
                        log.trace("init()[{}]: memberNames[{}]={}", i, i, memberNames[i]);
                    }
                    catch (Exception ex) {
                        log.debug("init()[{}]: memberNames[{}] get failure: ", i, i, ex);
                        memberNames[i] = "null";
                    }
                } // for (int i=0; i<numberOfMembers; i++)

                inited = true;
            }
            catch (HDF5Exception ex) {
                numberOfMembers = 0;
                memberNames = null;
                memberTypes = null;
                memberOrders = null;
                log.debug("init(): ", ex);
            }
            finally {
                getDatatype().close(tid);

                try {
                    H5.H5Sclose(sid);
                }
                catch (HDF5Exception ex2) {
                    log.debug("init(): H5Sclose(sid {}) failure: ", sid, ex2);
                }
            }

            log.trace("init(): close dataset");
            close(did);
        }
        else {
            log.debug("init(): failed to open dataset");
        }

        resetSelection();
        log.trace("init(): finish");
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.DataFormat#hasAttribute()
     */
    @Override
    public boolean hasAttribute() {
        obj_info.num_attrs = nAttributes;

        if (obj_info.num_attrs < 0) {
            long did = open();
            if (did >= 0) {
                try {
                    obj_info = H5.H5Oget_info(did);
                    nAttributes = (int) obj_info.num_attrs;
                }
                catch (Exception ex) {
                    obj_info.num_attrs = 0;
                    log.debug("hasAttribute(): get object info failure: ", ex);
                }
                close(did);
            }
            else {
                log.debug("hasAttribute(): could not open dataset");
            }
        }

        log.trace("hasAttribute(): nAttributes={}", obj_info.num_attrs);
        return (obj_info.num_attrs > 0);
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#getDatatype()
     */
    @Override
    public Datatype getDatatype() {
        log.trace("getDatatype(): start");

        if (datatype == null) {
            log.trace("getDatatype(): datatype == null");
            long did = -1;
            long tid = -1;

            did = open();
            if (did >= 0) {
                try {
                    tid = H5.H5Dget_type(did);
                    datatype = new H5Datatype(tid);
                }
                catch (Exception ex) {
                    log.debug("getDatatype(): ", ex);
                }
                finally {
                    try {
                        H5.H5Tclose(tid);
                    }
                    catch (HDF5Exception ex) {
                        log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex);
                    }
                    try {
                        H5.H5Dclose(did);
                    }
                    catch (HDF5Exception ex) {
                        log.debug("getDatatype(): H5Dclose(did {}) failure: ", did, ex);
                    }
                }
            }
        }

        log.trace("getDatatype(): finish");
        return datatype;
    }

    @Override
    public Object getFillValue() {
        return null;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#clear()
     */
    @Override
    public void clear() {
        super.clear();

        if (attributeList != null) {
            ((Vector<Attribute>) attributeList).setSize(0);
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#readBytes()
     */
    @Override
    public byte[] readBytes() throws HDF5Exception {
        log.trace("readBytes(): start");

        byte[] theData = null;

        if (!isInited())
            init();

        long did = open();
        if (did >= 0) {
            long fspace = -1;
            long mspace = -1;
            long tid = -1;

            try {
                long[] lsize = { 1 };
                for (int j = 0; j < selectedDims.length; j++) {
                    lsize[0] *= selectedDims[j];
                }

                fspace = H5.H5Dget_space(did);
                mspace = H5.H5Screate_simple(rank, selectedDims, null);

                // set the rectangle selection
                // HDF5 bug: for scalar dataset, H5Sselect_hyperslab gives core dump
                if (rank * dims[0] > 1) {
                    H5.H5Sselect_hyperslab(fspace, HDF5Constants.H5S_SELECT_SET, startDims, selectedStride,
                            selectedDims, null); // set block to 1
                }

                tid = H5.H5Dget_type(did);
                long size = H5.H5Tget_size(tid) * lsize[0];
                log.trace("readBytes(): size = {}", size);

                if (size < Integer.MIN_VALUE || size > Integer.MAX_VALUE) throw new Exception("Invalid int size");

                theData = new byte[(int) size];

                log.trace("readBytes(): H5Dread: did={} tid={} fspace={} mspace={}", did, tid, fspace, mspace);
                H5.H5Dread(did, tid, mspace, fspace, HDF5Constants.H5P_DEFAULT, theData);
            }
            catch (Exception ex) {
                log.debug("readBytes(): failed to read data: ", ex);
            }
            finally {
                try {
                    H5.H5Sclose(fspace);
                }
                catch (Exception ex2) {
                    log.debug("readBytes(): H5Sclose(fspace {}) failure: ", fspace, ex2);
                }
                try {
                    H5.H5Sclose(mspace);
                }
                catch (Exception ex2) {
                    log.debug("readBytes(): H5Sclose(mspace {}) failure: ", mspace, ex2);
                }
                try {
                    H5.H5Tclose(tid);
                }
                catch (HDF5Exception ex2) {
                    log.debug("readBytes(): H5Tclose(tid {}) failure: ", tid, ex2);
                }
                close(did);
            }
        }

        log.trace("readBytes(): finish");
        return theData;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#read()
     */
    @Override
    public Object read() throws Exception {
        log.trace("read(): start");

        List<Object> memberDataList = null;
        H5Datatype DSdatatype = null;

        if (!isInited())
            init();

        if (numberOfMembers <= 0) {
            log.debug("read(): Dataset contains no members");
            log.trace("read(): finish");
            return null; // this compound dataset does not have any member
        }

        try {
            DSdatatype = (H5Datatype) this.getDatatype();
        }
        catch (Exception ex) {
            log.debug("read(): get datatype: ", ex);
        }

        if (isExternal) {
            String pdir = this.getFileFormat().getAbsoluteFile().getParent();

            if (pdir == null) {
                pdir = ".";
            }
            System.setProperty("user.dir", pdir);// H5.H5Dchdir_ext(pdir);
            log.trace("read(): External dataset: user.dir={}", pdir);
        }

        log.trace("read(): open dataset");

        long did = open();
        if (did >= 0) {
            long[] spaceIDs = { -1, -1 }; // spaceIDs[0]=mspace, spaceIDs[1]=fspace

            try {
                long totalSelectedSpacePoints = selectHyperslab(did, spaceIDs);

                log.trace("read(): selected {} points in dataset dataspace", totalSelectedSpacePoints);

                if (totalSelectedSpacePoints == 0) {
                    log.debug("read(): No data to read. Dataset or selected subset is empty.");
                    log.trace("read(): finish");
                    throw new HDF5Exception("No data to read.\nEither the dataset or the selected subset is empty.");
                }

                if (totalSelectedSpacePoints < Integer.MIN_VALUE || totalSelectedSpacePoints > Integer.MAX_VALUE) {
                    log.debug("read(): totalSelectedSpacePoints outside valid Java int range; unsafe cast");
                    log.trace("read(): finish");
                    throw new HDF5Exception("Invalid int size");
                }

                if (log.isDebugEnabled()) {
                    // check if storage space is allocated
                    try {
                        long ssize = H5.H5Dget_storage_size(did);
                        log.trace("read(): Storage space allocated = {}.", ssize);
                    }
                    catch (Exception ex) {
                        log.debug("read(): check if storage space is allocated:", ex);
                    }
                }

                /*
                 * Read each member of the compound datatype into a separate byte
                 * array, then extract the data into its type, such as int, long,
                 * float, etc.
                 */
                /*
                 * TODO: Can potentially just re-use the global lists
                 */
                List<Datatype> atomicList = new Vector<>();
                DSdatatype.extractCompoundInfo(null, null, atomicList);
                memberDataList = new Vector<>(atomicList.size());

                log.trace("read(): foreach nMembers={}", atomicList.size());

                for (int i = 0; i < atomicList.size(); i++) {
                    H5Datatype member_type = null;
                    Datatype member_base = null;
                    String member_name = null;
                    Object member_data = null;
                    int member_size = 0;

                    if (!isMemberSelected[i]) {
                        log.debug("read(): Member[{}] is not selected", i);
                        continue; // the field is not selected
                    }

                    try {
                        member_type = (H5Datatype) atomicList.get(i);
                    }
                    catch (Exception ex) {
                        log.debug("read(): get member {} failure: ", i, ex);
                        continue;
                    }

                    try {
                        member_base = member_type.getDatatypeBase();
                    }
                    catch (Exception ex) {
                        log.debug("read(): get member {} base type failure: ", i, ex);
                        continue;
                    }

                    try {
                        member_name = new String(memberNames[i]);
                    }
                    catch (Exception ex) {
                        log.debug("read(): get member {} name failure: ", i, ex);
                        member_name = "null";
                    }

                    try {
                        member_size = (int) member_type.getDatatypeSize();
                    }
                    catch (Exception ex) {
                        log.debug("read(): get member {} size failure: ", i, ex);
                        continue;
                    }

                    try {
                        member_data = member_type.allocateArray((int) totalSelectedSpacePoints);
                    }
                    catch (OutOfMemoryError err) {
                        member_data = null;
                        throw new HDF5Exception("Out Of Memory.");
                    }
                    catch (Exception ex) {
                        log.debug("read(): Member[{}]: ", i, ex);
                        member_data = null;
                    }

                    log.trace("read(): {} Member[{}] is type {} of size={}", member_name, i, member_type.getDescription(), member_size);

                    if (member_data != null) {
                        long comp_tid = -1;
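                        // createCompoundFieldType builds a compound datatype containing only
                        // this member, so the library reads just that field's values.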
                        try {
                            comp_tid = member_type.createCompoundFieldType(flatNameList.get(i));
                        }
                        catch (HDF5Exception ex) {
                            log.debug("read(): unable to create compound field type for Member[{}] of type {}: ", i, member_type.getDescription(), ex);

                            String[] nullValues = new String[(int) totalSelectedSpacePoints];
                            for (int j = 0; j < totalSelectedSpacePoints; j++) {
                                nullValues[j] = "NULL";
                            }
                            memberDataList.add(nullValues);
                            log.debug("read(): {} Member[{}] createCompoundFieldType failure:", member_name, i, ex);
                            continue;
                        }

                        /*
                         * Actually read the data for this member now that everything has been set up
                         */
                        try {
                            if (member_type.isVLEN() || (member_type.isArray() && member_base.isVLEN())) {
                                log.trace("read(): Member[{}]: H5DreadVL did={} comp_tid={} spaceIDs[0]={} spaceIDs[1]={}", i, did, comp_tid, spaceIDs[0], spaceIDs[1]);
                                H5.H5DreadVL(did, comp_tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, (Object[]) member_data);
                            }
                            else if ((member_base != null) && member_base.isCompound()) {
                                log.trace("read(): Member[{}]: H5Dread did={} comp_tid={} spaceIDs[0]={} spaceIDs[1]={}", i, did, comp_tid, spaceIDs[0], spaceIDs[1]);
                                H5.H5Dread(did, comp_tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, (byte[]) member_data, true);
                            }
                            else {
                                log.trace("read(): Member[{}]: H5Dread did={} comp_tid={} spaceIDs[0]={} spaceIDs[1]={}", i, did, comp_tid, spaceIDs[0], spaceIDs[1]);
                                H5.H5Dread(did, comp_tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, member_data);
                            }
                        }
                        catch (HDF5DataFiltersException exfltr) {
                            log.debug("read(): {} Member[{}] read failure:", member_name, i, exfltr);
                            log.trace("read(): finish");
                            throw new Exception("Filter not available exception: " + exfltr.getMessage(), exfltr);
                        }
                        catch (Exception ex) {
                            String[] errValues = new String[(int) totalSelectedSpacePoints];
                            for (int j = 0; j < totalSelectedSpacePoints; j++) {
                                errValues[j] = "*ERROR*";
                            }
                            memberDataList.add(errValues);
                            log.debug("read(): {} Member[{}] read failure:", member_name, i, ex);
                            continue;
                        }
                        finally {
                            DSdatatype.close(comp_tid);
                        }

                        /*
                         * Perform any necessary data conversions
                         */
                        if (member_type.isUnsigned()) {
                            log.trace("read(): Member[{}]: converting from unsigned C-type integers", i);
                            member_data = Dataset.convertFromUnsignedC(member_data, null);
                        }
                        else if ((member_type.isString()) && convertByteToString && !member_type.isVarStr()) {
                            if (Tools.getJavaObjectRuntimeClass(member_data) == 'B') {
                                log.trace("read(): Member[{}]: converting byte array to string array", i);
                                member_data = byteToString((byte[]) member_data, member_size / memberOrders[i]);
                            }
                        }
                        else if (member_type.isRef()) {
                            if (Tools.getJavaObjectRuntimeClass(member_data) == 'B') {
                                log.trace("read(): Member[{}]: converting byte array to long array", i);
                                member_data = HDFNativeData.byteToLong((byte[]) member_data);
                            }
                        }
                        else if (member_type.isArray() && member_base.isCompound()) {
                            // Since compounds are read into memory as a byte array, discover each member
                            // type and size and convert the byte array to the correct type before adding
                            // it to the list
                            long atom_tid = -1;
                            try {
                                atom_tid = member_type.createNative();

                                int numDims = H5.H5Tget_array_ndims(atom_tid);
                                long[] dims = new long[numDims];
                                H5.H5Tget_array_dims(atom_tid, dims);
                                int numberOfCompounds = (int) dims[0] * (int) totalSelectedSpacePoints;
                                int compoundSize = (member_size * (int) totalSelectedSpacePoints) / numberOfCompounds;
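                                // compoundSize is the size in bytes of one element of the
                                // base compound type (total bytes read for this member
                                // divided by the number of compound elements).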

                                Object current_data = new Object[numberOfCompounds];

                                long base_tid = -1;
                                long memberOffsets[] = null;
                                long memberLengths[] = null;
                                long memberTypes[] = null;
                                int numberOfMembers;

                                try {
                                    base_tid = H5.H5Tget_super(atom_tid);
                                    numberOfMembers = H5.H5Tget_nmembers(base_tid);
                                    memberOffsets = new long[numberOfMembers];
                                    memberLengths = new long[numberOfMembers];
                                    memberTypes = new long[numberOfMembers];

                                    for (int j = 0; j < numberOfMembers; j++) {
                                        memberOffsets[j] = H5.H5Tget_member_offset(base_tid, j);
                                        memberTypes[j] = H5.H5Tget_member_type(base_tid, j);
                                    }

                                    for (int j = 0; j < numberOfMembers; j++) {
                                        if (j < numberOfMembers - 1) {
                                            memberLengths[j] = (memberOffsets[j + 1] - memberOffsets[j]);
                                        }
                                        else {
                                            memberLengths[j] = (compoundSize - memberOffsets[j]);
                                        }
                                    }

                                    for (int j = 0; j < numberOfCompounds; j++) {
                                        Object field_data = new Object[numberOfMembers];

                                        for (int k = 0; k < numberOfMembers; k++) {
                                            Object converted = convertCompoundByteMember((byte[]) member_data, memberTypes[k], memberOffsets[k] + (compoundSize * j),
                                                    memberLengths[k]);

                                            ((Object[]) field_data)[k] = Array.get(converted, 0);
                                        }

                                        ((Object[]) current_data)[j] = field_data;
                                    }
                                }
                                catch (Exception ex) {
                                    log.debug("read(): Convert Array of Compounds failure: ", ex);
                                    continue;
                                }
                                finally {
                                    for (int j = 0; j < memberTypes.length; j++) {
                                        member_type.close(memberTypes[j]);
                                    }

                                    member_type.close(base_tid);
                                }

                                memberDataList.add(current_data);
                            }
                            catch (Exception ex) {
                                log.debug("read(): Member[{}]: list.add failure(): ", i, ex);
                            }
                            finally {
                                member_type.close(atom_tid);
                            }

                            // the converted data (not the raw byte array) was added above
                            continue;
                        } // if (member_type.isArray() && member_base.isCompound())
                    } // if (member_data != null)
                    else {
                        String[] errValues = new String[(int) totalSelectedSpacePoints];
                        String errStr = "ERROR";

                        for (int j = 0; j < totalSelectedSpacePoints; j++)
                            errValues[j] = errStr;

                        memberDataList.add(errValues);

                        log.debug("read(): {} Member[{}] of type {} member_data is null", member_name, i, member_type.getDescription());

                        // an error placeholder was added; do not also add the null member_data
                        continue;
                    }

                    memberDataList.add(member_data);
                } // end of for (int i=0; i<num_members; i++)
            }
            finally {
                if (HDF5Constants.H5S_ALL != spaceIDs[0]) {
                    try {
                        H5.H5Sclose(spaceIDs[0]);
                    }
                    catch (Exception ex) {
                        log.debug("read(): H5Sclose(spaceIDs[0] {}) failure: ", spaceIDs[0], ex);
                    }
                }

                if (HDF5Constants.H5S_ALL != spaceIDs[1]) {
                    try {
                        H5.H5Sclose(spaceIDs[1]);
                    }
                    catch (Exception ex) {
                        log.debug("read(): H5Sclose(spaceIDs[1] {}) failure: ", spaceIDs[1], ex);
                    }
                }

                close(did);
            }
        }

        log.trace("read(): finish");
        return memberDataList;
    }

    /**
     * Writes the given data buffer into this dataset in a file.
     * <p>
     * The data buffer is a vector that contains the data values of compound fields. The data is
     * written to the file field by field, as sketched below.
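     *
     * <pre>
     * // A hypothetical sketch for the compound { int a; float b; double c; }:
     * // one array per selected member, in member order.
     * java.util.Vector&lt;Object&gt; buf = new java.util.Vector&lt;&gt;();
     * buf.add(new int[] { 1, 2 });        // field "a"
     * buf.add(new float[] { 1f, 2f });    // field "b"
     * buf.add(new double[] { 1.0, 2.0 }); // field "c"
     * dset.write(buf);
     * </pre>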
     *
     * @param buf
     *            The vector that contains the data values of compound fields.
     *
     * @throws HDF5Exception
     *             If there is an error at the HDF5 library level.
     */
    @Override
    public void write(Object buf) throws HDF5Exception {
        log.trace("write(): start");

        Object tmpData = null;
        H5Datatype DSdatatype = null;

        if ((buf == null) || (numberOfMembers <= 0) || !(buf instanceof List)) {
            log.debug("write(): buf is null or invalid or contains no members");
            log.trace("write(): finish");
            return;
        }

        if (!isInited())
            init();

        try {
            DSdatatype = (H5Datatype) this.getDatatype();
        }
        catch (Exception ex) {
            log.debug("write(): get datatype: ", ex);
        }

        /*
         * Check for any unsupported datatypes and fail early before
         * attempting to write to the dataset
         */
        if (DSdatatype.isArray() || DSdatatype.isVLEN()) {
            H5Datatype baseType = (H5Datatype) DSdatatype.getDatatypeBase();

            if (baseType != null) {
                if (DSdatatype.isArray() && baseType.isCompound()) {
                    log.debug("write(): cannot write dataset of type ARRAY of COMPOUND");
                    log.trace("write(): finish");
                    throw new HDF5Exception("Unsupported dataset of type ARRAY of COMPOUND");
                }

                if (DSdatatype.isVLEN() && baseType.isCompound()) {
                    log.debug("write(): cannot write dataset of type VLEN of COMPOUND");
                    log.trace("write(): finish");
                    throw new HDF5Exception("Unsupported dataset of type VLEN of COMPOUND");
                }
            }
            else {
                log.debug("write(): ARRAY or VLEN datatype has no base type");
                throw new HDF5Exception("Dataset's datatype (ARRAY or VLEN) has no base datatype");
            }
        }

        log.trace("write(): open dataset");

        long did = open();
        if (did >= 0) {
            long spaceIDs[] = { -1, -1 }; // spaceIDs[0]=mspace, spaceIDs[1]=fspace

            try {
                long totalSelectedSpacePoints = selectHyperslab(did, spaceIDs);

                log.trace("write(): selected {} points in dataset dataspace", totalSelectedSpacePoints);

                List<Datatype> atomicList = new Vector<>();
                DSdatatype.extractCompoundInfo(null, null, atomicList);

                log.trace("write(): foreach nMembers={}", atomicList.size());

                int currentMemberIndex = 0;
                for (int i = 0; i < atomicList.size(); i++) {
                    H5Datatype member_type = null;
                    String member_name = null;
                    Object member_data = null;

                    if (!isMemberSelected[i]) {
                        log.debug("write(): Member[{}] is not selected", i);
                        continue; // the field is not selected
                    }

                    try {
                        member_type = (H5Datatype) atomicList.get(i);
                    }
                    catch (Exception ex) {
                        log.debug("write(): get member {} failure: ", i, ex);
                        continue;
                    }

                    try {
                        member_name = new String(memberNames[i]);
                    }
                    catch (Exception ex) {
                        log.debug("write(): get member {} name failure: ", i, ex);
                        member_name = "null";
                    }

                    try {
                        member_data = ((List<?>) buf).get(currentMemberIndex++);
                    }
                    catch (Exception ex) {
                        log.debug("write(): get member {} data failure: ", i, ex);
                        continue;
                    }

                    if (member_data == null) {
                        log.debug("write(): Member[{}] data is null", i);
                        continue;
                    }

                    log.trace("write(): {} Member[{}] is type {} of size={}", member_name, i, member_type.getDescription(), member_type.getDatatypeSize());

                    /*
                     * Check for any unsupported datatypes before attempting to write
                     * this compound member
                     */
                    if (member_type.isVLEN() && !member_type.isVarStr()) {
                        log.debug("write(): Member[{}]: write of VL non-strings is not currently supported", i);
                        continue;
                    }

                    /*
                     * Perform any necessary data conversions before writing the data.
                     */
                    try {
                        tmpData = member_data;

                        if (member_type.isUnsigned()) {
                            // Check if we need to convert integer data
                            long tsize = member_type.getDatatypeSize();
                            String cname = member_data.getClass().getName();
                            char dname = cname.charAt(cname.lastIndexOf("[") + 1);
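                            // On read, unsigned C integers are promoted one size up
                            // (8-bit to Java short 'S', 16-bit to int 'I', 32-bit to long 'J'),
                            // so such buffers must be demoted again before writing.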
                            boolean doIntConversion = (((tsize == 1) && (dname == 'S'))
                                    || ((tsize == 2) && (dname == 'I')) || ((tsize == 4) && (dname == 'J')));

                            if (doIntConversion) {
                                log.trace("write(): Member[{}]: converting integer data to unsigned C-type integers", i);
                                tmpData = convertToUnsignedC(member_data, null);
                            }
                        }
                        else if (member_type.isString() && (Array.get(member_data, 0) instanceof String)) {
                            log.trace("write(): Member[{}]: converting string array to byte array", i);
                            tmpData = stringToByte((String[]) member_data, (int) member_type.getDatatypeSize());
                        }
                        else if (member_type.isEnum() && (Array.get(member_data, 0) instanceof String)) {
                            log.trace("write(): Member[{}]: converting enum names to values", i);
                            tmpData = member_type.convertEnumNameToValue((String[]) member_data);
                        }
                    }
                    catch (Exception ex) {
                        log.debug("write(): data conversion failure: ", ex);
                        tmpData = null;
                    }

                    /*
                     * Actually write the data now that everything has been set up
                     */
                    if (tmpData != null) {
                        long comp_tid = -1;
                        try {
                            comp_tid = member_type.createCompoundFieldType(flatNameList.get(i));
                        }
                        catch (HDF5Exception ex) {
                            log.debug("write(): unable to create compound field type for Member[{}]: ", i, ex);
                            continue;
                        }

                        try {
                            if (member_type.isVarStr()) {
                                log.trace("write(): Member[{}]: H5Dwrite_string did={} comp_tid={} spaceIDs[0]={} spaceIDs[1]={}", i, did, comp_tid, spaceIDs[0], spaceIDs[1]);
                                H5.H5Dwrite_string(did, comp_tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, (String[]) tmpData);
                            }
                            else {
                                // BUG!!! nested compound data is not written correctly and no
                                // exception is thrown; need to check whether it is a Java
                                // error or a C library error
                                log.trace("write(): Member[{}]: H5Dwrite did={} comp_tid={} spaceIDs[0]={} spaceIDs[1]={}", i, did, comp_tid, spaceIDs[0], spaceIDs[1]);
                                H5.H5Dwrite(did, comp_tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, tmpData);
                            }
                        }
                        catch (Exception ex) {
                            log.debug("write(): write failure: ", ex);
                            log.trace("write(): finish");
                            throw new HDF5Exception(ex.getMessage());
                        }
                        finally {
                            DSdatatype.close(comp_tid);
                        }
                    }
                } // end of for (int i=0; i<num_members; i++)
            }
            finally {
                if (HDF5Constants.H5S_ALL != spaceIDs[0]) {
                    try {
                        H5.H5Sclose(spaceIDs[0]);
                    }
                    catch (Exception ex) {
                        log.debug("write(): H5Sclose(spaceIDs[0] {}) failure: ", spaceIDs[0], ex);
                    }
                }

                if (HDF5Constants.H5S_ALL != spaceIDs[1]) {
                    try {
                        H5.H5Sclose(spaceIDs[1]);
                    }
                    catch (Exception ex) {
                        log.debug("write(): H5Sclose(spaceIDs[1] {}) failure: ", spaceIDs[1], ex);
                    }
                }

                close(did);
            }
        }

        log.trace("write(): finish");
    }

    @Override
    public Object convertFromUnsignedC() {
        throw new UnsupportedOperationException("H5CompoundDS:convertFromUnsignedC Unsupported operation.");
    }

    @Override
    public Object convertToUnsignedC() {
        throw new UnsupportedOperationException("H5CompoundDS:convertToUnsignedC Unsupported operation.");
    }

    /**
     * Sets up the hyperslab selection, as sketched below.
     *
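     * <pre>
     * // Typical call pattern (sketch): spaceIDs[0] receives the memory space ID
     * // and spaceIDs[1] the file space ID; both may be H5S_ALL.
     * long[] spaceIDs = { -1, -1 };
     * long npoints = selectHyperslab(did, spaceIDs);
     * </pre>
     *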
     * @param did
     *            IN dataset ID
     * @param spaceIDs
     *            IN/OUT memory and file space IDs -- spaceIDs[0]=mspace, spaceIDs[1]=fspace
     *
     * @return the total number of data points selected
     *
     * @throws HDF5Exception
     *             If there is an error at the HDF5 library level.
     */
    private long selectHyperslab(long did, long[] spaceIDs) throws HDF5Exception {
        log.trace("selectHyperslab(): start");

        long lsize = 1;

        boolean isAllSelected = true;
        for (int i = 0; i < rank; i++) {
            lsize *= selectedDims[i];
            if (selectedDims[i] < dims[i]) {
                isAllSelected = false;
            }
        }

        log.trace("selectHyperslab(): isAllSelected={}", isAllSelected);

        if (isAllSelected) {
            spaceIDs[0] = HDF5Constants.H5S_ALL;
            spaceIDs[1] = HDF5Constants.H5S_ALL;
        }
        else {
            spaceIDs[1] = H5.H5Dget_space(did);

            // Reading a chunked dataset through a 1D memory dataspace is very
            // slow; this is a known problem in the HDF5 library.
            // mspace = H5.H5Screate_simple(1, lsize, null);
            spaceIDs[0] = H5.H5Screate_simple(rank, selectedDims, null);
            H5.H5Sselect_hyperslab(spaceIDs[1], HDF5Constants.H5S_SELECT_SET, startDims, selectedStride, selectedDims,
                    null);
        }

        return lsize;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.DataFormat#getMetadata()
     */
    @Override
    public List<Attribute> getMetadata() throws HDF5Exception {
        return this.getMetadata(fileFormat.getIndexType(null), fileFormat.getIndexOrder(null));
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.DataFormat#getMetadata(int...)
     */
    public List<Attribute> getMetadata(int... attrPropList) throws HDF5Exception {
        log.trace("getMetadata(): start");

        if (!isInited()) {
            init();
            log.trace("getMetadata(): inited");
        }

        try {
            this.linkTargetObjName = H5File.getLinkTargetName(this);
        }
        catch (Exception ex) {
            log.debug("getMetadata(): getLinkTargetName failed: ", ex);
        }

        if (attributeList != null) {
            log.trace("getMetadata(): attributeList != null");
            log.trace("getMetadata(): finish");
            return attributeList;
        }

        long did = -1;
        long pcid = -1;
        long paid = -1;
        int indxType = fileFormat.getIndexType(null);
        int order = fileFormat.getIndexOrder(null);

        // load attributes first
        if (attrPropList.length > 0) {
            indxType = attrPropList[0];
            if (attrPropList.length > 1) {
                order = attrPropList[1];
            }
        }

        attributeList = H5File.getAttribute(this, indxType, order);
        log.trace("getMetadata(): attributeList loaded");

        log.trace("getMetadata(): open dataset");
        did = open();
        if (did >= 0) {
            log.trace("getMetadata(): dataset opened");
            try {
                compression = "";

                // get the compression and chunk information
                pcid = H5.H5Dget_create_plist(did);
                paid = H5.H5Dget_access_plist(did);
                long storage_size = H5.H5Dget_storage_size(did);
                int nfilt = H5.H5Pget_nfilters(pcid);
                int layout_type = H5.H5Pget_layout(pcid);
                if (layout_type == HDF5Constants.H5D_CHUNKED) {
                    chunkSize = new long[rank];
                    H5.H5Pget_chunk(pcid, rank, chunkSize);
                    int n = chunkSize.length;
                    storage_layout = "CHUNKED: " + String.valueOf(chunkSize[0]);
                    for (int i = 1; i < n; i++) {
                        storage_layout += " X " + chunkSize[i];
                    }

                    if (nfilt > 0) {
                        long nelmts = 1;
                        long uncomp_size;
                        long datum_size = getDatatype().getDatatypeSize();
                        if (datum_size < 0) {
                            long tmptid = -1;
                            try {
                                tmptid = H5.H5Dget_type(did);
                                datum_size = H5.H5Tget_size(tmptid);
                            }
                            finally {
1327                                try {H5.H5Tclose(tmptid);}
1328                                catch (Exception ex2) {log.debug("getMetadata(): H5Tclose(tmptid {}) failure: ", tmptid, ex2);}
1329                            }
1330                        }
1331
1332                        for (int i = 0; i < rank; i++) {
1333                            nelmts *= dims[i];
1334                        }
1335                        uncomp_size = nelmts * datum_size;
1336
1337                        /* compression ratio = uncompressed size / compressed size */
1338
1339                        if (storage_size != 0) {
1340                            double ratio = (double) uncomp_size / (double) storage_size;
1341                            DecimalFormat df = new DecimalFormat();
1342                            df.setMinimumFractionDigits(3);
1343                            df.setMaximumFractionDigits(3);
1344                            compression += df.format(ratio) + ":1";
1345                        }
1346                    }
1347                }
1348                else if (layout_type == HDF5Constants.H5D_COMPACT) {
1349                    storage_layout = "COMPACT";
1350                }
1351                else if (layout_type == HDF5Constants.H5D_CONTIGUOUS) {
1352                    storage_layout = "CONTIGUOUS";
1353                    if (H5.H5Pget_external_count(pcid) > 0)
1354                        storage_layout += " - EXTERNAL ";
1355                }
1356                else if (layout_type == HDF5Constants.H5D_VIRTUAL) {
1357                    storage_layout = "VIRTUAL - ";
1358                    try {
1359                        long vmaps = H5.H5Pget_virtual_count(pcid);
1360                        try {
1361                            int virt_view = H5.H5Pget_virtual_view(paid);
1362                            long virt_gap = H5.H5Pget_virtual_printf_gap(paid);
1363                            if (virt_view == HDF5Constants.H5D_VDS_FIRST_MISSING)
1364                                storage_layout += "First Missing";
1365                            else
1366                                storage_layout += "Last Available";
1367                            storage_layout += "\nGAP : " + String.valueOf(virt_gap);
1368                        }
1369                        catch (Throwable err) {
1370                            log.debug("getMetadata(): vds error: ", err);
1371                            storage_layout += "ERROR";
1372                        }
1373                        storage_layout += "\nMAPS : " + String.valueOf(vmaps);
1374                        if (vmaps > 0) {
1375                            for (long next = 0; next < vmaps; next++) {
1376                                try {
1377                                    H5.H5Pget_virtual_vspace(pcid, next);
1378                                    H5.H5Pget_virtual_srcspace(pcid, next);
1379                                    String fname = H5.H5Pget_virtual_filename(pcid, next);
1380                                    String dsetname = H5.H5Pget_virtual_dsetname(pcid, next);
1381                                    storage_layout += "\n" + fname + " : " + dsetname;
1382                                }
1383                                catch (Throwable err) {
1384                                    log.debug("getMetadata(): vds space[{}] error: ", next, err);
1385                                    log.trace("getMetadata(): vds[{}] continue", next);
1386                                    storage_layout += "ERROR";
1387                                    continue;
1388                                }
1389                            }
1390                        }
1391                    }
1392                    catch (Throwable err) {
1393                        log.debug("getMetadata(): vds count error: ", err);
1394                        storage_layout += "ERROR";
1395                    }
1396                }
1397                else {
1398                    chunkSize = null;
1399                    storage_layout = "NONE";
1400                }
1401
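                // Walk the filter pipeline: for each filter, H5Pget_filter fills
                // in the flags, client-data values (cd_values) and filter name,
                // and returns the filter identifier tested below.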
                int[] flags = { 0, 0 };
                long[] cd_nelmts = { 20 };
                int[] cd_values = new int[(int) cd_nelmts[0]];
                String[] cd_name = { "", "" };
                log.trace("getMetadata(): {} filters in pipeline", nfilt);
                int filter = -1;
                int[] filter_config = { 1 };
                filters = "";

                for (int i = 0, k = 0; i < nfilt; i++) {
                    log.trace("getMetadata(): filter[{}]", i);
                    if (i > 0) {
                        filters += ", ";
                    }
                    if (k > 0) {
                        compression += ", ";
                    }

                    try {
                        cd_nelmts[0] = 20;
                        cd_values = new int[(int) cd_nelmts[0]];
                        filter = H5.H5Pget_filter(pcid, i, flags, cd_nelmts, cd_values, 120, cd_name, filter_config);
                        log.trace("getMetadata(): filter[{}] is {} has {} elements ", i, cd_name[0], cd_nelmts[0]);
                        for (int j = 0; j < cd_nelmts[0]; j++) {
                            log.trace("getMetadata(): filter[{}] element {} = {}", i, j, cd_values[j]);
                        }
                    }
                    catch (Throwable err) {
                        log.debug("getMetadata(): filter[{}] error: ", i, err);
                        log.trace("getMetadata(): filter[{}] continue", i);
                        filters += "ERROR";
                        continue;
                    }

                    if (filter == HDF5Constants.H5Z_FILTER_NONE) {
                        filters += "NONE";
                    }
                    else if (filter == HDF5Constants.H5Z_FILTER_DEFLATE) {
                        filters += "GZIP";
                        compression += compression_gzip_txt + cd_values[0];
                        k++;
                    }
                    else if (filter == HDF5Constants.H5Z_FILTER_FLETCHER32) {
                        filters += "Error detection filter";
                    }
                    else if (filter == HDF5Constants.H5Z_FILTER_SHUFFLE) {
                        filters += "SHUFFLE: Nbytes = " + cd_values[0];
                    }
                    else if (filter == HDF5Constants.H5Z_FILTER_NBIT) {
                        filters += "NBIT";
                    }
                    else if (filter == HDF5Constants.H5Z_FILTER_SCALEOFFSET) {
                        filters += "SCALEOFFSET: MIN BITS = " + cd_values[0];
                    }
                    else if (filter == HDF5Constants.H5Z_FILTER_SZIP) {
                        filters += "SZIP";
                        compression += "SZIP: Pixels per block = " + cd_values[1];
                        k++;
                        int flag = -1;
                        try {
                            flag = H5.H5Zget_filter_info(filter);
                        }
                        catch (Exception ex) {
                            log.debug("getMetadata(): H5Zget_filter_info failure: ", ex);
                            flag = -1;
                        }
                        if (flag == HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) {
                            compression += ": H5Z_FILTER_CONFIG_DECODE_ENABLED";
                        }
                        else if ((flag == HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED)
                                || (flag >= (HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED
                                        + HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED))) {
                            compression += ": H5Z_FILTER_CONFIG_ENCODE_ENABLED";
                        }
                    }
                    else {
                        filters += "USERDEFINED " + cd_name[0] + "(" + filter + "): ";
                        for (int j = 0; j < cd_nelmts[0]; j++) {
                            if (j > 0)
                                filters += ", ";
                            filters += cd_values[j];
                        }
                        log.debug("getMetadata(): filter[{}] is user defined compression", i);
                    }
                } // for (int i=0; i<nfilt; i++)

                if (compression.length() == 0) {
                    compression = "NONE";
                }
                log.trace("getMetadata(): filter compression={}", compression);

                if (filters.length() == 0) {
                    filters = "NONE";
                }
                log.trace("getMetadata(): filter information={}", filters);

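                // Describe the raw storage: the total size on disk plus the
                // space allocation time from the creation property list.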
                storage = "SIZE: " + storage_size;
                try {
                    int[] at = { 0 };
                    H5.H5Pget_alloc_time(pcid, at);
                    storage += ", allocation time: ";
                    if (at[0] == HDF5Constants.H5D_ALLOC_TIME_EARLY) {
                        storage += "Early";
                    }
                    else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_INCR) {
                        storage += "Incremental";
                    }
                    else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_LATE) {
                        storage += "Late";
                    }
                }
                catch (Exception ex) {
                    log.debug("getMetadata(): Storage allocation time: ", ex);
                }
                if (storage.length() == 0) {
                    storage = "NONE";
                }
                log.trace("getMetadata(): storage={}", storage);
            }
            finally {
                try {
                    H5.H5Pclose(paid);
                }
                catch (Exception ex) {
                    log.debug("getMetadata(): H5Pclose(paid {}) failure: ", paid, ex);
                }
                try {
                    H5.H5Pclose(pcid);
                }
                catch (Exception ex) {
                    log.debug("getMetadata(): H5Pclose(pcid {}) failure: ", pcid, ex);
                }
                close(did);
            }
        }

        log.trace("getMetadata(): finish");
        return attributeList;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.DataFormat#writeMetadata(java.lang.Object)
     */
    @Override
    public void writeMetadata(Object info) throws Exception {
        log.trace("writeMetadata(): start");

        // only attribute metadata is supported.
        if (!(info instanceof Attribute)) {
            log.debug("writeMetadata(): Object not an Attribute");
            log.trace("writeMetadata(): finish");
            return;
        }

        boolean attrExisted = false;
        Attribute attr = (Attribute) info;
        log.trace("writeMetadata(): {}", attr.getName());

        if (attributeList == null) {
            this.getMetadata();
        }

        if (attributeList != null)
            attrExisted = attributeList.contains(attr);

        getFileFormat().writeAttribute(this, attr, attrExisted);
        // add the new attribute to the attribute list
        if (!attrExisted) {
            attributeList.add(attr);
            nAttributes = attributeList.size();
        }

        log.trace("writeMetadata(): finish");
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.DataFormat#removeMetadata(java.lang.Object)
     */
    @Override
    public void removeMetadata(Object info) throws HDF5Exception {
        log.trace("removeMetadata(): start");

        // only attribute metadata is supported.
        if (!(info instanceof Attribute)) {
            log.debug("removeMetadata(): Object not an Attribute");
            log.trace("removeMetadata(): finish");
            return;
        }

        Attribute attr = (Attribute) info;
        log.trace("removeMetadata(): {}", attr.getName());
        long did = open();
        if (did >= 0) {
            try {
                H5.H5Adelete(did, attr.getName());
                List<Attribute> attrList = getMetadata();
                attrList.remove(attr);
                nAttributes = attrList.size();
            }
            finally {
                close(did);
            }
        }

        log.trace("removeMetadata(): finish");
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.DataFormat#updateMetadata(java.lang.Object)
     */
    @Override
    public void updateMetadata(Object info) throws HDF5Exception {
        log.trace("updateMetadata(): start");

        // only attribute metadata is supported.
        if (!(info instanceof Attribute)) {
            log.debug("updateMetadata(): Object not an Attribute");
            log.trace("updateMetadata(): finish");
            return;
        }

        nAttributes = -1;

        log.trace("updateMetadata(): finish");
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#setName(java.lang.String)
     */
    @Override
    public void setName(String newName) throws Exception {
        H5File.renameObject(this, newName);
        super.setName(newName);
    }

    /**
     * Resets the dataspace selection to its default state.
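     * <p>
     * As a sketch of the resulting state (derived from the logic below), for a rank-3 dataset the
     * defaults select one full plane of the first frame:
     *
     * <pre>
     * startDims     = { 0, 0, 0 };
     * selectedIndex = { 0, 1, 2 };
     * selectedDims  = { dims[0], dims[1], 1 };
     * </pre>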
     */
    private void resetSelection() {
        log.trace("resetSelection(): start");

        for (int i = 0; i < rank; i++) {
            startDims[i] = 0;
            selectedDims[i] = 1;
            if (selectedStride != null) {
                selectedStride[i] = 1;
            }
        }

        if (rank == 1) {
            selectedIndex[0] = 0;
            selectedDims[0] = dims[0];
        }
        else if (rank == 2) {
            selectedIndex[0] = 0;
            selectedIndex[1] = 1;
            selectedDims[0] = dims[0];
            selectedDims[1] = dims[1];
        }
        else if (rank > 2) {
            // selectedIndex[0] = rank - 2; // columns
            // selectedIndex[1] = rank - 1; // rows
            // selectedIndex[2] = rank - 3;
            selectedIndex[0] = 0; // width, the fastest dimension
            selectedIndex[1] = 1; // height
            selectedIndex[2] = 2; // frames
            // selectedDims[rank - 1] = dims[rank - 1];
            // selectedDims[rank - 2] = dims[rank - 2];
            selectedDims[selectedIndex[0]] = dims[selectedIndex[0]];
            selectedDims[selectedIndex[1]] = dims[selectedIndex[1]];
        }

        isDataLoaded = false;
        setAllMemberSelection(true);
        log.trace("resetSelection(): finish");
    }

    /**
     * @deprecated Not for public use in the future. <br>
     *             Use
     *             {@link #create(String, Group, long[], long[], long[], int, String[], Datatype[], int[], long[][], Object)}
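     *             <p>
     *             This convenience overload treats every member as a rank-1 array: it is equivalent
     *             to calling the ranks/dims variant with {@code memberRanks[i] = 1} and
     *             {@code memberDims[i] = new long[] { memberSizes[i] }}.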
     *
     * @param name
     *            the name of the dataset to create.
     * @param pgroup
     *            parent group where the new dataset is created.
     * @param dims
     *            the dimension size of the dataset.
     * @param memberNames
     *            the names of the compound datatype members
     * @param memberDatatypes
     *            the datatypes of the compound datatype members
     * @param memberSizes
     *            the dim sizes of the members
     * @param data
     *            list of data arrays written to the new dataset, null if no data is written to the new
     *            dataset.
     *
     * @return the new compound dataset if successful; otherwise returns null.
     *
     * @throws Exception
     *             if there is a failure.
     */
    @Deprecated
    public static Dataset create(String name, Group pgroup, long[] dims, String[] memberNames,
            Datatype[] memberDatatypes, int[] memberSizes, Object data) throws Exception {
        if ((pgroup == null) || (name == null) || (dims == null) || (memberNames == null) || (memberDatatypes == null)
                || (memberSizes == null)) {
            return null;
        }

        int nMembers = memberNames.length;
        int memberRanks[] = new int[nMembers];
        long memberDims[][] = new long[nMembers][1];
        for (int i = 0; i < nMembers; i++) {
            memberRanks[i] = 1;
            memberDims[i][0] = memberSizes[i];
        }

        return H5CompoundDS.create(name, pgroup, dims, memberNames, memberDatatypes, memberRanks, memberDims, data);
    }

    /**
     * @deprecated Not for public use in the future. <br>
     *             Use
     *             {@link #create(String, Group, long[], long[], long[], int, String[], Datatype[], int[], long[][], Object)}
     *
     * @param name
     *            the name of the dataset to create.
     * @param pgroup
     *            parent group where the new dataset is created.
     * @param dims
     *            the dimension size of the dataset.
     * @param memberNames
     *            the names of the compound datatype members
     * @param memberDatatypes
     *            the datatypes of the compound datatype members
     * @param memberRanks
     *            the ranks of the members
     * @param memberDims
     *            the dim sizes of the members
     * @param data
     *            list of data arrays written to the new dataset, null if no data is written to the new
     *            dataset.
     *
     * @return the new compound dataset if successful; otherwise returns null.
     *
     * @throws Exception
     *             if the dataset cannot be created.
     */
    @Deprecated
    public static Dataset create(String name, Group pgroup, long[] dims, String[] memberNames,
            Datatype[] memberDatatypes, int[] memberRanks, long[][] memberDims, Object data) throws Exception {
        return H5CompoundDS.create(name, pgroup, dims, null, null, -1, memberNames, memberDatatypes, memberRanks,
                memberDims, data);
    }

    /**
     * Creates a simple compound dataset in a file, with or without chunking and compression.
     * <p>
     * This function provides an easy way to create a simple compound dataset in a file by hiding the
     * tedious details of creating a compound dataset from users.
     * <p>
     * This function calls H5.H5Dcreate() to create a simple compound dataset in a file. Nested
     * compound datasets are not supported. The required information to create a compound dataset
     * includes the name, the parent group and dataspace of the dataset, and the names, datatypes and
     * dataspaces of the compound fields. Other information, such as chunks, compression and the data
     * buffer, is optional.
     * <p>
     * The following example shows how to use this function to create a compound dataset in a file.
     *
     * <pre>
     * H5File file = null;
     * String message = &quot;&quot;;
     * Group pgroup = null;
     * int[] DATA_INT = new int[DIM_SIZE];
     * float[] DATA_FLOAT = new float[DIM_SIZE];
     * String[] DATA_STR = new String[DIM_SIZE];
     * long[] DIMs = { 50, 10 };
     * long[] CHUNKs = { 25, 5 };
     *
     * try {
     *     file = (H5File) H5FILE.open(fname, H5File.CREATE);
     *     file.open();
     *     pgroup = (Group) file.get(&quot;/&quot;);
     * }
     * catch (Exception ex) {
     * }
     *
     * Vector data = new Vector();
     * data.add(0, DATA_INT);
     * data.add(1, DATA_FLOAT);
     * data.add(2, DATA_STR);
     *
     * // create member datatypes
     * Datatype[] mdtypes = new H5Datatype[3];
     * String[] mnames = { &quot;int&quot;, &quot;float&quot;, &quot;string&quot; };
     * Dataset dset = null;
     * try {
     *     mdtypes[0] = new H5Datatype(Datatype.CLASS_INTEGER, 4, -1, -1);
     *     mdtypes[1] = new H5Datatype(Datatype.CLASS_FLOAT, 4, -1, -1);
     *     mdtypes[2] = new H5Datatype(Datatype.CLASS_STRING, STR_LEN, -1, -1);
     *     dset = file.createCompoundDS(&quot;/CompoundDS&quot;, pgroup, DIMs, null, CHUNKs, 9, mnames, mdtypes, null, data);
     * }
     * catch (Exception ex) {
     *     failed(message, ex, file);
     *     return 1;
     * }
     * </pre>
     *
     * @param name
     *            the name of the dataset to create.
     * @param pgroup
     *            parent group where the new dataset is created.
     * @param dims
     *            the dimension size of the dataset.
     * @param maxdims
     *            the max dimension size of the dataset. maxdims is set to dims if maxdims is null.
     * @param chunks
     *            the chunk size of the dataset. No chunking if chunks is null.
     * @param gzip
     *            GZIP compression level (1 to 9). 0 or a negative value means no compression.
     * @param memberNames
     *            the names of the compound datatype members
     * @param memberDatatypes
     *            the datatypes of the compound datatype members
     * @param memberRanks
     *            the ranks of the members
     * @param memberDims
     *            the dim sizes of the members
     * @param data
     *            list of data arrays written to the new dataset, null if no data is written to the new
     *            dataset.
     *
     * @return the new compound dataset if successful; otherwise returns null.
     *
     * @throws Exception
     *             if there is a failure.
     */
    public static Dataset create(String name, Group pgroup, long[] dims, long[] maxdims, long[] chunks, int gzip,
            String[] memberNames, Datatype[] memberDatatypes, int[] memberRanks, long[][] memberDims, Object data)
                    throws Exception {
        log.trace("create(): start");

        H5CompoundDS dataset = null;
        String fullPath = null;
        long did = -1;
        long tid = -1;
        long plist = -1;
        long sid = -1;

        if ((pgroup == null) || (name == null) || (dims == null) || ((gzip > 0) && (chunks == null))
                || (memberNames == null) || (memberDatatypes == null) || (memberRanks == null)
                || (memberDims == null)) {
            log.debug("create(): one or more parameters are null");
            log.trace("create(): finish");
            return null;
        }

        H5File file = (H5File) pgroup.getFileFormat();
        if (file == null) {
            log.debug("create(): parent group FileFormat is null");
            log.trace("create(): finish");
            return null;
        }

        String path = HObject.separator;
        if (!pgroup.isRoot()) {
            path = pgroup.getPath() + pgroup.getName() + HObject.separator;
            if (name.endsWith("/")) {
                name = name.substring(0, name.length() - 1);
            }
            int idx = name.lastIndexOf("/");
            if (idx >= 0) {
                name = name.substring(idx + 1);
            }
        }

        fullPath = path + name;

        int typeSize = 0;
        int nMembers = memberNames.length;
        long[] mTypes = new long[nMembers];
        int memberSize = 1;
        for (int i = 0; i < nMembers; i++) {
            memberSize = 1;
            for (int j = 0; j < memberRanks[i]; j++) {
                memberSize *= memberDims[i][j];
            }

            mTypes[i] = -1;
            // the member is an array
            if ((memberSize > 1) && (!memberDatatypes[i].isString())) {
                long tmptid = -1;
                if ((tmptid = memberDatatypes[i].createNative()) >= 0) {
                    try {
                        mTypes[i] = H5.H5Tarray_create(tmptid, memberRanks[i], memberDims[i]);
                    }
                    finally {
                        try {
                            H5.H5Tclose(tmptid);
                        }
                        catch (Exception ex) {
                            log.debug("create(): H5Tclose(tmptid {}) failure: ", tmptid, ex);
                        }
                    }
                }
            }
            else {
                mTypes[i] = memberDatatypes[i].createNative();
            }
            try {
                typeSize += H5.H5Tget_size(mTypes[i]);
            }
            catch (Exception ex) {
                log.debug("create(): H5Tget_size(mTypes[{}] {}) failure: ", i, mTypes[i], ex);

                while (i > 0) {
                    try {
                        H5.H5Tclose(mTypes[i]);
                    }
                    catch (HDF5Exception ex2) {
                        log.debug("create(): H5Tclose(mTypes[{}] {}) failure: ", i, mTypes[i], ex2);
                    }
                    i--;
                }
                throw ex;
            }
        } // for (int i = 0; i < nMembers; i++)

        // setup chunking and compression
        boolean isExtendable = false;
        if (maxdims != null) {
            for (int i = 0; i < maxdims.length; i++) {
                if (maxdims[i] == 0) {
                    maxdims[i] = dims[i];
                }
                else if (maxdims[i] < 0) {
                    maxdims[i] = HDF5Constants.H5S_UNLIMITED;
                }

                if (maxdims[i] != dims[i]) {
                    isExtendable = true;
                }
            }
        }

        // HDF5 requires chunking in order to define extendible datasets.
        // Chunking makes it possible to extend datasets efficiently, without
        // having to reorganize storage excessively. Use a default chunk size
        // of min(dim, 64) in each dimension, which performs well in practice.
        if ((chunks == null) && isExtendable) {
            chunks = new long[dims.length];
            for (int i = 0; i < dims.length; i++)
                chunks[i] = Math.min(dims[i], 64);
        }

        // prepare the dataspace and datatype
        int rank = dims.length;

        try {
            sid = H5.H5Screate_simple(rank, dims, maxdims);

            // figure out creation properties
            plist = HDF5Constants.H5P_DEFAULT;

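            // Build the in-file compound datatype: insert each member at its
            // byte offset so that members are packed back to back.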
            tid = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, typeSize);
            int offset = 0;
            for (int i = 0; i < nMembers; i++) {
                H5.H5Tinsert(tid, memberNames[i], offset, mTypes[i]);
                offset += H5.H5Tget_size(mTypes[i]);
            }

            if (chunks != null) {
                plist = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);

                H5.H5Pset_layout(plist, HDF5Constants.H5D_CHUNKED);
                H5.H5Pset_chunk(plist, rank, chunks);

                // compression requires chunking
                if (gzip > 0) {
                    H5.H5Pset_deflate(plist, gzip);
                }
            }

            long fid = file.getFID();

            log.trace("create(): create dataset");
            did = H5.H5Dcreate(fid, fullPath, tid, sid, HDF5Constants.H5P_DEFAULT, plist, HDF5Constants.H5P_DEFAULT);
            log.trace("create(): new H5CompoundDS");
            dataset = new H5CompoundDS(file, name, path);
        }
        finally {
            try {
                H5.H5Pclose(plist);
            }
            catch (HDF5Exception ex) {
                log.debug("create(): H5Pclose(plist {}) failure: ", plist, ex);
            }
            try {
                H5.H5Sclose(sid);
            }
            catch (HDF5Exception ex) {
                log.debug("create(): H5Sclose(sid {}) failure: ", sid, ex);
            }
            try {
                H5.H5Tclose(tid);
            }
            catch (HDF5Exception ex) {
                log.debug("create(): H5Tclose(tid {}) failure: ", tid, ex);
            }
            try {
                H5.H5Dclose(did);
            }
            catch (HDF5Exception ex) {
                log.debug("create(): H5Dclose(did {}) failure: ", did, ex);
            }

            for (int i = 0; i < nMembers; i++) {
                try {
                    H5.H5Tclose(mTypes[i]);
                }
                catch (HDF5Exception ex) {
                    log.debug("create(): H5Tclose(mTypes[{}] {}) failure: ", i, mTypes[i], ex);
                }
            }
        }

        if (dataset != null) {
            pgroup.addToMemberList(dataset);
            if (data != null) {
                dataset.init();
                long selected[] = dataset.getSelectedDims();
                for (int i = 0; i < rank; i++) {
                    selected[i] = dims[i];
                }
                dataset.write(data);
            }
        }

        log.trace("create(): finish");
        return dataset;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#isString(long)
     */
    @Override
    public boolean isString(long tid) {
        boolean b = false;
        try {
            b = (HDF5Constants.H5T_STRING == H5.H5Tget_class(tid));
        }
        catch (Exception ex) {
            b = false;
        }

        return b;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#getSize(long)
     */
    @Override
    public long getSize(long tid) {
        long tsize = -1;

        try {
            tsize = H5.H5Tget_size(tid);
        }
        catch (Exception ex) {
            tsize = -1;
        }

        return tsize;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#isVirtual()
     */
    @Override
    public boolean isVirtual() {
        return isVirtual;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#getVirtualFilename(int)
     */
    @Override
    public String getVirtualFilename(int index) {
        if (isVirtual)
            return virtualNameList.get(index);
        else
            return null;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#getVirtualMaps()
     */
    @Override
    public int getVirtualMaps() {
        if (isVirtual)
            return virtualNameList.size();
        else
            return -1;
    }

    /**
     * Given a byte array holding the data of a compound datatype, a start index, and a length,
     * converts the len bytes beginning at start into an Object of the given member type and returns it.
     *
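     * For example (a minimal sketch; {@code bytes} and {@code memberTid} are illustrative names),
     * extracting a 4-byte integer member that begins at byte offset 8 of the record buffer returns
     * an {@code int[]} with one element:
     *
     * <pre>
     * Object value = convertCompoundByteMember(bytes, memberTid, 8, 4);
     * </pre>
     *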
     * @param data
     *            The byte array representing the data of the compound Datatype
     * @param data_type
     *            The type of data to convert the bytes to
     * @param start
     *            The start index of the bytes to get
     * @param len
     *            The number of bytes to convert
     * @return the converted Object, or null if the conversion fails
     */
    private Object convertCompoundByteMember(byte[] data, long data_type, long start, long len) {
        Object currentData = null;

        try {
            long typeClass = H5.H5Tget_class(data_type);

            if (typeClass == HDF5Constants.H5T_INTEGER) {
                long size = H5.H5Tget_size(data_type);

                currentData = HDFNativeData.byteToInt((int) start, (int) (len / size), data);
            }
            else if (typeClass == HDF5Constants.H5T_FLOAT) {
                currentData = HDFNativeData.byteToDouble((int) start, 1, data);
            }
        }
        catch (Exception ex) {
            log.debug("convertCompoundByteMember(): conversion failure: ", ex);
        }

        return currentData;
    }
}