001/*****************************************************************************
002 * Copyright by The HDF Group.                                               *
003 * Copyright by the Board of Trustees of the University of Illinois.         *
004 * All rights reserved.                                                      *
005 *                                                                           *
006 * This file is part of the HDF Java Products distribution.                  *
007 * The full copyright notice, including terms governing use, modification,   *
008 * and redistribution, is contained in the file COPYING.                     *
009 * COPYING can be found at the root of the source code distribution tree.    *
010 * If you do not have access to this file, you may request a copy from       *
011 * help@hdfgroup.org.                                                        *
012 ****************************************************************************/
013
014package hdf.object.h5;
015
016import java.lang.reflect.Array;
017import java.text.DecimalFormat;
018import java.util.List;
019import java.util.Vector;
020
021import hdf.hdf5lib.H5;
022import hdf.hdf5lib.HDF5Constants;
023import hdf.hdf5lib.HDFNativeData;
024import hdf.hdf5lib.exceptions.HDF5DataFiltersException;
025import hdf.hdf5lib.exceptions.HDF5Exception;
026import hdf.hdf5lib.structs.H5O_info_t;
027import hdf.object.Attribute;
028import hdf.object.CompoundDS;
029import hdf.object.Dataset;
030import hdf.object.Datatype;
031import hdf.object.FileFormat;
032import hdf.object.Group;
033import hdf.object.HObject;
034
035/**
036 * The H5CompoundDS class defines an HDF5 dataset of compound datatypes.
037 * <p>
038 * An HDF5 dataset is an object composed of a collection of data elements, or raw data, and metadata that stores a
039 * description of the data elements, data layout, and all other information necessary to write, read, and interpret the
040 * stored data.
041 * <p>
042 * A HDF5 compound datatype is similar to a struct in C or a common block in Fortran: it is a collection of one or more
043 * atomic types or small arrays of such types. Each member of a compound type has a name which is unique within that
044 * type, and a byte offset that determines the first byte (smallest byte address) of that member in a compound datum.
045 * <p>
046 * For more information on HDF5 datasets and datatypes, read the
047 * <a href="https://www.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html#t=HDF5_Users_Guide%2FHDF5_UG_Title%2FHDF5_UG_Title.htm">HDF5 User's Guide</a>.
048 * <p>
049 * There are two basic types of compound datasets: simple compound data and nested compound data. Members of a simple
050 * compound dataset have atomic datatypes. Members of a nested compound dataset are compound or array of compound data.
051 * <p>
052 * Since Java does not understand C structures, we cannot directly read/write compound data values as in the following C
053 * example.
054 *
055 * <pre>
056 * typedef struct s1_t {
057 *         int    a;
058 *         float  b;
059 *         double c;
060 *         } s1_t;
061 *     s1_t       s1[LENGTH];
062 *     ...
063 *     H5Dwrite(..., s1);
064 *     H5Dread(..., s1);
065 * </pre>
066 *
067 * Values of compound data fields are stored in java.util.Vector object. We read and write compound data by fields
068 * instead of compound structure. As for the example above, the java.util.Vector object has three elements: int[LENGTH],
069 * float[LENGTH] and double[LENGTH]. Since Java understands the primitive datatypes of int, float and double, we will be
070 * able to read/write the compound data by field.
071 *
072 * @version 1.1 9/4/2007
073 * @author Peter X. Cao
074 */
075public class H5CompoundDS extends CompoundDS {
076    private static final long serialVersionUID = -5968625125574032736L;
077
078    private final static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5CompoundDS.class);
079
080    /**
081     * The list of attributes of this data object. Members of the list are instance of Attribute.
082     */
083    private List<Attribute> attributeList;
084
085    private int nAttributes = -1;
086
087    private H5O_info_t obj_info;
088
089    /**
090     * A list of names of all fields including nested fields.
091     * <p>
092     * The nested names are separated by CompoundDS.separator. For example, if compound dataset "A" has the following
093     * nested structure,
094     *
095     * <pre>
096     * A --&gt; m01
097     * A --&gt; m02
098     * A --&gt; nest1 --&gt; m11
099     * A --&gt; nest1 --&gt; m12
100     * A --&gt; nest1 --&gt; nest2 --&gt; m21
101     * A --&gt; nest1 --&gt; nest2 --&gt; m22
102     * i.e.
103     * A = { m01, m02, nest1{m11, m12, nest2{ m21, m22}}}
104     * </pre>
105     *
106     * The flatNameList of compound dataset "A" will be {m01, m02, nest1[m11, nest1[m12, nest1[nest2[m21,
107     * nest1[nest2[m22}
108     *
109     */
110    private List<String> flatNameList;
111
112    /**
113     * A list of datatypes of all fields including nested fields.
114     */
115    private List<Integer> flatTypeList;
116
117    /** flag to indicate is the dataset is an external dataset */
118    private boolean isExternal = false;
119
120    /**
121     * Constructs an instance of a HDF5 compound dataset with given file, dataset name and path.
122     * <p>
123     * The dataset object represents an existing dataset in the file. For example, new H5CompoundDS(file, "dset1",
124     * "/g0/") constructs a dataset object that corresponds to the dataset,"dset1", at group "/g0/".
125     * <p>
126     * This object is usually constructed at FileFormat.open(), which loads the file structure and object information
127     * into tree structure (TreeNode). It is rarely used elsewhere.
128     *
129     * @param theFile
130     *            the file that contains the data object.
131     * @param theName
132     *            the name of the data object, e.g. "dset".
133     * @param thePath
134     *            the full path of the data object, e.g. "/arrays/".
135     */
136    public H5CompoundDS(FileFormat theFile, String theName, String thePath) {
137        this(theFile, theName, thePath, null);
138    }
139
140    /**
141     * @deprecated Not for public use in the future.<br>
142     *             Using {@link #H5CompoundDS(FileFormat, String, String)}
143     *
144     * @param theFile
145     *            the file that contains the data object.
146     * @param theName
147     *            the name of the data object, e.g. "dset".
148     * @param thePath
149     *            the full path of the data object, e.g. "/arrays/".
150     * @param oid
151     *            the oid of the data object.
152     */
153    @Deprecated
154    public H5CompoundDS(FileFormat theFile, String theName, String thePath, long[] oid) {
155        super(theFile, theName, thePath, oid);
156        obj_info = new H5O_info_t(-1L, -1L, 0, 0, -1L, 0L, 0L, 0L, 0L, null, null, null);
157
158        if ((oid == null) && (theFile != null)) {
159            // retrieve the object ID
160            try {
161                byte[] ref_buf = H5.H5Rcreate(theFile.getFID(), this.getFullName(), HDF5Constants.H5R_OBJECT, -1);
162                this.oid = new long[1];
163                this.oid[0] = HDFNativeData.byteToLong(ref_buf, 0);
164            }
165            catch (Exception ex) {
166                log.debug("constructor ID {} for {} failed H5Rcreate", theFile.getFID(), this.getFullName());
167            }
168        }
169    }
170
171    /*
172     * (non-Javadoc)
173     *
174     * @see hdf.object.HObject#open()
175     */
176    @Override
177    public int open() {
178        log.trace("open(): start");
179
180        int did = -1;
181
182        try {
183            did = H5.H5Dopen(getFID(), getPath() + getName(), HDF5Constants.H5P_DEFAULT);
184            log.trace("open(): did={}", did);
185        }
186        catch (HDF5Exception ex) {
187            log.debug("open(): Failed to open dataset {}: ", getPath() + getName(), ex);
188            did = -1;
189        }
190
191        log.trace("open(): finish");
192        return did;
193    }
194
195    /*
196     * (non-Javadoc)
197     *
198     * @see hdf.object.HObject#close(int)
199     */
200    @Override
201    public void close(int did) {
202        log.trace("close(): start");
203
204        if (did >= 0) {
205            try {
206                H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_LOCAL);
207            }
208            catch (Exception ex) {
209                log.debug("close(): H5Fflush({}, {}) failure: ", did, HDF5Constants.H5F_SCOPE_LOCAL, ex);
210            }
211            try {
212                H5.H5Dclose(did);
213            }
214            catch (HDF5Exception ex) {
215                log.debug("close(): H5Dclose({}) failure: ", did, ex);
216            }
217        }
218
219        log.trace("close(): finish");
220    }
221
222    /*
223     * (non-Javadoc)
224     *
225     * @see hdf.object.Dataset#init()
226     */
227    @Override
228    public void init() {
229        log.trace("init(): start");
230
231        if (rank > 0) {
232            resetSelection();
233            log.trace("init(): Dataset already inited");
234            log.trace("init(): finish");
235            return; // already called. Initialize only once
236        }
237
238        int did = -1, sid = -1, tid = -1, tclass = -1;
239        flatNameList = new Vector<String>();
240        flatTypeList = new Vector<Integer>();
241        int[] memberTIDs = null;
242
243        did = open();
244        if (did >= 0) {
245            // check if it is an external dataset
246            int pid = -1;
247            try {
248                pid = H5.H5Dget_create_plist(did);
249                int nfiles = H5.H5Pget_external_count(pid);
250                isExternal = (nfiles > 0);
251                log.trace("init(): pid={} nfiles={} isExternal={}", pid, nfiles, isExternal);
252            }
253            catch (Exception ex) {
254                log.debug("init(): check if it is an external dataset:", ex);
255            }
256            finally {
257                try {
258                    H5.H5Pclose(pid);
259                }
260                catch (Exception ex) {
261                    log.debug("init(): H5Pclose({}) failure: ", pid, ex);
262                }
263            }
264
265            try {
266                sid = H5.H5Dget_space(did);
267                rank = H5.H5Sget_simple_extent_ndims(sid);
268                tid = H5.H5Dget_type(did);
269                tclass = H5.H5Tget_class(tid);
270
271                int tmptid = 0;
272                if (tclass == HDF5Constants.H5T_ARRAY) {
273                    // array of compound
274                    tmptid = tid;
275                    tid = H5.H5Tget_super(tmptid);
276                    try {
277                        H5.H5Tclose(tmptid);
278                    }
279                    catch (HDF5Exception ex) {
280                        log.debug("init(): H5Tclose({}) failure: ", tmptid, ex);
281                    }
282                }
283
284                if (rank == 0) {
285                    // a scalar data point
286                    rank = 1;
287                    dims = new long[1];
288                    dims[0] = 1;
289                }
290                else {
291                    dims = new long[rank];
292                    maxDims = new long[rank];
293                    H5.H5Sget_simple_extent_dims(sid, dims, maxDims);
294                    log.trace("init(): rank={}, dims={}, maxDims={}", rank, dims, maxDims);
295                }
296
297                startDims = new long[rank];
298                selectedDims = new long[rank];
299
300                // initialize member information
301                extractCompoundInfo(tid, "", flatNameList, flatTypeList);
302                numberOfMembers = flatNameList.size();
303                log.trace("init(): numberOfMembers={}", numberOfMembers);
304
305                memberNames = new String[numberOfMembers];
306                memberTIDs = new int[numberOfMembers];
307                memberTypes = new Datatype[numberOfMembers];
308                memberOrders = new int[numberOfMembers];
309                isMemberSelected = new boolean[numberOfMembers];
310                memberDims = new Object[numberOfMembers];
311
312                for (int i = 0; i < numberOfMembers; i++) {
313                    isMemberSelected[i] = true;
314                    memberTIDs[i] = ((Integer) flatTypeList.get(i)).intValue();
315                    memberTypes[i] = new H5Datatype(memberTIDs[i]);
316                    memberNames[i] = (String) flatNameList.get(i);
317                    memberOrders[i] = 1;
318                    memberDims[i] = null;
319                    log.trace("init()[{}]: memberNames[{}]={}, memberTIDs[{}]={}, memberTypes[{}]={}",
320                            i, i, memberNames[i], i, memberTIDs[i], i, memberTypes[i].getDatatypeDescription());
321
322                    try {
323                        tclass = H5.H5Tget_class(memberTIDs[i]);
324                    }
325                    catch (HDF5Exception ex) {
326                        log.debug("init(): H5Tget_class({}) failure: ", memberTIDs[i], ex);
327                    }
328
329                    if (tclass == HDF5Constants.H5T_ARRAY) {
330                        int n = H5.H5Tget_array_ndims(memberTIDs[i]);
331                        long mdim[] = new long[n];
332                        H5.H5Tget_array_dims(memberTIDs[i], mdim);
333                        int idim[] = new int[n];
334                        for (int j = 0; j < n; j++)
335                            idim[j] = (int) mdim[j];
336                        memberDims[i] = idim;
337                        tmptid = H5.H5Tget_super(memberTIDs[i]);
338                        memberOrders[i] = (H5.H5Tget_size(memberTIDs[i]) / H5.H5Tget_size(tmptid));
339                        try {
340                            H5.H5Tclose(tmptid);
341                        }
342                        catch (HDF5Exception ex) {
343                            log.debug("init(): memberTIDs[{}] H5Tclose(tmptid {}) failure: ", i, tmptid, ex);
344                        }
345                    }
346                } // for (int i=0; i<numberOfMembers; i++)
347            }
348            catch (HDF5Exception ex) {
349                numberOfMembers = 0;
350                memberNames = null;
351                memberTypes = null;
352                memberOrders = null;
353                log.debug("init():", ex);
354            }
355            finally {
356                try {
357                    H5.H5Tclose(tid);
358                }
359                catch (HDF5Exception ex2) {
360                    log.debug("init(): H5Tclose({}) failure: ", tid, ex2);
361                }
362                try {
363                    H5.H5Sclose(sid);
364                }
365                catch (HDF5Exception ex2) {
366                    log.debug("init(): H5Sclose({}) failure: ", sid, ex2);
367                }
368
369                if (memberTIDs != null) {
370                    for (int i = 0; i < memberTIDs.length; i++) {
371                        try {
372                            H5.H5Tclose(memberTIDs[i]);
373                        }
374                        catch (Exception ex) {
375                            log.debug("init(): H5Tclose(memberTIDs[{}] {}) failure: ", i, memberTIDs[i], ex);
376                        }
377                    }
378                }
379            }
380
381            log.trace("init(): close dataset");
382            close(did);
383        }
384        else {
385            log.debug("init(): failed to open dataset");
386        }
387
388        resetSelection();
389        log.trace("init(): finish");
390    }
391
392    /*
393     * (non-Javadoc)
394     *
395     * @see hdf.object.DataFormat#hasAttribute()
396     */
397    public boolean hasAttribute() {
398        obj_info.num_attrs = nAttributes;
399
400        if (obj_info.num_attrs < 0) {
401            int did = open();
402            if (did >= 0) {
403                try {
404                    obj_info = H5.H5Oget_info(did);
405                    nAttributes = (int) obj_info.num_attrs;
406                }
407                catch (Exception ex) {
408                    obj_info.num_attrs = 0;
409                    log.debug("hasAttribute(): get object info failure: ", ex);
410                }
411                close(did);
412            }
413            else {
414                log.debug("hasAttribute(): could not open dataset");
415            }
416        }
417
418        return (obj_info.num_attrs > 0);
419    }
420
421    /*
422     * (non-Javadoc)
423     *
424     * @see hdf.object.Dataset#getDatatype()
425     */
426    @Override
427    public Datatype getDatatype() {
428        if (datatype == null) {
429            log.trace("getDatatype(): datatype == null");
430            datatype = new H5Datatype(Datatype.CLASS_COMPOUND, -1, -1, -1);
431        }
432
433        return datatype;
434    }
435
436    /*
437     * (non-Javadoc)
438     *
439     * @see hdf.object.Dataset#clear()
440     */
441    @Override
442    public void clear() {
443        super.clear();
444
445        if (attributeList != null) {
446            ((Vector<Attribute>) attributeList).setSize(0);
447        }
448    }
449
450    /*
451     * (non-Javadoc)
452     *
453     * @see hdf.object.Dataset#readBytes()
454     */
455    @Override
456    public byte[] readBytes() throws HDF5Exception {
457        log.trace("readBytes(): start");
458
459        byte[] theData = null;
460
461        if (rank <= 0) {
462            init();
463        }
464
465        int did = open();
466        if (did >= 0) {
467            int fspace = -1, mspace = -1, tid = -1;
468
469            try {
470                long[] lsize = { 1 };
471                for (int j = 0; j < selectedDims.length; j++) {
472                    lsize[0] *= selectedDims[j];
473                }
474
475                fspace = H5.H5Dget_space(did);
476                mspace = H5.H5Screate_simple(rank, selectedDims, null);
477
478                // set the rectangle selection
479                // HDF5 bug: for scalar dataset, H5Sselect_hyperslab gives core dump
480                if (rank * dims[0] > 1) {
481                    H5.H5Sselect_hyperslab(fspace, HDF5Constants.H5S_SELECT_SET, startDims, selectedStride,
482                            selectedDims, null); // set block to 1
483                }
484
485                tid = H5.H5Dget_type(did);
486                int size = H5.H5Tget_size(tid) * (int) lsize[0];
487                log.trace("readBytes(): size = {}", size);
488
489                if (size < Integer.MIN_VALUE || size > Integer.MAX_VALUE) throw new Exception("Dataset too large to read.");
490
491                theData = new byte[size];
492
493                log.trace("readBytes(): H5Dread: did={} tid={} fspace={} mspace={}", did, tid, fspace, mspace);
494                H5.H5Dread(did, tid, mspace, fspace, HDF5Constants.H5P_DEFAULT, theData);
495            }
496            catch (Exception ex) {
497                log.debug("readBytes(): failed to read data: ", ex);
498            }
499            finally {
500                try {
501                    H5.H5Sclose(fspace);
502                }
503                catch (Exception ex2) {
504                    log.debug("readBytes(): H5Sclose(fspace {}) failure: ", fspace, ex2);
505                }
506                try {
507                    H5.H5Sclose(mspace);
508                }
509                catch (Exception ex2) {
510                    log.debug("readBytes(): H5Sclose(mspace {}) failure: ", mspace, ex2);
511                }
512                try {
513                    H5.H5Tclose(tid);
514                }
515                catch (HDF5Exception ex2) {
516                    log.debug("readBytes(): H5Tclose({}) failure: ", tid, ex2);
517                }
518                close(did);
519            }
520        }
521
522        log.trace("readBytes(): finish");
523        return theData;
524    }
525
526    /*
527     * (non-Javadoc)
528     *
529     * @see hdf.object.Dataset#read()
530     */
531    @Override
532    public Object read() throws Exception {
533        log.trace("read(): start");
534
535        List<Object> list = null;
536        Object member_data = null;
537        String member_name = null;
538        int member_class = -1;
539        int member_base_class = -1;
540        int member_size = 0;
541        int atom_tid = -1;
542        int did = -1;
543        int tid = -1;
544        int spaceIDs[] = { -1, -1 }; // spaceIDs[0]=mspace, spaceIDs[1]=fspace
545
546        if (rank <= 0) {
547            init(); // read data information into memory
548        }
549
550        if (numberOfMembers <= 0) {
551            log.debug("read(): Dataset contains no members");
552            log.trace("read(): finish");
553            return null; // this compound dataset does not have any member
554        }
555
556        if (isExternal) {
557            String pdir = this.getFileFormat().getAbsoluteFile().getParent();
558
559            if (pdir == null) {
560                pdir = ".";
561            }
562            System.setProperty("user.dir", pdir);//H5.H5Dchdir_ext(pdir);
563            log.trace("read(): External dataset: user.dir={}", pdir);
564        }
565
566        long[] lsize = { 1 };
567        log.trace("read(): open dataset");
568        did = open();
569        if (did >= 0) {
570            list = new Vector<Object>(flatNameList.size());
571            Vector<Integer> atomicList = new Vector<Integer>();
572            try {
573                lsize[0] = selectHyperslab(did, spaceIDs);
574                log.trace("read(): opened dataset size {} for {}", lsize[0], nPoints);
575
576                if (lsize[0] == 0) {
577                    log.debug("read(): No data to read");
578                    log.trace("read(): finish");
579                    throw new HDF5Exception("No data to read.\nEither the dataset or the selected subset is empty.");
580                }
581
582                if (lsize[0] < Integer.MIN_VALUE || lsize[0] > Integer.MAX_VALUE) {
583                    log.debug("read(): lsize outside valid int range; unsafe cast");
584                    log.trace("read(): finish");
585                    throw new HDF5Exception("Invalid int size");
586                }
587
588                if (log.isDebugEnabled()) {
589                    // check is storage space is allocated
590                    try {
591                        long ssize = H5.H5Dget_storage_size(did);
592                        log.trace("read(): Storage space allocated = {}.", ssize);
593                    }
594                    catch (Exception ex) {
595                        log.debug("read(): check if storage space is allocated:", ex);
596                    }
597                }
598
599                // read each of member data into a byte array, then extract
600                // it into its type such, int, long, float, etc.
601                int n = flatNameList.size();
602                tid = H5.H5Dget_type(did);
603                int tclass = H5.H5Tget_class(tid);
604                if (tclass == HDF5Constants.H5T_ARRAY) {
605                    // array of compound
606                    int tmptid = -1;
607                    try {
608                        tmptid = tid;
609                        log.trace("read(): H5Tget_super");
610                        tid = H5.H5Tget_super(tmptid);
611
612                        // ARRAY of COMPOUND currently unsupported
613                        if (H5.H5Tget_class(tid) == HDF5Constants.H5T_COMPOUND) {
614                            log.debug("read(): cannot read dataset of type ARRAY of COMPOUND");
615                            log.trace("read(): finish");
616                            throw new Exception("Unsupported dataset of type ARRAY of COMPOUND");
617                        }
618                    }
619                    finally {
620                        try {H5.H5Tclose(tmptid);}
621                        catch (Exception ex2) {log.debug("read(): H5Tclose(tmptid {}) failure: ", tmptid, ex2);}
622                    }
623                }
624
625                extractCompoundInfo(tid, null, null, atomicList);
626
627                log.trace("read(): foreach nMembers={}", n);
628                for (int i = 0; i < n; i++) {
629                    boolean isVL = false;
630                    member_base_class = -1;
631
632                    if (!isMemberSelected[i]) {
633                        log.debug("read(): Member[{}] is not selected", i);
634                        continue; // the field is not selected
635                    }
636
637                    member_name = new String(memberNames[i]);
638
639                    atom_tid = ((Integer) atomicList.get(i)).intValue();
640                    try {
641                        member_class = H5.H5Tget_class(atom_tid);
642                        member_size = H5.H5Tget_size(atom_tid);
643                        member_data = H5Datatype.allocateArray(atom_tid, (int) lsize[0]);
644                    }
645                    catch (OutOfMemoryError err) {
646                        member_data = null;
647                        throw new HDF5Exception("Out Of Memory.");
648                    }
649                    catch (Exception ex) {
650                        log.debug("read(): Member[{}]: ", i, ex);
651                        member_data = null;
652                    }
653                    log.trace("read(): {} Member[{}] is class {} of size={}", member_name, i, member_class, member_size);
654
655                    if (member_data == null || H5.H5Tequal(atom_tid, HDF5Constants.H5T_STD_REF_DSETREG)) {
656                        String[] nullValues = new String[(int) lsize[0]];
657                        String errorStr = "*unsupported*";
658                        for (int j = 0; j < lsize[0]; j++) {
659                            nullValues[j] = errorStr;
660                        }
661                        list.add(nullValues);
662
663                        log.trace("read(): {} Member[{}] of class {} is unsupported.", member_name, i, member_class);
664                        continue;
665                    }
666                    else if (member_class == HDF5Constants.H5T_ARRAY) {
667                        int tmptid = -1;
668                        try {
669                            tmptid = H5.H5Tget_super(atom_tid);
670                            member_base_class = H5.H5Tget_class(tmptid);
671
672                            isVL = isVL || H5.H5Tis_variable_str(tmptid);
673                            isVL = isVL || H5.H5Tdetect_class(tmptid, HDF5Constants.H5T_VLEN);
674
675                            if (member_base_class == HDF5Constants.H5T_COMPOUND) {
676                                try {
677                                    member_data = H5Datatype.allocateArray(tmptid, member_size * (int) lsize[0]);
678                                }
679                                catch (OutOfMemoryError err) {
680                                    member_data = null;
681                                    throw new HDF5Exception("Out Of Memory.");
682                                }
683                                catch (Exception ex) {
684                                    log.debug("read(): Member[{}]: Error allocating array for Compound: ", i, ex);
685                                    member_data = null;
686                                }
687                            }
688
689                            log.trace("read(): {} Array Member[{}] is class {} of size={}", member_name, i, member_base_class, member_size);
690                        }
691                        catch (Exception ex) {
692                            log.debug("read(): H5T_ARRAY id or class failure[{}]:", i, ex);
693                            continue;
694                        }
695                        finally {
696                            try {
697                                H5.H5Tclose(tmptid);
698                            }
699                            catch (Exception ex) {
700                                log.debug("read(): Member[{}]: H5Tclose(tmptid {}) failure:", i, tmptid, ex);
701                            }
702                        }
703
704                        // cannot deal with ARRAY of ARRAY, support only ARRAY of atomic types
705                        if (member_base_class == HDF5Constants.H5T_ARRAY) {
706                            String[] nullValues = new String[(int) lsize[0]];
707                            String errorStr = "*unsupported*";
708                            for (int j = 0; j < lsize[0]; j++) {
709                                nullValues[j] = errorStr;
710                            }
711                            list.add(nullValues);
712
713                            log.trace("read(): {} Member[{}] of type ARRAY of ARRAY is unsupported", member_name, i);
714                            continue;
715                        }
716                    }
717
718                    if (member_data != null) {
719                        int comp_tid = -1;
720                        int compInfo[] = { member_class, member_size, 0 };
721                        try {
722                            comp_tid = createCompoundFieldType(atom_tid, member_name, compInfo);
723                        }
724                        catch (HDF5Exception ex) {
725                            String[] nullValues = new String[(int) lsize[0]];
726                            for (int j = 0; j < lsize[0]; j++) {
727                                nullValues[j] = "*unsupported*";
728                            }
729                            list.add(nullValues);
730                            log.debug("read(): {} Member[{}] createCompoundFieldType failure:", member_name, i, ex);
731                            continue;
732                        }
733                        try {
734                            // See BUG#951 isVL = H5.H5Tdetect_class(atom_tid,
735                            // HDF5Constants.H5T_VLEN);
736                            isVL = isVL || H5.H5Tis_variable_str(atom_tid);
737                            isVL = isVL || H5.H5Tdetect_class(atom_tid, HDF5Constants.H5T_VLEN);
738                        }
739                        catch (Exception ex) {
740                            log.debug("read(): detection of varstr:", ex);
741                            isVL = false;
742                        }
743                        try {
744                            log.trace("read(): H5Dread({}) did={} spaceIDs[0]={} spaceIDs[1]={}", comp_tid, did, spaceIDs[0], spaceIDs[1]);
745                            if (isVL) {
746                                H5.H5DreadVL(did, comp_tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT,
747                                        (Object[]) member_data);
748                            }
749                            else if (member_base_class == HDF5Constants.H5T_COMPOUND) {
750                                H5.H5Dread(did, comp_tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT,
751                                        (byte[]) member_data, true);
752                            }
753                            else {
754                                H5.H5Dread(did, comp_tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, member_data);
755                            }
756                        }
757                        catch (HDF5DataFiltersException exfltr) {
758                            log.debug("read(): {} Member[{}] read failure:", member_name, i, exfltr);
759                            log.trace("read(): finish");
760                            throw new Exception("Filter not available exception: " + exfltr.getMessage(), exfltr);
761                        }
762                        catch (HDF5Exception ex2) {
763                            String[] nullValues = new String[(int) lsize[0]];
764                            for (int j = 0; j < lsize[0]; j++) {
765                                nullValues[j] = "*unsupported*";
766                            }
767                            list.add(nullValues);
768                            log.debug("read(): {} Member[{}] read failure:", member_name, i, ex2);
769                            continue;
770                        }
771                        finally {
772                            try {H5.H5Tclose(comp_tid);}
773                            catch (Exception ex3) {log.debug("read(): H5Tclose(comp_tid {}) failure: ", comp_tid, ex3);}
774                        }
775
776                        if (!isVL) {
777                            String cname = member_data.getClass().getName();
778                            char dname = cname.charAt(cname.lastIndexOf("[") + 1);
779                            log.trace("read(!isVL): {} Member[{}] is cname {} of dname={} convert={}", member_name, i, cname, dname, convertByteToString);
780
781                            if ((member_class == HDF5Constants.H5T_STRING) && convertByteToString) {
782                                if (dname == 'B') {
783                                    member_data = byteToString((byte[]) member_data, member_size / memberOrders[i]);
784                                    log.trace("read(!isVL): convertByteToString: {} Member[{}]", member_name, i);
785                                }
786                            }
787                            else if (member_class == HDF5Constants.H5T_REFERENCE) {
788                                if (dname == 'B') {
789                                    member_data = HDFNativeData.byteToLong((byte[]) member_data);
790                                    log.trace("read(!isVL): convertByteToLong: {} Member[{}]", member_name, i);
791                                }
792                            }
793                            else if (compInfo[2] != 0) {
794                                member_data = Dataset.convertFromUnsignedC(member_data, null);
795                                log.trace("read(!isVL): convertFromUnsignedC: {} Member[{}]", member_name, i);
796                            }
797                            else if ((member_class == HDF5Constants.H5T_ENUM || member_base_class == HDF5Constants.H5T_ENUM)
798                                    && enumConverted) {
799                                try {
800                                    String[] strs = null;
801
802                                    if (member_class == HDF5Constants.H5T_ARRAY) {
803                                        int base_tid = -1;
804
805                                        try {
806                                            base_tid = H5.H5Tget_super(atom_tid);
807                                            strs = H5Datatype.convertEnumValueToName(base_tid, member_data, null);
808                                        }
809                                        catch (Exception ex) {
810                                            log.debug("read(): convertEnumValueToName failure: ", ex);
811                                        }
812                                        finally {
813                                            try {
814                                                H5.H5Tclose(base_tid);
815                                            }
816                                            catch (Exception ex) {
817                                                log.debug("read(): H5Tclose(base_tid {}) failure: ", base_tid, ex);
818                                            }
819                                        }
820                                    }
821                                    else {
822                                        strs = H5Datatype.convertEnumValueToName(atom_tid, member_data, null);
823                                    }
824
825                                    if (strs != null) {
826                                        member_data = strs;
827                                        log.trace("H5CompoundDS read(!isVL)convertEnumValueToName: {} Member[{}]", member_name, i);
828                                    }
829                                    log.trace("H5CompoundDS read(!isVL)convertEnumValueToName:data={}", strs[0]);
830                                }
831                                catch (Exception ex) {
832                                    log.debug("read: H5Datatype.convertEnumValueToName:", ex);
833                                }
834                            }
835                            else if (member_class == HDF5Constants.H5T_ARRAY && member_base_class == HDF5Constants.H5T_COMPOUND) {
836                                // Since compounds are read into memory as a byte array, discover each member
837                                // type and size and convert the byte array to the correct type before adding
838                                // it to the list
839
840                                int numDims = H5.H5Tget_array_ndims(atom_tid);
841                                long[] dims = new long[numDims];
842                                H5.H5Tget_array_dims(atom_tid, dims);
843                                int numberOfCompounds = (int) dims[0] * (int) lsize[0];
844                                int compoundSize = (member_size * (int) lsize[0]) / numberOfCompounds;
845
846                                Object current_data = new Object[numberOfCompounds];
847
848                                int base_tid = -1;
849                                long memberOffsets[] = null;
850                                long memberLengths[] = null;
851                                int memberTypes[] = null;
852                                int numberOfMembers;
853
854                                try {
855                                    base_tid = H5.H5Tget_super(atom_tid);
856                                    numberOfMembers = H5.H5Tget_nmembers(base_tid);
857                                    memberOffsets = new long[numberOfMembers];
858                                    memberLengths = new long[numberOfMembers];
859                                    memberTypes = new int[numberOfMembers];
860
861                                    for (int j = 0; j < numberOfMembers; j++) {
862                                        memberOffsets[j] = H5.H5Tget_member_offset(base_tid, j);
863                                        memberTypes[j] = H5.H5Tget_member_type(base_tid, j);
864                                    }
865
866                                    for (int j = 0; j < numberOfMembers; j++) {
867                                        if (j < numberOfMembers - 1) {
868                                            memberLengths[j] = (memberOffsets[j + 1] - memberOffsets[j]);
869                                        }
870                                        else {
871                                            memberLengths[j] = (compoundSize - memberOffsets[j]);
872                                        }
873                                    }
874
875                                    for (int j = 0; j < numberOfCompounds; j++) {
876                                        Object field_data = new Object[numberOfMembers];
877
878                                        for (int k = 0; k < numberOfMembers; k++) {
879                                            Object converted = convertCompoundByteMember((byte[]) member_data, memberTypes[k], memberOffsets[k] + (compoundSize * j), memberLengths[k]);
880
881                                            ((Object[]) field_data)[k] = Array.get(converted, 0);
882                                        }
883
884                                        ((Object[]) current_data)[j] = field_data;
885                                    }
886                                }
887                                catch (Exception ex) {
888                                    log.debug("read(): Convert Array of Compounds failure: ", ex);
889                                    continue;
890                                }
891                                finally {
892                                    for (int j = 0; j < memberTypes.length; j++) {
893                                        try {
894                                            H5.H5Tclose(memberTypes[j]);
895                                        }
896                                        catch (Exception ex) {
897                                            log.debug("read(): Member[{}]: H5Tclose(memberTypes[{}] {}) failure: ", i, j, memberTypes[j], ex);
898                                        }
899                                    }
900                                    try {
901                                        H5.H5Tclose(base_tid);
902                                    }
903                                    catch (Exception ex) {
904                                        log.debug("read(): Member[{}]: H5Tclose(base_tid {}) failure:", i, base_tid, ex);
905                                    }
906                                }
907
908                                list.add(current_data);
909                                continue;
910                            }
911                        }
912
913                        list.add(member_data);
914                    } // if (member_data != null)
915                } // end of for (int i=0; i<num_members; i++)
916
917            }
918            finally {
919                try {
920                    if (HDF5Constants.H5S_ALL != spaceIDs[0])
921                        H5.H5Sclose(spaceIDs[0]);
922                }
923                catch (Exception ex2) {
924                    log.debug("read(): H5Sclose(spaceIDs[0] {}) failure: ", spaceIDs[0], ex2);
925                }
926                try {
927                    if (HDF5Constants.H5S_ALL != spaceIDs[1])
928                        H5.H5Sclose(spaceIDs[1]);
929                }
930                catch (Exception ex2) {
931                    log.debug("read(): H5Sclose(spaceIDs[1] {}) failure: ", spaceIDs[1], ex2);
932                }
933
934                // close atomic types
935                int ntypes = atomicList.size();
936                for (int i = 0; i < ntypes; i++) {
937                    atom_tid = ((Integer) atomicList.get(i)).intValue();
938                    try {
939                        H5.H5Tclose(atom_tid);
940                    }
941                    catch (Exception ex2) {
942                        log.debug("read(): H5Tclose(atom_tid {}) failure: ", atom_tid, ex2);
943                    }
944                }
945                try {H5.H5Tclose(tid);}
946                catch (Exception ex2) {log.debug("read(): H5Tclose(tid {}) failure: ", tid, ex2);}
947
948                close(did);
949            }
950        }
951
952        log.trace("read(): finish");
953        return list;
954    }
955
956    /**
957     * Writes the given data buffer into this dataset in a file.
958     * <p>
959     * The data buffer is a vector that contains the data values of compound fields. The data is written into file field
960     * by field.
961     *
962     * @param buf
963     *            The vector that contains the data values of compound fields.
964     *
965     * @throws HDF5Exception
966     *             If there is an error at the HDF5 library level.
967     */
968    @Override
969    public void write(Object buf) throws HDF5Exception {
970        log.trace("H5CompoundDS write: start");
971        int did = -1;
972        int tid = -1;
973        int spaceIDs[] = { -1, -1 }; // spaceIDs[0]=mspace, spaceIDs[1]=fspace
974        Object member_data = null;
975        String member_name = null;
976        int atom_tid = -1, member_class = -1, member_size = 0;
977
978        List<?> list = (List<?>) buf;
979        if ((buf == null) || (numberOfMembers <= 0) || !(buf instanceof List)) {
980            log.debug("write(): buf is null or invalid or contains no members");
981            log.trace("write(): finish");
982            return;
983        }
984
985        long[] lsize = { 1 };
986        did = open();
987        if (did >= 0) {
988            Vector<Integer> atomicList = new Vector<Integer>();
989            try {
990                lsize[0] = selectHyperslab(did, spaceIDs);
991                int tmptid = H5.H5Dget_type(did);
992
993                // read each of member data into a byte array, then extract
994                // it into its type such, int, long, float, etc.
995                int idx = 0;
996                int n = flatNameList.size();
997                boolean isEnum = false;
998
999                try {
1000                    extractCompoundInfo(tmptid, null, null, atomicList);
1001                }
1002                finally {
1003                    try {H5.H5Tclose(tmptid);}
1004                    catch (Exception ex2) {log.debug("write(): H5Tclose(tmptid {}) failure:", tmptid, ex2);}
1005                }
1006                for (int i = 0; i < n; i++) {
1007                    log.trace("write(): Member[{}] of {}", i, n);
1008                    if (!isMemberSelected[i]) {
1009                        log.debug("write(): Member[{}] is not selected", i);
1010                        continue; // the field is not selected
1011                    }
1012
1013                    member_name = new String(memberNames[i]);
1014                    atom_tid = ((Integer) atomicList.get(i)).intValue();
1015                    member_data = list.get(idx++);
1016
1017                    if (member_data == null) {
1018                        log.debug("write(): Member[{}] data is null", i);
1019                        continue;
1020                    }
1021
1022                    boolean isVL = false;
1023                    try {
1024                        isVL = (H5.H5Tget_class(atom_tid) == HDF5Constants.H5T_VLEN || H5.H5Tis_variable_str(atom_tid));
1025                        log.trace("write(): Member[{}] isVL={}", i, isVL);
1026                    }
1027                    catch (Exception ex) {
1028                        log.debug("write(): isVL: ", ex);
1029                    }
1030
1031                    try {
1032                        member_class = H5.H5Tget_class(atom_tid);
1033                        member_size = H5.H5Tget_size(atom_tid);
1034                        isEnum = (member_class == HDF5Constants.H5T_ENUM);
1035                    }
1036                    catch (Exception ex) {
1037                        log.debug("write(): retrieve member class or size failure: ", ex);
1038                    }
1039                    log.trace("write(): {} Member[{}] is class {} of size={}", member_name, i, member_class, member_size);
1040
1041                    Object tmpData = member_data;
1042
1043                    int compInfo[] = { member_class, member_size, 0 };
1044                    try {
1045                        tid = createCompoundFieldType(atom_tid, member_name, compInfo);
1046                        log.trace("write(): {} Member[{}] compInfo[class]={} compInfo[size]={} compInfo[unsigned]={}",
1047                                member_name, i, compInfo[0], compInfo[1], compInfo[2]);
1048                        if(isVL) {
1049                            H5.H5DwriteString(did, tid,
1050                                    spaceIDs[0], spaceIDs[1],
1051                                    HDF5Constants.H5P_DEFAULT, (String[])tmpData);
1052                        }
1053                        else {
1054                            if (compInfo[2] != 0) {
1055                                // check if need to convert integer data
1056                                int tsize = H5.H5Tget_size(tid);
1057                                String cname = member_data.getClass().getName();
1058                                char dname = cname.charAt(cname.lastIndexOf("[") + 1);
1059                                boolean doConversion = (((tsize == 1) && (dname == 'S'))
1060                                        || ((tsize == 2) && (dname == 'I')) || ((tsize == 4) && (dname == 'J')));
1061
1062                                tmpData = member_data;
1063                                if (doConversion) {
1064                                    log.trace("write(): {} Member[{}] convertToUnsignedC", member_name, i);
1065                                    tmpData = convertToUnsignedC(member_data, null);
1066                                }
1067                            }
1068                            else if ((member_class == HDF5Constants.H5T_STRING) && (Array.get(member_data, 0) instanceof String)) {
1069                                log.trace("write(): {} Member[{}] stringToByte", member_name, i);
1070                                tmpData = stringToByte((String[]) member_data, member_size);
1071                            }
1072                            else if (isEnum && (Array.get(member_data, 0) instanceof String)) {
1073                                log.trace("write(): {} Member[{}] convertEnumNameToValue", member_name, i);
1074                                tmpData = H5Datatype.convertEnumNameToValue(atom_tid, (String[]) member_data, null);
1075                            }
1076
1077                            if (tmpData != null) {
1078                                // BUG!!! does not write nested compound data and no
1079                                // exception was caught
1080                                // need to check if it is a java error or C library
1081                                // error
1082                                log.debug("write(): H5Dwrite warning - does not write nested compound data");
1083                                H5.H5Dwrite(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, tmpData);
1084                            }
1085                        }
1086                    }
1087                    catch (Exception ex1) {
1088                        log.debug("write(): H5Dwrite process failure:", ex1);
1089                    }
1090                    finally {
1091                        try {
1092                            H5.H5Tclose(tid);
1093                        }
1094                        catch (Exception ex2) {
1095                            log.debug("write(): H5Tclose(tid {}) failure: ", tid, ex2);
1096                        }
1097                    }
1098                } // end of for (int i=0; i<num_members; i++)
1099            }
1100            finally {
1101                try {
1102                    if (HDF5Constants.H5S_ALL != spaceIDs[0])
1103                        H5.H5Sclose(spaceIDs[0]);
1104                }
1105                catch (Exception ex2) {
1106                    log.debug("write(): H5Sclose(spaceIDs[0] {}) failure: ", spaceIDs[0], ex2);
1107                }
1108                try {
1109                    if (HDF5Constants.H5S_ALL != spaceIDs[1])
1110                        H5.H5Sclose(spaceIDs[1]);
1111                }
1112                catch (Exception ex2) {
1113                    log.debug("write(): H5Sclose(spaceIDs[1] {}) failure: ", spaceIDs[1], ex2);
1114                }
1115
1116                // close atomic types
1117                int ntypes = atomicList.size();
1118                for (int i = 0; i < ntypes; i++) {
1119                    atom_tid = ((Integer) atomicList.get(i)).intValue();
1120                    try {
1121                        H5.H5Tclose(atom_tid);
1122                    }
1123                    catch (Exception ex2) {
1124                        log.debug("write(): H5Tclose(atom_tid {}) failure: ", atom_tid, ex2);
1125                    }
1126                }
1127            }
1128            close(did);
1129        }
1130        log.trace("write(): finish");
1131    }
1132
1133    /**
1134     * Set up the selection of hyperslab
1135     *
1136     * @param did
1137     *            IN dataset ID
1138     * @param spaceIDs
1139     *            IN/OUT memory and file space IDs -- spaceIDs[0]=mspace, spaceIDs[1]=fspace
1140     *
1141     * @return total number of data point selected
1142     *
1143     * @throws HDF5Exception
1144     *             If there is an error at the HDF5 library level.
1145     */
1146    private long selectHyperslab(int did, int[] spaceIDs) throws HDF5Exception {
1147        long lsize = 1;
1148
1149        boolean isAllSelected = true;
1150        for (int i = 0; i < rank; i++) {
1151            lsize *= selectedDims[i];
1152            if (selectedDims[i] < dims[i]) {
1153                isAllSelected = false;
1154            }
1155        }
1156
1157        log.trace("selectHyperslab(): isAllSelected={}", isAllSelected);
1158
1159        if (isAllSelected) {
1160            spaceIDs[0] = HDF5Constants.H5S_ALL;
1161            spaceIDs[1] = HDF5Constants.H5S_ALL;
1162        }
1163        else {
1164            spaceIDs[1] = H5.H5Dget_space(did);
1165
1166            // When 1D dataspace is used in chunked dataset, reading is very
1167            // slow.
1168            // It is a known problem on HDF5 library for chunked dataset.
1169            // mspace = H5.H5Screate_simple(1, lsize, null);
1170            spaceIDs[0] = H5.H5Screate_simple(rank, selectedDims, null);
1171            H5.H5Sselect_hyperslab(spaceIDs[1], HDF5Constants.H5S_SELECT_SET, startDims, selectedStride, selectedDims,
1172                    null);
1173        }
1174
1175        return lsize;
1176    }
1177
1178    /*
1179     * (non-Javadoc)
1180     *
1181     * @see hdf.object.DataFormat#getMetadata()
1182     */
1183    public List<Attribute> getMetadata() throws HDF5Exception {
1184        return this.getMetadata(fileFormat.getIndexType(null), fileFormat.getIndexOrder(null));
1185    }
1186
1187    /*
1188     * (non-Javadoc)
1189     *
1190     * @see hdf.object.DataFormat#getMetadata(int...)
1191     */
1192    public List<Attribute> getMetadata(int... attrPropList) throws HDF5Exception {
1193        log.trace("getMetadata(): start");
1194
1195        if (rank <= 0) {
1196            init();
1197            log.trace("getMetadata(): inited");
1198        }
1199
1200        try {
1201            this.linkTargetObjName = H5File.getLinkTargetName(this);
1202        }
1203        catch (Exception ex) {
1204            log.debug("getMetadata(): getLinkTargetName failed: ", ex);
1205        }
1206
1207        if (attributeList != null) {
1208            log.debug("getMetadata(): attributeList != null");
1209            log.trace("getMetadata(): finish");
1210            return attributeList;
1211        }
1212
1213        int did = -1;
1214        int pcid = -1;
1215        int indxType = fileFormat.getIndexType(null);
1216        int order = fileFormat.getIndexOrder(null);
1217
1218        // load attributes first
1219        if (attrPropList.length > 0) {
1220            indxType = attrPropList[0];
1221            if (attrPropList.length > 1) {
1222                order = attrPropList[1];
1223            }
1224        }
1225        log.trace("getMetadata(): open dataset");
1226        did = open();
1227        if (did >= 0) {
1228            log.trace("getMetadata(): dataset opened");
1229            try {
1230                compression = "";
1231                attributeList = H5File.getAttribute(did, indxType, order);
1232                log.trace("getMetadata(): attributeList loaded");
1233
1234                // get the compression and chunk information
1235                pcid = H5.H5Dget_create_plist(did);
1236                long storage_size = H5.H5Dget_storage_size(did);
1237                int nfilt = H5.H5Pget_nfilters(pcid);
1238                int layout_type = H5.H5Pget_layout(pcid);
1239                if (layout_type == HDF5Constants.H5D_CHUNKED) {
1240                    chunkSize = new long[rank];
1241                    H5.H5Pget_chunk(pcid, rank, chunkSize);
1242                    int n = chunkSize.length;
1243                    storage_layout = "CHUNKED: " + String.valueOf(chunkSize[0]);
1244                    for (int i = 1; i < n; i++) {
1245                        storage_layout += " X " + chunkSize[i];
1246                    }
1247
1248                    if(nfilt > 0) {
1249                        long    nelmts = 1;
1250                        long    uncomp_size;
1251                        long    datum_size = getDatatype().getDatatypeSize();
1252                        if (datum_size < 0) {
1253                            int tmptid = -1;
1254                            try {
1255                                tmptid = H5.H5Dget_type(did);
1256                                datum_size = H5.H5Tget_size(tmptid);
1257                            }
1258                            finally {
1259                                try {H5.H5Tclose(tmptid);}
1260                                catch (Exception ex2) {log.debug("getMetadata(): H5Tclose(tmptid {}) failure: ", tmptid, ex2);}
1261                            }
1262                        }
1263
1264
1265                        for(int i = 0; i < rank; i++) {
1266                            nelmts *= dims[i];
1267                        }
1268                        uncomp_size = nelmts * datum_size;
1269
1270                        /* compression ratio = uncompressed size /  compressed size */
1271
1272                        if(storage_size != 0) {
1273                            double ratio = (double) uncomp_size / (double) storage_size;
1274                            DecimalFormat df = new DecimalFormat();
1275                            df.setMinimumFractionDigits(3);
1276                            df.setMaximumFractionDigits(3);
1277                            compression +=  df.format(ratio) + ":1";
1278                        }
1279                    }
1280                }
1281                else if (layout_type == HDF5Constants.H5D_COMPACT) {
1282                    storage_layout = "COMPACT";
1283                }
1284                else if (layout_type == HDF5Constants.H5D_CONTIGUOUS) {
1285                    storage_layout = "CONTIGUOUS";
1286                    if (H5.H5Pget_external_count(pcid) > 0)
1287                        storage_layout += " - EXTERNAL ";
1288                }
1289                else {
1290                    chunkSize = null;
1291                    storage_layout = "NONE";
1292                }
1293
1294                int[] flags = { 0, 0 };
1295                long[] cd_nelmts = { 20 };
1296                int[] cd_values = new int[(int) cd_nelmts[0]];;
1297                String[] cd_name = { "", "" };
1298                log.trace("getMetadata(): {} filters in pipeline", nfilt);
1299                int filter = -1;
1300                int[] filter_config = { 1 };
1301                filters = "";
1302
1303                for (int i = 0, k = 0; i < nfilt; i++) {
1304                    log.trace("getMetadata(): filter[{}]", i);
1305                    if (i > 0) {
1306                        filters += ", ";
1307                    }
1308                    if (k > 0) {
1309                        compression += ", ";
1310                    }
1311
1312                    try {
1313                        cd_nelmts[0] = 20;
1314                        cd_values = new int[(int) cd_nelmts[0]];
1315                        cd_values = new int[(int) cd_nelmts[0]];
1316                        filter = H5.H5Pget_filter(pcid, i, flags, cd_nelmts, cd_values, 120, cd_name, filter_config);
1317                        log.trace("getMetadata(): filter[{}] is {} has {} elements ", i, cd_name[0], cd_nelmts[0]);
1318                        for (int j = 0; j < cd_nelmts[0]; j++) {
1319                            log.trace("getMetadata(): filter[{}] element {} = {}", i, j, cd_values[j]);
1320                        }
1321                    }
1322                    catch (Throwable err) {
1323                        filters += "ERROR";
1324                        continue;
1325                    }
1326
1327                    if (filter == HDF5Constants.H5Z_FILTER_NONE) {
1328                        filters += "NONE";
1329                    }
1330                    else if (filter == HDF5Constants.H5Z_FILTER_DEFLATE) {
1331                        filters += "GZIP";
1332                        compression += compression_gzip_txt + cd_values[0];
1333                        k++;
1334                    }
1335                    else if (filter == HDF5Constants.H5Z_FILTER_FLETCHER32) {
1336                        filters += "Error detection filter";
1337                    }
1338                    else if (filter == HDF5Constants.H5Z_FILTER_SHUFFLE) {
1339                        filters += "SHUFFLE: Nbytes = " + cd_values[0];
1340                    }
1341                    else if (filter == HDF5Constants.H5Z_FILTER_NBIT) {
1342                        filters += "NBIT";
1343                    }
1344                    else if (filter == HDF5Constants.H5Z_FILTER_SCALEOFFSET) {
1345                        filters += "SCALEOFFSET: MIN BITS = " + cd_values[0];
1346                    }
1347                    else if (filter == HDF5Constants.H5Z_FILTER_SZIP) {
1348                        filters += "SZIP";
1349                        compression += "SZIP: Pixels per block = " + cd_values[1];
1350                        k++;
1351                        int flag = -1;
1352                        try {
1353                            flag = H5.H5Zget_filter_info(filter);
1354                        }
1355                        catch (Exception ex) {
1356                            log.debug("getMetadata(): H5Zget_filter_info failure: ", ex);
1357                            flag = -1;
1358                        }
1359                        if (flag == HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) {
1360                            compression += ": H5Z_FILTER_CONFIG_DECODE_ENABLED";
1361                        }
1362                        else if ((flag == HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED)
1363                                || (flag >= (HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED + HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED))) {
1364                            compression += ": H5Z_FILTER_CONFIG_ENCODE_ENABLED";
1365                        }
1366                    }
1367                    else {
1368                        filters += "USERDEFINED " + cd_name[0] + "(" + filter + "): ";
1369                        for (int j = 0; j < cd_nelmts[0]; j++) {
1370                            if (j > 0)
1371                                filters += ", ";
1372                            filters += cd_values[j];
1373                        }
1374                        log.debug("getMetadata(): filter[{}] is user defined compression", i);
1375                    }
1376                } // for (int i=0; i<nfilt; i++)
1377
1378                if (compression.length() == 0) {
1379                    compression = "NONE";
1380                }
1381                log.trace("getMetadata(): filter compression={}", compression);
1382
1383                if (filters.length() == 0) {
1384                    filters = "NONE";
1385                }
1386                log.trace("getMetadata(): filter information={}", filters);
1387
1388                storage = "SIZE: " + storage_size;
1389                try {
1390                    int[] at = { 0 };
1391                    H5.H5Pget_alloc_time(pcid, at);
1392                    storage += ", allocation time: ";
1393                    if (at[0] == HDF5Constants.H5D_ALLOC_TIME_EARLY) {
1394                        storage += "Early";
1395                    }
1396                    else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_INCR) {
1397                        storage += "Incremental";
1398                    }
1399                    else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_LATE) {
1400                        storage += "Late";
1401                    }
1402                }
1403                catch (Exception ex) {
1404                    log.debug("getMetadata(): Storage allocation time:", ex);
1405                }
1406                if (storage.length() == 0) {
1407                    storage = "NONE";
1408                }
1409                log.trace("getMetadata(): storage={}", storage);
1410            }
1411            finally {
1412                try {
1413                    H5.H5Pclose(pcid);
1414                }
1415                catch (Exception ex) {
1416                    log.debug("getMetadata(): H5Pclose(pcid {}) failure: ", pcid, ex);
1417                }
1418                close(did);
1419            }
1420        }
1421
1422        log.trace("getMetadata(): finish");
1423        return attributeList;
1424    }
1425
1426    /*
1427     * (non-Javadoc)
1428     *
1429     * @see hdf.object.DataFormat#writeMetadata(java.lang.Object)
1430     */
1431    public void writeMetadata(Object info) throws Exception {
1432        log.trace("writeMetadata(): start");
1433
1434        // only attribute metadata is supported.
1435        if (!(info instanceof Attribute)) {
1436            log.debug("writeMetadata(): data not instanceof Attribute");
1437            log.trace("writeMetadata(): finish");
1438            return;
1439        }
1440
1441        boolean attrExisted = false;
1442        Attribute attr = (Attribute) info;
1443        log.trace("writeMetadata(): {}", attr.getName());
1444
1445        if (attributeList == null) {
1446            this.getMetadata();
1447        }
1448
1449        if (attributeList != null)
1450            attrExisted = attributeList.contains(attr);
1451
1452        getFileFormat().writeAttribute(this, attr, attrExisted);
1453        // add the new attribute into attribute list
1454        if (!attrExisted) {
1455            attributeList.add(attr);
1456            nAttributes = attributeList.size();
1457        }
1458
1459        log.trace("writeMetadata(): finish");
1460    }
1461
1462    /*
1463     * (non-Javadoc)
1464     *
1465     * @see hdf.object.DataFormat#removeMetadata(java.lang.Object)
1466     */
1467    public void removeMetadata(Object info) throws HDF5Exception {
1468        log.trace("removeMetadata(): start");
1469
1470        // only attribute metadata is supported.
1471        if (!(info instanceof Attribute)) {
1472            log.debug("removeMetadata(): data not instanceof Attribute");
1473            log.trace("removeMetadata(): finish");
1474            return;
1475        }
1476
1477        Attribute attr = (Attribute) info;
1478        log.trace("removeMetadata(): {}", attr.getName());
1479        int did = open();
1480        if (did >= 0) {
1481            try {
1482                H5.H5Adelete(did, attr.getName());
1483                List<Attribute> attrList = getMetadata();
1484                attrList.remove(attr);
1485                nAttributes = attrList.size();
1486            }
1487            finally {
1488                close(did);
1489            }
1490        }
1491
1492        log.trace("removeMetadata(): finish");
1493    }
1494
1495    /*
1496     * (non-Javadoc)
1497     *
1498     * @see hdf.object.DataFormat#updateMetadata(java.lang.Object)
1499     */
1500    public void updateMetadata(Object info) throws HDF5Exception {
1501        log.trace("updateMetadata(): start");
1502
1503        // only attribute metadata is supported.
1504        if (!(info instanceof Attribute)) {
1505            log.debug("updateMetadata(): data not instanceof Attribute");
1506            log.trace("updateMetadata(): finish");
1507            return;
1508        }
1509
1510        nAttributes = -1;
1511
1512        log.trace("updateMetadata(): finish");
1513    }
1514
1515    /*
1516     * (non-Javadoc)
1517     *
1518     * @see hdf.object.HObject#setName(java.lang.String)
1519     */
1520    @Override
1521    public void setName(String newName) throws Exception {
1522        H5File.renameObject(this, newName);
1523        super.setName(newName);
1524    }
1525
1526    /**
1527     * Resets selection of dataspace
1528     */
1529    private void resetSelection() {
1530        log.trace("resetSelection(): start");
1531
1532        for (int i = 0; i < rank; i++) {
1533            startDims[i] = 0;
1534            selectedDims[i] = 1;
1535            if (selectedStride != null) {
1536                selectedStride[i] = 1;
1537            }
1538        }
1539
1540        if (rank == 1) {
1541            selectedIndex[0] = 0;
1542            selectedDims[0] = dims[0];
1543        }
1544        else if (rank == 2) {
1545            selectedIndex[0] = 0;
1546            selectedIndex[1] = 1;
1547            selectedDims[0] = dims[0];
1548            selectedDims[1] = dims[1];
1549        }
1550        else if (rank > 2) {
1551            // selectedIndex[0] = rank - 2; // columns
1552            // selectedIndex[1] = rank - 1; // rows
1553            // selectedIndex[2] = rank - 3;
1554            selectedIndex[0] = 0; // width, the fastest dimension
1555            selectedIndex[1] = 1; // height
1556            selectedIndex[2] = 2; // frames
1557            // selectedDims[rank - 1] = dims[rank - 1];
1558            // selectedDims[rank - 2] = dims[rank - 2];
1559            selectedDims[selectedIndex[0]] = dims[selectedIndex[0]];
1560            selectedDims[selectedIndex[1]] = dims[selectedIndex[1]];
1561        }
1562
1563        isDataLoaded = false;
1564        setMemberSelection(true);
1565        log.trace("resetSelection(): finish");
1566    }
1567
1568    /**
1569     * @deprecated Not for public use in the future. <br>
1570     *             Using
1571     *             {@link #create(String, Group, long[], long[], long[], int, String[], Datatype[], int[], long[][], Object)}
1572     *
1573     * @param name
1574     *            the name of the dataset to create.
1575     * @param pgroup
1576     *            parent group where the new dataset is created.
1577     * @param dims
1578     *            the dimension size of the dataset.
1579     * @param memberNames
1580     *            the names of compound datatype
1581     * @param memberDatatypes
1582     *            the datatypes of the compound datatype
1583     * @param memberSizes
1584     *            the dim sizes of the members
1585     * @param data
1586     *            list of data arrays written to the new dataset, null if no data is written to the new dataset.
1587     *
1588     * @return the new compound dataset if successful; otherwise returns null.
1589     *
1590     * @throws Exception if there is a failure.
1591     */
1592    @Deprecated
1593    public static Dataset create(String name, Group pgroup, long[] dims, String[] memberNames,
1594            Datatype[] memberDatatypes, int[] memberSizes, Object data) throws Exception {
1595        if ((pgroup == null) || (name == null) || (dims == null) || (memberNames == null) || (memberDatatypes == null)
1596                || (memberSizes == null)) {
1597            return null;
1598        }
1599
1600        int nMembers = memberNames.length;
1601        int memberRanks[] = new int[nMembers];
1602        long memberDims[][] = new long[nMembers][1];
1603        for (int i = 0; i < nMembers; i++) {
1604            memberRanks[i] = 1;
1605            memberDims[i][0] = memberSizes[i];
1606        }
1607
1608        return H5CompoundDS.create(name, pgroup, dims, memberNames, memberDatatypes, memberRanks, memberDims, data);
1609    }
1610
1611    /**
1612     * @deprecated Not for public use in the future. <br>
1613     *             Using
1614     *             {@link #create(String, Group, long[], long[], long[], int, String[], Datatype[], int[], long[][], Object)}
1615     *
1616     * @param name
1617     *            the name of the dataset to create.
1618     * @param pgroup
1619     *            parent group where the new dataset is created.
1620     * @param dims
1621     *            the dimension size of the dataset.
1622     * @param memberNames
1623     *            the names of compound datatype
1624     * @param memberDatatypes
1625     *            the datatypes of the compound datatype
1626     * @param memberRanks
1627     *            the ranks of the members
1628     * @param memberDims
1629     *            the dim sizes of the members
1630     * @param data
1631     *            list of data arrays written to the new dataset, null if no data is written to the new dataset.
1632     *
1633     * @return the new compound dataset if successful; otherwise returns null.
1634     *
1635     * @throws Exception if the dataset can not be created.
1636     */
1637    @Deprecated
1638    public static Dataset create(String name, Group pgroup, long[] dims, String[] memberNames,
1639            Datatype[] memberDatatypes, int[] memberRanks, long[][] memberDims, Object data) throws Exception {
1640        return H5CompoundDS.create(name, pgroup, dims, null, null, -1, memberNames, memberDatatypes, memberRanks,
1641                memberDims, data);
1642    }
1643
1644    /**
1645     * Creates a simple compound dataset in a file with/without chunking and compression.
1646     * <p>
1647     * This function provides an easy way to create a simple compound dataset in file by hiding tedious details of
1648     * creating a compound dataset from users.
1649     * <p>
1650     * This function calls H5.H5Dcreate() to create a simple compound dataset in file. Nested compound dataset is not
1651     * supported. The required information to create a compound dataset includes the name, the parent group and data
1652     * space of the dataset, the names, datatypes and data spaces of the compound fields. Other information such as
1653     * chunks, compression and the data buffer is optional.
1654     * <p>
1655     * The following example shows how to use this function to create a compound dataset in file.
1656     *
1657     * <pre>
1658     * H5File file = null;
1659     * String message = &quot;&quot;;
1660     * Group pgroup = null;
1661     * int[] DATA_INT = new int[DIM_SIZE];
1662     * float[] DATA_FLOAT = new float[DIM_SIZE];
1663     * String[] DATA_STR = new String[DIM_SIZE];
1664     * long[] DIMs = { 50, 10 };
1665     * long[] CHUNKs = { 25, 5 };
1666     *
1667     * try {
1668     *     file = (H5File) H5FILE.open(fname, H5File.CREATE);
1669     *     file.open();
1670     *     pgroup = (Group) file.get(&quot;/&quot;);
1671     * }
1672     * catch (Exception ex) {
1673     * }
1674     *
1675     * Vector data = new Vector();
1676     * data.add(0, DATA_INT);
1677     * data.add(1, DATA_FLOAT);
1678     * data.add(2, DATA_STR);
1679     *
1680     * // create groups
1681     * Datatype[] mdtypes = new H5Datatype[3];
1682     * String[] mnames = { &quot;int&quot;, &quot;float&quot;, &quot;string&quot; };
1683     * Dataset dset = null;
1684     * try {
1685     *     mdtypes[0] = new H5Datatype(Datatype.CLASS_INTEGER, 4, -1, -1);
1686     *     mdtypes[1] = new H5Datatype(Datatype.CLASS_FLOAT, 4, -1, -1);
1687     *     mdtypes[2] = new H5Datatype(Datatype.CLASS_STRING, STR_LEN, -1, -1);
1688     *     dset = file.createCompoundDS(&quot;/CompoundDS&quot;, pgroup, DIMs, null, CHUNKs, 9, mnames, mdtypes, null, data);
1689     * }
1690     * catch (Exception ex) {
1691     *     failed(message, ex, file);
1692     *     return 1;
1693     * }
1694     * </pre>
1695     *
1696     * @param name
1697     *            the name of the dataset to create.
1698     * @param pgroup
1699     *            parent group where the new dataset is created.
1700     * @param dims
1701     *            the dimension size of the dataset.
1702     * @param maxdims
1703     *            the max dimension size of the dataset. maxdims is set to dims if maxdims = null.
1704     * @param chunks
1705     *            the chunk size of the dataset. No chunking if chunk = null.
1706     * @param gzip
1707     *            GZIP compression level (1 to 9). 0 or negative values if no compression.
1708     * @param memberNames
1709     *            the names of compound datatype
1710     * @param memberDatatypes
1711     *            the datatypes of the compound datatype
1712     * @param memberRanks
1713     *            the ranks of the members
1714     * @param memberDims
1715     *            the dim sizes of the members
1716     * @param data
1717     *            list of data arrays written to the new dataset, null if no data is written to the new dataset.
1718     *
1719     * @return the new compound dataset if successful; otherwise returns null.
1720     *
1721     * @throws Exception if there is a failure.
1722     */
1723    public static Dataset create(String name, Group pgroup, long[] dims, long[] maxdims, long[] chunks, int gzip,
1724            String[] memberNames, Datatype[] memberDatatypes, int[] memberRanks, long[][] memberDims, Object data)
1725            throws Exception {
1726        log.trace("create(): start");
1727
1728        H5CompoundDS dataset = null;
1729        String fullPath = null;
1730        int did = -1, sid = -1, tid = -1, plist = -1;
1731
1732        log.trace("H5CompoundDS create start");
1733        if ((pgroup == null) || (name == null) || (dims == null) || ((gzip > 0) && (chunks == null))
1734                || (memberNames == null) || (memberDatatypes == null)
1735                || (memberRanks == null) || (memberDims == null)) {
1736            log.debug("create(): one or more parameters are null");
1737            log.trace("create(): finish");
1738            return null;
1739        }
1740
1741        H5File file = (H5File) pgroup.getFileFormat();
1742        if (file == null) {
1743            log.debug("create(): parent group FileFormat is null");
1744            log.trace("create(): finish");
1745            return null;
1746        }
1747
1748        String path = HObject.separator;
1749        if (!pgroup.isRoot()) {
1750            path = pgroup.getPath() + pgroup.getName() + HObject.separator;
1751            if (name.endsWith("/")) {
1752                name = name.substring(0, name.length() - 1);
1753            }
1754            int idx = name.lastIndexOf("/");
1755            if (idx >= 0) {
1756                name = name.substring(idx + 1);
1757            }
1758        }
1759
1760        fullPath = path + name;
1761
1762        int typeSize = 0;
1763        int nMembers = memberNames.length;
1764        int[] mTypes = new int[nMembers];
1765        int memberSize = 1;
1766        for (int i = 0; i < nMembers; i++) {
1767            memberSize = 1;
1768            for (int j = 0; j < memberRanks[i]; j++) {
1769                memberSize *= memberDims[i][j];
1770            }
1771
1772            mTypes[i] = -1;
1773            // the member is an array
1774            if ((memberSize > 1) && (memberDatatypes[i].getDatatypeClass() != Datatype.CLASS_STRING)) {
1775                int tmptid = -1;
1776                if ((tmptid = memberDatatypes[i].toNative()) >= 0) {
1777                    try {
1778                        mTypes[i] = H5.H5Tarray_create(tmptid, memberRanks[i], memberDims[i]);
1779                    }
1780                    finally {
1781                        try {H5.H5Tclose(tmptid);}
1782                        catch (Exception ex) {log.debug("create(): H5Tclose(tmptid {}) failure: ", tmptid, ex);}
1783                    }
1784                }
1785            }
1786            else {
1787                mTypes[i] = memberDatatypes[i].toNative();
1788            }
1789            try {
1790                typeSize += H5.H5Tget_size(mTypes[i]);
1791            }
1792            catch (Exception ex) {
1793                log.debug("create(): array create H5Tget_size:", ex);
1794
1795                while (i > 0) {
1796                    try {H5.H5Tclose(mTypes[i]);}
1797                    catch (HDF5Exception ex2) {log.debug("create(): H5Tclose(mTypes[{}] {}) failure: ", i, mTypes[i], ex2);}
1798                    i--;
1799                }
1800                throw ex;
1801            }
1802        } // for (int i = 0; i < nMembers; i++) {
1803
1804        // setup chunking and compression
1805        boolean isExtentable = false;
1806        if (maxdims != null) {
1807            for (int i = 0; i < maxdims.length; i++) {
1808                if (maxdims[i] == 0) {
1809                    maxdims[i] = dims[i];
1810                }
1811                else if (maxdims[i] < 0) {
1812                    maxdims[i] = HDF5Constants.H5S_UNLIMITED;
1813                }
1814
1815                if (maxdims[i] != dims[i]) {
1816                    isExtentable = true;
1817                }
1818            }
1819        }
1820
1821        // HDF5 requires you to use chunking in order to define extendible
1822        // datasets. Chunking makes it possible to extend datasets efficiently,
1823        // without having to reorganize storage excessively. Using default size
1824        // of 64x...which has good performance
1825        if ((chunks == null) && isExtentable) {
1826            chunks = new long[dims.length];
1827            for (int i = 0; i < dims.length; i++)
1828                chunks[i] = Math.min(dims[i], 64);
1829        }
1830
1831        // prepare the dataspace and datatype
1832        int rank = dims.length;
1833
1834        try {
1835            sid = H5.H5Screate_simple(rank, dims, maxdims);
1836
1837            // figure out creation properties
1838            plist = HDF5Constants.H5P_DEFAULT;
1839
1840            tid = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, typeSize);
1841            int offset = 0;
1842            for (int i = 0; i < nMembers; i++) {
1843                H5.H5Tinsert(tid, memberNames[i], offset, mTypes[i]);
1844                offset += H5.H5Tget_size(mTypes[i]);
1845            }
1846
1847            if (chunks != null) {
1848                plist = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
1849
1850                H5.H5Pset_layout(plist, HDF5Constants.H5D_CHUNKED);
1851                H5.H5Pset_chunk(plist, rank, chunks);
1852
1853                // compression requires chunking
1854                if (gzip > 0) {
1855                    H5.H5Pset_deflate(plist, gzip);
1856                }
1857            }
1858
1859            int fid = file.getFID();
1860
1861            log.trace("create(): create dataset");
1862            did = H5.H5Dcreate(fid, fullPath, tid, sid, HDF5Constants.H5P_DEFAULT, plist, HDF5Constants.H5P_DEFAULT);
1863            log.trace("create(): new H5CompoundDS");
1864            dataset = new H5CompoundDS(file, name, path);
1865        }
1866        finally {
1867            try {
1868                H5.H5Pclose(plist);
1869            }
1870            catch (HDF5Exception ex) {
1871                log.debug("create(): H5Pclose(plist {}) failure: ", plist, ex);
1872            }
1873            try {
1874                H5.H5Sclose(sid);
1875            }
1876            catch (HDF5Exception ex) {
1877                log.debug("create(): H5Sclose(sid {}) failure: ", sid, ex);
1878            }
1879            try {
1880                H5.H5Tclose(tid);
1881            }
1882            catch (HDF5Exception ex) {
1883                log.debug("create(): H5Tclose(tid {}) failure: ", tid, ex);
1884            }
1885            try {
1886                H5.H5Dclose(did);
1887            }
1888            catch (HDF5Exception ex) {
1889                log.debug("create(): H5Dclose(did {}) failure: ", did, ex);
1890            }
1891
1892            for (int i = 0; i < nMembers; i++) {
1893                try {
1894                    H5.H5Tclose(mTypes[i]);
1895                }
1896                catch (HDF5Exception ex) {
1897                    log.debug("create(): H5Tclose(mTypes[{}] {}) failure: ", i, mTypes[i], ex);
1898                }
1899            }
1900        }
1901
1902        if (dataset != null) {
1903            pgroup.addToMemberList(dataset);
1904            if (data != null) {
1905                dataset.init();
1906                long selected[] = dataset.getSelectedDims();
1907                for (int i = 0; i < rank; i++) {
1908                    selected[i] = dims[i];
1909                }
1910                dataset.write(data);
1911            }
1912        }
1913
1914        log.trace("create(): finish");
1915        return dataset;
1916    }
1917
1918    /**
1919     * Extracts compound information into flat structure.
1920     * <p>
1921     * For example, compound datatype "nest" has {nest1{a, b, c}, d, e} then extractCompoundInfo() will put the names of
1922     * nested compound fields into a flat list as
1923     *
1924     * <pre>
1925     * nest.nest1.a
1926     * nest.nest1.b
1927     * nest.nest1.c
1928     * nest.d
1929     * nest.e
1930     * </pre>
1931     *
1932     * @param tid   the identifier of the compound datatype
1933     * @param name  the name of the compound datatype
1934     * @param names  the list to store the member names of the compound datatype
1935     * @param flatTypeList2  the list to store the nested member names of the compound datatype
1936     */
1937    private void extractCompoundInfo(int tid, String name, List<String> names, List<Integer> flatTypeList2) {
1938        log.trace("extractCompoundInfo(): start: tid={}, name={}", tid, name);
1939
1940        int nMembers = 0, mclass = -1, mtype = -1;
1941        String mname = null;
1942
1943        try {
1944            nMembers = H5.H5Tget_nmembers(tid);
1945        }
1946        catch (Exception ex) {
1947            log.debug("extractCompoundInfo(): H5Tget_nmembers(tid {}) failure", tid, ex);
1948            nMembers = 0;
1949        }
1950        log.trace("extractCompoundInfo(): nMembers={}", nMembers);
1951
1952        if (nMembers <= 0) {
1953            log.debug("extractCompoundInfo(): datatype has no members");
1954            log.trace("extractCompoundInfo(): finish");
1955            return;
1956        }
1957
1958        int tmptid = -1;
1959        for (int i = 0; i < nMembers; i++) {
1960            log.trace("extractCompoundInfo(): nMembers[{}]", i);
1961            try {
1962                mtype = H5.H5Tget_member_type(tid, i);
1963            }
1964            catch (Exception ex) {
1965                log.debug("extractCompoundInfo(): continue after H5Tget_member_type[{}] failure: ", i, ex);
1966                continue;
1967            }
1968
1969            try {
1970                tmptid = mtype;
1971                mtype = H5.H5Tget_native_type(tmptid);
1972            }
1973            catch (HDF5Exception ex) {
1974                log.debug("extractCompoundInfo(): continue after H5Tget_native_type[{}] failure: ", i, ex);
1975                continue;
1976            }
1977            finally {
1978                try {
1979                    H5.H5Tclose(tmptid);
1980                }
1981                catch (HDF5Exception ex) {
1982                    log.debug("extractCompoundInfo(): H5Tclose(tmptid {}) failure: ", tmptid, ex);
1983                }
1984            }
1985
1986            try {
1987                mclass = H5.H5Tget_class(mtype);
1988            }
1989            catch (HDF5Exception ex) {
1990                log.debug("extractCompoundInfo(): continue after H5Tget_class[{}] failure: ", i, ex);
1991                continue;
1992            }
1993
1994            if (names != null) {
1995                mname = name + H5.H5Tget_member_name(tid, i);
1996                log.trace("extractCompoundInfo():[{}] mname={}, name={}", i, mname, name);
1997            }
1998
1999            if (mclass == HDF5Constants.H5T_COMPOUND) {
2000                extractCompoundInfo(mtype, mname + CompoundDS.separator, names, flatTypeList2);
2001                log.debug("extractCompoundInfo(): continue after recursive H5T_COMPOUND[{}]:", i);
2002                continue;
2003            }
2004            else if (mclass == HDF5Constants.H5T_ARRAY) {
2005                try {
2006                    tmptid = H5.H5Tget_super(mtype);
2007                    int tmpclass = H5.H5Tget_class(tmptid);
2008
2009                    // cannot deal with ARRAY of ARRAY, support only ARRAY of atomic types
2010                    if ((tmpclass == HDF5Constants.H5T_ARRAY)) {
2011                        log.debug("extractCompoundInfo():[{}] unsupported ARRAY of ARRAY", i);
2012                        continue;
2013                    }
2014                }
2015                catch (Exception ex) {
2016                    log.debug("extractCompoundInfo():[{}] continue after H5T_ARRAY id or class failure: ", i, ex);
2017                    continue;
2018                }
2019                finally {
2020                    try {
2021                        H5.H5Tclose(tmptid);
2022                    }
2023                    catch (Exception ex) {
2024                        log.debug("extractCompoundInfo():[{}] H5Tclose(tmptid {}) failure: ", i, tmptid, ex);
2025                    }
2026                }
2027            }
2028
2029            if (names != null) {
2030                names.add(mname);
2031            }
2032            flatTypeList2.add(new Integer(mtype));
2033
2034        } // for (int i=0; i<nMembers; i++)
2035        log.trace("extractCompoundInfo(): finish");
2036    } // extractNestedCompoundInfo
2037
2038    /*
2039     * (non-Javadoc)
2040     *
2041     * @see hdf.object.Dataset#isString(int)
2042     */
2043    @Override
2044    public boolean isString(int tid) {
2045        boolean b = false;
2046        try {
2047            b = (HDF5Constants.H5T_STRING == H5.H5Tget_class(tid));
2048        }
2049        catch (Exception ex) {
2050            b = false;
2051        }
2052
2053        return b;
2054    }
2055
2056    /*
2057     * (non-Javadoc)
2058     *
2059     * @see hdf.object.Dataset#getSize(int)
2060     */
2061    @Override
2062    public int getSize(int tid) {
2063        int tsize = -1;
2064
2065        try {
2066            tsize = H5.H5Tget_size(tid);
2067        }
2068        catch (Exception ex) {
2069            tsize = -1;
2070        }
2071
2072        return tsize;
2073    }
2074
2075    /**
2076     * Creates a datatype of a compound with one field.
2077     * <p>
2078     * This function is needed to read/write data field by field.
2079     *
2080     * @param atom_tid
2081     *            The datatype identifier of the compound to create
2082     * @param member_name
2083     *            The name of the datatype
2084     * @param compInfo
2085     *            compInfo[0]--IN: class of member datatype; compInfo[1]--IN: size of member datatype; compInfo[2]--OUT:
2086     *            non-zero if the base type of the compound field is unsigned; zero, otherwise.
2087     *
2088     * @return the identifier of the compound datatype.
2089     *
2090     * @throws HDF5Exception
2091     *             If there is an error at the HDF5 library level.
2092     */
2093    private final int createCompoundFieldType(int atom_tid, String member_name, int[] compInfo) throws HDF5Exception {
2094        log.trace("createCompoundFieldType start");
2095
2096        int nested_tid = -1;
2097
2098        int arrayType = -1;
2099        int baseType = -1;
2100        int tmp_tid1 = -1, tmp_tid4 = -1;
2101
2102        try {
2103            int member_class = compInfo[0];
2104            int member_size = compInfo[1];
2105
2106            log.trace("createCompoundFieldType(): {} Member is class {} of size={} with baseType={}", member_name, member_class, member_size,
2107                    baseType);
2108            if (member_class == HDF5Constants.H5T_ARRAY) {
2109                int mn = H5.H5Tget_array_ndims(atom_tid);
2110                long[] marray = new long[mn];
2111                H5.H5Tget_array_dims(atom_tid, marray);
2112                baseType = H5.H5Tget_super(atom_tid);
2113                tmp_tid4 = H5.H5Tget_native_type(baseType);
2114                arrayType = H5.H5Tarray_create(tmp_tid4, mn, marray);
2115                log.trace("createCompoundFieldType(): H5T_ARRAY {} Member is class {} of size={} with baseType={}", member_name, member_class,
2116                        member_size, baseType);
2117            }
2118
2119            try {
2120                if (baseType < 0) {
2121                    if (H5Datatype.isUnsigned(atom_tid)) {
2122                        compInfo[2] = 1;
2123                    }
2124                }
2125                else {
2126                    if (H5Datatype.isUnsigned(baseType)) {
2127                        compInfo[2] = 1;
2128                    }
2129                }
2130            }
2131            catch (Exception ex2) {
2132                log.debug("createCompoundFieldType(): baseType isUnsigned: ", ex2);
2133            }
2134            try {
2135                H5.H5Tclose(baseType);
2136                baseType = -1;
2137            }
2138            catch (HDF5Exception ex4) {
2139                log.debug("createCompoundFieldType(): H5Tclose(baseType {}) failure: ", baseType, ex4);
2140            }
2141
2142            member_size = H5.H5Tget_size(atom_tid);
2143            log.trace("createCompoundFieldType(): member_size={}",member_size);
2144
2145            // construct nested compound structure with a single field
2146            String theName = member_name;
2147            if (arrayType < 0) {
2148                tmp_tid1 = H5.H5Tcopy(atom_tid);
2149            }
2150            else {
2151                tmp_tid1 = H5.H5Tcopy(arrayType);
2152            }
2153            try {
2154                H5.H5Tclose(arrayType);
2155                arrayType = -1;
2156            }
2157            catch (HDF5Exception ex4) {
2158                log.debug("createCompoundFieldType(): H5Tclose(arrayType {}) failure: ", arrayType, ex4);
2159            }
2160            int sep = member_name.lastIndexOf(CompoundDS.separator);
2161            log.trace("createCompoundFieldType(): sep={}", sep);
2162
2163            while (sep > 0) {
2164                theName = member_name.substring(sep + 1);
2165                log.trace("createCompoundFieldType(): sep={} with name={}", sep, theName);
2166                nested_tid = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, member_size);
2167                H5.H5Tinsert(nested_tid, theName, 0, tmp_tid1);
2168                try {
2169                    log.trace("createCompoundFieldType(sep): H5.H5Tclose:tmp_tid1={}",tmp_tid1);
2170                    H5.H5Tclose(tmp_tid1);
2171                }
2172                catch (Exception ex) {
2173                    log.debug("createCompoundFieldType(): H5Tclose(tmp_tid {}) failure: ", tmp_tid1, ex);
2174                }
2175                tmp_tid1 = nested_tid;
2176                member_name = member_name.substring(0, sep);
2177                sep = member_name.lastIndexOf(CompoundDS.separator);
2178            }
2179
2180            nested_tid = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, member_size);
2181
2182            H5.H5Tinsert(nested_tid, member_name, 0, tmp_tid1);
2183        }
2184        finally {
2185            try {
2186                log.trace("createCompoundFieldType(): finally H5.H5Tclose:tmp_tid1={}", tmp_tid1);
2187                H5.H5Tclose(tmp_tid1);
2188            }
2189            catch (HDF5Exception ex3) {
2190                log.debug("createCompoundFieldType(): H5Tclose(tmp_tid {}) failure: ", tmp_tid1, ex3);
2191            }
2192            try {
2193                log.trace("createCompoundFieldType(): finally H5.H5Tclose:tmp_tid4={}", tmp_tid4);
2194                H5.H5Tclose(tmp_tid4);
2195            }
2196            catch (HDF5Exception ex3) {
2197                log.debug("createCompoundFieldType(): H5Tclose(tmp_tid {}) failure: ", tmp_tid4, ex3);
2198            }
2199            try {
2200                log.trace("createCompoundFieldType(): finally H5.H5Tclose:baseType={}", baseType);
2201                H5.H5Tclose(baseType);
2202            }
2203            catch (HDF5Exception ex4) {
2204                log.debug("createCompoundFieldType(): H5Tclose(baseType {}) failure: ", baseType, ex4);
2205            }
2206            try {
2207                log.trace("createCompoundFieldType(): finally H5.H5Tclose:arrayType={}", arrayType);
2208                H5.H5Tclose(arrayType);
2209            }
2210            catch (HDF5Exception ex4) {
2211                log.debug("createCompoundFieldType(): H5Tclose(arrayType {}) failure: ", arrayType, ex4);
2212            }
2213        }
2214
2215        log.trace("createCompoundFieldType(): finish");
2216        return nested_tid;
2217    }
2218
2219    /**
2220     * Given an array of bytes representing a compound Datatype and a start
2221     * index and length, converts len number of bytes into the correct
2222     * Object type and returns it.
2223     *
2224     * @param data The byte array representing the data of the compound Datatype
2225     * @param data_type The type of data to convert the bytes to
2226     * @param start The start index of the bytes to get
2227     * @param len The number of bytes to convert
2228     * @return The converted type of the bytes
2229     */
2230    private Object convertCompoundByteMember(byte[] data, int data_type, long start, long len) {
2231        Object currentData = null;
2232
2233        try {
2234            int typeClass = H5.H5Tget_class(data_type);
2235
2236            if (typeClass == HDF5Constants.H5T_INTEGER) {
2237                int size = H5.H5Tget_size(data_type);
2238
2239                currentData = HDFNativeData.byteToInt((int) start, (int) (len / size), data);
2240            }
2241            else if (typeClass == HDF5Constants.H5T_FLOAT) {
2242                currentData = HDFNativeData.byteToDouble((int) start, 1, data);
2243            }
2244        }
2245        catch (Exception ex) {
2246            log.debug("convertCompoundByteMember(): conversion failure: ", ex);
2247        }
2248
2249        return currentData;
2250    }
2251}