001/*****************************************************************************
002 * Copyright by The HDF Group.                                               *
003 * Copyright by the Board of Trustees of the University of Illinois.         *
004 * All rights reserved.                                                      *
005 *                                                                           *
006 * This file is part of the HDF Java Products distribution.                  *
007 * The full copyright notice, including terms governing use, modification,   *
008 * and redistribution, is contained in the files COPYING and Copyright.html. *
009 * COPYING can be found at the root of the source code distribution tree.    *
010 * Or, see https://support.hdfgroup.org/products/licenses.html               *
011 * If you do not have access to either file, you may request a copy from     *
012 * help@hdfgroup.org.                                                        *
013 ****************************************************************************/
014
015package hdf.object.h5;
016
017import java.lang.reflect.Array;
018import java.math.BigDecimal;
019import java.math.BigInteger;
020import java.text.DecimalFormat;
021
022import java.util.ArrayList;
023import java.util.Arrays;
024import java.util.Collection;
025import java.util.HashMap;
026import java.util.Iterator;
027import java.util.List;
028import java.util.Map;
029import java.util.Vector;
030
031import org.slf4j.Logger;
032import org.slf4j.LoggerFactory;
033
034import hdf.hdf5lib.H5;
035import hdf.hdf5lib.HDF5Constants;
036import hdf.hdf5lib.HDFNativeData;
037import hdf.hdf5lib.exceptions.HDF5DataFiltersException;
038import hdf.hdf5lib.exceptions.HDF5Exception;
039
040import hdf.object.Attribute;
041import hdf.object.CompoundDS;
042import hdf.object.Dataset;
043import hdf.object.Datatype;
044import hdf.object.FileFormat;
045import hdf.object.Group;
046import hdf.object.HObject;
047import hdf.object.MetaDataContainer;
048import hdf.object.Utils;
049
050import hdf.object.h5.H5Datatype;
051import hdf.object.h5.H5ReferenceType;
052
053/**
054 * The H5CompoundAttr class defines an HDF5 attribute of compound datatypes.
055 *
056 * An attribute is a (name, value) pair of metadata attached to a primary data object such as a
057 * dataset, group or named datatype.
058 *
059 * Like a dataset, an attribute has a name, datatype and dataspace.
060 *
061 * A HDF5 compound datatype is similar to a struct in C or a common block in Fortran: it is a
062 * collection of one or more atomic types or small arrays of such types. Each member of a compound
063 * type has a name which is unique within that type, and a byte offset that determines the first
064 * byte (smallest byte address) of that member in a compound datum.
065 *
066 * For more information on HDF5 attributes and datatypes, read the <a href=
067 * "https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5
068 * User's Guide</a>.
069 *
070 * There are two basic types of compound attributes: simple compound data and nested compound data.
071 * Members of a simple compound attribute have atomic datatypes. Members of a nested compound attribute
072 * are compound or array of compound data.
073 *
074 * Since Java does not understand C structures, we cannot directly read/write compound data values
075 * as in the following C example.
076 *
077 * <pre>
078 * typedef struct s1_t {
079 *         int    a;
080 *         float  b;
081 *         double c;
082 *         } s1_t;
083 *     s1_t       s1[LENGTH];
084 *     ...
085 *     H5Dwrite(..., s1);
086 *     H5Dread(..., s1);
087 * </pre>
088 *
089 * Values of compound data fields are stored in java.util.Vector object. We read and write compound
090 * data by fields instead of compound structure. As for the example above, the java.util.Vector
091 * object has three elements: int[LENGTH], float[LENGTH] and double[LENGTH]. Since Java understands
092 * the primitive datatypes of int, float and double, we will be able to read/write the compound data
093 * by field.
094 *
095 * @version 1.0 6/15/2021
096 * @author Allen Byrne
097 */
098public class H5CompoundAttr extends CompoundDS implements H5Attribute
099{
100    private static final long serialVersionUID = 2072473407027648309L;
101
102    private static final Logger log = LoggerFactory.getLogger(H5CompoundAttr.class);
103
104    /** The HObject to which this NC2Attribute is attached, Attribute interface */
105    protected HObject         parentObject;
106
107    /** additional information and properties for the attribute, Attribute interface */
108    private transient Map<String, Object> properties;
109
110    /**
111     * Create an attribute with specified name, data type and dimension sizes.
112     *
113     * @param parentObj
114     *            the HObject to which this H5CompoundAttr is attached.
115     * @param attrName
116     *            the name of the attribute.
117     * @param attrType
118     *            the datatype of the attribute.
119     * @param attrDims
120     *            the dimension sizes of the attribute, null for scalar attribute
121     *
122     * @see hdf.object.Datatype
123     */
124    public H5CompoundAttr(HObject parentObj, String attrName, Datatype attrType, long[] attrDims) {
125        this(parentObj, attrName, attrType, attrDims, null);
126    }
127
128    /**
129     * Create an attribute with specific name and value.
130     *
131     * @param parentObj
132     *            the HObject to which this H5CompoundAttr is attached.
133     * @param attrName
134     *            the name of the attribute.
135     * @param attrType
136     *            the datatype of the attribute.
137     * @param attrDims
138     *            the dimension sizes of the attribute, null for scalar attribute
139     * @param attrValue
140     *            the value of the attribute, null if no value
141     *
142     * @see hdf.object.Datatype
143     */
144    @SuppressWarnings({ "rawtypes", "unchecked", "deprecation" })
145    public H5CompoundAttr(HObject parentObj, String attrName, Datatype attrType, long[] attrDims, Object attrValue) {
146        super((parentObj == null) ? null : parentObj.getFileFormat(), attrName,
147                (parentObj == null) ? null : parentObj.getFullName(), null);
148
149        log.trace("H5CompoundAttr: start {}", parentObj);
150        this.parentObject = parentObj;
151
152        datatype = attrType;
153
154        if (attrValue != null) {
155            data = attrValue;
156            originalBuf = attrValue;
157            isDataLoaded = true;
158        }
159        properties = new HashMap();
160
161        if (attrDims == null) {
162            rank = 1;
163            dims = new long[] { 1 };
164            isScalar = true;
165        }
166        else {
167            dims = attrDims;
168            rank = dims.length;
169            isScalar = false;
170        }
171
172        selectedDims = new long[rank];
173        startDims = new long[rank];
174        selectedStride = new long[rank];
175
176        numberOfMembers = 0;
177        memberNames = null;
178        isMemberSelected = null;
179        memberTypes = null;
180
181        log.trace("attrName={}, attrType={}, attrValue={}, rank={}",
182                attrName, attrType.getDescription(), data, rank);
183
184        resetSelection();
185    }
186
187    /*
188     * (non-Javadoc)
189     *
190     * @see hdf.object.HObject#open()
191     */
192    @Override
193    public long open() {
194        if (parentObject == null) {
195            log.debug("open(): attribute's parent object is null");
196            return HDF5Constants.H5I_INVALID_HID;
197        }
198
199        long aid = HDF5Constants.H5I_INVALID_HID;
200        long pObjID = HDF5Constants.H5I_INVALID_HID;
201
202        try {
203            pObjID = parentObject.open();
204            if (pObjID >= 0) {
205                if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5))) {
206                    log.trace("open(): FILE_TYPE_HDF5");
207                    if (H5.H5Aexists(pObjID, getName()))
208                        aid = H5.H5Aopen(pObjID, getName(), HDF5Constants.H5P_DEFAULT);
209                }
210            }
211
212            log.trace("open(): aid={}", aid);
213        }
214        catch (Exception ex) {
215            log.debug("open(): Failed to open attribute {}: ", getName(), ex);
216            aid = HDF5Constants.H5I_INVALID_HID;
217        }
218        finally {
219            parentObject.close(pObjID);
220        }
221
222        return aid;
223    }
224
225    /*
226     * (non-Javadoc)
227     *
228     * @see hdf.object.HObject#close(int)
229     */
230    @Override
231    public void close(long aid) {
232        if (aid >= 0) {
233            if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5))) {
234                log.trace("close(): FILE_TYPE_HDF5");
235                try {
236                    H5.H5Aclose(aid);
237                }
238                catch (HDF5Exception ex) {
239                    log.debug("close(): H5Aclose({}) failure: ", aid, ex);
240                }
241            }
242        }
243    }
244
245
246    /**
247     * Retrieves datatype and dataspace information from file and sets the attribute
248     * in memory.
249     *
250     * The init() is designed to support lazy operation in a attribute object. When a
251     * data object is retrieved from file, the datatype, dataspace and raw data are
252     * not loaded into memory. When it is asked to read the raw data from file,
253     * init() is first called to get the datatype and dataspace information, then
254     * load the raw data from file.
255     *
256     * init() is also used to reset the selection of a attribute (start, stride and
257     * count) to the default, which is the entire attribute for 1D or 2D datasets. In
258     * the following example, init() at step 1) retrieves datatype and dataspace
259     * information from file. getData() at step 3) reads only one data point. init()
260     * at step 4) resets the selection to the whole attribute. getData() at step 4)
261     * reads the values of whole attribute into memory.
262     *
263     * <pre>
264     * dset = (Dataset) file.get(NAME_DATASET);
265     *
266     * // 1) get datatype and dataspace information from file
267     * attr.init();
268     * rank = attr.getAttributeRank(); // rank = 2, a 2D attribute
269     * count = attr.getSelectedDims();
270     * start = attr.getStartDims();
271     * dims = attr.getAttributeDims();
272     *
273     * // 2) select only one data point
274     * for (int i = 0; i &lt; rank; i++) {
275     *     start[0] = 0;
276     *     count[i] = 1;
277     * }
278     *
279     * // 3) read one data point
280     * data = attr.getAttributeData();
281     *
282     * // 4) reset selection to the whole attribute
283     * attr.init();
284     *
285     * // 5) clean the memory data buffer
286     * attr.clearData();
287     *
288     * // 6) Read the whole attribute
289     * data = attr.getAttributeData();
290     * </pre>
291     */
292    @Override
293    public void init() {
294        if (inited) {
295            resetSelection();
296            log.trace("init(): H5CompoundAttr already inited");
297            return;
298        }
299
300        long aid = HDF5Constants.H5I_INVALID_HID;
301        long tid = HDF5Constants.H5I_INVALID_HID;
302        long sid = HDF5Constants.H5I_INVALID_HID;
303        int tclass = HDF5Constants.H5I_INVALID_HID;
304        flatNameList = new Vector<>();
305        flatTypeList = new Vector<>();
306        long[] memberTIDs = null;
307
308        log.trace("init(): FILE_TYPE_HDF5");
309        aid = open();
310        if (aid >= 0) {
311            try {
312                sid = H5.H5Aget_space(aid);
313                rank = H5.H5Sget_simple_extent_ndims(sid);
314                space_type = H5.H5Sget_simple_extent_type(sid);
315                tid = H5.H5Aget_type(aid);
316                tclass = H5.H5Tget_class(tid);
317                log.trace("init(): tid={} sid={} rank={} space_type={}", tid, sid, rank, space_type);
318
319                long tmptid = 0;
320
321                // Handle ARRAY and VLEN types by getting the base type
322                if (tclass == HDF5Constants.H5T_ARRAY || tclass == HDF5Constants.H5T_VLEN) {
323                    try {
324                        tmptid = tid;
325                        tid = H5.H5Tget_super(tmptid);
326                        log.trace("init(): H5T_ARRAY or H5T_VLEN class old={}, new={}", tmptid, tid);
327                    }
328                    catch (Exception ex) {
329                        log.debug("init(): H5T_ARRAY or H5T_VLEN H5Tget_super({}) failure: ", tmptid, ex);
330                        tid = -1;
331                    }
332                    finally {
333                        try {
334                            H5.H5Tclose(tmptid);
335                        }
336                        catch (HDF5Exception ex) {
337                            log.debug("init(): H5Tclose({}) failure: ", tmptid, ex);
338                        }
339                    }
340                }
341
342                if (tclass == HDF5Constants.H5T_COMPOUND) {
343                    // initialize member information
344                    H5Datatype.extractCompoundInfo((H5Datatype)datatype, "", flatNameList, flatTypeList);
345                    numberOfMembers = flatNameList.size();
346                    log.trace("init(): numberOfMembers={}", numberOfMembers);
347
348                    memberNames = new String[numberOfMembers];
349                    memberTIDs = new long[numberOfMembers];
350                    memberTypes = new Datatype[numberOfMembers];
351                    memberOrders = new int[numberOfMembers];
352                    isMemberSelected = new boolean[numberOfMembers];
353                    memberDims = new Object[numberOfMembers];
354
355                    for (int i = 0; i < numberOfMembers; i++) {
356                        isMemberSelected[i] = true;
357                        memberTIDs[i] = flatTypeList.get(i).createNative();
358
359                        try {
360                            memberTypes[i] = flatTypeList.get(i);
361                        }
362                        catch (Exception ex) {
363                            log.debug("init(): failed to create datatype for member[{}]: ", i, ex);
364                            memberTypes[i] = null;
365                        }
366
367                        memberNames[i] = flatNameList.get(i);
368                        memberOrders[i] = 1;
369                        memberDims[i] = null;
370                        log.trace("init()[{}]: memberNames[{}]={}, memberTIDs[{}]={}, memberTypes[{}]={}", i, i,
371                                memberNames[i], i, memberTIDs[i], i, memberTypes[i]);
372
373                        try {
374                            tclass = H5.H5Tget_class(memberTIDs[i]);
375                        }
376                        catch (HDF5Exception ex) {
377                            log.debug("init(): H5Tget_class({}) failure: ", memberTIDs[i], ex);
378                        }
379
380                        if (tclass == HDF5Constants.H5T_ARRAY) {
381                            int n = H5.H5Tget_array_ndims(memberTIDs[i]);
382                            long mdim[] = new long[n];
383                            H5.H5Tget_array_dims(memberTIDs[i], mdim);
384                            int idim[] = new int[n];
385                            for (int j = 0; j < n; j++)
386                                idim[j] = (int) mdim[j];
387                            memberDims[i] = idim;
388                            tmptid = H5.H5Tget_super(memberTIDs[i]);
389                            memberOrders[i] = (int) (H5.H5Tget_size(memberTIDs[i]) / H5.H5Tget_size(tmptid));
390                            try {
391                                H5.H5Tclose(tmptid);
392                            }
393                            catch (HDF5Exception ex) {
394                                log.debug("init(): memberTIDs[{}] H5Tclose(tmptid {}) failure: ", i, tmptid, ex);
395                            }
396                        }
397                    } // (int i=0; i<numberOfMembers; i++)
398                }
399
400                if (rank == 0) {
401                    // a scalar data point
402                    isScalar = true;
403                    rank = 1;
404                    dims = new long[] { 1 };
405                    log.trace("init(): rank is a scalar data point");
406                }
407                else {
408                    isScalar = false;
409                    dims = new long[rank];
410                    maxDims = new long[rank];
411                    H5.H5Sget_simple_extent_dims(sid, dims, maxDims);
412                    log.trace("init(): rank={}, dims={}, maxDims={}", rank, dims, maxDims);
413                }
414
415                inited = true;
416            }
417            catch (HDF5Exception ex) {
418                numberOfMembers = 0;
419                memberNames = null;
420                memberTypes = null;
421                memberOrders = null;
422                log.debug("init(): ", ex);
423            }
424            finally {
425                try {
426                    H5.H5Tclose(tid);
427                }
428                catch (HDF5Exception ex2) {
429                    log.debug("init(): H5Tclose(tid {}) failure: ", tid, ex2);
430                }
431                try {
432                    H5.H5Sclose(sid);
433                }
434                catch (HDF5Exception ex2) {
435                    log.debug("init(): H5Sclose(sid {}) failure: ", sid, ex2);
436                }
437
438                if (memberTIDs != null) {
439                    for (int i = 0; i < memberTIDs.length; i++) {
440                        try {
441                            H5.H5Tclose(memberTIDs[i]);
442                        }
443                        catch (Exception ex) {
444                            log.debug("init(): H5Tclose(memberTIDs[{}] {}) failure: ", i, memberTIDs[i], ex);
445                        }
446                    }
447                }
448            }
449
450            close(aid);
451
452            startDims = new long[rank];
453            selectedDims = new long[rank];
454
455            resetSelection();
456        }
457        else {
458            log.debug("init(): failed to open attribute");
459        }
460    }
461
462    /**
463     * Returns the datatype of the data object.
464     *
465     * @return the datatype of the data object.
466     */
467    @Override
468    public Datatype getDatatype() {
469        if (!inited)
470            init();
471
472        if (datatype == null) {
473            long aid = HDF5Constants.H5I_INVALID_HID;
474            long tid = HDF5Constants.H5I_INVALID_HID;
475
476            aid = open();
477            if (aid >= 0) {
478                try {
479                    tid = H5.H5Aget_type(aid);
480                    int nativeClass = H5.H5Tget_class(tid);
481                    if (nativeClass == HDF5Constants.H5T_REFERENCE) {
482                        long lsize = 1;
483                        long sid = H5.H5Aget_space(aid);
484                        int rank = H5.H5Sget_simple_extent_ndims(sid);
485                        if (rank > 0) {
486                            long dims[] = new long[rank];
487                            H5.H5Sget_simple_extent_dims(sid, dims, null);
488                            log.trace("getDatatype(): rank={}, dims={}", rank, dims);
489                            for (int j = 0; j < dims.length; j++) {
490                                lsize *= dims[j];
491                            }
492                        }
493                        datatype = new H5ReferenceType(getFileFormat(), lsize, tid);
494                    }
495                    else
496                        datatype = new H5Datatype(getFileFormat(), tid);
497                }
498                catch (Exception ex) {
499                    log.debug("getDatatype(): ", ex);
500                }
501                finally {
502                    try {
503                        H5.H5Tclose(tid);
504                    }
505                    catch (HDF5Exception ex) {
506                        log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex);
507                    }
508                    try {
509                        H5.H5Aclose(aid);
510                    }
511                    catch (HDF5Exception ex) {
512                        log.debug("getDatatype(): H5Aclose(aid {}) failure: ", aid, ex);
513                    }
514                }
515            }
516        }
517
518        return datatype;
519    }
520
521    /**
522     * Returns the data buffer of the attribute in memory.
523     *
524     * If data is already loaded into memory, returns the data; otherwise, calls
525     * read() to read data from file into a memory buffer and returns the memory
526     * buffer.
527     *
528     * The whole attribute is read into memory. Users can also select
529     * a subset from the whole data. Subsetting is done in an implicit way.
530     *
531     * <b>How to Select a Subset</b>
532     *
533     * A selection is specified by three arrays: start, stride and count.
534     * <ol>
535     * <li>start: offset of a selection
536     * <li>stride: determines how many elements to move in each dimension
537     * <li>count: number of elements to select in each dimension
538     * </ol>
539     * getStartDims(), getStride() and getSelectedDims() returns the start,
540     * stride and count arrays respectively. Applications can make a selection
541     * by changing the values of the arrays.
542     *
543     * The following example shows how to make a subset. In the example, the
544     * attribute is a 4-dimensional array of [200][100][50][10], i.e. dims[0]=200;
545     * dims[1]=100; dims[2]=50; dims[3]=10; <br>
546     * We want to select every other data point in dims[1] and dims[2]
547     *
548     * <pre>
549     * int rank = attribute.getRank(); // number of dimensions of the attribute
550     * long[] dims = attribute.getDims(); // the dimension sizes of the attribute
551     * long[] selected = attribute.getSelectedDims(); // the selected size of the attribute
552     * long[] start = attribute.getStartDims(); // the offset of the selection
553     * long[] stride = attribute.getStride(); // the stride of the attribute
554     * int[] selectedIndex = attribute.getSelectedIndex(); // the selected dimensions for display
555     *
556     * // select dim1 and dim2 as 2D data for display,and slice through dim0
557     * selectedIndex[0] = 1;
558     * selectedIndex[1] = 2;
559     * selectedIndex[2] = 0;
560     *
561     * // reset the selection arrays
562     * for (int i = 0; i &lt; rank; i++) {
563     *     start[i] = 0;
564     *     selected[i] = 1;
565     *     stride[i] = 1;
566     * }
567     *
568     * // set stride to 2 on dim1 and dim2 so that every other data point is
569     * // selected.
570     * stride[1] = 2;
571     * stride[2] = 2;
572     *
573     * // set the selection size of dim1 and dim2
574     * selected[1] = dims[1] / stride[1];
575     * selected[2] = dims[1] / stride[2];
576     *
577     * // when H5CompoundAttr.getData() is called, the selection above will be used since
578     * // the dimension arrays are passed by reference. Changes of these arrays
579     * // outside the attribute object directly change the values of these array
580     * // in the attribute object.
581     * </pre>
582     *
583     * For H5CompoundAttr, the memory data object is an java.util.List object. Each
584     * element of the list is a data array that corresponds to a compound field.
585     *
586     * For example, if compound attribute "comp" has the following nested
587     * structure, and member datatypes
588     *
589     * <pre>
590     * comp --&gt; m01 (int)
591     * comp --&gt; m02 (float)
592     * comp --&gt; nest1 --&gt; m11 (char)
593     * comp --&gt; nest1 --&gt; m12 (String)
594     * comp --&gt; nest1 --&gt; nest2 --&gt; m21 (long)
595     * comp --&gt; nest1 --&gt; nest2 --&gt; m22 (double)
596     * </pre>
597     *
598     * getData() returns a list of six arrays: {int[], float[], char[],
599     * String[], long[] and double[]}.
600     *
601     * @return the memory buffer of the attribute.
602     *
603     * @throws Exception if object can not be read
604     * @throws OutOfMemoryError if memory is exhausted
605     */
606    @Override
607    public Object getData() throws Exception, OutOfMemoryError {
608        log.trace("getData(): isDataLoaded={}", isDataLoaded);
609        if (!isDataLoaded)
610            data = read(); // load the data, attributes read all data
611
612        nPoints = 1;
613        log.trace("getData(): selectedDims length={}", selectedDims.length);
614        int point_len = selectedDims.length;
615        //Partial data for 3 or more dimensions
616        if (rank > 2)
617            point_len = 3;
618        for (int j = 0; j < point_len; j++) {
619            log.trace("getData(): selectedDims[{}]={}", j, selectedDims[j]);
620            nPoints *= selectedDims[j];
621        }
622        log.trace("getData: read {}", nPoints);
623
624        // apply the selection for 3 or more dimensions
625        // selection only expects to use 3 selectedDims
626        //     where selectedIndex[0] is the row dimension
627        //     where selectedIndex[1] is the col dimension
628        //     where selectedIndex[2] is the frame dimension
629        if (rank > 2)
630            data = AttributeSelection();
631
632        return data;
633    }
634
635    /*
636     * (non-Javadoc)
637     *
638     * @see hdf.object.Attribute#readBytes()
639     */
640    @Override
641    public byte[] readBytes() throws HDF5Exception {
642        byte[] theData = null;
643
644        if (!isInited())
645            init();
646
647        long aid = open();
648        if (aid >= 0) {
649            long tid = HDF5Constants.H5I_INVALID_HID;
650
651            try {
652                long[] lsize = { 1 };
653                for (int j = 0; j < selectedDims.length; j++)
654                    lsize[0] *= selectedDims[j];
655
656                tid = H5.H5Aget_type(aid);
657                long size = H5.H5Tget_size(tid) * lsize[0];
658                log.trace("readBytes(): size={}", size);
659
660                if (size < Integer.MIN_VALUE || size > Integer.MAX_VALUE)
661                    throw new Exception("Invalid int size");
662
663                theData = new byte[(int)size];
664
665                log.trace("readBytes(): read attribute id {} of size={}", tid, lsize);
666                H5.H5Aread(aid, tid, theData);
667            }
668            catch (Exception ex) {
669                log.debug("readBytes(): failed to read data: ", ex);
670            }
671            finally {
672                try {
673                    H5.H5Tclose(tid);
674                }
675                catch (HDF5Exception ex2) {
676                    log.debug("readBytes(): H5Tclose(tid {}) failure: ", tid, ex2);
677                }
678                close(aid);
679            }
680        }
681
682        return theData;
683    }
684
685    /**
686     * Reads the data from file.
687     *
688     * read() reads the data from file to a memory buffer and returns the memory
689     * buffer. The attribute object does not hold the memory buffer. To store the
690     * memory buffer in the attribute object, one must call getData().
691     *
692     * By default, the whole attribute is read into memory.
693     *
694     * For CompoundAttr, the memory data object is an java.util.List object. Each
695     * element of the list is a data array that corresponds to a compound field.
696     *
697     * For example, if compound dataset "comp" has the following nested
698     * structure, and member datatypes
699     *
700     * <pre>
701     * comp --&gt; m01 (int)
702     * comp --&gt; m02 (float)
703     * comp --&gt; nest1 --&gt; m11 (char)
704     * comp --&gt; nest1 --&gt; m12 (String)
705     * comp --&gt; nest1 --&gt; nest2 --&gt; m21 (long)
706     * comp --&gt; nest1 --&gt; nest2 --&gt; m22 (double)
707     * </pre>
708     *
709     * getData() returns a list of six arrays: {int[], float[], char[],
710     * String[], long[] and double[]}.
711     *
712     * @return the data read from file.
713     *
714     * @see #getData()
715     * @see hdf.object.DataFormat#read()
716     *
717     * @throws Exception
718     *             if object can not be read
719     */
720    @Override
721    public Object read() throws Exception {
722        Object readData = null;
723
724        if (!isInited())
725            init();
726
727        try {
728            readData = compoundAttributeCommonIO(H5File.IO_TYPE.READ, null);
729        }
730        catch (Exception ex) {
731            log.debug("read(): failed to read compound attribute: ", ex);
732            throw new Exception("failed to read compound attribute: " + ex.getMessage(), ex);
733        }
734
735        return readData;
736    }
737
738    /**
739     * Writes the given data buffer into this attribute in a file.
740     *
741     * The data buffer is a vector that contains the data values of compound fields. The data is written
742     * into file as one data blob.
743     *
744     * @param buf
745     *            The vector that contains the data values of compound fields.
746     *
747     * @throws Exception
748     *             If there is an error at the HDF5 library level.
749     */
750    @Override
751    public void write(Object buf) throws Exception {
752        if (this.getFileFormat().isReadOnly())
753            throw new Exception("cannot write to compound attribute in file opened as read-only");
754
755        if (!buf.equals(data))
756            setData(buf);
757
758        init();
759
760        if (parentObject == null) {
761            log.debug("write(Object): parent object is null; nowhere to write attribute to");
762            return;
763        }
764
765        ((MetaDataContainer) getParentObject()).writeMetadata(this);
766
767        try {
768            compoundAttributeCommonIO(H5File.IO_TYPE.WRITE, buf);
769        }
770        catch (Exception ex) {
771            log.debug("write(Object): failed to write compound attribute: ", ex);
772            throw new Exception("failed to write compound attribute: " + ex.getMessage(), ex);
773        }
774        resetSelection();
775    }
776
777    /*
778     * Routine to convert datatypes that are read in as byte arrays to
779     * regular types.
780     */
781    @Override
782    protected Object convertByteMember(final Datatype dtype, byte[] byteData) {
783        Object theObj = null;
784
785        if (dtype.isFloat() && dtype.getDatatypeSize() == 16)
786            theObj = ((H5Datatype)dtype).byteToBigDecimal(byteData, 0);
787        else
788            theObj = super.convertByteMember(dtype, byteData);
789
790        return theObj;
791    }
792
793    private Object compoundAttributeCommonIO(H5File.IO_TYPE ioType, Object writeBuf) throws Exception {
794        H5Datatype dsDatatype = (H5Datatype)getDatatype();
795        Object theData = null;
796
797        if (numberOfMembers <= 0) {
798            log.debug("compoundAttributeCommonIO(): attribute contains no members");
799            throw new Exception("dataset contains no members");
800        }
801
802        /*
803         * I/O type-specific pre-initialization.
804         */
805        if (ioType == H5File.IO_TYPE.WRITE) {
806            if ((writeBuf == null) || !(writeBuf instanceof List)) {
807                log.debug("compoundAttributeCommonIO(): writeBuf is null or invalid");
808                throw new Exception("write buffer is null or invalid");
809            }
810
811            /*
812             * Check for any unsupported datatypes and fail early before
813             * attempting to write to the attribute.
814             */
815            if (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isCompound()) {
816                log.debug("compoundAttributeCommonIO(): cannot write attribute of type ARRAY of COMPOUND");
817                throw new HDF5Exception("Unsupported attribute of type ARRAY of COMPOUND");
818            }
819
820            if (dsDatatype.isVLEN() && !dsDatatype.isVarStr() && dsDatatype.getDatatypeBase().isCompound()) {
821                log.debug("compoundAttributeCommonIO(): cannot write attribute of type VLEN of COMPOUND");
822                throw new HDF5Exception("Unsupported attribute of type VLEN of COMPOUND");
823            }
824        }
825
826        long aid = open();
827        if (aid >= 0) {
828            log.trace("compoundAttributeCommonIO(): isDataLoaded={}", isDataLoaded);
829            try {
830                theData = AttributeCommonIO(aid, ioType, writeBuf);
831            }
832            finally {
833                close(aid);
834            }
835        }
836        else
837            log.debug("compoundAttributeCommonIO(): failed to open attribute");
838
839        return theData;
840    }
841
842    /*
843     * Private recursive routine to read/write an entire compound datatype field by
844     * field. This routine is called recursively for ARRAY of COMPOUND and VLEN of
845     * COMPOUND datatypes.
846     *
847     * NOTE: the globalMemberIndex hack is ugly, but we need to keep track of a
848     * running counter so that we can index properly into the flattened name list
849     * generated from H5Datatype.extractCompoundInfo() at attribute init time.
850     */
851    private Object compoundTypeIO(H5Datatype parentType, int nSelPoints, final H5Datatype cmpdType,
852            Object dataBuf, int[] globalMemberIndex) {
853        Object theData = null;
854
855        if (cmpdType.isArray()) {
856            log.trace("compoundTypeIO(): ARRAY type");
857
858            long[] arrayDims = cmpdType.getArrayDims();
859            int arrSize = nSelPoints;
860            for (int i = 0; i < arrayDims.length; i++) {
861                arrSize *= arrayDims[i];
862            }
863            theData = compoundTypeIO(cmpdType, arrSize, (H5Datatype) cmpdType.getDatatypeBase(), dataBuf, globalMemberIndex);
864        }
865        else if (cmpdType.isVLEN() && !cmpdType.isVarStr()) {
866            /*
867             * TODO: true variable-length support.
868             */
869            String[] errVal = new String[nSelPoints];
870            String errStr = "*UNSUPPORTED*";
871
872            for (int j = 0; j < nSelPoints; j++)
873                errVal[j] = errStr;
874
875            /*
876             * Setup a fake data list.
877             */
878            Datatype baseType = cmpdType.getDatatypeBase();
879            while (baseType != null && !baseType.isCompound()) {
880                baseType = baseType.getDatatypeBase();
881            }
882
883            List<Object> fakeVlenData = (List<Object>) H5Datatype.allocateArray((H5Datatype) baseType, nSelPoints);
884            fakeVlenData.add(errVal);
885
886            theData = fakeVlenData;
887        }
888        else if (cmpdType.isCompound()) {
889            long parentLength = parentType.getDatatypeSize();
890            List<Object> memberDataList = null;
891            List<Datatype> typeList = cmpdType.getCompoundMemberTypes();
892            List<Long> offsetList = cmpdType.getCompoundMemberOffsets();
893
894            log.trace("compoundTypeIO(): read {} members: parentLength={}", typeList.size(), parentLength);
895
896            memberDataList = (List<Object>) H5Datatype.allocateArray(cmpdType, nSelPoints);
897
898            try {
899                for (int i = 0; i < typeList.size(); i++) {
900                    long memberOffset = 0;  //offset into dataBuf
901                    H5Datatype memberType = null;
902                    String memberName = null;
903                    Object memberData = null;
904
905                    try {
906                        memberType = (H5Datatype) typeList.get(i);
907                        memberOffset = offsetList.get(i);
908                    }
909                    catch (Exception ex) {
910                        log.debug("compoundTypeIO(): get member {} failure: ", i, ex);
911                        globalMemberIndex[0]++;
912                        continue;
913                    }
914
915                    /*
916                     * Since the type list used here is not a flattened structure, we need to skip
917                     * the member selection check for compound types, as otherwise having a single
918                     * member not selected would skip the reading/writing for the entire compound
919                     * type. The member selection check will be deferred to the recursive compound
920                     * read/write below.
921                     */
922                    if (!memberType.isCompound()) {
923                        if (!isMemberSelected[globalMemberIndex[0] % this.getMemberCount()]) {
924                            log.debug("compoundTypeIO(): member[{}] is not selected", i);
925                            globalMemberIndex[0]++;
926                            continue; // the field is not selected
927                        }
928                    }
929
930                    if (!memberType.isCompound()) {
931                        try {
932                            memberName = new String(flatNameList.get(globalMemberIndex[0]));
933                        }
934                        catch (Exception ex) {
935                            log.debug("compoundTypeIO(): get member {} name failure: ", i, ex);
936                            memberName = "null";
937                        }
938                    }
939
940                    log.trace("compoundTypeIO(): member[{}]({}) is type {} offset {}", i, memberName,
941                            memberType.getDescription(), memberOffset);
942
943                    try {
944                        int mt_typesize = (int)memberType.getDatatypeSize();
945                        log.trace("compoundTypeIO(): member[{}] mt_typesize={}", i, mt_typesize);
946                        byte[] memberbuf = new byte[nSelPoints * mt_typesize];
947                        for (int dimindx = 0; dimindx < nSelPoints; dimindx++)
948                            try {
949                                System.arraycopy(dataBuf, (int)memberOffset + dimindx * (int)parentLength, memberbuf, dimindx * mt_typesize, mt_typesize);
950                            }
951                        catch (Exception err) {
952                            log.trace("compoundTypeIO(): arraycopy failure: ", err);
953                        }
954
955                        if (memberType.isCompound()) {
956                            memberData = compoundTypeIO(cmpdType, nSelPoints, memberType, memberbuf,
957                                    globalMemberIndex);
958                        }
959                        else if (memberType.isArray() /* || (memberType.isVLEN() && !memberType.isVarStr()) */) {
960                            /*
961                             * Recursively detect any nested array/vlen of compound types.
962                             */
963                            boolean compoundFound = false;
964
965                            Datatype base = memberType.getDatatypeBase();
966                            while (base != null) {
967                                if (base.isCompound())
968                                    compoundFound = true;
969
970                                base = base.getDatatypeBase();
971                            }
972
973                            if (compoundFound) {
974                                /*
975                                 * Skip the top-level array/vlen type.
976                                 */
977                                globalMemberIndex[0]++;
978
979                                memberData = compoundTypeIO(cmpdType, nSelPoints, memberType, memberbuf,
980                                        globalMemberIndex);
981                            }
982                            else {
983                                memberData = convertByteMember(memberType, memberbuf);
984                                globalMemberIndex[0]++;
985                            }
986                        }
987                        else {
988                            memberData = convertByteMember(memberType, memberbuf);
989                            globalMemberIndex[0]++;
990                        }
991                    }
992                    catch (Exception ex) {
993                        log.debug("compoundTypeIO(): failed to read member {}: ", i, ex);
994                        globalMemberIndex[0]++;
995                        memberData = null;
996                    }
997
998                    if (memberData == null) {
999                        String[] errVal = new String[nSelPoints];
1000                        String errStr = "*ERROR*";
1001
1002                        for (int j = 0; j < nSelPoints; j++)
1003                            errVal[j] = errStr;
1004
1005                        memberData = errVal;
1006                    }
1007
1008                    memberDataList.add(memberData);
1009                } // (i = 0; i < atomicTypeList.size(); i++)
1010            }
1011            catch (Exception ex) {
1012                log.debug("compoundTypeIO(): failure: ", ex);
1013                memberDataList = null;
1014            }
1015
1016            theData = memberDataList;
1017        }
1018
1019        return theData;
1020    }
1021
1022    private Object compoundTypeWriteIO(H5Datatype parentType, final H5Datatype cmpdType,
1023            Object dataBuf, int[] globalMemberIndex) {
1024        Object theData = null;
1025        if (cmpdType.isArray()) {
1026            Object memberData = null;
1027            log.trace("compoundTypeWriteIO(): ARRAY type");
1028
1029            theData = compoundTypeWriteIO(cmpdType, (H5Datatype) cmpdType.getDatatypeBase(), dataBuf, globalMemberIndex);
1030        }
1031        else if (cmpdType.isVLEN() && !cmpdType.isVarStr()) {
1032            /*
1033             * TODO: true variable-length support.
1034             */
1035            String errVal = new String("*UNSUPPORTED*");
1036
1037            /*
1038             * Setup a fake data bytes.
1039             */
1040            Datatype baseType = cmpdType.getDatatypeBase();
1041            while (baseType != null && !baseType.isCompound()) {
1042                baseType = baseType.getDatatypeBase();
1043            }
1044
1045            List<Object> fakeVlenData = (List<Object>) H5Datatype.allocateArray((H5Datatype) baseType, 1);
1046            fakeVlenData.add(errVal);
1047
1048            theData = convertMemberByte(baseType, fakeVlenData);
1049        }
1050        else if (cmpdType.isCompound()) {
1051            long parentLength = parentType.getDatatypeSize();
1052            List<Object> memberDataList = null;
1053            List<Datatype> typeList = cmpdType.getCompoundMemberTypes();
1054            List<Long> offsetList = cmpdType.getCompoundMemberOffsets();
1055
1056            log.trace("compoundTypeWriteIO(): write {} members", typeList.size());
1057
1058            theData = new byte[(int)cmpdType.getDatatypeSize()];
1059            try {
1060                for (int i = 0, writeListIndex = 0; i < typeList.size(); i++) {
1061                    long memberOffset = 0;  //offset into dataBuf
1062                    H5Datatype memberType = null;
1063                    String memberName = null;
1064                    Object memberData = null;
1065
1066                    try {
1067                        memberType = (H5Datatype) typeList.get(i);
1068                        memberOffset = offsetList.get(i);
1069                    }
1070                    catch (Exception ex) {
1071                        log.debug("compoundTypeWriteIO(): get member {} failure: ", i, ex);
1072                        globalMemberIndex[0]++;
1073                        continue;
1074                    }
1075                    long memberLength = memberType.getDatatypeSize();
1076
1077                    /*
1078                     * Since the type list used here is not a flattened structure, we need to skip the member selection
1079                     * check for compound types, as otherwise having a single member not selected would skip the
1080                     * reading/writing for the entire compound type. The member selection check will be deferred to the
1081                     * recursive compound read/write below.
1082                     */
1083                    if (!memberType.isCompound()) {
1084                        if (!isMemberSelected[globalMemberIndex[0] % this.getMemberCount()]) {
1085                            log.debug("compoundTypeWriteIO(): member[{}] is not selected", i);
1086                            globalMemberIndex[0]++;
1087                            continue; // the field is not selected
1088                        }
1089                    }
1090
1091                    if (!memberType.isCompound()) {
1092                        try {
1093                            memberName = new String(flatNameList.get(globalMemberIndex[0]));
1094                        }
1095                        catch (Exception ex) {
1096                            log.debug("compoundTypeWriteIO(): get member {} name failure: ", i, ex);
1097                            memberName = "null";
1098                        }
1099                    }
1100
1101                    log.trace("compoundTypeWriteIO(): member[{}]({}) is type {} offset {}", i, memberName,
1102                            memberType.getDescription(), memberOffset);
1103
1104                    try {
1105                        /*
1106                         * TODO: currently doesn't correctly handle non-selected compound members.
1107                         */
1108                        memberData = ((List<?>) dataBuf).get(i);
1109                    }
1110                    catch (Exception ex) {
1111                        log.debug("compoundTypeWriteIO(): get member[{}] data failure: ", i, ex);
1112                        globalMemberIndex[0]++;
1113                        continue;
1114                    }
1115
1116                    if (memberData == null) {
1117                        log.debug("compoundTypeWriteIO(): member[{}] data is null", i);
1118                        globalMemberIndex[0]++;
1119                        continue;
1120                    }
1121
1122                    try {
1123                        if (memberType.isCompound()) {
1124                            List<?> nestedList = (List<?>) ((List<?>) dataBuf).get(i);
1125                            memberData = compoundTypeWriteIO(cmpdType, memberType, nestedList, globalMemberIndex);
1126                        }
1127                        else {
1128                            memberData = writeSingleCompoundMember(memberType, memberData);
1129                            globalMemberIndex[0]++;
1130                        }
1131                    }
1132                    catch (Exception ex) {
1133                        log.debug("compoundTypeWriteIO(): failed to write member[{}]: ", i, ex);
1134                        globalMemberIndex[0]++;
1135                    }
1136
1137                    byte[] indexedBytes = convertMemberByte(memberType, memberData);
1138                    try {
1139                        System.arraycopy(indexedBytes, 0, theData, writeListIndex, (int)memberLength);
1140                    }
1141                    catch (Exception err) {
1142                        log.trace("compoundTypeWriteIO(): arraycopy failure: ", err);
1143                    }
1144                    writeListIndex += memberLength;
1145                } // (i = 0, writeListIndex = 0; i < atomicTypeList.size(); i++)
1146            }
1147            catch (Exception ex) {
1148                log.debug("compoundTypeWriteIO(): failure: ", ex);
1149                theData = null;
1150            }
1151        }
1152
1153        return theData;
1154    }
1155
1156    /*
1157     * Routine to convert datatypes that are in object arrays to
1158     * bytes.
1159     */
1160    private byte[] convertMemberByte(final Datatype dtype, Object theObj) {
1161        byte[] byteData = null;
1162
1163        if (dtype.getDatatypeSize() == 1) {
1164            /*
1165             * Normal byte[] type, such as an integer datatype of size 1.
1166             */
1167            byteData = (byte[])theObj;
1168        }
1169        else if (dtype.isString() && !dtype.isVarStr() && convertByteToString && !(theObj instanceof byte[])) {
1170            log.trace("convertMemberByte(): converting string array to byte array");
1171
1172            byteData = stringToByte((String[])theObj, (int) dtype.getDatatypeSize());
1173        }
1174        else if (dtype.isInteger()) {
1175            log.trace("convertMemberByte(): converting integer array to byte array");
1176
1177            switch ((int)dtype.getDatatypeSize()) {
1178            case 1:
1179                /*
1180                 * Normal byte[] type, such as an integer datatype of size 1.
1181                 */
1182                byteData = (byte[])theObj;
1183                break;
1184            case 2:
1185                byteData = HDFNativeData.shortToByte(0, 1, (short[])theObj);
1186                break;
1187            case 4:
1188                byteData = HDFNativeData.intToByte(0, 1, (int[])theObj);
1189                break;
1190            case 8:
1191                byteData = HDFNativeData.longToByte(0, 1, (long[])theObj);
1192                break;
1193            default:
1194                log.debug("convertMemberByte(): invalid datatype size");
1195                byteData = null;
1196                break;
1197            }
1198        }
1199        else if (dtype.isFloat()) {
1200            log.trace("convertMemberByte(): converting float array to byte array");
1201
1202            if (dtype.getDatatypeSize() == 16)
1203                byteData = ((H5Datatype)dtype).bigDecimalToByte((BigDecimal[])theObj, 0);
1204            else if (dtype.getDatatypeSize() == 8)
1205                byteData = HDFNativeData.doubleToByte(0, 1, (double[])theObj);
1206            else
1207                byteData = HDFNativeData.floatToByte(0, 1, (float[])theObj);
1208        }
1209        else if (((H5Datatype)dtype).isRegRef() || ((H5Datatype)dtype).isRefObj()) {
1210            log.trace("convertMemberByte(): reference type - converting long array to byte array");
1211
1212            byteData = HDFNativeData.longToByte(0, 1, (long[])theObj);
1213        }
1214        else if (dtype.isArray()) {
1215            Datatype baseType = dtype.getDatatypeBase();
1216
1217            /*
1218             * Retrieve the real base datatype in the case of ARRAY of ARRAY datatypes.
1219             */
1220            while (baseType.isArray())
1221                baseType = baseType.getDatatypeBase();
1222
1223            /*
1224             * Optimize for the common cases of Arrays.
1225             */
1226            switch (baseType.getDatatypeClass()) {
1227            case Datatype.CLASS_INTEGER:
1228            case Datatype.CLASS_FLOAT:
1229            case Datatype.CLASS_CHAR:
1230            case Datatype.CLASS_STRING:
1231            case Datatype.CLASS_BITFIELD:
1232            case Datatype.CLASS_OPAQUE:
1233            case Datatype.CLASS_COMPOUND:
1234            case Datatype.CLASS_REFERENCE:
1235            case Datatype.CLASS_ENUM:
1236            case Datatype.CLASS_VLEN:
1237            case Datatype.CLASS_TIME:
1238                byteData = convertMemberByte(baseType, theObj);
1239                break;
1240
1241            case Datatype.CLASS_ARRAY:
1242            {
1243                Datatype arrayType = dtype.getDatatypeBase();
1244
1245                long[] arrayDims = dtype.getArrayDims();
1246                int arrSize = 1;
1247                for (int i = 0; i < arrayDims.length; i++) {
1248                    arrSize *= arrayDims[i];
1249                }
1250
1251                byteData = new byte[arrSize * (int)arrayType.getDatatypeSize()];
1252
1253                for (int i = 0; i < arrSize; i++) {
1254                    byte[] indexedBytes = convertMemberByte(arrayType, ((Object[]) theObj)[i]);
1255                    try {
1256                        System.arraycopy(indexedBytes, 0, byteData, (int)(i * arrayType.getDatatypeSize()), (int)arrayType.getDatatypeSize());
1257                    }
1258                    catch (Exception err) {
1259                        log.trace("convertMemberByte(): arraycopy failure: ", err);
1260                    }
1261                }
1262
1263                break;
1264            }
1265
1266            case Datatype.CLASS_NO_CLASS:
1267            default:
1268                log.debug("convertMemberByte(): invalid datatype class");
1269                byteData = null;
1270            }
1271        }
1272        else if (dtype.isCompound()) {
1273            /*
1274             * TODO: still valid after reading change?
1275             */
1276            byteData = convertCompoundMemberBytes(dtype, (List<Object>)theObj);
1277        }
1278        else {
1279            log.debug("convertMemberByte(): no change as byte[]");
1280            byteData = (byte[])theObj;
1281        }
1282
1283        return byteData;
1284    }
1285
1286    /**
1287     * Given an array of objects representing a compound Datatype, converts each of
1288     * its members into bytes and returns the results.
1289     *
1290     * @param dtype
1291     *            The compound datatype to convert
1292     * @param theObj
1293     *            The object array representing the data of the compound Datatype
1294     * @return The converted bytes of the objects
1295     */
1296    private byte[] convertCompoundMemberBytes(final Datatype dtype, List<Object> theObj) {
1297        List<Datatype> allSelectedTypes = Arrays.asList(this.getSelectedMemberTypes());
1298        List<Datatype> localTypes = new ArrayList<>(dtype.getCompoundMemberTypes());
1299        Iterator<Datatype> localIt = localTypes.iterator();
1300        while (localIt.hasNext()) {
1301            Datatype curType = localIt.next();
1302
1303            if (curType.isCompound())
1304                continue;
1305
1306            if (!allSelectedTypes.contains(curType))
1307                localIt.remove();
1308        }
1309
1310        byte[] byteData = new byte[(int)dtype.getDatatypeSize()];
1311        for (int i = 0, index = 0; i < localTypes.size(); i++) {
1312            Datatype curType = localTypes.get(i);
1313            byte[] indexedBytes = null;
1314            if (curType.isCompound())
1315                indexedBytes = convertCompoundMemberBytes(curType, (List<Object>)theObj.get(i));
1316            else
1317                indexedBytes = convertMemberByte(curType, theObj.get(i));
1318
1319            try {
1320                System.arraycopy(indexedBytes, 0, byteData, index + (int)curType.getDatatypeSize(), (int)curType.getDatatypeSize());
1321            }
1322            catch (Exception err) {
1323                log.trace("convertCompoundMemberBytes(): arraycopy failure: ", err);
1324            }
1325            index += curType.getDatatypeSize();
1326        }
1327
1328        return byteData;
1329    }
1330
1331    /*
1332     * Private routine to convert a single field of a compound datatype.
1333     */
1334    private Object writeSingleCompoundMember(final H5Datatype memberType, Object theData) throws Exception {
1335        /*
1336         * Perform any necessary data conversions before writing the data.
1337         */
1338        Object tmpData = theData;
1339        try {
1340            if (memberType.isUnsigned()) {
1341                // Check if we need to convert unsigned integer data from Java-style
1342                // to C-style integers
1343                long tsize = memberType.getDatatypeSize();
1344                String cname = theData.getClass().getName();
1345                char dname = cname.charAt(cname.lastIndexOf('[') + 1);
1346                boolean doIntConversion = (((tsize == 1) && (dname == 'S'))
1347                        || ((tsize == 2) && (dname == 'I')) || ((tsize == 4) && (dname == 'J')));
1348
1349                if (doIntConversion) {
1350                    log.trace("writeSingleCompoundMember(): converting integer data to unsigned C-type integers");
1351                    tmpData = convertToUnsignedC(theData, null);
1352                }
1353            }
1354            else if (memberType.isString() && (Array.get(theData, 0) instanceof String)) {
1355                log.trace("writeSingleCompoundMember(): converting string array to byte array");
1356                tmpData = stringToByte((String[]) theData, (int) memberType.getDatatypeSize());
1357            }
1358            else if (memberType.isEnum() && (Array.get(theData, 0) instanceof String)) {
1359                log.trace("writeSingleCompoundMember(): converting enum names to values");
1360                tmpData = memberType.convertEnumNameToValue((String[]) theData);
1361            }
1362        }
1363        catch (Exception ex) {
1364            log.debug("writeSingleCompoundMember(): data conversion failure: ", ex);
1365            tmpData = null;
1366        }
1367
1368        if (tmpData == null) {
1369            log.debug("writeSingleCompoundMember(): data is null");
1370        }
1371
1372        return tmpData;
1373    }
1374
1375    /**
1376     * Converts the data values of this data object to appropriate Java integers if
1377     * they are unsigned integers.
1378     *
1379     * @see hdf.object.Dataset#convertToUnsignedC(Object)
1380     * @see hdf.object.Dataset#convertFromUnsignedC(Object, Object)
1381     *
1382     * @return the converted data buffer.
1383     */
1384    @Override
1385    public Object convertFromUnsignedC() {
1386        throw new UnsupportedOperationException("H5CompoundDS:convertFromUnsignedC Unsupported operation.");
1387    }
1388
1389    /**
1390     * Converts Java integer data values of this data object back to unsigned C-type
1391     * integer data if they are unsigned integers.
1392     *
1393     * @see hdf.object.Dataset#convertToUnsignedC(Object)
1394     * @see hdf.object.Dataset#convertToUnsignedC(Object, Object)
1395     *
1396     * @return the converted data buffer.
1397     */
1398    @Override
1399    public Object convertToUnsignedC() {
1400        throw new UnsupportedOperationException("H5CompoundDS:convertToUnsignedC Unsupported operation.");
1401    }
1402
1403    /* Implement interface Attribute */
1404
1405    /**
1406     * Returns the HObject to which this Attribute is currently "attached".
1407     *
1408     * @return the HObject to which this Attribute is currently "attached".
1409     */
1410    @Override
1411    public HObject getParentObject() {
1412        return parentObject;
1413    }
1414
1415    /**
1416     * Sets the HObject to which this Attribute is "attached".
1417     *
1418     * @param pObj
1419     *            the new HObject to which this Attribute is "attached".
1420     */
1421    @Override
1422    public void setParentObject(HObject pObj) {
1423        parentObject = pObj;
1424    }
1425
1426    /**
1427     * set a property for the attribute.
1428     *
1429     * @param key the attribute Map key
1430     * @param value the attribute Map value
1431     */
1432    @Override
1433    public void setProperty(String key, Object value) {
1434        properties.put(key, value);
1435    }
1436
1437    /**
1438     * get a property for a given key.
1439     *
1440     * @param key the attribute Map key
1441     *
1442     * @return the property
1443     */
1444    @Override
1445    public Object getProperty(String key) {
1446        return properties.get(key);
1447    }
1448
1449    /**
1450     * get all property keys.
1451     *
1452     * @return the Collection of property keys
1453     */
1454    @Override
1455    public Collection<String> getPropertyKeys() {
1456        return properties.keySet();
1457    }
1458
1459    /**
1460     * Returns the name of the object. For example, "Raster Image #2".
1461     *
1462     * @return The name of the object.
1463     */
1464    @Override
1465    public final String getAttributeName() {
1466        return getName();
1467    }
1468
1469    /**
1470     * Retrieves the attribute data from the file.
1471     *
1472     * @return the attribute data.
1473     *
1474     * @throws Exception
1475     *             if the data can not be retrieved
1476     */
1477    @Override
1478    public final Object getAttributeData() throws Exception, OutOfMemoryError {
1479        return getData();
1480    }
1481
1482    /**
1483     * Returns the datatype of the attribute.
1484     *
1485     * @return the datatype of the attribute.
1486     */
1487    @Override
1488    public final Datatype getAttributeDatatype() {
1489        return getDatatype();
1490    }
1491
1492    /**
1493     * Returns the space type for the attribute. It returns a
1494     * negative number if it failed to retrieve the type information from
1495     * the file.
1496     *
1497     * @return the space type for the attribute.
1498     */
1499    @Override
1500    public final int getAttributeSpaceType() {
1501        return getSpaceType();
1502    }
1503
1504    /**
1505     * Returns the rank (number of dimensions) of the attribute. It returns a
1506     * negative number if it failed to retrieve the dimension information from
1507     * the file.
1508     *
1509     * @return the number of dimensions of the attribute.
1510     */
1511    @Override
1512    public final int getAttributeRank() {
1513        return getRank();
1514    }
1515
1516    /**
1517     * Returns the selected size of the rows and columns of the attribute. It returns a
1518     * negative number if it failed to retrieve the size information from
1519     * the file.
1520     *
1521     * @return the selected size of the rows and colums of the attribute.
1522     */
1523    @Override
1524    public final int getAttributePlane() {
1525        return (int)getWidth() * (int)getHeight();
1526    }
1527
1528    /**
1529     * Returns the array that contains the dimension sizes of the data value of
1530     * the attribute. It returns null if it failed to retrieve the dimension
1531     * information from the file.
1532     *
1533     * @return the dimension sizes of the attribute.
1534     */
1535    @Override
1536    public final long[] getAttributeDims() {
1537        return getDims();
1538    }
1539
1540    /**
1541     * @return true if the data is a single scalar point; otherwise, returns
1542     *         false.
1543     */
1544    @Override
1545    public boolean isAttributeScalar() {
1546        return isScalar();
1547    }
1548
1549    /**
1550     * Not for public use in the future.
1551     *
1552     * setData() is not safe to use because it changes memory buffer
1553     * of the dataset object. Dataset operations such as write/read
1554     * will fail if the buffer type or size is changed.
1555     *
1556     * @param d  the object data -must be an array of Objects
1557     */
1558    @Override
1559    public void setAttributeData(Object d) {
1560        setData(d);
1561    }
1562
1563    /**
1564     * Writes the memory buffer of this dataset to file.
1565     *
1566     * @throws Exception if buffer can not be written
1567     */
1568    @Override
1569    public void writeAttribute() throws Exception {
1570        write();
1571    }
1572
1573    /**
1574     * Writes the given data buffer into this attribute in a file.
1575     *
1576     * The data buffer is a vector that contains the data values of compound fields. The data is written
1577     * into file as one data blob.
1578     *
1579     * @param buf
1580     *            The vector that contains the data values of compound fields.
1581     *
1582     * @throws Exception
1583     *             If there is an error at the library level.
1584     */
1585    @Override
1586    public void writeAttribute(Object buf) throws Exception {
1587        write(buf);
1588    }
1589
1590    /**
1591     * Returns a string representation of the data value. For
1592     * example, "0, 255".
1593     *
1594     * For a compound datatype, it will be a 1D array of strings with field
1595     * members separated by the delimiter. For example,
1596     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
1597     * float} of three data points.
1598     *
1599     * @param delimiter
1600     *            The delimiter used to separate individual data points. It
1601     *            can be a comma, semicolon, tab or space. For example,
1602     *            toString(",") will separate data by commas.
1603     *
1604     * @return the string representation of the data values.
1605     */
1606    @Override
1607    public String toAttributeString(String delimiter) {
1608        return toString(delimiter, -1);
1609    }
1610
1611    /**
1612     * Returns a string representation of the data value. For
1613     * example, "0, 255".
1614     *
1615     * For a compound datatype, it will be a 1D array of strings with field
1616     * members separated by the delimiter. For example,
1617     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
1618     * float} of three data points.
1619     *
1620     * @param delimiter
1621     *            The delimiter used to separate individual data points. It
1622     *            can be a comma, semicolon, tab or space. For example,
1623     *            toString(",") will separate data by commas.
1624     * @param maxItems
1625     *            The maximum number of Array values to return
1626     *
1627     * @return the string representation of the data values.
1628     */
1629    @Override
1630    public String toAttributeString(String delimiter, int maxItems) {
1631        Object theData = originalBuf;
1632        if (theData == null) {
1633            log.debug("toAttributeString: value is null");
1634            return null;
1635        }
1636
1637        // attribute value is an array
1638        StringBuilder sb = new StringBuilder();
1639        int numberTypes = ((ArrayList<Object[]>)theData).size();
1640        log.trace("toAttributeString: numberTypes={}", numberTypes);
1641        List<Datatype> cmpdTypes =  getDatatype().getCompoundMemberTypes();
1642
1643        int loopcnt = 0;
1644        while (loopcnt < maxItems) {
1645            if (loopcnt > 0)
1646                sb.append(delimiter);
1647            sb.append("{");
1648            for (int dv = 0; dv < numberTypes; dv++) {
1649                if (dv > 0)
1650                    sb.append(delimiter);
1651
1652                Object theobj = ((ArrayList<Object[]>)theData).get(dv);
1653                Class<? extends Object> valClass = theobj.getClass();
1654                log.trace("toAttributeString:valClass={}", valClass);
1655                int n = 0;
1656                Datatype dtype = cmpdTypes.get(dv);
1657                // value is an array
1658                if (valClass.isArray()) {
1659                    n = Array.getLength(theobj);
1660                    if (dtype.isRef())
1661                        n /= (int)dtype.getDatatypeSize();
1662                }
1663                else
1664                    n = ((ArrayList<Object[]>)theobj).size();
1665                //if ((maxItems > 0) && (n + loopcnt > maxItems))
1666                //    n = maxItems - loopcnt;
1667                log.trace("toAttributeString:[{}] theobj={} size={}", dv, theobj, n);
1668                String sobj = toString(theobj, dtype, delimiter, n);
1669                sb.append(sobj);
1670                loopcnt += n;
1671                if (loopcnt >= maxItems)
1672                    break;
1673            }  // end for (int dv = 0; dv < numberTypes; dv++)
1674            sb.append("}");
1675            break;
1676        }  // end for (int i = 1; i < n; i++)
1677
1678        return sb.toString();
1679    }
1680
1681    @Override
1682    protected String toString(Object theData, Datatype theType, String delimiter, int count) {
1683        log.trace("toString: is_enum={} is_unsigned={} count={}", theType.isEnum(),
1684                theType.isUnsigned(), count);
1685        StringBuilder sb = new StringBuilder();
1686        Class<? extends Object> valClass = theData.getClass();
1687        log.trace("toString:valClass={}", valClass);
1688
1689        H5Datatype dtype = (H5Datatype)theType;
1690        log.trace("toString: count={} isStdRef={}", count, dtype.isStdRef());
1691        if (dtype.isStdRef()) {
1692            return ((H5ReferenceType)dtype).toString(delimiter, count);
1693        }
1694        else if (dtype.isVLEN() && !dtype.isVarStr()) {
1695            log.trace("toString: vlen");
1696            String strValue;
1697
1698            for (int k = 0; k < count; k++) {
1699                Object value = Array.get(theData, k);
1700                if (value == null)
1701                    strValue = "null";
1702                else {
1703                    if (dtype.getDatatypeBase().isRef()) {
1704                        ArrayList<byte[]> ref_value = (ArrayList<byte[]>)value;
1705                        log.trace("toString: vlen value={}", ref_value);
1706                        strValue = "{";
1707                        for (int m = 0; m < ref_value.size(); m++) {
1708                            byte[] curBytes = ref_value.get(m);
1709                            if (m > 0)
1710                                strValue += ", ";
1711                            if (H5ReferenceType.zeroArrayCheck(curBytes))
1712                                strValue += "NULL";
1713                            else {
1714                                if (((H5Datatype)dtype.getDatatypeBase()).isStdRef()) {
1715                                    strValue += H5.H5Rget_obj_name(curBytes, HDF5Constants.H5P_DEFAULT);
1716                                }
1717                                else if (dtype.getDatatypeBase().getDatatypeSize() == HDF5Constants.H5R_DSET_REG_REF_BUF_SIZE) {
1718                                    try {
1719                                        strValue += H5Datatype.descRegionDataset(parentObject.getFileFormat().getFID(), curBytes);
1720                                    }
1721                                    catch (Exception ex) {
1722                                        ex.printStackTrace();
1723                                    }
1724                                }
1725                                else if (dtype.getDatatypeBase().getDatatypeSize() == HDF5Constants.H5R_OBJ_REF_BUF_SIZE) {
1726                                    try {
1727                                        strValue += H5Datatype.descReferenceObject(parentObject.getFileFormat().getFID(), curBytes);
1728                                    }
1729                                    catch (Exception ex) {
1730                                        ex.printStackTrace();
1731                                    }
1732                                }
1733                            }
1734                        }
1735                        strValue += "}";
1736                    }
1737                    else
1738                        strValue = value.toString();
1739                }
1740                if (k > 0)
1741                    sb.append(", ");
1742                sb.append(strValue);
1743            }
1744        }
1745        else if (dtype.isRef()) {
1746            log.trace("toString: ref");
1747            int dtypesize = (int)dtype.getDatatypeSize();
1748            String strValue = "NULL";
1749            byte[] rElements = null;
1750
1751            for (int k = 0; k < count; k++) {
1752                // need to iterate if type is ArrayList
1753                if (theData instanceof ArrayList)
1754                    rElements = (byte[]) ((ArrayList) theData).get(k);
1755                else
1756                    rElements = (byte[])theData;
1757
1758                if (H5ReferenceType.zeroArrayCheck(rElements))
1759                    strValue = "NULL";
1760                else {
1761                    if (dtype.isStdRef()) {
1762                        strValue += H5.H5Rget_obj_name(rElements, HDF5Constants.H5P_DEFAULT);
1763                    }
1764                    else if (dtypesize == HDF5Constants.H5R_DSET_REG_REF_BUF_SIZE) {
1765                        try {
1766                            strValue = H5Datatype.descRegionDataset(parentObject.getFileFormat().getFID(), rElements);
1767                        }
1768                        catch (Exception ex) {
1769                            ex.printStackTrace();
1770                        }
1771                    }
1772                    else if (dtypesize == HDF5Constants.H5R_OBJ_REF_BUF_SIZE) {
1773                        try {
1774                            strValue = H5Datatype.descReferenceObject(parentObject.getFileFormat().getFID(), rElements);
1775                        }
1776                        catch (Exception ex) {
1777                            ex.printStackTrace();
1778                        }
1779                    }
1780                }
1781                if (k > 0)
1782                    sb.append(", ");
1783                sb.append(strValue);
1784            }
1785        }
1786        else {
1787            return super.toString(theData, theType, delimiter, count);
1788        }
1789
1790        return sb.toString();
1791    }
1792
1793    /* Implement interface H5Attribute */
1794
1795    /**
1796     * The general read and write attribute operations for hdf5 object data.
1797     *
1798     * @param attr_id
1799     *        the attribute to access
1800     * @param ioType
1801     *        the type of IO operation
1802     * @param objBuf
1803     *        the data buffer to use for write operation
1804     *
1805     * @return the attribute data
1806     *
1807     * @throws Exception
1808     *             if the data can not be retrieved
1809     */
1810    @Override
1811    public Object AttributeCommonIO(long attr_id, H5File.IO_TYPE ioType, Object objBuf) throws Exception {
1812        H5Datatype dsDatatype = (H5Datatype) getDatatype();
1813        Object theData = null;
1814
1815        long dt_size = dsDatatype.getDatatypeSize();
1816        log.trace("AttributeCommonIO(): create native");
1817        long tid = dsDatatype.createNative();
1818
1819        if (ioType == H5File.IO_TYPE.READ) {
1820            log.trace("AttributeCommonIO():read ioType isNamed={} isEnum={} isText={} isRefObj={}", dsDatatype.isNamed(), dsDatatype.isEnum(), dsDatatype.isText(), dsDatatype.isRefObj());
1821
1822            long lsize = 1;
1823            for (int j = 0; j < dims.length; j++)
1824                lsize *= dims[j];
1825            log.trace("AttributeCommonIO():read ioType dt_size={} lsize={}", dt_size, lsize);
1826
1827            try {
1828                // Read data.
1829                Object attr_data = new byte[(int)(dt_size * lsize)];
1830
1831                try {
1832                    H5.H5Aread(attr_id, tid, attr_data);
1833                }
1834                catch (Exception ex) {
1835                    log.debug("AttributeCommonIO(): H5Aread failure: ", ex);
1836                }
1837                theData = compoundTypeIO(dsDatatype, (int)lsize, dsDatatype, attr_data, new int[]{0});
1838            }
1839            catch (Exception ex) {
1840                log.debug("AttributeCommonIO():read ioType read failure: ", ex);
1841                throw new Exception(ex.getMessage(), ex);
1842            }
1843            finally {
1844                dsDatatype.close(tid);
1845            }
1846            for (int i = 0; i < ((ArrayList<Object[]>)theData).size(); i++) {
1847                Object theobj = ((ArrayList<Object[]>)theData).get(i);
1848                log.trace("AttributeCommonIO():read ioType data: {}", theobj);
1849            }
1850            originalBuf = theData;
1851            isDataLoaded = true;
1852        } // H5File.IO_TYPE.READ
1853        else {
1854            theData = compoundTypeWriteIO(dsDatatype, dsDatatype, objBuf, new int[]{0});
1855            try {
1856                H5.H5Awrite(attr_id, tid, theData);
1857            }
1858            catch (Exception ex) {
1859                log.debug("AttributeCommonIO(): H5Awrite failure: ", ex);
1860            }
1861            finally {
1862                dsDatatype.close(tid);
1863            }
1864        } // H5File.IO_TYPE.WRITE
1865
1866        return theData;
1867    }
1868
1869    /**
1870     * Read a subset of an attribute for hdf5 object data.
1871     *
1872     * @return the selected attribute data
1873     *
1874     * @throws Exception
1875     *             if the data can not be retrieved
1876     */
1877    @Override
1878    public Object AttributeSelection() throws Exception {
1879        return originalBuf;
1880        //        H5Datatype dsDatatype = (H5Datatype) getDatatype();
1881        //        Object theData = H5Datatype.allocateArray(dsDatatype, (int)nPoints);
1882        //        if (dsDatatype.isText() && convertByteToString && (theData instanceof byte[])) {
1883        //        log.trace("AttributeSelection(): isText: converting byte array to string array");
1884        //        theData = byteToString((byte[]) theData, (int) dsDatatype.getDatatypeSize());
1885        //    }
1886        //    else if (dsDatatype.isFloat() && dsDatatype.getDatatypeSize() == 16) {
1887        //        log.trace("AttributeSelection(): isFloat: converting byte array to BigDecimal array");
1888        //        theData = dsDatatype.byteToBigDecimal(0, (int)nPoints, (byte[]) theData);
1889        //    }
1890        //    else if (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isFloat() && dsDatatype.getDatatypeBase().getDatatypeSize() == 16) {
1891        //        log.trace("AttributeSelection(): isArray and isFloat: converting byte array to BigDecimal array");
1892        //        long[] arrayDims = dsDatatype.getArrayDims();
1893        //        int asize = (int)nPoints;
1894        //        for (int j = 0; j < arrayDims.length; j++) {
1895        //            asize *= arrayDims[j];
1896        //        }
1897        //        theData = ((H5Datatype)dsDatatype.getDatatypeBase()).byteToBigDecimal(0, asize, (byte[]) theData);
1898        //    }
1899        //        Object theOrig = originalBuf;
1900
1901        /*
1902         * Copy the selection from originalBuf to theData Only three dims are involved and selected data is 2 dimensions
1903         * getHeight() is the row dimension getWidth() is the col dimension
1904         * getDepth() is the frame dimension
1905         */
1906        //        long[] start = getStartDims();
1907        //        long curFrame = start[selectedIndex[2]];
1908        //        for (int col = 0; col < (int)getWidth(); col++) {
1909        //            for (int row = 0; row < (int)getHeight(); row++) {
1910
1911        //                int k = (int)startDims[selectedIndex[2]] * (int)getDepth();
1912        //                int index = row * (int)getWidth() + col;
1913        //                log.trace("compoundAttributeSelection(): point{} row:col:k={}:{}:{}", curFrame, row, col, k);
1914        //                int fromIndex = ((int)curFrame * (int)getWidth() * (int)getHeight() +
1915        //                                        col * (int)getHeight() +
1916        //                                        row);// * (int) dsDatatype.getDatatypeSize();
1917        //                int toIndex = (col * (int)getHeight() +
1918        //                        row);// * (int) dsDatatype.getDatatypeSize();
1919        //                int objSize = 1;
1920        //                if (dsDatatype.isArray()) {
1921        //                    long[] arrayDims = dsDatatype.getArrayDims();
1922        //                    objSize = (int)arrayDims.length;
1923        //                }
1924        //                for (int i = 0; i < ((ArrayList<Object[]>)theOrig).size(); i++) {
1925        //                    Object theOrigobj = ((ArrayList<Object[]>)theOrig).get(i);
1926        //                    Object theDataobj = ((ArrayList<Object[]>)theData).get(i);
1927        //                    log.trace("compoundAttributeSelection(): theOrig={} theData={}", theOrigobj, theDataobj);
1928        //                    System.arraycopy(theOrig, fromIndex, theData, toIndex, objSize);
1929        //                }
1930        //            }
1931        //        }
1932
1933        //        log.trace("compoundAttributeSelection(): theData={}", theData);
1934        //        return theData;
1935    }
1936}