/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the COPYING file, which can be found  *
 * at the root of the source code distribution tree,                         *
 * or in https://www.hdfgroup.org/licenses.                                  *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.h5;

import java.lang.reflect.Array;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Vector;

import hdf.object.Attribute;
import hdf.object.DataFormat;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.MetaDataContainer;
import hdf.object.ScalarDS;

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
import hdf.hdf5lib.HDFNativeData;
import hdf.hdf5lib.exceptions.HDF5DataFiltersException;
import hdf.hdf5lib.exceptions.HDF5Exception;
import hdf.hdf5lib.exceptions.HDF5LibraryException;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * An attribute is a (name, value) pair of metadata attached to a primary data object such as a dataset, group
 * or named datatype.
 *
 * Like a dataset, an attribute has a name, datatype and dataspace.
 *
 * For more details on attributes, see <a
 * href="https://hdfgroup.github.io/hdf5/_h5_a__u_g.html#sec_attribute">HDF5 Attributes in the HDF5 User Guide</a>.
 *
 * The following code is an example of an attribute with a 1D integer array of two elements.
 *
 * <pre>
 * // Example of creating a new attribute
 * // The name of the new attribute
 * String name = "Data range";
 * // Creating an unsigned 1-byte integer datatype
 * Datatype type = new Datatype(Datatype.CLASS_INTEGER, // class
 *                              1,                      // size in bytes
 *                              Datatype.ORDER_LE,      // byte order
 *                              Datatype.SIGN_NONE);    // unsigned
 * // 1-D array of size two
 * long[] dims = {2};
 * // The value of the attribute
 * int[] value = {0, 255};
 * // Create a new attribute attached to the parent object
 * Attribute dataRange = new H5ScalarAttr(parentObj, name, type, dims);
 * // Set the attribute value
 * dataRange.setValue(value);
 * // See FileFormat.writeAttribute() for how to attach an attribute to an object,
 * &#64;see hdf.object.FileFormat#writeAttribute(HObject, Attribute, boolean)
 * </pre>
 *
 *
 * For an atomic datatype, the value of an Attribute will be a 1D array of integers, floats or strings.
 *
 * @see hdf.object.Datatype
 *
 * @version 1.0 6/15/2021
 * @author Allen Byrne
 */
public class H5ScalarAttr extends ScalarDS implements H5Attribute {
    private static final long serialVersionUID = 2072473407027648309L;

    private static final Logger log = LoggerFactory.getLogger(H5ScalarAttr.class);

    /** The HObject to which this H5ScalarAttr is attached, Attribute interface */
    protected HObject parentObject;

    /** additional information and properties for the attribute, Attribute interface */
    private transient Map<String, Object> properties;

    /**
     * flag to indicate if the datatype in the file is the same as the datatype in memory
     */
    protected boolean isNativeDatatype = false;

    /**
     * Create an attribute with specified name, datatype and dimension sizes.
     *
     * For a scalar attribute, the dimension size can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general class
     * and is independent of the file format; for example, the implementation of
     * attribute applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = &quot;CLASS&quot;;
     * String[] classValue = { &quot;IMAGE&quot; };
     * Datatype attrType = null;
     * try {
     *     attrType = new H5Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE,
     *                               Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * Attribute attr = new H5ScalarAttr(parentObj, attrName, attrType, attrDims);
     * attr.setValue(classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this H5ScalarAttr is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for a scalar attribute
     *
     * @see hdf.object.Datatype
     */
    public H5ScalarAttr(HObject parentObj, String attrName, Datatype attrType, long[] attrDims)
    {
        this(parentObj, attrName, attrType, attrDims, null);
    }

    /**
     * Create an attribute with specified name, datatype, dimension sizes and value.
     *
     * For a scalar attribute, the dimension size can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general class
     * and is independent of the file format; for example, the implementation of
     * attribute applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = &quot;CLASS&quot;;
     * String[] classValue = { &quot;IMAGE&quot; };
     * Datatype attrType = null;
     * try {
     *     attrType = new H5Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE,
     *                               Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * Attribute attr = new H5ScalarAttr(parentObj, attrName, attrType, attrDims, classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this H5ScalarAttr is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for a scalar attribute
     * @param attrValue
     *            the value of the attribute, null if no value
     *
     * @see hdf.object.Datatype
     */
    @SuppressWarnings({"rawtypes", "unchecked", "deprecation"})
    public H5ScalarAttr(HObject parentObj, String attrName, Datatype attrType, long[] attrDims,
                        Object attrValue)
    {
        super((parentObj == null) ? null : parentObj.getFileFormat(), attrName,
              (parentObj == null) ? null : parentObj.getFullName(), null);

        log.trace("H5ScalarAttr: start {}", parentObj);
        this.parentObject = parentObj;

        datatype = attrType;

        if (attrValue != null) {
            data         = attrValue;
            originalBuf  = attrValue;
            isDataLoaded = true;
        }
        properties = new HashMap<>();

        if (attrDims == null) {
            rank     = 1;
            dims     = new long[] {1};
            isScalar = true;
        }
        else {
            dims     = attrDims;
            rank     = dims.length;
            isScalar = false;
        }

        selectedDims   = new long[rank];
        startDims      = new long[rank];
        selectedStride = new long[rank];

        log.trace("attrName={}, attrType={}, attrValue={}, rank={}, isUnsigned={}", attrName,
                  attrType.getDescription(), data, rank, getDatatype().isUnsigned());

        resetSelection();
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#open()
     */
    @Override
    public long open()
    {
        if (parentObject == null) {
            log.debug("open(): attribute's parent object is null");
            return HDF5Constants.H5I_INVALID_HID;
        }

        long aid    = HDF5Constants.H5I_INVALID_HID;
        long pObjID = HDF5Constants.H5I_INVALID_HID;

        try {
            pObjID = parentObject.open();
            if (pObjID >= 0) {
                if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5))) {
                    log.trace("open(): FILE_TYPE_HDF5");
                    if (H5.H5Aexists(pObjID, getName()))
                        aid = H5.H5Aopen(pObjID, getName(), HDF5Constants.H5P_DEFAULT);
                }
            }

            log.trace("open(): aid={}", aid);
        }
        catch (Exception ex) {
            log.debug("open(): Failed to open attribute {}: ", getName(), ex);
            aid = HDF5Constants.H5I_INVALID_HID;
        }
        finally {
            parentObject.close(pObjID);
        }

        return aid;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#close(int)
     */
    @Override
    public void close(long aid)
    {
        if (aid >= 0) {
            if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5))) {
                log.trace("close(): FILE_TYPE_HDF5");
                try {
                    H5.H5Aclose(aid);
                }
                catch (HDF5Exception ex) {
                    log.debug("close(): H5Aclose({}) failure: ", aid, ex);
                }
            }
        }
    }

    /**
     * Retrieves datatype and dataspace information from file and sets the attribute
     * in memory.
     *
     * init() is designed to support lazy operation in an attribute object. When a
     * data object is retrieved from file, the datatype, dataspace and raw data are
     * not loaded into memory. When it is asked to read the raw data from file,
     * init() is first called to get the datatype and dataspace information, and
     * then the raw data is loaded from file.
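     *
     * A minimal sketch of the lazy pattern (the attribute object here is
     * illustrative; error handling is omitted):
     *
     * <pre>
     * H5ScalarAttr attr = ...; // attribute object retrieved from file
     * attr.init();             // loads only the datatype and dataspace information
     * Object data = attr.getData(); // triggers read() to load the raw data
     * </pre>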
     */
    @Override
    public void init()
    {
        if (inited) {
            // already called. Initialize only once
            resetSelection();
            log.trace("init(): H5ScalarAttr already initialized");
            return;
        }

        long aid       = HDF5Constants.H5I_INVALID_HID;
        long tid       = HDF5Constants.H5I_INVALID_HID;
        long sid       = HDF5Constants.H5I_INVALID_HID;
        long nativeTID = HDF5Constants.H5I_INVALID_HID;

        aid = open();
        if (aid >= 0) {
            try {
                sid        = H5.H5Aget_space(aid);
                rank       = H5.H5Sget_simple_extent_ndims(sid);
                space_type = H5.H5Sget_simple_extent_type(sid);
                isNULL     = (space_type == HDF5Constants.H5S_NULL);
                tid        = H5.H5Aget_type(aid);
                log.trace("init(): tid={} sid={} rank={} space_type={}", tid, sid, rank, space_type);

                if (rank == 0) {
                    // a scalar data point
                    isScalar = true;
                    rank     = 1;
                    dims     = new long[] {1};
                    log.trace("init(): attribute is a scalar data point");
                }
                else {
                    isScalar = false;
                    dims     = new long[rank];
                    maxDims  = new long[rank];
                    H5.H5Sget_simple_extent_dims(sid, dims, maxDims);
                    log.trace("init(): rank={}, dims={}, maxDims={}", rank, dims, maxDims);
                }

                if (datatype == null) {
                    try {
                        int nativeClass = H5.H5Tget_class(tid);
                        if (nativeClass == HDF5Constants.H5T_REFERENCE) {
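                            // A reference datatype needs the total element count
                            // so H5ReferenceType can size its reference buffers.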
                            long lsize = 1;
                            if (rank > 0) {
                                log.trace("init(): rank={}, dims={}", rank, dims);
                                for (int j = 0; j < dims.length; j++) {
                                    lsize *= dims[j];
                                }
                            }
                            datatype = new H5ReferenceType(getFileFormat(), lsize, tid);
                        }
                        else
                            datatype = new H5Datatype(getFileFormat(), tid);

                        log.trace(
                            "init(): tid={} is tclass={} has isText={} : isNamed={} : isVLEN={} : isEnum={} : isUnsigned={} : isRegRef={}",
                            tid, datatype.getDatatypeClass(), ((H5Datatype)datatype).isText(),
                            datatype.isNamed(), datatype.isVLEN(), datatype.isEnum(), datatype.isUnsigned(),
                            ((H5Datatype)datatype).isRegRef());
                    }
                    catch (Exception ex) {
                        log.debug("init(): failed to create datatype for attribute: ", ex);
                        datatype = null;
                    }
                }

                // Check if the datatype in the file is the native datatype
                try {
                    nativeTID        = H5.H5Tget_native_type(tid);
                    isNativeDatatype = H5.H5Tequal(tid, nativeTID);
                    log.trace("init(): isNativeDatatype={}", isNativeDatatype);
                }
                catch (Exception ex) {
                    log.debug("init(): check if native type failure: ", ex);
                }

                inited = true;
            }
            catch (HDF5Exception ex) {
                log.debug("init(): ", ex);
            }
            finally {
                try {
                    H5.H5Tclose(nativeTID);
                }
                catch (Exception ex2) {
                    log.debug("init(): H5Tclose(nativeTID {}) failure: ", nativeTID, ex2);
                }
                try {
                    H5.H5Tclose(tid);
                }
                catch (HDF5Exception ex2) {
                    log.debug("init(): H5Tclose(tid {}) failure: ", tid, ex2);
                }
                try {
                    H5.H5Sclose(sid);
                }
                catch (HDF5Exception ex2) {
                    log.debug("init(): H5Sclose(sid {}) failure: ", sid, ex2);
                }
            }

            close(aid);

            startDims    = new long[rank];
            selectedDims = new long[rank];

            resetSelection();
        }
        else {
            log.debug("init(): failed to open attribute");
        }
    }

    /**
     * Returns the datatype of the data object.
     *
     * @return the datatype of the data object.
     */
    @Override
    public Datatype getDatatype()
    {
        if (!inited)
            init();

        if (datatype == null) {
            long aid = HDF5Constants.H5I_INVALID_HID;
            long tid = HDF5Constants.H5I_INVALID_HID;

            aid = open();
            if (aid >= 0) {
                try {
                    tid = H5.H5Aget_type(aid);
                    log.trace("getDatatype(): isNativeDatatype={}", isNativeDatatype);
                    if (!isNativeDatatype) {
                        long tmptid = HDF5Constants.H5I_INVALID_HID;
                        try {
                            tmptid = H5Datatype.toNative(tid);
                            if (tmptid >= 0) {
                                try {
                                    H5.H5Tclose(tid);
                                }
                                catch (Exception ex2) {
                                    log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex2);
                                }
                                tid = tmptid;
                            }
                        }
                        catch (Exception ex) {
                            log.debug("getDatatype(): toNative: ", ex);
                        }
                    }
                    int nativeClass = H5.H5Tget_class(tid);
                    if (nativeClass == HDF5Constants.H5T_REFERENCE) {
                        long lsize = 1;
                        long sid   = H5.H5Aget_space(aid);
                        int rank   = H5.H5Sget_simple_extent_ndims(sid);
                        if (rank > 0) {
                            long dims[] = new long[rank];
                            H5.H5Sget_simple_extent_dims(sid, dims, null);
                            log.trace("getDatatype(): rank={}, dims={}", rank, dims);
                            for (int j = 0; j < dims.length; j++) {
                                lsize *= dims[j];
                            }
                        }
                        try {
                            H5.H5Sclose(sid);
                        }
                        catch (HDF5Exception ex) {
                            log.debug("getDatatype(): H5Sclose(sid {}) failure: ", sid, ex);
                        }
                        datatype = new H5ReferenceType(getFileFormat(), lsize, tid);
                    }
                    else
                        datatype = new H5Datatype(getFileFormat(), tid);
                }
                catch (Exception ex) {
                    log.debug("getDatatype(): ", ex);
                }
                finally {
                    try {
                        H5.H5Tclose(tid);
                    }
                    catch (HDF5Exception ex) {
                        log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex);
                    }
                    try {
                        H5.H5Aclose(aid);
                    }
                    catch (HDF5Exception ex) {
                        log.debug("getDatatype(): H5Aclose(aid {}) failure: ", aid, ex);
                    }
                }
            }
        }

        return datatype;
    }

    /**
     * Returns the data buffer of the attribute in memory.
     *
     * If data is already loaded into memory, returns the data; otherwise, calls
     * read() to read data from file into a memory buffer and returns the memory
     * buffer.
     *
     * The whole attribute is read into memory. Users can also select
     * a subset from the whole data. Subsetting is done in an implicit way.
     *
     * <b>How to Select a Subset</b>
     *
     * A selection is specified by three arrays: start, stride and count.
     * <ol>
     * <li>start: offset of a selection
     * <li>stride: determines how many elements to move in each dimension
     * <li>count: number of elements to select in each dimension
     * </ol>
     * getStartDims(), getStride() and getSelectedDims() return the start,
     * stride and count arrays respectively. Applications can make a selection
     * by changing the values of the arrays.
     *
     * The following example shows how to make a subset. In the example, the
     * attribute is a 4-dimensional array of [200][100][50][10], i.e. dims[0]=200;
     * dims[1]=100; dims[2]=50; dims[3]=10; <br>
     * We want to select every other data point in dims[1] and dims[2].
     *
     * <pre>
     * int rank = attribute.getRank(); // number of dimensions of the attribute
     * long[] dims = attribute.getDims(); // the dimension sizes of the attribute
     * long[] selected = attribute.getSelectedDims(); // the selected size of the attribute
     * long[] start = attribute.getStartDims(); // the offset of the selection
     * long[] stride = attribute.getStride(); // the stride of the attribute
     * int[] selectedIndex = attribute.getSelectedIndex(); // the selected dimensions for display
     *
     * // select dim1 and dim2 as 2D data for display, and slice through dim0
     * selectedIndex[0] = 1;
     * selectedIndex[1] = 2;
     * selectedIndex[2] = 0;
     *
     * // reset the selection arrays
     * for (int i = 0; i &lt; rank; i++) {
     *     start[i] = 0;
     *     selected[i] = 1;
     *     stride[i] = 1;
     * }
     *
     * // set stride to 2 on dim1 and dim2 so that every other data point is
     * // selected.
     * stride[1] = 2;
     * stride[2] = 2;
     *
     * // set the selection size of dim1 and dim2
     * selected[1] = dims[1] / stride[1];
     * selected[2] = dims[2] / stride[2];
     *
     * // when H5ScalarAttr.getData() is called, the selection above will be used since
     * // the dimension arrays are passed by reference. Changes of these arrays
     * // outside the attribute object directly change the values of these arrays
     * // in the attribute object.
     * </pre>
     *
     * For H5ScalarAttr, the memory data buffer is a one-dimensional array of byte,
     * short, int, float, double or String type based on the datatype of the
     * attribute.
     *
     * @return the memory buffer of the attribute.
     *
     * @throws Exception if object can not be read
     * @throws OutOfMemoryError if memory is exhausted
     */
    @Override
    public Object getData() throws Exception, OutOfMemoryError
    {
        log.trace("getData(): isDataLoaded={}", isDataLoaded);
        if (!isDataLoaded)
            data = read(); // load the data, attributes read all data

        nPoints = 1;
        log.trace("getData(): selectedDims length={}", selectedDims.length);
        int point_len = selectedDims.length;
        // Partial data for 3 or more dimensions
        if (rank > 2)
            point_len = 3;
        for (int j = 0; j < point_len; j++) {
            log.trace("getData(): selectedDims[{}]={}", j, selectedDims[j]);
            nPoints *= selectedDims[j];
        }
        log.trace("getData(): read {}", nPoints);

        // apply the selection for 3 or more dimensions
        // selection only expects to use 3 selectedDims
        //     where selectedIndex[0] is the row dimension
        //     where selectedIndex[1] is the col dimension
        //     where selectedIndex[2] is the frame dimension
        if (rank > 2)
            data = AttributeSelection();

        return data;
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#copy(hdf.object.Group, java.lang.String, long[], java.lang.Object)
     */
    @Override
    public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff) throws Exception
    {
        // not supported
        throw new UnsupportedOperationException("copy operation unsupported for H5.");
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Attribute#readBytes()
     */
    @Override
    public byte[] readBytes() throws HDF5Exception
    {
        byte[] theData = null;

        if (!isInited())
            init();

        long aid = open();
        if (aid >= 0) {
            long tid = HDF5Constants.H5I_INVALID_HID;

            try {
                long[] lsize = {1};
                for (int j = 0; j < selectedDims.length; j++)
                    lsize[0] *= selectedDims[j];

                tid       = H5.H5Aget_type(aid);
                long size = H5.H5Tget_size(tid) * lsize[0];
                log.trace("readBytes(): size = {}", size);

                if (size < Integer.MIN_VALUE || size > Integer.MAX_VALUE)
                    throw new Exception("Invalid int size");

                theData = new byte[(int)size];

                log.trace("readBytes(): read attribute with type id {} and size={}", tid, lsize);
                H5.H5Aread(aid, tid, theData);
            }
            catch (Exception ex) {
                log.debug("readBytes(): failed to read data: ", ex);
            }
            finally {
                try {
                    H5.H5Tclose(tid);
                }
                catch (HDF5Exception ex2) {
                    log.debug("readBytes(): H5Tclose(tid {}) failure: ", tid, ex2);
                }
                close(aid);
            }
        }

        return theData;
    }

    /**
     * Reads the data from file.
     *
     * read() reads the data from file to a memory buffer and returns the memory
     * buffer. The attribute object does not hold the memory buffer. To store the
     * memory buffer in the attribute object, one must call getData().
     *
     * By default, the whole attribute is read into memory.
     *
     * For ScalarAttr, the memory data buffer is a one-dimensional array of byte,
     * short, int, float, double or String type based on the datatype of the
     * attribute.
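     *
     * A short usage sketch (illustrative; error handling omitted):
     *
     * <pre>
     * Object buf = attr.read();     // returns the buffer without caching it
     * Object data = attr.getData(); // reads and caches the buffer in the attribute
     * </pre>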
     *
     * @return the data read from file.
     *
     * @see #getData()
     * @see hdf.object.DataFormat#read()
     *
     * @throws Exception
     *             if object can not be read
     */
    @Override
    public Object read() throws Exception
    {
        Object readData = null;

        if (!isInited())
            init();

        try {
            readData = scalarAttributeCommonIO(H5File.IO_TYPE.READ, null);
        }
        catch (Exception ex) {
            log.debug("read(): failed to read scalar attribute: ", ex);
            throw new Exception("failed to read scalar attribute: " + ex.getMessage(), ex);
        }

        return readData;
    }

    /**
     * Writes the given data buffer into this attribute in a file.
     *
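     * A minimal sketch, assuming the buffer matches the attribute's datatype
     * and dimension sizes (error handling omitted):
     *
     * <pre>
     * int[] newValue = {0, 255};
     * attr.write(newValue); // updates the attribute value in the file
     * </pre>
     *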
     * @param buf
     *            The buffer that contains the data values.
     *
     * @throws Exception
     *             If there is an error at the HDF5 library level.
     */
    @Override
    public void write(Object buf) throws Exception
    {
        if (this.getFileFormat().isReadOnly())
            throw new Exception("cannot write to scalar attribute in file opened as read-only");

        if (!buf.equals(data))
            setData(buf);

        if (parentObject == null) {
            log.debug("write(Object): parent object is null; nowhere to write attribute to");
            return;
        }

        ((MetaDataContainer)getParentObject()).writeMetadata(this);

        try {
            scalarAttributeCommonIO(H5File.IO_TYPE.WRITE, buf);
        }
        catch (Exception ex) {
            log.debug("write(Object): failed to write to scalar attribute: ", ex);
            throw new Exception("failed to write to scalar attribute: " + ex.getMessage(), ex);
        }
        resetSelection();
    }

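    /**
     * Common helper for read and write: opens the attribute, delegates the I/O
     * to AttributeCommonIO, and always closes the attribute identifier.
     */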
    private Object scalarAttributeCommonIO(H5File.IO_TYPE ioType, Object writeBuf) throws Exception
    {
        H5Datatype dsDatatype = (H5Datatype)getDatatype();
        Object theData        = null;

        /*
         * I/O type-specific pre-initialization.
         */
        if (ioType == H5File.IO_TYPE.WRITE) {
            if (writeBuf == null) {
                log.debug("scalarAttributeCommonIO(): writeBuf is null");
                throw new Exception("write buffer is null");
            }
        }

        long aid = open();
        if (aid >= 0) {
            log.trace("scalarAttributeCommonIO(): isDataLoaded={}", isDataLoaded);
            try {
                theData = AttributeCommonIO(aid, ioType, writeBuf);
            }
            finally {
                close(aid);
            }
        }
        else
            log.debug("scalarAttributeCommonIO(): failed to open attribute");

        return theData;
    }

    /* Implement interface Attribute */

    /**
     * Returns the HObject to which this Attribute is currently "attached".
     *
     * @return the HObject to which this Attribute is currently "attached".
     */
    @Override
    public HObject getParentObject()
    {
        return parentObject;
    }

    /**
     * Sets the HObject to which this Attribute is "attached".
     *
     * @param pObj
     *            the new HObject to which this Attribute is "attached".
     */
    @Override
    public void setParentObject(HObject pObj)
    {
        parentObject = pObj;
    }

    /**
     * Set a property for the attribute.
     *
     * @param key the attribute Map key
     * @param value the attribute Map value
     */
    @Override
    public void setProperty(String key, Object value)
    {
        properties.put(key, value);
    }

    /**
     * Get a property for a given key.
     *
     * @param key the attribute Map key
     *
     * @return the property
     */
    @Override
    public Object getProperty(String key)
    {
        return properties.get(key);
    }

    /**
     * Get all property keys.
     *
     * @return the Collection of property keys
     */
    @Override
    public Collection<String> getPropertyKeys()
    {
        return properties.keySet();
    }

    /**
     * Returns the name of the object. For example, "Raster Image #2".
     *
     * @return The name of the object.
     */
    @Override
    public final String getAttributeName()
    {
        return getName();
    }

    /**
     * Retrieves the attribute data from the file.
     *
     * @return the attribute data.
     *
     * @throws Exception
     *             if the data can not be retrieved
     */
    @Override
    public final Object getAttributeData() throws Exception, OutOfMemoryError
    {
        return getData();
    }

    /**
     * Returns the datatype of the attribute.
     *
     * @return the datatype of the attribute.
     */
    @Override
    public final Datatype getAttributeDatatype()
    {
        return getDatatype();
    }

    /**
     * Returns the space type for the attribute. It returns a
     * negative number if it failed to retrieve the type information from
     * the file.
     *
     * @return the space type for the attribute.
     */
    @Override
    public final int getAttributeSpaceType()
    {
        return getSpaceType();
    }

    /**
     * Returns the rank (number of dimensions) of the attribute. It returns a
     * negative number if it failed to retrieve the dimension information from
     * the file.
     *
     * @return the number of dimensions of the attribute.
     */
    @Override
    public final int getAttributeRank()
    {
        return getRank();
    }

    /**
     * Returns the selected size of the rows and columns of the attribute. It returns a
     * negative number if it failed to retrieve the size information from
     * the file.
     *
     * @return the selected size of the rows and columns of the attribute.
     */
    @Override
    public final int getAttributePlane()
    {
        return (int)getWidth() * (int)getHeight();
    }

    /**
     * Returns the array that contains the dimension sizes of the data value of
     * the attribute. It returns null if it failed to retrieve the dimension
     * information from the file.
     *
     * @return the dimension sizes of the attribute.
     */
    @Override
    public final long[] getAttributeDims()
    {
        return getDims();
    }

    /**
     * @return true if the dataspace is a NULL; otherwise, returns false.
     */
    @Override
    public boolean isAttributeNULL()
    {
        return isNULL();
    }

    /**
     * @return true if the data is a single scalar point; otherwise, returns false.
     */
    @Override
    public boolean isAttributeScalar()
    {
        return isScalar();
    }

    /**
     * Not intended for public use in the future.
     *
     * setData() is not safe to use because it changes the memory buffer
     * of the dataset object. Dataset operations such as write/read
     * will fail if the buffer type or size is changed.
     *
     * @param d  the object data - must be an array of Objects
     */
    @Override
    public void setAttributeData(Object d)
    {
        setData(d);
    }

    /**
     * Writes the memory buffer of this attribute to file.
     *
     * @throws Exception if buffer can not be written
     */
    @Override
    public void writeAttribute() throws Exception
    {
        write();
    }

    /**
     * Writes the given data buffer into this attribute in a file.
     *
     * The data buffer is a vector that contains the data values. The data is written
     * into the file as one data blob.
     *
     * @param buf
     *            The vector that contains the data values.
     *
     * @throws Exception
     *             If there is an error at the library level.
     */
    @Override
    public void writeAttribute(Object buf) throws Exception
    {
        write(buf);
    }

    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter. For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} of three data points.
     *
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     *
     * @return the string representation of the data values.
     */
    @Override
    public String toAttributeString(String delimiter)
    {
        return toString(delimiter, -1);
    }

    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter. For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} of three data points.
     *
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     * @param maxItems
     *            The maximum number of Array values to return
     *
     * @return the string representation of the data values.
     */
    @Override
    public String toAttributeString(String delimiter, int maxItems)
    {
        Object theData = originalBuf;
        if (theData == null) {
            log.debug("toAttributeString: value is null");
            return null;
        }

        Class<? extends Object> valClass = theData.getClass();
        if (!valClass.isArray() && !getDatatype().isRef()) {
            log.trace("toAttributeString: finish - not array");
            String strValue = theData.toString();
            if (maxItems > 0 && strValue.length() > maxItems)
                // truncate the extra characters
                strValue = strValue.substring(0, maxItems);
            return strValue;
        }

        int n          = 0;
        Datatype dtype = getDatatype();
        // value is an array
        if (valClass.isArray()) {
            n = Array.getLength(theData);
            if (dtype.isRef())
                n /= (int)dtype.getDatatypeSize();
        }
        else
            n = ((ArrayList<Object[]>)theData).size();
        if ((maxItems > 0) && (n > maxItems))
            n = maxItems;

        return toString(theData, dtype, delimiter, n);
    }

    @Override
    protected String toString(Object theData, Datatype theType, String delimiter, int count)
    {
        log.trace("toString: is_enum={} is_unsigned={} count={}", theType.isEnum(), theType.isUnsigned(),
                  count);
        StringBuilder sb                 = new StringBuilder();
        Class<? extends Object> valClass = theData.getClass();
        log.trace("toString:valClass={}", valClass);

        H5Datatype dtype = (H5Datatype)theType;
        log.trace("toString: count={} isStdRef={}", count, dtype.isStdRef());
        if (dtype.isStdRef()) {
            return ((H5ReferenceType)dtype).toString(delimiter, count);
        }
        else if (dtype.isVLEN() && !dtype.isVarStr()) {
            log.trace("toString: vlen");
            String strValue;

            for (int k = 0; k < count; k++) {
                Object value = Array.get(theData, k);
                if (value == null)
                    strValue = "null";
                else {
                    if (dtype.getDatatypeBase().isRef()) {
                        ArrayList<byte[]> ref_value = (ArrayList<byte[]>)value;
                        log.trace("toString: vlen value={}", ref_value);
                        strValue = "{";
                        for (int m = 0; m < ref_value.size(); m++) {
                            byte[] curBytes = ref_value.get(m);
                            if (m > 0)
                                strValue += ", ";
                            if (H5ReferenceType.zeroArrayCheck(curBytes))
                                strValue += "NULL";
                            else {
                                if (((H5Datatype)dtype.getDatatypeBase()).isStdRef()) {
                                    strValue += H5.H5Rget_obj_name(curBytes, HDF5Constants.H5P_DEFAULT);
                                }
                                else if (dtype.getDatatypeBase().getDatatypeSize() ==
                                         HDF5Constants.H5R_DSET_REG_REF_BUF_SIZE) {
                                    try {
                                        strValue += H5Datatype.descRegionDataset(
                                            parentObject.getFileFormat().getFID(), curBytes);
                                    }
                                    catch (Exception ex) {
                                        ex.printStackTrace();
                                    }
                                }
                                else if (dtype.getDatatypeBase().getDatatypeSize() ==
                                         HDF5Constants.H5R_OBJ_REF_BUF_SIZE) {
                                    try {
                                        strValue += H5Datatype.descReferenceObject(
                                            parentObject.getFileFormat().getFID(), curBytes);
                                    }
                                    catch (Exception ex) {
                                        ex.printStackTrace();
                                    }
                                }
                            }
                        }
                        strValue += "}";
                    }
                    else
                        strValue = value.toString();
                }
                if (k > 0)
                    sb.append(", ");
                sb.append(strValue);
            }
        }
        else if (dtype.isRef()) {
            log.trace("toString: ref");
            String strValue  = "NULL";
            byte[] rElements = null;

            for (int k = 0; k < count; k++) {
                // need to iterate if type is ArrayList
                if (theData instanceof ArrayList)
                    rElements = (byte[])((ArrayList)theData).get(k);
                else
                    rElements = (byte[])Array.get(theData, k);

                if (H5ReferenceType.zeroArrayCheck(rElements))
                    strValue = "NULL";
                else {
                    if (dtype.isStdRef()) {
                        strValue = H5.H5Rget_obj_name(rElements, HDF5Constants.H5P_DEFAULT);
                    }
                    else if (dtype.getDatatypeSize() == HDF5Constants.H5R_DSET_REG_REF_BUF_SIZE) {
                        try {
                            strValue = H5Datatype.descRegionDataset(parentObject.getFileFormat().getFID(),
                                                                    rElements);
                        }
                        catch (Exception ex) {
                            ex.printStackTrace();
                        }
                    }
                    else if (dtype.getDatatypeSize() == HDF5Constants.H5R_OBJ_REF_BUF_SIZE) {
                        try {
                            strValue = H5Datatype.descReferenceObject(parentObject.getFileFormat().getFID(),
                                                                      rElements);
                        }
                        catch (Exception ex) {
                            ex.printStackTrace();
                        }
                    }
                }
                if (k > 0)
                    sb.append(", ");
                sb.append(strValue);
            }
        }
        else {
            return super.toString(theData, theType, delimiter, count);
        }

        return sb.toString();
    }

    /* Implement interface H5Attribute */

    /**
     * The general read and write attribute operations for hdf5 object data.
     *
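     * A hedged usage sketch (the attribute identifier comes from open(); error
     * handling omitted):
     *
     * <pre>
     * long aid = attr.open();
     * try {
     *     Object data = attr.AttributeCommonIO(aid, H5File.IO_TYPE.READ, null);
     * }
     * finally {
     *     attr.close(aid);
     * }
     * </pre>
     *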
     * @param attr_id
     *        the attribute to access
     * @param ioType
     *        the type of IO operation
     * @param objBuf
     *        the data buffer to use for write operation
     *
     * @return the attribute data
     *
     * @throws Exception
     *             if the data can not be retrieved
     */
    @Override
    public Object AttributeCommonIO(long attr_id, H5File.IO_TYPE ioType, Object objBuf) throws Exception
    {
        H5Datatype dsDatatype = (H5Datatype)getDatatype();
        Object theData        = null;

        long dt_size = dsDatatype.getDatatypeSize();
        log.trace("AttributeCommonIO(): create native");
        long tid = dsDatatype.createNative();

        if (ioType == H5File.IO_TYPE.READ) {
            log.trace("AttributeCommonIO():read ioType isNamed={} isEnum={} isText={} isRefObj={}",
                      dsDatatype.isNamed(), dsDatatype.isEnum(), dsDatatype.isText(), dsDatatype.isRefObj());
            log.trace("AttributeCommonIO():read ioType isVLEN={}", dsDatatype.isVLEN());

            long lsize = 1;
            for (int j = 0; j < dims.length; j++)
                lsize *= dims[j];
            log.trace("AttributeCommonIO():read ioType dt_size={} lsize={}", dt_size, lsize);

            try {
                if (dsDatatype.isVarStr()) {
                    String[] strs = new String[(int)lsize];
                    for (int j = 0; j < lsize; j++)
                        strs[j] = "";
                    try {
                        log.trace("AttributeCommonIO():read ioType H5Aread_VLStrings");
                        H5.H5Aread_VLStrings(attr_id, tid, strs);
                    }
                    catch (Exception ex) {
                        log.debug("AttributeCommonIO():read ioType H5Aread_VLStrings failure: ", ex);
                        ex.printStackTrace();
                    }
                    theData = strs;
                }
                else if (dsDatatype.isCompound()) {
                    String[] strs = new String[(int)lsize];
                    for (int j = 0; j < lsize; j++)
                        strs[j] = "";
                    try {
                        log.trace("AttributeCommonIO():read ioType H5AreadComplex");
                        H5.H5AreadComplex(attr_id, tid, strs);
                    }
                    catch (Exception ex) {
                        ex.printStackTrace();
                    }
                    theData = strs;
                }
                else if (dsDatatype.isVLEN()) {
                    log.trace("AttributeCommonIO():read ioType:VLEN-REF H5Aread isArray()={}",
                              dsDatatype.isArray());
                    theData = new ArrayList[(int)lsize];
                    for (int j = 0; j < lsize; j++)
                        ((ArrayList[])theData)[j] = new ArrayList<byte[]>();

                    try {
                        H5.H5AreadVL(attr_id, tid, (Object[])theData);
                    }
                    catch (Exception ex) {
                        log.debug("AttributeCommonIO():read ioType:VLEN-REF H5Aread failure: ", ex);
                        ex.printStackTrace();
                    }
                }
                else {
                    Object attr_data = null;
                    try {
                        attr_data = H5Datatype.allocateArray(dsDatatype, (int)lsize);
                    }
                    catch (OutOfMemoryError e) {
                        log.debug("AttributeCommonIO():read ioType out of memory", e);
                        theData = null;
                    }
                    if (attr_data == null)
                        log.debug("AttributeCommonIO():read ioType allocateArray returned null");

                    log.trace("AttributeCommonIO():read ioType H5Aread isArray()={}", dsDatatype.isArray());
                    try {
                        H5.H5Aread(attr_id, tid, attr_data);
                    }
                    catch (Exception ex) {
                        log.debug("AttributeCommonIO():read ioType H5Aread failure: ", ex);
                        ex.printStackTrace();
                    }

                    /*
                     * Perform any necessary data conversions.
                     */
                    if (dsDatatype.isText() && convertByteToString && (attr_data instanceof byte[])) {
                        log.trace(
                            "AttributeCommonIO():read ioType isText: converting byte array to string array");
                        theData = byteToString((byte[])attr_data, (int)dsDatatype.getDatatypeSize());
                    }
                    else if (dsDatatype.isFloat() && dt_size == 16) {
                        log.trace(
                            "AttributeCommonIO():read ioType isFloat: converting byte array to BigDecimal array");
                        theData = dsDatatype.byteToBigDecimal(0, (int)nPoints, (byte[])attr_data);
                    }
                    else if (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isFloat() &&
                             dsDatatype.getDatatypeBase().getDatatypeSize() == 16) {
                        log.trace(
                            "AttributeCommonIO():read ioType isArray and isFloat: converting byte array to BigDecimal array");
                        long[] arrayDims = dsDatatype.getArrayDims();
                        int asize        = (int)nPoints;
                        for (int j = 0; j < arrayDims.length; j++) {
                            asize *= arrayDims[j];
                        }
                        theData = ((H5Datatype)dsDatatype.getDatatypeBase())
                                      .byteToBigDecimal(0, asize, (byte[])attr_data);
                    }
                    else if (dsDatatype.isRef() && (attr_data instanceof byte[])) {
                        log.trace(
                            "AttributeCommonIO():read ioType isRef: converting byte array to List of bytes");
                        theData = new ArrayList<byte[]>((int)lsize);
                        for (int m = 0; m < (int)lsize; m++) {
                            byte[] curBytes = new byte[(int)dsDatatype.getDatatypeSize()];
                            try {
                                System.arraycopy(attr_data, m * (int)dt_size, curBytes, 0,
                                                 (int)dsDatatype.getDatatypeSize());
                                ((ArrayList<byte[]>)theData).add(curBytes);
                            }
                            catch (Exception err) {
                                log.trace("AttributeCommonIO(): arraycopy failure: ", err);
                            }
                        }
                    }
                    else
                        theData = attr_data;
                }
            }
            catch (HDF5DataFiltersException exfltr) {
                log.debug("AttributeCommonIO():read ioType read failure: ", exfltr);
                throw new Exception("Filter not available exception: " + exfltr.getMessage(), exfltr);
            }
            catch (Exception ex) {
                log.debug("AttributeCommonIO():read ioType read failure: ", ex);
                throw new Exception(ex.getMessage(), ex);
            }
            finally {
                dsDatatype.close(tid);
            }
            log.trace("AttributeCommonIO():read ioType data: {}", theData);
            originalBuf  = theData;
            isDataLoaded = true;
        } // H5File.IO_TYPE.READ
        else {
            /*
             * Perform any necessary data conversions before writing the data.
             *
             * Note that v-len strings do not get converted, regardless of
             * conversion request type.
             */
            Object tmpData = objBuf;
            try {
                // Check if we need to convert integer data
                String cname = objBuf.getClass().getName();
                char dname   = cname.charAt(cname.lastIndexOf("[") + 1);
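                // dname is the JVM array-type descriptor of the in-memory buffer:
                // 'S' = short[], 'I' = int[], 'J' = long[]. A Java element one
                // size wider than the file datatype indicates the values were
                // widened to hold unsigned data and must be converted back to
                // the file's C-type width before writing.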
                boolean doIntConversion =
                    (((dt_size == 1) && (dname == 'S')) || ((dt_size == 2) && (dname == 'I')) ||
                     ((dt_size == 4) && (dname == 'J')) || (dsDatatype.isUnsigned() && unsignedConverted));

                if (doIntConversion) {
                    log.trace("AttributeCommonIO(): converting integer data to unsigned C-type integers");
                    tmpData = convertToUnsignedC(objBuf, null);
                }
                else if (dsDatatype.isText() && !dsDatatype.isVarStr() && convertByteToString &&
                         !(objBuf instanceof byte[])) {
                    log.trace("AttributeCommonIO(): converting string array to byte array");
                    tmpData = stringToByte((String[])objBuf, (int)dt_size);
                }
                else if (dsDatatype.isEnum() && (Array.get(objBuf, 0) instanceof String)) {
                    log.trace("AttributeCommonIO(): converting enum names to values");
                    tmpData = dsDatatype.convertEnumNameToValue((String[])objBuf);
                }
                else if (dsDatatype.isFloat() && dsDatatype.getDatatypeSize() == 16) {
                    log.trace("AttributeCommonIO(): isFloat: converting BigDecimal array to byte array");
                    throw new Exception("data conversion failure: cannot write BigDecimal values");
                    // tmpData = dsDatatype.bigDecimalToByte(0, (int)nPoints, (BigDecimal[]) objBuf);
                }
            }
            catch (Exception ex) {
                log.debug("AttributeCommonIO(): data conversion failure: ", ex);
                throw new Exception("data conversion failure: " + ex.getMessage(), ex);
            }

            /*
             * Actually write the data now that everything has been setup.
             */
            try {
                if (dsDatatype.isVarStr()) {
                    log.trace("AttributeCommonIO(): H5Awrite_VLStrings aid={} tid={}", attr_id, tid);

                    H5.H5Awrite_VLStrings(attr_id, tid, (Object[])tmpData);
                }
                else if (dsDatatype.isVLEN() ||
                         (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isVLEN())) {
                    log.trace("AttributeCommonIO(): H5AwriteVL aid={} tid={}", attr_id, tid);

                    H5.H5AwriteVL(attr_id, tid, (Object[])tmpData);
                }
                else {
                    log.trace("AttributeCommonIO(): dsDatatype.isRef()={} data is String={}",
                              dsDatatype.isRef(), tmpData instanceof String);
                    if (dsDatatype.isRef() && tmpData instanceof String) {
                        // reference is a path+name to the object
                        log.trace("AttributeCommonIO(): Attribute class is CLASS_REFERENCE");
                        log.trace("AttributeCommonIO(): H5Awrite aid={} tid={}", attr_id, tid);
                        byte[] refBuf =
                            H5.H5Rcreate_object(getFID(), (String)tmpData, HDF5Constants.H5P_DEFAULT);
                        if (refBuf != null) {
                            H5.H5Awrite(attr_id, tid, refBuf);
                            H5.H5Rdestroy(refBuf);
                        }
                    }
                    else if (Array.get(tmpData, 0) instanceof String) {
                        int len     = ((String[])tmpData).length;
                        byte[] bval = Dataset.stringToByte((String[])tmpData, (int)dt_size);
                        if (bval != null && bval.length == dt_size * len) {
                            bval[bval.length - 1] = 0;
                            tmpData               = bval;
                        }
                        log.trace("AttributeCommonIO(): String data={}", tmpData);
                        log.trace("AttributeCommonIO(): H5Awrite aid={} tid={}", attr_id, tid);
                        H5.H5Awrite(attr_id, tid, tmpData);
                    }
                    else {
                        log.trace("AttributeCommonIO(): H5Awrite aid={} tid={}", attr_id, tid);
                        H5.H5Awrite(attr_id, tid, tmpData);
                    }
                }
            }
            catch (Exception ex) {
                log.debug("AttributeCommonIO(): write failure: ", ex);
                throw new Exception(ex.getMessage(), ex);
            }
            finally {
                dsDatatype.close(tid);
            }
        } // H5File.IO_TYPE.WRITE

        return theData;
    }

    /**
     * Read a subset of an attribute for hdf5 object data.
     *
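     * Invoked by getData() when the attribute rank is greater than 2; the
     * current start/stride/selected dimension arrays determine which 2D frame
     * of the data is extracted.
     *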
     * @return the selected attribute data
     *
     * @throws Exception
     *             if the data can not be retrieved
     */
    @Override
    public Object AttributeSelection() throws Exception
    {
        H5Datatype dsDatatype = (H5Datatype)getDatatype();
        int dsSize            = (int)dsDatatype.getDatatypeSize();
        if (dsDatatype.isArray())
            dsSize = (int)dsDatatype.getDatatypeBase().getDatatypeSize();
        Object theData = H5Datatype.allocateArray(dsDatatype, (int)nPoints);
        if (dsDatatype.isText() && convertByteToString && (theData instanceof byte[])) {
            log.trace("AttributeSelection(): isText: converting byte array to string array");
            theData = byteToString((byte[])theData, dsSize);
        }
        else if (dsDatatype.isFloat() && dsSize == 16) {
            log.trace("AttributeSelection(): isFloat: converting byte array to BigDecimal array");
            theData = dsDatatype.byteToBigDecimal(0, (int)nPoints, (byte[])theData);
        }
        else if (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isFloat() && dsSize == 16) {
            log.trace(
                "AttributeSelection(): isArray and isFloat: converting byte array to BigDecimal array");
            long[] arrayDims = dsDatatype.getArrayDims();
            int asize        = (int)nPoints;
            for (int j = 0; j < arrayDims.length; j++) {
                asize *= arrayDims[j];
            }
            theData = ((H5Datatype)dsDatatype.getDatatypeBase()).byteToBigDecimal(0, asize, (byte[])theData);
        }
        Object theOrig = originalBuf;
        log.trace("AttributeSelection(): originalBuf={} with datatype size={}", originalBuf, dsSize);

        // Copy the selection from originalBuf to theData
        // Only three dims are involved and selected data is 2 dimensions
        //     getHeight() is the row dimension
        //     getWidth() is the col dimension
        //     getDepth() is the frame dimension
        long[] start  = getStartDims();
        long curFrame = start[selectedIndex[2]];
        int k         = (int)startDims[selectedIndex[2]] * (int)getDepth();
        for (int col = 0; col < (int)getWidth(); col++) {
            for (int row = 0; row < (int)getHeight(); row++) {
                int index = row * (int)getWidth() + col;
                log.trace("AttributeSelection(): point[{}] row:col:k={}:{}:{}", curFrame, row, col, k);
                int fromIndex =
                    ((int)curFrame * (int)getWidth() * (int)getHeight() + col * (int)getHeight() + row);
                int toIndex = (col * (int)getHeight() + row);
                int objSize = 1;
                if (dsDatatype.isArray()) {
                    long[] arrayDims = dsDatatype.getArrayDims();
                    objSize          = arrayDims.length;
                    try {
                        System.arraycopy(theOrig, fromIndex, theData, toIndex, objSize);
                    }
                    catch (Exception err) {
                        log.debug("AttributeSelection(): arraycopy failure: ", err);
                    }
                }
                else if (dsDatatype.isStdRef()) {
                    objSize   = (int)HDF5Constants.H5R_REF_BUF_SIZE;
                    fromIndex = fromIndex * HDF5Constants.H5R_REF_BUF_SIZE;
                    toIndex   = toIndex * HDF5Constants.H5R_REF_BUF_SIZE;
                    try {
                        System.arraycopy(theOrig, fromIndex, theData, toIndex, objSize);
                    }
                    catch (Exception err) {
                        log.debug("AttributeSelection(): arraycopy failure: ", err);
                    }
                }
                else {
                    if (theOrig instanceof ArrayList) {
                        if (dsDatatype.isRef()) {
                            byte[] rElements = (byte[])((ArrayList)theOrig).get(fromIndex);
                            try {
                                System.arraycopy(rElements, 0, theData, toIndex * dsSize, dsSize);
                            }
                            catch (Exception err) {
                                log.trace("AttributeSelection(): refarraycopy failure: ", err);
                            }
                        }
                        else {
                            Object value = Array.get(theOrig, fromIndex);
                            log.trace("AttributeSelection(): value={}", value);
                            ((ArrayList<Object>)theData).add(toIndex, value);
                        }
                    }
                    else
                        theData = theOrig;
                }
            }
        }

        log.trace("AttributeSelection(): theData={}", theData);
        return theData;
    }
}