/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the files COPYING and Copyright.html. *
 * COPYING can be found at the root of the source code distribution tree.    *
 * Or, see https://support.hdfgroup.org/products/licenses.html               *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.h5;

import java.lang.reflect.Array;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Vector;

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
import hdf.hdf5lib.HDFNativeData;
import hdf.hdf5lib.exceptions.HDF5DataFiltersException;
import hdf.hdf5lib.exceptions.HDF5Exception;
import hdf.hdf5lib.exceptions.HDF5LibraryException;

import hdf.object.Attribute;
import hdf.object.DataFormat;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.MetaDataContainer;
import hdf.object.ScalarDS;

import hdf.object.h5.H5Attribute;
import hdf.object.h5.H5Datatype;

/**
 * An attribute is a (name, value) pair of metadata attached to a primary data object such as a
 * dataset, group or named datatype.
 *
 * Like a dataset, an attribute has a name, datatype and dataspace.
 *
 * For more details on attributes, see the <a href=
 * "https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5
 * User's Guide</a>.
 *
 * The following code is an example of creating an attribute with a 1D integer array of two elements.
 *
 * <pre>
 * // Example of creating a new attribute
 * // The name of the new attribute
 * String name = "Data range";
 * // Creating an unsigned 1-byte integer datatype
 * Datatype type = new Datatype(Datatype.CLASS_INTEGER, // class
 *                              1,                      // size in bytes
 *                              Datatype.ORDER_LE,      // byte order
 *                              Datatype.SIGN_NONE);    // unsigned
 * // 1-D array of size two
 * long[] dims = {2};
 * // The value of the attribute
 * int[] value = {0, 255};
 * // Create a new attribute attached to the parent object parentObj
 * Attribute dataRange = new H5ScalarAttr(parentObj, name, type, dims);
 * // Set the attribute value
 * dataRange.setValue(value);
 * // See FileFormat.writeAttribute() for how to attach an attribute to an object,
 * &#64;see hdf.object.FileFormat#writeAttribute(HObject, Attribute, boolean)
 * </pre>
 *
 * For an atomic datatype, the value of an Attribute will be a 1D array of integers, floats or
 * strings.
 *
 * @see hdf.object.Datatype
 *
 * @version 1.0 6/15/2021
 * @author Allen Byrne
 */
public class H5ScalarAttr extends ScalarDS implements H5Attribute
{
    private static final long serialVersionUID = 2072473407027648309L;

    private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5ScalarAttr.class);

    /** The HObject to which this H5ScalarAttr is attached, Attribute interface */
    protected HObject         parentObject;

    /** additional information and properties for the attribute, Attribute interface */
    private transient Map<String, Object> properties;

    /**
     * Flag to indicate if the datatype in the file is the same as the datatype in memory
     */
    protected boolean isNativeDatatype = false;

    /**
     * Create an attribute with specified name, data type and dimension sizes.
     *
     * For a scalar attribute, the dimension size can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general class
     * independent of the file format; for example, the implementation of attribute
     * applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = &quot;CLASS&quot;;
     * String[] classValue = { &quot;IMAGE&quot; };
     * Datatype attrType = null;
     * try {
     *     attrType = new H5Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * Attribute attr = new H5ScalarAttr(parentObj, attrName, attrType, attrDims);
     * attr.setValue(classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this H5ScalarAttr is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for scalar attribute
     *
     * @see hdf.object.Datatype
     */
    public H5ScalarAttr(HObject parentObj, String attrName, Datatype attrType, long[] attrDims) {
        this(parentObj, attrName, attrType, attrDims, null);
    }

    /**
     * Create an attribute with specific name and value.
     *
     * For a scalar attribute, the dimension size can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general class
     * independent of the file format; for example, the implementation of attribute
     * applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = &quot;CLASS&quot;;
     * String[] classValue = { &quot;IMAGE&quot; };
     * Datatype attrType = null;
     * try {
     *     attrType = new H5Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * Attribute attr = new H5ScalarAttr(parentObj, attrName, attrType, attrDims, classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this H5ScalarAttr is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for scalar attribute
     * @param attrValue
     *            the value of the attribute, null if no value
     *
     * @see hdf.object.Datatype
     */
    @SuppressWarnings({ "rawtypes", "unchecked", "deprecation" })
    public H5ScalarAttr(HObject parentObj, String attrName, Datatype attrType, long[] attrDims, Object attrValue) {
        super((parentObj == null) ? null : parentObj.getFileFormat(), attrName,
                (parentObj == null) ? null : parentObj.getFullName(), null);

        log.trace("H5ScalarAttr: start {}", parentObj);
        this.parentObject = parentObj;

        datatype = attrType;

        if (attrValue != null) {
            data = attrValue;
            originalBuf = attrValue;
            isDataLoaded = true;
        }
        properties = new HashMap();

        if (attrDims == null) {
            rank = 1;
            dims = new long[] { 1 };
            isScalar = true;
        }
        else {
            dims = attrDims;
            rank = dims.length;
            isScalar = false;
        }

        selectedDims = new long[rank];
        startDims = new long[rank];
        selectedStride = new long[rank];

        log.trace("attrName={}, attrType={}, attrValue={}, rank={}, isUnsigned={}",
                attrName, attrType.getDescription(), data, rank, getDatatype().isUnsigned());

        resetSelection();
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#open()
     */
    @Override
    public long open() {
        if (parentObject == null) {
            log.debug("open(): attribute's parent object is null");
            return HDF5Constants.H5I_INVALID_HID;
        }

        long aid = HDF5Constants.H5I_INVALID_HID;
        long pObjID = HDF5Constants.H5I_INVALID_HID;

        try {
            pObjID = parentObject.open();
            if (pObjID >= 0) {
                if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5))) {
                    log.trace("open(): FILE_TYPE_HDF5");
                    if (H5.H5Aexists(pObjID, getName()))
                        aid = H5.H5Aopen(pObjID, getName(), HDF5Constants.H5P_DEFAULT);
                }
            }

            log.trace("open(): aid={}", aid);
        }
        catch (Exception ex) {
            log.debug("open(): Failed to open attribute {}: ", getName(), ex);
            aid = HDF5Constants.H5I_INVALID_HID;
        }
        finally {
            parentObject.close(pObjID);
        }

        return aid;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#close(int)
     */
    @Override
    public void close(long aid) {
        if (aid >= 0) {
            if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5))) {
                log.trace("close(): FILE_TYPE_HDF5");
                try {
                    H5.H5Aclose(aid);
                }
                catch (HDF5Exception ex) {
                    log.debug("close(): H5Aclose({}) failure: ", aid, ex);
                }
            }
        }
    }

    /**
     * Retrieves datatype and dataspace information from file and sets the attribute
     * in memory.
     *
     * init() is designed to support lazy operation in an attribute object. When a
     * data object is retrieved from a file, the datatype, dataspace and raw data are
     * not loaded into memory. When the raw data is first read from the file,
     * init() is called to obtain the datatype and dataspace information, and then
     * the raw data is loaded.
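     *
     * A minimal sketch of this lazy pattern, assuming attr is an H5ScalarAttr
     * constructed from file metadata:
     *
     * <pre>
     * attr.init();                 // loads only the datatype and dataspace info
     * Object buf = attr.getData(); // triggers the actual read of the raw data
     * </pre>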
     */
    @Override
    public void init() {
        if (inited) {
            resetSelection();
            log.trace("init(): ScalarAttr already inited");
            return;
        }

        long aid = HDF5Constants.H5I_INVALID_HID;
        long tid = HDF5Constants.H5I_INVALID_HID;
        long sid = HDF5Constants.H5I_INVALID_HID;
        long nativeTID = HDF5Constants.H5I_INVALID_HID;

        log.trace("init(): FILE_TYPE_HDF5");
        aid = open();
        if (aid >= 0) {
            try {
                sid = H5.H5Aget_space(aid);
                rank = H5.H5Sget_simple_extent_ndims(sid);
                space_type = H5.H5Sget_simple_extent_type(sid);
                tid = H5.H5Aget_type(aid);
                log.trace("init(): tid={} sid={} rank={} space_type={}", tid, sid, rank, space_type);

                try {
                    datatype = new H5Datatype(getFileFormat(), tid);

                    log.trace("init(): tid={} is tclass={} has isText={} : isNamed={} :  isVLEN={} : isEnum={} : isUnsigned={} : isRegRef={}",
                            tid, datatype.getDatatypeClass(), ((H5Datatype) datatype).isText(), datatype.isNamed(), datatype.isVLEN(),
                            datatype.isEnum(), datatype.isUnsigned(), ((H5Datatype) datatype).isRegRef());
                }
                catch (Exception ex) {
                    log.debug("init(): failed to create datatype for attribute: ", ex);
                    datatype = null;
                }

                // Check if the datatype in the file is the native datatype
                try {
                    nativeTID = H5.H5Tget_native_type(tid);
                    isNativeDatatype = H5.H5Tequal(tid, nativeTID);
                    log.trace("init(): isNativeDatatype={}", isNativeDatatype);
                }
                catch (Exception ex) {
                    log.debug("init(): check if native type failure: ", ex);
                }

                if (rank == 0) {
                    // a scalar data point
                    rank = 1;
                    dims = new long[1];
                    dims[0] = 1;
                    log.trace("init(): rank is a scalar data point");
                }
                else {
                    dims = new long[rank];
                    maxDims = new long[rank];
                    H5.H5Sget_simple_extent_dims(sid, dims, maxDims);
                    log.trace("init(): rank={}, dims={}, maxDims={}", rank, dims, maxDims);
                }

                inited = true;
            }
            catch (HDF5Exception ex) {
                log.debug("init(): ", ex);
            }
            finally {
                try {
                    H5.H5Tclose(nativeTID);
                }
                catch (Exception ex2) {
                    log.debug("init(): H5Tclose(nativeTID {}) failure: ", nativeTID, ex2);
                }
                try {
                    H5.H5Tclose(tid);
                }
                catch (HDF5Exception ex2) {
                    log.debug("init(): H5Tclose(tid {}) failure: ", tid, ex2);
                }
                try {
                    H5.H5Sclose(sid);
                }
                catch (HDF5Exception ex2) {
                    log.debug("init(): H5Sclose(sid {}) failure: ", sid, ex2);
                }
            }

            close(aid);

            startDims = new long[rank];
            selectedDims = new long[rank];

            resetSelection();
        }
        else {
            log.debug("init(): failed to open attribute");
        }
    }

    /**
     * Returns the datatype of the data object.
     *
     * @return the datatype of the data object.
     */
    @Override
    public Datatype getDatatype() {
        if (!inited)
            init();

        if (datatype == null) {
            long aid = HDF5Constants.H5I_INVALID_HID;
            long tid = HDF5Constants.H5I_INVALID_HID;

            aid = open();
            if (aid >= 0) {
                try {
                    tid = H5.H5Aget_type(aid);
                    log.trace("getDatatype(): isNativeDatatype={}", isNativeDatatype);
                    if (!isNativeDatatype) {
                        long tmptid = -1;
                        try {
                            tmptid = H5Datatype.toNative(tid);
                            if (tmptid >= 0) {
                                try {
                                    H5.H5Tclose(tid);
                                }
                                catch (Exception ex2) {
                                    log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex2);
                                }
                                tid = tmptid;
                            }
                        }
                        catch (Exception ex) {
                            log.debug("getDatatype(): toNative: ", ex);
                        }
                    }
                    datatype = new H5Datatype(getFileFormat(), tid);
                }
                catch (Exception ex) {
                    log.debug("getDatatype(): ", ex);
                }
                finally {
                    try {
                        H5.H5Tclose(tid);
                    }
                    catch (HDF5Exception ex) {
                        log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex);
                    }
                    try {
                        H5.H5Aclose(aid);
                    }
                    catch (HDF5Exception ex) {
                        log.debug("getDatatype(): H5Aclose(aid {}) failure: ", aid, ex);
                    }
                }
            }
        }

        return datatype;
    }

    /**
     * Returns the data buffer of the attribute in memory.
     *
     * If data is already loaded into memory, returns the data; otherwise, calls
     * read() to read data from file into a memory buffer and returns the memory
     * buffer.
     *
     * The whole attribute is read into memory. Users can also select
     * a subset of the whole data. Subsetting is done implicitly.
     *
     * <b>How to Select a Subset</b>
     *
     * A selection is specified by three arrays: start, stride and count.
     * <ol>
     * <li>start: offset of a selection
     * <li>stride: determines how many elements to move in each dimension
     * <li>count: number of elements to select in each dimension
     * </ol>
     * getStartDims(), getStride() and getSelectedDims() return the start,
     * stride and count arrays respectively. Applications can make a selection
     * by changing the values of the arrays.
     *
     * The following example shows how to make a subset. In the example, the
     * attribute is a 4-dimensional array of [200][100][50][10], i.e. dims[0]=200;
     * dims[1]=100; dims[2]=50; dims[3]=10; <br>
     * We want to select every other data point in dims[1] and dims[2]
     *
     * <pre>
     * int rank = attribute.getRank(); // number of dimensions of the attribute
     * long[] dims = attribute.getDims(); // the dimension sizes of the attribute
     * long[] selected = attribute.getSelectedDims(); // the selected size of the attribute
     * long[] start = attribute.getStartDims(); // the offset of the selection
     * long[] stride = attribute.getStride(); // the stride of the attribute
     * int[] selectedIndex = attribute.getSelectedIndex(); // the selected dimensions for display
     *
     * // select dim1 and dim2 as 2D data for display, and slice through dim0
     * selectedIndex[0] = 1;
     * selectedIndex[1] = 2;
     * selectedIndex[2] = 0;
     *
     * // reset the selection arrays
     * for (int i = 0; i &lt; rank; i++) {
     *     start[i] = 0;
     *     selected[i] = 1;
     *     stride[i] = 1;
     * }
     *
     * // set stride to 2 on dim1 and dim2 so that every other data point is
     * // selected.
     * stride[1] = 2;
     * stride[2] = 2;
     *
     * // set the selection size of dim1 and dim2
     * selected[1] = dims[1] / stride[1];
     * selected[2] = dims[2] / stride[2];
     *
     * // when H5ScalarAttr.getData() is called, the selection above will be used since
     * // the dimension arrays are passed by reference. Changes of these arrays
     * // outside the attribute object directly change the values of these arrays
     * // in the attribute object.
     * </pre>
     *
     * For H5ScalarAttr, the memory data buffer is a one-dimensional array of byte,
     * short, int, float, double or String type based on the datatype of the
     * attribute.
     *
     * @return the memory buffer of the attribute.
     *
     * @throws Exception if object can not be read
     * @throws OutOfMemoryError if memory is exhausted
     */
    @Override
    public Object getData() throws Exception, OutOfMemoryError {
        log.trace("getData(): isDataLoaded={}", isDataLoaded);
        if (!isDataLoaded)
            data = read(); // load the data, attributes read all data

        nPoints = 1;
        log.trace("getData(): selectedDims length={}", selectedDims.length);
        int point_len = selectedDims.length;
        // partial data for 3 or more dimensions
        if (rank > 2)
            point_len = 3;
        for (int j = 0; j < point_len; j++) {
            log.trace("getData(): selectedDims[{}]={}", j, selectedDims[j]);
            nPoints *= selectedDims[j];
        }
        log.trace("getData(): read {}", nPoints);

        // apply the selection for 3 or more dimensions
        // selection only expects to use 3 selectedDims
        //     where selectedIndex[0] is the row dimension
        //     where selectedIndex[1] is the col dimension
        //     where selectedIndex[2] is the frame dimension
        if (rank > 2)
            data = AttributeSelection();

        return data;
    }

    /* Implement abstract Dataset */

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#copy(hdf.object.Group, java.lang.String, long[], java.lang.Object)
     */
    @Override
    public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff) throws Exception {
        // not supported
        throw new UnsupportedOperationException("copy operation unsupported for H5 attributes.");
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Attribute#readBytes()
     */
    @Override
    public byte[] readBytes() throws HDF5Exception {
        byte[] theData = null;

        if (!isInited())
            init();

        long aid = open();
        if (aid >= 0) {
            long tid = HDF5Constants.H5I_INVALID_HID;

            try {
                long[] lsize = { 1 };
                for (int j = 0; j < selectedDims.length; j++)
                    lsize[0] *= selectedDims[j];

                tid = H5.H5Aget_type(aid);
                long size = H5.H5Tget_size(tid) * lsize[0];
                log.trace("readBytes(): size={}", size);

                if (size < Integer.MIN_VALUE || size > Integer.MAX_VALUE)
                    throw new Exception("Invalid int size");

                theData = new byte[(int)size];

                log.trace("readBytes(): read attribute id {} of size={}", tid, lsize[0]);
                H5.H5Aread(aid, tid, theData);
            }
            catch (Exception ex) {
                log.debug("readBytes(): failed to read data: ", ex);
            }
            finally {
                try {
                    H5.H5Tclose(tid);
                }
                catch (HDF5Exception ex2) {
                    log.debug("readBytes(): H5Tclose(tid {}) failure: ", tid, ex2);
                }
                close(aid);
            }
        }

        return theData;
    }

    /**
     * Reads the data from file.
     *
     * read() reads the data from file to a memory buffer and returns the memory
     * buffer. The attribute object does not hold the memory buffer. To store the
     * memory buffer in the attribute object, one must call getData().
     *
     * By default, the whole attribute is read into memory.
     *
     * For ScalarAttr, the memory data buffer is a one-dimensional array of byte,
     * short, int, float, double or String type based on the datatype of the
     * attribute.
     *
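     * A minimal usage sketch, assuming attr is an H5ScalarAttr already retrieved
     * from a file object:
     *
     * <pre>
     * // read() returns the buffer but does not store it in the attribute object
     * Object buf = attr.read();
     * // getData() reads if necessary and caches the buffer in the attribute object
     * Object cached = attr.getData();
     * </pre>
     *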
     * @return the data read from file.
     *
     * @see #getData()
     * @see hdf.object.DataFormat#read()
     *
     * @throws Exception
     *             if object can not be read
     */
    @Override
    public Object read() throws Exception {
        Object readData = null;

        if (!isInited())
            init();

        try {
            readData = scalarAttributeCommonIO(H5File.IO_TYPE.READ, null);
        }
        catch (Exception ex) {
            log.debug("read(): failed to read scalar attribute: ", ex);
            throw new Exception("failed to read scalar attribute: " + ex.getMessage(), ex);
        }

        return readData;
    }

    /* Implement abstract Dataset */

    /**
     * Writes the given data buffer into this attribute in a file.
     *
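     * A minimal sketch, assuming attr is an H5ScalarAttr of a 1-byte unsigned
     * integer type attached to an object in a writable file:
     *
     * <pre>
     * int[] newValue = { 0, 255 };
     * // updates the cached buffer and writes it to the file
     * attr.write(newValue);
     * </pre>
     *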
     * @param buf
     *            The buffer that contains the data values.
     *
     * @throws Exception
     *             If there is an error at the HDF5 library level.
     */
    @Override
    public void write(Object buf) throws Exception {
        if (this.getFileFormat().isReadOnly())
            throw new Exception("cannot write to scalar attribute in file opened as read-only");

        if (!buf.equals(data))
            setData(buf);

        if (parentObject == null) {
            log.debug("write(Object): parent object is null; nowhere to write attribute to");
            return;
        }

        ((MetaDataContainer) getParentObject()).writeMetadata(this);

        try {
            scalarAttributeCommonIO(H5File.IO_TYPE.WRITE, buf);
        }
        catch (Exception ex) {
            log.debug("write(Object): failed to write to scalar attribute: ", ex);
            throw new Exception("failed to write to scalar attribute: " + ex.getMessage(), ex);
        }
        resetSelection();
    }

    private Object scalarAttributeCommonIO(H5File.IO_TYPE ioType, Object writeBuf) throws Exception {
        H5Datatype dsDatatype = (H5Datatype)getDatatype();
        Object theData = null;

        /*
         * I/O type-specific pre-initialization.
         */
        if (ioType == H5File.IO_TYPE.WRITE) {
            if (writeBuf == null) {
                log.debug("scalarAttributeCommonIO(): writeBuf is null");
                throw new Exception("write buffer is null");
            }

            /*
             * Check for any unsupported datatypes and fail early before
             * attempting to write to the attribute.
             */
            if (dsDatatype.isVLEN() && !dsDatatype.isText()) {
                log.debug("scalarAttributeCommonIO(): Cannot write non-string variable-length data");
                throw new HDF5Exception("Writing non-string variable-length data is not supported");
            }

            if (dsDatatype.isRegRef()) {
                log.debug("scalarAttributeCommonIO(): Cannot write region reference data");
                throw new HDF5Exception("Writing region reference data is not supported");
            }
        }

        long aid = open();
        if (aid >= 0) {
            log.trace("scalarAttributeCommonIO(): isDataLoaded={}", isDataLoaded);
            try {
                theData = AttributeCommonIO(aid, ioType, writeBuf);
            }
            finally {
                close(aid);
            }
        }
        else
            log.debug("scalarAttributeCommonIO(): failed to open attribute");

        return theData;
    }

    /* Implement interface Attribute */

    /**
     * Returns the HObject to which this Attribute is currently "attached".
     *
     * @return the HObject to which this Attribute is currently "attached".
     */
    public HObject getParentObject() {
        return parentObject;
    }

    /**
     * Sets the HObject to which this Attribute is "attached".
     *
     * @param pObj
     *            the new HObject to which this Attribute is "attached".
     */
    public void setParentObject(HObject pObj) {
        parentObject = pObj;
    }

    /**
     * Set a property for the attribute.
     *
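     * A sketch with a hypothetical key and value, for illustration only:
     *
     * <pre>
     * attr.setProperty("units", "meters");
     * Object units = attr.getProperty("units"); // returns "meters"
     * </pre>
     *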
     * @param key the attribute Map key
     * @param value the attribute Map value
     */
    public void setProperty(String key, Object value) {
        properties.put(key, value);
    }

    /**
     * Get a property for a given key.
     *
     * @param key the attribute Map key
     *
     * @return the property
     */
    public Object getProperty(String key) {
        return properties.get(key);
    }

    /**
     * Get all property keys.
     *
     * @return the Collection of property keys
     */
    public Collection<String> getPropertyKeys() {
        return properties.keySet();
    }

    /**
     * Returns the name of the object. For example, "Raster Image #2".
     *
     * @return The name of the object.
     */
    public final String getAttributeName() {
        return getName();
    }

    /**
     * Retrieves the attribute data from the file.
     *
     * @return the attribute data.
     *
     * @throws Exception
     *             if the data can not be retrieved
     */
    public final Object getAttributeData() throws Exception, OutOfMemoryError {
        return getData();
    }

    /**
     * Returns the datatype of the attribute.
     *
     * @return the datatype of the attribute.
     */
    public final Datatype getAttributeDatatype() {
        return getDatatype();
    }

    /**
     * Returns the space type for the attribute. It returns a
     * negative number if it failed to retrieve the type information from
     * the file.
     *
     * @return the space type for the attribute.
     */
    public final int getAttributeSpaceType() {
        return getSpaceType();
    }

    /**
     * Returns the rank (number of dimensions) of the attribute. It returns a
     * negative number if it failed to retrieve the dimension information from
     * the file.
     *
     * @return the number of dimensions of the attribute.
     */
    public final int getAttributeRank() {
        return getRank();
    }

    /**
     * Returns the selected size of the rows and columns of the attribute. It returns a
     * negative number if it failed to retrieve the size information from
     * the file.
     *
     * @return the selected size of the rows and columns of the attribute.
     */
    public final int getAttributePlane() {
        return (int)getWidth() * (int)getHeight();
    }

    /**
     * Returns the array that contains the dimension sizes of the data value of
     * the attribute. It returns null if it failed to retrieve the dimension
     * information from the file.
     *
     * @return the dimension sizes of the attribute.
     */
    public final long[] getAttributeDims() {
        return getDims();
    }

    /**
     * @return true if the data is a single scalar point; otherwise, returns
     *         false.
     */
    public boolean isAttributeScalar() {
        return isScalar();
    }

    /**
     * Not for public use in the future.
     *
     * setData() is not safe to use because it changes the memory buffer
     * of the attribute object. Attribute operations such as write/read
     * will fail if the buffer type or size is changed.
     *
     * @param d  the object data - must be an array of Objects
     */
    public void setAttributeData(Object d) {
        setData(d);
    }

    /**
     * Writes the memory buffer of this attribute to file.
     *
     * @throws Exception if buffer can not be written
     */
    public void writeAttribute() throws Exception {
        write();
    }

    /**
     * Writes the given data buffer into this attribute in a file.
     *
     * The data is written to the file as one data blob.
     *
     * @param buf
     *            The buffer that contains the data values.
     *
     * @throws Exception
     *             If there is an error at the library level.
     */
    public void writeAttribute(Object buf) throws Exception {
        write(buf);
    }

    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter. For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} of three data points.
     *
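     * For example, assuming a scalar attribute whose value is the integer
     * array {0, 255}:
     *
     * <pre>
     * String s = attr.toAttributeString(", "); // returns "0, 255"
     * </pre>
     *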
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     *
     * @return the string representation of the data values.
     */
    public String toAttributeString(String delimiter) {
        return toString(delimiter, -1);
    }

    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter. For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} of three data points.
     *
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     * @param maxItems
     *            The maximum number of Array values to return
     *
     * @return the string representation of the data values.
     */
    public String toAttributeString(String delimiter, int maxItems) {
        return toString(delimiter, maxItems);
    }

    /* Implement interface H5Attribute */

    /**
     * The general read and write attribute operations for HDF5 object data.
     *
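     * A minimal read sketch; this mirrors what scalarAttributeCommonIO() does
     * internally:
     *
     * <pre>
     * long aid = attr.open();
     * try {
     *     Object buf = attr.AttributeCommonIO(aid, H5File.IO_TYPE.READ, null);
     * }
     * finally {
     *     attr.close(aid);
     * }
     * </pre>
     *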
     * @param attr_id
     *        the attribute to access
     * @param ioType
     *        the type of IO operation
     * @param objBuf
     *        the data buffer to use for write operation
     *
     * @return the attribute data
     *
     * @throws Exception
     *             if the data can not be retrieved
     */
    public Object AttributeCommonIO(long attr_id, H5File.IO_TYPE ioType, Object objBuf) throws Exception {
        H5Datatype dsDatatype = (H5Datatype) getDatatype();
        Object theData = null;

        long dt_size = dsDatatype.getDatatypeSize();
        log.trace("AttributeCommonIO(): create native");
        long tid = dsDatatype.createNative();

        if (ioType == H5File.IO_TYPE.READ) {
            log.trace("AttributeCommonIO():read ioType isNamed={} isEnum={} isText={} isRefObj={}", dsDatatype.isNamed(), dsDatatype.isEnum(), dsDatatype.isText(), dsDatatype.isRefObj());

            long lsize = 1;
            for (int j = 0; j < dims.length; j++)
                lsize *= dims[j];
            log.trace("AttributeCommonIO():read ioType dt_size={} lsize={}", dt_size, lsize);

            try {
                if (dsDatatype.isVarStr()) {
                    String[] strs = new String[(int) lsize];
                    for (int j = 0; j < lsize; j++)
                        strs[j] = "";
                    try {
                        log.trace("AttributeCommonIO():read ioType H5AreadVL");
                        H5.H5AreadVL(attr_id, tid, strs);
                    }
                    catch (Exception ex) {
                        log.debug("AttributeCommonIO():read ioType H5AreadVL failure: ", ex);
                    }
                    theData = strs;
                }
                else if (dsDatatype.isCompound()) {
                    String[] strs = new String[(int) lsize];
                    for (int j = 0; j < lsize; j++)
                        strs[j] = "";
                    try {
                        log.trace("AttributeCommonIO():read ioType H5AreadComplex");
                        H5.H5AreadComplex(attr_id, tid, strs);
                    }
                    catch (Exception ex) {
                        log.debug("AttributeCommonIO():read ioType H5AreadComplex failure: ", ex);
                    }
                    theData = strs;
                }
                else if (dsDatatype.isVLEN()) {
                    String[] strs = new String[(int) lsize];
                    for (int j = 0; j < lsize; j++)
                        strs[j] = "";
                    try {
                        log.trace("AttributeCommonIO():read ioType H5AreadVL");
                        H5.H5AreadVL(attr_id, tid, strs);
                    }
                    catch (Exception ex) {
                        log.debug("AttributeCommonIO():read ioType H5AreadVL failure: ", ex);
                    }
                    theData = strs;
                }
                else {
                    Object attr_data = null;
                    try {
                        attr_data = H5Datatype.allocateArray(dsDatatype, (int) lsize);
                    }
                    catch (OutOfMemoryError e) {
                        log.debug("AttributeCommonIO():read ioType out of memory", e);
                        theData = null;
                    }
                    if (attr_data == null)
                        log.debug("AttributeCommonIO():read ioType allocateArray returned null");

                    log.trace("AttributeCommonIO():read ioType H5Aread isArray()={}", dsDatatype.isArray());
                    try {
                        H5.H5Aread(attr_id, tid, attr_data);
                    }
                    catch (Exception ex) {
                        log.debug("AttributeCommonIO():read ioType H5Aread failure: ", ex);
                    }

                    /*
                     * Perform any necessary data conversions.
                     */
                    if (dsDatatype.isText() && convertByteToString && (attr_data instanceof byte[])) {
                        log.trace("AttributeCommonIO():read ioType isText: converting byte array to string array");
                        theData = byteToString((byte[]) attr_data, (int) dsDatatype.getDatatypeSize());
                    }
                    else if (dsDatatype.isFloat() && dsDatatype.getDatatypeSize() == 16) {
                        log.trace("AttributeCommonIO():read ioType isFloat: converting byte array to BigDecimal array");
                        theData = dsDatatype.byteToBigDecimal(0, (int)nPoints, (byte[]) attr_data);
                    }
                    else if (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isFloat() && dsDatatype.getDatatypeBase().getDatatypeSize() == 16) {
                        log.trace("AttributeCommonIO():read ioType isArray and isFloat: converting byte array to BigDecimal array");
                        long[] arrayDims = dsDatatype.getArrayDims();
                        int asize = (int)nPoints;
                        for (int j = 0; j < arrayDims.length; j++) {
                            asize *= arrayDims[j];
                        }
                        theData = ((H5Datatype)dsDatatype.getDatatypeBase()).byteToBigDecimal(0, asize, (byte[]) attr_data);
                    }
                    else if (dsDatatype.isRefObj()) {
                        log.trace("AttributeCommonIO():read ioType isREF: converting byte array to long array");
                        theData = HDFNativeData.byteToLong((byte[]) attr_data);
                    }
                    else
                        theData = attr_data;
                }
            }
            catch (HDF5DataFiltersException exfltr) {
                log.debug("AttributeCommonIO():read ioType read failure: ", exfltr);
                throw new Exception("Filter not available exception: " + exfltr.getMessage(), exfltr);
            }
            catch (Exception ex) {
                log.debug("AttributeCommonIO():read ioType read failure: ", ex);
                throw new Exception(ex.getMessage(), ex);
            }
            finally {
                dsDatatype.close(tid);
            }
            log.trace("AttributeCommonIO():read ioType data: {}", theData);
            originalBuf = theData;
            isDataLoaded = true;
        } // H5File.IO_TYPE.READ
        else {
            /*
             * Perform any necessary data conversions before writing the data.
             *
             * Note that v-len strings do not get converted, regardless of
             * conversion request type.
             */
            Object tmpData = objBuf;
            try {
                // Check if we need to convert integer data
                String cname = objBuf.getClass().getName();
                char dname = cname.charAt(cname.lastIndexOf("[") + 1);
                boolean doIntConversion = (((dt_size == 1) && (dname == 'S')) || ((dt_size == 2) && (dname == 'I'))
                        || ((dt_size == 4) && (dname == 'J')) || (dsDatatype.isUnsigned() && unsignedConverted));

                if (doIntConversion) {
                    log.trace("AttributeCommonIO(): converting integer data to unsigned C-type integers");
                    tmpData = convertToUnsignedC(objBuf, null);
                }
                else if (dsDatatype.isText() && !dsDatatype.isVarStr() && convertByteToString) {
                    log.trace("AttributeCommonIO(): converting string array to byte array");
                    tmpData = stringToByte((String[]) objBuf, (int)dt_size);
                }
                else if (dsDatatype.isEnum() && (Array.get(objBuf, 0) instanceof String)) {
                    log.trace("AttributeCommonIO(): converting enum names to values");
                    tmpData = dsDatatype.convertEnumNameToValue((String[]) objBuf);
                }
                else if (dsDatatype.isFloat() && dsDatatype.getDatatypeSize() == 16) {
                    log.trace("AttributeCommonIO(): isFloat: converting BigDecimal array to byte array");
                    throw new Exception("data conversion failure: cannot write BigDecimal values");
                    //tmpData = dsDatatype.bigDecimalToByte(0, (int)nPoints, (BigDecimal[]) objBuf);
                }
            }
            catch (Exception ex) {
                log.debug("AttributeCommonIO(): data conversion failure: ", ex);
                throw new Exception("data conversion failure: " + ex.getMessage());
            }

            /*
             * Actually write the data now that everything has been set up.
             */
            try {
                if (dsDatatype.isVLEN() || (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isVLEN())) {
                    log.trace("AttributeCommonIO(): H5AwriteVL aid={} tid={}", attr_id, tid);

                    H5.H5AwriteVL(attr_id, tid, (Object[]) tmpData);
                }
                else {
                    if (dsDatatype.isRef() && tmpData instanceof String) {
                        // reference is a path+name to the object
                        tmpData = H5.H5Rcreate(getFID(), (String) tmpData, HDF5Constants.H5R_OBJECT, -1);
                        log.trace("AttributeCommonIO(): Attribute class is CLASS_REFERENCE");
                    }
                    else if (Array.get(tmpData, 0) instanceof String) {
                        int len = ((String[]) tmpData).length;
                        byte[] bval = Dataset.stringToByte((String[]) tmpData, (int)dt_size);
                        if (bval != null && bval.length == dt_size * len) {
                            bval[bval.length - 1] = 0;
                            tmpData = bval;
                        }
                        log.trace("AttributeCommonIO(): String={}", tmpData);
                    }

                    log.trace("AttributeCommonIO(): H5Awrite aid={} tid={}", attr_id, tid);
                    H5.H5Awrite(attr_id, tid, tmpData);
                }
            }
            catch (Exception ex) {
                log.debug("AttributeCommonIO(): write failure: ", ex);
                throw new Exception(ex.getMessage());
            }
            finally {
                dsDatatype.close(tid);
            }
        } // H5File.IO_TYPE.WRITE

        return theData;
    }

    /**
     * Read a subset of an attribute for HDF5 object data.
     *
     * Copies the currently selected 2D slice (rows by columns of the selected
     * frame) from the original data buffer into a new buffer.
     *
     * @return the selected attribute data
     *
     * @throws Exception
     *             if the data can not be retrieved
     */
    public Object AttributeSelection() throws Exception {
        H5Datatype dsDatatype = (H5Datatype) getDatatype();
        Object theData = H5Datatype.allocateArray(dsDatatype, (int)nPoints);
        if (dsDatatype.isText() && convertByteToString && (theData instanceof byte[])) {
            log.trace("AttributeSelection(): isText: converting byte array to string array");
            theData = byteToString((byte[]) theData, (int) dsDatatype.getDatatypeSize());
        }
        else if (dsDatatype.isFloat() && dsDatatype.getDatatypeSize() == 16) {
            log.trace("AttributeSelection(): isFloat: converting byte array to BigDecimal array");
            theData = dsDatatype.byteToBigDecimal(0, (int)nPoints, (byte[]) theData);
        }
        else if (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isFloat() && dsDatatype.getDatatypeBase().getDatatypeSize() == 16) {
            log.trace("AttributeSelection(): isArray and isFloat: converting byte array to BigDecimal array");
            long[] arrayDims = dsDatatype.getArrayDims();
            int asize = (int)nPoints;
            for (int j = 0; j < arrayDims.length; j++) {
                asize *= arrayDims[j];
            }
            theData = ((H5Datatype)dsDatatype.getDatatypeBase()).byteToBigDecimal(0, asize, (byte[]) theData);
        }
        else if (dsDatatype.isRefObj()) {
            log.trace("AttributeSelection(): isREF: converting byte array to long array");
            theData = HDFNativeData.byteToLong((byte[]) theData);
        }
        Object theOrig = originalBuf;
        log.trace("AttributeSelection(): originalBuf={}", originalBuf);

        // Copy the selection from originalBuf to theData.
        // Only three dims are involved and the selected data is 2 dimensions:
        //     getHeight() is the row dimension
        //     getWidth() is the col dimension
        //     getDepth() is the frame dimension
        long[] start = getStartDims();
        long curFrame = start[selectedIndex[2]];
        for (int col = 0; col < (int)getWidth(); col++) {
            for (int row = 0; row < (int)getHeight(); row++) {
                int k = (int)startDims[selectedIndex[2]] * (int)getDepth();
                int index = row * (int)getWidth() + col;
                log.trace("AttributeSelection(): point{} row:col:k={}:{}:{}", curFrame, row, col, k);
                int fromIndex = ((int)curFrame * (int)getWidth() * (int)getHeight() +
                                        col * (int)getHeight() +
                                        row);// * (int) dsDatatype.getDatatypeSize();
                int toIndex = (col * (int)getHeight() +
                        row);// * (int) dsDatatype.getDatatypeSize();
                int objSize = 1;
                if (dsDatatype.isArray()) {
                    long[] arrayDims = dsDatatype.getArrayDims();
                    objSize = arrayDims.length;
                }
                System.arraycopy(theOrig, fromIndex, theData, toIndex, objSize);
            }
        }

        log.trace("AttributeSelection(): theData={}", theData);
        return theData;
    }

    /* Implement abstract ScalarDS */

    /*
     * (non-Javadoc)
     * @see hdf.object.ScalarDS#getPalette()
     */
    @Override
    public byte[][] getPalette() {
        if (palette == null)
            palette = readPalette(0);

        return palette;
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.ScalarDS#readPalette(int)
     */
    @Override
    public byte[][] readPalette(int idx) {
        return null;
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.ScalarDS#getPaletteRefs()
     */
    @Override
    public byte[] getPaletteRefs() {
        return null;
    }
}