/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the files COPYING and Copyright.html. *
 * COPYING can be found at the root of the source code distribution tree.    *
 * Or, see https://support.hdfgroup.org/products/licenses.html               *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.h5;

import java.lang.reflect.Array;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Vector;

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
import hdf.hdf5lib.HDFNativeData;
import hdf.hdf5lib.exceptions.HDF5DataFiltersException;
import hdf.hdf5lib.exceptions.HDF5Exception;
import hdf.hdf5lib.exceptions.HDF5LibraryException;

import hdf.object.Attribute;
import hdf.object.DataFormat;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.MetaDataContainer;
import hdf.object.ScalarDS;

import hdf.object.h5.H5Attribute;
import hdf.object.h5.H5Datatype;
import hdf.object.h5.H5ReferenceType;
/**
 * An attribute is a (name, value) pair of metadata attached to a primary data object such as a
 * dataset, group or named datatype.
 *
 * Like a dataset, an attribute has a name, datatype and dataspace.
 *
 * For more details on attributes, see the <a href=
 * "https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5
 * User's Guide</a>.
 *
 * The following code is an example of creating an attribute with a 1D integer array of two elements.
 *
 * <pre>
 * // Example of creating a new attribute
 * // The name of the new attribute
 * String name = "Data range";
 * // Creating an unsigned 1-byte integer datatype
 * Datatype type = new Datatype(Datatype.CLASS_INTEGER, // class
 *                              1,                      // size in bytes
 *                              Datatype.ORDER_LE,      // byte order
 *                              Datatype.SIGN_NONE);    // unsigned
 * // 1-D array of size two
 * long[] dims = {2};
 * // The value of the attribute
 * int[] value = {0, 255};
 * // Create a new attribute attached to parentObj, the data object
 * // (e.g., a dataset or group) that the attribute belongs to
 * Attribute dataRange = new H5ScalarAttr(parentObj, name, type, dims);
 * // Set the attribute value
 * dataRange.setValue(value);
 * // See FileFormat.writeAttribute() for how to attach an attribute to an object,
 * &#64;see hdf.object.FileFormat#writeAttribute(HObject, Attribute, boolean)
 * </pre>
 *
 * For an atomic datatype, the value of an Attribute will be a 1D array of integers, floats or
 * strings.
 *
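 * A minimal sketch of reading an attribute value back (assuming attr is an H5ScalarAttr
 * retrieved from an object in an open file; the integer cast matches the example above
 * and depends on the attribute's actual datatype):
 *
 * <pre>
 * try {
 *     Object value = attr.getAttributeData();
 *     if (value instanceof int[])
 *         System.out.println("first element = " + ((int[]) value)[0]);
 * }
 * catch (Exception ex) {}
 * </pre>
 *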
 * @see hdf.object.Datatype
 *
 * @version 1.0 6/15/2021
 * @author Allen Byrne
 */
public class H5ScalarAttr extends ScalarDS implements H5Attribute
{
    private static final long serialVersionUID = 2072473407027648309L;

    private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5ScalarAttr.class);

    /** The HObject to which this H5ScalarAttr is attached, Attribute interface */
    protected HObject         parentObject;

    /** additional information and properties for the attribute, Attribute interface */
    private transient Map<String, Object> properties;

    /**
     * flag to indicate if the datatype in the file is the same as the datatype in memory
     */
    protected boolean isNativeDatatype = false;

    /**
     * Create an attribute with a specified name, datatype, and dimension sizes.
     *
     * For a scalar attribute, the dimension sizes can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general class
     * independent of the file format; for example, the attribute implementation
     * applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = &quot;CLASS&quot;;
     * String[] classValue = { &quot;IMAGE&quot; };
     * Datatype attrType = null;
     * try {
     *     attrType = new H5Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * Attribute attr = new H5ScalarAttr(parentObj, attrName, attrType, attrDims);
     * attr.setValue(classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this H5ScalarAttr is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for a scalar attribute
     *
     * @see hdf.object.Datatype
     */
    public H5ScalarAttr(HObject parentObj, String attrName, Datatype attrType, long[] attrDims) {
        this(parentObj, attrName, attrType, attrDims, null);
    }

    /**
     * Create an attribute with a specific name and value.
     *
     * For a scalar attribute, the dimension sizes can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general class
     * independent of the file format; for example, the attribute implementation
     * applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = &quot;CLASS&quot;;
     * String[] classValue = { &quot;IMAGE&quot; };
     * Datatype attrType = null;
     * try {
     *     attrType = new H5Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * Attribute attr = new H5ScalarAttr(parentObj, attrName, attrType, attrDims, classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this H5ScalarAttr is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for a scalar attribute
     * @param attrValue
     *            the value of the attribute, null if no value
     *
     * @see hdf.object.Datatype
     */
    @SuppressWarnings({ "rawtypes", "unchecked", "deprecation" })
    public H5ScalarAttr(HObject parentObj, String attrName, Datatype attrType, long[] attrDims, Object attrValue) {
        super((parentObj == null) ? null : parentObj.getFileFormat(), attrName,
                (parentObj == null) ? null : parentObj.getFullName(), null);

        log.trace("H5ScalarAttr: start {}", parentObj);
        this.parentObject = parentObj;

        datatype = attrType;

        if (attrValue != null) {
            data = attrValue;
            originalBuf = attrValue;
            isDataLoaded = true;
        }
        properties = new HashMap();

        if (attrDims == null) {
            rank = 1;
            dims = new long[] { 1 };
            isScalar = true;
        }
        else {
            dims = attrDims;
            rank = dims.length;
            isScalar = false;
        }

        selectedDims = new long[rank];
        startDims = new long[rank];
        selectedStride = new long[rank];

        log.trace("attrName={}, attrType={}, attrValue={}, rank={}, isUnsigned={}",
                attrName, attrType.getDescription(), data, rank, getDatatype().isUnsigned());

        resetSelection();
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#open()
     */
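    // A minimal usage sketch (hypothetical variable names): open() returns an HDF5
    // attribute identifier that the caller is responsible for releasing with close().
    //
    //     long aid = attr.open();
    //     if (aid >= 0) {
    //         try {
    //             // ... use aid with H5A* calls ...
    //         }
    //         finally {
    //             attr.close(aid);
    //         }
    //     }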
    @Override
    public long open() {
        if (parentObject == null) {
            log.debug("open(): attribute's parent object is null");
            return HDF5Constants.H5I_INVALID_HID;
        }

        long aid = HDF5Constants.H5I_INVALID_HID;
        long pObjID = HDF5Constants.H5I_INVALID_HID;

        try {
            pObjID = parentObject.open();
            if (pObjID >= 0) {
                if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5))) {
                    log.trace("open(): FILE_TYPE_HDF5");
                    if (H5.H5Aexists(pObjID, getName()))
                        aid = H5.H5Aopen(pObjID, getName(), HDF5Constants.H5P_DEFAULT);
                }
            }

            log.trace("open(): aid={}", aid);
        }
        catch (Exception ex) {
            log.debug("open(): Failed to open attribute {}: ", getName(), ex);
            aid = HDF5Constants.H5I_INVALID_HID;
        }
        finally {
            parentObject.close(pObjID);
        }

        return aid;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#close(int)
     */
    @Override
    public void close(long aid) {
        if (aid >= 0) {
            if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5))) {
                log.trace("close(): FILE_TYPE_HDF5");
                try {
                    H5.H5Aclose(aid);
                }
                catch (HDF5Exception ex) {
                    log.debug("close(): H5Aclose({}) failure: ", aid, ex);
                }
            }
        }
    }

    /**
     * Retrieves datatype and dataspace information from the file and sets the attribute
     * in memory.
     *
     * init() is designed to support lazy operation in an attribute object. When a
     * data object is retrieved from the file, the datatype, dataspace and raw data are
     * not loaded into memory. When the raw data is first read, init() is called to
     * get the datatype and dataspace information and then the raw data is loaded
     * from the file.
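     *
     * A minimal sketch of the lazy flow (assuming attr is an H5ScalarAttr attached to an
     * object in an open file; init() is normally invoked implicitly):
     *
     * <pre>
     * attr.init();                      // reads only datatype and dataspace info
     * try {
     *     Object data = attr.getData(); // now the raw data is read from the file
     * }
     * catch (Exception ex) {}
     * </pre>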
     */
    @Override
    public void init() {
        if (inited) {
            resetSelection();
            log.trace("init(): ScalarAttr already inited");
            return;
        }

        long aid = HDF5Constants.H5I_INVALID_HID;
        long tid = HDF5Constants.H5I_INVALID_HID;
        long sid = HDF5Constants.H5I_INVALID_HID;
        long nativeTID = HDF5Constants.H5I_INVALID_HID;

        log.trace("init(): FILE_TYPE_HDF5");
        aid = open();
        if (aid >= 0) {
            try {
                sid = H5.H5Aget_space(aid);
                rank = H5.H5Sget_simple_extent_ndims(sid);
                space_type = H5.H5Sget_simple_extent_type(sid);
                tid = H5.H5Aget_type(aid);
                log.trace("init(): tid={} sid={} rank={} space_type={}", tid, sid, rank, space_type);

                if (rank == 0) {
                    // a scalar data point
                    rank = 1;
                    dims = new long[1];
                    dims[0] = 1;
                    log.trace("init(): rank is a scalar data point");
                }
                else {
                    dims = new long[rank];
                    maxDims = new long[rank];
                    H5.H5Sget_simple_extent_dims(sid, dims, maxDims);
                    log.trace("init(): rank={}, dims={}, maxDims={}", rank, dims, maxDims);
                }

                if (datatype == null) {
                    try {
                        int nativeClass = H5.H5Tget_class(tid);
                        if (nativeClass == HDF5Constants.H5T_REFERENCE) {
                            long lsize = 1;
                            if (rank > 0) {
                                log.trace("init(): rank={}, dims={}", rank, dims);
                                for (int j = 0; j < dims.length; j++) {
                                    lsize *= dims[j];
                                }
                            }
                            datatype = new H5ReferenceType(getFileFormat(), lsize, tid);
                        }
                        else
                            datatype = new H5Datatype(getFileFormat(), tid);

                        log.trace("init(): tid={} is tclass={} has isText={} : isNamed={} :  isVLEN={} : isEnum={} : isUnsigned={} : isRegRef={}",
                                tid, datatype.getDatatypeClass(), ((H5Datatype) datatype).isText(), datatype.isNamed(), datatype.isVLEN(),
                                datatype.isEnum(), datatype.isUnsigned(), ((H5Datatype) datatype).isRegRef());
                    }
                    catch (Exception ex) {
                        log.debug("init(): failed to create datatype for attribute: ", ex);
                        datatype = null;
                    }
                }

                // Check if the datatype in the file is the native datatype
                try {
                    nativeTID = H5.H5Tget_native_type(tid);
                    isNativeDatatype = H5.H5Tequal(tid, nativeTID);
                    log.trace("init(): isNativeDatatype={}", isNativeDatatype);
                }
                catch (Exception ex) {
                    log.debug("init(): check if native type failure: ", ex);
                }

                inited = true;
            }
            catch (HDF5Exception ex) {
                log.debug("init(): ", ex);
            }
            finally {
                try {
                    H5.H5Tclose(nativeTID);
                }
                catch (Exception ex2) {
                    log.debug("init(): H5Tclose(nativeTID {}) failure: ", nativeTID, ex2);
                }
                try {
                    H5.H5Tclose(tid);
                }
                catch (HDF5Exception ex2) {
                    log.debug("init(): H5Tclose(tid {}) failure: ", tid, ex2);
                }
                try {
                    H5.H5Sclose(sid);
                }
                catch (HDF5Exception ex2) {
                    log.debug("init(): H5Sclose(sid {}) failure: ", sid, ex2);
                }
            }

            close(aid);

            startDims = new long[rank];
            selectedDims = new long[rank];

            resetSelection();
        }
        else {
            log.debug("init(): failed to open attribute");
        }
    }

    /**
     * Returns the datatype of the data object.
     *
     * @return the datatype of the data object.
     */
    @Override
    public Datatype getDatatype() {
        if (!inited)
            init();

        if (datatype == null) {
            log.trace("getDatatype(): datatype == null");
            long aid = HDF5Constants.H5I_INVALID_HID;
            long tid = HDF5Constants.H5I_INVALID_HID;

            aid = open();
            if (aid >= 0) {
                try {
                    tid = H5.H5Aget_type(aid);
                    log.trace("getDatatype(): isNativeDatatype={}", isNativeDatatype);
                    if (!isNativeDatatype) {
                        long tmptid = -1;
                        try {
                            tmptid = H5Datatype.toNative(tid);
                            if (tmptid >= 0) {
                                try {
                                    H5.H5Tclose(tid);
                                }
                                catch (Exception ex2) {
                                    log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex2);
                                }
                                tid = tmptid;
                            }
                        }
                        catch (Exception ex) {
                            log.debug("getDatatype(): toNative: ", ex);
                        }
                    }
                    int nativeClass = H5.H5Tget_class(tid);
                    if (nativeClass == HDF5Constants.H5T_REFERENCE) {
                        long lsize = 1;
                        long sid = H5.H5Aget_space(aid);
                        int rank = H5.H5Sget_simple_extent_ndims(sid);
                        if (rank > 0) {
                            long dims[] = new long[rank];
                            H5.H5Sget_simple_extent_dims(sid, dims, null);
                            log.trace("getDatatype(): rank={}, dims={}", rank, dims);
                            for (int j = 0; j < dims.length; j++) {
                                lsize *= dims[j];
                            }
                        }
                        // release the dataspace id once the total size is known
                        try {
                            H5.H5Sclose(sid);
                        }
                        catch (HDF5Exception ex2) {
                            log.debug("getDatatype(): H5Sclose(sid {}) failure: ", sid, ex2);
                        }
                        datatype = new H5ReferenceType(getFileFormat(), lsize, tid);
                    }
                    else
                        datatype = new H5Datatype(getFileFormat(), tid);
                }
                catch (Exception ex) {
                    log.debug("getDatatype(): ", ex);
                }
                finally {
                    try {
                        H5.H5Tclose(tid);
                    }
                    catch (HDF5Exception ex) {
                        log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex);
                    }
                    try {
                        H5.H5Aclose(aid);
                    }
                    catch (HDF5Exception ex) {
                        log.debug("getDatatype(): H5Aclose(aid {}) failure: ", aid, ex);
                    }
                }
            }
        }

        return datatype;
    }

    /**
     * Returns the data buffer of the attribute in memory.
     *
     * If data is already loaded into memory, returns the data; otherwise, calls
     * read() to read data from file into a memory buffer and returns the memory
     * buffer.
     *
     * The whole attribute is read into memory. Users can also select
     * a subset from the whole data. Subsetting is done in an implicit way.
     *
     * <b>How to Select a Subset</b>
     *
     * A selection is specified by three arrays: start, stride and count.
     * <ol>
     * <li>start: offset of a selection
     * <li>stride: determines how many elements to move in each dimension
     * <li>count: number of elements to select in each dimension
     * </ol>
     * getStartDims(), getStride() and getSelectedDims() return the start,
     * stride and count arrays respectively. Applications can make a selection
     * by changing the values of the arrays.
     *
     * The following example shows how to make a subset. In the example, the
     * attribute is a 4-dimensional array of [200][100][50][10], i.e. dims[0]=200;
     * dims[1]=100; dims[2]=50; dims[3]=10; <br>
     * We want to select every other data point in dims[1] and dims[2]
     *
     * <pre>
     * int rank = attribute.getRank(); // number of dimensions of the attribute
     * long[] dims = attribute.getDims(); // the dimension sizes of the attribute
     * long[] selected = attribute.getSelectedDims(); // the selected size of the attribute
     * long[] start = attribute.getStartDims(); // the offset of the selection
     * long[] stride = attribute.getStride(); // the stride of the attribute
     * int[] selectedIndex = attribute.getSelectedIndex(); // the selected dimensions for display
     *
     * // select dim1 and dim2 as 2D data for display, and slice through dim0
     * selectedIndex[0] = 1;
     * selectedIndex[1] = 2;
     * selectedIndex[2] = 0;
     *
     * // reset the selection arrays
     * for (int i = 0; i &lt; rank; i++) {
     *     start[i] = 0;
     *     selected[i] = 1;
     *     stride[i] = 1;
     * }
     *
     * // set stride to 2 on dim1 and dim2 so that every other data point is
     * // selected.
     * stride[1] = 2;
     * stride[2] = 2;
     *
     * // set the selection size of dim1 and dim2
     * selected[1] = dims[1] / stride[1];
     * selected[2] = dims[2] / stride[2];
     *
     * // when H5ScalarAttr.getData() is called, the selection above will be used since
     * // the dimension arrays are passed by reference. Changes of these arrays
     * // outside the attribute object directly change the values of these arrays
     * // in the attribute object.
     * </pre>
     *
     * For H5ScalarAttr, the memory data buffer is a one-dimensional array of byte,
     * short, int, float, double or String type based on the datatype of the
     * attribute.
     *
     * @return the memory buffer of the attribute.
     *
     * @throws Exception if object can not be read
     * @throws OutOfMemoryError if memory is exhausted
     */
    @Override
    public Object getData() throws Exception, OutOfMemoryError {
        log.trace("getData(): isDataLoaded={}", isDataLoaded);
        if (!isDataLoaded)
            data = read(); // load the data, attributes read all data

        nPoints = 1;
        log.trace("getData(): selectedDims length={}", selectedDims.length);
        int point_len = selectedDims.length;
        // partial data for 3 or more dimensions
        if (rank > 2)
            point_len = 3;
        for (int j = 0; j < point_len; j++) {
            log.trace("getData(): selectedDims[{}]={}", j, selectedDims[j]);
            nPoints *= selectedDims[j];
        }
        log.trace("getData: read {}", nPoints);

        // apply the selection for 3 or more dimensions
        // selection only expects to use 3 selectedDims
        //     where selectedIndex[0] is the row dimension
        //     where selectedIndex[1] is the col dimension
        //     where selectedIndex[2] is the frame dimension
        if (rank > 2)
            data = AttributeSelection();

        return data;
    }

    /* Implement abstract Dataset */

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#copy(hdf.object.Group, java.lang.String, long[], java.lang.Object)
     */
    @Override
    public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff) throws Exception {
        // not supported
        throw new UnsupportedOperationException("copy operation unsupported for H5 attributes.");
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Attribute#readBytes()
     */
    @Override
    public byte[] readBytes() throws HDF5Exception {
        byte[] theData = null;

        if (!isInited())
            init();

        long aid = open();
        if (aid >= 0) {
            long tid = HDF5Constants.H5I_INVALID_HID;

            try {
                long[] lsize = { 1 };
                for (int j = 0; j < selectedDims.length; j++)
                    lsize[0] *= selectedDims[j];

                tid = H5.H5Aget_type(aid);
                long size = H5.H5Tget_size(tid) * lsize[0];
                log.trace("readBytes(): size={}", size);

                if (size < Integer.MIN_VALUE || size > Integer.MAX_VALUE)
                    throw new Exception("Invalid int size");

                theData = new byte[(int)size];

                log.trace("readBytes(): read attribute id {} of size={}", tid, lsize);
                H5.H5Aread(aid, tid, theData);
            }
            catch (Exception ex) {
                log.debug("readBytes(): failed to read data: ", ex);
            }
            finally {
                try {
                    H5.H5Tclose(tid);
                }
                catch (HDF5Exception ex2) {
                    log.debug("readBytes(): H5Tclose(tid {}) failure: ", tid, ex2);
                }
                close(aid);
            }
        }

        return theData;
    }

    /**
     * Reads the data from the file.
     *
     * read() reads the data from the file into a memory buffer and returns the memory
     * buffer. The attribute object does not hold the memory buffer. To store the
     * memory buffer in the attribute object, one must call getData().
     *
     * By default, the whole attribute is read into memory.
     *
     * For a ScalarAttr, the memory data buffer is a one-dimensional array of byte,
     * short, int, float, double or String type based on the datatype of the
     * attribute.
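     *
     * A minimal usage sketch (assuming attr is an H5ScalarAttr attached to an object in
     * an open file):
     *
     * <pre>
     * try {
     *     Object buf = attr.read();       // buf is not cached in the attribute object
     *     Object cached = attr.getData(); // getData() reads and caches the buffer
     * }
     * catch (Exception ex) {}
     * </pre>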
     *
     * @return the data read from file.
     *
     * @see #getData()
     * @see hdf.object.DataFormat#read()
     *
     * @throws Exception
     *             if object can not be read
     */
    @Override
    public Object read() throws Exception {
        Object readData = null;

        if (!isInited())
            init();

        try {
            readData = scalarAttributeCommonIO(H5File.IO_TYPE.READ, null);
        }
        catch (Exception ex) {
            log.debug("read(): failed to read scalar attribute: ", ex);
            throw new Exception("failed to read scalar attribute: " + ex.getMessage(), ex);
        }

        return readData;
    }

    /* Implement abstract Dataset */

    /**
     * Writes the given data buffer into this attribute in a file.
     *
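     * A minimal usage sketch (assuming attr is a writable two-element integer
     * H5ScalarAttr, as in the class example):
     *
     * <pre>
     * int[] newValue = { 0, 255 };
     * try {
     *     attr.write(newValue);
     * }
     * catch (Exception ex) {}
     * </pre>
     *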
     * @param buf
     *            The buffer that contains the data values.
     *
     * @throws Exception
     *             If there is an error at the HDF5 library level.
     */
    @Override
    public void write(Object buf) throws Exception {
        if (this.getFileFormat().isReadOnly())
            throw new Exception("cannot write to scalar attribute in file opened as read-only");

        if (!buf.equals(data))
            setData(buf);

        if (parentObject == null) {
            log.debug("write(Object): parent object is null; nowhere to write attribute to");
            return;
        }

        ((MetaDataContainer) getParentObject()).writeMetadata(this);

        try {
            scalarAttributeCommonIO(H5File.IO_TYPE.WRITE, buf);
        }
        catch (Exception ex) {
            log.debug("write(Object): failed to write to scalar attribute: ", ex);
            throw new Exception("failed to write to scalar attribute: " + ex.getMessage(), ex);
        }
        resetSelection();
    }

    private Object scalarAttributeCommonIO(H5File.IO_TYPE ioType, Object writeBuf) throws Exception {
        H5Datatype dsDatatype = (H5Datatype)getDatatype();
        Object theData = null;

        /*
         * I/O type-specific pre-initialization.
         */
        if (ioType == H5File.IO_TYPE.WRITE) {
            if (writeBuf == null) {
                log.debug("scalarAttributeCommonIO(): writeBuf is null");
                throw new Exception("write buffer is null");
            }

            /*
             * Check for any unsupported datatypes and fail early before
             * attempting to write to the attribute.
             */
            if (dsDatatype.isVLEN() && !dsDatatype.isText()) {
                log.debug("scalarAttributeCommonIO(): Cannot write non-string variable-length data");
                throw new HDF5Exception("Writing non-string variable-length data is not supported");
            }
        }

        long aid = open();
        if (aid >= 0) {
            log.trace("scalarAttributeCommonIO(): isDataLoaded={}", isDataLoaded);
            try {
                theData = AttributeCommonIO(aid, ioType, writeBuf);
            }
            finally {
                close(aid);
            }
        }
        else
            log.debug("scalarAttributeCommonIO(): failed to open attribute");

        return theData;
    }

    /* Implement interface Attribute */

    /**
     * Returns the HObject to which this Attribute is currently "attached".
     *
     * @return the HObject to which this Attribute is currently "attached".
     */
    public HObject getParentObject() {
        return parentObject;
    }

    /**
     * Sets the HObject to which this Attribute is "attached".
     *
     * @param pObj
     *            the new HObject to which this Attribute is "attached".
     */
    public void setParentObject(HObject pObj) {
        parentObject = pObj;
    }

    /**
     * Sets a property for the attribute.
     *
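     * A minimal sketch of the in-memory property map (the key and value below are
     * made-up examples, not keys the library itself defines):
     *
     * <pre>
     * attr.setProperty("origin", "converted from HDF4");
     * String origin = (String) attr.getProperty("origin");
     * </pre>
     *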
     * @param key the attribute Map key
     * @param value the attribute Map value
     */
    public void setProperty(String key, Object value) {
        properties.put(key, value);
    }

    /**
     * Gets a property for a given key.
     *
     * @param key the attribute Map key
     *
     * @return the property
     */
    public Object getProperty(String key) {
        return properties.get(key);
    }

    /**
     * Gets all property keys.
     *
     * @return the Collection of property keys
     */
    public Collection<String> getPropertyKeys() {
        return properties.keySet();
    }

    /**
     * Returns the name of the object. For example, "Raster Image #2".
     *
     * @return The name of the object.
     */
    public final String getAttributeName() {
        return getName();
    }

    /**
     * Retrieves the attribute data from the file.
     *
     * @return the attribute data.
     *
     * @throws Exception
     *             if the data can not be retrieved
     */
    public final Object getAttributeData() throws Exception, OutOfMemoryError {
        return getData();
    }

    /**
     * Returns the datatype of the attribute.
     *
     * @return the datatype of the attribute.
     */
    public final Datatype getAttributeDatatype() {
        return getDatatype();
    }

    /**
     * Returns the space type for the attribute. It returns a
     * negative number if it failed to retrieve the type information from
     * the file.
     *
     * @return the space type for the attribute.
     */
    public final int getAttributeSpaceType() {
        return getSpaceType();
    }

    /**
     * Returns the rank (number of dimensions) of the attribute. It returns a
     * negative number if it failed to retrieve the dimension information from
     * the file.
     *
     * @return the number of dimensions of the attribute.
     */
    public final int getAttributeRank() {
        return getRank();
    }

    /**
     * Returns the selected size of the rows and columns of the attribute. It returns a
     * negative number if it failed to retrieve the size information from
     * the file.
     *
     * @return the selected size of the rows and columns of the attribute.
     */
    public final int getAttributePlane() {
        return (int)getWidth() * (int)getHeight();
    }

    /**
     * Returns the array that contains the dimension sizes of the data value of
     * the attribute. It returns null if it failed to retrieve the dimension
     * information from the file.
     *
     * @return the dimension sizes of the attribute.
     */
    public final long[] getAttributeDims() {
        return getDims();
    }

    /**
     * Checks if the data is a single scalar point.
     *
     * @return true if the data is a single scalar point; otherwise, returns
     *         false.
     */
    public boolean isAttributeScalar() {
        return isScalar();
    }

    /**
     * Not intended for public use in the future.
     *
     * setData() is not safe to use because it changes the memory buffer
     * of the dataset object. Dataset operations such as write/read
     * will fail if the buffer type or size is changed.
     *
     * @param d  the object data, which must be an array of Objects
     */
    public void setAttributeData(Object d) {
        setData(d);
    }

    /**
     * Writes the memory buffer of this dataset to the file.
     *
     * @throws Exception if buffer can not be written
     */
    public void writeAttribute() throws Exception {
        write();
    }

    /**
     * Writes the given data buffer into this attribute in a file.
     *
     * The data buffer is an array that contains the data values of the attribute.
     * The data is written into the file as one data blob.
     *
     * @param buf
     *            The buffer that contains the data values.
     *
     * @throws Exception
     *             If there is an error at the library level.
     */
    public void writeAttribute(Object buf) throws Exception {
        write(buf);
    }

    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter. For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} of three data points.
     *
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     *
     * @return the string representation of the data values.
     */
    public String toAttributeString(String delimiter) {
        return toString(delimiter, -1);
    }

    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter. For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} of three data points.
     *
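     * A minimal usage sketch (the delimiter and item limit are arbitrary example
     * values):
     *
     * <pre>
     * // string form of at most the first 10 elements, comma-separated
     * String s = attr.toAttributeString(",", 10);
     * </pre>
     *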
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     * @param maxItems
     *            The maximum number of Array values to return
     *
     * @return the string representation of the data values.
     */
    public String toAttributeString(String delimiter, int maxItems) {
        Object theData = originalBuf;
        if (theData == null) {
            log.debug("toString: value is null");
            return null;
        }

        if (theData instanceof List<?>) {
            log.trace("toString: value is list");
            return null;
        }

        Class<? extends Object> valClass = theData.getClass();

        if (!valClass.isArray()) {
            log.trace("toString: finish - not array");
            String strValue = theData.toString();
            if (maxItems > 0 && strValue.length() > maxItems)
                // truncate the extra characters
                strValue = strValue.substring(0, maxItems);
            return strValue;
        }

        // value is an array
        StringBuilder sb = new StringBuilder();
        long lsize = 1;
        for (int j = 0; j < dims.length; j++)
            lsize *= dims[j];

        H5Datatype dtype = (H5Datatype)getDatatype();
        log.trace("toString: lsize={} isStdRef={} Array.getLength={}", lsize, dtype.isStdRef(), Array.getLength(theData));
        if (dtype.isStdRef()) {
            return ((H5ReferenceType)dtype).toString(delimiter, maxItems);
        }
        return toString(delimiter, maxItems);
    }

    /* Implement interface H5Attribute */

    /**
     * The general read and write attribute operations for HDF5 object data.
     *
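     * A minimal read sketch (assuming attr is this H5ScalarAttr; the attribute id must
     * be opened and closed by the caller):
     *
     * <pre>
     * long aid = attr.open();
     * if (aid &gt;= 0) {
     *     try {
     *         Object data = attr.AttributeCommonIO(aid, H5File.IO_TYPE.READ, null);
     *     }
     *     catch (Exception ex) {}
     *     finally {
     *         attr.close(aid);
     *     }
     * }
     * </pre>
     *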
     * @param attr_id
     *        the attribute to access
     * @param ioType
     *        the type of IO operation
     * @param objBuf
     *        the data buffer to use for write operation
     *
     * @return the attribute data
     *
     * @throws Exception
     *             if the data can not be retrieved
     */
    public Object AttributeCommonIO(long attr_id, H5File.IO_TYPE ioType, Object objBuf) throws Exception {
        H5Datatype dsDatatype = (H5Datatype) getDatatype();
        Object theData = null;

        long dt_size = dsDatatype.getDatatypeSize();
        log.trace("AttributeCommonIO(): create native");
        long tid = dsDatatype.createNative();

        if (ioType == H5File.IO_TYPE.READ) {
            log.trace("AttributeCommonIO():read ioType isNamed={} isEnum={} isText={} isRefObj={}", dsDatatype.isNamed(), dsDatatype.isEnum(), dsDatatype.isText(), dsDatatype.isRefObj());

            long lsize = 1;
            for (int j = 0; j < dims.length; j++)
                lsize *= dims[j];
            log.trace("AttributeCommonIO():read ioType dt_size={} lsize={}", dt_size, lsize);

            try {
                if (dsDatatype.isVarStr()) {
                    String[] strs = new String[(int) lsize];
                    for (int j = 0; j < lsize; j++)
                        strs[j] = "";
                    try {
                        log.trace("AttributeCommonIO():read ioType H5AreadVL");
                        H5.H5AreadVL(attr_id, tid, strs);
                    }
                    catch (Exception ex) {
                        log.debug("AttributeCommonIO():read ioType H5AreadVL failure: ", ex);
                        ex.printStackTrace();
                    }
                    theData = strs;
                }
                else if (dsDatatype.isCompound()) {
                    String[] strs = new String[(int) lsize];
                    for (int j = 0; j < lsize; j++)
                        strs[j] = "";
                    try {
                        log.trace("AttributeCommonIO():read ioType H5AreadComplex");
                        H5.H5AreadComplex(attr_id, tid, strs);
                    }
                    catch (Exception ex) {
                        ex.printStackTrace();
                    }
                    theData = strs;
                }
                else if (dsDatatype.isVLEN()) {
                    String[] strs = new String[(int) lsize];
                    for (int j = 0; j < lsize; j++)
                        strs[j] = "";
                    try {
                        log.trace("AttributeCommonIO():read ioType H5AreadVL");
                        H5.H5AreadVL(attr_id, tid, strs);
                    }
                    catch (Exception ex) {
                        log.debug("AttributeCommonIO():read ioType H5AreadVL failure: ", ex);
                        ex.printStackTrace();
                    }
                    theData = strs;
                }
                else {
                    Object attr_data = null;
                    try {
                        attr_data = H5Datatype.allocateArray(dsDatatype, (int) lsize);
                    }
                    catch (OutOfMemoryError e) {
                        log.debug("AttributeCommonIO():read ioType out of memory", e);
                        theData = null;
                    }
                    if (attr_data == null)
                        log.debug("AttributeCommonIO():read ioType allocateArray returned null");

                    log.trace("AttributeCommonIO():read ioType H5Aread isArray()={}", dsDatatype.isArray());
                    try {
                        H5.H5Aread(attr_id, tid, attr_data);
                    }
                    catch (Exception ex) {
                        log.debug("AttributeCommonIO():read ioType H5Aread failure: ", ex);
                        ex.printStackTrace();
                    }

                    /*
                     * Perform any necessary data conversions.
                     */
                    if (dsDatatype.isText() && convertByteToString && (attr_data instanceof byte[])) {
                        log.trace("AttributeCommonIO():read ioType isText: converting byte array to string array");
                        theData = byteToString((byte[]) attr_data, (int) dsDatatype.getDatatypeSize());
                    }
                    else if (dsDatatype.isFloat() && dsDatatype.getDatatypeSize() == 16) {
                        log.trace("AttributeCommonIO():read ioType isFloat: converting byte array to BigDecimal array");
                        theData = dsDatatype.byteToBigDecimal(0, (int)nPoints, (byte[]) attr_data);
                    }
                    else if (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isFloat() && dsDatatype.getDatatypeBase().getDatatypeSize() == 16) {
                        log.trace("AttributeCommonIO():read ioType isArray and isFloat: converting byte array to BigDecimal array");
                        long[] arrayDims = dsDatatype.getArrayDims();
                        int asize = (int)nPoints;
                        for (int j = 0; j < arrayDims.length; j++) {
                            asize *= arrayDims[j];
                        }
                        theData = ((H5Datatype)dsDatatype.getDatatypeBase()).byteToBigDecimal(0, asize, (byte[]) attr_data);
                    }
                    else
                        theData = attr_data;
                }
            }
            catch (HDF5DataFiltersException exfltr) {
                log.debug("AttributeCommonIO():read ioType read failure: ", exfltr);
                throw new Exception("Filter not available exception: " + exfltr.getMessage(), exfltr);
            }
            catch (Exception ex) {
                log.debug("AttributeCommonIO():read ioType read failure: ", ex);
                throw new Exception(ex.getMessage(), ex);
            }
            finally {
                dsDatatype.close(tid);
            }
            log.trace("AttributeCommonIO():read ioType data: {}", theData);
            originalBuf = theData;
            isDataLoaded = true;
        } // H5File.IO_TYPE.READ
        else {
            /*
             * Perform any necessary data conversions before writing the data.
             *
             * Note that v-len strings do not get converted, regardless of
             * conversion request type.
             */
            Object tmpData = objBuf;
            try {
                // Check if we need to convert integer data
                String cname = objBuf.getClass().getName();
                char dname = cname.charAt(cname.lastIndexOf("[") + 1);
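                // dname is the JVM array element descriptor that follows the last '['
                // in Class.getName(), e.g. 'S' for short[], 'I' for int[], 'J' for long[]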
                boolean doIntConversion = (((dt_size == 1) && (dname == 'S')) || ((dt_size == 2) && (dname == 'I'))
                        || ((dt_size == 4) && (dname == 'J')) || (dsDatatype.isUnsigned() && unsignedConverted));

                if (doIntConversion) {
                    log.trace("AttributeCommonIO(): converting integer data to unsigned C-type integers");
                    tmpData = convertToUnsignedC(objBuf, null);
                }
                else if (dsDatatype.isText() && !dsDatatype.isVarStr() && convertByteToString) {
                    log.trace("AttributeCommonIO(): converting string array to byte array");
                    tmpData = stringToByte((String[]) objBuf, (int)dt_size);
                }
                else if (dsDatatype.isEnum() && (Array.get(objBuf, 0) instanceof String)) {
                    log.trace("AttributeCommonIO(): converting enum names to values");
                    tmpData = dsDatatype.convertEnumNameToValue((String[]) objBuf);
                }
                else if (dsDatatype.isFloat() && dsDatatype.getDatatypeSize() == 16) {
                    log.trace("AttributeCommonIO(): isFloat: converting BigDecimal array to byte array");
                    throw new Exception("data conversion failure: cannot write BigDecimal values");
                    //tmpData = dsDatatype.bigDecimalToByte(0, (int)nPoints, (BigDecimal[]) objBuf);
                }
            }
            catch (Exception ex) {
                log.debug("AttributeCommonIO(): data conversion failure: ", ex);
                throw new Exception("data conversion failure: " + ex.getMessage());
            }

            /*
             * Actually write the data now that everything has been setup.
             */
            try {
                if (dsDatatype.isVLEN() || (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isVLEN())) {
                    log.trace("AttributeCommonIO(): H5AwriteVL aid={} tid={}", attr_id, tid);

                    H5.H5AwriteVL(attr_id, tid, (Object[]) tmpData);
                }
                else {
                    log.trace("AttributeCommonIO(): dsDatatype.isRef()={} data is String={}", dsDatatype.isRef(), tmpData instanceof String);
                    if (dsDatatype.isRef() && tmpData instanceof String) {
                        // reference is a path+name to the object
                        log.trace("AttributeCommonIO(): Attribute class is CLASS_REFERENCE");
                        log.trace("AttributeCommonIO(): H5Awrite aid={} tid={}", attr_id, tid);
                        byte[] refBuf = H5.H5Rcreate_object(getFID(), (String) tmpData, HDF5Constants.H5P_DEFAULT);
                        if (refBuf != null) {
                            H5.H5Awrite(attr_id, tid, refBuf);
                            H5.H5Rdestroy(refBuf);
                        }
                    }
                    else if (Array.get(tmpData, 0) instanceof String) {
                        int len = ((String[]) tmpData).length;
                        byte[] bval = Dataset.stringToByte((String[]) tmpData, (int)dt_size);
                        if (bval != null && bval.length == dt_size * len) {
                            bval[bval.length - 1] = 0;
                            tmpData = bval;
                        }
                        log.trace("AttributeCommonIO(): String={}", tmpData);
                        log.trace("AttributeCommonIO(): H5Awrite aid={} tid={}", attr_id, tid);
                        H5.H5Awrite(attr_id, tid, tmpData);
                    }
                    else {
                        log.trace("AttributeCommonIO(): H5Awrite aid={} tid={}", attr_id, tid);
                        H5.H5Awrite(attr_id, tid, tmpData);
                    }
                }
            }
            catch (Exception ex) {
                log.debug("AttributeCommonIO(): write failure: ", ex);
                throw new Exception(ex.getMessage());
            }
            finally {
                dsDatatype.close(tid);
            }
        } // H5File.IO_TYPE.WRITE

        return theData;
    }

    /**
     * Reads a subset of an attribute for HDF5 object data.
     *
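     * A minimal sketch (assuming a 3-or-more-dimensional H5ScalarAttr whose selection
     * arrays have already been configured, e.g. via getStartDims() and getSelectedDims();
     * getData() applies this selection implicitly for rank &gt; 2):
     *
     * <pre>
     * try {
     *     Object selected = attr.getData();
     * }
     * catch (Exception ex) {}
     * </pre>
     *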
     * @return the selected attribute data
     *
     * @throws Exception
     *             if the data can not be retrieved
     */
    public Object AttributeSelection() throws Exception {
        H5Datatype dsDatatype = (H5Datatype) getDatatype();
        Object theData = H5Datatype.allocateArray(dsDatatype, (int)nPoints);
        if (dsDatatype.isText() && convertByteToString && (theData instanceof byte[])) {
            log.trace("AttributeSelection(): isText: converting byte array to string array");
            theData = byteToString((byte[]) theData, (int) dsDatatype.getDatatypeSize());
        }
        else if (dsDatatype.isFloat() && dsDatatype.getDatatypeSize() == 16) {
            log.trace("AttributeSelection(): isFloat: converting byte array to BigDecimal array");
            theData = dsDatatype.byteToBigDecimal(0, (int)nPoints, (byte[]) theData);
        }
        else if (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isFloat() && dsDatatype.getDatatypeBase().getDatatypeSize() == 16) {
            log.trace("AttributeSelection(): isArray and isFloat: converting byte array to BigDecimal array");
            long[] arrayDims = dsDatatype.getArrayDims();
            int asize = (int)nPoints;
            for (int j = 0; j < arrayDims.length; j++) {
                asize *= arrayDims[j];
            }
            theData = ((H5Datatype)dsDatatype.getDatatypeBase()).byteToBigDecimal(0, asize, (byte[]) theData);
        }
        Object theOrig = originalBuf;
        log.trace("AttributeSelection(): originalBuf={}", originalBuf);

        // Copy the selection from originalBuf to theData.
        // Only three dims are involved and the selected data is 2-dimensional:
        //     getHeight() is the row dimension
        //     getWidth() is the col dimension
        //     getDepth() is the frame dimension
        long[] start = getStartDims();
        long curFrame = start[selectedIndex[2]];
        for (int col = 0; col < (int)getWidth(); col++) {
            for (int row = 0; row < (int)getHeight(); row++) {
                int k = (int)startDims[selectedIndex[2]] * (int)getDepth();
                int index = row * (int)getWidth() + col;
                log.trace("AttributeSelection(): point{} row:col:k={}:{}:{}", curFrame, row, col, k);
                int fromIndex = ((int)curFrame * (int)getWidth() * (int)getHeight() +
                                        col * (int)getHeight() +
                                        row);// * (int) dsDatatype.getDatatypeSize();
                int toIndex = (col * (int)getHeight() +
                        row);// * (int) dsDatatype.getDatatypeSize();
                int objSize = 1;
                if (dsDatatype.isArray()) {
                    long[] arrayDims = dsDatatype.getArrayDims();
                    objSize = arrayDims.length;
                }
                else if (dsDatatype.isRef()) {
                    objSize = HDF5Constants.H5R_REF_BUF_SIZE;
                    fromIndex = fromIndex * HDF5Constants.H5R_REF_BUF_SIZE;
                    toIndex = toIndex * HDF5Constants.H5R_REF_BUF_SIZE;
                }
                System.arraycopy(theOrig, fromIndex, theData, toIndex, objSize);
            }
        }

        log.trace("AttributeSelection(): theData={}", theData);
        return theData;
    }
}