/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the COPYING file, which can be found  *
 * at the root of the source code distribution tree,                         *
 * or in https://www.hdfgroup.org/licenses.                                  *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.h4;

import java.lang.reflect.Array;
import java.math.BigInteger;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import hdf.object.Attribute;
import hdf.object.DataFormat;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.MetaDataContainer;
import hdf.object.ScalarDS;

/**
 * An attribute is a (name, value) pair of metadata attached to a primary data object such as a dataset, group
 * or named datatype.
 *
 * Like a dataset, an attribute has a name, datatype and dataspace.
 *
 * For more details on attributes, see the <a
 * href="https://hdfgroup.github.io/hdf5/_h5_a__u_g.html#sec_attribute">HDF5 Attributes</a> section of the
 * HDF5 User Guide.
 *
 * The following code is an example of an attribute with a 1D integer array of two elements.
 *
 * <pre>
 * // Example of creating a new attribute
 * // The name of the new attribute
 * String name = "Data range";
 * // Creating an unsigned 1-byte integer datatype
 * Datatype type = new Datatype(Datatype.CLASS_INTEGER, // class
 *                              1,                      // size in bytes
 *                              Datatype.ORDER_LE,      // byte order
 *                              Datatype.SIGN_NONE);    // unsigned
 * // 1-D array of size two
 * long[] dims = {2};
 * // The value of the attribute
 * int[] value = {0, 255};
 * // Create a new attribute attached to the object parentObj
 * H4ScalarAttribute dataRange = new H4ScalarAttribute(parentObj, name, type, dims);
 * // Set the attribute value
 * dataRange.setAttributeData(value);
 * // See FileFormat.writeAttribute() for how to attach an attribute to an object.
 * &#64;see hdf.object.FileFormat#writeAttribute(HObject, Attribute, boolean)
 * </pre>
 *
 * For an atomic datatype, the value of an H4ScalarAttribute will be a 1D array of integers, floats or
 * strings.
 *
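 * For example, a sketch of reading such a value back; the {@code attr} variable below is
 * illustrative only and is assumed to be an H4ScalarAttribute retrieved from an object in
 * an open file:
 *
 * <pre>
 * try {
 *     // For the "Data range" attribute above, the buffer is an int[] of two elements
 *     Object rawValue = attr.getAttributeData();
 *     if (rawValue instanceof int[]) {
 *         int[] range = (int[]) rawValue;
 *         System.out.println("min=" + range[0] + ", max=" + range[1]);
 *     }
 * }
 * catch (Exception ex) {}
 * </pre>
 *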
 * @see hdf.object.Datatype
 *
 * @version 2.0 4/2/2018
 * @author Peter X. Cao, Jordan T. Henderson
 */
public class H4ScalarAttribute extends ScalarDS implements Attribute {

    private static final long serialVersionUID = 2072473407027648309L;

    private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H4ScalarAttribute.class);

    /** The HObject to which this H4ScalarAttribute is attached (Attribute interface) */
    protected HObject parentObject;

    /** Additional information and properties for the attribute (Attribute interface) */
    private transient Map<String, Object> properties;

    /**
     * Create an attribute with a specified name, datatype and dimension sizes.
     *
     * For a scalar attribute, the dimension size can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general interface
     * and is independent of file format, e.g., the implementation of an attribute
     * applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = &quot;CLASS&quot;;
     * String[] classValue = { &quot;IMAGE&quot; };
     * Datatype attrType = null;
     * try {
     *     attrType = new H4Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1,
     *                               Datatype.NATIVE, Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * // parentObj is the HObject to which the new attribute is attached
     * H4ScalarAttribute attr = new H4ScalarAttribute(parentObj, attrName, attrType, attrDims);
     * attr.setAttributeData(classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this Attribute is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, or null for a scalar attribute
     *
     * @see hdf.object.Datatype
     */
    public H4ScalarAttribute(HObject parentObj, String attrName, Datatype attrType, long[] attrDims)
    {
        this(parentObj, attrName, attrType, attrDims, null);
    }

    /**
     * Create an attribute with a specific name and value.
     *
     * For a scalar attribute, the dimension size can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general interface
     * and is independent of file format, e.g., the implementation of an attribute
     * applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = &quot;CLASS&quot;;
     * String[] classValue = { &quot;IMAGE&quot; };
     * Datatype attrType = null;
     * try {
     *     attrType = new H4Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1,
     *                               Datatype.NATIVE, Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * // parentObj is the HObject to which the new attribute is attached
     * H4ScalarAttribute attr = new H4ScalarAttribute(parentObj, attrName, attrType, attrDims, classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this Attribute is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, or null for a scalar attribute
     * @param attrValue
     *            the value of the attribute, or null if there is no value
     *
     * @see hdf.object.Datatype
     */
    @SuppressWarnings({"rawtypes", "unchecked", "deprecation"})
    public H4ScalarAttribute(HObject parentObj, String attrName, Datatype attrType, long[] attrDims,
                             Object attrValue)
    {
        super((parentObj == null) ? null : parentObj.getFileFormat(), attrName,
              (parentObj == null) ? null : parentObj.getFullName(), null);

        log.trace("H4ScalarAttribute: start {}", parentObj);
        this.parentObject = parentObj;

        unsignedConverted = false;

        datatype = attrType;

        if (attrValue != null) {
            data         = attrValue;
            originalBuf  = attrValue;
            isDataLoaded = true;
        }
        properties = new HashMap();

        if (attrDims == null) {
            rank = 1;
            dims = new long[] {1};
        }
        else {
            dims = attrDims;
            rank = dims.length;
        }

        selectedDims   = new long[rank];
        startDims      = new long[rank];
        selectedStride = new long[rank];

        log.trace("attrName={}, attrType={}, attrValue={}, rank={}, isUnsigned={}", attrName,
                  getDatatype().getDescription(), data, rank, getDatatype().isUnsigned());

        resetSelection();
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#open()
     */
    @Override
    public long open()
    {
        if (parentObject == null) {
            log.debug("open(): attribute's parent object is null");
            return -1;
        }

        long aid    = -1;
        long pObjID = -1;

        try {
            pObjID = parentObject.open();
            if (pObjID >= 0) {
                if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF4))) {
                    log.trace("open(): FILE_TYPE_HDF4");
                    /*
                     * TODO: Get type of HDF4 object this is attached to and retrieve attribute info.
                     */
                }
            }

            log.trace("open(): aid={}", aid);
        }
        catch (Exception ex) {
            log.debug("open(): Failed to open attribute {}: ", getName(), ex);
            aid = -1;
        }
        finally {
            parentObject.close(pObjID);
        }

        return aid;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#close(int)
     */
    @Override
    public void close(long aid)
    {
        if (aid >= 0) {
            if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF4))) {
                log.trace("close(): FILE_TYPE_HDF4");
                /*
                 * TODO: Get type of HDF4 object this is attached to and close attribute.
                 */
            }
        }
    }

    @Override
    public void init()
    {
        if (inited) {
            resetSelection();
            log.trace("init(): Attribute already inited");
            return;
        }

        if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF4))) {
            log.trace("init(): FILE_TYPE_HDF4");
            /*
             * TODO: If HDF4 attribute object needs to init dependent objects.
             */
            inited = true;
        }

        resetSelection();
    }

    /**
     * Reads the data from file.
     *
     * read() reads the data from file to a memory buffer and returns the memory
     * buffer. The dataset object does not hold the memory buffer. To store the
     * memory buffer in the dataset object, one must call getData().
     *
     * By default, the whole dataset is read into memory. Users can also select
     * a subset to read. Subsetting is done in an implicit way.
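     *
     * A minimal usage sketch; the {@code attr} variable below is illustrative and is
     * assumed to refer to an attribute of an object in an open HDF4 file:
     *
     * <pre>
     * try {
     *     // read the attribute value from the file
     *     Object buf = attr.read();
     *     // getData() reads (if necessary) and also keeps the buffer in the attribute object
     *     Object stored = attr.getData();
     * }
     * catch (Exception ex) {}
     * </pre>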
     *
     * @return the data read from file.
     *
     * @see #getData()
     *
     * @throws Exception
     *             if object can not be read
     * @throws OutOfMemoryError
     *             if memory is exhausted
     */
    @Override
    public Object read() throws Exception, OutOfMemoryError
    {
        if (!inited)
            init();

        return data;
    }

    /* Implement abstract Dataset */

    /**
     * Writes a memory buffer to the object in the file.
     *
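     * A minimal usage sketch, assuming {@code attr} is an H4ScalarAttribute attached to a
     * parent object and {@code newValue} matches the attribute's datatype and dimensions
     * (both names are illustrative):
     *
     * <pre>
     * int[] newValue = {0, 100};
     * try {
     *     // updates the memory buffer and writes it to the parent object
     *     attr.write(newValue);
     * }
     * catch (Exception ex) {}
     * </pre>
     *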
     * @param buf
     *            The buffer that contains the data values.
     *
     * @throws Exception
     *             if data can not be written
     */
    @Override
    public void write(Object buf) throws Exception
    {
        log.trace("function of dataset: write(Object) start");
        if (!buf.equals(data))
            setData(buf);

        init();

        if (parentObject == null) {
            log.debug("write(Object): parent object is null; nowhere to write attribute to");
            return;
        }

        ((MetaDataContainer)getParentObject()).writeMetadata(this);
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#copy(hdf.object.Group, java.lang.String, long[], java.lang.Object)
     */
    @Override
    public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff) throws Exception
    {
        // not supported
        throw new UnsupportedOperationException("copy operation unsupported for H4.");
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#readBytes()
     */
    @Override
    public byte[] readBytes() throws Exception
    {
        // not supported
        throw new UnsupportedOperationException("readBytes operation unsupported for H4.");
    }

    /* Implement interface Attribute */

    /**
     * Returns the HObject to which this Attribute is currently "attached".
     *
     * @return the HObject to which this Attribute is currently "attached".
     */
    public HObject getParentObject() { return parentObject; }

    /**
     * Sets the HObject to which this Attribute is "attached".
     *
     * @param pObj
     *            the new HObject to which this Attribute is "attached".
     */
    public void setParentObject(HObject pObj) { parentObject = pObj; }

    /**
     * Sets a property for the attribute.
     *
     * @param key the attribute Map key
     * @param value the attribute Map value
     */
    public void setProperty(String key, Object value) { properties.put(key, value); }

    /**
     * Gets a property for a given key.
     *
     * @param key the attribute Map key
     *
     * @return the property
     */
    public Object getProperty(String key) { return properties.get(key); }

    /**
     * Gets all property keys.
     *
     * @return the Collection of property keys
     */
    public Collection<String> getPropertyKeys() { return properties.keySet(); }

    /**
     * Returns the name of the object. For example, "Raster Image #2".
     *
     * @return The name of the object.
     */
    public final String getAttributeName() { return getName(); }

    /**
     * Retrieves the attribute data from the file.
     *
     * @return the attribute data.
     *
     * @throws Exception
     *             if the data can not be retrieved
     */
    public final Object getAttributeData() throws Exception, OutOfMemoryError { return getData(); }

    /**
     * Returns the datatype of the attribute.
     *
     * @return the datatype of the attribute.
     */
    public final Datatype getAttributeDatatype() { return getDatatype(); }

    /**
     * Returns the space type for the attribute. It returns a
     * negative number if it failed to retrieve the type information from
     * the file.
     *
     * @return the space type for the attribute.
     */
    public final int getAttributeSpaceType() { return getSpaceType(); }

    /**
     * Returns the rank (number of dimensions) of the attribute. It returns a
     * negative number if it failed to retrieve the dimension information from
     * the file.
     *
     * @return the number of dimensions of the attribute.
     */
    public final int getAttributeRank() { return getRank(); }

    /**
     * Returns the selected size of the rows and columns of the attribute. It returns a
     * negative number if it failed to retrieve the size information from
     * the file.
     *
     * @return the selected size of the rows and columns of the attribute.
     */
    public final int getAttributePlane() { return (int)getWidth() * (int)getHeight(); }

    /**
     * Returns the array that contains the dimension sizes of the data value of
     * the attribute. It returns null if it failed to retrieve the dimension
     * information from the file.
     *
     * @return the dimension sizes of the attribute.
     */
    public final long[] getAttributeDims() { return getDims(); }

    /**
     * @return true if the dataspace is a NULL; otherwise, returns false.
     */
    @Override
    public boolean isAttributeNULL()
    {
        return isNULL();
    }

    /**
     * @return true if the data is a single scalar point; otherwise, returns false.
     */
    public boolean isAttributeScalar() { return isScalar(); }

    /**
     * Not for public use in the future.
     *
     * setData() is not safe to use because it changes the memory buffer
     * of the dataset object. Dataset operations such as write/read
     * will fail if the buffer type or size is changed.
     *
     * @param d  the object data, which must be an array of Objects
     */
    public void setAttributeData(Object d) { setData(d); }

    /**
     * Writes the memory buffer of this dataset to the file.
     *
     * @throws Exception if the buffer can not be written
     */
    public void writeAttribute() throws Exception { write(); }

    /**
     * Writes the given data buffer into this attribute in a file.
     *
     * The data buffer holds the attribute's data values and is written to the file
     * as a single block.
     *
     * @param buf
     *            The buffer that contains the data values.
     *
     * @throws Exception
     *             If there is an error at the library level.
     */
    public void writeAttribute(Object buf) throws Exception { write(buf); }

    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter. For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} of three data points.
     *
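     * A short usage sketch; {@code attr} is illustrative. For the integer attribute value
     * {0, 255} from the class example, this would produce something like "0, 255":
     *
     * <pre>
     * String text = attr.toAttributeString(", ");
     * System.out.println(text);
     * </pre>
     *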
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     *
     * @return the string representation of the data values.
     */
    public String toAttributeString(String delimiter) { return toString(delimiter, -1); }

    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter. For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} of three data points.
     *
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     * @param maxItems
     *            The maximum number of Array values to return
     *
     * @return the string representation of the data values.
     */
    public String toAttributeString(String delimiter, int maxItems) { return toString(delimiter, maxItems); }
}