/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the files COPYING and Copyright.html. *
 * COPYING can be found at the root of the source code distribution tree.    *
 * Or, see https://support.hdfgroup.org/products/licenses.html               *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.h4;

import java.lang.reflect.Array;
import java.math.BigInteger;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import hdf.object.Attribute;
import hdf.object.DataFormat;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.MetaDataContainer;
import hdf.object.ScalarDS;

/**
 * An attribute is a (name, value) pair of metadata attached to a primary data object such as a
 * dataset, group or named datatype.
 *
 * Like a dataset, an attribute has a name, datatype and dataspace.
 *
 * For more details on attributes, see the <a href=
 * "https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5
 * User's Guide</a>.
 *
 * The following code is an example of creating an attribute that holds a 1D integer array of two
 * elements.
 *
 * <pre>
 * // Example of creating a new attribute
 * // The name of the new attribute
 * String name = "Data range";
 * // Creating an unsigned 1-byte integer datatype
 * Datatype type = new H4Datatype(Datatype.CLASS_INTEGER, // class
 *                                1,                      // size in bytes
 *                                Datatype.ORDER_LE,      // byte order
 *                                Datatype.SIGN_NONE);    // unsigned
 * // 1-D array of size two
 * long[] dims = {2};
 * // The value of the attribute
 * int[] value = {0, 255};
 * // Create a new attribute attached to parentObj, the dataset or group that owns it
 * H4ScalarAttribute dataRange = new H4ScalarAttribute(parentObj, name, type, dims);
 * // Set the attribute value
 * dataRange.setAttributeData(value);
 * // See FileFormat.writeAttribute() for how to attach an attribute to an object.
 * &#64;see hdf.object.FileFormat#writeAttribute(HObject, Attribute, boolean)
 * </pre>
 *
 * For an atomic datatype, the value of an H4ScalarAttribute will be a 1D array of integers, floats or
 * strings.
 *
 * @see hdf.object.Datatype
 *
 * @version 2.0 4/2/2018
 * @author Peter X. Cao, Jordan T. Henderson
 */
public class H4ScalarAttribute extends ScalarDS implements Attribute {

    private static final long serialVersionUID = 2072473407027648309L;

    private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H4ScalarAttribute.class);

    /** The HObject to which this H4ScalarAttribute is attached, Attribute interface */
    protected HObject         parentObject;

    /** Additional information and properties for the attribute, Attribute interface */
    private transient Map<String, Object> properties;

    /**
     * Creates an attribute with the specified name, datatype and dimension sizes.
     *
     * For a scalar attribute, the dimension size can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general interface
     * and is independent of the file format, i.e., the implementation of an attribute
     * applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = &quot;CLASS&quot;;
     * String[] classValue = { &quot;IMAGE&quot; };
     * Datatype attrType = null;
     * try {
     *     attrType = new H4Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * H4ScalarAttribute attr = new H4ScalarAttribute(parentObj, attrName, attrType, attrDims);
     * attr.setAttributeData(classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this Attribute is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for scalar attribute
     *
     * @see hdf.object.Datatype
     */
    public H4ScalarAttribute(HObject parentObj, String attrName, Datatype attrType, long[] attrDims) {
        this(parentObj, attrName, attrType, attrDims, null);
    }

    /**
     * Creates an attribute with the specified name, datatype, dimension sizes and value.
     *
     * For a scalar attribute, the dimension size can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general interface
     * and is independent of the file format, i.e., the implementation of an attribute
     * applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = &quot;CLASS&quot;;
     * String[] classValue = { &quot;IMAGE&quot; };
     * Datatype attrType = null;
     * try {
     *     attrType = new H4Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * H4ScalarAttribute attr = new H4ScalarAttribute(parentObj, attrName, attrType, attrDims, classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this Attribute is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for scalar attribute
     * @param attrValue
     *            the value of the attribute, null if no value
     *
     * @see hdf.object.Datatype
     */
    @SuppressWarnings({ "rawtypes", "unchecked", "deprecation" })
    public H4ScalarAttribute(HObject parentObj, String attrName, Datatype attrType, long[] attrDims, Object attrValue) {
        super((parentObj == null) ? null : parentObj.getFileFormat(), attrName,
                (parentObj == null) ? null : parentObj.getFullName(), null);

        log.trace("H4ScalarAttribute: start {}", parentObj);
        this.parentObject = parentObj;

        unsignedConverted = false;

        datatype = attrType;

        if (attrValue != null) {
            data = attrValue;
            originalBuf = attrValue;
            isDataLoaded = true;
        }
        properties = new HashMap();

        if (attrDims == null) {
            rank = 1;
            dims = new long[] { 1 };
        }
        else {
            dims = attrDims;
            rank = dims.length;
        }

        selectedDims = new long[rank];
        startDims = new long[rank];
        selectedStride = new long[rank];

        log.trace("attrName={}, attrType={}, attrValue={}, rank={}, isUnsigned={}",
                attrName, getDatatype().getDescription(), data, rank, getDatatype().isUnsigned());

        resetSelection();
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#open()
     */
    @Override
    public long open() {
        if (parentObject == null) {
            log.debug("open(): attribute's parent object is null");
            return -1;
        }

        long aid = -1;
        long pObjID = -1;

        try {
            pObjID = parentObject.open();
            if (pObjID >= 0) {
                if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF4))) {
                    log.trace("open(): FILE_TYPE_HDF4");
                    /*
                     * TODO: Get type of HDF4 object this is attached to and retrieve attribute info.
                     */
                }
            }

            log.trace("open(): aid={}", aid);
        }
        catch (Exception ex) {
            log.debug("open(): Failed to open attribute {}: ", getName(), ex);
            aid = -1;
        }
        finally {
            parentObject.close(pObjID);
        }

        return aid;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#close(int)
     */
    @Override
    public void close(long aid) {
        if (aid >= 0) {
            if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF4))) {
                log.trace("close(): FILE_TYPE_HDF4");
                /*
                 * TODO: Get type of HDF4 object this is attached to and close attribute.
                 */
            }
        }
    }

    @Override
    public void init() {
        if (inited) {
            resetSelection();
            log.trace("init(): Attribute already inited");
            return;
        }

        if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF4))) {
            log.trace("init(): FILE_TYPE_HDF4");
            /*
             * TODO: If HDF4 attribute object needs to init dependent objects.
             */
            inited = true;
        }

        resetSelection();
    }

    /**
     * Reads the data from file.
     *
     * read() reads the data from file to a memory buffer and returns the memory
     * buffer. The dataset object does not hold the memory buffer. To store the
     * memory buffer in the dataset object, one must call getData().
     *
     * By default, the whole dataset is read into memory. Users can also select
     * a subset to read. Subsetting is done in an implicit way.
     *
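     * The following is a minimal usage sketch, assuming {@code attr} is an
     * H4ScalarAttribute that was retrieved from an open HDF4 file
     * ({@code attr} is a placeholder name, not part of this API):
     *
     * <pre>
     * // read() initializes the attribute if needed and returns the in-memory value
     * Object value = attr.read();
     * if (value instanceof int[]) {
     *     int[] intValues = (int[]) value;
     *     // process the integer values ...
     * }
     * </pre>
     *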
     * @return the data read from file.
     *
     * @see #getData()
     *
     * @throws Exception
     *             if object can not be read
     * @throws OutOfMemoryError
     *             if memory is exhausted
     */
    @Override
    public Object read() throws Exception, OutOfMemoryError {
        if (!inited)
            init();

        return data;
    }

    /* Implement abstract Dataset */

    /**
     * Writes a memory buffer to the object in the file.
     *
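     * The following is a minimal sketch of updating an attribute value, assuming
     * {@code attr} is an H4ScalarAttribute attached to a dataset or group
     * ({@code attr} is a placeholder name):
     *
     * <pre>
     * // Replace the attribute value and write it through the parent object's metadata
     * int[] newValue = {0, 127};
     * attr.write(newValue);
     * </pre>
     *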
     * @param buf
     *            The buffer that contains the data values.
     *
     * @throws Exception
     *             if data can not be written
     */
    @Override
    public void write(Object buf) throws Exception {
        log.trace("write(Object): start");
        if (!buf.equals(data))
            setData(buf);

        init();

        if (parentObject == null) {
            log.debug("write(Object): parent object is null; nowhere to write attribute to");
            return;
        }

        ((MetaDataContainer) getParentObject()).writeMetadata(this);
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#copy(hdf.object.Group, java.lang.String, long[], java.lang.Object)
     */
    @Override
    public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff) throws Exception {
        // not supported
        throw new UnsupportedOperationException("copy operation unsupported for H4.");
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#readBytes()
     */
    @Override
    public byte[] readBytes() throws Exception {
        // not supported
        throw new UnsupportedOperationException("readBytes operation unsupported for H4.");
    }

    /* Implement abstract ScalarDS */

    /*
     * (non-Javadoc)
     * @see hdf.object.ScalarDS#getPalette()
     */
    @Override
    public byte[][] getPalette() {
        if (palette == null) {
            palette = readPalette(0);
        }

        return palette;
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.ScalarDS#readPalette(int)
     */
    @Override
    public byte[][] readPalette(int idx) {
        return null;
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.ScalarDS#getPaletteRefs()
     */
    @Override
    public byte[] getPaletteRefs() {
        return null;
    }

    /* Implement interface Attribute */

    /**
     * Returns the HObject to which this Attribute is currently "attached".
     *
     * @return the HObject to which this Attribute is currently "attached".
     */
    public HObject getParentObject() {
        return parentObject;
    }

    /**
     * Sets the HObject to which this Attribute is "attached".
     *
     * @param pObj
     *            the new HObject to which this Attribute is "attached".
     */
    public void setParentObject(HObject pObj) {
        parentObject = pObj;
    }

    /**
     * Sets a property for the attribute.
     *
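     * The following is a minimal sketch of using the property map, assuming
     * {@code attr} is an H4ScalarAttribute; the key {@code "units"} is only an
     * illustrative value, not a key defined by this library:
     *
     * <pre>
     * // Store and retrieve an arbitrary key/value pair on the attribute object
     * attr.setProperty("units", "meters");
     * String units = (String) attr.getProperty("units");
     * </pre>
     *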
     * @param key the attribute Map key
     * @param value the attribute Map value
     */
    public void setProperty(String key, Object value) {
        properties.put(key, value);
    }

    /**
     * Gets a property for a given key.
     *
     * @param key the attribute Map key
     *
     * @return the property
     */
    public Object getProperty(String key) {
        return properties.get(key);
    }

    /**
     * Gets all property keys.
     *
     * @return the Collection of property keys
     */
    public Collection<String> getPropertyKeys() {
        return properties.keySet();
    }

    /**
     * Returns the name of the object. For example, "Raster Image #2".
     *
     * @return The name of the object.
     */
    public final String getAttributeName() {
        return getName();
    }

    /**
     * Retrieves the attribute data from the file.
     *
     * @return the attribute data.
     *
     * @throws Exception
     *             if the data can not be retrieved
     */
    public final Object getAttributeData() throws Exception, OutOfMemoryError {
        return getData();
    }

    /**
     * Returns the datatype of the attribute.
     *
     * @return the datatype of the attribute.
     */
    public final Datatype getAttributeDatatype() {
        return getDatatype();
    }

    /**
     * Returns the space type for the attribute. It returns a
     * negative number if it failed to retrieve the type information from
     * the file.
     *
     * @return the space type for the attribute.
     */
    public final int getAttributeSpaceType() {
        return getSpaceType();
    }

    /**
     * Returns the rank (number of dimensions) of the attribute. It returns a
     * negative number if it failed to retrieve the dimension information from
     * the file.
     *
     * @return the number of dimensions of the attribute.
     */
    public final int getAttributeRank() {
        return getRank();
    }

    /**
     * Returns the selected size of the rows and columns of the attribute. It returns a
     * negative number if it failed to retrieve the size information from
     * the file.
     *
     * @return the selected size of the rows and columns of the attribute.
     */
    public final int getAttributePlane() {
        return (int)getWidth() * (int)getHeight();
    }

    /**
     * Returns the array that contains the dimension sizes of the data value of
     * the attribute. It returns null if it failed to retrieve the dimension
     * information from the file.
     *
     * @return the dimension sizes of the attribute.
     */
    public final long[] getAttributeDims() {
        return getDims();
    }

    /**
     * Checks if the data is a single scalar point.
     *
     * @return true if the data is a single scalar point; otherwise, returns
     *         false.
     */
    public boolean isAttributeScalar() {
        return isScalar();
    }

    /**
     * Not intended for public use in the future.
     *
     * setData() is not safe to use because it changes the memory buffer
     * of the dataset object. Dataset operations such as write/read
     * will fail if the buffer type or size is changed.
     *
     * @param d  the object data, which must be an array of Objects
     */
    public void setAttributeData(Object d) {
        setData(d);
    }

    /**
     * Writes the memory buffer of this attribute to the file.
     *
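     * The following is a minimal sketch of flushing an updated value to the file,
     * assuming {@code attr} is an H4ScalarAttribute attached to a dataset or group
     * ({@code attr} is a placeholder name):
     *
     * <pre>
     * // Update the in-memory buffer, then write it to the attribute in the file
     * attr.setAttributeData(new int[] {0, 255});
     * attr.writeAttribute();
     * </pre>
     *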
     * @throws Exception if buffer can not be written
     */
    public void writeAttribute() throws Exception {
        write();
    }

    /**
     * Writes the given data buffer into this attribute in a file.
     *
     * The data buffer holds the data values of the attribute. The data is written
     * to the file as one data blob.
     *
     * @param buf
     *            The buffer that contains the data values of the attribute.
     *
     * @throws Exception
     *             If there is an error at the library level.
     */
    public void writeAttribute(Object buf) throws Exception {
        write(buf);
    }

    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter. For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} of three data points.
     *
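     * The following is a minimal sketch, assuming {@code attr} is an
     * H4ScalarAttribute whose value is the integer array {0, 255}
     * ({@code attr} is a placeholder name):
     *
     * <pre>
     * // Expected to produce the string "0, 255"
     * String text = attr.toAttributeString(", ");
     * </pre>
     *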
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     *
     * @return the string representation of the data values.
     */
    public String toAttributeString(String delimiter) {
        return toString(delimiter, -1);
    }

    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter. For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} of three data points.
     *
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     * @param maxItems
     *            The maximum number of Array values to return
     *
     * @return the string representation of the data values.
     */
    public String toAttributeString(String delimiter, int maxItems) {
        return toString(delimiter, maxItems);
    }
}