/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the COPYING file, which can be found  *
 * at the root of the source code distribution tree,                         *
 * or in https://www.hdfgroup.org/licenses.                                  *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.h4;

import java.lang.reflect.Array;
import java.math.BigInteger;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import hdf.object.Attribute;
import hdf.object.DataFormat;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.MetaDataContainer;
import hdf.object.ScalarDS;

/**
 * An attribute is a (name, value) pair of metadata attached to a primary data object such as a dataset, group or
 * named datatype.
 *
 * Like a dataset, an attribute has a name, datatype and dataspace.
 *
 * For more details on attributes, see
 * <a href="https://hdfgroup.github.io/hdf5/_h5_a__u_g.html#sec_attribute">Attributes in the HDF5 User Guide</a>.
 *
 * The following code is an example of an attribute with a 1D integer array of two elements.
 *
 * <pre>
 * // Example of creating a new attribute
 * // The name of the new attribute
 * String name = "Data range";
 * // Creating an unsigned 1-byte integer datatype
 * Datatype type = new H4Datatype(Datatype.CLASS_INTEGER, // class
 *                                1,                      // size in bytes
 *                                Datatype.ORDER_LE,      // byte order
 *                                Datatype.SIGN_NONE);    // unsigned
 * // 1-D array of size two
 * long[] dims = {2};
 * // The value of the attribute
 * int[] value = {0, 255};
 * // Create a new attribute attached to parentObj (the dataset or group that owns it)
 * H4ScalarAttribute dataRange = new H4ScalarAttribute(parentObj, name, type, dims);
 * // Set the attribute value
 * dataRange.setValue(value);
 * // See FileFormat.writeAttribute() for how to attach an attribute to an object,
 * &#64;see hdf.object.FileFormat#writeAttribute(HObject, Attribute, boolean)
 * </pre>
 *
 * For an atomic datatype, the value of an H4ScalarAttribute is a 1D array of integers, floats or strings.
 *
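 * As a minimal sketch (assuming {@code attr} is an existing H4ScalarAttribute whose datatype is a 32-bit signed
 * integer; the variable name is illustrative only), the value can be read back and cast to the matching Java
 * array type:
 *
 * <pre>
 * // attr is assumed to be an H4ScalarAttribute with a 32-bit signed integer datatype
 * int[] values = (int[]) attr.getAttributeData();
 * System.out.println(attr.getAttributeName() + " = " + values[0]);
 * </pre>
 *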
 * @see hdf.object.Datatype
 *
 * @version 2.0 4/2/2018
 * @author Peter X. Cao, Jordan T. Henderson
 */
public class H4ScalarAttribute extends ScalarDS implements Attribute {

    private static final long serialVersionUID = 2072473407027648309L;

    private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H4ScalarAttribute.class);

    /** The HObject to which this H4ScalarAttribute is attached, Attribute interface */
    protected HObject         parentObject;

    /** additional information and properties for the attribute, Attribute interface */
    private transient Map<String, Object> properties;

    /**
     * Create an attribute with the specified name, datatype and dimension sizes.
     *
     * For a scalar attribute, the dimension size can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general class
     * independent of the file format; e.g., the implementation of attribute
     * applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = &quot;CLASS&quot;;
     * String[] classValue = { &quot;IMAGE&quot; };
     * Datatype attrType = null;
     * try {
     *     attrType = new H4Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * H4ScalarAttribute attr = new H4ScalarAttribute(parentObj, attrName, attrType, attrDims);
     * attr.setValue(classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this Attribute is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for a scalar attribute
     *
     * @see hdf.object.Datatype
     */
    public H4ScalarAttribute(HObject parentObj, String attrName, Datatype attrType, long[] attrDims) {
        this(parentObj, attrName, attrType, attrDims, null);
    }

    /**
     * Create an attribute with a specific name and value.
     *
     * For a scalar attribute, the dimension size can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general class
     * independent of the file format; e.g., the implementation of attribute
     * applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = &quot;CLASS&quot;;
     * String[] classValue = { &quot;IMAGE&quot; };
     * Datatype attrType = null;
     * try {
     *     attrType = new H4Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * H4ScalarAttribute attr = new H4ScalarAttribute(parentObj, attrName, attrType, attrDims, classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this Attribute is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for a scalar attribute
     * @param attrValue
     *            the value of the attribute, null if no value
     *
     * @see hdf.object.Datatype
     */
    @SuppressWarnings({ "rawtypes", "unchecked", "deprecation" })
    public H4ScalarAttribute(HObject parentObj, String attrName, Datatype attrType, long[] attrDims, Object attrValue) {
        super((parentObj == null) ? null : parentObj.getFileFormat(), attrName,
                (parentObj == null) ? null : parentObj.getFullName(), null);

        log.trace("H4ScalarAttribute: start {}", parentObj);
        this.parentObject = parentObj;

        unsignedConverted = false;

        datatype = attrType;

        if (attrValue != null) {
            data = attrValue;
            originalBuf = attrValue;
            isDataLoaded = true;
        }
        properties = new HashMap<>();

        if (attrDims == null) {
            rank = 1;
            dims = new long[] { 1 };
        }
        else {
            dims = attrDims;
            rank = dims.length;
        }

        selectedDims = new long[rank];
        startDims = new long[rank];
        selectedStride = new long[rank];

        log.trace("attrName={}, attrType={}, attrValue={}, rank={}, isUnsigned={}",
                attrName, getDatatype().getDescription(), data, rank, getDatatype().isUnsigned());

        resetSelection();
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#open()
     */
    @Override
    public long open() {
        if (parentObject == null) {
            log.debug("open(): attribute's parent object is null");
            return -1;
        }

        long aid = -1;
        long pObjID = -1;

        try {
            pObjID = parentObject.open();
            if (pObjID >= 0) {
                if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF4))) {
                    log.trace("open(): FILE_TYPE_HDF4");
                    /*
                     * TODO: Get type of HDF4 object this is attached to and retrieve attribute info.
                     */
                }
            }

            log.trace("open(): aid={}", aid);
        }
        catch (Exception ex) {
            log.debug("open(): Failed to open attribute {}: ", getName(), ex);
            aid = -1;
        }
        finally {
            parentObject.close(pObjID);
        }

        return aid;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#close(int)
     */
    @Override
    public void close(long aid) {
        if (aid >= 0) {
            if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF4))) {
                log.trace("close(): FILE_TYPE_HDF4");
                /*
                 * TODO: Get type of HDF4 object this is attached to and close attribute.
                 */
            }
        }
    }

    @Override
    public void init() {
        if (inited) {
            resetSelection();
            log.trace("init(): Attribute already inited");
            return;
        }

        if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF4))) {
            log.trace("init(): FILE_TYPE_HDF4");
            /*
             * TODO: If HDF4 attribute object needs to init dependent objects.
             */
            inited = true;
        }

        resetSelection();
    }

    /**
     * Reads the data from the file.
     *
     * read() reads the data from the file into a memory buffer and returns the buffer. The dataset object does
     * not hold the memory buffer; to store the buffer in the dataset object, one must call getData().
     *
     * By default, the whole dataset is read into memory. Users can also select
     * a subset to read. Subsetting is done in an implicit way.
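     *
     * A minimal sketch (assuming {@code attr} is an existing H4ScalarAttribute with a 32-bit signed integer
     * datatype; the variable names are illustrative only):
     *
     * <pre>
     * // read the attribute values from the file
     * int[] raw = (int[]) attr.read();
     * // getData() returns the buffer and keeps it in the attribute object
     * int[] cached = (int[]) attr.getData();
     * </pre>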
     *
     * @return the data read from file.
     *
     * @see #getData()
     *
     * @throws Exception
     *             if the object cannot be read
     * @throws OutOfMemoryError
     *             if memory is exhausted
     */
    @Override
    public Object read() throws Exception, OutOfMemoryError {
        if (!inited)
            init();

        return data;
    }

    /* Implement abstract Dataset */

    /**
     * Writes a memory buffer to the object in the file.
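     *
     * A minimal sketch (assuming {@code attr} is an existing H4ScalarAttribute with a 32-bit signed integer
     * datatype attached to an object in a writable file; the variable names are illustrative only):
     *
     * <pre>
     * int[] newValue = { 10 };
     * // replaces the cached buffer and writes the attribute back to the parent object
     * attr.write(newValue);
     * </pre>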
     *
     * @param buf
     *            The buffer that contains the data values.
     *
     * @throws Exception
     *             if the data cannot be written
     */
    @Override
    public void write(Object buf) throws Exception {
        log.trace("function of dataset: write(Object) start");
        if (!buf.equals(data))
            setData(buf);

        init();

        if (parentObject == null) {
            log.debug("write(Object): parent object is null; nowhere to write attribute to");
            return;
        }

        ((MetaDataContainer) getParentObject()).writeMetadata(this);
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#copy(hdf.object.Group, java.lang.String, long[], java.lang.Object)
     */
    @Override
    public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff) throws Exception {
        // not supported
        throw new UnsupportedOperationException("copy operation unsupported for H4.");
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#readBytes()
     */
    @Override
    public byte[] readBytes() throws Exception {
        // not supported
        throw new UnsupportedOperationException("readBytes operation unsupported for H4.");
    }

    /* Implement interface Attribute */

    /**
     * Returns the HObject to which this Attribute is currently "attached".
     *
     * @return the HObject to which this Attribute is currently "attached".
     */
    public HObject getParentObject() {
        return parentObject;
    }

    /**
     * Sets the HObject to which this Attribute is "attached".
     *
     * @param pObj
     *            the new HObject to which this Attribute is "attached".
     */
    public void setParentObject(HObject pObj) {
        parentObject = pObj;
    }

    /**
     * Sets a property for the attribute.
     *
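     * A minimal sketch (assuming {@code attr} is an existing H4ScalarAttribute; the property key and value are
     * illustrative only):
     *
     * <pre>
     * attr.setProperty("units", "meters");
     * String units = (String) attr.getProperty("units");
     * </pre>
     *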
     * @param key the attribute Map key
     * @param value the attribute Map value
     */
    public void setProperty(String key, Object value) {
        properties.put(key, value);
    }

    /**
     * Gets the property for a given key.
     *
     * @param key the attribute Map key
     *
     * @return the property
     */
    public Object getProperty(String key) {
        return properties.get(key);
    }

    /**
     * Gets all property keys.
     *
     * @return the Collection of property keys
     */
    public Collection<String> getPropertyKeys() {
        return properties.keySet();
    }

    /**
     * Returns the name of the object. For example, "Raster Image #2".
     *
     * @return The name of the object.
     */
    public final String getAttributeName() {
        return getName();
    }

    /**
     * Retrieves the attribute data from the file.
     *
     * @return the attribute data.
     *
     * @throws Exception
     *             if the data cannot be retrieved
     */
    public final Object getAttributeData() throws Exception, OutOfMemoryError {
        return getData();
    }

    /**
     * Returns the datatype of the attribute.
     *
     * @return the datatype of the attribute.
     */
    public final Datatype getAttributeDatatype() {
        return getDatatype();
    }

    /**
     * Returns the space type for the attribute. It returns a
     * negative number if it failed to retrieve the type information from
     * the file.
     *
     * @return the space type for the attribute.
     */
    public final int getAttributeSpaceType() {
        return getSpaceType();
    }

    /**
     * Returns the rank (number of dimensions) of the attribute. It returns a
     * negative number if it failed to retrieve the dimension information from
     * the file.
     *
     * @return the number of dimensions of the attribute.
     */
    public final int getAttributeRank() {
        return getRank();
    }

    /**
     * Returns the selected size of the rows and columns of the attribute. It returns a
     * negative number if it failed to retrieve the size information from
     * the file.
     *
     * @return the selected size of the rows and columns of the attribute.
     */
    public final int getAttributePlane() {
        return (int)getWidth() * (int)getHeight();
    }

    /**
     * Returns the array that contains the dimension sizes of the data value of
     * the attribute. It returns null if it failed to retrieve the dimension
     * information from the file.
     *
     * @return the dimension sizes of the attribute.
     */
    public final long[] getAttributeDims() {
        return getDims();
    }

    /**
     * @return true if the dataspace is a NULL; otherwise, returns false.
     */
    @Override
    public boolean isAttributeNULL() {
        return isNULL();
    }

    /**
     * @return true if the data is a single scalar point; otherwise, returns false.
     */
    public boolean isAttributeScalar() {
        return isScalar();
    }

    /**
     * Not for public use in the future.
     *
     * setData() is not safe to use because it changes the memory buffer
     * of the dataset object. Dataset operations such as write/read
     * will fail if the buffer type or size is changed.
     *
     * @param d  the object data, must be an array of Objects
     */
    public void setAttributeData(Object d) {
        setData(d);
    }

    /**
     * Writes the memory buffer of this dataset to the file.
     *
     * @throws Exception if the buffer cannot be written
     */
    public void writeAttribute() throws Exception {
        write();
    }

    /**
     * Writes the given data buffer into this attribute in a file.
     *
     * The data buffer is a one-dimensional array that contains the data values of the attribute. The data is
     * written to the file as one data blob.
     *
     * @param buf
     *            The buffer that contains the data values of the attribute.
     *
     * @throws Exception
     *             If there is an error at the library level.
     */
    public void writeAttribute(Object buf) throws Exception {
        write(buf);
    }

    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter. For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} of three data points.
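     *
     * A minimal sketch (assuming {@code attr} is an existing H4ScalarAttribute whose value is the integer array
     * {0, 255}; the variable name is illustrative only):
     *
     * <pre>
     * String s = attr.toAttributeString(", ");
     * // s would be "0, 255"
     * </pre>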
     *
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     *
     * @return the string representation of the data values.
     */
    public String toAttributeString(String delimiter) {
        return toString(delimiter, -1);
    }

    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter. For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} of three data points.
     *
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     * @param maxItems
     *            The maximum number of Array values to return
     *
     * @return the string representation of the data values.
     */
    public String toAttributeString(String delimiter, int maxItems) {
        return toString(delimiter, maxItems);
    }
}