/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the COPYING file, which can be found  *
 * at the root of the source code distribution tree,                         *
 * or in https://www.hdfgroup.org/licenses.                                  *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.nc2;

import java.lang.reflect.Array;
import java.math.BigInteger;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import hdf.object.Attribute;
import hdf.object.DataFormat;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.ScalarDS;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * An attribute is a (name, value) pair of metadata attached to a primary data object such as a dataset, group
 * or named datatype.
 *
 * Like a dataset, an attribute has a name, datatype and dataspace.
 *
 * For more details on attributes, see <a
 * href="https://hdfgroup.github.io/hdf5/_h5_a__u_g.html#sec_attribute">HDF5 Attributes in the HDF5 User Guide</a>.
 *
 * The following code is an example of an attribute with a 1D integer array of two elements.
 *
 * <pre>
 * // Example of creating a new attribute
 * // The name of the new attribute
 * String name = "Data range";
 * // Creating an unsigned 1-byte integer datatype
 * Datatype type = new Datatype(Datatype.CLASS_INTEGER, // class
 *                              1,                      // size in bytes
 *                              Datatype.ORDER_LE,      // byte order
 *                              Datatype.SIGN_NONE);    // unsigned
 * // 1-D array of size two
 * long[] dims = {2};
 * // The value of the attribute
 * int[] value = {0, 255};
 * // Create a new attribute attached to parentObj, the dataset, group or
 * // named datatype that will hold it
 * NC2Attribute dataRange = new NC2Attribute(parentObj, name, type, dims);
 * // Set the attribute value
 * dataRange.setData(value);
 * // See FileFormat.writeAttribute() for how to attach an attribute to an object.
 * &#64;see hdf.object.FileFormat#writeAttribute(HObject, Attribute, boolean)
 * </pre>
 *
 * For an atomic datatype, the value of an Attribute will be a 1D array of integers, floats or strings. For a
 * compound datatype, it will be a 1D array of strings with field members separated by a comma. For example,
 * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int, float} with three data points.
 *
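 * For example, continuing the sketch above, the value can be retrieved as a Java array
 * (a minimal illustration):
 *
 * <pre>
 * int[] range = (int[]) dataRange.getData(); // {0, 255}
 * </pre>
 *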
 * @see hdf.object.Datatype
 *
 * @version 2.0 4/2/2018
 * @author Peter X. Cao, Jordan T. Henderson
 */
public class NC2Attribute extends ScalarDS implements Attribute {
    private static final long serialVersionUID = 2072473407027648309L;

    private static final Logger log = LoggerFactory.getLogger(NC2Attribute.class);

    /** The HObject to which this NC2Attribute is attached (required by the Attribute interface) */
    protected HObject parentObject;

    /** Additional information and properties for the attribute (required by the Attribute interface) */
    private transient Map<String, Object> properties;

    /**
     * Create an attribute with the specified name, data type and dimension sizes.
     *
     * For a scalar attribute, the dimension size can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general class
     * that is independent of the file format; for example, the implementation of
     * attribute applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = &quot;CLASS&quot;;
     * String[] classValue = { &quot;IMAGE&quot; };
     * Datatype attrType = null;
     * try {
     *     attrType = new NC2Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1,
     *                                Datatype.NATIVE, Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * // parentObj is the object the attribute is attached to
     * NC2Attribute attr = new NC2Attribute(parentObj, attrName, attrType, attrDims);
     * attr.setData(classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this Attribute is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for scalar attribute
     *
     * @see hdf.object.Datatype
     */
    public NC2Attribute(HObject parentObj, String attrName, Datatype attrType, long[] attrDims)
    {
        this(parentObj, attrName, attrType, attrDims, null);
    }

    /**
     * Create an attribute with a specific name and value.
     *
     * For a scalar attribute, the dimension size can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general class
     * that is independent of the file format; for example, the implementation of
     * attribute applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = &quot;CLASS&quot;;
     * String[] classValue = { &quot;IMAGE&quot; };
     * Datatype attrType = null;
     * try {
     *     attrType = new NC2Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1,
     *                                Datatype.NATIVE, Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * // parentObj is the object the attribute is attached to
     * NC2Attribute attr = new NC2Attribute(parentObj, attrName, attrType, attrDims, classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this Attribute is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for scalar attribute
     * @param attrValue
     *            the value of the attribute, null if no value
     *
     * @see hdf.object.Datatype
     */
    @SuppressWarnings({"rawtypes", "unchecked", "deprecation"})
    public NC2Attribute(HObject parentObj, String attrName, Datatype attrType, long[] attrDims,
                        Object attrValue)
    {
        super((parentObj == null) ? null : parentObj.getFileFormat(), attrName,
              (parentObj == null) ? null : parentObj.getFullName(), null);

        log.trace("NC2Attribute: start {}", parentObj);
        this.parentObject = parentObj;

        unsignedConverted = false;

        datatype = attrType;

        if (attrValue != null) {
            data         = attrValue;
            originalBuf  = attrValue;
            isDataLoaded = true;
        }
        properties = new HashMap();

        if (attrDims == null) {
            rank = 1;
            dims = new long[] {1};
        }
        else {
            dims = attrDims;
            rank = dims.length;
        }

        selectedDims   = new long[rank];
        startDims      = new long[rank];
        selectedStride = new long[rank];

        log.trace("attrName={}, attrType={}, attrValue={}, rank={}, isUnsigned={}", attrName,
                  getDatatype().getDescription(), data, rank, getDatatype().isUnsigned());

        resetSelection();
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#open()
     */
    @Override
    public long open()
    {
        long aid    = -1;
        long pObjID = -1;

        if (parentObject == null) {
            log.debug("open(): attribute's parent object is null");
            return -1;
        }

        try {
            pObjID = parentObject.open();
            if (pObjID >= 0) {
                if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_NC3))) {
                    log.trace("open(): FILE_TYPE_NC3");
                    /*
                     * TODO: Get type of netcdf3 object this is attached to and retrieve attribute info.
                     */
                }
            }

            log.trace("open(): aid={}", aid);
        }
        catch (Exception ex) {
            log.debug("open(): Failed to open attribute {}: ", getName(), ex);
            aid = -1;
        }
        finally {
            parentObject.close(pObjID);
        }

        return aid;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#close(int)
     */
    @Override
    public void close(long aid)
    {
        if (aid >= 0) {
            if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_NC3))) {
                log.trace("close(): FILE_TYPE_NC3");
                /*
                 * TODO: Get type of netcdf3 object this is attached to and close attribute.
                 */
            }
        }
    }

    @Override
    public void init()
    {
        if (inited) {
            resetSelection();
            log.trace("init(): NC2Attribute already inited");
            return;
        }

        if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_NC3))) {
            log.trace("init(): FILE_TYPE_NC3");
            /*
             * TODO: If netcdf3 attribute object needs to init dependent objects.
             */
            inited = true;
        }

        resetSelection();
    }

    /**
     * Reads the data from the file.
     *
     * read() reads the data from the file into a memory buffer and returns the memory
     * buffer. The dataset object does not hold the memory buffer. To store the
     * memory buffer in the dataset object, one must call getData().
     *
     * By default, the whole dataset is read into memory. Users can also select
     * a subset to read. Subsetting is done in an implicit way.
     *
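     * A minimal usage sketch (assuming {@code attr} refers to an existing NC2Attribute):
     *
     * <pre>
     * Object rawBuf = attr.read();    // read the value into a memory buffer
     * Object cached = attr.getData(); // read (if needed) and cache the buffer in the object
     * </pre>
     *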
     * @return the data read from file.
     *
     * @see #getData()
     *
     * @throws Exception
     *             if the object cannot be read
     * @throws OutOfMemoryError
     *             if memory is exhausted
     */
    @Override
    public Object read() throws Exception, OutOfMemoryError
    {
        if (!inited)
            init();

        return data;
    }

    /* Implement abstract Dataset */

    /**
     * Writes a memory buffer to the object in the file.
     *
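     * A minimal usage sketch (assuming {@code attr} refers to an existing NC2Attribute; as
     * implemented below, the call only replaces the cached data buffer):
     *
     * <pre>
     * attr.write(new int[] {0, 255});
     * </pre>
     *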
     * @param buf
     *            the data to write
     *
     * @throws Exception
     *             if the data cannot be written
     */
    @Override
    public void write(Object buf) throws Exception
    {
        log.trace("write(Object): start");
        if (!buf.equals(data))
            setData(buf);

        init();

        if (parentObject == null) {
            log.debug("write(Object): parent object is null; nowhere to write attribute to");
            return;
        }
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#copy(hdf.object.Group, java.lang.String, long[], java.lang.Object)
     */
    @Override
    public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff) throws Exception
    {
        // not supported
        throw new UnsupportedOperationException("copy operation unsupported for NC2.");
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#readBytes()
     */
    @Override
    public byte[] readBytes() throws Exception
    {
        // not supported
        throw new UnsupportedOperationException("readBytes operation unsupported for NC2.");
    }

    /* Implement interface Attribute */

    /**
     * Returns the HObject to which this Attribute is currently "attached".
     *
     * @return the HObject to which this Attribute is currently "attached".
     */
    public HObject getParentObject() { return parentObject; }

    /**
     * Sets the HObject to which this Attribute is "attached".
     *
     * @param pObj
     *            the new HObject to which this Attribute is "attached".
     */
    public void setParentObject(HObject pObj) { parentObject = pObj; }

    /**
     * Sets a property for the attribute.
     *
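     * A minimal sketch of attaching an application-defined property (the key name
     * "units" is only an illustrative example):
     *
     * <pre>
     * attr.setProperty("units", "K");
     * String units = (String) attr.getProperty("units");
     * </pre>
     *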
     * @param key the attribute Map key
     * @param value the attribute Map value
     */
    public void setProperty(String key, Object value) { properties.put(key, value); }

    /**
     * Gets a property for a given key.
     *
     * @param key the attribute Map key
     *
     * @return the property
     */
    public Object getProperty(String key) { return properties.get(key); }

    /**
     * Gets all property keys.
     *
     * @return the Collection of property keys
     */
    public Collection<String> getPropertyKeys() { return properties.keySet(); }

    /**
     * Returns the name of the object. For example, "Raster Image #2".
     *
     * @return The name of the object.
     */
    public final String getAttributeName() { return getName(); }

    /**
     * Retrieves the attribute data from the file.
     *
     * @return the attribute data.
     *
     * @throws Exception
     *             if the data cannot be retrieved
     */
    public final Object getAttributeData() throws Exception, OutOfMemoryError { return getData(); }

    /**
     * Returns the datatype of the attribute.
     *
     * @return the datatype of the attribute.
     */
    public final Datatype getAttributeDatatype() { return getDatatype(); }

    /**
     * Returns the space type for the attribute. It returns a
     * negative number if it failed to retrieve the type information from
     * the file.
     *
     * @return the space type for the attribute.
     */
    public final int getAttributeSpaceType() { return getSpaceType(); }

    /**
     * Returns the rank (number of dimensions) of the attribute. It returns a
     * negative number if it failed to retrieve the dimension information from
     * the file.
     *
     * @return the number of dimensions of the attribute.
     */
    public final int getAttributeRank() { return getRank(); }

    /**
     * Returns the selected size of the rows and columns of the attribute. It returns a
     * negative number if it failed to retrieve the size information from
     * the file.
     *
     * @return the selected size of the rows and columns of the attribute.
     */
    public final int getAttributePlane() { return (int)getWidth() * (int)getHeight(); }

    /**
     * Returns the array that contains the dimension sizes of the data value of
     * the attribute. It returns null if it failed to retrieve the dimension
     * information from the file.
     *
     * @return the dimension sizes of the attribute.
     */
    public final long[] getAttributeDims() { return getDims(); }

    /**
     * @return true if the dataspace is a NULL; otherwise, returns false.
     */
    public boolean isAttributeNULL() { return isNULL(); }

    /**
     * @return true if the data is a single scalar point; otherwise, returns false.
     */
    public boolean isAttributeScalar() { return isScalar(); }

    /**
     * Not for public use in the future.
     *
     * setData() is not safe to use because it changes the memory buffer
     * of the dataset object. Dataset operations such as write/read
     * will fail if the buffer type or size is changed.
     *
     * @param d  the object data; must be an array of Objects
     */
    public void setAttributeData(Object d) { setData(d); }

    /**
     * Writes the memory buffer of this dataset to the file.
     *
     * @throws Exception if the buffer cannot be written
     */
    public void writeAttribute() throws Exception { write(); }

    /**
     * Writes the given data buffer into this attribute in a file.
     *
     * The data buffer is a vector that contains the data values of compound fields. The data is written
     * into the file as one data blob.
     *
     * @param buf
     *            The vector that contains the data values of compound fields.
     *
     * @throws Exception
     *             If there is an error at the library level.
     */
    public void writeAttribute(Object buf) throws Exception { write(buf); }

    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter. For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} with three data points.
     *
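     * A minimal usage sketch (assuming the attribute holds the two-element value {0, 255}):
     *
     * <pre>
     * String s = attr.toAttributeString(", "); // "0, 255"
     * </pre>
     *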
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     *
     * @return the string representation of the data values.
     */
    public String toAttributeString(String delimiter) { return toString(delimiter, -1); }

    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter. For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} with three data points.
     *
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     * @param maxItems
     *            The maximum number of Array values to return
     *
     * @return the string representation of the data values.
     */
    public String toAttributeString(String delimiter, int maxItems) { return toString(delimiter, maxItems); }
}