/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the files COPYING and Copyright.html. *
 * COPYING can be found at the root of the source code distribution tree.    *
 * Or, see https://support.hdfgroup.org/products/licenses.html               *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.nc2;

import java.lang.reflect.Array;
import java.math.BigInteger;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import hdf.object.Attribute;
import hdf.object.DataFormat;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.ScalarDS;

/**
 * An attribute is a (name, value) pair of metadata attached to a primary data object such as a
 * dataset, group or named datatype.
 *
 * Like a dataset, an attribute has a name, datatype and dataspace.
 *
 * For more details on attributes, see the <a href=
 * "https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5
 * User's Guide</a>.
 *
 * The following code is an example of an attribute with a 1D integer array of two elements.
 *
 * <pre>
 * // Example of creating a new attribute
 * // The name of the new attribute
 * String name = "Data range";
 * // Creating an unsigned 1-byte integer datatype
 * Datatype type = new Datatype(Datatype.CLASS_INTEGER, // class
 *                              1,                      // size in bytes
 *                              Datatype.ORDER_LE,      // byte order
 *                              Datatype.SIGN_NONE);    // unsigned
 * // 1-D array of size two
 * long[] dims = {2};
 * // The value of the attribute
 * int[] value = {0, 255};
 * // Create a new attribute; parentObj is the data object the attribute is attached to
 * Attribute dataRange = new NC2Attribute(parentObj, name, type, dims);
 * // Set the attribute value
 * dataRange.setValue(value);
 * // See FileFormat.writeAttribute() for how to attach an attribute to an object,
 * &#64;see hdf.object.FileFormat#writeAttribute(HObject, Attribute, boolean)
 * </pre>
 *
 *
 * For an atomic datatype, the value of an Attribute will be a 1D array of integers, floats, or
 * strings. For a compound datatype, it will be a 1D array of strings with field members separated
 * by a comma. For example, "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
 * float} of three data points.
 *
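 * For instance, the compound attribute described above would be held in memory as an array of
 * strings, one entry per data point (a sketch of the layout described here, with illustrative
 * values):
 *
 * <pre>
 * String[] compoundValue = { "0, 10.5", "255, 20.0", "512, 30.0" };
 * </pre>
 *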
 * @see hdf.object.Datatype
 *
 * @version 2.0 4/2/2018
 * @author Peter X. Cao, Jordan T. Henderson
 */
public class NC2Attribute extends ScalarDS implements Attribute
{
    private static final long serialVersionUID = 2072473407027648309L;

    private static final Logger log = LoggerFactory.getLogger(NC2Attribute.class);

    /** The HObject to which this NC2Attribute is attached, Attribute interface */
    protected HObject         parentObject;

    /** additional information and properties for the attribute, Attribute interface */
    private transient Map<String, Object> properties;

    /**
     * Create an attribute with the specified name, data type, and dimension sizes.
     *
     * For a scalar attribute, the dimension size can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general class
     * and is independent of the file format, e.g., the implementation of attribute
     * applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = &quot;CLASS&quot;;
     * String[] classValue = { &quot;IMAGE&quot; };
     * Datatype attrType = null;
     * try {
     *     attrType = new NC2Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * Attribute attr = new NC2Attribute(parentObj, attrName, attrType, attrDims);
     * attr.setValue(classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this Attribute is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for scalar attribute
     *
     * @see hdf.object.Datatype
     */
    public NC2Attribute(HObject parentObj, String attrName, Datatype attrType, long[] attrDims) {
        this(parentObj, attrName, attrType, attrDims, null);
    }

    /**
     * Create an attribute with a specific name and value.
     *
     * For a scalar attribute, the dimension size can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general class
     * and is independent of the file format, e.g., the implementation of attribute
     * applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = &quot;CLASS&quot;;
     * String[] classValue = { &quot;IMAGE&quot; };
     * Datatype attrType = null;
     * try {
     *     attrType = new NC2Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * NC2Attribute attr = new NC2Attribute(parentObj, attrName, attrType, attrDims, classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this Attribute is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for scalar attribute
     * @param attrValue
     *            the value of the attribute, null if no value
     *
     * @see hdf.object.Datatype
     */
    @SuppressWarnings({ "rawtypes", "unchecked", "deprecation" })
    public NC2Attribute(HObject parentObj, String attrName, Datatype attrType, long[] attrDims, Object attrValue) {
        super((parentObj == null) ? null : parentObj.getFileFormat(), attrName,
                (parentObj == null) ? null : parentObj.getFullName(), null);

        log.trace("NC2Attribute: start {}", parentObj);
        this.parentObject = parentObj;

        unsignedConverted = false;

        datatype = attrType;

        if (attrValue != null) {
            data = attrValue;
            originalBuf = attrValue;
            isDataLoaded = true;
        }
        properties = new HashMap<>();

        if (attrDims == null) {
            rank = 1;
            dims = new long[] { 1 };
        }
        else {
            dims = attrDims;
            rank = dims.length;
        }

        selectedDims = new long[rank];
        startDims = new long[rank];
        selectedStride = new long[rank];

        log.trace("attrName={}, attrType={}, attrValue={}, rank={}, isUnsigned={}",
                attrName, getDatatype().getDescription(), data, rank, getDatatype().isUnsigned());

        resetSelection();
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#open()
     */
    @Override
    public long open() {
        long aid = -1;
        long pObjID = -1;

        if (parentObject == null) {
            log.debug("open(): attribute's parent object is null");
            return -1;
        }

        try {
            pObjID = parentObject.open();
            if (pObjID >= 0) {
                if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_NC3))) {
                    log.trace("open(): FILE_TYPE_NC3");
                    /*
                     * TODO: Get type of netcdf3 object this is attached to and retrieve attribute info.
                     */
                }
            }

            log.trace("open(): aid={}", aid);
        }
        catch (Exception ex) {
            log.debug("open(): Failed to open attribute {}: ", getName(), ex);
            aid = -1;
        }
        finally {
            parentObject.close(pObjID);
        }

        return aid;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#close(int)
     */
    @Override
    public void close(long aid) {
        if (aid >= 0) {
            if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_NC3))) {
                log.trace("close(): FILE_TYPE_NC3");
                /*
                 * TODO: Get type of netcdf3 object this is attached to and close attribute.
                 */
            }
        }
    }

    @Override
    public void init() {
        if (inited) {
            resetSelection();
            log.trace("init(): NC2Attribute already inited");
            return;
        }

        if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_NC3))) {
            log.trace("init(): FILE_TYPE_NC3");
            /*
             * TODO: If netcdf3 attribute object needs to init dependent objects.
             */
            inited = true;
        }

        resetSelection();
    }

    /**
     * Reads the data from file.
     *
     * read() reads the data from file to a memory buffer and returns the memory
     * buffer. The dataset object does not hold the memory buffer. To store the
     * memory buffer in the dataset object, one must call getData().
     *
     * By default, the whole dataset is read into memory. Users can also select
     * a subset to read. Subsetting is done in an implicit way.
     *
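     * A minimal usage sketch (the attribute instance and its source file are
     * assumed to exist; names are illustrative):
     *
     * <pre>
     * NC2Attribute attr = ...;          // an attribute obtained from an open file
     * Object buf = attr.read();         // read the values into a memory buffer
     * Object cached = attr.getData();   // read and cache the buffer in the object
     * </pre>
     *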
     * @return the data read from file.
     *
     * @see #getData()
     *
     * @throws Exception
     *             if object can not be read
     * @throws OutOfMemoryError
     *             if memory is exhausted
     */
    @Override
    public Object read() throws Exception, OutOfMemoryError {
        if (!inited)
            init();

        return data;
    }

    /* Implement abstract Dataset */

    /**
     * Writes a memory buffer to the object in the file.
     *
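     * A brief sketch of the call pattern (the buffer contents are illustrative;
     * note that this NC3 implementation currently only updates the in-memory
     * buffer, since writing attributes back to the file is not yet supported):
     *
     * <pre>
     * int[] newValues = { 0, 128 };
     * attr.write(newValues);
     * </pre>
     *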
     * @param buf
     *            the data to write
     *
     * @throws Exception
     *             if data can not be written
     */
    @Override
    public void write(Object buf) throws Exception {
        log.trace("write(Object): start");
        if (!buf.equals(data))
            setData(buf);

        init();

        if (parentObject == null) {
            log.debug("write(Object): parent object is null; nowhere to write attribute to");
            return;
        }
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#copy(hdf.object.Group, java.lang.String, long[], java.lang.Object)
     */
    @Override
    public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff) throws Exception {
        // not supported
        throw new UnsupportedOperationException("copy operation unsupported for NC2.");
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#readBytes()
     */
    @Override
    public byte[] readBytes() throws Exception {
        // not supported
        throw new UnsupportedOperationException("readBytes operation unsupported for NC2.");
    }

    /* Implement interface Attribute */

    /**
     * Returns the HObject to which this Attribute is currently "attached".
     *
     * @return the HObject to which this Attribute is currently "attached".
     */
    public HObject getParentObject() {
        return parentObject;
    }

    /**
     * Sets the HObject to which this Attribute is "attached".
     *
     * @param pObj
     *            the new HObject to which this Attribute is "attached".
     */
    public void setParentObject(HObject pObj) {
        parentObject = pObj;
    }

    /**
     * Sets a property for the attribute.
     *
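     * A minimal usage sketch (the key and value shown are illustrative):
     *
     * <pre>
     * attr.setProperty("units", "meters");
     * String units = (String) attr.getProperty("units");
     * </pre>
     *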
     * @param key the attribute Map key
     * @param value the attribute Map value
     */
    public void setProperty(String key, Object value) {
        properties.put(key, value);
    }

    /**
     * Gets the property for the given key.
     *
     * @param key the attribute Map key
     *
     * @return the property
     */
    public Object getProperty(String key) {
        return properties.get(key);
    }

    /**
     * Gets all property keys.
     *
     * @return the Collection of property keys
     */
    public Collection<String> getPropertyKeys() {
        return properties.keySet();
    }

    /**
     * Returns the name of the attribute.
     *
     * @return the name of the attribute.
     */
    public final String getAttributeName() {
        return getName();
    }

    /**
     * Retrieves the attribute data from the file.
     *
     * @return the attribute data.
     *
     * @throws Exception
     *             if the data can not be retrieved
     */
    public final Object getAttributeData() throws Exception, OutOfMemoryError {
        return getData();
    }

    /**
     * Returns the datatype of the attribute.
     *
     * @return the datatype of the attribute.
     */
    public final Datatype getAttributeDatatype() {
        return getDatatype();
    }

    /**
     * Returns the space type for the attribute. It returns a
     * negative number if it failed to retrieve the type information from
     * the file.
     *
     * @return the space type for the attribute.
     */
    public final int getAttributeSpaceType() {
        return getSpaceType();
    }

    /**
     * Returns the rank (number of dimensions) of the attribute. It returns a
     * negative number if it failed to retrieve the dimension information from
     * the file.
     *
     * @return the number of dimensions of the attribute.
     */
    public final int getAttributeRank() {
        return getRank();
    }

    /**
     * Returns the selected size of the rows and columns of the attribute. It returns a
     * negative number if it failed to retrieve the size information from
     * the file.
     *
     * @return the selected size of the rows and columns of the attribute.
     */
    public final int getAttributePlane() {
        return (int)getWidth() * (int)getHeight();
    }

    /**
     * Returns the array that contains the dimension sizes of the data value of
     * the attribute. It returns null if it failed to retrieve the dimension
     * information from the file.
     *
     * @return the dimension sizes of the attribute.
     */
    public final long[] getAttributeDims() {
        return getDims();
    }

    /**
     * @return true if the data is a single scalar point; otherwise, returns
     *         false.
     */
    public boolean isAttributeScalar() {
        return isScalar();
    }

    /**
     * Not for public use in the future.
     *
     * setData() is not safe to use because it changes the memory buffer
     * of the dataset object. Dataset operations such as write/read
     * will fail if the buffer type or size is changed.
     *
     * @param d  the object data - must be an array of Objects
     */
    public void setAttributeData(Object d) {
        setData(d);
    }

    /**
     * Writes the memory buffer of this attribute to the file.
     *
     * @throws Exception if the buffer can not be written
     */
    public void writeAttribute() throws Exception {
        write();
    }

    /**
     * Writes the given data buffer into this attribute in a file.
     *
     * The data buffer is a vector that contains the data values of compound fields. The data is written
     * into the file as one data blob.
     *
     * @param buf
     *            The vector that contains the data values of compound fields.
     *
     * @throws Exception
     *             If there is an error at the library level.
     */
    public void writeAttribute(Object buf) throws Exception {
        write(buf);
    }

    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter. For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} of three data points.
     *
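     * An illustrative call (the attribute value {0, 255} is hypothetical):
     *
     * <pre>
     * attr.toAttributeString(", ");   // returns a string such as "0, 255"
     * </pre>
     *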
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     *
     * @return the string representation of the data values.
     */
    public String toAttributeString(String delimiter) {
        return toString(delimiter, -1);
    }

    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter. For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} of three data points.
     *
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     * @param maxItems
     *            The maximum number of Array values to return
     *
     * @return the string representation of the data values.
     */
    public String toAttributeString(String delimiter, int maxItems) {
        return toString(delimiter, maxItems);
    }
}