/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the files COPYING and Copyright.html. *
 * COPYING can be found at the root of the source code distribution tree.    *
 * Or, see https://support.hdfgroup.org/products/licenses.html               *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.nc2;

import java.lang.reflect.Array;
import java.math.BigInteger;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import hdf.object.Attribute;
import hdf.object.DataFormat;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.ScalarDS;

/**
 * An attribute is a (name, value) pair of metadata attached to a primary data object such as a
 * dataset, group or named datatype.
 *
 * Like a dataset, an attribute has a name, datatype and dataspace.
 *
 * For more details on attributes, see the <a href=
 * "https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5
 * User's Guide</a>.
 *
 * The following code is an example of an attribute with a 1D integer array of two elements.
 *
 * <pre>
 * // Example of creating a new attribute
 * // The name of the new attribute
 * String name = "Data range";
 * // Creating an unsigned 1-byte integer datatype
 * Datatype type = null;
 * try {
 *     type = new NC2Datatype(Datatype.CLASS_INTEGER, // class
 *                            1,                      // size in bytes
 *                            Datatype.ORDER_LE,      // byte order
 *                            Datatype.SIGN_NONE);    // unsigned
 * }
 * catch (Exception ex) {}
 * // 1-D array of size two
 * long[] dims = {2};
 * // The value of the attribute
 * int[] value = {0, 255};
 * // Create a new attribute; parentObj is the HObject (e.g. a dataset or group)
 * // to which the attribute will be attached
 * NC2Attribute dataRange = new NC2Attribute(parentObj, name, type, dims);
 * // Set the attribute value
 * dataRange.setAttributeData(value);
 * // See FileFormat.writeAttribute() for how to attach an attribute to an object,
 * &#64;see hdf.object.FileFormat#writeAttribute(HObject, Attribute, boolean)
 * </pre>
 *
 *
 * For an atomic datatype, the value of an Attribute will be a 1D array of integers, floats or
 * strings. For a compound datatype, it will be a 1D array of strings with field members separated
 * by a comma. For example, "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
 * float} of three data points.
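 *
 * A minimal sketch of reading such a value back (assuming {@code attr} refers to an
 * existing NC2Attribute of an atomic type):
 *
 * <pre>
 * // getAttributeData() retrieves the attribute value from the file
 * Object rawValue = attr.getAttributeData();
 * if (rawValue instanceof int[]) {
 *     int[] values = (int[]) rawValue;
 *     // e.g. values[0] == 0 and values[1] == 255 for the "Data range" example above
 * }
 * </pre>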
 *
 * @see hdf.object.Datatype
 *
 * @version 2.0 4/2/2018
 * @author Peter X. Cao, Jordan T. Henderson
 */
public class NC2Attribute extends ScalarDS implements Attribute
{
    private static final long serialVersionUID = 2072473407027648309L;

    private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(NC2Attribute.class);

    /** The HObject to which this NC2Attribute is attached, as required by the Attribute interface */
    protected HObject         parentObject;

    /** Additional information and properties for the attribute, as required by the Attribute interface */
    private transient Map<String, Object> properties;

    /**
     * Create an attribute with specified name, data type and dimension sizes.
     *
     * For a scalar attribute, the dimension size can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general class
     * and is independent of the file format, e.g., the implementation of attribute
     * applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = &quot;CLASS&quot;;
     * String[] classValue = { &quot;IMAGE&quot; };
     * Datatype attrType = null;
     * try {
     *     attrType = new NC2Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * Attribute attr = new NC2Attribute(parentObj, attrName, attrType, attrDims);
     * attr.setAttributeData(classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this Attribute is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for scalar attribute
     *
     * @see hdf.object.Datatype
     */
    public NC2Attribute(HObject parentObj, String attrName, Datatype attrType, long[] attrDims) {
        this(parentObj, attrName, attrType, attrDims, null);
    }

    /**
     * Create an attribute with specific name and value.
     *
     * For a scalar attribute, the dimension size can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general class
     * and is independent of the file format, e.g., the implementation of attribute
     * applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = &quot;CLASS&quot;;
     * String[] classValue = { &quot;IMAGE&quot; };
     * Datatype attrType = null;
     * try {
     *     attrType = new NC2Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * NC2Attribute attr = new NC2Attribute(parentObj, attrName, attrType, attrDims, classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this Attribute is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for scalar attribute
     * @param attrValue
     *            the value of the attribute, null if no value
     *
     * @see hdf.object.Datatype
     */
    @SuppressWarnings({ "rawtypes", "unchecked", "deprecation" })
    public NC2Attribute(HObject parentObj, String attrName, Datatype attrType, long[] attrDims, Object attrValue) {
        super((parentObj == null) ? null : parentObj.getFileFormat(), attrName,
                (parentObj == null) ? null : parentObj.getFullName(), null);

        log.trace("NC2Attribute: start {}", parentObj);
        this.parentObject = parentObj;

        unsignedConverted = false;

        datatype = attrType;

        if (attrValue != null) {
            data = attrValue;
            originalBuf = attrValue;
            isDataLoaded = true;
        }
        properties = new HashMap<>();

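        // A null dims array denotes a scalar attribute; represent it as a
        // one-element 1-D array so the selection bookkeeping below stays uniform.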
        if (attrDims == null) {
            rank = 1;
            dims = new long[] { 1 };
        }
        else {
            dims = attrDims;
            rank = dims.length;
        }

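        // Selection arrays are allocated here and initialized by resetSelection() below.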
        selectedDims = new long[rank];
        startDims = new long[rank];
        selectedStride = new long[rank];

        log.trace("attrName={}, attrType={}, attrValue={}, rank={}, isUnsigned={}",
                attrName, getDatatype().getDescription(), data, rank, getDatatype().isUnsigned());

        resetSelection();
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#open()
     */
    @Override
    public long open() {
        long aid = -1;
        long pObjID = -1;

        if (parentObject == null) {
            log.debug("open(): attribute's parent object is null");
            return -1;
        }

        try {
            pObjID = parentObject.open();
            if (pObjID >= 0) {
                if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_NC3))) {
                    log.trace("open(): FILE_TYPE_NC3");
                    /*
                     * TODO: Get type of netcdf3 object this is attached to and retrieve attribute info.
                     */
                }
            }

            log.trace("open(): aid={}", aid);
        }
        catch (Exception ex) {
            log.debug("open(): Failed to open attribute {}: ", getName(), ex);
            aid = -1;
        }
        finally {
            parentObject.close(pObjID);
        }

        return aid;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#close(int)
     */
    @Override
    public void close(long aid) {
        if (aid >= 0) {
            if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_NC3))) {
                log.trace("close(): FILE_TYPE_NC3");
                /*
                 * TODO: Get type of netcdf3 object this is attached to and close attribute.
                 */
            }
        }
    }

    @Override
    public void init() {
        if (inited) {
            resetSelection();
            log.trace("init(): NC2Attribute already inited");
            return;
        }

        if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_NC3))) {
            log.trace("init(): FILE_TYPE_NC3");
            /*
             * TODO: If netcdf3 attribute object needs to init dependent objects.
             */
            inited = true;
        }

        resetSelection();
    }

    /**
     * Reads the data from the file.
     *
     * read() reads the data from the file into a memory buffer and returns the memory
     * buffer. The dataset object does not hold the memory buffer. To store the
     * memory buffer in the dataset object, one must call getData().
     *
     * By default, the whole dataset is read into memory. Users can also select
     * a subset to read. Subsetting is done in an implicit way.
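     *
     * A minimal usage sketch (assuming {@code attr} is an NC2Attribute that has
     * already been created or retrieved from a file):
     *
     * <pre>
     * // read() returns the buffer; getData() additionally caches it in the object
     * Object buffer = attr.read();
     * </pre>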
     *
     * @return the data read from file.
     *
     * @see #getData()
     *
     * @throws Exception
     *             if object can not be read
     * @throws OutOfMemoryError
     *             if memory is exhausted
     */
    @Override
    public Object read() throws Exception, OutOfMemoryError {
        if (!inited)
            init();

        return data;
    }

    /* Implement abstract Dataset */

    /**
     * Writes a memory buffer to the object in the file.
     *
     * @param buf
     *            the data to write
     *
     * @throws Exception
     *             if data can not be written
     */
    @Override
    public void write(Object buf) throws Exception {
        log.trace("function of dataset: write(Object) start");

        if (buf == null) {
            log.debug("write(Object): buf is null; nothing to write");
            return;
        }

        if (!buf.equals(data))
            setData(buf);

        init();

        if (parentObject == null) {
            log.debug("write(Object): parent object is null; nowhere to write attribute to");
            return;
        }
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#copy(hdf.object.Group, java.lang.String, long[], java.lang.Object)
     */
    @Override
    public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff) throws Exception {
        // not supported
        throw new UnsupportedOperationException("copy operation unsupported for NC2.");
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#readBytes()
     */
    @Override
    public byte[] readBytes() throws Exception {
        // not supported
        throw new UnsupportedOperationException("readBytes operation unsupported for NC2.");
    }

    /* Implement abstract ScalarDS */

    /*
     * (non-Javadoc)
     * @see hdf.object.ScalarDS#getPalette()
     */
    @Override
    public byte[][] getPalette() {
        if (palette == null)
            palette = readPalette(0);

        return palette;
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.ScalarDS#readPalette(int)
     */
    @Override
    public byte[][] readPalette(int idx) {
        return null;
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.ScalarDS#getPaletteRefs()
     */
    @Override
    public byte[] getPaletteRefs() {
        return null;
    }

    /* Implement interface Attribute */

    /**
     * Returns the HObject to which this Attribute is currently "attached".
     *
     * @return the HObject to which this Attribute is currently "attached".
     */
    public HObject getParentObject() {
        return parentObject;
    }

    /**
     * Sets the HObject to which this Attribute is "attached".
     *
     * @param pObj
     *            the new HObject to which this Attribute is "attached".
     */
    public void setParentObject(HObject pObj) {
        parentObject = pObj;
    }

    /**
     * Set a property for the attribute.
     *
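     * A minimal sketch of attaching and retrieving extra metadata on the attribute
     * object (the "units" key below is only an illustration):
     *
     * <pre>
     * attr.setProperty("units", "meters");
     * Object units = attr.getProperty("units");            // returns "meters"
     * Collection&lt;String&gt; keys = attr.getPropertyKeys(); // contains "units"
     * </pre>
     *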
     * @param key the attribute Map key
     * @param value the attribute Map value
     */
    public void setProperty(String key, Object value) {
        properties.put(key, value);
    }

    /**
     * Get a property for a given key.
     *
     * @param key the attribute Map key
     *
     * @return the property
     */
    public Object getProperty(String key) {
        return properties.get(key);
    }

    /**
     * Get all property keys.
     *
     * @return the Collection of property keys
     */
    public Collection<String> getPropertyKeys() {
        return properties.keySet();
    }

    /**
     * Returns the name of the object. For example, "Raster Image #2".
     *
     * @return The name of the object.
     */
    public final String getAttributeName() {
        return getName();
    }

    /**
     * Retrieves the attribute data from the file.
     *
     * @return the attribute data.
     *
     * @throws Exception
     *             if the data can not be retrieved
     */
    public final Object getAttributeData() throws Exception, OutOfMemoryError {
        return getData();
    }

    /**
     * Returns the datatype of the attribute.
     *
     * @return the datatype of the attribute.
     */
    public final Datatype getAttributeDatatype() {
        return getDatatype();
    }

    /**
     * Returns the space type for the attribute. It returns a
     * negative number if it failed to retrieve the type information from
     * the file.
     *
     * @return the space type for the attribute.
     */
    public final int getAttributeSpaceType() {
        return getSpaceType();
    }

    /**
     * Returns the rank (number of dimensions) of the attribute. It returns a
     * negative number if it failed to retrieve the dimension information from
     * the file.
     *
     * @return the number of dimensions of the attribute.
     */
    public final int getAttributeRank() {
        return getRank();
    }

    /**
     * Returns the selected size of the rows and columns of the attribute. It returns a
     * negative number if it failed to retrieve the size information from
     * the file.
     *
     * @return the selected size of the rows and columns of the attribute.
     */
    public final int getAttributePlane() {
        return (int)getWidth() * (int)getHeight();
    }

    /**
     * Returns the array that contains the dimension sizes of the data value of
     * the attribute. It returns null if it failed to retrieve the dimension
     * information from the file.
     *
     * @return the dimension sizes of the attribute.
     */
    public final long[] getAttributeDims() {
        return getDims();
    }

    /**
     * Checks whether the attribute data is a single scalar point.
     *
     * @return true if the data is a single scalar point; otherwise, returns
     *         false.
     */
    public boolean isAttributeScalar() {
        return isScalar();
    }

    /**
     * Not for public use in the future.
     *
     * setData() is not safe to use because it changes the memory buffer
     * of the dataset object. Dataset operations such as write/read
     * will fail if the buffer type or size is changed.
     *
     * @param d the object data, which must be an array of Objects
     */
    public void setAttributeData(Object d) {
        setData(d);
    }

    /**
     * Writes the memory buffer of this dataset to the file.
     *
     * @throws Exception if buffer can not be written
     */
    public void writeAttribute() throws Exception {
        write();
    }

    /**
     * Writes the given data buffer into this attribute in a file.
     *
     * The data buffer is a vector that contains the data values of compound fields. The data is written
     * into the file as one data blob.
     *
     * @param buf
     *            The vector that contains the data values of compound fields.
     *
     * @throws Exception
     *             If there is an error at the library level.
     */
    public void writeAttribute(Object buf) throws Exception {
        write(buf);
    }

    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter. For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} of three data points.
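     *
     * A minimal usage sketch (assuming {@code attr} holds the two-element integer
     * value from the class example above):
     *
     * <pre>
     * String text = attr.toAttributeString(", "); // "0, 255"
     * </pre>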
     *
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     *
     * @return the string representation of the data values.
     */
    public String toAttributeString(String delimiter) {
        return toString(delimiter, -1);
    }

    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter. For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} of three data points.
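     *
     * For example (a minimal sketch), limiting the output to the first two values:
     *
     * <pre>
     * String firstTwo = attr.toAttributeString(", ", 2);
     * </pre>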
     *
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     * @param maxItems
     *            The maximum number of Array values to return
     *
     * @return the string representation of the data values.
     */
    public String toAttributeString(String delimiter, int maxItems) {
        return toString(delimiter, maxItems);
    }
}