001/*****************************************************************************
002 * Copyright by The HDF Group.                                               *
003 * Copyright by the Board of Trustees of the University of Illinois.         *
004 * All rights reserved.                                                      *
005 *                                                                           *
006 * This file is part of the HDF Java Products distribution.                  *
007 * The full copyright notice, including terms governing use, modification,   *
008 * and redistribution, is contained in the COPYING file, which can be found  *
009 * at the root of the source code distribution tree,                         *
010 * or in https://www.hdfgroup.org/licenses.                                  *
011 * If you do not have access to either file, you may request a copy from     *
012 * help@hdfgroup.org.                                                        *
013 ****************************************************************************/
014
015package hdf.object.h4;
016
017import java.lang.reflect.Array;
018import java.math.BigInteger;
019import java.util.Arrays;
020import java.util.Collection;
021import java.util.HashMap;
022import java.util.List;
023import java.util.Map;
024
025import org.slf4j.Logger;
026import org.slf4j.LoggerFactory;
027
028import hdf.object.Attribute;
029import hdf.object.CompoundDataFormat;
030import hdf.object.CompoundDS;
031import hdf.object.DataFormat;
032import hdf.object.Dataset;
033import hdf.object.Datatype;
034import hdf.object.FileFormat;
035import hdf.object.Group;
036import hdf.object.HObject;
037import hdf.object.MetaDataContainer;
038
039/**
040 * An attribute is a (name, value) pair of metadata attached to a primary data object such as a dataset, group or named
041 * datatype.
042 *
043 * Like a dataset, an attribute has a name, datatype and dataspace.
044 *
 * For more details on attributes, see the <a href="https://hdfgroup.github.io/hdf5/_h5_a__u_g.html#sec_attribute">HDF5
 * Attributes section of the HDF5 User Guide</a>.
047 *
048 * The following code is an example of an attribute with 1D integer array of two elements.
049 *
050 * <pre>
051 * // Example of creating a new attribute
052 * // The name of the new attribute
053 * String name = "Data range";
054 * // Creating an unsigned 1-byte integer datatype
055 * Datatype type = new Datatype(Datatype.CLASS_INTEGER, // class
056 *                              1,                      // size in bytes
057 *                              Datatype.ORDER_LE,      // byte order
058 *                              Datatype.SIGN_NONE);    // unsigned
059 * // 1-D array of size two
060 * long[] dims = {2};
061 * // The value of the attribute
062 * int[] value = {0, 255};
063 * // Create a new attribute
064 * Attribute dataRange = new Attribute(name, type, dims);
065 * // Set the attribute value
066 * dataRange.setValue(value);
067 * // See FileFormat.writeAttribute() for how to attach an attribute to an object,
068 * &#64;see hdf.object.FileFormat#writeAttribute(HObject, Attribute, boolean)
069 * </pre>
070 *
071 * For a compound datatype, the value of an H4CompoundAttribute will be a 1D array of strings with field members
072 * separated by a comma. For example, "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int, float} of
073 * three data points.
074 *
075 * @see hdf.object.Datatype
076 *
077 * @version 2.0 4/2/2018
078 * @author Peter X. Cao, Jordan T. Henderson
079 */
080public class H4CompoundAttribute extends CompoundDS implements Attribute {
081
082    private static final long serialVersionUID = 2072473407027648309L;
083
084    private static final Logger log = LoggerFactory.getLogger(H4CompoundAttribute.class);
085
086    /** The HObject to which this NC2Attribute is attached, Attribute interface */
087    protected HObject         parentObject;
088
089    /** additional information and properties for the attribute, Attribute interface */
090    private transient Map<String, Object> properties;
091
092    /**
093     * Create an attribute with specified name, data type and dimension sizes.
094     *
095     * For scalar attribute, the dimension size can be either an array of size one
096     * or null, and the rank can be either 1 or zero. Attribute is a general class
097     * and is independent of file format, e.g., the implementation of attribute
098     * applies to both HDF4 and HDF5.
099     *
100     * The following example creates a string attribute with the name "CLASS" and
101     * value "IMAGE".
102     *
103     * <pre>
104     * long[] attrDims = { 1 };
105     * String attrName = &quot;CLASS&quot;;
106     * String[] classValue = { &quot;IMAGE&quot; };
107     * Datatype attrType = null;
108     * try {
109     *     attrType = new H4Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE);
110     * }
111     * catch (Exception ex) {}
112     * Attribute attr = new Attribute(attrName, attrType, attrDims);
113     * attr.setValue(classValue);
114     * </pre>
115     *
116     * @param parentObj
117     *            the HObject to which this Attribute is attached.
118     * @param attrName
119     *            the name of the attribute.
120     * @param attrType
121     *            the datatype of the attribute.
122     * @param attrDims
123     *            the dimension sizes of the attribute, null for scalar attribute
124     *
125     * @see hdf.object.Datatype
126     */
127    public H4CompoundAttribute(HObject parentObj, String attrName, Datatype attrType, long[] attrDims) {
128        this(parentObj, attrName, attrType, attrDims, null);
129    }
130
131    /**
132     * Create an attribute with specific name and value.
133     *
134     * For scalar attribute, the dimension size can be either an array of size one
135     * or null, and the rank can be either 1 or zero. Attribute is a general class
136     * and is independent of file format, e.g., the implementation of attribute
137     * applies to both HDF4 and HDF5.
138     *
139     * The following example creates a string attribute with the name "CLASS" and
140     * value "IMAGE".
141     *
142     * <pre>
143     * long[] attrDims = { 1 };
144     * String attrName = &quot;CLASS&quot;;
145     * String[] classValue = { &quot;IMAGE&quot; };
146     * Datatype attrType = null;
147     * try {
148     *     attrType = new H4Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE);
149     * }
150     * catch (Exception ex) {}
151     * Attribute attr = new Attribute(attrName, attrType, attrDims, classValue);
152     * </pre>
153     *
154     * @param parentObj
155     *            the HObject to which this Attribute is attached.
156     * @param attrName
157     *            the name of the attribute.
158     * @param attrType
159     *            the datatype of the attribute.
160     * @param attrDims
161     *            the dimension sizes of the attribute, null for scalar attribute
162     * @param attrValue
163     *            the value of the attribute, null if no value
164     *
165     * @see hdf.object.Datatype
166     */
167    @SuppressWarnings({ "rawtypes", "unchecked", "deprecation" })
168    public H4CompoundAttribute(HObject parentObj, String attrName, Datatype attrType, long[] attrDims, Object attrValue) {
169        super((parentObj == null) ? null : parentObj.getFileFormat(), attrName,
170                (parentObj == null) ? null : parentObj.getFullName(), null);
171
172        log.trace("H4CompoundAttribute: start {}", parentObj);
173
174        this.parentObject = parentObj;
175
176        datatype = attrType;
177
178        if (attrValue != null) {
179            data = attrValue;
180            originalBuf = attrValue;
181            isDataLoaded = true;
182        }
183        properties = new HashMap();
184
185        if (attrDims == null) {
186            rank = 1;
187            dims = new long[] { 1 };
188        }
189        else {
190            dims = attrDims;
191            rank = dims.length;
192        }
193
194        selectedDims = new long[rank];
195        startDims = new long[rank];
196        selectedStride = new long[rank];
197
198        log.trace("attrName={}, attrType={}, attrValue={}, rank={}, isUnsigned={}",
199                attrName, getDatatype().getDescription(), data, rank, getDatatype().isUnsigned());
200
201        resetSelection();
202    }
203
204    /*
205     * (non-Javadoc)
206     *
207     * @see hdf.object.HObject#open()
208     */
209    @Override
210    public long open() {
211        if (parentObject == null) {
212            log.debug("open(): attribute's parent object is null");
213            return -1;
214        }
215
216        long aid = -1;
217        long pObjID = -1;
218
219        try {
220            pObjID = parentObject.open();
221            if (pObjID >= 0) {
222                if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF4))) {
223                    log.trace("open(): FILE_TYPE_HDF4");
224                    /*
225                     * TODO: Get type of HDF4 object this is attached to and retrieve attribute info.
226                     */
227                }
228            }
229
230            log.trace("open(): aid={}", aid);
231        }
232        catch (Exception ex) {
233            log.debug("open(): Failed to open attribute {}: ", getName(), ex);
234            aid = -1;
235        }
236        finally {
237            parentObject.close(pObjID);
238        }
239
240        return aid;
241    }
242
243    /*
244     * (non-Javadoc)
245     *
246     * @see hdf.object.HObject#close(int)
247     */
248    @Override
249    public void close(long aid) {
250        if (aid >= 0) {
251            if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF4))) {
252                log.trace("close(): FILE_TYPE_HDF4");
253                /*
254                 * TODO: Get type of HDF4 object this is attached to and close attribute.
255                 */
256            }
257        }
258    }
259
260    @Override
261    public void init() {
262        if (inited) {
263            resetSelection();
264            log.trace("init(): Attribute already inited");
265            return;
266        }
267
268        if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF4))) {
269            log.trace("init(): FILE_TYPE_HDF4");
270            /*
271             * TODO: If HDF4 attribute object needs to init dependent objects.
272             */
273            inited = true;
274        }
275
276        resetSelection();
277    }
278
279    /**
280     * Reads the data from file.
281     *
282     * read() reads the data from file to a memory buffer and returns the memory
283     * buffer. The dataset object does not hold the memory buffer. To store the
284     * memory buffer in the dataset object, one must call getData().
285     *
286     * By default, the whole dataset is read into memory. Users can also select
287     * a subset to read. Subsetting is done in an implicit way.
288     *
289     * @return the data read from file.
290     *
291     * @see #getData()
292     *
293     * @throws Exception
294     *             if object can not be read
295     * @throws OutOfMemoryError
296     *             if memory is exhausted
297     */
298    @Override
299    public Object read() throws Exception, OutOfMemoryError {
300        if (!inited)
301            init();
302
303        /*
304         * TODO: For now, convert a compound Attribute's data (String[]) into a List for
305         * convenient processing
306         */
307        if (getDatatype().isCompound() && !(data instanceof List)) {
308            List<String> valueList = Arrays.asList((String[]) data);
309
310            data = valueList;
311        }
312
313        return data;
314    }
315
316    /* Implement abstract Dataset */
317
318    /**
319     * Writes a memory buffer to the object in the file.
320     *
321     * @param buf
322     *            The buffer that contains the data values.
323     *
324     * @throws Exception
325     *             if data can not be written
326     */
327    @Override
328    public void write(Object buf) throws Exception {
329        log.trace("function of dataset: write(Object) start");
330        if (!buf.equals(data))
331            setData(buf);
332
333        init();
334
335        if (parentObject == null) {
336            log.debug("write(Object): parent object is null; nowhere to write attribute to");
337            return;
338        }
339
340        ((MetaDataContainer) getParentObject()).writeMetadata(this);
341    }
342
343    /*
344     * (non-Javadoc)
345     * @see hdf.object.Dataset#copy(hdf.object.Group, java.lang.String, long[], java.lang.Object)
346     */
347    @Override
348    public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff) throws Exception {
349        // not supported
350        throw new UnsupportedOperationException("copy operation unsupported for H4.");
351    }
352
353    /*
354     * (non-Javadoc)
355     * @see hdf.object.Dataset#readBytes()
356     */
357    @Override
358    public byte[] readBytes() throws Exception {
359        // not supported
360        throw new UnsupportedOperationException("readBytes operation unsupported for H4.");
361    }
362
363    /**
364     * Given an array of bytes representing a compound Datatype and a start index
365     * and length, converts len number of bytes into the correct Object type and
366     * returns it.
367     *
368     * @param data
369     *            The byte array representing the data of the compound Datatype
370     * @param data_type
371     *            The type of data to convert the bytes to
372     * @param start
373     *            The start index of the bytes to get
374     * @param len
375     *            The number of bytes to convert
376     * @return The converted type of the bytes
377     */
378    protected Object convertCompoundByteMember(byte[] data, long data_type, long start, long len) {
379        return null;
380    }
381
382    /**
383     * Converts the data values of this data object to appropriate Java integers if
384     * they are unsigned integers.
385     *
386     * @see hdf.object.Dataset#convertToUnsignedC(Object)
387     * @see hdf.object.Dataset#convertFromUnsignedC(Object, Object)
388     *
389     * @return the converted data buffer.
390     */
391    @Override
392    public Object convertFromUnsignedC() {
393        throw new UnsupportedOperationException("H5CompoundDS:convertFromUnsignedC Unsupported operation.");
394    }
395
396    /**
397     * Converts Java integer data values of this data object back to unsigned C-type
398     * integer data if they are unsigned integers.
399     *
400     * @see hdf.object.Dataset#convertToUnsignedC(Object)
401     * @see hdf.object.Dataset#convertToUnsignedC(Object, Object)
402     *
403     * @return the converted data buffer.
404     */
405    @Override
406    public Object convertToUnsignedC() {
407        throw new UnsupportedOperationException("H5CompoundDS:convertToUnsignedC Unsupported operation.");
408    }
409
410    /* Implement interface Attribute */
411
412    /**
413     * Returns the HObject to which this Attribute is currently "attached".
414     *
415     * @return the HObject to which this Attribute is currently "attached".
416     */
417    public HObject getParentObject() {
418        return parentObject;
419    }
420
421    /**
422     * Sets the HObject to which this Attribute is "attached".
423     *
424     * @param pObj
425     *            the new HObject to which this Attribute is "attached".
426     */
427    public void setParentObject(HObject pObj) {
428        parentObject = pObj;
429    }
430
431    /**
432     * set a property for the attribute.
433     *
434     * @param key the attribute Map key
435     * @param value the attribute Map value
436     */
437    public void setProperty(String key, Object value) {
438        properties.put(key, value);
439    }
440
441    /**
442     * get a property for a given key.
443     *
444     * @param key the attribute Map key
445     *
446     * @return the property
447     */
448    public Object getProperty(String key) {
449        return properties.get(key);
450    }
451
452    /**
453     * get all property keys.
454     *
455     * @return the Collection of property keys
456     */
457    public Collection<String> getPropertyKeys() {
458        return properties.keySet();
459    }
460
461    /**
462     * Returns the name of the object. For example, "Raster Image #2".
463     *
464     * @return The name of the object.
465     */
466    public final String getAttributeName() {
467        return getName();
468    }
469
470    /**
471     * Retrieves the attribute data from the file.
472     *
473     * @return the attribute data.
474     *
475     * @throws Exception
476     *             if the data can not be retrieved
477     */
478    public final Object getAttributeData() throws Exception, OutOfMemoryError {
479        return getData();
480    }
481
482    /**
483     * Returns the datatype of the attribute.
484     *
485     * @return the datatype of the attribute.
486     */
487    public final Datatype getAttributeDatatype() {
488        return getDatatype();
489    }
490
491    /**
492     * Returns the space type for the attribute. It returns a
493     * negative number if it failed to retrieve the type information from
494     * the file.
495     *
496     * @return the space type for the attribute.
497     */
498    public final int getAttributeSpaceType() {
499        return getSpaceType();
500    }
501
502    /**
503     * Returns the rank (number of dimensions) of the attribute. It returns a
504     * negative number if it failed to retrieve the dimension information from
505     * the file.
506     *
507     * @return the number of dimensions of the attribute.
508     */
509    public final int getAttributeRank() {
510        return getRank();
511    }
512
513    /**
514     * Returns the selected size of the rows and columns of the attribute. It returns a
515     * negative number if it failed to retrieve the size information from
516     * the file.
517     *
518     * @return the selected size of the rows and colums of the attribute.
519     */
520    public final int getAttributePlane() {
521        return (int)getWidth() * (int)getHeight();
522    }
523
524    /**
525     * Returns the array that contains the dimension sizes of the data value of
526     * the attribute. It returns null if it failed to retrieve the dimension
527     * information from the file.
528     *
529     * @return the dimension sizes of the attribute.
530     */
531    public final long[] getAttributeDims() {
532        return getDims();
533    }
534
535    /**
536     * @return true if the dataspace is a NULL; otherwise, returns false.
537     */
538    @Override
539    public boolean isAttributeNULL() {
540        return isNULL();
541    }
542
543    /**
544     * @return true if the data is a single scalar point; otherwise, returns false.
545     */
546    public boolean isAttributeScalar() {
547        return isScalar();
548    }
549
550    /**
551     * Not for public use in the future.
552     *
553     * setData() is not safe to use because it changes memory buffer
554     * of the dataset object. Dataset operations such as write/read
555     * will fail if the buffer type or size is changed.
556     *
557     * @param d  the object data -must be an array of Objects
558     */
559    public void setAttributeData(Object d) {
560        setData(d);
561    }
562
563    /**
564     * Writes the memory buffer of this dataset to file.
565     *
566     * @throws Exception if buffer can not be written
567     */
568    public void writeAttribute() throws Exception {
569        write();
570    }
571
572    /**
573     * Writes the given data buffer into this attribute in a file.
574     *
575     * The data buffer is a vector that contains the data values of compound fields. The data is written
576     * into file as one data blob.
577     *
578     * @param buf
579     *            The vector that contains the data values of compound fields.
580     *
581     * @throws Exception
582     *             If there is an error at the library level.
583     */
584    public void writeAttribute(Object buf) throws Exception {
585        write(buf);
586    }
587
588    /**
589     * Returns a string representation of the data value. For
590     * example, "0, 255".
591     *
592     * For a compound datatype, it will be a 1D array of strings with field
593     * members separated by the delimiter. For example,
594     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
595     * float} of three data points.
596     *
597     * @param delimiter
598     *            The delimiter used to separate individual data points. It
599     *            can be a comma, semicolon, tab or space. For example,
600     *            toString(",") will separate data by commas.
601     *
602     * @return the string representation of the data values.
603     */
604    public String toAttributeString(String delimiter) {
605        return toString(delimiter, -1);
606    }
607
608    /**
609     * Returns a string representation of the data value. For
610     * example, "0, 255".
611     *
612     * For a compound datatype, it will be a 1D array of strings with field
613     * members separated by the delimiter. For example,
614     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
615     * float} of three data points.
616     *
617     * @param delimiter
618     *            The delimiter used to separate individual data points. It
619     *            can be a comma, semicolon, tab or space. For example,
620     *            toString(",") will separate data by commas.
621     * @param maxItems
622     *            The maximum number of Array values to return
623     *
624     * @return the string representation of the data values.
625     */
626    public String toAttributeString(String delimiter, int maxItems) {
627        return toString(delimiter, maxItems);
628    }
629}