/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the COPYING file, which can be found  *
 * at the root of the source code distribution tree,                         *
 * or in https://www.hdfgroup.org/licenses.                                  *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.h4;

import java.lang.reflect.Array;
import java.math.BigInteger;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import hdf.object.Attribute;
import hdf.object.CompoundDS;
import hdf.object.CompoundDataFormat;
import hdf.object.DataFormat;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.MetaDataContainer;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * An attribute is a (name, value) pair of metadata attached to a primary data object such as a dataset, group
 * or named datatype.
 *
 * Like a dataset, an attribute has a name, datatype and dataspace.
 *
 * For more details on attributes, see <a
 * href="https://hdfgroup.github.io/hdf5/_h5_a__u_g.html#sec_attribute">HDF5 Attributes in the HDF5 User Guide</a>.
 *
 * The following code is an example of creating an attribute with a 1D integer array of two elements.
 *
 * <pre>
 * // Example of creating a new attribute
 * // The name of the new attribute
 * String name = "Data range";
 * // Creating an unsigned 1-byte integer datatype
 * Datatype type = new H4Datatype(Datatype.CLASS_INTEGER, // class
 *                                1,                      // size in bytes
 *                                Datatype.ORDER_LE,      // byte order
 *                                Datatype.SIGN_NONE);    // unsigned
 * // 1-D array of size two
 * long[] dims = {2};
 * // The value of the attribute
 * int[] value = {0, 255};
 * // Create a new attribute attached to an existing HObject (parentObj)
 * H4CompoundAttribute dataRange = new H4CompoundAttribute(parentObj, name, type, dims);
 * // Set the attribute value
 * dataRange.setAttributeData(value);
 * // See FileFormat.writeAttribute() for how to attach an attribute to an object.
 * &#64;see hdf.object.FileFormat#writeAttribute(HObject, Attribute, boolean)
 * </pre>
 *
 * For a compound datatype, the value of an H4CompoundAttribute will be a 1D array of strings with field
 * members separated by a comma. For example, "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of
 * {int, float} of three data points.
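 *
 * For example, the field strings of a compound attribute might be printed as follows (a minimal sketch;
 * "compoundAttr" is an illustrative variable holding an H4CompoundAttribute whose data has already been
 * read):
 *
 * <pre>
 * try {
 *     Object rawValue = compoundAttr.getAttributeData();
 *     // For a compound attribute the buffer is a String[] or a List of field strings such as "{0, 10.5}"
 *     if (rawValue instanceof java.util.List) {
 *         for (Object row : (java.util.List&lt;?&gt;) rawValue)
 *             System.out.println(row);
 *     }
 *     else if (rawValue instanceof String[]) {
 *         for (String row : (String[]) rawValue)
 *             System.out.println(row);
 *     }
 * }
 * catch (Exception ex) {}
 * </pre>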
 *
 * @see hdf.object.Datatype
 *
 * @version 2.0 4/2/2018
 * @author Peter X. Cao, Jordan T. Henderson
 */
public class H4CompoundAttribute extends CompoundDS implements Attribute {

    private static final long serialVersionUID = 2072473407027648309L;

    private static final Logger log = LoggerFactory.getLogger(H4CompoundAttribute.class);

    /** The HObject to which this H4CompoundAttribute is attached, Attribute interface */
    protected HObject parentObject;

    /** Additional information and properties for the attribute, Attribute interface */
    private transient Map<String, Object> properties;

    /**
     * Create an attribute with a specified name, datatype and dimension sizes.
     *
     * For a scalar attribute, the dimension size can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general interface
     * and is independent of the file format, i.e., the implementation of Attribute
     * applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = &quot;CLASS&quot;;
     * String[] classValue = { &quot;IMAGE&quot; };
     * Datatype attrType = null;
     * try {
     *     attrType = new H4Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE,
     *                               Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * H4CompoundAttribute attr = new H4CompoundAttribute(parentObj, attrName, attrType, attrDims);
     * attr.setAttributeData(classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this Attribute is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for scalar attribute
     *
     * @see hdf.object.Datatype
     */
    public H4CompoundAttribute(HObject parentObj, String attrName, Datatype attrType, long[] attrDims)
    {
        this(parentObj, attrName, attrType, attrDims, null);
    }

    /**
     * Create an attribute with a specified name and value.
     *
     * For a scalar attribute, the dimension size can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general interface
     * and is independent of the file format, i.e., the implementation of Attribute
     * applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = &quot;CLASS&quot;;
     * String[] classValue = { &quot;IMAGE&quot; };
     * Datatype attrType = null;
     * try {
     *     attrType = new H4Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE,
     *                               Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * H4CompoundAttribute attr = new H4CompoundAttribute(parentObj, attrName, attrType, attrDims, classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this Attribute is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for scalar attribute
     * @param attrValue
     *            the value of the attribute, null if no value
     *
     * @see hdf.object.Datatype
     */
    @SuppressWarnings({"rawtypes", "unchecked", "deprecation"})
    public H4CompoundAttribute(HObject parentObj, String attrName, Datatype attrType, long[] attrDims,
                               Object attrValue)
    {
        super((parentObj == null) ? null : parentObj.getFileFormat(), attrName,
              (parentObj == null) ? null : parentObj.getFullName(), null);

        log.trace("H4CompoundAttribute: start {}", parentObj);

        this.parentObject = parentObj;

        datatype = attrType;

        if (attrValue != null) {
            data         = attrValue;
            originalBuf  = attrValue;
            isDataLoaded = true;
        }
        properties = new HashMap();

        if (attrDims == null) {
            rank = 1;
            dims = new long[] {1};
        }
        else {
            dims = attrDims;
            rank = dims.length;
        }

        selectedDims   = new long[rank];
        startDims      = new long[rank];
        selectedStride = new long[rank];

        log.trace("attrName={}, attrType={}, attrValue={}, rank={}, isUnsigned={}", attrName,
                  getDatatype().getDescription(), data, rank, getDatatype().isUnsigned());

        resetSelection();
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#open()
     */
    @Override
    public long open()
    {
        if (parentObject == null) {
            log.debug("open(): attribute's parent object is null");
            return -1;
        }

        long aid    = -1;
        long pObjID = -1;

        try {
            pObjID = parentObject.open();
            if (pObjID >= 0) {
                if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF4))) {
                    log.trace("open(): FILE_TYPE_HDF4");
                    /*
                     * TODO: Get type of HDF4 object this is attached to and retrieve attribute info.
                     */
                }
            }

            log.trace("open(): aid={}", aid);
        }
        catch (Exception ex) {
            log.debug("open(): Failed to open attribute {}: ", getName(), ex);
            aid = -1;
        }
        finally {
            parentObject.close(pObjID);
        }

        return aid;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#close(int)
     */
    @Override
    public void close(long aid)
    {
        if (aid >= 0) {
            if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF4))) {
                log.trace("close(): FILE_TYPE_HDF4");
                /*
                 * TODO: Get type of HDF4 object this is attached to and close attribute.
                 */
            }
        }
    }

    @Override
    public void init()
    {
        if (inited) {
            resetSelection();
            log.trace("init(): Attribute already inited");
            return;
        }

        if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF4))) {
            log.trace("init(): FILE_TYPE_HDF4");
            /*
             * TODO: If HDF4 attribute object needs to init dependent objects.
             */
            inited = true;
        }

        resetSelection();
    }

    /**
     * Reads the data from file.
     *
     * read() reads the data from file to a memory buffer and returns the memory
     * buffer. The dataset object does not hold the memory buffer. To store the
     * memory buffer in the dataset object, one must call getData().
     *
     * By default, the whole dataset is read into memory. Users can also select
     * a subset to read. Subsetting is done in an implicit way.
     *
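     * A minimal usage sketch (assumes "attr" is an H4CompoundAttribute read from an open file; the
     * variable name is illustrative):
     *
     * <pre>
     * try {
     *     Object value = attr.read(); // for a compound attribute, a List of field strings
     * }
     * catch (Exception ex) {}
     * </pre>
     *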
     * @return the data read from file.
     *
     * @see #getData()
     *
     * @throws Exception
     *             if object can not be read
     * @throws OutOfMemoryError
     *             if memory is exhausted
     */
    @Override
    public Object read() throws Exception, OutOfMemoryError
    {
        if (!inited)
            init();

        /*
         * TODO: For now, convert a compound Attribute's data (String[]) into a List for
         * convenient processing
         */
        if (getDatatype().isCompound() && !(data instanceof List)) {
            List<String> valueList = Arrays.asList((String[])data);

            data = valueList;
        }

        return data;
    }

    /* Implement abstract Dataset */

    /**
     * Writes a memory buffer to the object in the file.
     *
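     * A minimal usage sketch (assumes "attr" is an H4CompoundAttribute attached to a parent object and
     * "newValue" matches the attribute's datatype and size; names are illustrative):
     *
     * <pre>
     * try {
     *     attr.write(newValue); // replaces the memory buffer and writes it through the parent object
     * }
     * catch (Exception ex) {}
     * </pre>
     *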
     * @param buf
     *            The buffer that contains the data values.
     *
     * @throws Exception
     *             if data can not be written
     */
    @Override
    public void write(Object buf) throws Exception
    {
        log.trace("write(Object): start");
        if (!buf.equals(data))
            setData(buf);

        init();

        if (parentObject == null) {
            log.debug("write(Object): parent object is null; nowhere to write attribute to");
            return;
        }

        ((MetaDataContainer)getParentObject()).writeMetadata(this);
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#copy(hdf.object.Group, java.lang.String, long[], java.lang.Object)
     */
    @Override
    public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff) throws Exception
    {
        // not supported
        throw new UnsupportedOperationException("copy operation unsupported for H4.");
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#readBytes()
     */
    @Override
    public byte[] readBytes() throws Exception
    {
        // not supported
        throw new UnsupportedOperationException("readBytes operation unsupported for H4.");
    }

    /**
     * Given an array of bytes representing a compound Datatype and a start index
     * and length, converts len number of bytes into the correct Object type and
     * returns it.
     *
     * @param data
     *            The byte array representing the data of the compound Datatype
     * @param data_type
     *            The type of data to convert the bytes to
     * @param start
     *            The start index of the bytes to get
     * @param len
     *            The number of bytes to convert
     * @return The converted Object, or null if the conversion is not implemented
     */
    protected Object convertCompoundByteMember(byte[] data, long data_type, long start, long len)
    {
        // Not implemented for HDF4 compound attributes; no conversion is performed.
        return null;
    }

    /**
     * Converts the data values of this data object to appropriate Java integers if
     * they are unsigned integers.
     *
     * @see hdf.object.Dataset#convertToUnsignedC(Object)
     * @see hdf.object.Dataset#convertFromUnsignedC(Object, Object)
     *
     * @return the converted data buffer.
     */
    @Override
    public Object convertFromUnsignedC()
    {
        throw new UnsupportedOperationException("H4CompoundAttribute:convertFromUnsignedC Unsupported operation.");
    }

    /**
     * Converts Java integer data values of this data object back to unsigned C-type
     * integer data if they are unsigned integers.
     *
     * @see hdf.object.Dataset#convertToUnsignedC(Object)
     * @see hdf.object.Dataset#convertToUnsignedC(Object, Object)
     *
     * @return the converted data buffer.
     */
    @Override
    public Object convertToUnsignedC()
    {
        throw new UnsupportedOperationException("H4CompoundAttribute:convertToUnsignedC Unsupported operation.");
    }

    /* Implement interface Attribute */

    /**
     * Returns the HObject to which this Attribute is currently "attached".
     *
     * @return the HObject to which this Attribute is currently "attached".
     */
    public HObject getParentObject() { return parentObject; }

    /**
     * Sets the HObject to which this Attribute is "attached".
     *
     * @param pObj
     *            the new HObject to which this Attribute is "attached".
     */
    public void setParentObject(HObject pObj) { parentObject = pObj; }

    /**
     * Set a property for the attribute.
     *
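     * A minimal usage sketch (the key and value shown are illustrative):
     *
     * <pre>
     * attr.setProperty(&quot;units&quot;, &quot;meters&quot;);
     * Object units = attr.getProperty(&quot;units&quot;);
     * </pre>
     *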
     * @param key the attribute Map key
     * @param value the attribute Map value
     */
    public void setProperty(String key, Object value) { properties.put(key, value); }

    /**
     * Get a property for a given key.
     *
     * @param key the attribute Map key
     *
     * @return the property
     */
    public Object getProperty(String key) { return properties.get(key); }

    /**
     * Get all property keys.
     *
     * @return the Collection of property keys
     */
    public Collection<String> getPropertyKeys() { return properties.keySet(); }

    /**
     * Returns the name of the object. For example, "Raster Image #2".
     *
     * @return The name of the object.
     */
    public final String getAttributeName() { return getName(); }

    /**
     * Retrieves the attribute data from the file.
     *
     * @return the attribute data.
     *
     * @throws Exception
     *             if the data can not be retrieved
     */
    public final Object getAttributeData() throws Exception, OutOfMemoryError { return getData(); }

    /**
     * Returns the datatype of the attribute.
     *
     * @return the datatype of the attribute.
     */
    public final Datatype getAttributeDatatype() { return getDatatype(); }

    /**
     * Returns the space type for the attribute. It returns a
     * negative number if it failed to retrieve the type information from
     * the file.
     *
     * @return the space type for the attribute.
     */
    public final int getAttributeSpaceType() { return getSpaceType(); }

    /**
     * Returns the rank (number of dimensions) of the attribute. It returns a
     * negative number if it failed to retrieve the dimension information from
     * the file.
     *
     * @return the number of dimensions of the attribute.
     */
    public final int getAttributeRank() { return getRank(); }

    /**
     * Returns the selected size of the rows and columns of the attribute. It returns a
     * negative number if it failed to retrieve the size information from
     * the file.
     *
     * @return the selected size of the rows and columns of the attribute.
     */
    public final int getAttributePlane() { return (int)getWidth() * (int)getHeight(); }

    /**
     * Returns the array that contains the dimension sizes of the data value of
     * the attribute. It returns null if it failed to retrieve the dimension
     * information from the file.
     *
     * @return the dimension sizes of the attribute.
     */
    public final long[] getAttributeDims() { return getDims(); }

    /**
     * @return true if the attribute's dataspace is NULL; otherwise, returns false.
     */
    @Override
    public boolean isAttributeNULL()
    {
        return isNULL();
    }

    /**
     * @return true if the data is a single scalar point; otherwise, returns false.
     */
    public boolean isAttributeScalar() { return isScalar(); }

    /**
     * Not for public use in the future.
     *
     * setData() is not safe to use because it changes the memory buffer
     * of the dataset object. Dataset operations such as write/read
     * will fail if the buffer type or size is changed.
     *
     * @param d  the object data - must be an array of Objects
     */
    public void setAttributeData(Object d) { setData(d); }

    /**
     * Writes the memory buffer of this dataset to file.
     *
     * @throws Exception if buffer can not be written
     */
    public void writeAttribute() throws Exception { write(); }

    /**
     * Writes the given data buffer into this attribute in a file.
     *
     * The data buffer is a vector that contains the data values of compound fields. The data is written
     * into file as one data blob.
     *
     * @param buf
     *            The vector that contains the data values of compound fields.
     *
     * @throws Exception
     *             If there is an error at the library level.
     */
    public void writeAttribute(Object buf) throws Exception { write(buf); }

    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter. For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} of three data points.
     *
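     * For example (the output shown is illustrative and depends on the data read):
     *
     * <pre>
     * String text = attr.toAttributeString(&quot;,&quot;);
     * // text might look like "{0, 10.5},{255, 20.0},{512, 30.0}"
     * </pre>
     *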
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     *
     * @return the string representation of the data values.
     */
    public String toAttributeString(String delimiter) { return toString(delimiter, -1); }

    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter. For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} of three data points.
     *
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     * @param maxItems
     *            The maximum number of Array values to return
     *
     * @return the string representation of the data values.
     */
    public String toAttributeString(String delimiter, int maxItems) { return toString(delimiter, maxItems); }
}