/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the COPYING file, which can be found  *
 * at the root of the source code distribution tree,                         *
 * or in https://www.hdfgroup.org/licenses.                                  *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.fits;

import java.lang.reflect.Array;
import java.math.BigInteger;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import hdf.object.Attribute;
import hdf.object.DataFormat;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.ScalarDS;

/**
 * An attribute is a (name, value) pair of metadata attached to a primary data object such as a dataset, group or
 * named datatype.
 *
 * Like a dataset, an attribute has a name, a datatype and a dataspace.
 *
 * For more details on attributes, see <a href="https://hdfgroup.github.io/hdf5/_h5_a__u_g.html#sec_attribute">HDF5
 * Attributes</a> in the HDF5 User Guide.
 *
 * The following code is an example of an attribute with a 1D integer array of two elements.
 *
 * <pre>
 * // Example of creating a new attribute
 * // The name of the new attribute
 * String name = "Data range";
 * // Creating an unsigned 1-byte integer datatype
 * Datatype type = new Datatype(Datatype.CLASS_INTEGER, // class
 *                              1,                      // size in bytes
 *                              Datatype.ORDER_LE,      // byte order
 *                              Datatype.SIGN_NONE);    // unsigned
 * // 1-D array of size two
 * long[] dims = {2};
 * // The value of the attribute
 * int[] value = {0, 255};
 * // Create a new attribute; parentObj is the HObject to which the attribute is attached
 * FitsAttribute dataRange = new FitsAttribute(parentObj, name, type, dims);
 * // Set the attribute value
 * dataRange.setValue(value);
 * // See FileFormat.writeAttribute() for how to attach an attribute to an object,
 * &#64;see hdf.object.FileFormat#writeAttribute(HObject, Attribute, boolean)
 * </pre>
 *
 * For an atomic datatype, the value of a FitsAttribute will be a 1D array of integers, floats or strings. For a
 * compound datatype, it will be a 1D array of strings with field members separated by a comma. For example, "{0, 10.5},
 * {255, 20.0}, {512, 30.0}" is a compound attribute of {int, float} with three data points.
 *
 * @see hdf.object.Datatype
 *
 * @version 2.0 4/2/2018
 * @author Peter X. Cao, Jordan T. Henderson
 */
public class FitsAttribute extends ScalarDS implements Attribute {

    private static final long serialVersionUID = 2072473407027648309L;

    private static final Logger log = LoggerFactory.getLogger(FitsAttribute.class);

    /** The HObject to which this FitsAttribute is attached, Attribute interface */
    protected HObject         parentObject;

    /** additional information and properties for the attribute, Attribute interface */
    private transient Map<String, Object> properties;

    /**
     * Create an attribute with the specified name, datatype and dimension sizes.
     *
     * For a scalar attribute, the dimension size can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general class
     * that is independent of the file format, e.g., the implementation of Attribute
     * applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = &quot;CLASS&quot;;
     * String[] classValue = { &quot;IMAGE&quot; };
     * Datatype attrType = null;
     * try {
     *     attrType = new FitsDatatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * // parentObj is the HObject to which the attribute is attached
     * FitsAttribute attr = new FitsAttribute(parentObj, attrName, attrType, attrDims);
     * attr.setValue(classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this Attribute is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for scalar attribute
     *
     * @see hdf.object.Datatype
     */
    public FitsAttribute(HObject parentObj, String attrName, Datatype attrType, long[] attrDims) {
        this(parentObj, attrName, attrType, attrDims, null);
    }

    /**
     * Create an attribute with the specified name and value.
     *
     * For a scalar attribute, the dimension size can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general class
     * that is independent of the file format, e.g., the implementation of Attribute
     * applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = &quot;CLASS&quot;;
     * String[] classValue = { &quot;IMAGE&quot; };
     * Datatype attrType = null;
     * try {
     *     attrType = new FitsDatatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * // parentObj is the HObject to which the attribute is attached
     * FitsAttribute attr = new FitsAttribute(parentObj, attrName, attrType, attrDims, classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this FitsAttribute is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for scalar attribute
     * @param attrValue
     *            the value of the attribute, null if no value
     *
     * @see hdf.object.Datatype
     */
    @SuppressWarnings({ "rawtypes", "unchecked", "deprecation" })
    public FitsAttribute(HObject parentObj, String attrName, Datatype attrType, long[] attrDims, Object attrValue) {
        super((parentObj == null) ? null : parentObj.getFileFormat(), attrName,
                (parentObj == null) ? null : parentObj.getFullName(), null);

        log.trace("FitsAttribute: start {}", parentObj);
        this.parentObject = parentObj;

        unsignedConverted = false;

        datatype = attrType;

        if (attrValue != null) {
            data = attrValue;
            originalBuf = attrValue;
            isDataLoaded = true;
        }
        properties = new HashMap();

        if (attrDims == null) {
            rank = 1;
            dims = new long[] { 1 };
        }
        else {
            dims = attrDims;
            rank = dims.length;
        }

        selectedDims = new long[rank];
        startDims = new long[rank];
        selectedStride = new long[rank];

        log.trace("attrName={}, attrType={}, attrValue={}, rank={}, isUnsigned={}",
                attrName, getDatatype().getDescription(), data, rank, getDatatype().isUnsigned());

        resetSelection();
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#open()
     */
    @Override
    public long open() {
        if (parentObject == null) {
            log.debug("open(): attribute's parent object is null");
            return -1;
        }

        return -1;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#close(int)
     */
    @Override
    public void close(long aid) {
    }

    @Override
    public void init() {
        if (inited) {
            resetSelection();
            log.trace("init(): FitsAttribute already inited");
            return;
        }
    }

    /**
     * Reads the data from file.
     *
     * read() reads the data from file to a memory buffer and returns the memory
     * buffer. The dataset object does not hold the memory buffer. To store the
     * memory buffer in the dataset object, one must call getData().
     *
     * By default, the whole dataset is read into memory. Users can also select
     * a subset to read. Subsetting is done in an implicit way.
     *
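     * A minimal usage sketch ({@code attr} stands for any FitsAttribute; the variable names are illustrative only):
     *
     * <pre>
     * Object buf = attr.read();       // read the attribute value into a memory buffer
     * Object cached = attr.getData(); // getData() reads the value if needed and keeps it in the object
     * </pre>
     *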
     * @return the data read from file.
     *
     * @see #getData()
     *
     * @throws Exception
     *             if the object cannot be read
     * @throws OutOfMemoryError
     *             if memory is exhausted
     */
    @Override
    public Object read() throws Exception, OutOfMemoryError {
        if (!inited) init();

        return data;
    }

    /* Implement abstract Dataset */

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#copy(hdf.object.Group, java.lang.String, long[], java.lang.Object)
     */
    @Override
    public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff)
            throws Exception {
        // not supported
        throw new UnsupportedOperationException("copy operation unsupported for FITS.");
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#readBytes()
     */
    @Override
    public byte[] readBytes() throws Exception {
        // not supported
        throw new UnsupportedOperationException("readBytes operation unsupported for FITS.");
    }

    /**
     * Writes a memory buffer to the object in the file.
     *
     * @param buf
     *            the data to write
     *
     * @throws Exception
     *             if the data cannot be written
     */
    @Override
    public void write(Object buf) throws Exception {
        // not supported
        throw new UnsupportedOperationException("write operation unsupported for FITS.");
    }

    /**
     * Returns the HObject to which this Attribute is currently "attached".
     *
     * @return the HObject to which this Attribute is currently "attached".
     */
    public HObject getParentObject() {
        return parentObject;
    }

    /**
     * Sets the HObject to which this Attribute is "attached".
     *
     * @param pObj
     *            the new HObject to which this Attribute is "attached".
     */
    public void setParentObject(HObject pObj) {
        parentObject = pObj;
    }

    /**
     * Set a property for the attribute.
     *
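     * A minimal usage sketch ({@code attr} is any FitsAttribute; the key and value shown are
     * application-defined examples, not keys defined by this library):
     *
     * <pre>
     * attr.setProperty("unit", "counts");        // store an application-defined property
     * Object unit = attr.getProperty("unit");    // retrieve it later by key
     * for (String key : attr.getPropertyKeys())  // or enumerate all stored keys
     *     System.out.println(key);
     * </pre>
     *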
     * @param key the attribute Map key
     * @param value the attribute Map value
     */
    public void setProperty(String key, Object value) {
        properties.put(key, value);
    }

    /**
     * Get a property for a given key.
     *
     * @param key the attribute Map key
     *
     * @return the property
     */
    public Object getProperty(String key) {
        return properties.get(key);
    }

    /**
     * Get all property keys.
     *
     * @return the Collection of property keys
     */
    public Collection<String> getPropertyKeys() {
        return properties.keySet();
    }

    /**
     * Returns the name of the attribute. For example, "Data range".
     *
     * @return The name of the attribute.
     */
    public final String getAttributeName() {
        return getName();
    }

    /**
     * Retrieves the attribute data from the file.
     *
     * @return the attribute data.
     *
     * @throws Exception
     *             if the data cannot be retrieved
     */
    public final Object getAttributeData() throws Exception, OutOfMemoryError {
        return getData();
    }

    /**
     * Returns the datatype of the attribute.
     *
     * @return the datatype of the attribute.
     */
    public final Datatype getAttributeDatatype() {
        return getDatatype();
    }

    /**
     * Returns the space type for the attribute. It returns a
     * negative number if it failed to retrieve the type information from
     * the file.
     *
     * @return the space type for the attribute.
     */
    public final int getAttributeSpaceType() {
        return getSpaceType();
    }

    /**
     * Returns the rank (number of dimensions) of the attribute. It returns a
     * negative number if it failed to retrieve the dimension information from
     * the file.
     *
     * @return the number of dimensions of the attribute.
     */
    public final int getAttributeRank() {
        return getRank();
    }

    /**
     * Returns the selected size of the rows and columns of the attribute. It returns a
     * negative number if it failed to retrieve the size information from
     * the file.
     *
     * @return the selected size of the rows and columns of the attribute.
     */
    public final int getAttributePlane() {
        return (int)getWidth() * (int)getHeight();
    }

    /**
     * Returns the array that contains the dimension sizes of the data value of
     * the attribute. It returns null if it failed to retrieve the dimension
     * information from the file.
     *
     * @return the dimension sizes of the attribute.
     */
    public final long[] getAttributeDims() {
        return getDims();
    }

    /**
     * @return true if the dataspace is a NULL; otherwise, returns false.
     */
    @Override
    public boolean isAttributeNULL() {
        return isNULL();
    }

    /**
     * @return true if the data is a single scalar point; otherwise, returns false.
     */
    public boolean isAttributeScalar() {
        return isScalar();
    }

    /**
     * Not for public use in the future.
     *
     * setData() is not safe to use because it changes the memory buffer
     * of the dataset object. Dataset operations such as write/read
     * will fail if the buffer type or size is changed.
     *
     * @param d  the object data, which must be an array of Objects
     */
    public void setAttributeData(Object d) {
        setData(d);
    }

    /**
     * Writes the memory buffer of this attribute to the file.
     *
     * @throws Exception if the buffer cannot be written
     */
    public void writeAttribute() throws Exception {
        write();
    }

    /**
     * Writes the given data buffer into this attribute in a file.
     *
     * The data buffer is a vector that contains the data values of compound fields. The data is written
     * into the file as one data blob.
     *
     * @param buf
     *            The vector that contains the data values of compound fields.
     *
     * @throws Exception
     *             If there is an error at the library level.
     */
    public void writeAttribute(Object buf) throws Exception {
        write(buf);
    }

    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter. For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} with three data points.
     *
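     * A minimal usage sketch ({@code dataRange} is the attribute from the class example above, holding {0, 255}):
     *
     * <pre>
     * String s = dataRange.toAttributeString(", ");   // yields "0, 255"
     * </pre>
     *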
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     *
     * @return the string representation of the data values.
     */
    public String toAttributeString(String delimiter) {
        return toString(delimiter, -1);
    }

    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter. For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} with three data points.
     *
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     * @param maxItems
     *            The maximum number of Array values to return
     *
     * @return the string representation of the data values.
     */
    public String toAttributeString(String delimiter, int maxItems) {
        return toString(delimiter, maxItems);
    }
}