/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the COPYING file, which can be found  *
 * at the root of the source code distribution tree,                         *
 * or in https://www.hdfgroup.org/licenses.                                  *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.fits;

import java.lang.reflect.Array;
import java.math.BigInteger;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import hdf.object.Attribute;
import hdf.object.DataFormat;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.ScalarDS;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * An attribute is a (name, value) pair of metadata attached to a primary data object such as a dataset,
 * group or named datatype.
 *
 * Like a dataset, an attribute has a name, a datatype and a dataspace.
 *
 * For more details on attributes, see <a
 * href="https://hdfgroup.github.io/hdf5/_h5_a__u_g.html#sec_attribute">HDF5 Attributes in the HDF5 User Guide</a>.
 *
 * The following code is an example of creating an attribute with a 1D integer array of two elements.
 *
 * <pre>
 * // Example of creating a new attribute
 * // The name of the new attribute
 * String name = "Data range";
 * // Creating an unsigned 1-byte integer datatype
 * Datatype type = new FitsDatatype(Datatype.CLASS_INTEGER, // class
 *                                  1,                      // size in bytes
 *                                  Datatype.ORDER_LE,      // byte order
 *                                  Datatype.SIGN_NONE);    // unsigned
 * // 1-D array of size two
 * long[] dims = {2};
 * // The value of the attribute
 * int[] value = {0, 255};
 * // Create a new attribute attached to parentObj, the HObject that owns it
 * FitsAttribute dataRange = new FitsAttribute(parentObj, name, type, dims);
 * // Set the attribute value
 * dataRange.setValue(value);
 * // See FileFormat.writeAttribute() for how to attach an attribute to an object,
 * &#64;see hdf.object.FileFormat#writeAttribute(HObject, Attribute, boolean)
 * </pre>
 *
 * For an atomic datatype, the value of a FitsAttribute will be a 1D array of integers, floats or strings.
 * For a compound datatype, it will be a 1D array of strings with field members separated by a comma. For
 * example, "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int, float} with three data
 * points.
 *
 * @see hdf.object.Datatype
 *
 * @version 2.0 4/2/2018
 * @author Peter X. Cao, Jordan T. Henderson
 */
public class FitsAttribute extends ScalarDS implements Attribute {

    private static final long serialVersionUID = 2072473407027648309L;

    private static final Logger log = LoggerFactory.getLogger(FitsAttribute.class);

    /** The HObject to which this FitsAttribute is attached, Attribute interface */
    protected HObject parentObject;

    /** additional information and properties for the attribute, Attribute interface */
    private transient Map<String, Object> properties;

    /**
     * Create an attribute with the specified name, datatype and dimension sizes.
     *
     * For a scalar attribute, the dimension size can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general class
     * and is independent of the underlying file format, e.g., the implementation of
     * attribute applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = &quot;CLASS&quot;;
     * String[] classValue = { &quot;IMAGE&quot; };
     * Datatype attrType = null;
     * try {
     *     attrType = new FitsDatatype(Datatype.CLASS_STRING, classValue[0].length() + 1,
     *                                 Datatype.NATIVE, Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * FitsAttribute attr = new FitsAttribute(parentObj, attrName, attrType, attrDims);
     * attr.setValue(classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this Attribute is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for a scalar attribute
     *
     * @see hdf.object.Datatype
     */
    public FitsAttribute(HObject parentObj, String attrName, Datatype attrType, long[] attrDims)
    {
        this(parentObj, attrName, attrType, attrDims, null);
    }

    /**
     * Create an attribute with a specific name and value.
     *
     * For a scalar attribute, the dimension size can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general class
     * and is independent of the underlying file format, e.g., the implementation of
     * attribute applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = &quot;CLASS&quot;;
     * String[] classValue = { &quot;IMAGE&quot; };
     * Datatype attrType = null;
     * try {
     *     attrType = new FitsDatatype(Datatype.CLASS_STRING, classValue[0].length() + 1,
     *                                 Datatype.NATIVE, Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * FitsAttribute attr = new FitsAttribute(parentObj, attrName, attrType, attrDims, classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this FitsAttribute is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for a scalar attribute
     * @param attrValue
     *            the value of the attribute, null if no value
     *
     * @see hdf.object.Datatype
     */
    @SuppressWarnings({"rawtypes", "unchecked", "deprecation"})
    public FitsAttribute(HObject parentObj, String attrName, Datatype attrType, long[] attrDims,
                         Object attrValue)
    {
        super((parentObj == null) ? null : parentObj.getFileFormat(), attrName,
              (parentObj == null) ? null : parentObj.getFullName(), null);

        log.trace("FitsAttribute: start {}", parentObj);
        this.parentObject = parentObj;

        unsignedConverted = false;

        datatype = attrType;

        if (attrValue != null) {
            data         = attrValue;
            originalBuf  = attrValue;
            isDataLoaded = true;
        }
        properties = new HashMap();

        if (attrDims == null) {
            rank = 1;
            dims = new long[] {1};
        }
        else {
            dims = attrDims;
            rank = dims.length;
        }

        selectedDims   = new long[rank];
        startDims      = new long[rank];
        selectedStride = new long[rank];

        log.trace("attrName={}, attrType={}, attrValue={}, rank={}, isUnsigned={}", attrName,
                  getDatatype().getDescription(), data, rank, getDatatype().isUnsigned());

        resetSelection();
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#open()
     */
    @Override
    public long open()
    {
        if (parentObject == null) {
            log.debug("open(): attribute's parent object is null");
            return -1;
        }

        return -1;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#close(long)
     */
    @Override
    public void close(long aid)
    {
    }

    @Override
    public void init()
    {
        if (inited) {
            resetSelection();
            log.trace("init(): FitsAttribute already inited");
            return;
        }
    }

    /**
     * Reads the data from file.
     *
     * read() reads the data from file into a memory buffer and returns the memory
     * buffer. The dataset object does not hold the memory buffer. To store the
     * memory buffer in the dataset object, one must call getData().
     *
     * By default, the whole dataset is read into memory. Users can also select
     * a subset to read. Subsetting is done in an implicit way.
     *
     * @return the data read from file.
     *
     * @see #getData()
     *
     * @throws Exception
     *             if the object cannot be read
     * @throws OutOfMemoryError
     *             if memory is exhausted
     */
    @Override
    public Object read() throws Exception, OutOfMemoryError
    {
        if (!inited)
            init();

        return data;
    }
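
    // A minimal usage sketch (kept in comments; not part of the original source): how read() and
    // getAttributeData() relate for an attribute whose value was supplied at construction time.
    // `attr` is an assumed FitsAttribute instance obtained elsewhere, e.g. from an object's metadata.
    //
    //   Object raw = attr.read();              // returns the in-memory buffer held by this object
    //   Object buf = attr.getAttributeData();  // delegates to getData(), which reads from file if needed
    //   if (buf instanceof int[])
    //       System.out.println(java.util.Arrays.toString((int[]) buf));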

    /* Implement abstract Dataset */

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#copy(hdf.object.Group, java.lang.String, long[], java.lang.Object)
     */
    @Override
    public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff) throws Exception
    {
        // not supported
        throw new UnsupportedOperationException("copy operation unsupported for FITS.");
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#readBytes()
     */
    @Override
    public byte[] readBytes() throws Exception
    {
        // not supported
        throw new UnsupportedOperationException("readBytes operation unsupported for FITS.");
    }

    /**
     * Writes a memory buffer to the object in the file.
     *
     * @param buf
     *            the data to write
     *
     * @throws Exception
     *             if the data cannot be written
     */
    @Override
    public void write(Object buf) throws Exception
    {
        // not supported
        throw new UnsupportedOperationException("write operation unsupported for FITS.");
    }

    /**
     * Returns the HObject to which this Attribute is currently "attached".
     *
     * @return the HObject to which this Attribute is currently "attached".
     */
    public HObject getParentObject() { return parentObject; }

    /**
     * Sets the HObject to which this Attribute is "attached".
     *
     * @param pObj
     *            the new HObject to which this Attribute is "attached".
     */
    public void setParentObject(HObject pObj) { parentObject = pObj; }

    /**
     * Sets a property for the attribute.
     *
     * @param key the attribute Map key
     * @param value the attribute Map value
     */
    public void setProperty(String key, Object value) { properties.put(key, value); }

    /**
     * Gets the property for the given key.
     *
     * @param key the attribute Map key
     *
     * @return the property
     */
    public Object getProperty(String key) { return properties.get(key); }

    /**
     * Gets all property keys.
     *
     * @return the Collection of property keys
     */
    public Collection<String> getPropertyKeys() { return properties.keySet(); }
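
    // A small sketch (hedged; not from the original source) of the ad-hoc property map exposed by
    // setProperty()/getProperty()/getPropertyKeys(). The key "unit" below is an arbitrary,
    // hypothetical example; any String key and Object value may be stored.
    //
    //   attr.setProperty("unit", "counts");
    //   Object unit = attr.getProperty("unit");        // "counts"
    //   for (String key : attr.getPropertyKeys())
    //       System.out.println(key + " = " + attr.getProperty(key));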

    /**
     * Returns the name of the object. For example, "Raster Image #2".
     *
     * @return The name of the object.
     */
    public final String getAttributeName() { return getName(); }

    /**
     * Retrieves the attribute data from the file.
     *
     * @return the attribute data.
     *
     * @throws Exception
     *             if the data cannot be retrieved
     */
    public final Object getAttributeData() throws Exception, OutOfMemoryError { return getData(); }

    /**
     * Returns the datatype of the attribute.
     *
     * @return the datatype of the attribute.
     */
    public final Datatype getAttributeDatatype() { return getDatatype(); }

    /**
     * Returns the space type for the attribute. It returns a
     * negative number if it fails to retrieve the type information from
     * the file.
     *
     * @return the space type for the attribute.
     */
    public final int getAttributeSpaceType() { return getSpaceType(); }

    /**
     * Returns the rank (number of dimensions) of the attribute. It returns a
     * negative number if it fails to retrieve the dimension information from
     * the file.
     *
     * @return the number of dimensions of the attribute.
     */
    public final int getAttributeRank() { return getRank(); }

    /**
     * Returns the selected size of the rows and columns of the attribute. It returns a
     * negative number if it fails to retrieve the size information from
     * the file.
     *
     * @return the selected size of the rows and columns of the attribute.
     */
    public final int getAttributePlane() { return (int)getWidth() * (int)getHeight(); }

    /**
     * Returns the array that contains the dimension sizes of the data value of
     * the attribute. It returns null if it fails to retrieve the dimension
     * information from the file.
     *
     * @return the dimension sizes of the attribute.
     */
    public final long[] getAttributeDims() { return getDims(); }
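
    // An illustrative sketch (assumptions noted in comments) of inspecting an attribute's shape
    // and type with the accessors above; `attr` is an assumed FitsAttribute instance.
    //
    //   int rank     = attr.getAttributeRank();        // number of dimensions, e.g. 1
    //   long[] dims  = attr.getAttributeDims();        // e.g. {2}
    //   Datatype t   = attr.getAttributeDatatype();
    //   long nPoints = 1;
    //   for (int i = 0; i < rank; i++)
    //       nPoints *= dims[i];                        // total number of data points
    //   System.out.println(attr.getAttributeName() + ": " + nPoints + " x " + t.getDescription());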

    /**
     * @return true if the dataspace is NULL; otherwise, returns false.
     */
    @Override
    public boolean isAttributeNULL()
    {
        return isNULL();
    }

    /**
     * @return true if the data is a single scalar point; otherwise, returns false.
     */
    public boolean isAttributeScalar() { return isScalar(); }

    /**
     * Not for public use in the future.
     *
     * setData() is not safe to use because it changes the memory buffer
     * of the dataset object. Dataset operations such as write/read
     * will fail if the buffer type or size is changed.
     *
     * @param d  the object data - must be an array of Objects
     */
    public void setAttributeData(Object d) { setData(d); }

    /**
     * Writes the memory buffer of this dataset to file.
     *
     * @throws Exception if the buffer cannot be written
     */
    public void writeAttribute() throws Exception { write(); }

    /**
     * Writes the given data buffer into this attribute in a file.
     *
     * The data buffer is a vector that contains the data values of compound fields. The data is written
     * into file as one data blob.
     *
     * @param buf
     *            The vector that contains the data values of compound fields.
     *
     * @throws Exception
     *             If there is an error at the library level.
     */
    public void writeAttribute(Object buf) throws Exception { write(buf); }

    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter. For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} with three data points.
     *
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     *
     * @return the string representation of the data values.
     */
    public String toAttributeString(String delimiter) { return toString(delimiter, -1); }

    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter. For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} with three data points.
     *
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     * @param maxItems
     *            The maximum number of Array values to return
     *
     * @return the string representation of the data values.
     */
    public String toAttributeString(String delimiter, int maxItems) { return toString(delimiter, maxItems); }
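
    // A brief sketch (hedged; not from the original source) of formatting attribute values for
    // display. `dataRange` is assumed to be the FitsAttribute from the class-level example,
    // holding the int[] value {0, 255}.
    //
    //   String all   = dataRange.toAttributeString(",");     // e.g. "0, 255" per the Javadoc above
    //   String first = dataRange.toAttributeString(",", 1);  // at most one value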
}