/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the files COPYING and Copyright.html. *
 * COPYING can be found at the root of the source code distribution tree.    *
 * Or, see https://support.hdfgroup.org/products/licenses.html               *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.fits;

import java.lang.reflect.Array;
import java.util.Iterator;
import java.util.List;
import java.util.Vector;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.ScalarDS;
import hdf.object.MetaDataContainer;

import hdf.object.fits.FitsAttribute;

import nom.tam.fits.BasicHDU;
import nom.tam.fits.Header;
import nom.tam.fits.HeaderCard;

/**
 * FitsDataset describes a multi-dimensional array of FITS scalar or atomic data
 * types, such as byte, int, short, long, float, double and string, and the
 * operations performed on the scalar dataset.
 *
 * The library predefines a modest number of datatypes. For details, read <a
 * href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/HDF5_Users_Guide/Datatypes/HDF5_Datatypes.htm">
 * The Datatype Interface (H5T)</a>
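 *
 * A minimal usage sketch (the file name and dataset path below are illustrative,
 * not defined by this class; exception handling is omitted):
 * <pre>
 * FitsFile file = new FitsFile("example.fits");
 * file.open();
 * FitsDataset dset = (FitsDataset) file.get("/Image0");
 * dset.init();
 * Object data = dset.read();
 * file.close();
 * </pre>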
 *
 * @version 1.1 9/4/2007
 * @author Peter X. Cao
 */
public class FitsDataset extends ScalarDS implements MetaDataContainer
{
    private static final long serialVersionUID = 3944770379558335171L;

    private static final Logger log = LoggerFactory.getLogger(FitsDataset.class);

    /**
     * The list of attributes of this data object. Members of the list are
     * instances of FitsAttribute.
     */
    private List attributeList;

    /** the native dataset */
    private BasicHDU nativeDataset;

    /**
     * Constructs a FitsDataset object backed by a specific FITS HDU.
     *
     * @param fileFormat the FITS file that contains the dataset.
     * @param hdu the BasicHDU.
     * @param dName the name for this dataset.
     * @param oid the unique identifier for this dataset.
     */
    public FitsDataset(FileFormat fileFormat, BasicHDU hdu, String dName, long[] oid) {
        super(fileFormat, dName, HObject.SEPARATOR, oid);
        unsignedConverted = false;
        nativeDataset = hdu;
    }

    /**
     * Checks if the object has any attributes attached.
     *
     * @return true if it has any attributes, false otherwise.
     */
    @Override
    public boolean hasAttribute() {
        return false;
    }

    // Implementing Dataset
    @Override
    public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff) throws Exception {
        // not supported
        throw new UnsupportedOperationException("copy operation unsupported for FITS.");
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#readBytes()
     */
    @Override
    public byte[] readBytes() throws Exception {
        // not supported
        throw new UnsupportedOperationException("readBytes operation unsupported for FITS.");
    }

    /**
     * Reads the data from file.
     *
     * read() reads the data from file to a memory buffer and returns the memory
     * buffer. The dataset object does not hold the memory buffer. To store the
     * memory buffer in the dataset object, one must call getData().
     *
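     * For example (a rough sketch; {@code dset} is an already initialized FitsDataset):
     * <pre>
     * Object buf = dset.read();      // buffer handed back to the caller, not kept by the object
     * Object same = dset.getData();  // reads if necessary and caches the buffer in the object
     * </pre>
     *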
     * By default, the whole dataset is read into memory. Users can also select
     * a subset to read. Subsetting is done in an implicit way.
     *
     * @return the data read from file.
     *
     * @see #getData()
     *
     * @throws Exception
     *             if the object cannot be read
     * @throws OutOfMemoryError
     *             if memory is exhausted
     */
    @Override
    public Object read() throws Exception {
        Object theData = null;
        Object fitsData = null;

        if (nativeDataset == null)
            return null;

        try {
            fitsData = nativeDataset.getData().getData();
        }
        catch (Exception ex) {
            throw new UnsupportedOperationException("This implementation only supports integer and float datasets. " +
                    "It may not work for other datatypes. \n" + ex);
        }

        int n = get1DLength(fitsData);

        theData = FitsDatatype.allocateArray(nativeDataset.getBitPix(), n);

        to1Darray(fitsData, theData, 0);

        return theData;
    }

    /**
     * Writes a memory buffer to the object in the file.
     *
     * @param buf
     *            the data to write
     *
     * @throws Exception
     *             if the data cannot be written
     */
    @Override
    public void write(Object buf) throws Exception {
        // not supported
        throw new UnsupportedOperationException("write operation unsupported for FITS.");
    }

    /**
     * Retrieves the object's metadata, such as attributes, from the file.
     *
     * Metadata, such as attributes, is stored in a List.
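     *
     * Each FITS header card is returned as a FitsAttribute whose value holds the
     * card value followed, when a comment is present, by " / " and the comment.
     * A rough sketch of listing the attributes (the attribute accessors used here
     * are assumed, not defined in this class):
     * <pre>
     * Iterator it = dset.getMetadata().iterator();
     * while (it.hasNext()) {
     *     FitsAttribute attr = (FitsAttribute) it.next();
     *     System.out.println(attr.getName() + " = " + attr.getAttributeData());
     * }
     * </pre>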
     *
     * @return the list of metadata objects.
     *
     * @throws Exception
     *             if the metadata cannot be retrieved
     */
    @SuppressWarnings("rawtypes")
    public List getMetadata() throws Exception {
        if (attributeList != null)
            return attributeList;

        if (nativeDataset == null)
            return null;

        Header header = nativeDataset.getHeader();
        if (header == null)
            return null;

        attributeList = new Vector();
        HeaderCard hc = null;
        Iterator it = header.iterator();
        FitsAttribute attr = null;
        Datatype dtype = new FitsDatatype(Datatype.CLASS_STRING, 80, 0, 0);
        long[] dims = {1};
        String value = null;
        while (it.hasNext()) {
            value = "";
            hc = (HeaderCard) it.next();
            attr = new FitsAttribute(this, hc.getKey(), dtype, dims);
            String tvalue = hc.getValue();
            if (tvalue != null)
                value += tvalue;
            tvalue = hc.getComment();
            if (tvalue != null)
                value += " / " + tvalue;
            attr.setAttributeData(value);
            attributeList.add(attr);
        }

        return attributeList;
    }

    /**
     * Writes a specific piece of metadata (such as an attribute) into the file.
     *
     * If an HDF(4&amp;5) attribute exists in the file, this method updates its
     * value. If the attribute does not exist in the file, it creates the
     * attribute in the file and attaches it to the object. Writing a new
     * attribute whose name duplicates that of an existing attribute will fail.
     * To update the value of an existing attribute in the file, first get the
     * attribute instance via getMetadata(), change its value, and then use
     * writeMetadata() to write the value back.
     *
     * @param info
     *            the metadata to write.
     *
     * @throws Exception
     *             if the metadata cannot be written
     */
    public void writeMetadata(Object info) throws Exception {
        // not supported
        throw new UnsupportedOperationException("writeMetadata operation unsupported for FITS.");
    }

    /**
     * Deletes an existing piece of metadata from this object.
     *
     * @param info
     *            the metadata to delete.
     *
     * @throws Exception
     *             if the metadata cannot be removed
     */
    public void removeMetadata(Object info) throws Exception {
        // not supported
        throw new UnsupportedOperationException("removeMetadata operation unsupported for FITS.");
    }

    /**
     * Updates an existing piece of metadata attached to this object.
     *
     * @param info
     *            the metadata to update.
     *
     * @throws Exception
     *             if the metadata cannot be updated
     */
    public void updateMetadata(Object info) throws Exception {
        // not supported
        throw new UnsupportedOperationException("updateMetadata operation unsupported for FITS.");
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.HObject#open()
     */
    @Override
    public long open() {
        return -1;
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.HObject#close(int)
     */
    @Override
    public void close(long did) {
        // Nothing to implement
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#init()
     */
    @Override
    public void init() {
        if (nativeDataset == null)
            return;

        if (inited)
            return; // already called. Initialize only once

        int[] axes = null;
        try {
            axes = nativeDataset.getAxes();
        }
        catch (Exception ex) {
            log.debug("nativeDataset.getAxes():", ex);
        }

        if (axes == null)
            return;

        rank = axes.length;
        if (rank == 0) {
            // a scalar data point
            isScalar = true;
            rank = 1;
            dims = new long[] { 1 };
        }
        else {
            isScalar = false;
            dims = new long[rank];
            for (int i=0; i<rank; i++)
                dims[i] = axes[i];
        }

        startDims = new long[rank];
        selectedDims = new long[rank];
        for (int i=0; i<rank; i++) {
            startDims[i] = 0;
            selectedDims[i] = 1;
        }

        if (rank == 1) {
            selectedIndex[0] = 0;
            selectedDims[0] = dims[0];
        }
        else if (rank == 2) {
            selectedIndex[0] = 0;
            selectedIndex[1] = 1;
            selectedDims[0] = dims[0];
            selectedDims[1] = dims[1];
        }
        else if (rank > 2) {
            selectedIndex[0] = 0;
            selectedIndex[1] = 1;
            selectedIndex[2] = 2;
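            // only the first two dimensions are selected in full; the remaining
            // dimensions keep the default selection size of 1 (a single 2D slice)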
            selectedDims[0] = dims[0];
            selectedDims[1] = dims[1];
        }

        if ((rank > 1) && isText)
            selectedDims[1] = 1;

        inited = true;
    }

    /* Implement abstract ScalarDS */

    /**
     * Creates a new dataset.
     *
     * @param name the name of the dataset to create.
     * @param pgroup the parent group of the new dataset.
     * @param type the datatype of the dataset.
     * @param dims the dimension size of the dataset.
     * @param maxdims the max dimension size of the dataset.
     * @param chunks the chunk size of the dataset.
     * @param gzip the level of the gzip compression.
     * @param data the array of data values.
     *
     * @return the new dataset if successful. Otherwise returns null.
     *
     * @throws Exception
     *            if there is an error
     */
    public static FitsDataset create(String name, Group pgroup, Datatype type,
            long[] dims, long[] maxdims, long[] chunks, int gzip, Object data) throws Exception {
        // not supported
        throw new UnsupportedOperationException("Unsupported operation for FITS.");
    }

    /**
     * Returns the datatype of the data object.
     *
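     * For a FITS dataset, the datatype is derived from the BITPIX value of the
     * underlying HDU.
     *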
     * @return the datatype of the data object.
     */
    @Override
    public Datatype getDatatype() {
        if (datatype == null) {
            try {
                datatype = new FitsDatatype(nativeDataset.getBitPix());
            }
            catch (Exception ex) {
                log.debug("getDatatype(): failed to create datatype: ", ex);
                datatype = null;
            }
        }

        return datatype;
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.HObject#setName(java.lang.String)
     */
    @Override
    public void setName(String newName) throws Exception {
        // not supported
        throw new UnsupportedOperationException("Unsupported operation for FITS.");
    }

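    /** Returns the total number of elements in a (possibly nested) array, or 1 if the object is not an array. */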
    private int get1DLength(Object data) throws Exception {
        if (!data.getClass().isArray())
            return 1;

        int len = Array.getLength(data);

        int total = 0;
        for (int i = 0; i < len; i++)
            total += get1DLength(Array.get(data, i));

        return total;
    }

    /** Copies a multi-dimensional array of FITS data into a 1D array. */
    private int to1Darray(Object dataIn, Object dataOut, int offset) throws Exception {
        Class<?> component = dataIn.getClass().getComponentType();
        if (component == null)
            return offset;

        int size = Array.getLength(dataIn);
        if (!component.isArray()) {
            System.arraycopy(dataIn, 0, dataOut, offset, size);
            return offset + size;
        }

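        // recurse into nested sub-arrays, flattening them starting from the last sub-array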
        for (int i = size - 1; i >= 0; i--)
            offset = to1Darray(Array.get(dataIn, i), dataOut, offset);

        return offset;
    }

    // Implementing DataFormat
    /* FITS does not support metadata */
    /**
     * Retrieves the object's metadata, such as attributes, from the file.
     *
     * Metadata, such as attributes, is stored in a List.
     *
     * @param attrPropList
     *             the list of properties to get
     *
     * @return the list of metadata objects.
     *
     * @throws Exception
     *             if the metadata cannot be retrieved
     */
    public List getMetadata(int... attrPropList) throws Exception {
        throw new UnsupportedOperationException("getMetadata(int... attrPropList) is not supported");
    }
}