/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the files COPYING and Copyright.html. *
 * COPYING can be found at the root of the source code distribution tree.    *
 * Or, see https://support.hdfgroup.org/products/licenses.html               *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.fits;

import java.lang.reflect.Array;
import java.util.Iterator;
import java.util.List;
import java.util.Vector;

import hdf.object.Attribute;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.ScalarDS;
import nom.tam.fits.BasicHDU;
import nom.tam.fits.Header;
import nom.tam.fits.HeaderCard;
/**
 * FitsDataset describes a multi-dimensional array of FITS scalar or atomic data
 * types, such as byte, int, short, long, float, double and string, and the
 * operations performed on the scalar dataset.
 * <p>
 * The library predefines a modest number of datatypes. For details, read <a
 * href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/HDF5_Users_Guide/Datatypes/HDF5_Datatypes.htm">
 * The Datatype Interface (H5T)</a>
 * <p>
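 * A minimal usage sketch (hypothetical; the dataset would normally be
 * retrieved from a FITS file opened through the hdf.object FileFormat API):
 * <pre>{@code
 * FitsDataset dset = ...;           // e.g. obtained from the file's object tree
 * dset.init();                      // resolve rank, dims and default selection
 * Object data = dset.read();        // flattened 1D array of the native type
 * List attrs = dset.getMetadata();  // FITS header cards as Attribute objects
 * }</pre>
 * <p>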
 * @version 1.1 9/4/2007
 * @author Peter X. Cao
 */
public class FitsDataset extends ScalarDS
{
    private static final long serialVersionUID = 3944770379558335171L;

    private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(FitsDataset.class);

    /**
     * The list of attributes of this data object. Members of the list are
     * instances of Attribute.
     */
    private List attributeList;

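    /** The underlying nom.tam.fits HDU that holds this dataset's data. */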
    private BasicHDU nativeDataset;

    /**
     * Constructs a FitsDataset object backed by the given FITS HDU.
     *
     * @param fileFormat the FITS file that contains the dataset.
     * @param hdu the BasicHDU that holds the data.
     * @param dName the name for this dataset.
     * @param oid the unique identifier for this dataset.
     */
    public FitsDataset(FileFormat fileFormat, BasicHDU hdu, String dName, long[] oid) {
        super(fileFormat, dName, HObject.SEPARATOR, oid);
        unsignedConverted = false;
        nativeDataset = hdu;
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.DataFormat#hasAttribute()
     */
    @Override
    public boolean hasAttribute() { return false; }

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#copy(hdf.object.Group, java.lang.String, long[], java.lang.Object)
     */
    @Override
    public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff)
            throws Exception {
        // not supported
        throw new UnsupportedOperationException("copy operation unsupported for FITS.");
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#readBytes()
     */
    @Override
    public byte[] readBytes() throws Exception {
        // not supported
        throw new UnsupportedOperationException("readBytes operation unsupported for FITS.");
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#read()
     */
    @Override
    public Object read() throws Exception {
        Object theData = null;
        Object fitsData = null;

        if (nativeDataset == null) {
            return null;
        }

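        // nom.tam.fits exposes the HDU payload as a (possibly nested) Java array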
        try {
            fitsData = nativeDataset.getData().getData();
        }
        catch (Exception ex) {
            throw new UnsupportedOperationException("This implementation only supports integer and float datasets. " +
                    "It may not work for other datatypes. \n" + ex);
        }

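        // flatten the n-dimensional FITS array into a 1D array of the native type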
        int n = get1DLength(fitsData);

        theData = FitsDatatype.allocateArray(nativeDataset.getBitPix(), n);

        to1Darray(fitsData, theData, 0);

        return theData;
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#write(java.lang.Object)
     */
    @Override
    public void write(Object buf) throws Exception {
        // not supported
        throw new UnsupportedOperationException("write operation unsupported for FITS.");
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.DataFormat#getMetadata()
     */
    @SuppressWarnings("rawtypes")
    @Override
    public List getMetadata() throws Exception {
        if (attributeList != null) {
            return attributeList;
        }

        if (nativeDataset == null) {
            return null;
        }

        Header header = nativeDataset.getHeader();
        if (header == null) {
            return null;
        }

        attributeList = new Vector();
        HeaderCard hc = null;
        Iterator it = header.iterator();
        Attribute attr = null;
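        // a FITS header card is an 80-character record, so expose each one as an 80-char string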
        Datatype dtype = new FitsDatatype(Datatype.CLASS_STRING, 80, 0, 0);
        long[] dims = {1};
        String value = null;
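        // build one Attribute per header card, rendered as "value / comment"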
        while (it.hasNext()) {
            value = "";
            hc = (HeaderCard) it.next();
            attr = new Attribute(this, hc.getKey(), dtype, dims);
            String tvalue = hc.getValue();
            if (tvalue != null) {
                value += tvalue;
            }
            tvalue = hc.getComment();
            if (tvalue != null) {
                value += " / " + tvalue;
            }
            attr.setData(value);
            attributeList.add(attr);
        }

        return attributeList;
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.DataFormat#writeMetadata(java.lang.Object)
     */
    @Override
    public void writeMetadata(Object info) throws Exception {
        // not supported
        throw new UnsupportedOperationException("writeMetadata operation unsupported for FITS.");
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.DataFormat#removeMetadata(java.lang.Object)
     */
    @Override
    public void removeMetadata(Object info) throws Exception {
        // not supported
        throw new UnsupportedOperationException("removeMetadata operation unsupported for FITS.");
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.DataFormat#updateMetadata(java.lang.Object)
     */
    @Override
    public void updateMetadata(Object info) throws Exception {
        // not supported
        throw new UnsupportedOperationException("updateMetadata operation unsupported for FITS.");
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.HObject#open()
     */
    @Override
    public long open() { return -1; }

    /*
     * (non-Javadoc)
     * @see hdf.object.HObject#close(int)
     */
    @Override
    public void close(long did) {
        // Nothing to implement
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#init()
     */
    @Override
    public void init() {
        if (nativeDataset == null) {
            return;
        }

        if (inited) {
            return; // already called. Initialize only once
        }

        int[] axes = null;
        try {
            axes = nativeDataset.getAxes();
        }
        catch (Exception ex) {
            log.debug("nativeDataset.getAxes():", ex);
        }

        if (axes == null) {
            return;
        }

        rank = axes.length;
        if (rank == 0) {
            // a scalar data point
            rank = 1;
            dims = new long[1];
            dims[0] = 1;
        }
        else {
            dims = new long[rank];
            for (int i = 0; i < rank; i++) {
                dims[i] = axes[i];
            }
        }

        startDims = new long[rank];
        selectedDims = new long[rank];
        for (int i = 0; i < rank; i++) {
            startDims[i] = 0;
            selectedDims[i] = 1;
        }

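        // default selection: the whole array for rank 1 and 2, the first 2D plane for rank > 2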
        if (rank == 1) {
            selectedIndex[0] = 0;
            selectedDims[0] = dims[0];
        }
        else if (rank == 2) {
            selectedIndex[0] = 0;
            selectedIndex[1] = 1;
            selectedDims[0] = dims[0];
            selectedDims[1] = dims[1];
        }
        else if (rank > 2) {
            selectedIndex[0] = 0;
            selectedIndex[1] = 1;
            selectedIndex[2] = 2;
            selectedDims[0] = dims[0];
            selectedDims[1] = dims[1];
        }

        if ((rank > 1) && isText) {
            selectedDims[1] = 1;
        }

        inited = true;
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.ScalarDS#getPalette()
     */
    @Override
    public byte[][] getPalette()
    {
        if (palette == null) {
            palette = readPalette(0);
        }

        return palette;
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.ScalarDS#readPalette(int)
     */
    @Override
    public byte[][] readPalette(int idx) {
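        // palettes are not supported for FITS datasets; always returns null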
        return null;
    }

    /**
     * Creates a new dataset. Not supported for FITS; this method always
     * throws UnsupportedOperationException.
     *
     * @param name the name of the dataset to create.
     * @param pgroup the parent group of the new dataset.
     * @param type the datatype of the dataset.
     * @param dims the dimension sizes of the dataset.
     * @param maxdims the max dimension sizes of the dataset.
     * @param chunks the chunk sizes of the dataset.
     * @param gzip the level of the gzip compression.
     * @param data the array of data values.
     *
     * @return never; dataset creation is unsupported.
     *
     * @throws Exception
     *            always, since dataset creation is not supported for FITS files.
     */
    public static FitsDataset create(
            String name,
            Group pgroup,
            Datatype type,
            long[] dims,
            long[] maxdims,
            long[] chunks,
            int gzip,
            Object data) throws Exception {
        // not supported
        throw new UnsupportedOperationException("Unsupported operation for FITS.");
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.ScalarDS#getPaletteRefs()
     */
    @Override
    public byte[] getPaletteRefs() {
        return null;
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#getDatatype()
     */
    @Override
    public Datatype getDatatype() {
        if (datatype == null) {
            try {
                datatype = new FitsDatatype(nativeDataset.getBitPix());
            }
            catch (Exception ex) {
                log.debug("getDatatype(): failed to create datatype: ", ex);
                datatype = null;
            }
        }

        return datatype;
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.HObject#setName(java.lang.String)
     */
    @Override
    public void setName(String newName) throws Exception {
        // not supported
        throw new UnsupportedOperationException("Unsupported operation for FITS.");
    }

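    /** Returns the total number of elements in a (possibly nested) array. */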
    private int get1DLength(Object data) throws Exception {
        if (!data.getClass().isArray()) {
            return 1;
        }

        int len = Array.getLength(data);

        int total = 0;
        for (int i = 0; i < len; i++) {
            total += get1DLength(Array.get(data, i));
        }

        return total;
    }

    /** Copies a multi-dimensional array of FITS data into a 1D array. */
    private int to1Darray(Object dataIn, Object dataOut, int offset) throws Exception {
        Class component = dataIn.getClass().getComponentType();
        if (component == null) {
            return offset;
        }

        int size = Array.getLength(dataIn);
        if (!component.isArray()) {
            System.arraycopy(dataIn, 0, dataOut, offset, size);
            return offset + size;
        }

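        // recurse into sub-arrays; note they are visited in reverse index order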
        for (int i = size - 1; i >= 0; i--) {
            offset = to1Darray(Array.get(dataIn, i), dataOut, offset);
        }

        return offset;
    }

    // Implementing DataFormat
    public List getMetadata(int... attrPropList) throws Exception {
        throw new UnsupportedOperationException("getMetadata(int... attrPropList) is not supported");
    }
}