001/*****************************************************************************
002 * Copyright by The HDF Group.                                               *
003 * Copyright by the Board of Trustees of the University of Illinois.         *
004 * All rights reserved.                                                      *
005 *                                                                           *
006 * This file is part of the HDF Java Products distribution.                  *
007 * The full copyright notice, including terms governing use, modification,   *
008 * and redistribution, is contained in the files COPYING and Copyright.html. *
009 * COPYING can be found at the root of the source code distribution tree.    *
010 * Or, see https://support.hdfgroup.org/products/licenses.html               *
011 * If you do not have access to either file, you may request a copy from     *
012 * help@hdfgroup.org.                                                        *
013 ****************************************************************************/
014
015package hdf.object.h4;
016
017import java.util.List;
018import java.util.Vector;
019
020import hdf.hdflib.HDFChunkInfo;
021import hdf.hdflib.HDFCompInfo;
022import hdf.hdflib.HDFConstants;
023import hdf.hdflib.HDFDeflateCompInfo;
024import hdf.hdflib.HDFException;
025import hdf.hdflib.HDFLibrary;
026
027import hdf.object.Attribute;
028import hdf.object.Dataset;
029import hdf.object.Datatype;
030import hdf.object.FileFormat;
031import hdf.object.Group;
032import hdf.object.HObject;
033import hdf.object.ScalarDS;
034import hdf.object.MetaDataContainer;
035
036import hdf.object.h4.H4ScalarAttribute;
037
038/**
039 * H4GRImage describes an HDF4 general raster(GR) image and operations performed on
040 * the GR image. An HDF4 raster image is a two-dimension array of pixel values.
041 *
042 * Every GR data set must contain the following components: image array, name,
043 * pixel type, and dimensions. The name, dimensions, and pixel type must be
044 * supplied by the user at the time the GR data set is defined.
045 *
046 * An image array is a two-dimensional array of pixels. Each element in an image
047 * array corresponds to one pixel and each pixel can consist of a number of
048 * color component values or pixel components, e.g., Red-Green-Blue or RGB,
049 * Cyan-Magenta-Yellow-Black or CMYK, etc. Pixel components can be represented
050 * by different methods (8-bit lookup table or 24-bit direct representation) and
051 * may have different data types. The data type of pixel components and the number
052 * of components in each pixel are collectively known as the pixel type.
053 *
054 * <b>How to Select a Subset</b>
055 *
056 * Dataset defines APIs for reading, writing and subsetting a dataset. No function is
057 * defined to select a subset of a data array. The selection is done in an implicit way.
058 * Function calls to dimension information such as getSelectedDims() return an array
059 * of dimension values, which is a reference to the array in the dataset object.
060 * Changes of the array outside the dataset object directly change the values of
061 * the array in the dataset object. It is like pointers in C.
062 *
063 * The following is an example of how to make a subset. In the example, the dataset
064 * is a 4-dimension with size of [200][100][50][10], i.e.
065 * dims[0]=200; dims[1]=100; dims[2]=50; dims[3]=10; <br>
066 * We want to select every other data point in dims[1] and dims[2]
067 * <pre>
068     int rank = dataset.getRank();   // number of dimensions of the dataset
069     long[] dims = dataset.getDims(); // the dimension sizes of the dataset
     long[] selected = dataset.getSelectedDims(); // the selected size of the dataset
071     long[] start = dataset.getStartDims(); // the offset of the selection
072     long[] stride = dataset.getStride(); // the stride of the dataset
073     int[]  selectedIndex = dataset.getSelectedIndex(); // the selected dimensions for display
074
075     // select dim1 and dim2 as 2D data for display, and slice through dim0
076     selectedIndex[0] = 1;
077     selectedIndex[1] = 2;
     selectedIndex[2] = 0;
079
080     // reset the selection arrays
081     for (int i=0; i&lt;rank; i++) {
082         start[i] = 0;
083         selected[i] = 1;
084         stride[i] = 1;
085    }
086
087    // set stride to 2 on dim1 and dim2 so that every other data point is selected.
088    stride[1] = 2;
089    stride[2] = 2;
090
091    // set the selection size of dim1 and dim2
092    selected[1] = dims[1]/stride[1];
    selected[2] = dims[2]/stride[2];
094
    // when dataset.read() is called, the selection above will be used since
096    // the dimension arrays are passed by reference. Changes of these arrays
097    // outside the dataset object directly change the values of these array
098    // in the dataset object.
099
100 * </pre>
101 *
102 * @version 1.1 9/4/2007
103 * @author Peter X. Cao
104 */
105public class H4GRImage extends ScalarDS implements MetaDataContainer
106{
    /** Serialization version; ScalarDS is Serializable. */
    private static final long serialVersionUID = 1029672744963360976L;

    private static final org.slf4j.Logger   log = org.slf4j.LoggerFactory.getLogger(H4GRImage.class);

    /**
     * The list of attributes of this data object. Members of the list are
     * instance of H4ScalarAttribute. Lazily populated by getMetadata().
     */
    @SuppressWarnings("rawtypes")
    private List                            attributeList;

    /**
     * The GR interface identifier obtained from GRstart(fid)
     */
    private long                            grid;

    /**
     * The number of components in the raster image
     */
    private int                             ncomp;

    /** the datatype identifier; -1 until init() reads it from the file */
    private long                            datatypeID = -1;

    /** the number of attributes; -1 means the count has not been read yet */
    private int                             nAttributes = -1;
134
135    /**
136     * Creates a H4GRImage object with specific name and path.
137     *
138     * @param theFile the HDF file.
139     * @param name the name of this H4GRImage.
140     * @param path the full path of this H4GRImage.
141     */
142    public H4GRImage(FileFormat theFile, String name, String path) {
143        this(theFile, name, path, null);
144    }
145
146    /**
147     * Creates a H4GRImage object with specific name, path, and object ID.
148     *
149     * @param theFile the HDF file.
150     * @param name the name of this H4GRImage.
151     * @param path the full path of this H4GRImage.
152     * @param oid the unique identifier of this data object.
153     */
154    @SuppressWarnings("deprecation")
155    public H4GRImage(FileFormat theFile, String name, String path, long[] oid) {
156        super (theFile, name, path, oid);
157        palette = null;
158        isImage = isImageDisplay = true;
159        unsignedConverted = false;
160        grid = ((H4File)getFileFormat()).getGRAccessID();
161    }
162
163    /*
164     * (non-Javadoc)
165     * @see hdf.object.DataFormat#hasAttribute()
166     */
167    @Override
168    public boolean hasAttribute() {
169        if (nAttributes < 0) {
170            grid = ((H4File)getFileFormat()).getGRAccessID();
171
172            long id = open();
173
174            if (id >= 0) {
175                String[] objName = {""};
176                int[] grInfo = new int[4]; //ncomp, data_type, interlace, and num_attrs
177                int[] idims = new int[2];
178                try {
179                    HDFLibrary.GRgetiminfo(id, objName, grInfo, idims);
180                    nAttributes = grInfo[3];
181                }
182                catch (Exception ex) {
183                    log.trace("hasAttribute() failure: ", ex);
184                    nAttributes = 0;
185                }
186
187                log.trace("hasAttribute(): nAttributes={}", nAttributes);
188
189                close(id);
190            }
191        }
192
193        return (nAttributes > 0);
194    }
195
196    // To do: Implementing Dataset
197    @Override
198    public Dataset copy(Group pgroup, String dname, long[] dims, Object buff) throws Exception {
199        log.trace("copy(): start: parentGroup={} datasetName={}", pgroup, dname);
200
201        Dataset dataset = null;
202        long srcdid = -1;
203        long dstdid = -1;
204        String path = null;
205        int[] count = null;
206
207        if (pgroup == null) {
208            log.debug("copy(): Parent group is null - exit");
209            return null;
210        }
211
212        if (pgroup.isRoot())
213            path = HObject.SEPARATOR;
214        else
215            path = pgroup.getPath()+pgroup.getName()+HObject.SEPARATOR;
216
217        srcdid = open();
218        if (srcdid < 0) {
219            log.debug("copy(): Invalid source dataset ID - exit");
220            return null;
221        }
222
223        if (dims != null) {
224            count = new int[2];
225            count[0] = (int)dims[0];
226            count[1] = (int)dims[1];
227        }
228
229        int[] grInfo = new int[4]; //ncomp, data_type, interlace and num_attrs
230        try {
231            String[] tmpName = {""};
232            int[] tmpDims = new int[2];
233            HDFLibrary.GRgetiminfo(srcdid, tmpName, grInfo, tmpDims);
234            if (count == null)
235                count = tmpDims;
236        }
237        catch (HDFException ex) {
238            log.debug("copy(): GRgetiminfo failure: ", ex);
239        }
240
241        ncomp = grInfo[0];
242        long tid = grInfo[1];
243        int interlace = grInfo[2];
244        int numberOfAttributes = grInfo[3];
245        dstdid = HDFLibrary.GRcreate( ((H4File)pgroup.getFileFormat()).getGRAccessID(),
246                dname, ncomp, tid, interlace, count);
247        if (dstdid < 0) {
248            log.debug("copy(): Invalid dest dataset ID - exit");
249            return null;
250        }
251
252        int ref = HDFLibrary.GRidtoref(dstdid);
253        if (!pgroup.isRoot()) {
254            long vgid = pgroup.open();
255            HDFLibrary.Vaddtagref(vgid, HDFConstants.DFTAG_RIG, ref);
256            pgroup.close(vgid);
257        }
258
259        // read data from the source dataset
260        int[] start = {0, 0};
261        if ((buff == null) && (count != null)) {
262            buff = new byte[count[0]*count[1] * HDFLibrary.DFKNTsize(tid)];
263            HDFLibrary.GRreadimage(srcdid, start, null, count, buff);
264        }
265
266        // write the data into the destination dataset
267        HDFLibrary.GRwriteimage(dstdid, start, null, count, buff);
268
269        // copy palette
270        long pid = HDFLibrary.GRgetlutid(srcdid, 0);
271        int[] palInfo = new int[4];
272
273        HDFLibrary.GRgetlutinfo(pid, palInfo);
274        palInfo[1] = HDFConstants.DFNT_UINT8; // support unsigned byte only. Other type does not work
275        int palSize = palInfo[0]*palInfo[3];
276        byte[] palBuff = new byte[palSize];
277        HDFLibrary.GRreadlut(pid, palBuff);
278        pid = HDFLibrary.GRgetlutid(dstdid, 0);
279        HDFLibrary.GRwritelut(pid, palInfo[0], palInfo[1], palInfo[2], palInfo[3], palBuff);
280
281        // copy attributes from one object to the new object
282        log.trace("copy(): copyAttributes: numAttributes={}", numberOfAttributes);
283        copyAttribute(srcdid, dstdid, numberOfAttributes);
284
285        long[] oid = {HDFConstants.DFTAG_RIG, ref};
286        dataset = new H4GRImage(pgroup.getFileFormat(), dname, path, oid);
287
288        pgroup.addToMemberList(dataset);
289
290        close(srcdid);
291
292        try {
293            HDFLibrary.GRendaccess(dstdid);
294        }
295        catch (HDFException ex) {
296            log.debug("copy(): GRendaccess failure: ", ex);
297        }
298
299        return dataset;
300    }
301
    /* Implement abstract ScalarDS */

    /*
     * (non-Javadoc)
     * @see hdf.object.ScalarDS#readPalette(int)
     */
    @Override
    public byte[][] readPalette(int idx) {
        // not implemented for HDF4 GR images; use getPalette() instead
        return null;
    }
312
313    /*
314     * (non-Javadoc)
315     * @see hdf.object.ScalarDS#getPaletteRefs()
316     */
317    @Override
318    public byte[] getPaletteRefs() {
319        return null;
320    }
321
322    // implementing ScalarDS
323    /**
324     * Returns the datatype of the data object.
325     *
326     * @return the datatype of the data object.
327     */
328    @Override
329    public Datatype getDatatype() {
330        if (!inited)
331            init();
332
333        if (datatype == null) {
334            try {
335                datatype = new H4Datatype(datatypeID);
336            }
337            catch (Exception ex) {
338                log.debug("getDatatype(): failed to create datatype: ", ex);
339                datatype = null;
340            }
341        }
342
343        return datatype;
344    }
345
346    // Implementing Dataset
347    @Override
348    public byte[] readBytes() throws HDFException {
349        byte[] theData = null;
350
351        if (!isInited())
352            init();
353
354        long id = open();
355        if (id < 0) {
356            log.debug("readBytes(): Invalid ID - exit");
357            return null;
358        }
359
360        try {
361            // set the interlacing scheme for reading image data
362            HDFLibrary.GRreqimageil(id, interlace);
363            int datasize = (int)(getWidth()*getHeight()*ncomp);
364            int size = HDFLibrary.DFKNTsize(datatypeID)*datasize;
365            theData = new byte[size];
366            int[] start = {(int)startDims[0], (int)startDims[1]};
367            int[] select = {(int)selectedDims[0], (int)selectedDims[1]};
368
369            int[] stride = null;
370            if (selectedStride != null) {
371                stride = new int[rank];
372                for (int i=0; i<rank; i++)
373                    stride[i] = (int)selectedStride[i];
374            }
375
376            HDFLibrary.GRreadimage(id, start, stride, select, theData);
377        }
378        catch (Exception ex) {
379            log.debug("readBytes(): failure: ", ex);
380        }
381        finally {
382            close(id);
383        }
384
385        return theData;
386    }
387
388    // ***** need to implement from DataFormat *****
389    /**
390     * Reads the data from file.
391     *
392     * read() reads the data from file to a memory buffer and returns the memory
393     * buffer. The dataset object does not hold the memory buffer. To store the
394     * memory buffer in the dataset object, one must call getData().
395     *
396     * By default, the whole dataset is read into memory. Users can also select
397     * a subset to read. Subsetting is done in an implicit way.
398     *
399     * @return the data read from file.
400     *
401     * @see #getData()
402     *
403     * @throws HDFException
404     *             if object can not be read
405     */
406    @Override
407    public Object read() throws HDFException {
408        Object theData = null;
409
410        if (!isInited()) init();
411
412        long id = open();
413        if (id < 0) {
414            log.debug("read(): Invalid ID");
415            return null;
416        }
417
418        try {
419            // set the interlacing scheme for reading image data
420            HDFLibrary.GRreqimageil(id, interlace);
421            int datasize = (int)(getWidth()*getHeight()*ncomp);
422
423            theData = H4Datatype.allocateArray(datatypeID, datasize);
424
425            if (theData != null) {
426                // assume external data files are located in the same directory as the main file.
427                HDFLibrary.HXsetdir(getFileFormat().getParent());
428
429                int[] start = {(int)startDims[0], (int)startDims[1]};
430                int[] select = {(int)selectedDims[0], (int)selectedDims[1]};
431
432                int[] stride = null;
433                if (selectedStride != null) {
434                    stride = new int[rank];
435                    for (int i=0; i<rank; i++)
436                        stride[i] = (int)selectedStride[i];
437                }
438
439                HDFLibrary.GRreadimage(id, start, stride, select, theData);
440            }
441        }
442        catch (Exception ex) {
443            log.debug("read(): failure: ", ex);
444        }
445        finally {
446            close(id);
447        }
448
449        if ( (rank >1) && (selectedIndex[1]>selectedIndex[0]))
450            isDefaultImageOrder = false;
451        else
452            isDefaultImageOrder = true;
453
454        log.trace("read(): isDefaultImageOrder={}", isDefaultImageOrder);
455        return theData;
456    }
457
458    // Implementing DataFormat
459    /**
460     * Writes a memory buffer to the object in the file.
461     *
462     * @param buf
463     *            the data to write
464     *
465     * @throws HDFException
466     *             if data can not be written
467     */
468    @SuppressWarnings("deprecation")
469    @Override
470    public void write(Object buf) throws HDFException {
471        if (buf == null) {
472            log.debug("write(): buf is null - exit");
473            return;
474        }
475
476        long id = open();
477        if (id < 0) {
478            log.debug("write(): Invalid ID - exit");
479            return;
480        }
481
482        int[] select = new int[rank];
483        int[] start = new int[rank];
484        for (int i=0; i<rank; i++) {
485            select[i] = (int)selectedDims[i];
486            start[i] = (int)startDims[i];
487        }
488
489        int[] stride = null;
490        if (selectedStride != null) {
491            stride = new int[rank];
492            for (int i=0; i<rank; i++) {
493                stride[i] = (int)selectedStride[i];
494            }
495        }
496
497        Object tmpData = buf;
498        try {
499            if (getDatatype().isUnsigned() && unsignedConverted)
500                tmpData = convertToUnsignedC(buf);
501            // assume external data files are located in the same directory as the main file.
502            HDFLibrary.HXsetdir(getFileFormat().getParent());
503
504            HDFLibrary.GRwriteimage(id, start, stride, select, tmpData);
505        }
506        catch (Exception ex) {
507            log.debug("write(): failure: ", ex);
508        }
509        finally {
510            tmpData = null;
511            close(id);
512        }
513    }
514
    // ***** need to implement from DataFormat *****
    /**
     * Retrieves the object's metadata, such as attributes, from the file.
     *
     * Metadata, such as attributes, is stored in a List. The list is built
     * once and cached; subsequent calls return the cached list.
     *
     * @return the list of metadata objects, or null if the image has no
     *         attributes or the query failed.
     *
     * @throws HDFException
     *             if the metadata can not be retrieved
     */
    @Override
    @SuppressWarnings({"rawtypes", "unchecked"})
    public List getMetadata() throws HDFException {
        // return the cached list if the attributes were already read
        if (attributeList != null) {
            log.trace("getMetadata(): attributeList != null - exit");
            return attributeList;
        }

        long id = open();
        String[] objName = {""};
        int[] grInfo = new int[4]; //ncomp, data_type, interlace, and num_attrs
        int[] idims = new int[2];
        try {
            HDFLibrary.GRgetiminfo(id, objName, grInfo, idims);
            // mask off the litend bit
            grInfo[1] = grInfo[1] & (~HDFConstants.DFNT_LITEND);
            int n = grInfo[3];

            if ((attributeList == null) && (n>0))
                attributeList = new Vector(n, 5);

            boolean b = false;
            String[] attrName = new String[1];
            int[] attrInfo = {0, 0}; // data_type, length
            for (int i=0; i<n; i++) {
                attrName[0] = "";
                try {
                    b = HDFLibrary.GRattrinfo(id, i, attrName, attrInfo);
                    // mask off the litend bit
                    attrInfo[0] = attrInfo[0] & (~HDFConstants.DFNT_LITEND);
                }
                catch (HDFException ex) {
                    log.debug("getMetadata(): GRattrinfo failure: ", ex);
                    b = false;
                }

                // skip attributes whose info could not be read
                if (!b)
                    continue;

                long[] attrDims = {attrInfo[1]};
                H4ScalarAttribute attr = new H4ScalarAttribute(this, attrName[0], new H4Datatype(attrInfo[0]), attrDims);
                attributeList.add(attr);

                // read the attribute value; the attribute stays in the list
                // even if its value cannot be read
                Object buf = null;
                try {
                    buf = H4Datatype.allocateArray(attrInfo[0], attrInfo[1]);
                }
                catch (OutOfMemoryError e) {
                    log.debug("getMetadata(): out of memory: ", e);
                    buf = null;
                }

                try {
                    HDFLibrary.GRgetattr(id, i, buf);
                }
                catch (HDFException ex) {
                    log.debug("getMetadata(): GRgetattr failure: ", ex);
                    buf = null;
                }

                if (buf != null) {
                    // character data is converted to Java strings
                    if ((attrInfo[0] == HDFConstants.DFNT_CHAR) ||
                        (attrInfo[0] ==  HDFConstants.DFNT_UCHAR8)) {
                        buf = Dataset.byteToString((byte[])buf, attrInfo[1]);
                    }

                    attr.setAttributeData(buf);
                }
            } //  (int i=0; i<n; i++)
        }
        catch (Exception ex) {
            log.debug("getMetadata(): failure: ", ex);
        }
        finally {
            close(id);
        }

        return attributeList;
    }
605
606    // ***** need to implement from DataFormat *****
607    /**
608     * Writes a specific piece of metadata (such as an attribute) into the file.
609     *
610     * If an HDF(4&amp;5) attribute exists in the file, this method updates its
611     * value. If the attribute does not exist in the file, it creates the
612     * attribute in the file and attaches it to the object. It will fail to
613     * write a new attribute to the object where an attribute with the same name
614     * already exists. To update the value of an existing attribute in the file,
615     * one needs to get the instance of the attribute by getMetadata(), change
616     * its values, then use writeMetadata() to write the value.
617     *
618     * @param info
619     *            the metadata to write.
620     *
621     * @throws Exception
622     *             if the metadata can not be written
623     */
624    @Override
625    @SuppressWarnings({"rawtypes", "unchecked"})
626    public void writeMetadata(Object info) throws Exception {
627        // only attribute metadata is supported.
628        if (!(info instanceof Attribute)) {
629            log.debug("writeMetadata(): Object not an H4ScalarAttribute - exit");
630            return;
631        }
632
633        try {
634            getFileFormat().writeAttribute(this, (H4ScalarAttribute)info, true);
635
636            if (attributeList == null)
637                attributeList = new Vector();
638
639            attributeList.add(info);
640            nAttributes = attributeList.size();
641        }
642        catch (Exception ex) {
643            log.debug("writeMetadata(): failure: ", ex);
644        }
645    }
646
    // ***** need to implement from DataFormat *****
    /**
     * Deletes an existing piece of metadata from this object.
     *
     * Removing attributes is not supported for HDF4 GR images; this method
     * is intentionally a no-op.
     *
     * @param info
     *            the metadata to delete.
     *
     * @throws HDFException
     *             if the metadata can not be removed
     */
    @Override
    public void removeMetadata(Object info) throws HDFException {
        // the HDF4 GR API provides no call to remove an attribute
        log.trace("removeMetadata(): disabled");
    }
661
662    /**
663     * Updates an existing piece of metadata attached to this object.
664     *
665     * @param info
666     *            the metadata to update.
667     *
668     * @throws Exception
669     *             if the metadata can not be updated
670     */
671    @Override
672    public void updateMetadata(Object info) throws Exception {
673        log.trace("updateMetadata(): disabled");
674    }
675
676    // Implementing HObject.
677    @Override
678    public long open() {
679        log.trace("open(): start: for file={} with ref={}", getFID(), (short) oid[1]);
680
681        long id = -1;
682        try {
683            int index = HDFLibrary.GRreftoindex(grid, (short)oid[1]);
684            id = HDFLibrary.GRselect(grid, index);
685        }
686        catch (HDFException ex) {
687            log.debug("open(): failure: ", ex);
688            id = -1;
689        }
690
691        return id;
692    }
693
694    // Implementing HObject.
695    @Override
696    public void close(long grid) {
697        try { HDFLibrary.GRendaccess(grid); }
698        catch (HDFException ex) {log.debug("close(): failure: ", ex);}
699    }
700
    // Implementing Dataset.
    /**
     * Initializes this image: reads the datatype, dimensions, compression
     * and chunking information from the file, and sets up the default
     * selection (the whole image). Runs only once; later calls return
     * immediately.
     */
    @Override
    public void init() {
        if (inited) {
            log.trace("init(): Already initialized");
            return; // already called. Initialize only once
        }

        long id = open();
        String[] objName = {""};
        int[] grInfo = new int[4]; //ncomp, data_type, interlace and num_attrs
        int[] idims = new int[2];
        try {
            HDFLibrary.GRgetiminfo(id, objName, grInfo, idims);
            // mask off the litend bit
            grInfo[1] = grInfo[1] & (~HDFConstants.DFNT_LITEND);
            datatypeID = grInfo[1];

            // get compression information
            try {
                HDFCompInfo compInfo = new HDFCompInfo();
                HDFLibrary.GRgetcompinfo(id, compInfo);

                compression.setLength(0);

                if (compInfo.ctype == HDFConstants.COMP_CODE_DEFLATE)
                    compression.append("GZIP");
                else if (compInfo.ctype == HDFConstants.COMP_CODE_SZIP)
                    compression.append("SZIP");
                else if (compInfo.ctype == HDFConstants.COMP_CODE_JPEG)
                    compression.append("JPEG");
                else if (compInfo.ctype == HDFConstants.COMP_CODE_SKPHUFF)
                    compression.append("SKPHUFF");
                else if (compInfo.ctype == HDFConstants.COMP_CODE_RLE)
                    compression.append("RLE");
                else if (compInfo.ctype == HDFConstants.COMP_CODE_NBIT)
                    compression.append("NBIT");

                if (compression.length() == 0)
                    compression.append("NONE");
            }
            catch (Exception ex) {
                log.debug("init(): get compression information failure: ", ex);
            }

            // get chunk information
            // NOTE(review): this block reads 'rank' before it is set to 2
            // below; on the first init() call rank still holds its pre-init
            // value - confirm this is the intended behavior
            try {
                HDFChunkInfo chunkInfo = new HDFChunkInfo();
                int[] cflag = {HDFConstants.HDF_NONE};
                HDFLibrary.GRgetchunkinfo(id, chunkInfo, cflag);

                storageLayout.setLength(0);

                if (cflag[0] == HDFConstants.HDF_NONE) {
                    // not chunked
                    chunkSize = null;
                    storageLayout.append("NONE");
                }
                else {
                    chunkSize = new long[rank];
                    for (int i=0; i<rank; i++)
                        chunkSize[i] = chunkInfo.chunk_lengths[i];

                    storageLayout.append("CHUNKED: ").append(chunkSize[0]);
                    for (int i = 1; i < rank; i++)
                        storageLayout.append(" X ").append(chunkSize[i]);
                }
            }
            catch (Exception ex) {
                log.debug("init(): get chunk information failure: ", ex);
            }

            inited = true;
        }
        catch (HDFException ex) {
            log.debug("init(): failure: ", ex);
        }
        finally {
            close(id);
        }

        ncomp = grInfo[0];
        isTrueColor = (ncomp >= 3);
        interlace = grInfo[2];
        rank = 2; // support only two dimensional raster image

        // data in HDF4 GR image is arranged as dim[0]=width, dim[1]=height.
        // other image data is arranged as dim[0]=height, dim[1]=width.
        selectedIndex[0] = 1;
        selectedIndex[1] = 0;

        // by default the whole image is selected
        dims = new long[rank];
        startDims = new long[rank];
        selectedDims = new long[rank];
        for (int i=0; i<rank; i++) {
            startDims[i] = 0;
            selectedDims[i] = idims[i];
            dims[i] = idims[i];
        }
    }
800
801    // ***** need to implement from ScalarDS *****
802    @Override
803    public byte[][] getPalette() {
804        if (palette != null) {
805            log.trace("getPalette(): palette != null - exit");
806            return palette;
807        }
808
809        long id = open();
810        if (id < 0) {
811            log.debug("getPalette(): Invalid ID - exit");
812            return null;
813        }
814
815        // get palette info.
816        long lutid  = -1;
817        int[] lutInfo = new int[4]; //ncomp, datatype, interlace, num_entries
818        try {
819            // find the first palette.
820            // Todo: get all the palettes
821            lutid = HDFLibrary.GRgetlutid(id, 0);
822            HDFLibrary.GRgetlutinfo(lutid, lutInfo);
823        }
824        catch (HDFException ex) {
825            log.debug("getPalette(): exit with failure: ", ex);
826            close(id);
827            return null;
828        }
829
830        // check if there is palette data. HDFLibrary.GRgetlutinfo() sometimes
831        // return true even if there is no palette data, and check if it is a
832        // RGB with 256 colors
833        if ((lutInfo[0] != 3) || (lutInfo[2] < 0) || (lutInfo[3] != 256)) {
834            close(id);
835            log.debug("getPalette(): no palette data - exit");
836            return null;
837        }
838
839        // read palette data
840        boolean b = false;
841        byte[] pal = new byte[3*256];
842        try {
843            HDFLibrary.GRreqlutil(id, lutInfo[2]);
844            b = HDFLibrary.GRreadlut(lutid, pal);
845        }
846        catch (HDFException ex) {
847            log.debug("getPalette(): failure: ", ex);
848            b = false;
849        }
850
851        if (!b) {
852            close(id);
853            log.debug("getPalette(): no palette data - exit");
854            return null;
855        }
856
857        palette = new byte[3][256];
858        if (lutInfo[2] == HDFConstants.MFGR_INTERLACE_PIXEL) {
859            // color conponents are arranged in RGB, RGB, RGB, ...
860            for (int i=0; i<256; i++) {
861                palette[0][i] = pal[i*3];
862                palette[1][i] = pal[i*3+1];
863                palette[2][i] = pal[i*3+2];
864            }
865        }
866        else {
867            for (int i=0; i<256; i++) {
868                palette[0][i] = pal[i];
869                palette[1][i] = pal[256+i];
870                palette[2][i] = pal[512+i];
871            }
872        }
873
874        close(id);
875
876        return palette;
877    }
878
879    /**
880     * Returns the number of components of this image data.
881     *
882     * @return the number of components
883     */
884    public int getComponentCount() {
885        return ncomp;
886    }
887
888    /**
889     * Creates a new image.
890     *
891     * @param name the name of the dataset to create.
892     * @param pgroup the parent group of the new dataset.
893     * @param type the datatype of the dataset.
894     * @param dims the dimension size of the dataset.
895     * @param maxdims the max dimension size of the dataset.
896     * @param chunks the chunk size of the dataset.
897     * @param gzip the level of the gzip compression.
898     * @param ncomp number of components of the image data.
899     * @param interlace the interlace mode.
900     * @param data the array of data values.
901     *
902     * @return the new image if successful. Otherwise returns null.
903     *
904     * @throws Exception if the image can not be created
905     */
906    public static H4GRImage create(String name, Group pgroup, Datatype type,
907            long[] dims, long[] maxdims, long[] chunks, int gzip, int ncomp, int interlace, Object data) throws Exception {
908        log.trace("create(): start: name={} parentGroup={} type={} gzip={} ncomp={} interlace={}", name, pgroup, type, gzip, ncomp, interlace);
909
910        H4GRImage dataset = null;
911        if ((name == null) ||
912            (pgroup == null) ||
913            (dims == null) ||
914            ((gzip>0) && (chunks==null))) {
915            log.debug("create(): one or more parameters are null - exit");
916            return null;
917        }
918
919        H4File file = (H4File)pgroup.getFileFormat();
920        if (file == null) {
921            log.debug("create(): Parent group FileFormat is null - exit");
922            return null;
923        }
924
925        String path = HObject.SEPARATOR;
926        if (!pgroup.isRoot())
927            path = pgroup.getPath()+pgroup.getName()+HObject.SEPARATOR;
928        if (interlace == ScalarDS.INTERLACE_PLANE)
929            interlace = HDFConstants.MFGR_INTERLACE_COMPONENT;
930        else
931            interlace = HDFConstants.MFGR_INTERLACE_PIXEL;
932
933        int rank = 2;
934        int[] idims = new int[rank];
935        int[] imaxdims = new int[rank];
936        int[] start = new int[rank];
937        for (int i=0; i<rank; i++) {
938            idims[i] = (int)dims[i];
939            if (maxdims != null)
940                imaxdims[i] = (int)maxdims[i];
941            else
942                imaxdims[i] = idims[i];
943            start[i] = 0;
944        }
945
946        int[] ichunks = null;
947        if (chunks != null) {
948            ichunks = new int[rank];
949            for (int i=0; i<rank; i++)
950                ichunks[i] = (int)chunks[i];
951        }
952
953        long grid = -1;
954        long vgid = -1;
955        long gid = (file).getGRAccessID();
956        long tid = type.createNative();
957
958        if(tid >= 0) {
959            try {
960                grid = HDFLibrary.GRcreate(gid, name, ncomp, tid, interlace, idims);
961            }
962            catch (Exception ex) {
963                log.debug("create(): exit with failure: ", ex);
964                throw (ex);
965            }
966        }
967
968        if (grid < 0) {
969            log.debug("create(): Invalid GR ID - exit");
970            throw (new HDFException("Unable to create the new dataset."));
971        }
972
973        if ((grid > 0) && (data != null))
974            HDFLibrary.GRwriteimage(grid, start, null, idims, data);
975
976        if (chunks != null) {
977            // set chunk
978            HDFChunkInfo chunkInfo = new HDFChunkInfo(ichunks);
979            HDFLibrary.GRsetchunk(grid, chunkInfo, HDFConstants.HDF_CHUNK);
980        }
981
982        if (gzip > 0) {
983            // set compression
984            int compType = HDFConstants.COMP_CODE_DEFLATE;
985            HDFDeflateCompInfo compInfo = new HDFDeflateCompInfo();
986            compInfo.level = gzip;
987            HDFLibrary.GRsetcompress(grid, compType, compInfo);
988        }
989
990        int ref = HDFLibrary.GRidtoref(grid);
991
992        if (!pgroup.isRoot()) {
993            // add the dataset to the parent group
994            vgid = pgroup.open();
995            if (vgid < 0) {
996                if (grid > 0)
997                    HDFLibrary.GRendaccess(grid);
998                log.debug("create(): Invalid VG ID - exit");
999                throw (new HDFException("Unable to open the parent group."));
1000            }
1001
1002            HDFLibrary.Vaddtagref(vgid, HDFConstants.DFTAG_RI, ref);
1003
1004            pgroup.close(vgid);
1005        }
1006
1007        try {
1008            if (grid > 0)
1009                HDFLibrary.GRendaccess(grid);
1010        }
1011        catch (Exception ex) {
1012            log.debug("create(): GRendaccess failure: ", ex);
1013        }
1014
1015        long[] oid = {HDFConstants.DFTAG_NDG, ref};
1016        dataset = new H4GRImage(file, name, path, oid);
1017
1018        if (dataset != null)
1019            pgroup.addToMemberList(dataset);
1020
1021        return dataset;
1022    }
1023
1024    /**
1025     * copy attributes from one GR image to another GR image
1026     */
1027    private void copyAttribute(long srcdid, long dstdid, int numberOfAttributes) {
1028        log.trace("copyAttribute(): start: srcdid={} dstdid={} numAttributes={}", srcdid, dstdid, numberOfAttributes);
1029
1030        if (numberOfAttributes <= 0) {
1031            log.debug("copyAttribute(): numberOfAttributes={}", numberOfAttributes);
1032            return;
1033        }
1034
1035        try {
1036            boolean b = false;
1037            String[] attrName = new String[1];
1038            int[] attrInfo = {0, 0};
1039            for (int i=0; i<numberOfAttributes; i++) {
1040                attrName[0] = "";
1041                try {
1042                    b = HDFLibrary.GRattrinfo(srcdid, i, attrName, attrInfo);
1043                }
1044                catch (HDFException ex) {
1045                    log.trace("copyAttribute(): attribute[{}] GRattrinfo failure: ", i, ex);
1046                    b = false;
1047                }
1048
1049                if (!b)
1050                    continue;
1051
1052                // read attribute data from source dataset
1053                byte[] attrBuff = new byte[attrInfo[1] * HDFLibrary.DFKNTsize(attrInfo[0])];
1054                try {
1055                    HDFLibrary.GRgetattr(srcdid, i, attrBuff);
1056                }
1057                catch (Exception ex) {
1058                    log.trace("copyAttribute(): attribute[{}] GRgetattr failure: ", i, ex);
1059                    attrBuff = null;
1060                }
1061
1062                if (attrBuff == null) {
1063                    log.debug("copyAttribute(): attrBuff[{}] is null continue", i);
1064                    continue;
1065                }
1066
1067                // attach attribute to the destination dataset
1068                HDFLibrary.GRsetattr(dstdid, attrName[0], attrInfo[0], attrInfo[1], attrBuff);
1069            } //  (int i=0; i<numberOfAttributes; i++)
1070        }
1071        catch (Exception ex) {
1072            log.debug("copyAttribute(): failure: ", ex);
1073        }
1074    }
1075
    //Implementing DataFormat
    /**
     * Retrieves the object's metadata, such as attributes, from the file.
     *
     * Metadata, such as attributes, is stored in a List.
     *
     * This property-list variant is not implemented for HDF4 GR images and
     * always throws {@link UnsupportedOperationException}; callers should use
     * the no-argument metadata retrieval instead.
     *
     * @param attrPropList
     *             the list of properties to get
     *
     * @return the list of metadata objects.
     *
     * @throws Exception
     *             if the metadata can not be retrieved
     */
    @SuppressWarnings("rawtypes")
    public List getMetadata(int... attrPropList) throws Exception {
        throw new UnsupportedOperationException("getMetadata(int... attrPropList) is not supported");
    }
1094}