001/*****************************************************************************
002 * Copyright by The HDF Group.                                               *
003 * Copyright by the Board of Trustees of the University of Illinois.         *
004 * All rights reserved.                                                      *
005 *                                                                           *
006 * This file is part of the HDF Java Products distribution.                  *
007 * The full copyright notice, including terms governing use, modification,   *
008 * and redistribution, is contained in the COPYING file, which can be found  *
009 * at the root of the source code distribution tree,                         *
010 * or in https://www.hdfgroup.org/licenses.                                  *
011 * If you do not have access to either file, you may request a copy from     *
012 * help@hdfgroup.org.                                                        *
013 ****************************************************************************/
014
015package hdf.object.h4;
016
017import java.util.List;
018import java.util.Vector;
019
020import org.slf4j.Logger;
021import org.slf4j.LoggerFactory;
022
023import hdf.hdflib.HDFChunkInfo;
024import hdf.hdflib.HDFCompInfo;
025import hdf.hdflib.HDFConstants;
026import hdf.hdflib.HDFDeflateCompInfo;
027import hdf.hdflib.HDFException;
028import hdf.hdflib.HDFLibrary;
029
030import hdf.object.Attribute;
031import hdf.object.Dataset;
032import hdf.object.Datatype;
033import hdf.object.FileFormat;
034import hdf.object.Group;
035import hdf.object.HObject;
036import hdf.object.ScalarDS;
037import hdf.object.MetaDataContainer;
038
039import hdf.object.h4.H4ScalarAttribute;
040
041/**
042 * H4GRImage describes an HDF4 general raster(GR) image and operations performed on
043 * the GR image. An HDF4 raster image is a two-dimension array of pixel values.
044 *
045 * Every GR data set must contain the following components: image array, name,
046 * pixel type, and dimensions. The name, dimensions, and pixel type must be
047 * supplied by the user at the time the GR data set is defined.
048 *
049 * An image array is a two-dimensional array of pixels. Each element in an image
050 * array corresponds to one pixel and each pixel can consist of a number of
051 * color component values or pixel components, e.g., Red-Green-Blue or RGB,
052 * Cyan-Magenta-Yellow-Black or CMYK, etc. Pixel components can be represented
053 * by different methods (8-bit lookup table or 24-bit direct representation) and
054 * may have different data types. The data type of pixel components and the number
055 * of components in each pixel are collectively known as the pixel type.
056 *
057 * <b>How to Select a Subset</b>
058 *
059 * Dataset defines APIs for reading, writing and subsetting a dataset. No function is
060 * defined to select a subset of a data array. The selection is done in an implicit way.
061 * Function calls to dimension information such as getSelectedDims() return an array
062 * of dimension values, which is a reference to the array in the dataset object.
063 * Changes of the array outside the dataset object directly change the values of
064 * the array in the dataset object. It is like pointers in C.
065 *
066 * The following is an example of how to make a subset. In the example, the dataset
067 * is a 4-dimension with size of [200][100][50][10], i.e.
068 * dims[0]=200; dims[1]=100; dims[2]=50; dims[3]=10; <br>
069 * We want to select every other data point in dims[1] and dims[2]
070 * <pre>
071     int rank = dataset.getRank();   // number of dimensions of the dataset
072     long[] dims = dataset.getDims(); // the dimension sizes of the dataset
     long[] selected = dataset.getSelectedDims(); // the selected size of the dataset
074     long[] start = dataset.getStartDims(); // the offset of the selection
075     long[] stride = dataset.getStride(); // the stride of the dataset
076     int[]  selectedIndex = dataset.getSelectedIndex(); // the selected dimensions for display
077
078     // select dim1 and dim2 as 2D data for display, and slice through dim0
079     selectedIndex[0] = 1;
080     selectedIndex[1] = 2;
     selectedIndex[2] = 0;
082
083     // reset the selection arrays
084     for (int i=0; i&lt;rank; i++) {
085         start[i] = 0;
086         selected[i] = 1;
087         stride[i] = 1;
088    }
089
090    // set stride to 2 on dim1 and dim2 so that every other data point is selected.
091    stride[1] = 2;
092    stride[2] = 2;
093
094    // set the selection size of dim1 and dim2
095    selected[1] = dims[1]/stride[1];
    selected[2] = dims[2]/stride[2];
097
    // when dataset.read() is called, the selection above will be used since
099    // the dimension arrays are passed by reference. Changes of these arrays
100    // outside the dataset object directly change the values of these array
101    // in the dataset object.
102
103 * </pre>
104 *
105 * @version 1.1 9/4/2007
106 * @author Peter X. Cao
107 */
108public class H4GRImage extends ScalarDS implements MetaDataContainer
109{
    private static final long serialVersionUID = 1029672744963360976L;

    private static final Logger   log = LoggerFactory.getLogger(H4GRImage.class);

    /**
     * The list of attributes of this data object. Members of the list are
     * instance of H4ScalarAttribute.
     */
    @SuppressWarnings("rawtypes")
    private List                            attributeList;

    /**
     * The GR interface identifier obtained from GRstart(fid)
     */
    private long                            grid;

    /**
     * The number of components in the raster image
     */
    private int                             ncomp;

    /** the datatype identifier */
    private long                            datatypeID = -1;

    /** the number of attributes; -1 means the count has not been queried from the file yet */
    private int                             nAttributes = -1;
137
138    /**
139     * Creates a H4GRImage object with specific name and path.
140     *
141     * @param theFile the HDF file.
142     * @param name the name of this H4GRImage.
143     * @param path the full path of this H4GRImage.
144     */
    public H4GRImage(FileFormat theFile, String name, String path) {
        // delegate to the full constructor with no object identifier
        this(theFile, name, path, null);
    }
148
149    /**
150     * Creates a H4GRImage object with specific name, path, and object ID.
151     *
152     * @param theFile the HDF file.
153     * @param name the name of this H4GRImage.
154     * @param path the full path of this H4GRImage.
155     * @param oid the unique identifier of this data object.
156     */
    @SuppressWarnings("deprecation")
    public H4GRImage(FileFormat theFile, String name, String path, long[] oid) {
        super (theFile, name, path, oid);
        // palette is loaded lazily by getPalette()
        palette = null;
        // a GR raster is always treated and displayed as an image
        isImage = isImageDisplay = true;
        unsignedConverted = false;
        // cache the GR interface ID from the owning file; used by open()
        grid = ((H4File)getFileFormat()).getGRAccessID();
    }
165
166    /*
167     * (non-Javadoc)
168     * @see hdf.object.DataFormat#hasAttribute()
169     */
170    @Override
171    public boolean hasAttribute() {
172        if (nAttributes < 0) {
173            grid = ((H4File)getFileFormat()).getGRAccessID();
174
175            long id = open();
176
177            if (id >= 0) {
178                String[] objName = {""};
179                int[] grInfo = new int[4]; //ncomp, data_type, interlace, and num_attrs
180                int[] idims = new int[2];
181                try {
182                    HDFLibrary.GRgetiminfo(id, objName, grInfo, idims);
183                    nAttributes = grInfo[3];
184                }
185                catch (Exception ex) {
186                    log.trace("hasAttribute() failure: ", ex);
187                    nAttributes = 0;
188                }
189
190                log.trace("hasAttribute(): nAttributes={}", nAttributes);
191
192                close(id);
193            }
194        }
195
196        return (nAttributes > 0);
197    }
198
199    // To do: Implementing Dataset
200    @Override
201    public Dataset copy(Group pgroup, String dname, long[] dims, Object buff) throws Exception {
202        log.trace("copy(): start: parentGroup={} datasetName={}", pgroup, dname);
203
204        Dataset dataset = null;
205        long srcdid = -1;
206        long dstdid = -1;
207        String path = null;
208        int[] count = null;
209
210        if (pgroup == null) {
211            log.debug("copy(): Parent group is null - exit");
212            return null;
213        }
214
215        if (pgroup.isRoot())
216            path = HObject.SEPARATOR;
217        else
218            path = pgroup.getPath()+pgroup.getName()+HObject.SEPARATOR;
219
220        srcdid = open();
221        if (srcdid < 0) {
222            log.debug("copy(): Invalid source dataset ID - exit");
223            return null;
224        }
225
226        if (dims != null) {
227            count = new int[2];
228            count[0] = (int)dims[0];
229            count[1] = (int)dims[1];
230        }
231
232        int[] grInfo = new int[4]; //ncomp, data_type, interlace and num_attrs
233        try {
234            String[] tmpName = {""};
235            int[] tmpDims = new int[2];
236            HDFLibrary.GRgetiminfo(srcdid, tmpName, grInfo, tmpDims);
237            if (count == null)
238                count = tmpDims;
239        }
240        catch (HDFException ex) {
241            log.debug("copy(): GRgetiminfo failure: ", ex);
242        }
243
244        ncomp = grInfo[0];
245        long tid = grInfo[1];
246        int interlace = grInfo[2];
247        int numberOfAttributes = grInfo[3];
248        dstdid = HDFLibrary.GRcreate( ((H4File)pgroup.getFileFormat()).getGRAccessID(),
249                dname, ncomp, tid, interlace, count);
250        if (dstdid < 0) {
251            log.debug("copy(): Invalid dest dataset ID - exit");
252            return null;
253        }
254
255        int ref = HDFLibrary.GRidtoref(dstdid);
256        if (!pgroup.isRoot()) {
257            long vgid = pgroup.open();
258            HDFLibrary.Vaddtagref(vgid, HDFConstants.DFTAG_RIG, ref);
259            pgroup.close(vgid);
260        }
261
262        // read data from the source dataset
263        int[] start = {0, 0};
264        if ((buff == null) && (count != null)) {
265            buff = new byte[count[0]*count[1] * HDFLibrary.DFKNTsize(tid)];
266            HDFLibrary.GRreadimage(srcdid, start, null, count, buff);
267        }
268
269        // write the data into the destination dataset
270        HDFLibrary.GRwriteimage(dstdid, start, null, count, buff);
271
272        // copy palette
273        long pid = HDFLibrary.GRgetlutid(srcdid, 0);
274        int[] palInfo = new int[4];
275
276        HDFLibrary.GRgetlutinfo(pid, palInfo);
277        palInfo[1] = HDFConstants.DFNT_UINT8; // support unsigned byte only. Other type does not work
278        int palSize = palInfo[0]*palInfo[3];
279        byte[] palBuff = new byte[palSize];
280        HDFLibrary.GRreadlut(pid, palBuff);
281        pid = HDFLibrary.GRgetlutid(dstdid, 0);
282        HDFLibrary.GRwritelut(pid, palInfo[0], palInfo[1], palInfo[2], palInfo[3], palBuff);
283
284        // copy attributes from one object to the new object
285        log.trace("copy(): copyAttributes: numAttributes={}", numberOfAttributes);
286        copyAttribute(srcdid, dstdid, numberOfAttributes);
287
288        long[] oid = {HDFConstants.DFTAG_RIG, ref};
289        dataset = new H4GRImage(pgroup.getFileFormat(), dname, path, oid);
290
291        pgroup.addToMemberList(dataset);
292
293        close(srcdid);
294
295        try {
296            HDFLibrary.GRendaccess(dstdid);
297        }
298        catch (HDFException ex) {
299            log.debug("copy(): GRendaccess failure: ", ex);
300        }
301
302        return dataset;
303    }
304
305    // implementing ScalarDS
306    /**
307     * Returns the datatype of the data object.
308     *
309     * @return the datatype of the data object.
310     */
311    @Override
312    public Datatype getDatatype() {
313        if (!inited)
314            init();
315
316        if (datatype == null) {
317            try {
318                datatype = new H4Datatype(datatypeID);
319            }
320            catch (Exception ex) {
321                log.debug("getDatatype(): failed to create datatype: ", ex);
322                datatype = null;
323            }
324        }
325
326        return datatype;
327    }
328
329    // Implementing Dataset
330    @Override
331    public byte[] readBytes() throws HDFException {
332        byte[] theData = null;
333
334        if (!isInited())
335            init();
336
337        long id = open();
338        if (id < 0) {
339            log.debug("readBytes(): Invalid ID - exit");
340            return null;
341        }
342
343        try {
344            // set the interlacing scheme for reading image data
345            HDFLibrary.GRreqimageil(id, interlace);
346            int datasize = (int)(getWidth()*getHeight()*ncomp);
347            int size = HDFLibrary.DFKNTsize(datatypeID)*datasize;
348            theData = new byte[size];
349            int[] start = {(int)startDims[0], (int)startDims[1]};
350            int[] select = {(int)selectedDims[0], (int)selectedDims[1]};
351
352            int[] stride = null;
353            if (selectedStride != null) {
354                stride = new int[rank];
355                for (int i=0; i<rank; i++)
356                    stride[i] = (int)selectedStride[i];
357            }
358
359            HDFLibrary.GRreadimage(id, start, stride, select, theData);
360        }
361        catch (Exception ex) {
362            log.debug("readBytes(): failure: ", ex);
363        }
364        finally {
365            close(id);
366        }
367
368        return theData;
369    }
370
371    // ***** need to implement from DataFormat *****
372    /**
373     * Reads the data from file.
374     *
375     * read() reads the data from file to a memory buffer and returns the memory
376     * buffer. The dataset object does not hold the memory buffer. To store the
377     * memory buffer in the dataset object, one must call getData().
378     *
379     * By default, the whole dataset is read into memory. Users can also select
380     * a subset to read. Subsetting is done in an implicit way.
381     *
382     * @return the data read from file.
383     *
384     * @see #getData()
385     *
386     * @throws HDFException
387     *             if object can not be read
388     */
389    @Override
390    public Object read() throws HDFException {
391        Object theData = null;
392
393        if (!isInited()) init();
394
395        long id = open();
396        if (id < 0) {
397            log.debug("read(): Invalid ID");
398            return null;
399        }
400
401        try {
402            // set the interlacing scheme for reading image data
403            HDFLibrary.GRreqimageil(id, interlace);
404            int datasize = (int)(getWidth()*getHeight()*ncomp);
405
406            theData = H4Datatype.allocateArray(datatypeID, datasize);
407
408            if (theData != null) {
409                // assume external data files are located in the same directory as the main file.
410                HDFLibrary.HXsetdir(getFileFormat().getParent());
411
412                int[] start = {(int)startDims[0], (int)startDims[1]};
413                int[] select = {(int)selectedDims[0], (int)selectedDims[1]};
414
415                int[] stride = null;
416                if (selectedStride != null) {
417                    stride = new int[rank];
418                    for (int i=0; i<rank; i++)
419                        stride[i] = (int)selectedStride[i];
420                }
421
422                HDFLibrary.GRreadimage(id, start, stride, select, theData);
423            }
424        }
425        catch (Exception ex) {
426            log.debug("read(): failure: ", ex);
427        }
428        finally {
429            close(id);
430        }
431
432        if ( (rank >1) && (selectedIndex[1]>selectedIndex[0]))
433            isDefaultImageOrder = false;
434        else
435            isDefaultImageOrder = true;
436
437        log.trace("read(): isDefaultImageOrder={}", isDefaultImageOrder);
438        return theData;
439    }
440
441    // Implementing DataFormat
442    /**
443     * Writes a memory buffer to the object in the file.
444     *
445     * @param buf
446     *            the data to write
447     *
448     * @throws HDFException
449     *             if data can not be written
450     */
451    @SuppressWarnings("deprecation")
452    @Override
453    public void write(Object buf) throws HDFException {
454        if (buf == null) {
455            log.debug("write(): buf is null - exit");
456            return;
457        }
458
459        long id = open();
460        if (id < 0) {
461            log.debug("write(): Invalid ID - exit");
462            return;
463        }
464
465        int[] select = new int[rank];
466        int[] start = new int[rank];
467        for (int i=0; i<rank; i++) {
468            select[i] = (int)selectedDims[i];
469            start[i] = (int)startDims[i];
470        }
471
472        int[] stride = null;
473        if (selectedStride != null) {
474            stride = new int[rank];
475            for (int i=0; i<rank; i++) {
476                stride[i] = (int)selectedStride[i];
477            }
478        }
479
480        Object tmpData = buf;
481        try {
482            if (getDatatype().isUnsigned() && unsignedConverted)
483                tmpData = convertToUnsignedC(buf);
484            // assume external data files are located in the same directory as the main file.
485            HDFLibrary.HXsetdir(getFileFormat().getParent());
486
487            HDFLibrary.GRwriteimage(id, start, stride, select, tmpData);
488        }
489        catch (Exception ex) {
490            log.debug("write(): failure: ", ex);
491        }
492        finally {
493            tmpData = null;
494            close(id);
495        }
496    }
497
498    // ***** need to implement from DataFormat *****
499    /**
500     * Retrieves the object's metadata, such as attributes, from the file.
501     *
502     * Metadata, such as attributes, is stored in a List.
503     *
504     * @return the list of metadata objects.
505     *
506     * @throws HDFException
507     *             if the metadata can not be retrieved
508     */
    @Override
    @SuppressWarnings({"rawtypes", "unchecked"})
    public List getMetadata() throws HDFException {
        // Return the cached list if the attributes were already read.
        if (attributeList != null) {
            log.trace("getMetadata(): attributeList != null - exit");
            return attributeList;
        }

        long id = open();
        String[] objName = {""};
        int[] grInfo = new int[4]; //ncomp, data_type, interlace, and num_attrs
        int[] idims = new int[2];
        try {
            HDFLibrary.GRgetiminfo(id, objName, grInfo, idims);
            // mask off the litend bit
            grInfo[1] = grInfo[1] & (~HDFConstants.DFNT_LITEND);
            int n = grInfo[3];

            // allocate the list only when the image actually has attributes;
            // if n == 0 the list stays null and null is returned
            if ((attributeList == null) && (n>0))
                attributeList = new Vector(n, 5);

            boolean b = false;
            String[] attrName = new String[1];
            int[] attrInfo = {0, 0}; // data_type, length
            for (int i=0; i<n; i++) {
                attrName[0] = "";
                try {
                    b = HDFLibrary.GRattrinfo(id, i, attrName, attrInfo);
                    // mask off the litend bit
                    attrInfo[0] = attrInfo[0] & (~HDFConstants.DFNT_LITEND);
                }
                catch (HDFException ex) {
                    log.debug("getMetadata(): GRattrinfo failure: ", ex);
                    b = false;
                }

                // skip attributes whose info could not be read
                if (!b)
                    continue;

                long[] attrDims = {attrInfo[1]};
                // the attribute is added to the list even if reading its value
                // below fails; in that case its data simply stays unset
                H4ScalarAttribute attr = new H4ScalarAttribute(this, attrName[0], new H4Datatype(attrInfo[0]), attrDims);
                attributeList.add(attr);

                Object buf = null;
                try {
                    buf = H4Datatype.allocateArray(attrInfo[0], attrInfo[1]);
                }
                catch (OutOfMemoryError e) {
                    log.debug("getMetadata(): out of memory: ", e);
                    buf = null;
                }

                try {
                    HDFLibrary.GRgetattr(id, i, buf);
                }
                catch (HDFException ex) {
                    log.debug("getMetadata(): GRgetattr failure: ", ex);
                    buf = null;
                }

                if (buf != null) {
                    // character attribute values are converted to Java strings
                    if ((attrInfo[0] == HDFConstants.DFNT_CHAR) ||
                        (attrInfo[0] ==  HDFConstants.DFNT_UCHAR8)) {
                        buf = Dataset.byteToString((byte[])buf, attrInfo[1]);
                    }

                    attr.setAttributeData(buf);
                }
            } //  (int i=0; i<n; i++)
        }
        catch (Exception ex) {
            log.debug("getMetadata(): failure: ", ex);
        }
        finally {
            close(id);
        }

        // may be null when the image has no attributes
        return attributeList;
    }
588
589    // ***** need to implement from DataFormat *****
590    /**
591     * Writes a specific piece of metadata (such as an attribute) into the file.
592     *
593     * If an HDF(4&amp;5) attribute exists in the file, this method updates its
594     * value. If the attribute does not exist in the file, it creates the
595     * attribute in the file and attaches it to the object. It will fail to
596     * write a new attribute to the object where an attribute with the same name
597     * already exists. To update the value of an existing attribute in the file,
598     * one needs to get the instance of the attribute by getMetadata(), change
599     * its values, then use writeMetadata() to write the value.
600     *
601     * @param info
602     *            the metadata to write.
603     *
604     * @throws Exception
605     *             if the metadata can not be written
606     */
607    @Override
608    @SuppressWarnings({"rawtypes", "unchecked"})
609    public void writeMetadata(Object info) throws Exception {
610        // only attribute metadata is supported.
611        if (!(info instanceof Attribute)) {
612            log.debug("writeMetadata(): Object not an H4ScalarAttribute - exit");
613            return;
614        }
615
616        try {
617            getFileFormat().writeAttribute(this, (H4ScalarAttribute)info, true);
618
619            if (attributeList == null)
620                attributeList = new Vector();
621
622            attributeList.add(info);
623            nAttributes = attributeList.size();
624        }
625        catch (Exception ex) {
626            log.debug("writeMetadata(): failure: ", ex);
627        }
628    }
629
630    // ***** need to implement from DataFormat *****
631    /**
632     * Deletes an existing piece of metadata from this object.
633     *
634     * @param info
635     *            the metadata to delete.
636     *
637     * @throws HDFException
638     *             if the metadata can not be removed
639     */
    @Override
    public void removeMetadata(Object info) throws HDFException {
        // Intentionally a no-op: metadata removal is disabled for this class.
        log.trace("removeMetadata(): disabled");
    }
644
645    /**
646     * Updates an existing piece of metadata attached to this object.
647     *
648     * @param info
649     *            the metadata to update.
650     *
651     * @throws Exception
652     *             if the metadata can not be updated
653     */
    @Override
    public void updateMetadata(Object info) throws Exception {
        // Intentionally a no-op: in-place metadata update is disabled for this class.
        log.trace("updateMetadata(): disabled");
    }
658
659    // Implementing HObject.
660    @Override
661    public long open() {
662        log.trace("open(): start: for file={} with ref={}", getFID(), (short) oid[1]);
663
664        long id = -1;
665        try {
666            int index = HDFLibrary.GRreftoindex(grid, (short)oid[1]);
667            id = HDFLibrary.GRselect(grid, index);
668        }
669        catch (HDFException ex) {
670            log.debug("open(): failure: ", ex);
671            id = -1;
672        }
673
674        return id;
675    }
676
677    // Implementing HObject.
678    @Override
679    public void close(long grid) {
680        try { HDFLibrary.GRendaccess(grid); }
681        catch (HDFException ex) {log.debug("close(): failure: ", ex);}
682    }
683
    // Implementing Dataset.
    /**
     * Retrieves the image information (datatype, compression, chunking and
     * dimension sizes) from the file and initializes the selection arrays.
     * Runs only once; subsequent calls return immediately.
     */
    @Override
    public void init() {
        if (inited) {
            log.trace("init(): Already initialized");
            return; // already called. Initialize only once
        }

        long id = open();
        String[] objName = {""};
        int[] grInfo = new int[4]; //ncomp, data_type, interlace and num_attrs
        int[] idims = new int[2];
        try {
            HDFLibrary.GRgetiminfo(id, objName, grInfo, idims);
            // mask off the litend bit
            grInfo[1] = grInfo[1] & (~HDFConstants.DFNT_LITEND);
            datatypeID = grInfo[1];

            // get compression information
            try {
                HDFCompInfo compInfo = new HDFCompInfo();
                HDFLibrary.GRgetcompinfo(id, compInfo);

                compression.setLength(0);

                if (compInfo.ctype == HDFConstants.COMP_CODE_DEFLATE)
                    compression.append("GZIP");
                else if (compInfo.ctype == HDFConstants.COMP_CODE_SZIP)
                    compression.append("SZIP");
                else if (compInfo.ctype == HDFConstants.COMP_CODE_JPEG)
                    compression.append("JPEG");
                else if (compInfo.ctype == HDFConstants.COMP_CODE_SKPHUFF)
                    compression.append("SKPHUFF");
                else if (compInfo.ctype == HDFConstants.COMP_CODE_RLE)
                    compression.append("RLE");
                else if (compInfo.ctype == HDFConstants.COMP_CODE_NBIT)
                    compression.append("NBIT");

                // unrecognized or absent compression codes are reported as NONE
                if (compression.length() == 0)
                    compression.append("NONE");
            }
            catch (Exception ex) {
                log.debug("init(): get compression information failure: ", ex);
            }

            // get chunk information
            try {
                HDFChunkInfo chunkInfo = new HDFChunkInfo();
                int[] cflag = {HDFConstants.HDF_NONE};
                HDFLibrary.GRgetchunkinfo(id, chunkInfo, cflag);

                storageLayout.setLength(0);

                if (cflag[0] == HDFConstants.HDF_NONE) {
                    chunkSize = null;
                    storageLayout.append("NONE");
                }
                else {
                    chunkSize = new long[rank];
                    for (int i=0; i<rank; i++)
                        chunkSize[i] = chunkInfo.chunk_lengths[i];

                    // describe the layout as "CHUNKED: d0 X d1 X ..."
                    storageLayout.append("CHUNKED: ").append(chunkSize[0]);
                    for (int i = 1; i < rank; i++)
                        storageLayout.append(" X ").append(chunkSize[i]);
                }
            }
            catch (Exception ex) {
                log.debug("init(): get chunk information failure: ", ex);
            }

            inited = true;
        }
        catch (HDFException ex) {
            log.debug("init(): failure: ", ex);
        }
        finally {
            close(id);
        }

        // NOTE: if GRgetiminfo failed above, grInfo/idims remain zeros and the
        // fields below are filled with zero values while inited stays false.
        ncomp = grInfo[0];
        isTrueColor = (ncomp >= 3);
        interlace = grInfo[2];
        rank = 2; // support only two dimensional raster image

        // data in HDF4 GR image is arranged as dim[0]=width, dim[1]=height.
        // other image data is arranged as dim[0]=height, dim[1]=width.
        selectedIndex[0] = 1;
        selectedIndex[1] = 0;

        // by default the whole image is selected, starting at the origin
        dims = new long[rank];
        startDims = new long[rank];
        selectedDims = new long[rank];
        for (int i=0; i<rank; i++) {
            startDims[i] = 0;
            selectedDims[i] = idims[i];
            dims[i] = idims[i];
        }
    }
783
784    // ***** implement from ScalarDS *****
785
786    /*
787     * (non-Javadoc)
788     * @see hdf.object.ScalarDS#readPalette(int)
789     */
    @Override
    public byte[][] readPalette(int idx) {
        // the palette index is ignored; only the first palette is supported (see getPalette())
        return getPalette();
    }
794
795    /*
796     * (non-Javadoc)
797     * @see hdf.object.ScalarDS#NumberOfPalettes()
798     */
799    @Override
800    public int getNumberOfPalettes() {
801        if (palette != null)
802            return 1;
803        return 0;
804    }
805
    /**
     * Returns the first palette of this image as byte[3][256], reading it from
     * the file on first use and caching it; palette[0]/[1]/[2] hold the
     * red/green/blue components respectively. Returns null if the image has no
     * usable palette (only 3-component, 256-entry palettes are supported).
     */
    @Override
    public byte[][] getPalette() {
        if (palette != null) {
            log.trace("getPalette(): palette != null - exit");
            return palette;
        }

        long id = open();
        if (id < 0) {
            log.debug("getPalette(): Invalid ID - exit");
            return null;
        }

        // get palette info.
        long lutid  = -1;
        int[] lutInfo = new int[4]; //ncomp, datatype, interlace, num_entries
        try {
            // find the first palette.
            // Todo: get all the palettes
            lutid = HDFLibrary.GRgetlutid(id, 0);
            HDFLibrary.GRgetlutinfo(lutid, lutInfo);
        }
        catch (HDFException ex) {
            log.debug("getPalette(): exit with failure: ", ex);
            close(id);
            return null;
        }

        // check if there is palette data. HDFLibrary.GRgetlutinfo() sometimes
        // return true even if there is no palette data, and check if it is a
        // RGB with 256 colors
        if ((lutInfo[0] != 3) || (lutInfo[2] < 0) || (lutInfo[3] != 256)) {
            close(id);
            log.debug("getPalette(): no palette data - exit");
            return null;
        }

        // read palette data
        boolean b = false;
        byte[] pal = new byte[3*256];
        try {
            // request the palette in its stored interlace mode before reading
            HDFLibrary.GRreqlutil(id, lutInfo[2]);
            b = HDFLibrary.GRreadlut(lutid, pal);
        }
        catch (HDFException ex) {
            log.debug("getPalette(): failure: ", ex);
            b = false;
        }

        if (!b) {
            close(id);
            log.debug("getPalette(): no palette data - exit");
            return null;
        }

        // unpack the flat buffer into per-component rows based on the interlace mode
        palette = new byte[3][256];
        if (lutInfo[2] == HDFConstants.MFGR_INTERLACE_PIXEL) {
            // color components are arranged in RGB, RGB, RGB, ...
            for (int i=0; i<256; i++) {
                palette[0][i] = pal[i*3];
                palette[1][i] = pal[i*3+1];
                palette[2][i] = pal[i*3+2];
            }
        }
        else {
            // components are arranged plane by plane: 256 reds, 256 greens, 256 blues
            for (int i=0; i<256; i++) {
                palette[0][i] = pal[i];
                palette[1][i] = pal[256+i];
                palette[2][i] = pal[512+i];
            }
        }

        close(id);

        return palette;
    }
882
883    /**
884     * Returns the number of components of this image data.
885     *
886     * @return the number of components
887     */
888    public int getComponentCount() {
889        return ncomp;
890    }
891
892    /**
893     * Creates a new image.
894     *
895     * @param name the name of the dataset to create.
896     * @param pgroup the parent group of the new dataset.
897     * @param type the datatype of the dataset.
898     * @param dims the dimension size of the dataset.
899     * @param maxdims the max dimension size of the dataset.
900     * @param chunks the chunk size of the dataset.
901     * @param gzip the level of the gzip compression.
902     * @param ncomp number of components of the image data.
903     * @param interlace the interlace mode.
904     * @param data the array of data values.
905     *
906     * @return the new image if successful. Otherwise returns null.
907     *
908     * @throws Exception if the image can not be created
909     */
910    public static H4GRImage create(String name, Group pgroup, Datatype type,
911            long[] dims, long[] maxdims, long[] chunks, int gzip, int ncomp, int interlace, Object data) throws Exception {
912        log.trace("create(): start: name={} parentGroup={} type={} gzip={} ncomp={} interlace={}", name, pgroup, type, gzip, ncomp, interlace);
913
914        H4GRImage dataset = null;
915        if ((name == null) ||
916            (pgroup == null) ||
917            (dims == null) ||
918            ((gzip>0) && (chunks==null))) {
919            log.debug("create(): one or more parameters are null - exit");
920            return null;
921        }
922
923        H4File file = (H4File)pgroup.getFileFormat();
924        if (file == null) {
925            log.debug("create(): Parent group FileFormat is null - exit");
926            return null;
927        }
928
929        String path = HObject.SEPARATOR;
930        if (!pgroup.isRoot())
931            path = pgroup.getPath()+pgroup.getName()+HObject.SEPARATOR;
932        if (interlace == ScalarDS.INTERLACE_PLANE)
933            interlace = HDFConstants.MFGR_INTERLACE_COMPONENT;
934        else
935            interlace = HDFConstants.MFGR_INTERLACE_PIXEL;
936
937        int rank = 2;
938        int[] idims = new int[rank];
939        int[] imaxdims = new int[rank];
940        int[] start = new int[rank];
941        for (int i=0; i<rank; i++) {
942            idims[i] = (int)dims[i];
943            if (maxdims != null)
944                imaxdims[i] = (int)maxdims[i];
945            else
946                imaxdims[i] = idims[i];
947            start[i] = 0;
948        }
949
950        int[] ichunks = null;
951        if (chunks != null) {
952            ichunks = new int[rank];
953            for (int i=0; i<rank; i++)
954                ichunks[i] = (int)chunks[i];
955        }
956
957        long grid = -1;
958        long vgid = -1;
959        long gid = (file).getGRAccessID();
960        long tid = type.createNative();
961
962        if(tid >= 0) {
963            try {
964                grid = HDFLibrary.GRcreate(gid, name, ncomp, tid, interlace, idims);
965            }
966            catch (Exception ex) {
967                log.debug("create(): exit with failure: ", ex);
968                throw (ex);
969            }
970        }
971
972        if (grid < 0) {
973            log.debug("create(): Invalid GR ID - exit");
974            throw (new HDFException("Unable to create the new dataset."));
975        }
976
977        if ((grid > 0) && (data != null))
978            HDFLibrary.GRwriteimage(grid, start, null, idims, data);
979
980        if (chunks != null) {
981            // set chunk
982            HDFChunkInfo chunkInfo = new HDFChunkInfo(ichunks);
983            HDFLibrary.GRsetchunk(grid, chunkInfo, HDFConstants.HDF_CHUNK);
984        }
985
986        if (gzip > 0) {
987            // set compression
988            int compType = HDFConstants.COMP_CODE_DEFLATE;
989            HDFDeflateCompInfo compInfo = new HDFDeflateCompInfo();
990            compInfo.level = gzip;
991            HDFLibrary.GRsetcompress(grid, compType, compInfo);
992        }
993
994        int ref = HDFLibrary.GRidtoref(grid);
995
996        if (!pgroup.isRoot()) {
997            // add the dataset to the parent group
998            vgid = pgroup.open();
999            if (vgid < 0) {
1000                if (grid > 0)
1001                    HDFLibrary.GRendaccess(grid);
1002                log.debug("create(): Invalid VG ID - exit");
1003                throw (new HDFException("Unable to open the parent group."));
1004            }
1005
1006            HDFLibrary.Vaddtagref(vgid, HDFConstants.DFTAG_RI, ref);
1007
1008            pgroup.close(vgid);
1009        }
1010
1011        try {
1012            if (grid > 0)
1013                HDFLibrary.GRendaccess(grid);
1014        }
1015        catch (Exception ex) {
1016            log.debug("create(): GRendaccess failure: ", ex);
1017        }
1018
1019        long[] oid = {HDFConstants.DFTAG_NDG, ref};
1020        dataset = new H4GRImage(file, name, path, oid);
1021
1022        if (dataset != null)
1023            pgroup.addToMemberList(dataset);
1024
1025        return dataset;
1026    }
1027
1028    /**
1029     * copy attributes from one GR image to another GR image
1030     */
1031    private void copyAttribute(long srcdid, long dstdid, int numberOfAttributes) {
1032        log.trace("copyAttribute(): start: srcdid={} dstdid={} numAttributes={}", srcdid, dstdid, numberOfAttributes);
1033
1034        if (numberOfAttributes <= 0) {
1035            log.debug("copyAttribute(): numberOfAttributes={}", numberOfAttributes);
1036            return;
1037        }
1038
1039        try {
1040            boolean b = false;
1041            String[] attrName = new String[1];
1042            int[] attrInfo = {0, 0};
1043            for (int i=0; i<numberOfAttributes; i++) {
1044                attrName[0] = "";
1045                try {
1046                    b = HDFLibrary.GRattrinfo(srcdid, i, attrName, attrInfo);
1047                }
1048                catch (HDFException ex) {
1049                    log.trace("copyAttribute(): attribute[{}] GRattrinfo failure: ", i, ex);
1050                    b = false;
1051                }
1052
1053                if (!b)
1054                    continue;
1055
1056                // read attribute data from source dataset
1057                byte[] attrBuff = new byte[attrInfo[1] * HDFLibrary.DFKNTsize(attrInfo[0])];
1058                try {
1059                    HDFLibrary.GRgetattr(srcdid, i, attrBuff);
1060                }
1061                catch (Exception ex) {
1062                    log.trace("copyAttribute(): attribute[{}] GRgetattr failure: ", i, ex);
1063                    attrBuff = null;
1064                }
1065
1066                if (attrBuff == null) {
1067                    log.debug("copyAttribute(): attrBuff[{}] is null continue", i);
1068                    continue;
1069                }
1070
1071                // attach attribute to the destination dataset
1072                HDFLibrary.GRsetattr(dstdid, attrName[0], attrInfo[0], attrInfo[1], attrBuff);
1073            } //  (int i=0; i<numberOfAttributes; i++)
1074        }
1075        catch (Exception ex) {
1076            log.debug("copyAttribute(): failure: ", ex);
1077        }
1078    }
1079
1080    //Implementing DataFormat
1081    /**
1082     * Retrieves the object's metadata, such as attributes, from the file.
1083     *
1084     * Metadata, such as attributes, is stored in a List.
1085     *
1086     * @param attrPropList
1087     *             the list of properties to get
1088     *
1089     * @return the list of metadata objects.
1090     *
1091     * @throws Exception
1092     *             if the metadata can not be retrieved
1093     */
1094    @SuppressWarnings("rawtypes")
1095    public List getMetadata(int... attrPropList) throws Exception {
1096        throw new UnsupportedOperationException("getMetadata(int... attrPropList) is not supported");
1097    }
1098}