001/*****************************************************************************
002 * Copyright by The HDF Group.                                               *
003 * Copyright by the Board of Trustees of the University of Illinois.         *
004 * All rights reserved.                                                      *
005 *                                                                           *
006 * This file is part of the HDF Java Products distribution.                  *
007 * The full copyright notice, including terms governing use, modification,   *
008 * and redistribution, is contained in the COPYING file, which can be found  *
009 * at the root of the source code distribution tree,                         *
010 * or in https://www.hdfgroup.org/licenses.                                  *
011 * If you do not have access to either file, you may request a copy from     *
012 * help@hdfgroup.org.                                                        *
013 ****************************************************************************/
014
015package hdf.object.h4;
016
017import java.util.List;
018import java.util.Vector;
019
020import hdf.hdflib.HDFChunkInfo;
021import hdf.hdflib.HDFCompInfo;
022import hdf.hdflib.HDFConstants;
023import hdf.hdflib.HDFDeflateCompInfo;
024import hdf.hdflib.HDFException;
025import hdf.hdflib.HDFLibrary;
026import hdf.object.Attribute;
027import hdf.object.Dataset;
028import hdf.object.Datatype;
029import hdf.object.FileFormat;
030import hdf.object.Group;
031import hdf.object.HObject;
032import hdf.object.MetaDataContainer;
033import hdf.object.ScalarDS;
034import hdf.object.h4.H4ScalarAttribute;
035
036import org.slf4j.Logger;
037import org.slf4j.LoggerFactory;
038
039/**
040 * H4GRImage describes an HDF4 general raster(GR) image and operations performed on
041 * the GR image. An HDF4 raster image is a two-dimension array of pixel values.
042 *
043 * Every GR data set must contain the following components: image array, name,
044 * pixel type, and dimensions. The name, dimensions, and pixel type must be
045 * supplied by the user at the time the GR data set is defined.
046 *
047 * An image array is a two-dimensional array of pixels. Each element in an image
048 * array corresponds to one pixel and each pixel can consist of a number of
049 * color component values or pixel components, e.g., Red-Green-Blue or RGB,
050 * Cyan-Magenta-Yellow-Black or CMYK, etc. Pixel components can be represented
051 * by different methods (8-bit lookup table or 24-bit direct representation) and
052 * may have different data types. The data type of pixel components and the number
053 * of components in each pixel are collectively known as the pixel type.
054 *
055 * <b>How to Select a Subset</b>
056 *
057 * Dataset defines APIs for reading, writing and subsetting a dataset. No function is
058 * defined to select a subset of a data array. The selection is done in an implicit way.
059 * Function calls to dimension information such as getSelectedDims() return an array
060 * of dimension values, which is a reference to the array in the dataset object.
061 * Changes of the array outside the dataset object directly change the values of
062 * the array in the dataset object. It is like pointers in C.
063 *
064 * The following is an example of how to make a subset. In the example, the dataset
065 * is a 4-dimension with size of [200][100][50][10], i.e.
066 * dims[0]=200; dims[1]=100; dims[2]=50; dims[3]=10; <br>
067 * We want to select every other data point in dims[1] and dims[2]
068 * <pre>
069     int rank = dataset.getRank();   // number of dimensions of the dataset
070     long[] dims = dataset.getDims(); // the dimension sizes of the dataset
     long[] selected = dataset.getSelectedDims(); // the selected size of the dataset
072     long[] start = dataset.getStartDims(); // the offset of the selection
073     long[] stride = dataset.getStride(); // the stride of the dataset
074     int[]  selectedIndex = dataset.getSelectedIndex(); // the selected dimensions for display
075
     // select dim1 and dim2 as 2D data for display, and slice through dim0
     selectedIndex[0] = 1;
     selectedIndex[1] = 2;
     selectedIndex[2] = 0;
080
081     // reset the selection arrays
082     for (int i=0; i&lt;rank; i++) {
083         start[i] = 0;
084         selected[i] = 1;
085         stride[i] = 1;
086    }
087
088    // set stride to 2 on dim1 and dim2 so that every other data point is selected.
089    stride[1] = 2;
090    stride[2] = 2;
091
092    // set the selection size of dim1 and dim2
093    selected[1] = dims[1]/stride[1];
    selected[2] = dims[2]/stride[2];
095
    // when dataset.read() is called, the selection above will be used since
    // the dimension arrays are passed by reference. Changes of these arrays
    // outside the dataset object directly change the values of these arrays
099    // in the dataset object.
100
101 * </pre>
102 *
103 * @version 1.1 9/4/2007
104 * @author Peter X. Cao
105 */
public class H4GRImage extends ScalarDS implements MetaDataContainer {
    private static final long serialVersionUID = 1029672744963360976L;

    private static final Logger log = LoggerFactory.getLogger(H4GRImage.class);

    /**
     * The list of attributes of this data object. Members of the list are
     * instance of H4ScalarAttribute. Built lazily by getMetadata() and cached.
     */
    @SuppressWarnings("rawtypes")
    private List attributeList;

    /**
     * The GR interface identifier obtained from GRstart(fid).
     * Refreshed from the owning H4File when needed (see hasAttribute()).
     */
    private long grid;

    /**
     * The number of components in the raster image; set by init() or copy().
     */
    private int ncomp;

    /** the datatype identifier; -1 until init() reads it from the file */
    private long datatypeID = -1;

    /** the number of attributes; -1 means not yet queried from the file */
    private int nAttributes = -1;
133
134    /**
135     * Creates a H4GRImage object with specific name and path.
136     *
137     * @param theFile the HDF file.
138     * @param name the name of this H4GRImage.
139     * @param path the full path of this H4GRImage.
140     */
141    public H4GRImage(FileFormat theFile, String name, String path) { this(theFile, name, path, null); }
142
143    /**
144     * Creates a H4GRImage object with specific name, path, and object ID.
145     *
146     * @param theFile the HDF file.
147     * @param name the name of this H4GRImage.
148     * @param path the full path of this H4GRImage.
149     * @param oid the unique identifier of this data object.
150     */
151    @SuppressWarnings("deprecation")
152    public H4GRImage(FileFormat theFile, String name, String path, long[] oid)
153    {
154        super(theFile, name, path, oid);
155        palette = null;
156        isImage = isImageDisplay = true;
157        unsignedConverted        = false;
158        grid                     = ((H4File)getFileFormat()).getGRAccessID();
159    }
160
161    /*
162     * (non-Javadoc)
163     * @see hdf.object.DataFormat#hasAttribute()
164     */
165    @Override
166    public boolean hasAttribute()
167    {
168        if (nAttributes < 0) {
169            grid = ((H4File)getFileFormat()).getGRAccessID();
170
171            long id = open();
172
173            if (id >= 0) {
174                String[] objName = {""};
175                int[] grInfo     = new int[4]; // ncomp, data_type, interlace, and num_attrs
176                int[] idims      = new int[2];
177                try {
178                    HDFLibrary.GRgetiminfo(id, objName, grInfo, idims);
179                    nAttributes = grInfo[3];
180                }
181                catch (Exception ex) {
182                    log.trace("hasAttribute() failure: ", ex);
183                    nAttributes = 0;
184                }
185
186                log.trace("hasAttribute(): nAttributes={}", nAttributes);
187
188                close(id);
189            }
190        }
191
192        return (nAttributes > 0);
193    }
194
195    // To do: Implementing Dataset
196    @Override
197    public Dataset copy(Group pgroup, String dname, long[] dims, Object buff) throws Exception
198    {
199        log.trace("copy(): start: parentGroup={} datasetName={}", pgroup, dname);
200
201        Dataset dataset = null;
202        long srcdid     = -1;
203        long dstdid     = -1;
204        String path     = null;
205        int[] count     = null;
206
207        if (pgroup == null) {
208            log.debug("copy(): Parent group is null - exit");
209            return null;
210        }
211
212        if (pgroup.isRoot())
213            path = HObject.SEPARATOR;
214        else
215            path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR;
216
217        srcdid = open();
218        if (srcdid < 0) {
219            log.debug("copy(): Invalid source dataset ID - exit");
220            return null;
221        }
222
223        if (dims != null) {
224            count    = new int[2];
225            count[0] = (int)dims[0];
226            count[1] = (int)dims[1];
227        }
228
229        int[] grInfo = new int[4]; // ncomp, data_type, interlace and num_attrs
230        try {
231            String[] tmpName = {""};
232            int[] tmpDims    = new int[2];
233            HDFLibrary.GRgetiminfo(srcdid, tmpName, grInfo, tmpDims);
234            if (count == null)
235                count = tmpDims;
236        }
237        catch (HDFException ex) {
238            log.debug("copy(): GRgetiminfo failure: ", ex);
239        }
240
241        ncomp                  = grInfo[0];
242        long tid               = grInfo[1];
243        int interlace          = grInfo[2];
244        int numberOfAttributes = grInfo[3];
245        dstdid = HDFLibrary.GRcreate(((H4File)pgroup.getFileFormat()).getGRAccessID(), dname, ncomp, tid,
246                                     interlace, count);
247        if (dstdid < 0) {
248            log.debug("copy(): Invalid dest dataset ID - exit");
249            return null;
250        }
251
252        int ref = HDFLibrary.GRidtoref(dstdid);
253        if (!pgroup.isRoot()) {
254            long vgid = pgroup.open();
255            HDFLibrary.Vaddtagref(vgid, HDFConstants.DFTAG_RIG, ref);
256            pgroup.close(vgid);
257        }
258
259        // read data from the source dataset
260        int[] start = {0, 0};
261        if ((buff == null) && (count != null)) {
262            buff = new byte[count[0] * count[1] * HDFLibrary.DFKNTsize(tid)];
263            HDFLibrary.GRreadimage(srcdid, start, null, count, buff);
264        }
265
266        // write the data into the destination dataset
267        HDFLibrary.GRwriteimage(dstdid, start, null, count, buff);
268
269        // copy palette
270        long pid      = HDFLibrary.GRgetlutid(srcdid, 0);
271        int[] palInfo = new int[4];
272
273        HDFLibrary.GRgetlutinfo(pid, palInfo);
274        palInfo[1]     = HDFConstants.DFNT_UINT8; // support unsigned byte only. Other type does not work
275        int palSize    = palInfo[0] * palInfo[3];
276        byte[] palBuff = new byte[palSize];
277        HDFLibrary.GRreadlut(pid, palBuff);
278        pid = HDFLibrary.GRgetlutid(dstdid, 0);
279        HDFLibrary.GRwritelut(pid, palInfo[0], palInfo[1], palInfo[2], palInfo[3], palBuff);
280
281        // copy attributes from one object to the new object
282        log.trace("copy(): copyAttributes: numAttributes={}", numberOfAttributes);
283        copyAttribute(srcdid, dstdid, numberOfAttributes);
284
285        long[] oid = {HDFConstants.DFTAG_RIG, ref};
286        dataset    = new H4GRImage(pgroup.getFileFormat(), dname, path, oid);
287
288        pgroup.addToMemberList(dataset);
289
290        close(srcdid);
291
292        try {
293            HDFLibrary.GRendaccess(dstdid);
294        }
295        catch (HDFException ex) {
296            log.debug("copy(): GRendaccess failure: ", ex);
297        }
298
299        return dataset;
300    }
301
302    // implementing ScalarDS
303    /**
304     * Returns the datatype of the data object.
305     *
306     * @return the datatype of the data object.
307     */
308    @Override
309    public Datatype getDatatype()
310    {
311        if (!inited)
312            init();
313
314        if (datatype == null) {
315            try {
316                datatype = new H4Datatype(datatypeID);
317            }
318            catch (Exception ex) {
319                log.debug("getDatatype(): failed to create datatype: ", ex);
320                datatype = null;
321            }
322        }
323
324        return datatype;
325    }
326
    // Implementing Dataset
    /**
     * Reads the selected image data from the file and returns it as a raw
     * byte array, without conversion to a Java numeric type.
     *
     * @return the bytes read from the file, or null if the image cannot be
     *         opened or the read fails.
     *
     * @throws HDFException
     *             if the HDF library reports an error
     */
    @Override
    public byte[] readBytes() throws HDFException
    {
        byte[] theData = null;

        if (!isInited())
            init();

        long id = open();
        if (id < 0) {
            log.debug("readBytes(): Invalid ID - exit");
            return null;
        }

        try {
            // set the interlacing scheme for reading image data
            HDFLibrary.GRreqimageil(id, interlace);
            int datasize = (int)(getWidth() * getHeight() * ncomp);
            int size     = HDFLibrary.DFKNTsize(datatypeID) * datasize;
            theData      = new byte[size];
            // rank is fixed to 2 by init(), so only the first two dims are used
            int[] start  = {(int)startDims[0], (int)startDims[1]};
            int[] select = {(int)selectedDims[0], (int)selectedDims[1]};

            int[] stride = null;
            if (selectedStride != null) {
                stride = new int[rank];
                for (int i = 0; i < rank; i++)
                    stride[i] = (int)selectedStride[i];
            }

            HDFLibrary.GRreadimage(id, start, stride, select, theData);
        }
        catch (Exception ex) {
            log.debug("readBytes(): failure: ", ex);
        }
        finally {
            close(id);
        }

        return theData;
    }
369
    // ***** need to implement from DataFormat *****
    /**
     * Reads the data from file.
     *
     * read() reads the data from file to a memory buffer and returns the memory
     * buffer. The dataset object does not hold the memory buffer. To store the
     * memory buffer in the dataset object, one must call getData().
     *
     * By default, the whole dataset is read into memory. Users can also select
     * a subset to read. Subsetting is done in an implicit way.
     *
     * @return the data read from file.
     *
     * @see #getData()
     *
     * @throws HDFException
     *             if object can not be read
     */
    @Override
    public Object read() throws HDFException
    {
        Object theData = null;

        if (!isInited())
            init();

        long id = open();
        if (id < 0) {
            log.debug("read(): Invalid ID");
            return null;
        }

        try {
            // set the interlacing scheme for reading image data
            HDFLibrary.GRreqimageil(id, interlace);
            int datasize = (int)(getWidth() * getHeight() * ncomp);

            // allocate a 1D Java array matching the HDF4 datatype
            theData = H4Datatype.allocateArray(datatypeID, datasize);

            if (theData != null) {
                // assume external data files are located in the same directory as the main file.
                HDFLibrary.HXsetdir(getFileFormat().getParent());

                // rank is fixed to 2 by init(), so only the first two dims are used
                int[] start  = {(int)startDims[0], (int)startDims[1]};
                int[] select = {(int)selectedDims[0], (int)selectedDims[1]};

                int[] stride = null;
                if (selectedStride != null) {
                    stride = new int[rank];
                    for (int i = 0; i < rank; i++)
                        stride[i] = (int)selectedStride[i];
                }

                HDFLibrary.GRreadimage(id, start, stride, select, theData);
            }
        }
        catch (Exception ex) {
            log.debug("read(): failure: ", ex);
        }
        finally {
            close(id);
        }

        // flag whether the selected dimensions are in the default display
        // order (see the selectedIndex setup in init())
        if ((rank > 1) && (selectedIndex[1] > selectedIndex[0]))
            isDefaultImageOrder = false;
        else
            isDefaultImageOrder = true;

        log.trace("read(): isDefaultImageOrder={}", isDefaultImageOrder);
        return theData;
    }
441
    // Implementing DataFormat
    /**
     * Writes a memory buffer to the object in the file.
     *
     * @param buf
     *            the data to write
     *
     * @throws HDFException
     *             if data can not be written
     */
    @SuppressWarnings("deprecation")
    @Override
    public void write(Object buf) throws HDFException
    {
        if (buf == null) {
            log.debug("write(): buf is null - exit");
            return;
        }

        long id = open();
        if (id < 0) {
            log.debug("write(): Invalid ID - exit");
            return;
        }

        // build the selection (offset and size) for the write
        int[] select = new int[rank];
        int[] start  = new int[rank];
        for (int i = 0; i < rank; i++) {
            select[i] = (int)selectedDims[i];
            start[i]  = (int)startDims[i];
        }

        int[] stride = null;
        if (selectedStride != null) {
            stride = new int[rank];
            for (int i = 0; i < rank; i++) {
                stride[i] = (int)selectedStride[i];
            }
        }

        Object tmpData = buf;
        try {
            // convert Java signed values back to the unsigned C layout only
            // when the data was previously converted on the read path
            if (getDatatype().isUnsigned() && unsignedConverted)
                tmpData = convertToUnsignedC(buf);
            // assume external data files are located in the same directory as the main file.
            HDFLibrary.HXsetdir(getFileFormat().getParent());

            HDFLibrary.GRwriteimage(id, start, stride, select, tmpData);
        }
        catch (Exception ex) {
            log.debug("write(): failure: ", ex);
        }
        finally {
            tmpData = null;
            close(id);
        }
    }
499
    // ***** need to implement from DataFormat *****
    /**
     * Retrieves the object's metadata, such as attributes, from the file.
     *
     * Metadata, such as attributes, is stored in a List. The list is built
     * once and cached; subsequent calls return the cached list.
     *
     * @return the list of metadata objects.
     *
     * @throws HDFException
     *             if the metadata can not be retrieved
     */
    @Override
    @SuppressWarnings({"rawtypes", "unchecked"})
    public List getMetadata() throws HDFException
    {
        if (attributeList != null) {
            log.trace("getMetadata(): attributeList != null - exit");
            return attributeList;
        }

        long id          = open();
        String[] objName = {""};
        int[] grInfo     = new int[4]; // ncomp, data_type, interlace, and num_attrs
        int[] idims      = new int[2];
        try {
            HDFLibrary.GRgetiminfo(id, objName, grInfo, idims);
            // mask off the litend bit
            grInfo[1] = grInfo[1] & (~HDFConstants.DFNT_LITEND);
            int n     = grInfo[3];

            if ((attributeList == null) && (n > 0))
                attributeList = new Vector(n, 5);

            boolean b         = false;
            String[] attrName = new String[1];
            int[] attrInfo    = {0, 0}; // data_type, length
            for (int i = 0; i < n; i++) {
                attrName[0] = "";
                try {
                    b = HDFLibrary.GRattrinfo(id, i, attrName, attrInfo);
                    // mask off the litend bit
                    attrInfo[0] = attrInfo[0] & (~HDFConstants.DFNT_LITEND);
                }
                catch (HDFException ex) {
                    log.debug("getMetadata(): GRattrinfo failure: ", ex);
                    b = false;
                }

                // skip attributes whose info could not be retrieved
                if (!b)
                    continue;

                long[] attrDims = {attrInfo[1]};
                H4ScalarAttribute attr =
                    new H4ScalarAttribute(this, attrName[0], new H4Datatype(attrInfo[0]), attrDims);
                attributeList.add(attr);

                Object buf = null;
                try {
                    buf = H4Datatype.allocateArray(attrInfo[0], attrInfo[1]);
                }
                catch (OutOfMemoryError e) {
                    log.debug("getMetadata(): out of memory: ", e);
                    buf = null;
                }

                try {
                    HDFLibrary.GRgetattr(id, i, buf);
                }
                catch (HDFException ex) {
                    log.debug("getMetadata(): GRgetattr failure: ", ex);
                    buf = null;
                }

                // character data is converted to String for display; the
                // attribute keeps a null value if the read above failed
                if (buf != null) {
                    if ((attrInfo[0] == HDFConstants.DFNT_CHAR) ||
                        (attrInfo[0] == HDFConstants.DFNT_UCHAR8)) {
                        buf = Dataset.byteToString((byte[])buf, attrInfo[1]);
                    }

                    attr.setAttributeData(buf);
                }
            } //  (int i=0; i<n; i++)
        }
        catch (Exception ex) {
            log.debug("getMetadata(): failure: ", ex);
        }
        finally {
            close(id);
        }

        return attributeList;
    }
592
593    // ***** need to implement from DataFormat *****
594    /**
595     * Writes a specific piece of metadata (such as an attribute) into the file.
596     *
597     * If an HDF(4&amp;5) attribute exists in the file, this method updates its
598     * value. If the attribute does not exist in the file, it creates the
599     * attribute in the file and attaches it to the object. It will fail to
600     * write a new attribute to the object where an attribute with the same name
601     * already exists. To update the value of an existing attribute in the file,
602     * one needs to get the instance of the attribute by getMetadata(), change
603     * its values, then use writeMetadata() to write the value.
604     *
605     * @param info
606     *            the metadata to write.
607     *
608     * @throws Exception
609     *             if the metadata can not be written
610     */
611    @Override
612    @SuppressWarnings({"rawtypes", "unchecked"})
613    public void writeMetadata(Object info) throws Exception
614    {
615        // only attribute metadata is supported.
616        if (!(info instanceof Attribute)) {
617            log.debug("writeMetadata(): Object not an H4ScalarAttribute - exit");
618            return;
619        }
620
621        try {
622            getFileFormat().writeAttribute(this, (H4ScalarAttribute)info, true);
623
624            if (attributeList == null)
625                attributeList = new Vector();
626
627            attributeList.add(info);
628            nAttributes = attributeList.size();
629        }
630        catch (Exception ex) {
631            log.debug("writeMetadata(): failure: ", ex);
632        }
633    }
634
    // ***** need to implement from DataFormat *****
    /**
     * Deletes an existing piece of metadata from this object.
     *
     * Not supported for HDF4 GR images; this implementation is a no-op and
     * only logs the call.
     *
     * @param info
     *            the metadata to delete.
     *
     * @throws HDFException
     *             if the metadata can not be removed
     */
    @Override
    public void removeMetadata(Object info) throws HDFException
    {
        log.trace("removeMetadata(): disabled");
    }
650
651    /**
652     * Updates an existing piece of metadata attached to this object.
653     *
654     * @param info
655     *            the metadata to update.
656     *
657     * @throws Exception
658     *             if the metadata can not be updated
659     */
660    @Override
661    public void updateMetadata(Object info) throws Exception
662    {
663        log.trace("updateMetadata(): disabled");
664    }
665
666    // Implementing HObject.
667    @Override
668    public long open()
669    {
670        log.trace("open(): start: for file={} with ref={}", getFID(), (short)oid[1]);
671
672        long id = -1;
673        try {
674            int index = HDFLibrary.GRreftoindex(grid, (short)oid[1]);
675            id        = HDFLibrary.GRselect(grid, index);
676        }
677        catch (HDFException ex) {
678            log.debug("open(): failure: ", ex);
679            id = -1;
680        }
681
682        return id;
683    }
684
685    // Implementing HObject.
686    @Override
687    public void close(long grid)
688    {
689        try {
690            HDFLibrary.GRendaccess(grid);
691        }
692        catch (HDFException ex) {
693            log.debug("close(): failure: ", ex);
694        }
695    }
696
    // Implementing Dataset.
    /**
     * Retrieves the datatype, compression, chunking and dimension information
     * from the file and initializes the selection state. Runs only once;
     * subsequent calls are no-ops.
     */
    @Override
    public void init()
    {
        if (inited) {
            log.trace("init(): Already initialized");
            return; // already called. Initialize only once
        }

        long id          = open();
        String[] objName = {""};
        int[] grInfo     = new int[4]; // ncomp, data_type, interlace and num_attrs
        int[] idims      = new int[2];
        try {
            HDFLibrary.GRgetiminfo(id, objName, grInfo, idims);
            // mask off the litend bit
            grInfo[1]  = grInfo[1] & (~HDFConstants.DFNT_LITEND);
            datatypeID = grInfo[1];

            // get compression information
            try {
                HDFCompInfo compInfo = new HDFCompInfo();
                HDFLibrary.GRgetcompinfo(id, compInfo);

                compression.setLength(0);

                if (compInfo.ctype == HDFConstants.COMP_CODE_DEFLATE)
                    compression.append("GZIP");
                else if (compInfo.ctype == HDFConstants.COMP_CODE_SZIP)
                    compression.append("SZIP");
                else if (compInfo.ctype == HDFConstants.COMP_CODE_JPEG)
                    compression.append("JPEG");
                else if (compInfo.ctype == HDFConstants.COMP_CODE_SKPHUFF)
                    compression.append("SKPHUFF");
                else if (compInfo.ctype == HDFConstants.COMP_CODE_RLE)
                    compression.append("RLE");
                else if (compInfo.ctype == HDFConstants.COMP_CODE_NBIT)
                    compression.append("NBIT");

                // unrecognized or missing compression codes fall back to NONE
                if (compression.length() == 0)
                    compression.append("NONE");
            }
            catch (Exception ex) {
                log.debug("init(): get compression information failure: ", ex);
            }

            // get chunk information
            try {
                HDFChunkInfo chunkInfo = new HDFChunkInfo();
                int[] cflag            = {HDFConstants.HDF_NONE};
                HDFLibrary.GRgetchunkinfo(id, chunkInfo, cflag);

                storageLayout.setLength(0);

                if (cflag[0] == HDFConstants.HDF_NONE) {
                    chunkSize = null;
                    storageLayout.append("NONE");
                }
                else {
                    chunkSize = new long[rank];
                    for (int i = 0; i < rank; i++)
                        chunkSize[i] = chunkInfo.chunk_lengths[i];

                    storageLayout.append("CHUNKED: ").append(chunkSize[0]);
                    for (int i = 1; i < rank; i++)
                        storageLayout.append(" X ").append(chunkSize[i]);
                }
            }
            catch (Exception ex) {
                log.debug("init(): get chunk information failure: ", ex);
            }

            inited = true;
        }
        catch (HDFException ex) {
            log.debug("init(): failure: ", ex);
        }
        finally {
            close(id);
        }

        // grInfo/idims stay zero-filled if GRgetiminfo failed above
        ncomp       = grInfo[0];
        isTrueColor = (ncomp >= 3);
        interlace   = grInfo[2];
        rank        = 2; // support only two dimensional raster image

        // data in HDF4 GR image is arranged as dim[0]=width, dim[1]=height.
        // other image data is arranged as dim[0]=height, dim[1]=width.
        selectedIndex[0] = 1;
        selectedIndex[1] = 0;

        // by default the entire image is selected
        dims         = new long[rank];
        startDims    = new long[rank];
        selectedDims = new long[rank];
        for (int i = 0; i < rank; i++) {
            startDims[i]    = 0;
            selectedDims[i] = idims[i];
            dims[i]         = idims[i];
        }
    }
797
    // ***** implement from ScalarDS *****

    /*
     * (non-Javadoc)
     * @see hdf.object.ScalarDS#readPalette(int)
     */
    @Override
    public byte[][] readPalette(int idx)
    {
        // only the first palette is supported; idx is ignored (see getPalette())
        return getPalette();
    }
809
810    /*
811     * (non-Javadoc)
812     * @see hdf.object.ScalarDS#NumberOfPalettes()
813     */
814    @Override
815    public int getNumberOfPalettes()
816    {
817        if (palette != null)
818            return 1;
819        return 0;
820    }
821
    /**
     * Returns the first palette attached to this image, reading it from the
     * file on first call and caching it afterwards.
     *
     * @return the palette as a [3][256] array of RGB byte components, or null
     *         if the image has no usable palette.
     */
    @Override
    public byte[][] getPalette()
    {
        if (palette != null) {
            log.trace("getPalette(): palette != null - exit");
            return palette;
        }

        long id = open();
        if (id < 0) {
            log.debug("getPalette(): Invalid ID - exit");
            return null;
        }

        // get palette info.
        long lutid    = -1;
        int[] lutInfo = new int[4]; // ncomp, datatype, interlace, num_entries
        try {
            // find the first palette.
            // Todo: get all the palettes
            lutid = HDFLibrary.GRgetlutid(id, 0);
            HDFLibrary.GRgetlutinfo(lutid, lutInfo);
        }
        catch (HDFException ex) {
            log.debug("getPalette(): exit with failure: ", ex);
            close(id);
            return null;
        }

        // check if there is palette data. HDFLibrary.GRgetlutinfo() sometimes
        // return true even if there is no palette data, and check if it is a
        // RGB with 256 colors
        if ((lutInfo[0] != 3) || (lutInfo[2] < 0) || (lutInfo[3] != 256)) {
            close(id);
            log.debug("getPalette(): no palette data - exit");
            return null;
        }

        // read palette data
        boolean b  = false;
        byte[] pal = new byte[3 * 256];
        try {
            // request the palette in its stored interlace mode before reading
            HDFLibrary.GRreqlutil(id, lutInfo[2]);
            b = HDFLibrary.GRreadlut(lutid, pal);
        }
        catch (HDFException ex) {
            log.debug("getPalette(): failure: ", ex);
            b = false;
        }

        if (!b) {
            close(id);
            log.debug("getPalette(): no palette data - exit");
            return null;
        }

        palette = new byte[3][256];
        if (lutInfo[2] == HDFConstants.MFGR_INTERLACE_PIXEL) {
            // color components are arranged in RGB, RGB, RGB, ...
            for (int i = 0; i < 256; i++) {
                palette[0][i] = pal[i * 3];
                palette[1][i] = pal[i * 3 + 1];
                palette[2][i] = pal[i * 3 + 2];
            }
        }
        else {
            // planar interlace: all R values, then all G, then all B
            for (int i = 0; i < 256; i++) {
                palette[0][i] = pal[i];
                palette[1][i] = pal[256 + i];
                palette[2][i] = pal[512 + i];
            }
        }

        close(id);

        return palette;
    }
899
900    /**
901     * Returns the number of components of this image data.
902     *
903     * @return the number of components
904     */
905    public int getComponentCount() { return ncomp; }
906
907    /**
908     * Creates a new image.
909     *
910     * @param name the name of the dataset to create.
911     * @param pgroup the parent group of the new dataset.
912     * @param type the datatype of the dataset.
913     * @param dims the dimension size of the dataset.
914     * @param maxdims the max dimension size of the dataset.
915     * @param chunks the chunk size of the dataset.
916     * @param gzip the level of the gzip compression.
917     * @param ncomp number of components of the image data.
918     * @param interlace the interlace mode.
919     * @param data the array of data values.
920     *
921     * @return the new image if successful. Otherwise returns null.
922     *
923     * @throws Exception if the image can not be created
924     */
925    public static H4GRImage create(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims,
926                                   long[] chunks, int gzip, int ncomp, int interlace, Object data)
927        throws Exception
928    {
929        log.trace("create(): start: name={} parentGroup={} type={} gzip={} ncomp={} interlace={}", name,
930                  pgroup, type, gzip, ncomp, interlace);
931
932        H4GRImage dataset = null;
933        if ((name == null) || (pgroup == null) || (dims == null) || ((gzip > 0) && (chunks == null))) {
934            log.debug("create(): one or more parameters are null - exit");
935            return null;
936        }
937
938        H4File file = (H4File)pgroup.getFileFormat();
939        if (file == null) {
940            log.debug("create(): Parent group FileFormat is null - exit");
941            return null;
942        }
943
944        String path = HObject.SEPARATOR;
945        if (!pgroup.isRoot())
946            path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR;
947        if (interlace == ScalarDS.INTERLACE_PLANE)
948            interlace = HDFConstants.MFGR_INTERLACE_COMPONENT;
949        else
950            interlace = HDFConstants.MFGR_INTERLACE_PIXEL;
951
952        int rank       = 2;
953        int[] idims    = new int[rank];
954        int[] imaxdims = new int[rank];
955        int[] start    = new int[rank];
956        for (int i = 0; i < rank; i++) {
957            idims[i] = (int)dims[i];
958            if (maxdims != null)
959                imaxdims[i] = (int)maxdims[i];
960            else
961                imaxdims[i] = idims[i];
962            start[i] = 0;
963        }
964
965        int[] ichunks = null;
966        if (chunks != null) {
967            ichunks = new int[rank];
968            for (int i = 0; i < rank; i++)
969                ichunks[i] = (int)chunks[i];
970        }
971
972        long grid = -1;
973        long vgid = -1;
974        long gid  = (file).getGRAccessID();
975        long tid  = type.createNative();
976
977        if (tid >= 0) {
978            try {
979                grid = HDFLibrary.GRcreate(gid, name, ncomp, tid, interlace, idims);
980            }
981            catch (Exception ex) {
982                log.debug("create(): exit with failure: ", ex);
983                throw(ex);
984            }
985        }
986
987        if (grid < 0) {
988            log.debug("create(): Invalid GR ID - exit");
989            throw(new HDFException("Unable to create the new dataset."));
990        }
991
992        if ((grid > 0) && (data != null))
993            HDFLibrary.GRwriteimage(grid, start, null, idims, data);
994
995        if (chunks != null) {
996            // set chunk
997            HDFChunkInfo chunkInfo = new HDFChunkInfo(ichunks);
998            HDFLibrary.GRsetchunk(grid, chunkInfo, HDFConstants.HDF_CHUNK);
999        }
1000
1001        if (gzip > 0) {
1002            // set compression
1003            int compType                = HDFConstants.COMP_CODE_DEFLATE;
1004            HDFDeflateCompInfo compInfo = new HDFDeflateCompInfo();
1005            compInfo.level              = gzip;
1006            HDFLibrary.GRsetcompress(grid, compType, compInfo);
1007        }
1008
1009        int ref = HDFLibrary.GRidtoref(grid);
1010
1011        if (!pgroup.isRoot()) {
1012            // add the dataset to the parent group
1013            vgid = pgroup.open();
1014            if (vgid < 0) {
1015                if (grid > 0)
1016                    HDFLibrary.GRendaccess(grid);
1017                log.debug("create(): Invalid VG ID - exit");
1018                throw(new HDFException("Unable to open the parent group."));
1019            }
1020
1021            HDFLibrary.Vaddtagref(vgid, HDFConstants.DFTAG_RI, ref);
1022
1023            pgroup.close(vgid);
1024        }
1025
1026        try {
1027            if (grid > 0)
1028                HDFLibrary.GRendaccess(grid);
1029        }
1030        catch (Exception ex) {
1031            log.debug("create(): GRendaccess failure: ", ex);
1032        }
1033
1034        long[] oid = {HDFConstants.DFTAG_NDG, ref};
1035        dataset    = new H4GRImage(file, name, path, oid);
1036
1037        if (dataset != null)
1038            pgroup.addToMemberList(dataset);
1039
1040        return dataset;
1041    }
1042
1043    /**
1044     * copy attributes from one GR image to another GR image
1045     */
1046    private void copyAttribute(long srcdid, long dstdid, int numberOfAttributes)
1047    {
1048        log.trace("copyAttribute(): start: srcdid={} dstdid={} numAttributes={}", srcdid, dstdid,
1049                  numberOfAttributes);
1050
1051        if (numberOfAttributes <= 0) {
1052            log.debug("copyAttribute(): numberOfAttributes={}", numberOfAttributes);
1053            return;
1054        }
1055
1056        try {
1057            boolean b         = false;
1058            String[] attrName = new String[1];
1059            int[] attrInfo    = {0, 0};
1060            for (int i = 0; i < numberOfAttributes; i++) {
1061                attrName[0] = "";
1062                try {
1063                    b = HDFLibrary.GRattrinfo(srcdid, i, attrName, attrInfo);
1064                }
1065                catch (HDFException ex) {
1066                    log.trace("copyAttribute(): attribute[{}] GRattrinfo failure: ", i, ex);
1067                    b = false;
1068                }
1069
1070                if (!b)
1071                    continue;
1072
1073                // read attribute data from source dataset
1074                byte[] attrBuff = new byte[attrInfo[1] * HDFLibrary.DFKNTsize(attrInfo[0])];
1075                try {
1076                    HDFLibrary.GRgetattr(srcdid, i, attrBuff);
1077                }
1078                catch (Exception ex) {
1079                    log.trace("copyAttribute(): attribute[{}] GRgetattr failure: ", i, ex);
1080                    attrBuff = null;
1081                }
1082
1083                if (attrBuff == null) {
1084                    log.debug("copyAttribute(): attrBuff[{}] is null continue", i);
1085                    continue;
1086                }
1087
1088                // attach attribute to the destination dataset
1089                HDFLibrary.GRsetattr(dstdid, attrName[0], attrInfo[0], attrInfo[1], attrBuff);
1090            } //  (int i=0; i<numberOfAttributes; i++)
1091        }
1092        catch (Exception ex) {
1093            log.debug("copyAttribute(): failure: ", ex);
1094        }
1095    }
1096
    // Implementing DataFormat
    /**
     * Retrieves the object's metadata, such as attributes, from the file.
     *
     * Metadata, such as attributes, is stored in a List.
     *
     * This property-filtered overload is not implemented for HDF4 GR images and
     * always throws {@link UnsupportedOperationException}.
     *
     * @param attrPropList
     *             the list of properties to get
     *
     * @return the list of metadata objects.
     *
     * @throws Exception
     *             if the metadata can not be retrieved
     */
    @SuppressWarnings("rawtypes")
    public List getMetadata(int... attrPropList) throws Exception
    {
        throw new UnsupportedOperationException("getMetadata(int... attrPropList) is not supported");
    }
1116}