001/*****************************************************************************
002 * Copyright by The HDF Group.                                               *
003 * Copyright by the Board of Trustees of the University of Illinois.         *
004 * All rights reserved.                                                      *
005 *                                                                           *
006 * This file is part of the HDF Java Products distribution.                  *
007 * The full copyright notice, including terms governing use, modification,   *
008 * and redistribution, is contained in the files COPYING and Copyright.html. *
009 * COPYING can be found at the root of the source code distribution tree.    *
010 * Or, see https://support.hdfgroup.org/products/licenses.html               *
011 * If you do not have access to either file, you may request a copy from     *
012 * help@hdfgroup.org.                                                        *
013 ****************************************************************************/
014
015package hdf.object.h4;
016
017import java.util.List;
018import java.util.Vector;
019
020import hdf.hdflib.HDFChunkInfo;
021import hdf.hdflib.HDFCompInfo;
022import hdf.hdflib.HDFConstants;
023import hdf.hdflib.HDFDeflateCompInfo;
024import hdf.hdflib.HDFException;
025import hdf.hdflib.HDFLibrary;
026import hdf.object.Attribute;
027import hdf.object.Dataset;
028import hdf.object.Datatype;
029import hdf.object.FileFormat;
030import hdf.object.Group;
031import hdf.object.HObject;
032import hdf.object.ScalarDS;
033
034/**
035 * H4GRImage describes an HDF4 general raster(GR) image and operations performed on
 * the GR image. An HDF4 raster image is a two-dimensional array of pixel values.
037 * <p>
038 * Every GR data set must contain the following components: image array, name,
039 * pixel type, and dimensions. The name, dimensions, and pixel type must be
040 * supplied by the user at the time the GR data set is defined.
041 * <p>
042 * An image array is a two-dimensional array of pixels. Each element in an image
043 * array corresponds to one pixel and each pixel can consist of a number of
044 * color component values or pixel components, e.g., Red-Green-Blue or RGB,
045 * Cyan-Magenta-Yellow-Black or CMYK, etc. Pixel components can be represented
046 * by different methods (8-bit lookup table or 24-bit direct representation) and
047 * may have different data types. The data type of pixel components and the number
048 * of components in each pixel are collectively known as the pixel type.
049 * <p>
050 * <b>How to Select a Subset</b>
051 * <p>
052 * Dataset defines APIs for reading, writing and subsetting a dataset. No function is
053 * defined to select a subset of a data array. The selection is done in an implicit way.
054 * Function calls to dimension information such as getSelectedDims() return an array
055 * of dimension values, which is a reference to the array in the dataset object.
056 * Changes of the array outside the dataset object directly change the values of
057 * the array in the dataset object. It is like pointers in C.
058 * <p>
059 *
060 * The following is an example of how to make a subset. In the example, the dataset
061 * is a 4-dimension with size of [200][100][50][10], i.e.
062 * dims[0]=200; dims[1]=100; dims[2]=50; dims[3]=10; <br>
063 * We want to select every other data point in dims[1] and dims[2]
064 * <pre>
065     int rank = dataset.getRank();   // number of dimensions of the dataset
066     long[] dims = dataset.getDims(); // the dimension sizes of the dataset
     long[] selected = dataset.getSelectedDims(); // the selected size of the dataset
068     long[] start = dataset.getStartDims(); // the offset of the selection
069     long[] stride = dataset.getStride(); // the stride of the dataset
070     int[]  selectedIndex = dataset.getSelectedIndex(); // the selected dimensions for display
071
072     // select dim1 and dim2 as 2D data for display, and slice through dim0
     selectedIndex[0] = 1;
     selectedIndex[1] = 2;
     selectedIndex[2] = 0;
076
077     // reset the selection arrays
078     for (int i=0; i&lt;rank; i++) {
079         start[i] = 0;
080         selected[i] = 1;
081         stride[i] = 1;
082    }
083
084    // set stride to 2 on dim1 and dim2 so that every other data point is selected.
085    stride[1] = 2;
086    stride[2] = 2;
087
088    // set the selection size of dim1 and dim2
    selected[1] = dims[1]/stride[1];
    selected[2] = dims[2]/stride[2];
091
    // when dataset.read() is called, the selection above will be used since
093    // the dimension arrays are passed by reference. Changes of these arrays
094    // outside the dataset object directly change the values of these array
095    // in the dataset object.
096
097 * </pre>
098 *
099 * @version 1.1 9/4/2007
100 * @author Peter X. Cao
101 */
public class H4GRImage extends ScalarDS
{
    private static final long serialVersionUID = 1029672744963360976L;

    private final static org.slf4j.Logger   log = org.slf4j.LoggerFactory.getLogger(H4GRImage.class);

    /**
     * The list of attributes of this data object. Members of the list are
     * instance of Attribute. Populated lazily by getMetadata().
     */
    @SuppressWarnings("rawtypes")
    private List                            attributeList;

    /**
     * The GR interface identifier obtained from GRstart(fid)
     */
    private long                            grid;

    /**
     * The number of components in the raster image
     */
    private int                             ncomp;

    /** the datatype identifier; -1 until init() reads it from the file */
    private long                            datatypeID = -1;

    // cached attribute count; -1 means "not yet queried" (see hasAttribute())
    private int                             nAttributes = -1;
130
    /**
     * Creates a H4GRImage object with specific name and path, and a null
     * object identifier.
     *
     * @param theFile the HDF file.
     * @param name the name of this H4GRImage.
     * @param path the full path of this H4GRImage.
     */
    public H4GRImage(FileFormat theFile, String name, String path)
    {
        this(theFile, name, path, null);
    }
135
136    /**
137     * Creates a H4GRImage object with specific name, path, and object ID.
138     *
139     * @param theFile the HDF file.
140     * @param name the name of this H4GRImage.
141     * @param path the full path of this H4GRImage.
142     * @param oid the unique identifier of this data object.
143     */
144    @SuppressWarnings("deprecation")
145    public H4GRImage(
146        FileFormat theFile,
147        String name,
148        String path,
149        long[] oid)
150    {
151        super (theFile, name, path, oid);
152        palette = null;
153        isImage = isImageDisplay = true;
154        unsignedConverted = false;
155        grid = ((H4File)getFileFormat()).getGRAccessID();
156    }
157
158    /*
159     * (non-Javadoc)
160     * @see hdf.object.DataFormat#hasAttribute()
161     */
162    @Override
163    public boolean hasAttribute ()
164    {
165        if (nAttributes < 0) {
166            grid = ((H4File)getFileFormat()).getGRAccessID();
167
168            long id = open();
169
170            if (id >= 0) {
171                String[] objName = {""};
172                int[] grInfo = new int[4]; //ncomp, data_type, interlace, and num_attrs
173                int[] idims = new int[2];
174                try {
175                    HDFLibrary.GRgetiminfo(id, objName, grInfo, idims);
176                    nAttributes = grInfo[3];
177                }
178                catch (Exception ex) {
179                    log.trace("hasAttribute() failure: ", ex);
180                    nAttributes = 0;
181                }
182
183                log.trace("hasAttribute(): nAttributes={}", nAttributes);
184
185                close(id);
186            }
187        }
188
189        return (nAttributes > 0);
190    }
191
    // To do: Implementing Dataset
    /**
     * Copies this image to a new GR image under the given parent group.
     * The image data, the first palette and all attributes are duplicated.
     *
     * @param pgroup the parent group for the new image.
     * @param dname the name of the new image.
     * @param dims the dimension sizes of the copy; if null, the source
     *             dimensions are used.
     * @param buff the data to write into the copy; if null, the data is
     *             read from the source image.
     *
     * @return the new H4GRImage, or null on failure.
     */
    @Override
    public Dataset copy(Group pgroup, String dname, long[] dims, Object buff) throws Exception
    {
        log.trace("copy(): start: parentGroup={} datasetName={}", pgroup, dname);

        Dataset dataset = null;
        long srcdid=-1, dstdid=-1;
        String path=null;
        int[] count=null;

        if (pgroup == null) {
            log.debug("copy(): Parent group is null");
            log.trace("copy(): finish");
            return null;
        }

        // build the full path of the new image from the parent group
        if (pgroup.isRoot()) {
            path = HObject.separator;
        }
        else {
            path = pgroup.getPath()+pgroup.getName()+HObject.separator;
        }

        srcdid = open();
        if (srcdid < 0) {
            log.debug("copy(): Invalid source dataset ID");
            log.trace("copy(): finish");
            return null;
        }

        // caller-supplied dimensions override the source dimensions
        if (dims != null) {
            count = new int[2];
            count[0] = (int)dims[0];
            count[1] = (int)dims[1];
        }

        int[] grInfo = new int[4]; //ncomp, data_type, interlace and num_attrs
        try {
            String[] tmpName = {""};
            int[] tmpDims = new int[2];
            HDFLibrary.GRgetiminfo(srcdid, tmpName, grInfo, tmpDims);
            if (count == null) {
                count = tmpDims;
            }
        }
        catch (HDFException ex) {
            log.debug("copy(): GRgetiminfo failure: ", ex);
        }

        // create the destination image with the same pixel type and interlace
        int ncomp = grInfo[0];
        long tid = grInfo[1];
        int interlace = grInfo[2];
        int numberOfAttributes = grInfo[3];
        dstdid = HDFLibrary.GRcreate(
                ((H4File)pgroup.getFileFormat()).getGRAccessID(),
                dname, ncomp, tid, interlace, count);
        if (dstdid < 0) {
            log.debug("copy(): Invalid dest dataset ID");
            log.trace("copy(): finish");
            return null;
        }

        // link the new image into the parent vgroup (unless parent is the root)
        int ref = HDFLibrary.GRidtoref(dstdid);
        if (!pgroup.isRoot()) {
            long vgid = pgroup.open();
            HDFLibrary.Vaddtagref(vgid, HDFConstants.DFTAG_RIG, ref);
            pgroup.close(vgid);
        }

        // read data from the source dataset
        int[] start = {0, 0};
        if (buff == null) {
            buff = new byte[count[0]*count[1] * HDFLibrary.DFKNTsize(tid)];
            HDFLibrary.GRreadimage(srcdid, start, null, count, buff);
        }

        // write the data into the destination dataset
        HDFLibrary.GRwriteimage(dstdid, start, null, count, buff);

        // copy palette (first palette only)
        long pid = HDFLibrary.GRgetlutid(srcdid, 0);
        int[] palInfo = new int[4];

        HDFLibrary.GRgetlutinfo(pid, palInfo);
        palInfo[1] = HDFConstants.DFNT_UINT8; // support unsigned byte only. Other type does not work
        int palSize = palInfo[0]*palInfo[3];
        byte[] palBuff = new byte[palSize];
        HDFLibrary.GRreadlut(pid, palBuff);
        pid = HDFLibrary.GRgetlutid(dstdid, 0);
        HDFLibrary.GRwritelut(pid, palInfo[0], palInfo[1], palInfo[2], palInfo[3], palBuff);

        // copy attributes from one object to the new object
        log.trace("copy(): copyAttributes: numAttributes={}", numberOfAttributes);
        copyAttribute(srcdid, dstdid, numberOfAttributes);

        // wrap the new image in an H4GRImage and register it with the parent
        long[] oid = {HDFConstants.DFTAG_RIG, ref};
        dataset = new H4GRImage(pgroup.getFileFormat(), dname, path, oid);

        pgroup.addToMemberList(dataset);

        close(srcdid);

        try {
            HDFLibrary.GRendaccess(dstdid);
        }
        catch (HDFException ex) {
            log.debug("copy(): GRendaccess failure: ", ex);
        }

        log.trace("copy(): finish");
        return dataset;
    }
305
    // ***** need to implement from ScalarDS *****
    // Stub: reading a palette by index is not implemented; always returns null.
    @Override
    public byte[][] readPalette(int idx) { return null;}
309
    // ***** need to implement from ScalarDS *****
    // Stub: palette reference numbers are not tracked; always returns null.
    @Override
    public byte[] getPaletteRefs() { return null;}
313
314    // implementing ScalarDS
315    @Override
316    public Datatype getDatatype()
317    {
318        if (datatype == null) {
319            datatype = new H4Datatype(datatypeID);
320        }
321
322        return datatype;
323    }
324
    // Implementing Dataset
    /**
     * Reads the selected subset of the image data as a raw byte array,
     * without converting the bytes to the datatype of this image.
     *
     * @return the bytes read, or null if the image cannot be opened.
     *
     * @throws HDFException if the HDF library reports an error.
     */
    @Override
    public byte[] readBytes() throws HDFException
    {
        log.trace("readBytes(): start");

        byte[] theData = null;

        // make sure rank, dims and the selection arrays are populated
        if (!isInited()) init();

        long id = open();
        if (id < 0) {
            log.debug("readBytes(): Invalid ID");
            log.trace("readBytes(): finish");
            return null;
        }

        try {
            // set the interlacing scheme for reading image data
            HDFLibrary.GRreqimageil(id, interlace);
            int datasize = (int)(getWidth()*getHeight()*ncomp);
            // byte count = element count * size of one element of this type
            int size = HDFLibrary.DFKNTsize(datatypeID)*datasize;
            theData = new byte[size];
            int[] start = {(int)startDims[0], (int)startDims[1]};
            int[] select = {(int)selectedDims[0], (int)selectedDims[1]};

            // stride is optional; null means contiguous reading
            int[] stride = null;
            if (selectedStride != null) {
                stride = new int[rank];
                for (int i=0; i<rank; i++) {
                    stride[i] = (int)selectedStride[i];
                }
            }

            HDFLibrary.GRreadimage(id, start, stride, select, theData);
        }
        catch (Exception ex) {
            log.debug("readBytes(): failure: ", ex);
        }
        finally {
            close(id);
        }

        log.trace("readBytes(): finish");
        return theData;
    }
371
    // ***** need to implement from DataFormat *****
    /**
     * Reads the selected subset of the image data from the file into a 1D
     * array of the image's datatype. The selection is defined by startDims,
     * selectedDims and selectedStride (see the class comment).
     *
     * @return the data array, or null if the image cannot be opened or the
     *         buffer cannot be allocated.
     *
     * @throws HDFException if the HDF library reports an error.
     */
    @Override
    public Object read() throws HDFException
    {
        log.trace("read(): start");

        Object theData = null;

        // make sure rank, dims and the selection arrays are populated
        if (!isInited()) init();

        long id = open();
        if (id < 0) {
            log.debug("read(): Invalid ID");
            log.trace("read(): finish");
            return null;
        }

        try {
            // set the interlacing scheme for reading image data
            HDFLibrary.GRreqimageil(id, interlace);
            int datasize = (int)(getWidth()*getHeight()*ncomp);

            // allocate a 1D array matching the native datatype
            theData = H4Datatype.allocateArray(datatypeID, datasize);

            if (theData != null) {
                // assume external data files are located in the same directory as the main file.
                HDFLibrary.HXsetdir(getFileFormat().getParent());

                int[] start = {(int)startDims[0], (int)startDims[1]};
                int[] select = {(int)selectedDims[0], (int)selectedDims[1]};

                // stride is optional; null means contiguous reading
                int[] stride = null;
                if (selectedStride != null) {
                    stride = new int[rank];
                    for (int i=0; i<rank; i++) {
                        stride[i] = (int)selectedStride[i];
                    }
                }

                HDFLibrary.GRreadimage(id, start, stride, select, theData);
            }
        }
        catch (Exception ex) {
            log.debug("read(): failure: ", ex);
        }
        finally {
            close(id);
        }

        // record whether the selected display order matches the default
        // image order (see init(): GR data is arranged [width][height])
        if ( (rank >1) && (selectedIndex[1]>selectedIndex[0]))
            isDefaultImageOrder = false;
        else
            isDefaultImageOrder = true;

        log.trace("read(): isDefaultImageOrder={}", isDefaultImageOrder);
        log.trace("read(): finish");
        return theData;
    }
430
431    // Implementing DataFormat
432    @SuppressWarnings("deprecation")
433    @Override
434    public void write(Object buf) throws HDFException
435    {
436        log.trace("write(): start");
437
438        if (buf == null) {
439            log.debug("write(): buf is null");
440            log.trace("write(): finish");
441            return;
442        }
443
444        long id = open();
445        if (id < 0) {
446            log.debug("write(): Invalid ID");
447            log.trace("write(): finish");
448            return;
449        }
450
451        int[] select = new int[rank];
452        int[] start = new int[rank];
453        for (int i=0; i<rank; i++) {
454            select[i] = (int)selectedDims[i];
455            start[i] = (int)startDims[i];
456        }
457
458        int[] stride = null;
459        if (selectedStride != null) {
460            stride = new int[rank];
461            for (int i=0; i<rank; i++) {
462                stride[i] = (int)selectedStride[i];
463            }
464        }
465
466        Object tmpData = buf;
467        try {
468            if (getDatatype().isUnsigned() && unsignedConverted) {
469                tmpData = convertToUnsignedC(buf);
470            }
471            // assume external data files are located in the same directory as the main file.
472            HDFLibrary.HXsetdir(getFileFormat().getParent());
473
474            HDFLibrary.GRwriteimage(id, start, stride, select, tmpData);
475        }
476        catch (Exception ex) {
477            log.debug("write(): failure: ", ex);
478        }
479        finally {
480            tmpData = null;
481            close(id);
482        }
483
484        log.trace("write(): finish");
485    }
486
    // ***** need to implement from DataFormat *****
    /**
     * Retrieves the attributes of this image from the file. The result is
     * cached, so the file is read only on the first call.
     *
     * @return the list of Attribute objects, or null if the image has no
     *         attributes or cannot be read.
     *
     * @throws HDFException if the HDF library reports an error.
     */
    @Override
    @SuppressWarnings({"rawtypes", "unchecked"})
    public List getMetadata() throws HDFException
    {
        log.trace("getMetadata(): start");

        if (attributeList != null) {
            log.trace("getMetadata(): attributeList != null");
            log.trace("getMetadata(): finish");
            return attributeList;
        }

        long id = open();
        String[] objName = {""};
        int[] grInfo = new int[4]; //ncomp, data_type, interlace, and num_attrs
        int[] idims = new int[2];
        try {
            HDFLibrary.GRgetiminfo(id, objName, grInfo, idims);
            // mask off the litend bit
            grInfo[1] = grInfo[1] & (~HDFConstants.DFNT_LITEND);
            int n = grInfo[3];

            if ((attributeList == null) && (n>0)) {
                attributeList = new Vector(n, 5);
            }

            boolean b = false;
            String[] attrName = new String[1];
            int[] attrInfo = {0, 0}; // data_type, length
            for (int i=0; i<n; i++) {
                attrName[0] = "";
                try {
                    b = HDFLibrary.GRattrinfo(id, i, attrName, attrInfo);
                    // mask off the litend bit
                    attrInfo[0] = attrInfo[0] & (~HDFConstants.DFNT_LITEND);
                }
                catch (HDFException ex) {
                    log.debug("getMetadata(): GRattrinfo failure: ", ex);
                    b = false;
                }

                // skip attributes whose info could not be read
                if (!b) {
                    continue;
                }

                long[] attrDims = {attrInfo[1]};
                Attribute attr = new Attribute(this, attrName[0], new H4Datatype(attrInfo[0]), attrDims);
                attributeList.add(attr);

                // allocate a buffer of the attribute's type and length
                Object buf = null;
                try {
                    buf = H4Datatype.allocateArray(attrInfo[0], attrInfo[1]);
                }
                catch (OutOfMemoryError e) {
                    log.debug("getMetadata(): out of memory: ", e);
                    buf = null;
                }

                try {
                    HDFLibrary.GRgetattr(id, i, buf);
                }
                catch (HDFException ex) {
                    log.debug("getMetadata(): GRgetattr failure: ", ex);
                    buf = null;
                }

                if (buf != null) {
                    // character data is presented as String values
                    if ((attrInfo[0] == HDFConstants.DFNT_CHAR) ||
                        (attrInfo[0] ==  HDFConstants.DFNT_UCHAR8)) {
                        buf = Dataset.byteToString((byte[])buf, attrInfo[1]);
                    }

                    attr.setData(buf);
                }
            } // for (int i=0; i<n; i++)
        }
        catch (Exception ex) {
            log.debug("getMetadata(): failure: ", ex);
        }
        finally {
            close(id);
        }

        log.trace("getMetadata(): finish");
        return attributeList;
    }
574
575    // ***** need to implement from DataFormat *****
576    @Override
577    @SuppressWarnings({"rawtypes", "unchecked"})
578    public void writeMetadata(Object info) throws Exception
579    {
580        log.trace("writeMetadata(): start");
581
582        // only attribute metadata is supported.
583        if (!(info instanceof Attribute)) {
584            log.debug("writeMetadata(): Object not an Attribute");
585            log.trace("writeMetadata(): finish");
586            return;
587        }
588
589        try {
590            getFileFormat().writeAttribute(this, (Attribute)info, true);
591
592            if (attributeList == null) {
593                attributeList = new Vector();
594            }
595
596            attributeList.add(info);
597            nAttributes = attributeList.size();
598        }
599        catch (Exception ex) {
600            log.debug("writeMetadata(): failure: ", ex);
601        }
602
603        log.trace("writeMetadata(): finish");
604    }
605
    // ***** need to implement from DataFormat *****
    // Removing attributes is not supported; this is intentionally a no-op.
    @Override
    public void removeMetadata(Object info) throws HDFException {
        log.trace("removeMetadata(): disabled");
    }
611
    // implementing DataFormat
    // Updating attributes is not supported; this is intentionally a no-op.
    @Override
    public void updateMetadata(Object info) throws Exception {
        log.trace("updateMetadata(): disabled");
    }
617
618    // Implementing HObject.
619    @Override
620    public long open()
621    {
622        log.trace("open(): start: for file={} with ref={}", getFID(), (short) oid[1]);
623
624        long id = -1;
625        try {
626            int index = HDFLibrary.GRreftoindex(grid, (short)oid[1]);
627            id = HDFLibrary.GRselect(grid, index);
628        }
629        catch (HDFException ex) {
630            log.debug("open(): failure: ", ex);
631            id = -1;
632        }
633
634        log.trace("open(): finish");
635        return id;
636    }
637
638    // Implementing HObject.
639    @Override
640    public void close(long grid)
641    {
642        try { HDFLibrary.GRendaccess(grid); }
643        catch (HDFException ex) {log.debug("close(): failure: ", ex);}
644    }
645
    // Implementing Dataset.
    /**
     * Reads the image information (datatype, compression, chunking, rank,
     * dimension sizes and default selection) from the file. Runs only once;
     * subsequent calls return immediately.
     */
    @Override
    public void init()
    {
        log.trace("init(): start");

        if (inited) {
            log.trace("init(): Already initialized");
            log.trace("init(): finish");
            return; // already called. Initialize only once
        }

        long id = open();
        String[] objName = {""};
        int[] grInfo = new int[4]; //ncomp, data_type, interlace and num_attrs
        int[] idims = new int[2];
        try {
            HDFLibrary.GRgetiminfo(id, objName, grInfo, idims);
            // mask off the litend bit
            grInfo[1] = grInfo[1] & (~HDFConstants.DFNT_LITEND);
            datatypeID = grInfo[1];

            // get compression information
            try {
                HDFCompInfo compInfo = new HDFCompInfo();
                HDFLibrary.GRgetcompinfo(id, compInfo);
                if (compInfo.ctype == HDFConstants.COMP_CODE_DEFLATE) {
                    compression = "GZIP";
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_SZIP) {
                    compression = "SZIP";
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_JPEG) {
                    compression = "JPEG";
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_SKPHUFF) {
                    compression = "SKPHUFF";
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_RLE) {
                    compression = "RLE";
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_NBIT) {
                    compression = "NBIT";
                }
            }
            catch (Exception ex) {
                log.debug("init(): get compression information failure: ", ex);
            }

            // get chunk information
            try {
                HDFChunkInfo chunkInfo = new HDFChunkInfo();
                int[] cflag = {HDFConstants.HDF_NONE};
                HDFLibrary.GRgetchunkinfo(id, chunkInfo, cflag);
                if (cflag[0] == HDFConstants.HDF_NONE) {
                    // not chunked
                    chunkSize = null;
                    storage_layout = "NONE";
                }
                else {
                    chunkSize = new long[rank];
                    for (int i=0; i<rank; i++) {
                        chunkSize[i] = chunkInfo.chunk_lengths[i];
                    }
                    // e.g. "CHUNKED: 64 X 64"
                    storage_layout = "CHUNKED: " + String.valueOf(chunkSize[0]);
                    for (int i = 1; i < rank; i++) {
                        storage_layout += " X " + chunkSize[i];
                    }
                }
            }
            catch (Exception ex) {
                log.debug("init(): get chunk information failure: ", ex);
            }

            inited = true;
        }
        catch (HDFException ex) {
            log.debug("init(): failure: ", ex);
        }
        finally {
            close(id);
        }

        // three or more components are treated as a true-color image
        ncomp = grInfo[0];
        isTrueColor = (ncomp >= 3);
        interlace = grInfo[2];
        rank = 2; // support only two dimensional raster image

        // data in HDF4 GR image is arranged as dim[0]=width, dim[1]=height.
        // other image data is arranged as dim[0]=height, dim[1]=width.
        selectedIndex[0] = 1;
        selectedIndex[1] = 0;

        // default selection: the whole image, starting at the origin
        dims = new long[rank];
        startDims = new long[rank];
        selectedDims = new long[rank];
        for (int i=0; i<rank; i++) {
            startDims[i] = 0;
            selectedDims[i] = idims[i];
            dims[i] = idims[i];
        }

        log.trace("init(): finish");
    }
749
750    // ***** need to implement from ScalarDS *****
751    @Override
752    public byte[][] getPalette()
753    {
754        log.trace("getPalette(): start");
755
756        if (palette != null) {
757            log.trace("getPalette(): palette != null");
758            log.trace("getPalette(): finish");
759            return palette;
760        }
761
762        long id = open();
763        if (id < 0) {
764            log.debug("getPalette(): Invalid ID");
765            log.trace("getPalette(): finish");
766            return null;
767        }
768
769        // get palette info.
770        long lutid  = -1;
771        int[] lutInfo = new int[4]; //ncomp, datatype, interlace, num_entries
772        try {
773            // find the first palette.
774            // Todo: get all the palettes
775            lutid = HDFLibrary.GRgetlutid(id, 0);
776            HDFLibrary.GRgetlutinfo(lutid, lutInfo);
777        }
778        catch (HDFException ex) {
779            log.debug("getPalette(): failure: ", ex);
780            close(id);
781            log.trace("getPalette(): finish");
782            return null;
783        }
784
785        // check if there is palette data. HDFLibrary.GRgetlutinfo() sometimes
786        // return true even if there is no palette data, and check if it is a
787        // RGB with 256 colors
788        if ((lutInfo[0] != 3) || (lutInfo[2] < 0) | (lutInfo[3] != 256)) {
789            close(id);
790            log.debug("getPalette(): no palette data");
791            log.trace("getPalette(): finish");
792            return null;
793        }
794
795        // read palette data
796        boolean b = false;
797        byte[] pal = new byte[3*256];
798        try
799        {
800            HDFLibrary.GRreqlutil(id, lutInfo[2]);
801            b = HDFLibrary.GRreadlut(lutid, pal);
802        }
803        catch (HDFException ex) {
804            log.debug("getPalette(): failure: ", ex);
805            b = false;
806        }
807
808        if (!b) {
809            close(id);
810            log.debug("getPalette(): no palette data");
811            log.trace("getPalette(): finish");
812            return null;
813        }
814
815        palette = new byte[3][256];
816        if (lutInfo[2] == HDFConstants.MFGR_INTERLACE_PIXEL) {
817            // color conponents are arranged in RGB, RGB, RGB, ...
818            for (int i=0; i<256; i++) {
819                palette[0][i] = pal[i*3];
820                palette[1][i] = pal[i*3+1];
821                palette[2][i] = pal[i*3+2];
822            }
823        }
824        else {
825            for (int i=0; i<256; i++) {
826                palette[0][i] = pal[i];
827                palette[1][i] = pal[256+i];
828                palette[2][i] = pal[512+i];
829            }
830        }
831
832        close(id);
833
834        log.trace("getPalette(): finish");
835        return palette;
836    }
837
838    /**
839     * Returns the number of components of this image data.
840     *
841     * @return the number of components
842     */
843    public int getComponentCount()
844    {
845        return ncomp;
846    }
847
848    /**
849     * Creates a new image.
850     *
851     * @param name the name of the dataset to create.
852     * @param pgroup the parent group of the new dataset.
853     * @param type the datatype of the dataset.
854     * @param dims the dimension size of the dataset.
855     * @param maxdims the max dimension size of the dataset.
856     * @param chunks the chunk size of the dataset.
857     * @param gzip the level of the gzip compression.
858     * @param ncomp number of components of the image data.
859     * @param interlace the interlace mode.
860     * @param data the array of data values.
861     *
862     * @return the new image if successful. Otherwise returns null.
863     *
864     * @throws Exception if the image can not be created
865     */
866    public static H4GRImage create(
867        String name,
868        Group pgroup,
869        Datatype type,
870        long[] dims,
871        long[] maxdims,
872        long[] chunks,
873        int gzip,
874        int ncomp,
875        int interlace,
876        Object data) throws Exception
877    {
878        log.trace("create(): start: name={} parentGroup={} type={} gzip={} ncomp={} interlace={}", name, pgroup, type, gzip, ncomp, interlace);
879
880        H4GRImage dataset = null;
881        if ((name == null) ||
882            (pgroup == null) ||
883            (dims == null) ||
884            ((gzip>0) && (chunks==null))) {
885            log.debug("create(): one or more parameters are null");
886            log.trace("create(): finish");
887            return null;
888        }
889
890        H4File file = (H4File)pgroup.getFileFormat();
891        if (file == null) {
892            log.debug("create(): Parent group FileFormat is null");
893            log.trace("create(): finish");
894            return null;
895        }
896
897        String path = HObject.separator;
898        if (!pgroup.isRoot()) {
899            path = pgroup.getPath()+pgroup.getName()+HObject.separator;
900        }
901        if (interlace == ScalarDS.INTERLACE_PLANE) {
902            interlace = HDFConstants.MFGR_INTERLACE_COMPONENT;
903        }
904        else {
905            interlace = HDFConstants.MFGR_INTERLACE_PIXEL;
906        }
907
908        int rank = 2;
909        int idims[] = new int[rank];
910        int imaxdims[] = new int[rank];
911        int start[] = new int [rank];
912        for (int i=0; i<rank; i++) {
913            idims[i] = (int)dims[i];
914            if (maxdims != null) {
915                imaxdims[i] = (int)maxdims[i];
916            }
917            else {
918                imaxdims[i] = idims[i];
919            }
920            start[i] = 0;
921        }
922
923        int ichunks[] = null;
924        if (chunks != null) {
925            ichunks = new int[rank];
926            for (int i=0; i<rank; i++) {
927                ichunks[i] = (int)chunks[i];
928            }
929        }
930
931        long grid = -1;
932        long vgid = -1;
933        long gid = (file).getGRAccessID();
934        long tid = type.createNative();
935
936        if(tid >= 0) {
937            try {
938                grid = HDFLibrary.GRcreate(gid, name, ncomp, tid, interlace, idims);
939            }
940            catch (Exception ex) {
941                log.debug("create(): failure: ", ex);
942                log.trace("create(): finish");
943                throw (ex);
944            }
945        }
946
947        if (grid < 0) {
948            log.debug("create(): Invalid GR ID");
949            log.trace("create(): finish");
950            throw (new HDFException("Unable to create the new dataset."));
951        }
952
953        if ((grid > 0) && (data != null)) {
954            HDFLibrary.GRwriteimage(grid, start, null, idims, data);
955        }
956
957        if (chunks != null) {
958            // set chunk
959            HDFChunkInfo chunkInfo = new HDFChunkInfo(ichunks);
960            HDFLibrary.GRsetchunk(grid, chunkInfo, HDFConstants.HDF_CHUNK);
961        }
962
963        if (gzip > 0) {
964            // set compression
965            int compType = HDFConstants.COMP_CODE_DEFLATE;
966            HDFDeflateCompInfo compInfo = new HDFDeflateCompInfo();
967            compInfo.level = gzip;
968            HDFLibrary.GRsetcompress(grid, compType, compInfo);
969        }
970
971        int ref = HDFLibrary.GRidtoref(grid);
972
973        if (!pgroup.isRoot()) {
974            // add the dataset to the parent group
975            vgid = pgroup.open();
976            if (vgid < 0) {
977                if (grid > 0) {
978                    HDFLibrary.GRendaccess(grid);
979                }
980                log.debug("create(): Invalid VG ID");
981                log.trace("create(): finish");
982                throw (new HDFException("Unable to open the parent group."));
983            }
984
985            HDFLibrary.Vaddtagref(vgid, HDFConstants.DFTAG_RI, ref);
986
987            pgroup.close(vgid);
988        }
989
990        try {
991            if (grid > 0) {
992                HDFLibrary.GRendaccess(grid);
993            }
994        }
995        catch (Exception ex) {
996            log.debug("create(): GRendaccess failure: ", ex);
997        }
998
999        long[] oid = {HDFConstants.DFTAG_NDG, ref};
1000        dataset = new H4GRImage(file, name, path, oid);
1001
1002        if (dataset != null) {
1003            pgroup.addToMemberList(dataset);
1004        }
1005
1006        log.trace("create(): finish");
1007        return dataset;
1008    }
1009
1010    /**
1011     * copy attributes from one GR image to another GR image
1012     */
1013    private void copyAttribute(long srcdid, long dstdid, int numberOfAttributes)
1014    {
1015        log.trace("copyAttribute(): start: srcdid={} dstdid={} numAttributes={}", srcdid, dstdid, numberOfAttributes);
1016
1017        if (numberOfAttributes <= 0) {
1018            log.debug("copyAttribute(): numberOfAttributes={}", numberOfAttributes);
1019            log.trace("copyAttribute(): finish");
1020            return;
1021        }
1022
1023        try {
1024            boolean b = false;
1025            String[] attrName = new String[1];
1026            int[] attrInfo = {0, 0};
1027            for (int i=0; i<numberOfAttributes; i++) {
1028                attrName[0] = "";
1029                try {
1030                    b = HDFLibrary.GRattrinfo(srcdid, i, attrName, attrInfo);
1031                }
1032                catch (HDFException ex) {
1033                    log.trace("copyAttribute(): attribute[{}] GRattrinfo failure: ", i, ex);
1034                    b = false;
1035                }
1036
1037                if (!b) {
1038                    continue;
1039                }
1040
1041                // read attribute data from source dataset
1042                byte[] attrBuff = new byte[attrInfo[1] * HDFLibrary.DFKNTsize(attrInfo[0])];
1043                try {
1044                    HDFLibrary.GRgetattr(srcdid, i, attrBuff);
1045                }
1046                catch (Exception ex) {
1047                    log.trace("copyAttribute(): attribute[{}] GRgetattr failure: ", i, ex);
1048                    attrBuff = null;
1049                }
1050
1051                if (attrBuff == null) {
1052                    log.debug("copyAttribute(): attrBuff[{}] is null", i);
1053                    log.trace("copyAttribute(): continue");
1054                    continue;
1055                }
1056
1057                // attach attribute to the destination dataset
1058                HDFLibrary.GRsetattr(dstdid, attrName[0], attrInfo[0], attrInfo[1], attrBuff);
1059            } // for (int i=0; i<numberOfAttributes; i++)
1060        }
1061        catch (Exception ex) {
1062            log.debug("copyAttribute(): failure: ", ex);
1063        }
1064    }
1065
    //Implementing DataFormat
    /**
     * Retrieves metadata selected by an attribute property list.
     * <p>
     * This operation is not implemented for HDF4 GR images.
     *
     * @param attrPropList the indices of the attribute properties to retrieve.
     *
     * @return never returns normally.
     *
     * @throws UnsupportedOperationException always, since this operation is not supported.
     */
    @SuppressWarnings("rawtypes")
    public List getMetadata(int... attrPropList) throws Exception {
        throw new UnsupportedOperationException("getMetadata(int... attrPropList) is not supported");
    }
1071}