001/*****************************************************************************
002 * Copyright by The HDF Group.                                               *
003 * Copyright by the Board of Trustees of the University of Illinois.         *
004 * All rights reserved.                                                      *
005 *                                                                           *
006 * This file is part of the HDF Java Products distribution.                  *
007 * The full copyright notice, including terms governing use, modification,   *
008 * and redistribution, is contained in the files COPYING and Copyright.html. *
009 * COPYING can be found at the root of the source code distribution tree.    *
010 * Or, see https://support.hdfgroup.org/products/licenses.html               *
011 * If you do not have access to either file, you may request a copy from     *
012 * help@hdfgroup.org.                                                        *
013 ****************************************************************************/
014
015package hdf.object.h4;
016
017import java.util.List;
018import java.util.Vector;
019
020import hdf.hdflib.HDFChunkInfo;
021import hdf.hdflib.HDFCompInfo;
022import hdf.hdflib.HDFConstants;
023import hdf.hdflib.HDFDeflateCompInfo;
024import hdf.hdflib.HDFException;
025import hdf.hdflib.HDFLibrary;
026import hdf.object.Attribute;
027import hdf.object.Dataset;
028import hdf.object.Datatype;
029import hdf.object.FileFormat;
030import hdf.object.Group;
031import hdf.object.HObject;
032import hdf.object.ScalarDS;
033
034/**
035 * H4GRImage describes an HDF4 general raster(GR) image and operations performed on
 * the GR image. An HDF4 raster image is a two-dimensional array of pixel values.
037 * <p>
038 * Every GR data set must contain the following components: image array, name,
039 * pixel type, and dimensions. The name, dimensions, and pixel type must be
040 * supplied by the user at the time the GR data set is defined.
041 * <p>
042 * An image array is a two-dimensional array of pixels. Each element in an image
043 * array corresponds to one pixel and each pixel can consist of a number of
044 * color component values or pixel components, e.g., Red-Green-Blue or RGB,
045 * Cyan-Magenta-Yellow-Black or CMYK, etc. Pixel components can be represented
046 * by different methods (8-bit lookup table or 24-bit direct representation) and
047 * may have different data types. The data type of pixel components and the number
048 * of components in each pixel are collectively known as the pixel type.
049 * <p>
050 * <b>How to Select a Subset</b>
051 * <p>
052 * Dataset defines APIs for reading, writing and subsetting a dataset. No function is
053 * defined to select a subset of a data array. The selection is done in an implicit way.
054 * Function calls to dimension information such as getSelectedDims() return an array
055 * of dimension values, which is a reference to the array in the dataset object.
056 * Changes of the array outside the dataset object directly change the values of
057 * the array in the dataset object. It is like pointers in C.
058 * <p>
059 *
060 * The following is an example of how to make a subset. In the example, the dataset
061 * is a 4-dimension with size of [200][100][50][10], i.e.
062 * dims[0]=200; dims[1]=100; dims[2]=50; dims[3]=10; <br>
063 * We want to select every other data point in dims[1] and dims[2]
064 * <pre>
065     int rank = dataset.getRank();   // number of dimensions of the dataset
066     long[] dims = dataset.getDims(); // the dimension sizes of the dataset
     long[] selected = dataset.getSelectedDims(); // the selected size of the dataset
068     long[] start = dataset.getStartDims(); // the offset of the selection
069     long[] stride = dataset.getStride(); // the stride of the dataset
070     int[]  selectedIndex = dataset.getSelectedIndex(); // the selected dimensions for display
071
072     // select dim1 and dim2 as 2D data for display, and slice through dim0
     selectedIndex[0] = 1;
     selectedIndex[1] = 2;
     selectedIndex[2] = 0;
076
077     // reset the selection arrays
078     for (int i=0; i&lt;rank; i++) {
079         start[i] = 0;
080         selected[i] = 1;
081         stride[i] = 1;
082    }
083
084    // set stride to 2 on dim1 and dim2 so that every other data point is selected.
085    stride[1] = 2;
086    stride[2] = 2;
087
088    // set the selection size of dim1 and dim2
089    selected[1] = dims[1]/stride[1];
    selected[2] = dims[2]/stride[2];
091
    // when dataset.read() is called, the selection above will be used since
093    // the dimension arrays are passed by reference. Changes of these arrays
094    // outside the dataset object directly change the values of these array
095    // in the dataset object.
096
097 * </pre>
098 *
099 * @version 1.1 9/4/2007
100 * @author Peter X. Cao
101 */
102public class H4GRImage extends ScalarDS
103{
104    private static final long serialVersionUID = 1029672744963360976L;
105
106    private static final org.slf4j.Logger   log = org.slf4j.LoggerFactory.getLogger(H4GRImage.class);
107
108    /**
109     * The list of attributes of this data object. Members of the list are
110     * instance of Attribute.
111     */
112    @SuppressWarnings("rawtypes")
113    private List                            attributeList;
114
115    /**
116     * The GR interface identifier obtained from GRstart(fid)
117     */
118    private long                            grid;
119
120    /**
121     * The number of components in the raster image
122     */
123    private int                             ncomp;
124
125    /** the datatype identifier */
126    private long                            datatypeID = -1;
127
128    private int                             nAttributes = -1;
129
130
131    public H4GRImage(FileFormat theFile, String name, String path)
132    {
133        this(theFile, name, path, null);
134    }
135
136    /**
137     * Creates a H4GRImage object with specific name, path, and object ID.
138     *
139     * @param theFile the HDF file.
140     * @param name the name of this H4GRImage.
141     * @param path the full path of this H4GRImage.
142     * @param oid the unique identifier of this data object.
143     */
144    @SuppressWarnings("deprecation")
145    public H4GRImage(
146        FileFormat theFile,
147        String name,
148        String path,
149        long[] oid)
150    {
151        super (theFile, name, path, oid);
152        palette = null;
153        isImage = isImageDisplay = true;
154        unsignedConverted = false;
155        grid = ((H4File)getFileFormat()).getGRAccessID();
156    }
157
158    /*
159     * (non-Javadoc)
160     * @see hdf.object.DataFormat#hasAttribute()
161     */
162    @Override
163    public boolean hasAttribute ()
164    {
165        if (nAttributes < 0) {
166            grid = ((H4File)getFileFormat()).getGRAccessID();
167
168            long id = open();
169
170            if (id >= 0) {
171                String[] objName = {""};
172                int[] grInfo = new int[4]; //ncomp, data_type, interlace, and num_attrs
173                int[] idims = new int[2];
174                try {
175                    HDFLibrary.GRgetiminfo(id, objName, grInfo, idims);
176                    nAttributes = grInfo[3];
177                }
178                catch (Exception ex) {
179                    log.trace("hasAttribute() failure: ", ex);
180                    nAttributes = 0;
181                }
182
183                log.trace("hasAttribute(): nAttributes={}", nAttributes);
184
185                close(id);
186            }
187        }
188
189        return (nAttributes > 0);
190    }
191
192    // To do: Implementing Dataset
193    @Override
194    public Dataset copy(Group pgroup, String dname, long[] dims, Object buff) throws Exception
195    {
196        log.trace("copy(): start: parentGroup={} datasetName={}", pgroup, dname);
197
198        Dataset dataset = null;
199        long srcdid = -1;
200        long dstdid = -1;
201        String path = null;
202        int[] count = null;
203
204        if (pgroup == null) {
205            log.debug("copy(): Parent group is null - exit");
206            return null;
207        }
208
209        if (pgroup.isRoot()) {
210            path = HObject.SEPARATOR;
211        }
212        else {
213            path = pgroup.getPath()+pgroup.getName()+HObject.SEPARATOR;
214        }
215
216        srcdid = open();
217        if (srcdid < 0) {
218            log.debug("copy(): Invalid source dataset ID - exit");
219            return null;
220        }
221
222        if (dims != null) {
223            count = new int[2];
224            count[0] = (int)dims[0];
225            count[1] = (int)dims[1];
226        }
227
228        int[] grInfo = new int[4]; //ncomp, data_type, interlace and num_attrs
229        try {
230            String[] tmpName = {""};
231            int[] tmpDims = new int[2];
232            HDFLibrary.GRgetiminfo(srcdid, tmpName, grInfo, tmpDims);
233            if (count == null) {
234                count = tmpDims;
235            }
236        }
237        catch (HDFException ex) {
238            log.debug("copy(): GRgetiminfo failure: ", ex);
239        }
240
241        ncomp = grInfo[0];
242        long tid = grInfo[1];
243        int interlace = grInfo[2];
244        int numberOfAttributes = grInfo[3];
245        dstdid = HDFLibrary.GRcreate(
246                ((H4File)pgroup.getFileFormat()).getGRAccessID(),
247                dname, ncomp, tid, interlace, count);
248        if (dstdid < 0) {
249            log.debug("copy(): Invalid dest dataset ID - exit");
250            return null;
251        }
252
253        int ref = HDFLibrary.GRidtoref(dstdid);
254        if (!pgroup.isRoot()) {
255            long vgid = pgroup.open();
256            HDFLibrary.Vaddtagref(vgid, HDFConstants.DFTAG_RIG, ref);
257            pgroup.close(vgid);
258        }
259
260        // read data from the source dataset
261        int[] start = {0, 0};
262        if ((buff == null) && (count != null)) {
263            buff = new byte[count[0]*count[1] * HDFLibrary.DFKNTsize(tid)];
264            HDFLibrary.GRreadimage(srcdid, start, null, count, buff);
265        }
266
267        // write the data into the destination dataset
268        HDFLibrary.GRwriteimage(dstdid, start, null, count, buff);
269
270        // copy palette
271        long pid = HDFLibrary.GRgetlutid(srcdid, 0);
272        int[] palInfo = new int[4];
273
274        HDFLibrary.GRgetlutinfo(pid, palInfo);
275        palInfo[1] = HDFConstants.DFNT_UINT8; // support unsigned byte only. Other type does not work
276        int palSize = palInfo[0]*palInfo[3];
277        byte[] palBuff = new byte[palSize];
278        HDFLibrary.GRreadlut(pid, palBuff);
279        pid = HDFLibrary.GRgetlutid(dstdid, 0);
280        HDFLibrary.GRwritelut(pid, palInfo[0], palInfo[1], palInfo[2], palInfo[3], palBuff);
281
282        // copy attributes from one object to the new object
283        log.trace("copy(): copyAttributes: numAttributes={}", numberOfAttributes);
284        copyAttribute(srcdid, dstdid, numberOfAttributes);
285
286        long[] oid = {HDFConstants.DFTAG_RIG, ref};
287        dataset = new H4GRImage(pgroup.getFileFormat(), dname, path, oid);
288
289        pgroup.addToMemberList(dataset);
290
291        close(srcdid);
292
293        try {
294            HDFLibrary.GRendaccess(dstdid);
295        }
296        catch (HDFException ex) {
297            log.debug("copy(): GRendaccess failure: ", ex);
298        }
299
300        return dataset;
301    }
302
303    // ***** need to implement from ScalarDS *****
304    @Override
305    public byte[][] readPalette(int idx) { return null;}
306
307    // ***** need to implement from ScalarDS *****
308    @Override
309    public byte[] getPaletteRefs() { return null;}
310
311    // implementing ScalarDS
312    @Override
313    public Datatype getDatatype()
314    {
315        if (!inited)
316            init();
317
318        if (datatype == null) {
319            try {
320                datatype = new H4Datatype(datatypeID);
321            }
322            catch (Exception ex) {
323                log.debug("getDatatype(): failed to create datatype: ", ex);
324                datatype = null;
325            }
326        }
327
328        return datatype;
329    }
330
331    // Implementing Dataset
332    @Override
333    public byte[] readBytes() throws HDFException
334    {
335        byte[] theData = null;
336
337        if (!isInited()) init();
338
339        long id = open();
340        if (id < 0) {
341            log.debug("readBytes(): Invalid ID - exit");
342            return null;
343        }
344
345        try {
346            // set the interlacing scheme for reading image data
347            HDFLibrary.GRreqimageil(id, interlace);
348            int datasize = (int)(getWidth()*getHeight()*ncomp);
349            int size = HDFLibrary.DFKNTsize(datatypeID)*datasize;
350            theData = new byte[size];
351            int[] start = {(int)startDims[0], (int)startDims[1]};
352            int[] select = {(int)selectedDims[0], (int)selectedDims[1]};
353
354            int[] stride = null;
355            if (selectedStride != null) {
356                stride = new int[rank];
357                for (int i=0; i<rank; i++) {
358                    stride[i] = (int)selectedStride[i];
359                }
360            }
361
362            HDFLibrary.GRreadimage(id, start, stride, select, theData);
363        }
364        catch (Exception ex) {
365            log.debug("readBytes(): failure: ", ex);
366        }
367        finally {
368            close(id);
369        }
370
371        return theData;
372    }
373
374    // ***** need to implement from DataFormat *****
375    @Override
376    public Object read() throws HDFException
377    {
378        Object theData = null;
379
380        if (!isInited()) init();
381
382        long id = open();
383        if (id < 0) {
384            log.debug("read(): Invalid ID");
385            return null;
386        }
387
388        try {
389            // set the interlacing scheme for reading image data
390            HDFLibrary.GRreqimageil(id, interlace);
391            int datasize = (int)(getWidth()*getHeight()*ncomp);
392
393            theData = H4Datatype.allocateArray(datatypeID, datasize);
394
395            if (theData != null) {
396                // assume external data files are located in the same directory as the main file.
397                HDFLibrary.HXsetdir(getFileFormat().getParent());
398
399                int[] start = {(int)startDims[0], (int)startDims[1]};
400                int[] select = {(int)selectedDims[0], (int)selectedDims[1]};
401
402                int[] stride = null;
403                if (selectedStride != null) {
404                    stride = new int[rank];
405                    for (int i=0; i<rank; i++) {
406                        stride[i] = (int)selectedStride[i];
407                    }
408                }
409
410                HDFLibrary.GRreadimage(id, start, stride, select, theData);
411            }
412        }
413        catch (Exception ex) {
414            log.debug("read(): failure: ", ex);
415        }
416        finally {
417            close(id);
418        }
419
420        if ( (rank >1) && (selectedIndex[1]>selectedIndex[0]))
421            isDefaultImageOrder = false;
422        else
423            isDefaultImageOrder = true;
424
425        log.trace("read(): isDefaultImageOrder={}", isDefaultImageOrder);
426        return theData;
427    }
428
429    // Implementing DataFormat
430    @SuppressWarnings("deprecation")
431    @Override
432    public void write(Object buf) throws HDFException
433    {
434        if (buf == null) {
435            log.debug("write(): buf is null - exit");
436            return;
437        }
438
439        long id = open();
440        if (id < 0) {
441            log.debug("write(): Invalid ID - exit");
442            return;
443        }
444
445        int[] select = new int[rank];
446        int[] start = new int[rank];
447        for (int i=0; i<rank; i++) {
448            select[i] = (int)selectedDims[i];
449            start[i] = (int)startDims[i];
450        }
451
452        int[] stride = null;
453        if (selectedStride != null) {
454            stride = new int[rank];
455            for (int i=0; i<rank; i++) {
456                stride[i] = (int)selectedStride[i];
457            }
458        }
459
460        Object tmpData = buf;
461        try {
462            if (getDatatype().isUnsigned() && unsignedConverted) {
463                tmpData = convertToUnsignedC(buf);
464            }
465            // assume external data files are located in the same directory as the main file.
466            HDFLibrary.HXsetdir(getFileFormat().getParent());
467
468            HDFLibrary.GRwriteimage(id, start, stride, select, tmpData);
469        }
470        catch (Exception ex) {
471            log.debug("write(): failure: ", ex);
472        }
473        finally {
474            tmpData = null;
475            close(id);
476        }
477    }
478
479    // ***** need to implement from DataFormat *****
480    @Override
481    @SuppressWarnings({"rawtypes", "unchecked"})
482    public List getMetadata() throws HDFException
483    {
484        if (attributeList != null) {
485            log.trace("getMetadata(): attributeList != null - exit");
486            return attributeList;
487        }
488
489        long id = open();
490        String[] objName = {""};
491        int[] grInfo = new int[4]; //ncomp, data_type, interlace, and num_attrs
492        int[] idims = new int[2];
493        try {
494            HDFLibrary.GRgetiminfo(id, objName, grInfo, idims);
495            // mask off the litend bit
496            grInfo[1] = grInfo[1] & (~HDFConstants.DFNT_LITEND);
497            int n = grInfo[3];
498
499            if ((attributeList == null) && (n>0)) {
500                attributeList = new Vector(n, 5);
501            }
502
503            boolean b = false;
504            String[] attrName = new String[1];
505            int[] attrInfo = {0, 0}; // data_type, length
506            for (int i=0; i<n; i++) {
507                attrName[0] = "";
508                try {
509                    b = HDFLibrary.GRattrinfo(id, i, attrName, attrInfo);
510                    // mask off the litend bit
511                    attrInfo[0] = attrInfo[0] & (~HDFConstants.DFNT_LITEND);
512                }
513                catch (HDFException ex) {
514                    log.debug("getMetadata(): GRattrinfo failure: ", ex);
515                    b = false;
516                }
517
518                if (!b) {
519                    continue;
520                }
521
522                long[] attrDims = {attrInfo[1]};
523                Attribute attr = new Attribute(this, attrName[0], new H4Datatype(attrInfo[0]), attrDims);
524                attributeList.add(attr);
525
526                Object buf = null;
527                try {
528                    buf = H4Datatype.allocateArray(attrInfo[0], attrInfo[1]);
529                }
530                catch (OutOfMemoryError e) {
531                    log.debug("getMetadata(): out of memory: ", e);
532                    buf = null;
533                }
534
535                try {
536                    HDFLibrary.GRgetattr(id, i, buf);
537                }
538                catch (HDFException ex) {
539                    log.debug("getMetadata(): GRgetattr failure: ", ex);
540                    buf = null;
541                }
542
543                if (buf != null) {
544                    if ((attrInfo[0] == HDFConstants.DFNT_CHAR) ||
545                        (attrInfo[0] ==  HDFConstants.DFNT_UCHAR8)) {
546                        buf = Dataset.byteToString((byte[])buf, attrInfo[1]);
547                    }
548
549                    attr.setData(buf);
550                }
551            } //  (int i=0; i<n; i++)
552        }
553        catch (Exception ex) {
554            log.debug("getMetadata(): failure: ", ex);
555        }
556        finally {
557            close(id);
558        }
559
560        return attributeList;
561    }
562
563    // ***** need to implement from DataFormat *****
564    @Override
565    @SuppressWarnings({"rawtypes", "unchecked"})
566    public void writeMetadata(Object info) throws Exception
567    {
568        // only attribute metadata is supported.
569        if (!(info instanceof Attribute)) {
570            log.debug("writeMetadata(): Object not an Attribute - exit");
571            return;
572        }
573
574        try {
575            getFileFormat().writeAttribute(this, (Attribute)info, true);
576
577            if (attributeList == null) {
578                attributeList = new Vector();
579            }
580
581            attributeList.add(info);
582            nAttributes = attributeList.size();
583        }
584        catch (Exception ex) {
585            log.debug("writeMetadata(): failure: ", ex);
586        }
587    }
588
589    // ***** need to implement from DataFormat *****
590    @Override
591    public void removeMetadata(Object info) throws HDFException {
592        log.trace("removeMetadata(): disabled");
593    }
594
595    // implementing DataFormat
596    @Override
597    public void updateMetadata(Object info) throws Exception {
598        log.trace("updateMetadata(): disabled");
599    }
600
601    // Implementing HObject.
602    @Override
603    public long open()
604    {
605        log.trace("open(): start: for file={} with ref={}", getFID(), (short) oid[1]);
606
607        long id = -1;
608        try {
609            int index = HDFLibrary.GRreftoindex(grid, (short)oid[1]);
610            id = HDFLibrary.GRselect(grid, index);
611        }
612        catch (HDFException ex) {
613            log.debug("open(): failure: ", ex);
614            id = -1;
615        }
616
617        return id;
618    }
619
620    // Implementing HObject.
621    @Override
622    public void close(long grid)
623    {
624        try { HDFLibrary.GRendaccess(grid); }
625        catch (HDFException ex) {log.debug("close(): failure: ", ex);}
626    }
627
628    // Implementing Dataset.
629    @Override
630    public void init()
631    {
632        if (inited) {
633            log.trace("init(): Already initialized");
634            return; // already called. Initialize only once
635        }
636
637        long id = open();
638        String[] objName = {""};
639        int[] grInfo = new int[4]; //ncomp, data_type, interlace and num_attrs
640        int[] idims = new int[2];
641        try {
642            HDFLibrary.GRgetiminfo(id, objName, grInfo, idims);
643            // mask off the litend bit
644            grInfo[1] = grInfo[1] & (~HDFConstants.DFNT_LITEND);
645            datatypeID = grInfo[1];
646
647            // get compression information
648            try {
649                HDFCompInfo compInfo = new HDFCompInfo();
650                HDFLibrary.GRgetcompinfo(id, compInfo);
651
652                compression.setLength(0);
653
654                if (compInfo.ctype == HDFConstants.COMP_CODE_DEFLATE) {
655                    compression.append("GZIP");
656                }
657                else if (compInfo.ctype == HDFConstants.COMP_CODE_SZIP) {
658                    compression.append("SZIP");
659                }
660                else if (compInfo.ctype == HDFConstants.COMP_CODE_JPEG) {
661                    compression.append("JPEG");
662                }
663                else if (compInfo.ctype == HDFConstants.COMP_CODE_SKPHUFF) {
664                    compression.append("SKPHUFF");
665                }
666                else if (compInfo.ctype == HDFConstants.COMP_CODE_RLE) {
667                    compression.append("RLE");
668                }
669                else if (compInfo.ctype == HDFConstants.COMP_CODE_NBIT) {
670                    compression.append("NBIT");
671                }
672
673                if (compression.length() == 0)
674                    compression.append("NONE");
675            }
676            catch (Exception ex) {
677                log.debug("init(): get compression information failure: ", ex);
678            }
679
680            // get chunk information
681            try {
682                HDFChunkInfo chunkInfo = new HDFChunkInfo();
683                int[] cflag = {HDFConstants.HDF_NONE};
684                HDFLibrary.GRgetchunkinfo(id, chunkInfo, cflag);
685
686                storageLayout.setLength(0);
687
688                if (cflag[0] == HDFConstants.HDF_NONE) {
689                    chunkSize = null;
690                    storageLayout.append("NONE");
691                }
692                else {
693                    chunkSize = new long[rank];
694                    for (int i=0; i<rank; i++) {
695                        chunkSize[i] = chunkInfo.chunk_lengths[i];
696                    }
697
698                    storageLayout.append("CHUNKED: ").append(chunkSize[0]);
699                    for (int i = 1; i < rank; i++) {
700                        storageLayout.append(" X ").append(chunkSize[i]);
701                    }
702                }
703            }
704            catch (Exception ex) {
705                log.debug("init(): get chunk information failure: ", ex);
706            }
707
708            inited = true;
709        }
710        catch (HDFException ex) {
711            log.debug("init(): failure: ", ex);
712        }
713        finally {
714            close(id);
715        }
716
717        ncomp = grInfo[0];
718        isTrueColor = (ncomp >= 3);
719        interlace = grInfo[2];
720        rank = 2; // support only two dimensional raster image
721
722        // data in HDF4 GR image is arranged as dim[0]=width, dim[1]=height.
723        // other image data is arranged as dim[0]=height, dim[1]=width.
724        selectedIndex[0] = 1;
725        selectedIndex[1] = 0;
726
727        dims = new long[rank];
728        startDims = new long[rank];
729        selectedDims = new long[rank];
730        for (int i=0; i<rank; i++) {
731            startDims[i] = 0;
732            selectedDims[i] = idims[i];
733            dims[i] = idims[i];
734        }
735    }
736
737    // ***** need to implement from ScalarDS *****
738    @Override
739    public byte[][] getPalette()
740    {
741        if (palette != null) {
742            log.trace("getPalette(): palette != null - exit");
743            return palette;
744        }
745
746        long id = open();
747        if (id < 0) {
748            log.debug("getPalette(): Invalid ID - exit");
749            return null;
750        }
751
752        // get palette info.
753        long lutid  = -1;
754        int[] lutInfo = new int[4]; //ncomp, datatype, interlace, num_entries
755        try {
756            // find the first palette.
757            // Todo: get all the palettes
758            lutid = HDFLibrary.GRgetlutid(id, 0);
759            HDFLibrary.GRgetlutinfo(lutid, lutInfo);
760        }
761        catch (HDFException ex) {
762            log.debug("getPalette(): exit with failure: ", ex);
763            close(id);
764            return null;
765        }
766
767        // check if there is palette data. HDFLibrary.GRgetlutinfo() sometimes
768        // return true even if there is no palette data, and check if it is a
769        // RGB with 256 colors
770        if ((lutInfo[0] != 3) || (lutInfo[2] < 0) || (lutInfo[3] != 256)) {
771            close(id);
772            log.debug("getPalette(): no palette data - exit");
773            return null;
774        }
775
776        // read palette data
777        boolean b = false;
778        byte[] pal = new byte[3*256];
779        try
780        {
781            HDFLibrary.GRreqlutil(id, lutInfo[2]);
782            b = HDFLibrary.GRreadlut(lutid, pal);
783        }
784        catch (HDFException ex) {
785            log.debug("getPalette(): failure: ", ex);
786            b = false;
787        }
788
789        if (!b) {
790            close(id);
791            log.debug("getPalette(): no palette data - exit");
792            return null;
793        }
794
795        palette = new byte[3][256];
796        if (lutInfo[2] == HDFConstants.MFGR_INTERLACE_PIXEL) {
797            // color conponents are arranged in RGB, RGB, RGB, ...
798            for (int i=0; i<256; i++) {
799                palette[0][i] = pal[i*3];
800                palette[1][i] = pal[i*3+1];
801                palette[2][i] = pal[i*3+2];
802            }
803        }
804        else {
805            for (int i=0; i<256; i++) {
806                palette[0][i] = pal[i];
807                palette[1][i] = pal[256+i];
808                palette[2][i] = pal[512+i];
809            }
810        }
811
812        close(id);
813
814        return palette;
815    }
816
817    /**
818     * Returns the number of components of this image data.
819     *
820     * @return the number of components
821     */
822    public int getComponentCount()
823    {
824        return ncomp;
825    }
826
827    /**
828     * Creates a new image.
829     *
830     * @param name the name of the dataset to create.
831     * @param pgroup the parent group of the new dataset.
832     * @param type the datatype of the dataset.
833     * @param dims the dimension size of the dataset.
834     * @param maxdims the max dimension size of the dataset.
835     * @param chunks the chunk size of the dataset.
836     * @param gzip the level of the gzip compression.
837     * @param ncomp number of components of the image data.
838     * @param interlace the interlace mode.
839     * @param data the array of data values.
840     *
841     * @return the new image if successful. Otherwise returns null.
842     *
843     * @throws Exception if the image can not be created
844     */
845    public static H4GRImage create(
846        String name,
847        Group pgroup,
848        Datatype type,
849        long[] dims,
850        long[] maxdims,
851        long[] chunks,
852        int gzip,
853        int ncomp,
854        int interlace,
855        Object data) throws Exception
856    {
857        log.trace("create(): start: name={} parentGroup={} type={} gzip={} ncomp={} interlace={}", name, pgroup, type, gzip, ncomp, interlace);
858
859        H4GRImage dataset = null;
860        if ((name == null) ||
861            (pgroup == null) ||
862            (dims == null) ||
863            ((gzip>0) && (chunks==null))) {
864            log.debug("create(): one or more parameters are null - exit");
865            return null;
866        }
867
868        H4File file = (H4File)pgroup.getFileFormat();
869        if (file == null) {
870            log.debug("create(): Parent group FileFormat is null - exit");
871            return null;
872        }
873
874        String path = HObject.SEPARATOR;
875        if (!pgroup.isRoot()) {
876            path = pgroup.getPath()+pgroup.getName()+HObject.SEPARATOR;
877        }
878        if (interlace == ScalarDS.INTERLACE_PLANE) {
879            interlace = HDFConstants.MFGR_INTERLACE_COMPONENT;
880        }
881        else {
882            interlace = HDFConstants.MFGR_INTERLACE_PIXEL;
883        }
884
885        int rank = 2;
886        int[] idims = new int[rank];
887        int[] imaxdims = new int[rank];
888        int[] start = new int[rank];
889        for (int i=0; i<rank; i++) {
890            idims[i] = (int)dims[i];
891            if (maxdims != null) {
892                imaxdims[i] = (int)maxdims[i];
893            }
894            else {
895                imaxdims[i] = idims[i];
896            }
897            start[i] = 0;
898        }
899
900        int[] ichunks = null;
901        if (chunks != null) {
902            ichunks = new int[rank];
903            for (int i=0; i<rank; i++) {
904                ichunks[i] = (int)chunks[i];
905            }
906        }
907
908        long grid = -1;
909        long vgid = -1;
910        long gid = (file).getGRAccessID();
911        long tid = type.createNative();
912
913        if(tid >= 0) {
914            try {
915                grid = HDFLibrary.GRcreate(gid, name, ncomp, tid, interlace, idims);
916            }
917            catch (Exception ex) {
918                log.debug("create(): exit with failure: ", ex);
919                throw (ex);
920            }
921        }
922
923        if (grid < 0) {
924            log.debug("create(): Invalid GR ID - exit");
925            throw (new HDFException("Unable to create the new dataset."));
926        }
927
928        if ((grid > 0) && (data != null)) {
929            HDFLibrary.GRwriteimage(grid, start, null, idims, data);
930        }
931
932        if (chunks != null) {
933            // set chunk
934            HDFChunkInfo chunkInfo = new HDFChunkInfo(ichunks);
935            HDFLibrary.GRsetchunk(grid, chunkInfo, HDFConstants.HDF_CHUNK);
936        }
937
938        if (gzip > 0) {
939            // set compression
940            int compType = HDFConstants.COMP_CODE_DEFLATE;
941            HDFDeflateCompInfo compInfo = new HDFDeflateCompInfo();
942            compInfo.level = gzip;
943            HDFLibrary.GRsetcompress(grid, compType, compInfo);
944        }
945
946        int ref = HDFLibrary.GRidtoref(grid);
947
948        if (!pgroup.isRoot()) {
949            // add the dataset to the parent group
950            vgid = pgroup.open();
951            if (vgid < 0) {
952                if (grid > 0) {
953                    HDFLibrary.GRendaccess(grid);
954                }
955                log.debug("create(): Invalid VG ID - exit");
956                throw (new HDFException("Unable to open the parent group."));
957            }
958
959            HDFLibrary.Vaddtagref(vgid, HDFConstants.DFTAG_RI, ref);
960
961            pgroup.close(vgid);
962        }
963
964        try {
965            if (grid > 0) {
966                HDFLibrary.GRendaccess(grid);
967            }
968        }
969        catch (Exception ex) {
970            log.debug("create(): GRendaccess failure: ", ex);
971        }
972
973        long[] oid = {HDFConstants.DFTAG_NDG, ref};
974        dataset = new H4GRImage(file, name, path, oid);
975
976        if (dataset != null) {
977            pgroup.addToMemberList(dataset);
978        }
979
980        return dataset;
981    }
982
983    /**
984     * copy attributes from one GR image to another GR image
985     */
986    private void copyAttribute(long srcdid, long dstdid, int numberOfAttributes)
987    {
988        log.trace("copyAttribute(): start: srcdid={} dstdid={} numAttributes={}", srcdid, dstdid, numberOfAttributes);
989
990        if (numberOfAttributes <= 0) {
991            log.debug("copyAttribute(): numberOfAttributes={}", numberOfAttributes);
992            return;
993        }
994
995        try {
996            boolean b = false;
997            String[] attrName = new String[1];
998            int[] attrInfo = {0, 0};
999            for (int i=0; i<numberOfAttributes; i++) {
1000                attrName[0] = "";
1001                try {
1002                    b = HDFLibrary.GRattrinfo(srcdid, i, attrName, attrInfo);
1003                }
1004                catch (HDFException ex) {
1005                    log.trace("copyAttribute(): attribute[{}] GRattrinfo failure: ", i, ex);
1006                    b = false;
1007                }
1008
1009                if (!b) {
1010                    continue;
1011                }
1012
1013                // read attribute data from source dataset
1014                byte[] attrBuff = new byte[attrInfo[1] * HDFLibrary.DFKNTsize(attrInfo[0])];
1015                try {
1016                    HDFLibrary.GRgetattr(srcdid, i, attrBuff);
1017                }
1018                catch (Exception ex) {
1019                    log.trace("copyAttribute(): attribute[{}] GRgetattr failure: ", i, ex);
1020                    attrBuff = null;
1021                }
1022
1023                if (attrBuff == null) {
1024                    log.debug("copyAttribute(): attrBuff[{}] is null continue", i);
1025                    continue;
1026                }
1027
1028                // attach attribute to the destination dataset
1029                HDFLibrary.GRsetattr(dstdid, attrName[0], attrInfo[0], attrInfo[1], attrBuff);
1030            } //  (int i=0; i<numberOfAttributes; i++)
1031        }
1032        catch (Exception ex) {
1033            log.debug("copyAttribute(): failure: ", ex);
1034        }
1035    }
1036
    //Implementing DataFormat
    /**
     * Retrieves the object's metadata filtered by the given attribute property
     * list. Not supported for H4GRImage.
     *
     * @param attrPropList the attribute property list (ignored)
     *
     * @return never returns normally
     *
     * @throws UnsupportedOperationException always, since this operation is
     *             not implemented for HDF4 GR images
     */
    @SuppressWarnings("rawtypes")
    public List getMetadata(int... attrPropList) throws Exception {
        throw new UnsupportedOperationException("getMetadata(int... attrPropList) is not supported");
    }
1042}