001/*****************************************************************************
002 * Copyright by The HDF Group.                                               *
003 * Copyright by the Board of Trustees of the University of Illinois.         *
004 * All rights reserved.                                                      *
005 *                                                                           *
006 * This file is part of the HDF Java Products distribution.                  *
007 * The full copyright notice, including terms governing use, modification,   *
008 * and redistribution, is contained in the files COPYING and Copyright.html. *
009 * COPYING can be found at the root of the source code distribution tree.    *
010 * Or, see https://support.hdfgroup.org/products/licenses.html               *
011 * If you do not have access to either file, you may request a copy from     *
012 * help@hdfgroup.org.                                                        *
013 ****************************************************************************/
014
015package hdf.object.h4;
016
017import java.util.List;
018import java.util.Vector;
019
020import hdf.hdflib.HDFChunkInfo;
021import hdf.hdflib.HDFCompInfo;
022import hdf.hdflib.HDFConstants;
023import hdf.hdflib.HDFDeflateCompInfo;
024import hdf.hdflib.HDFException;
025import hdf.hdflib.HDFLibrary;
026
027import hdf.object.Attribute;
028import hdf.object.Dataset;
029import hdf.object.Datatype;
030import hdf.object.FileFormat;
031import hdf.object.Group;
032import hdf.object.HObject;
033import hdf.object.ScalarDS;
034import hdf.object.MetaDataContainer;
035
036import hdf.object.h4.H4ScalarAttribute;
037
038/**
039 * H4GRImage describes an HDF4 general raster(GR) image and operations performed on
040 * the GR image. An HDF4 raster image is a two-dimension array of pixel values.
041 *
042 * Every GR data set must contain the following components: image array, name,
043 * pixel type, and dimensions. The name, dimensions, and pixel type must be
044 * supplied by the user at the time the GR data set is defined.
045 *
046 * An image array is a two-dimensional array of pixels. Each element in an image
047 * array corresponds to one pixel and each pixel can consist of a number of
048 * color component values or pixel components, e.g., Red-Green-Blue or RGB,
049 * Cyan-Magenta-Yellow-Black or CMYK, etc. Pixel components can be represented
050 * by different methods (8-bit lookup table or 24-bit direct representation) and
051 * may have different data types. The data type of pixel components and the number
052 * of components in each pixel are collectively known as the pixel type.
053 *
054 * <b>How to Select a Subset</b>
055 *
056 * Dataset defines APIs for reading, writing and subsetting a dataset. No function is
057 * defined to select a subset of a data array. The selection is done in an implicit way.
058 * Function calls to dimension information such as getSelectedDims() return an array
059 * of dimension values, which is a reference to the array in the dataset object.
060 * Changes of the array outside the dataset object directly change the values of
061 * the array in the dataset object. It is like pointers in C.
062 *
063 * The following is an example of how to make a subset. In the example, the dataset
064 * is a 4-dimension with size of [200][100][50][10], i.e.
065 * dims[0]=200; dims[1]=100; dims[2]=50; dims[3]=10; <br>
066 * We want to select every other data point in dims[1] and dims[2]
067 * <pre>
068     int rank = dataset.getRank();   // number of dimensions of the dataset
069     long[] dims = dataset.getDims(); // the dimension sizes of the dataset
     long[] selected = dataset.getSelectedDims(); // the selected size of the dataset
071     long[] start = dataset.getStartDims(); // the offset of the selection
072     long[] stride = dataset.getStride(); // the stride of the dataset
073     int[]  selectedIndex = dataset.getSelectedIndex(); // the selected dimensions for display
074
075     // select dim1 and dim2 as 2D data for display, and slice through dim0
076     selectedIndex[0] = 1;
077     selectedIndex[1] = 2;
     selectedIndex[2] = 0;
079
080     // reset the selection arrays
081     for (int i=0; i&lt;rank; i++) {
082         start[i] = 0;
083         selected[i] = 1;
084         stride[i] = 1;
085    }
086
087    // set stride to 2 on dim1 and dim2 so that every other data point is selected.
088    stride[1] = 2;
089    stride[2] = 2;
090
091    // set the selection size of dim1 and dim2
092    selected[1] = dims[1]/stride[1];
    selected[2] = dims[2]/stride[2];
094
    // when dataset.read() is called, the selection above will be used since
096    // the dimension arrays are passed by reference. Changes of these arrays
097    // outside the dataset object directly change the values of these array
098    // in the dataset object.
099
100 * </pre>
101 *
102 * @version 1.1 9/4/2007
103 * @author Peter X. Cao
104 */
105public class H4GRImage extends ScalarDS implements MetaDataContainer
106{
    /** Serialization version identifier. */
    private static final long serialVersionUID = 1029672744963360976L;

    /** Logger for trace/debug output of this class. */
    private static final org.slf4j.Logger   log = org.slf4j.LoggerFactory.getLogger(H4GRImage.class);

    /**
     * The list of attributes of this data object. Members of the list are
     * instance of H4ScalarAttribute. Lazily populated by getMetadata().
     */
    @SuppressWarnings("rawtypes")
    private List                            attributeList;

    /**
     * The GR interface identifier obtained from GRstart(fid)
     */
    private long                            grid;

    /**
     * The number of components in the raster image
     */
    private int                             ncomp;

    /** the datatype identifier */
    private long                            datatypeID = -1;

    /** the number of attributes; -1 means the count has not been queried from the file yet */
    private int                             nAttributes = -1;
133
134
135    /**
136     * Creates a H4GRImage object with specific name and path.
137     *
138     * @param theFile the HDF file.
139     * @param name the name of this H4GRImage.
140     * @param path the full path of this H4GRImage.
141     */
142    public H4GRImage(FileFormat theFile, String name, String path) {
143        this(theFile, name, path, null);
144    }
145
146    /**
147     * Creates a H4GRImage object with specific name, path, and object ID.
148     *
149     * @param theFile the HDF file.
150     * @param name the name of this H4GRImage.
151     * @param path the full path of this H4GRImage.
152     * @param oid the unique identifier of this data object.
153     */
154    @SuppressWarnings("deprecation")
155    public H4GRImage(FileFormat theFile, String name, String path, long[] oid) {
156        super (theFile, name, path, oid);
157        palette = null;
158        isImage = isImageDisplay = true;
159        unsignedConverted = false;
160        grid = ((H4File)getFileFormat()).getGRAccessID();
161    }
162
163    /*
164     * (non-Javadoc)
165     * @see hdf.object.DataFormat#hasAttribute()
166     */
167    @Override
168    public boolean hasAttribute() {
169        if (nAttributes < 0) {
170            grid = ((H4File)getFileFormat()).getGRAccessID();
171
172            long id = open();
173
174            if (id >= 0) {
175                String[] objName = {""};
176                int[] grInfo = new int[4]; //ncomp, data_type, interlace, and num_attrs
177                int[] idims = new int[2];
178                try {
179                    HDFLibrary.GRgetiminfo(id, objName, grInfo, idims);
180                    nAttributes = grInfo[3];
181                }
182                catch (Exception ex) {
183                    log.trace("hasAttribute() failure: ", ex);
184                    nAttributes = 0;
185                }
186
187                log.trace("hasAttribute(): nAttributes={}", nAttributes);
188
189                close(id);
190            }
191        }
192
193        return (nAttributes > 0);
194    }
195
196    // To do: Implementing Dataset
197    @Override
198    public Dataset copy(Group pgroup, String dname, long[] dims, Object buff) throws Exception {
199        log.trace("copy(): start: parentGroup={} datasetName={}", pgroup, dname);
200
201        Dataset dataset = null;
202        long srcdid = -1;
203        long dstdid = -1;
204        String path = null;
205        int[] count = null;
206
207        if (pgroup == null) {
208            log.debug("copy(): Parent group is null - exit");
209            return null;
210        }
211
212        if (pgroup.isRoot())
213            path = HObject.SEPARATOR;
214        else
215            path = pgroup.getPath()+pgroup.getName()+HObject.SEPARATOR;
216
217        srcdid = open();
218        if (srcdid < 0) {
219            log.debug("copy(): Invalid source dataset ID - exit");
220            return null;
221        }
222
223        if (dims != null) {
224            count = new int[2];
225            count[0] = (int)dims[0];
226            count[1] = (int)dims[1];
227        }
228
229        int[] grInfo = new int[4]; //ncomp, data_type, interlace and num_attrs
230        try {
231            String[] tmpName = {""};
232            int[] tmpDims = new int[2];
233            HDFLibrary.GRgetiminfo(srcdid, tmpName, grInfo, tmpDims);
234            if (count == null)
235                count = tmpDims;
236        }
237        catch (HDFException ex) {
238            log.debug("copy(): GRgetiminfo failure: ", ex);
239        }
240
241        ncomp = grInfo[0];
242        long tid = grInfo[1];
243        int interlace = grInfo[2];
244        int numberOfAttributes = grInfo[3];
245        dstdid = HDFLibrary.GRcreate( ((H4File)pgroup.getFileFormat()).getGRAccessID(),
246                dname, ncomp, tid, interlace, count);
247        if (dstdid < 0) {
248            log.debug("copy(): Invalid dest dataset ID - exit");
249            return null;
250        }
251
252        int ref = HDFLibrary.GRidtoref(dstdid);
253        if (!pgroup.isRoot()) {
254            long vgid = pgroup.open();
255            HDFLibrary.Vaddtagref(vgid, HDFConstants.DFTAG_RIG, ref);
256            pgroup.close(vgid);
257        }
258
259        // read data from the source dataset
260        int[] start = {0, 0};
261        if ((buff == null) && (count != null)) {
262            buff = new byte[count[0]*count[1] * HDFLibrary.DFKNTsize(tid)];
263            HDFLibrary.GRreadimage(srcdid, start, null, count, buff);
264        }
265
266        // write the data into the destination dataset
267        HDFLibrary.GRwriteimage(dstdid, start, null, count, buff);
268
269        // copy palette
270        long pid = HDFLibrary.GRgetlutid(srcdid, 0);
271        int[] palInfo = new int[4];
272
273        HDFLibrary.GRgetlutinfo(pid, palInfo);
274        palInfo[1] = HDFConstants.DFNT_UINT8; // support unsigned byte only. Other type does not work
275        int palSize = palInfo[0]*palInfo[3];
276        byte[] palBuff = new byte[palSize];
277        HDFLibrary.GRreadlut(pid, palBuff);
278        pid = HDFLibrary.GRgetlutid(dstdid, 0);
279        HDFLibrary.GRwritelut(pid, palInfo[0], palInfo[1], palInfo[2], palInfo[3], palBuff);
280
281        // copy attributes from one object to the new object
282        log.trace("copy(): copyAttributes: numAttributes={}", numberOfAttributes);
283        copyAttribute(srcdid, dstdid, numberOfAttributes);
284
285        long[] oid = {HDFConstants.DFTAG_RIG, ref};
286        dataset = new H4GRImage(pgroup.getFileFormat(), dname, path, oid);
287
288        pgroup.addToMemberList(dataset);
289
290        close(srcdid);
291
292        try {
293            HDFLibrary.GRendaccess(dstdid);
294        }
295        catch (HDFException ex) {
296            log.debug("copy(): GRendaccess failure: ", ex);
297        }
298
299        return dataset;
300    }
301
302    // implementing ScalarDS
303    /**
304     * Returns the datatype of the data object.
305     *
306     * @return the datatype of the data object.
307     */
308    @Override
309    public Datatype getDatatype() {
310        if (!inited)
311            init();
312
313        if (datatype == null) {
314            try {
315                datatype = new H4Datatype(datatypeID);
316            }
317            catch (Exception ex) {
318                log.debug("getDatatype(): failed to create datatype: ", ex);
319                datatype = null;
320            }
321        }
322
323        return datatype;
324    }
325
326    // Implementing Dataset
327    @Override
328    public byte[] readBytes() throws HDFException {
329        byte[] theData = null;
330
331        if (!isInited())
332            init();
333
334        long id = open();
335        if (id < 0) {
336            log.debug("readBytes(): Invalid ID - exit");
337            return null;
338        }
339
340        try {
341            // set the interlacing scheme for reading image data
342            HDFLibrary.GRreqimageil(id, interlace);
343            int datasize = (int)(getWidth()*getHeight()*ncomp);
344            int size = HDFLibrary.DFKNTsize(datatypeID)*datasize;
345            theData = new byte[size];
346            int[] start = {(int)startDims[0], (int)startDims[1]};
347            int[] select = {(int)selectedDims[0], (int)selectedDims[1]};
348
349            int[] stride = null;
350            if (selectedStride != null) {
351                stride = new int[rank];
352                for (int i=0; i<rank; i++)
353                    stride[i] = (int)selectedStride[i];
354            }
355
356            HDFLibrary.GRreadimage(id, start, stride, select, theData);
357        }
358        catch (Exception ex) {
359            log.debug("readBytes(): failure: ", ex);
360        }
361        finally {
362            close(id);
363        }
364
365        return theData;
366    }
367
368    // ***** need to implement from DataFormat *****
369    /**
370     * Reads the data from file.
371     *
372     * read() reads the data from file to a memory buffer and returns the memory
373     * buffer. The dataset object does not hold the memory buffer. To store the
374     * memory buffer in the dataset object, one must call getData().
375     *
376     * By default, the whole dataset is read into memory. Users can also select
377     * a subset to read. Subsetting is done in an implicit way.
378     *
379     * @return the data read from file.
380     *
381     * @see #getData()
382     *
383     * @throws HDFException
384     *             if object can not be read
385     */
386    @Override
387    public Object read() throws HDFException {
388        Object theData = null;
389
390        if (!isInited()) init();
391
392        long id = open();
393        if (id < 0) {
394            log.debug("read(): Invalid ID");
395            return null;
396        }
397
398        try {
399            // set the interlacing scheme for reading image data
400            HDFLibrary.GRreqimageil(id, interlace);
401            int datasize = (int)(getWidth()*getHeight()*ncomp);
402
403            theData = H4Datatype.allocateArray(datatypeID, datasize);
404
405            if (theData != null) {
406                // assume external data files are located in the same directory as the main file.
407                HDFLibrary.HXsetdir(getFileFormat().getParent());
408
409                int[] start = {(int)startDims[0], (int)startDims[1]};
410                int[] select = {(int)selectedDims[0], (int)selectedDims[1]};
411
412                int[] stride = null;
413                if (selectedStride != null) {
414                    stride = new int[rank];
415                    for (int i=0; i<rank; i++)
416                        stride[i] = (int)selectedStride[i];
417                }
418
419                HDFLibrary.GRreadimage(id, start, stride, select, theData);
420            }
421        }
422        catch (Exception ex) {
423            log.debug("read(): failure: ", ex);
424        }
425        finally {
426            close(id);
427        }
428
429        if ( (rank >1) && (selectedIndex[1]>selectedIndex[0]))
430            isDefaultImageOrder = false;
431        else
432            isDefaultImageOrder = true;
433
434        log.trace("read(): isDefaultImageOrder={}", isDefaultImageOrder);
435        return theData;
436    }
437
438    // Implementing DataFormat
439    /**
440     * Writes a memory buffer to the object in the file.
441     *
442     * @param buf
443     *            the data to write
444     *
445     * @throws HDFException
446     *             if data can not be written
447     */
448    @SuppressWarnings("deprecation")
449    @Override
450    public void write(Object buf) throws HDFException {
451        if (buf == null) {
452            log.debug("write(): buf is null - exit");
453            return;
454        }
455
456        long id = open();
457        if (id < 0) {
458            log.debug("write(): Invalid ID - exit");
459            return;
460        }
461
462        int[] select = new int[rank];
463        int[] start = new int[rank];
464        for (int i=0; i<rank; i++) {
465            select[i] = (int)selectedDims[i];
466            start[i] = (int)startDims[i];
467        }
468
469        int[] stride = null;
470        if (selectedStride != null) {
471            stride = new int[rank];
472            for (int i=0; i<rank; i++) {
473                stride[i] = (int)selectedStride[i];
474            }
475        }
476
477        Object tmpData = buf;
478        try {
479            if (getDatatype().isUnsigned() && unsignedConverted)
480                tmpData = convertToUnsignedC(buf);
481            // assume external data files are located in the same directory as the main file.
482            HDFLibrary.HXsetdir(getFileFormat().getParent());
483
484            HDFLibrary.GRwriteimage(id, start, stride, select, tmpData);
485        }
486        catch (Exception ex) {
487            log.debug("write(): failure: ", ex);
488        }
489        finally {
490            tmpData = null;
491            close(id);
492        }
493    }
494
495    // ***** need to implement from DataFormat *****
496    /**
497     * Retrieves the object's metadata, such as attributes, from the file.
498     *
499     * Metadata, such as attributes, is stored in a List.
500     *
501     * @return the list of metadata objects.
502     *
503     * @throws HDFException
504     *             if the metadata can not be retrieved
505     */
506    @Override
507    @SuppressWarnings({"rawtypes", "unchecked"})
508    public List getMetadata() throws HDFException {
509        if (attributeList != null) {
510            log.trace("getMetadata(): attributeList != null - exit");
511            return attributeList;
512        }
513
514        long id = open();
515        String[] objName = {""};
516        int[] grInfo = new int[4]; //ncomp, data_type, interlace, and num_attrs
517        int[] idims = new int[2];
518        try {
519            HDFLibrary.GRgetiminfo(id, objName, grInfo, idims);
520            // mask off the litend bit
521            grInfo[1] = grInfo[1] & (~HDFConstants.DFNT_LITEND);
522            int n = grInfo[3];
523
524            if ((attributeList == null) && (n>0))
525                attributeList = new Vector(n, 5);
526
527            boolean b = false;
528            String[] attrName = new String[1];
529            int[] attrInfo = {0, 0}; // data_type, length
530            for (int i=0; i<n; i++) {
531                attrName[0] = "";
532                try {
533                    b = HDFLibrary.GRattrinfo(id, i, attrName, attrInfo);
534                    // mask off the litend bit
535                    attrInfo[0] = attrInfo[0] & (~HDFConstants.DFNT_LITEND);
536                }
537                catch (HDFException ex) {
538                    log.debug("getMetadata(): GRattrinfo failure: ", ex);
539                    b = false;
540                }
541
542                if (!b)
543                    continue;
544
545                long[] attrDims = {attrInfo[1]};
546                H4ScalarAttribute attr = new H4ScalarAttribute(this, attrName[0], new H4Datatype(attrInfo[0]), attrDims);
547                attributeList.add(attr);
548
549                Object buf = null;
550                try {
551                    buf = H4Datatype.allocateArray(attrInfo[0], attrInfo[1]);
552                }
553                catch (OutOfMemoryError e) {
554                    log.debug("getMetadata(): out of memory: ", e);
555                    buf = null;
556                }
557
558                try {
559                    HDFLibrary.GRgetattr(id, i, buf);
560                }
561                catch (HDFException ex) {
562                    log.debug("getMetadata(): GRgetattr failure: ", ex);
563                    buf = null;
564                }
565
566                if (buf != null) {
567                    if ((attrInfo[0] == HDFConstants.DFNT_CHAR) ||
568                        (attrInfo[0] ==  HDFConstants.DFNT_UCHAR8)) {
569                        buf = Dataset.byteToString((byte[])buf, attrInfo[1]);
570                    }
571
572                    attr.setAttributeData(buf);
573                }
574            } //  (int i=0; i<n; i++)
575        }
576        catch (Exception ex) {
577            log.debug("getMetadata(): failure: ", ex);
578        }
579        finally {
580            close(id);
581        }
582
583        return attributeList;
584    }
585
586    // ***** need to implement from DataFormat *****
587    /**
588     * Writes a specific piece of metadata (such as an attribute) into the file.
589     *
590     * If an HDF(4&amp;5) attribute exists in the file, this method updates its
591     * value. If the attribute does not exist in the file, it creates the
592     * attribute in the file and attaches it to the object. It will fail to
593     * write a new attribute to the object where an attribute with the same name
594     * already exists. To update the value of an existing attribute in the file,
595     * one needs to get the instance of the attribute by getMetadata(), change
596     * its values, then use writeMetadata() to write the value.
597     *
598     * @param info
599     *            the metadata to write.
600     *
601     * @throws Exception
602     *             if the metadata can not be written
603     */
604    @Override
605    @SuppressWarnings({"rawtypes", "unchecked"})
606    public void writeMetadata(Object info) throws Exception {
607        // only attribute metadata is supported.
608        if (!(info instanceof Attribute)) {
609            log.debug("writeMetadata(): Object not an H4ScalarAttribute - exit");
610            return;
611        }
612
613        try {
614            getFileFormat().writeAttribute(this, (H4ScalarAttribute)info, true);
615
616            if (attributeList == null)
617                attributeList = new Vector();
618
619            attributeList.add(info);
620            nAttributes = attributeList.size();
621        }
622        catch (Exception ex) {
623            log.debug("writeMetadata(): failure: ", ex);
624        }
625    }
626
    // ***** need to implement from DataFormat *****
    /**
     * Deletes an existing piece of metadata from this object.
     *
     * Note: attribute removal is not supported for HDF4 GR images; this
     * implementation is a no-op that only logs the call.
     *
     * @param info
     *            the metadata to delete.
     *
     * @throws HDFException
     *             if the metadata can not be removed
     */
    @Override
    public void removeMetadata(Object info) throws HDFException {
        log.trace("removeMetadata(): disabled");
    }
641
642    /**
643     * Updates an existing piece of metadata attached to this object.
644     *
645     * @param info
646     *            the metadata to update.
647     *
648     * @throws Exception
649     *             if the metadata can not be updated
650     */
651    @Override
652    public void updateMetadata(Object info) throws Exception {
653        log.trace("updateMetadata(): disabled");
654    }
655
656    // Implementing HObject.
657    @Override
658    public long open() {
659        log.trace("open(): start: for file={} with ref={}", getFID(), (short) oid[1]);
660
661        long id = -1;
662        try {
663            int index = HDFLibrary.GRreftoindex(grid, (short)oid[1]);
664            id = HDFLibrary.GRselect(grid, index);
665        }
666        catch (HDFException ex) {
667            log.debug("open(): failure: ", ex);
668            id = -1;
669        }
670
671        return id;
672    }
673
674    // Implementing HObject.
675    @Override
676    public void close(long grid) {
677        try { HDFLibrary.GRendaccess(grid); }
678        catch (HDFException ex) {log.debug("close(): failure: ", ex);}
679    }
680
681    // Implementing Dataset.
682    @Override
683    public void init() {
684        if (inited) {
685            log.trace("init(): Already initialized");
686            return; // already called. Initialize only once
687        }
688
689        long id = open();
690        String[] objName = {""};
691        int[] grInfo = new int[4]; //ncomp, data_type, interlace and num_attrs
692        int[] idims = new int[2];
693        try {
694            HDFLibrary.GRgetiminfo(id, objName, grInfo, idims);
695            // mask off the litend bit
696            grInfo[1] = grInfo[1] & (~HDFConstants.DFNT_LITEND);
697            datatypeID = grInfo[1];
698
699            // get compression information
700            try {
701                HDFCompInfo compInfo = new HDFCompInfo();
702                HDFLibrary.GRgetcompinfo(id, compInfo);
703
704                compression.setLength(0);
705
706                if (compInfo.ctype == HDFConstants.COMP_CODE_DEFLATE)
707                    compression.append("GZIP");
708                else if (compInfo.ctype == HDFConstants.COMP_CODE_SZIP)
709                    compression.append("SZIP");
710                else if (compInfo.ctype == HDFConstants.COMP_CODE_JPEG)
711                    compression.append("JPEG");
712                else if (compInfo.ctype == HDFConstants.COMP_CODE_SKPHUFF)
713                    compression.append("SKPHUFF");
714                else if (compInfo.ctype == HDFConstants.COMP_CODE_RLE)
715                    compression.append("RLE");
716                else if (compInfo.ctype == HDFConstants.COMP_CODE_NBIT)
717                    compression.append("NBIT");
718
719                if (compression.length() == 0)
720                    compression.append("NONE");
721            }
722            catch (Exception ex) {
723                log.debug("init(): get compression information failure: ", ex);
724            }
725
726            // get chunk information
727            try {
728                HDFChunkInfo chunkInfo = new HDFChunkInfo();
729                int[] cflag = {HDFConstants.HDF_NONE};
730                HDFLibrary.GRgetchunkinfo(id, chunkInfo, cflag);
731
732                storageLayout.setLength(0);
733
734                if (cflag[0] == HDFConstants.HDF_NONE) {
735                    chunkSize = null;
736                    storageLayout.append("NONE");
737                }
738                else {
739                    chunkSize = new long[rank];
740                    for (int i=0; i<rank; i++)
741                        chunkSize[i] = chunkInfo.chunk_lengths[i];
742
743                    storageLayout.append("CHUNKED: ").append(chunkSize[0]);
744                    for (int i = 1; i < rank; i++)
745                        storageLayout.append(" X ").append(chunkSize[i]);
746                }
747            }
748            catch (Exception ex) {
749                log.debug("init(): get chunk information failure: ", ex);
750            }
751
752            inited = true;
753        }
754        catch (HDFException ex) {
755            log.debug("init(): failure: ", ex);
756        }
757        finally {
758            close(id);
759        }
760
761        ncomp = grInfo[0];
762        isTrueColor = (ncomp >= 3);
763        interlace = grInfo[2];
764        rank = 2; // support only two dimensional raster image
765
766        // data in HDF4 GR image is arranged as dim[0]=width, dim[1]=height.
767        // other image data is arranged as dim[0]=height, dim[1]=width.
768        selectedIndex[0] = 1;
769        selectedIndex[1] = 0;
770
771        dims = new long[rank];
772        startDims = new long[rank];
773        selectedDims = new long[rank];
774        for (int i=0; i<rank; i++) {
775            startDims[i] = 0;
776            selectedDims[i] = idims[i];
777            dims[i] = idims[i];
778        }
779    }
780
    // ***** implement from ScalarDS *****

    /*
     * (non-Javadoc)
     * @see hdf.object.ScalarDS#readPalette(int)
     */
    @Override
    public byte[][] readPalette(int idx) {
        // The palette index is ignored: only the image's first palette is
        // supported (see getPalette()).
        return getPalette();
    }
791
792    /*
793     * (non-Javadoc)
794     * @see hdf.object.ScalarDS#NumberOfPalettes()
795     */
796    @Override
797    public int getNumberOfPalettes() {
798        if (palette != null)
799            return 1;
800        return 0;
801    }
802
    /**
     * Returns the 256-entry RGB palette of this image, reading and caching it
     * from the file on the first call.
     *
     * @return a [3][256] byte array (rows are R, G, B), or null if the image
     *         has no usable palette.
     */
    @Override
    public byte[][] getPalette() {
        if (palette != null) {
            log.trace("getPalette(): palette != null - exit");
            return palette;
        }

        long id = open();
        if (id < 0) {
            log.debug("getPalette(): Invalid ID - exit");
            return null;
        }

        // get palette info.
        long lutid  = -1;
        int[] lutInfo = new int[4]; //ncomp, datatype, interlace, num_entries
        try {
            // find the first palette.
            // Todo: get all the palettes
            lutid = HDFLibrary.GRgetlutid(id, 0);
            HDFLibrary.GRgetlutinfo(lutid, lutInfo);
        }
        catch (HDFException ex) {
            log.debug("getPalette(): exit with failure: ", ex);
            close(id);
            return null;
        }

        // check if there is palette data. HDFLibrary.GRgetlutinfo() sometimes
        // return true even if there is no palette data, and check if it is a
        // RGB with 256 colors
        if ((lutInfo[0] != 3) || (lutInfo[2] < 0) || (lutInfo[3] != 256)) {
            close(id);
            log.debug("getPalette(): no palette data - exit");
            return null;
        }

        // read palette data
        boolean b = false;
        byte[] pal = new byte[3*256];
        try {
            HDFLibrary.GRreqlutil(id, lutInfo[2]);
            b = HDFLibrary.GRreadlut(lutid, pal);
        }
        catch (HDFException ex) {
            log.debug("getPalette(): failure: ", ex);
            b = false;
        }

        if (!b) {
            close(id);
            log.debug("getPalette(): no palette data - exit");
            return null;
        }

        palette = new byte[3][256];
        if (lutInfo[2] == HDFConstants.MFGR_INTERLACE_PIXEL) {
            // color components are arranged in RGB, RGB, RGB, ...
            for (int i=0; i<256; i++) {
                palette[0][i] = pal[i*3];
                palette[1][i] = pal[i*3+1];
                palette[2][i] = pal[i*3+2];
            }
        }
        else {
            // planar interlace: all reds first, then all greens, then all blues
            for (int i=0; i<256; i++) {
                palette[0][i] = pal[i];
                palette[1][i] = pal[256+i];
                palette[2][i] = pal[512+i];
            }
        }

        close(id);

        return palette;
    }
879
880    /**
881     * Returns the number of components of this image data.
882     *
883     * @return the number of components
884     */
885    public int getComponentCount() {
886        return ncomp;
887    }
888
889    /**
890     * Creates a new image.
891     *
892     * @param name the name of the dataset to create.
893     * @param pgroup the parent group of the new dataset.
894     * @param type the datatype of the dataset.
895     * @param dims the dimension size of the dataset.
896     * @param maxdims the max dimension size of the dataset.
897     * @param chunks the chunk size of the dataset.
898     * @param gzip the level of the gzip compression.
899     * @param ncomp number of components of the image data.
900     * @param interlace the interlace mode.
901     * @param data the array of data values.
902     *
903     * @return the new image if successful. Otherwise returns null.
904     *
905     * @throws Exception if the image can not be created
906     */
907    public static H4GRImage create(String name, Group pgroup, Datatype type,
908            long[] dims, long[] maxdims, long[] chunks, int gzip, int ncomp, int interlace, Object data) throws Exception {
909        log.trace("create(): start: name={} parentGroup={} type={} gzip={} ncomp={} interlace={}", name, pgroup, type, gzip, ncomp, interlace);
910
911        H4GRImage dataset = null;
912        if ((name == null) ||
913            (pgroup == null) ||
914            (dims == null) ||
915            ((gzip>0) && (chunks==null))) {
916            log.debug("create(): one or more parameters are null - exit");
917            return null;
918        }
919
920        H4File file = (H4File)pgroup.getFileFormat();
921        if (file == null) {
922            log.debug("create(): Parent group FileFormat is null - exit");
923            return null;
924        }
925
926        String path = HObject.SEPARATOR;
927        if (!pgroup.isRoot())
928            path = pgroup.getPath()+pgroup.getName()+HObject.SEPARATOR;
929        if (interlace == ScalarDS.INTERLACE_PLANE)
930            interlace = HDFConstants.MFGR_INTERLACE_COMPONENT;
931        else
932            interlace = HDFConstants.MFGR_INTERLACE_PIXEL;
933
934        int rank = 2;
935        int[] idims = new int[rank];
936        int[] imaxdims = new int[rank];
937        int[] start = new int[rank];
938        for (int i=0; i<rank; i++) {
939            idims[i] = (int)dims[i];
940            if (maxdims != null)
941                imaxdims[i] = (int)maxdims[i];
942            else
943                imaxdims[i] = idims[i];
944            start[i] = 0;
945        }
946
947        int[] ichunks = null;
948        if (chunks != null) {
949            ichunks = new int[rank];
950            for (int i=0; i<rank; i++)
951                ichunks[i] = (int)chunks[i];
952        }
953
954        long grid = -1;
955        long vgid = -1;
956        long gid = (file).getGRAccessID();
957        long tid = type.createNative();
958
959        if(tid >= 0) {
960            try {
961                grid = HDFLibrary.GRcreate(gid, name, ncomp, tid, interlace, idims);
962            }
963            catch (Exception ex) {
964                log.debug("create(): exit with failure: ", ex);
965                throw (ex);
966            }
967        }
968
969        if (grid < 0) {
970            log.debug("create(): Invalid GR ID - exit");
971            throw (new HDFException("Unable to create the new dataset."));
972        }
973
974        if ((grid > 0) && (data != null))
975            HDFLibrary.GRwriteimage(grid, start, null, idims, data);
976
977        if (chunks != null) {
978            // set chunk
979            HDFChunkInfo chunkInfo = new HDFChunkInfo(ichunks);
980            HDFLibrary.GRsetchunk(grid, chunkInfo, HDFConstants.HDF_CHUNK);
981        }
982
983        if (gzip > 0) {
984            // set compression
985            int compType = HDFConstants.COMP_CODE_DEFLATE;
986            HDFDeflateCompInfo compInfo = new HDFDeflateCompInfo();
987            compInfo.level = gzip;
988            HDFLibrary.GRsetcompress(grid, compType, compInfo);
989        }
990
991        int ref = HDFLibrary.GRidtoref(grid);
992
993        if (!pgroup.isRoot()) {
994            // add the dataset to the parent group
995            vgid = pgroup.open();
996            if (vgid < 0) {
997                if (grid > 0)
998                    HDFLibrary.GRendaccess(grid);
999                log.debug("create(): Invalid VG ID - exit");
1000                throw (new HDFException("Unable to open the parent group."));
1001            }
1002
1003            HDFLibrary.Vaddtagref(vgid, HDFConstants.DFTAG_RI, ref);
1004
1005            pgroup.close(vgid);
1006        }
1007
1008        try {
1009            if (grid > 0)
1010                HDFLibrary.GRendaccess(grid);
1011        }
1012        catch (Exception ex) {
1013            log.debug("create(): GRendaccess failure: ", ex);
1014        }
1015
1016        long[] oid = {HDFConstants.DFTAG_NDG, ref};
1017        dataset = new H4GRImage(file, name, path, oid);
1018
1019        if (dataset != null)
1020            pgroup.addToMemberList(dataset);
1021
1022        return dataset;
1023    }
1024
1025    /**
1026     * copy attributes from one GR image to another GR image
1027     */
1028    private void copyAttribute(long srcdid, long dstdid, int numberOfAttributes) {
1029        log.trace("copyAttribute(): start: srcdid={} dstdid={} numAttributes={}", srcdid, dstdid, numberOfAttributes);
1030
1031        if (numberOfAttributes <= 0) {
1032            log.debug("copyAttribute(): numberOfAttributes={}", numberOfAttributes);
1033            return;
1034        }
1035
1036        try {
1037            boolean b = false;
1038            String[] attrName = new String[1];
1039            int[] attrInfo = {0, 0};
1040            for (int i=0; i<numberOfAttributes; i++) {
1041                attrName[0] = "";
1042                try {
1043                    b = HDFLibrary.GRattrinfo(srcdid, i, attrName, attrInfo);
1044                }
1045                catch (HDFException ex) {
1046                    log.trace("copyAttribute(): attribute[{}] GRattrinfo failure: ", i, ex);
1047                    b = false;
1048                }
1049
1050                if (!b)
1051                    continue;
1052
1053                // read attribute data from source dataset
1054                byte[] attrBuff = new byte[attrInfo[1] * HDFLibrary.DFKNTsize(attrInfo[0])];
1055                try {
1056                    HDFLibrary.GRgetattr(srcdid, i, attrBuff);
1057                }
1058                catch (Exception ex) {
1059                    log.trace("copyAttribute(): attribute[{}] GRgetattr failure: ", i, ex);
1060                    attrBuff = null;
1061                }
1062
1063                if (attrBuff == null) {
1064                    log.debug("copyAttribute(): attrBuff[{}] is null continue", i);
1065                    continue;
1066                }
1067
1068                // attach attribute to the destination dataset
1069                HDFLibrary.GRsetattr(dstdid, attrName[0], attrInfo[0], attrInfo[1], attrBuff);
1070            } //  (int i=0; i<numberOfAttributes; i++)
1071        }
1072        catch (Exception ex) {
1073            log.debug("copyAttribute(): failure: ", ex);
1074        }
1075    }
1076
1077    //Implementing DataFormat
1078    /**
1079     * Retrieves the object's metadata, such as attributes, from the file.
1080     *
1081     * Metadata, such as attributes, is stored in a List.
1082     *
1083     * @param attrPropList
1084     *             the list of properties to get
1085     *
1086     * @return the list of metadata objects.
1087     *
1088     * @throws Exception
1089     *             if the metadata can not be retrieved
1090     */
1091    @SuppressWarnings("rawtypes")
1092    public List getMetadata(int... attrPropList) throws Exception {
1093        throw new UnsupportedOperationException("getMetadata(int... attrPropList) is not supported");
1094    }
1095}