/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the files COPYING and Copyright.html. *
 * COPYING can be found at the root of the source code distribution tree.    *
 * Or, see https://support.hdfgroup.org/products/licenses.html               *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.h4;

import java.util.List;
import java.util.Vector;

import hdf.hdflib.HDFChunkInfo;
import hdf.hdflib.HDFCompInfo;
import hdf.hdflib.HDFConstants;
import hdf.hdflib.HDFDeflateCompInfo;
import hdf.hdflib.HDFException;
import hdf.hdflib.HDFJPEGCompInfo;
import hdf.hdflib.HDFLibrary;
import hdf.hdflib.HDFNBITCompInfo;
import hdf.hdflib.HDFSKPHUFFCompInfo;
import hdf.hdflib.HDFSZIPCompInfo;
import hdf.object.Attribute;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.ScalarDS;

/**
 * H4SDS describes HDF4 Scientific Data Sets (SDS) and operations performed on
 * the SDS. An SDS is a group of data structures used to store and describe
 * multidimensional arrays of scientific data.
 * <p>
 * The data contained in an SDS array has a data type associated with it. The
 * standard data types supported by the SD interface include 32- and 64-bit
 * floating-point numbers, 8-, 16- and 32-bit signed integers, 8-, 16- and
 * 32-bit unsigned integers, and 8-bit characters.
 * <p>
 * <b>How to Select a Subset</b>
 * <p>
 * Dataset defines APIs for reading, writing and subsetting a dataset. No function
 * is defined to select a subset of a data array. The selection is done in an
 * implicit way. Function calls to dimension information such as getSelectedDims()
 * return an array of dimension values, which is a reference to the array in the
 * dataset object. Changes to the array outside the dataset object directly
 * change the values of the array in the dataset object. It works like pointers
 * in C.
 * <p>
 * The following is an example of how to make a subset. In the example, the
 * dataset is a 4-dimensional array of size [200][100][50][10], i.e.,
 * dims[0]=200; dims[1]=100; dims[2]=50; dims[3]=10; <br>
 * We want to select every other data point in dims[1] and dims[2].
 * <pre>
    int rank = dataset.getRank();   // number of dimensions of the dataset
    long[] dims = dataset.getDims(); // the dimension sizes of the dataset
    long[] selected = dataset.getSelectedDims(); // the selected size of the dataset
    long[] start = dataset.getStartDims(); // the offset of the selection
    long[] stride = dataset.getStride(); // the stride of the dataset
    int[]  selectedIndex = dataset.getSelectedIndex(); // the selected dimensions for display

    // select dim1 and dim2 as 2D data for display, and slice through dim0
    selectedIndex[0] = 1;
    selectedIndex[1] = 2;
    selectedIndex[2] = 0;

    // reset the selection arrays
    for (int i=0; i&lt;rank; i++) {
        start[i] = 0;
        selected[i] = 1;
        stride[i] = 1;
    }

    // set stride to 2 on dim1 and dim2 so that every other data point is selected.
    stride[1] = 2;
    stride[2] = 2;

    // set the selection size of dim1 and dim2
    selected[1] = dims[1]/stride[1];
    selected[2] = dims[2]/stride[2];

    // when dataset.read() is called, the selection above will be used since
    // the dimension arrays are passed by reference. Changes of these arrays
    // outside the dataset object directly change the values of these arrays
    // in the dataset object.
 * </pre>
 *
 * @version 1.1 9/4/2007
 * @author Peter X. Cao
 */
public class H4SDS extends ScalarDS
{
    private static final long serialVersionUID = 2557157923292438696L;

    private final static org.slf4j.Logger   log = org.slf4j.LoggerFactory.getLogger(H4SDS.class);

    /** tag for netCDF datasets.
     *  HDF4 library supports netCDF version 2.3.2. It only supports SDS APIs.
     */
    // magic number for netCDF: "C(67) D(68) F(70) '\001'"
    public static final int                 DFTAG_NDG_NETCDF = 67687001;

    /**
     * The list of attributes of this data object. Members of the list are
     * instances of Attribute.
     */
    @SuppressWarnings("rawtypes")
    private List                            attributeList;

    /**
     * The SDS interface identifier obtained from SDstart(filename, access)
     */
    private long                            sdid;

    /** the datatype identifier */
    private long                            datatypeID = -1;

    private int                             nAttributes = -1;

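    /**
     * Creates an H4SDS object with specific name and path.
     *
     * @param theFile the HDF file.
     * @param name the name of this H4SDS.
     * @param path the full path of this H4SDS.
     */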
    public H4SDS(FileFormat theFile, String name, String path)
    {
        this(theFile, name, path, null);
    }

    /**
     * Creates an H4SDS object with specific name and path.
     *
     * @param theFile the HDF file.
     * @param name the name of this H4SDS.
     * @param path the full path of this H4SDS.
     * @param oid the unique identifier of this data object.
     */
    @SuppressWarnings("deprecation")
    public H4SDS(
        FileFormat theFile,
        String name,
        String path,
        long[] oid)
    {
        super(theFile, name, path, oid);
        unsignedConverted = false;
        sdid = ((H4File)getFileFormat()).getSDAccessID();
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.DataFormat#hasAttribute()
     */
    @Override
    public boolean hasAttribute()
    {
        if (nAttributes < 0) {
            sdid = ((H4File)getFileFormat()).getSDAccessID();

            long id = open();

            if (id >= 0) {
                try { // retrieve attributes of the dataset
                    String[] objName = {""};
                    int[] sdInfo = {0, 0, 0};
                    int[] tmpDim = new int[HDFConstants.MAX_VAR_DIMS];
                    HDFLibrary.SDgetinfo(id, objName, tmpDim, sdInfo);
                    nAttributes = sdInfo[2];
                }
                catch (Exception ex) {
                    log.debug("hasAttribute(): failure: ", ex);
                    nAttributes = 0;
                }

                log.trace("hasAttribute(): nAttributes={}", nAttributes);

                close(id);
            }
        }

        return (nAttributes > 0);
    }

    // ***** need to implement from ScalarDS *****
    @Override
    public byte[][] readPalette(int idx) { return null; }

    // ***** need to implement from ScalarDS *****
    @Override
    public byte[] getPaletteRefs() { return null; }

    // implementing Dataset
    @Override
    public Datatype getDatatype()
    {
        if (datatype == null) {
            datatype = new H4Datatype(datatypeID);
        }

        return datatype;
    }

    // To do: Implementing Dataset
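    /**
     * Copies this dataset to the given parent group.
     * <p>
     * Based on the implementation below: if {@code dname} is null, the source
     * name is reused; if {@code dims} is null, the full extent of the source
     * dataset is copied; if {@code buff} is null, the data is read from the
     * source dataset.
     *
     * @param pgroup the parent group of the new dataset.
     * @param dname the name of the new dataset.
     * @param dims the dimension sizes of the new dataset.
     * @param buff the data values of the new dataset, or null to copy the
     *             source data.
     *
     * @return the new dataset if successful; otherwise returns null.
     *
     * @throws Exception if the dataset cannot be copied
     */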
    @Override
    public Dataset copy(Group pgroup, String dname, long[] dims, Object buff)
            throws Exception
    {
        log.trace("copy(): start: parentGroup={} datasetName={}", pgroup, dname);

        Dataset dataset = null;
        long srcdid=-1, dstdid=-1, tid=-1;
        int size=1, theRank=2;
        String path=null;
        int[] count=null, start=null;

        if (pgroup == null) {
            log.debug("copy(): Parent group is null");
            log.trace("copy(): finish");
            return null;
        }

        if (dname == null) {
            dname = getName();
        }

        if (pgroup.isRoot()) {
            path = HObject.separator;
        }
        else {
            path = pgroup.getPath()+pgroup.getName()+HObject.separator;
        }
        log.trace("copy(): path={}", path);

        srcdid = open();
        if (srcdid < 0) {
            log.debug("copy(): Invalid source SDID");
            log.trace("copy(): finish");
            return null;
        }

        if (dims == null) {
            if (!isInited())
                init();

            theRank = getRank();

            dims = getDims();
        }
        else {
            theRank = dims.length;
        }

        start = new int[theRank];
        count = new int[theRank];
        for (int i=0; i<theRank; i++) {
            start[i] = 0;
            count[i] = (int)dims[i];
            size *= count[i];
        }
        log.trace("copy(): theRank={} with size={}", theRank, size);

        // create the new dataset and attach it to the parent group
        tid = datatypeID;
        dstdid = HDFLibrary.SDcreate(
                ((H4File)pgroup.getFileFormat()).getSDAccessID(),
                dname, tid, theRank, count);
        if (dstdid < 0) {
            log.debug("copy(): Invalid dest SDID");
            log.trace("copy(): finish");
            return null;
        }

        int ref = HDFLibrary.SDidtoref(dstdid);
        if (!pgroup.isRoot()) {
            long vgid = pgroup.open();
            HDFLibrary.Vaddtagref(vgid, HDFConstants.DFTAG_NDG, ref);
            pgroup.close(vgid);
        }

        // copy attributes from one object to the new object
        log.trace("copy(): copy attributes");
        copyAttribute(srcdid, dstdid);

        // read data from the source dataset
        log.trace("copy(): read data from the source dataset");
        if (buff == null) {
            buff = new byte[size * HDFLibrary.DFKNTsize(tid)];
            HDFLibrary.SDreaddata(srcdid, start, null, count, buff);
        }

        // write the data into the destination dataset
        log.trace("copy(): write the data into the destination dataset");
        HDFLibrary.SDwritedata(dstdid, start, null, count, buff);

        long[] oid = {HDFConstants.DFTAG_NDG, ref};
        dataset = new H4SDS(pgroup.getFileFormat(), dname, path, oid);

        pgroup.addToMemberList(dataset);

        close(srcdid);

        try {
            HDFLibrary.SDendaccess(dstdid);
        }
        catch (HDFException ex) {
            log.debug("copy(): SDendaccess failure: ", ex);
        }

        log.trace("copy(): finish");
        return dataset;
    }

    // Implementing Dataset
    @Override
    public byte[] readBytes() throws HDFException
    {
        log.trace("readBytes(): start");

        byte[] theData = null;

        if (!isInited())
            init();

        long id = open();
        if (id < 0) {
            log.debug("readBytes(): Invalid SDID");
            log.trace("readBytes(): finish");
            return null;
        }

        int datasize = 1;
        int[] select = new int[rank];
        int[] start = new int[rank];
        for (int i=0; i<rank; i++) {
            datasize *= (int)selectedDims[i];
            select[i] = (int)selectedDims[i];
            start[i] = (int)startDims[i];
        }

        int[] stride = null;
        if (selectedStride != null) {
            stride = new int[rank];
            for (int i=0; i<rank; i++) {
                stride[i] = (int)selectedStride[i];
            }
        }

        try {
            int size = HDFLibrary.DFKNTsize(datatypeID)*datasize;
            theData = new byte[size];
            HDFLibrary.SDreaddata(id, start, stride, select, theData);
        }
        catch (Exception ex) {
            log.debug("readBytes(): failure: ", ex);
        }
        finally {
            close(id);
        }

        log.trace("readBytes(): finish");
        return theData;
    }

    // Implementing DataFormat
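    /**
     * Reads the data from the file into memory, based on the current
     * selection (the start, stride and selected-size arrays) of this dataset.
     *
     * @return the data read from the file, or null on failure.
     */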
    @Override
    public Object read() throws HDFException, OutOfMemoryError
    {
        log.trace("read(): start");

        Object theData = null;

        if (!isInited())
            init();

        long id = open();
        if (id < 0) {
            log.debug("read(): Invalid SDID");
            log.trace("read(): finish");
            return null;
        }

        int datasize = 1;
        int[] select = new int[rank];
        int[] start = new int[rank];
        for (int i=0; i<rank; i++) {
            datasize *= (int)selectedDims[i];
            select[i] = (int)selectedDims[i];
            start[i] = (int)startDims[i];
        }

        int[] stride = null;
        if (selectedStride != null) {
            stride = new int[rank];
            for (int i=0; i<rank; i++) {
                stride[i] = (int)selectedStride[i];
            }
        }

        try {
            theData = H4Datatype.allocateArray(datatypeID, datasize);

            if (theData != null) {
                // assume external data files are located in the same directory as the main file.
                HDFLibrary.HXsetdir(getFileFormat().getParent());

                HDFLibrary.SDreaddata(id, start, stride, select, theData);

                if (isText) {
                    theData = byteToString((byte[])theData, select[0]);
                }
            }
        }
        catch (Exception ex) {
            log.debug("read(): failure: ", ex);
        }
        finally {
            close(id);
        }

        if (fillValue==null && isImageDisplay) {
            try {
                getMetadata(); // need to set fillValue for images
            }
            catch (Exception ex) {
                log.debug("read(): getMetadata failure: ", ex);
            }
        }

        if ((rank > 1) && (selectedIndex[0] > selectedIndex[1]))
            isDefaultImageOrder = false;
        else
            isDefaultImageOrder = true;

        log.trace("read(): isDefaultImageOrder={}", isDefaultImageOrder);
        log.trace("read(): finish");
        return theData;
    }

    // Implementing DataFormat
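    /**
     * Writes data from memory to the file, based on the current selection
     * of this dataset.
     *
     * @param buf the data to write.
     */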
    @SuppressWarnings("deprecation")
    @Override
    public void write(Object buf) throws HDFException
    {
        log.trace("write(): start");

        if (buf == null) {
            log.debug("write(): Object is null");
            log.trace("write(): finish");
            return;
        }

        long id = open();
        if (id < 0) {
            log.debug("write(): Invalid SDID");
            log.trace("write(): finish");
            return;
        }

        int[] select = new int[rank];
        int[] start = new int[rank];
        for (int i=0; i<rank; i++) {
            select[i] = (int)selectedDims[i];
            start[i] = (int)startDims[i];
        }

        int[] stride = null;
        if (selectedStride != null) {
            stride = new int[rank];
            for (int i=0; i<rank; i++) {
                stride[i] = (int)selectedStride[i];
            }
        }

        Object tmpData = buf;
        try {
            if (getDatatype().isUnsigned() && unsignedConverted) {
                tmpData = convertToUnsignedC(buf);
            }
            // assume external data files are located in the same directory as the main file.
            HDFLibrary.HXsetdir(getFileFormat().getParent());

            HDFLibrary.SDwritedata(id, start, stride, select, tmpData);
        }
        catch (Exception ex) {
            log.debug("write(): failure: ", ex);
        }
        finally {
            tmpData = null;
            close(id);
        }

        log.trace("write(): finish");
    }

    // Implementing DataFormat
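    /**
     * Retrieves the list of attributes of this dataset, reading it from the
     * file on the first call and returning the cached list afterwards.
     *
     * @return the list of attributes of this dataset.
     */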
    @Override
    @SuppressWarnings({"rawtypes", "unchecked"})
    public List getMetadata() throws HDFException
    {
        log.trace("getMetadata(): start");

        if (attributeList != null) {
            log.trace("getMetadata(): attributeList != null");
            log.trace("getMetadata(): finish");
            return attributeList;
        }

        long id = open();
        String[] objName = {""};
        int[] sdInfo = {0, 0, 0};
        try {
            // retrieve attributes of the dataset
            int[] tmpDim = new int[HDFConstants.MAX_VAR_DIMS];
            HDFLibrary.SDgetinfo(id, objName, tmpDim, sdInfo);
            int n = sdInfo[2];

            if ((attributeList == null) && (n>0)) {
                attributeList = new Vector(n, 5);
            }

            boolean b = false;
            String[] attrName = new String[1];
            int[] attrInfo = {0, 0};
            for (int i=0; i<n; i++) {
                attrName[0] = "";
                try {
                    b = HDFLibrary.SDattrinfo(id, i, attrName, attrInfo);
                    // mask off the litend bit
                    attrInfo[0] = attrInfo[0] & (~HDFConstants.DFNT_LITEND);
                }
                catch (HDFException ex) {
                    log.debug("getMetadata(): attribute[{}] SDattrinfo failure: ", i, ex);
                    b = false;
                }

                if (!b) {
                    continue;
                }

                long[] attrDims = {attrInfo[1]};
                Attribute attr = new Attribute(this, attrName[0], new H4Datatype(attrInfo[0]), attrDims);
                attributeList.add(attr);

                Object buf = null;
                try {
                    buf = H4Datatype.allocateArray(attrInfo[0], attrInfo[1]);
                }
                catch (OutOfMemoryError e) {
                    log.debug("getMetadata(): out of memory: ", e);
                    buf = null;
                }

                try {
                    HDFLibrary.SDreadattr(id, i, buf);
                }
                catch (HDFException ex) {
                    log.debug("getMetadata(): attribute[{}] SDreadattr failure: ", i, ex);
                    buf = null;
                }

                if (buf != null) {
                    if ((attrInfo[0] == HDFConstants.DFNT_CHAR) ||
                        (attrInfo[0] ==  HDFConstants.DFNT_UCHAR8)) {
                        buf = Dataset.byteToString((byte[])buf, attrInfo[1]);
                    }
                    else if (attrName[0].equalsIgnoreCase("fillValue") ||
                            attrName[0].equalsIgnoreCase("_fillValue")) {
                        fillValue = buf;
                    }

                    attr.setData(buf);
                }

            } // for (int i=0; i<n; i++)

            // retrieve attribute of dimension
            // BUG !! HDFLibrary.SDgetdimstrs(dimID, argv, 80) does not return anything
            /*
            for (int i=0; i< rank; i++) {
                int dimID = HDFLibrary.SDgetdimid(id, i);
                String[] argv = {" ", " ", " "};
                HDFLibrary.SDgetdimstrs(dimID, argv, 80);
            }
             */
        }
        catch (Exception ex) {
            log.debug("getMetadata(): failure: ", ex);
        }
        finally {
            close(id);
        }

        log.trace("getMetadata(): finish");
        return attributeList;
    }

    // To do: implementing DataFormat
    @Override
    @SuppressWarnings({"rawtypes", "unchecked"})
    public void writeMetadata(Object info) throws Exception
    {
        log.trace("writeMetadata(): start");

        // only attribute metadata is supported.
        if (!(info instanceof Attribute)) {
            log.debug("writeMetadata(): Object not an Attribute");
            log.trace("writeMetadata(): finish");
            return;
        }

        try {
            getFileFormat().writeAttribute(this, (Attribute)info, true);

            if (attributeList == null) {
                attributeList = new Vector();
            }

            attributeList.add(info);
            nAttributes = attributeList.size();
        }
        catch (Exception ex) {
            log.debug("writeMetadata(): failure: ", ex);
        }

        log.trace("writeMetadata(): finish");
    }

    // To do: implementing DataFormat
    @Override
    public void removeMetadata(Object info) throws HDFException {
        log.trace("removeMetadata(): disabled");
    }

    // implementing DataFormat
    @Override
    public void updateMetadata(Object info) throws Exception {
        log.trace("updateMetadata(): disabled");
    }

    // Implementing HObject
    @Override
    public long open()
    {
        log.trace("open(): start");

        long id = -1;

        try {
            int index = 0;
            int tag = (int)oid[0];

            log.trace("open(): tag={}", tag);
            if (tag == H4SDS.DFTAG_NDG_NETCDF) {
                index = (int)oid[1]; // HDFLibrary.SDidtoref(id) fails for netCDF
            }
            else {
                index = HDFLibrary.SDreftoindex(sdid, (int)oid[1]);
            }

            id = HDFLibrary.SDselect(sdid, index);
        }
        catch (HDFException ex) {
            log.debug("open(): failure: ", ex);
            id = -1;
        }

        log.trace("open(): finish");
        return id;
    }

    // Implementing HObject
    @Override
    public void close(long id)
    {
        try { HDFLibrary.SDendaccess(id); }
        catch (HDFException ex) { log.debug("close(): failure: ", ex); }
    }

    /**
     * Initializes the H4SDS, such as the dimension sizes of this dataset.
     */
    @SuppressWarnings("deprecation")
    @Override
    public void init()
    {
        log.trace("init(): start");

        if (inited) {
            log.trace("init(): Already initialized");
            log.trace("init(): finish");
            return; // already called. Initialize only once
        }

        long id = open();
        String[] objName = {""};
        String[] dimName = {""};
        int[] dimInfo = {0, 0, 0};
        int[] sdInfo = {0, 0, 0};
        boolean isUnlimited = false;

        int[] idims = new int[HDFConstants.MAX_VAR_DIMS];
        try {
            HDFLibrary.SDgetinfo(id, objName, idims, sdInfo);
            // mask off the litend bit
            sdInfo[1] = sdInfo[1] & (~HDFConstants.DFNT_LITEND);
            nAttributes = sdInfo[2];
            rank = sdInfo[0];

            if (rank <= 0) {
                rank = 1;
                idims[0] = 1;
            }

            isUnlimited = HDFLibrary.SDisrecord(id);
            log.trace("init(): isUnlimited={}", isUnlimited);

            datatypeID = sdInfo[1];
            isText = ((datatypeID == HDFConstants.DFNT_CHAR) || (datatypeID == HDFConstants.DFNT_UCHAR8));

            // get the dimension names
            try {
                dimNames = new String[rank];
                for (int i=0; i<rank; i++) {
                    long dimid = HDFLibrary.SDgetdimid(id, i);
                    HDFLibrary.SDdiminfo(dimid, dimName, dimInfo);
                    dimNames[i] = dimName[0];
                }
            }
            catch (Exception ex) {
                log.debug("init(): get the dimension names: ", ex);
            }

            // get compression information
            try {
                HDFCompInfo compInfo = new HDFCompInfo();

                HDFLibrary.SDgetcompinfo(id, compInfo);
                if (compInfo.ctype == HDFConstants.COMP_CODE_DEFLATE) {
                    HDFDeflateCompInfo comp = new HDFDeflateCompInfo();
                    HDFLibrary.SDgetcompinfo(id, comp);
                    compression = "GZIP(level="+comp.level+")";
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_SZIP) {
                    HDFSZIPCompInfo comp = new HDFSZIPCompInfo();
                    HDFLibrary.SDgetcompinfo(id, comp);
                    compression = "SZIP(bits_per_pixel="+comp.bits_per_pixel+",options_mask="+comp.options_mask+
                                  ",pixels="+comp.pixels+",pixels_per_block="+comp.pixels_per_block+
                                  ",pixels_per_scanline="+comp.pixels_per_scanline+")";
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_JPEG) {
                    HDFJPEGCompInfo comp = new HDFJPEGCompInfo();
                    HDFLibrary.SDgetcompinfo(id, comp);
                    compression = "JPEG(quality="+comp.quality+
                                  ",force_baseline="+comp.force_baseline+")";
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_SKPHUFF) {
                    HDFSKPHUFFCompInfo comp = new HDFSKPHUFFCompInfo();
                    HDFLibrary.SDgetcompinfo(id, comp);
                    compression = "SKPHUFF(skp_size="+comp.skp_size+")";
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_RLE) {
                    compression = "RLE";
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_NBIT) {
                    HDFNBITCompInfo comp = new HDFNBITCompInfo();
                    HDFLibrary.SDgetcompinfo(id, comp);
                    compression = "NBIT(nt="+comp.nt+",bit_len="+comp.bit_len+",ctype="+comp.ctype+
                                  ",fill_one="+comp.fill_one+",sign_ext="+comp.sign_ext+
                                  ",start_bit="+comp.start_bit+")";
                }
            }
            catch (Exception ex) {
                log.debug("init(): get compression information failure: ", ex);
            }

            // get chunk information
            try {
                HDFChunkInfo chunkInfo = new HDFChunkInfo();
                int[] cflag = {HDFConstants.HDF_NONE};

                try {
                    HDFLibrary.SDgetchunkinfo(id, chunkInfo, cflag);
                }
                catch (Throwable ex) {
                    log.debug("init(): SDgetchunkinfo failure: ", ex);
                }

                if (cflag[0] == HDFConstants.HDF_NONE) {
                    chunkSize = null;
                    storage_layout = "NONE";
                }
                else {
                    chunkSize = new long[rank];
                    for (int i=0; i<rank; i++) {
                        chunkSize[i] = chunkInfo.chunk_lengths[i];
                    }
                    storage_layout = "CHUNKED: " + String.valueOf(chunkSize[0]);
                    for (int i = 1; i < rank; i++) {
                        storage_layout += " X " + chunkSize[i];
                    }
                }
            }
            catch (Exception ex) {
                log.debug("init(): get chunk information failure: ", ex);
            }

            inited = true;
        }
        catch (HDFException ex) {
            log.debug("init(): failure: ", ex);
        }
        finally {
            close(id);
        }

        dims = new long[rank];
        maxDims = new long[rank];
        startDims = new long[rank];
        selectedDims = new long[rank];

        for (int i=0; i<rank; i++) {
            startDims[i] = 0;
            selectedDims[i] = 1;
            dims[i] = maxDims[i] = idims[i];
        }

        if (isUnlimited)
            maxDims[0] = -1;

        selectedIndex[0] = 0;
        selectedIndex[1] = 1;
        selectedIndex[2] = 2;

        // select only two dimensions at a time
        if (rank == 1) {
            selectedDims[0] = dims[0];
        }

        if (rank > 1) {
            selectedDims[0] = dims[0];
            if (isText) {
                selectedDims[1] = 1;
            }
            else {
                selectedDims[1] = dims[1];
            }
        }

        log.trace("init(): finish");
    }

    // Implementing ScalarDS
    @Override
    public byte[][] getPalette()
    {
        return palette;
    }

    /**
     * Creates a new dataset.
     *
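     * <p>
     * A minimal usage sketch. The file contents and names here are
     * hypothetical; it assumes {@code root} is the root group of an open,
     * writable H4File:
     * <pre>
     * // assumption: root is the root group of a writable H4File
     * Datatype dtype = new H4Datatype(Datatype.CLASS_FLOAT, 4, Datatype.NATIVE, Datatype.NATIVE);
     * long[] dims = {100, 50};
     * float[] data = new float[100 * 50]; // values to write, in row-major order
     * H4SDS sds = H4SDS.create("float_sds", root, dtype, dims, null, null, 0, null, data);
     * </pre>
     *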
     * @param name the name of the dataset to create.
     * @param pgroup the parent group of the new dataset.
     * @param type the datatype of the dataset.
     * @param dims the dimension sizes of the dataset.
     * @param maxdims the max dimension sizes of the dataset.
     * @param chunks the chunk sizes of the dataset.
     * @param gzip the level of the gzip compression.
     * @param fillValue the default value.
     * @param data the array of data values.
     *
     * @return the new dataset if successful. Otherwise returns null.
     *
     * @throws Exception if the dataset cannot be created
     */
    public static H4SDS create(
        String name,
        Group pgroup,
        Datatype type,
        long[] dims,
        long[] maxdims,
        long[] chunks,
        int gzip,
        Object fillValue,
        Object data) throws Exception
    {
        log.trace("create(): start");

        H4SDS dataset = null;
        if ((pgroup == null) ||
            (name == null) ||
            (dims == null)) {
            log.debug("create(): Parent group, name or dims is null");
            log.trace("create(): finish");
            return null;
        }

        H4File file = (H4File)pgroup.getFileFormat();

        if (file == null) {
            log.debug("create(): Parent group FileFormat is null");
            log.trace("create(): finish");
            return null;
        }

        String path = HObject.separator;
        if (!pgroup.isRoot()) {
            path = pgroup.getPath()+pgroup.getName()+HObject.separator;
        }
        // prepare the dataspace
        // int tsize = 1;
        int rank = dims.length;
        int idims[] = new int[rank];
        int start[] = new int[rank];
        for (int i=0; i<rank; i++) {
            idims[i] = (int)dims[i];
            start[i] = 0;
            // tsize *= idims[i];
        }

        // only the first element of the SDcreate parameter dim_sizes (i.e.,
        // the dimension of the lowest rank or the slowest-changing dimension)
        // can be assigned the value SD_UNLIMITED (or 0) to make the first
        // dimension unlimited.
        if ((maxdims != null) && (maxdims[0]<=0)) {
            idims[0] = 0; // set to unlimited dimension.
        }

        int ichunks[] = null;
        if (chunks != null) {
            ichunks = new int[rank];
            for (int i=0; i<rank; i++) {
                ichunks[i] = (int)chunks[i];
            }
        }

        // unlimited cannot be used with chunking or compression for HDF 4.2.6 or earlier.
        if (idims[0] == 0 && (ichunks != null || gzip>0)) {
            log.debug("create(): Unlimited cannot be used with chunking or compression");
            log.trace("create(): finish");
            throw new HDFException("Unlimited cannot be used with chunking or compression");
        }

        long sdid = file.getSDAccessID();
        long sdsid = -1;
        long vgid = -1;
        long tid = type.createNative();

        if (tid >= 0) {
            try {
                sdsid = HDFLibrary.SDcreate(sdid, name, tid, rank, idims);
                // set fill value to zero.
                int vsize = HDFLibrary.DFKNTsize(tid);
                byte[] fill = new byte[vsize];
                for (int i=0; i<vsize; i++) {
                    fill[i] = 0;
                }
                HDFLibrary.SDsetfillvalue(sdsid, fill);

                // when we create a new dataset with unlimited dimension,
                // we have to write some data into the dataset, otherwise
                // the resulting dataset has zero dimension size.

                // comment out the following lines because SDwritedata fails when
                // trying to write data into a zero dimension array. 05/25/05
                // don't know why the code was first put here ????
                /**
                if (idims[0] == 0 && data == null)
                {
                    idims[0] = (int)dims[0];
                    data = new byte[tsize*vsize];
                }
                 */

            }
            catch (Exception ex) {
                log.debug("create(): failure: ", ex);
                log.trace("create(): finish");
                throw (ex);
            }
        }

        if (sdsid < 0) {
            log.debug("create(): Dataset creation failed");
            log.trace("create(): finish");
            throw (new HDFException("Unable to create the new dataset."));
        }

        HDFDeflateCompInfo compInfo = null;
        if (gzip > 0) {
            // set compression
            compInfo = new HDFDeflateCompInfo();
            compInfo.level = gzip;
            if (chunks == null)
                HDFLibrary.SDsetcompress(sdsid, HDFConstants.COMP_CODE_DEFLATE, compInfo);
        }

        if (chunks != null) {
            // set chunk
            HDFChunkInfo chunkInfo = new HDFChunkInfo(ichunks);
            int flag = HDFConstants.HDF_CHUNK;

            if (gzip > 0) {
                flag = HDFConstants.HDF_CHUNK | HDFConstants.HDF_COMP;
                chunkInfo = new HDFChunkInfo(ichunks, HDFConstants.COMP_CODE_DEFLATE, compInfo);
            }

            try {
                HDFLibrary.SDsetchunk(sdsid, chunkInfo, flag);
            }
            catch (Throwable err) {
                log.debug("create(): SDsetchunk failure: ", err);
                log.trace("create(): finish");
                throw new HDFException("SDsetchunk failed.");
            }
        }

        if ((sdsid > 0) && (data != null)) {
            HDFLibrary.SDwritedata(sdsid, start, null, idims, data);
        }

        int ref = HDFLibrary.SDidtoref(sdsid);

        if (!pgroup.isRoot()) {
            // add the dataset to the parent group
            vgid = pgroup.open();
            if (vgid < 0) {
                if (sdsid > 0) {
                    HDFLibrary.SDendaccess(sdsid);
                }
                log.debug("create(): Invalid Parent Group ID");
                log.trace("create(): finish");
                throw (new HDFException("Unable to open the parent group."));
            }

            HDFLibrary.Vaddtagref(vgid, HDFConstants.DFTAG_NDG, ref);

            pgroup.close(vgid);
        }

        try {
            if (sdsid > 0) {
                HDFLibrary.SDendaccess(sdsid);
            }
        }
        catch (Exception ex) {
            log.debug("create(): SDendaccess failure: ", ex);
        }

        long[] oid = {HDFConstants.DFTAG_NDG, ref};
        dataset = new H4SDS(file, name, path, oid);

        pgroup.addToMemberList(dataset);

        log.trace("create(): finish");
        return dataset;
    }

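    /**
     * Creates a new dataset without specifying a fill value.
     *
     * @see #create(String, Group, Datatype, long[], long[], long[], int, Object, Object)
     */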
    public static H4SDS create(
            String name,
            Group pgroup,
            Datatype type,
            long[] dims,
            long[] maxdims,
            long[] chunks,
            int gzip,
            Object data) throws Exception
    {
        return create(name, pgroup, type, dims, maxdims, chunks, gzip, null, data);
    }

    /**
     * Copies attributes from one SDS to another SDS.
     */
    private void copyAttribute(long srcdid, long dstdid)
    {
        log.trace("copyAttribute(): start: srcdid={} dstdid={}", srcdid, dstdid);
        try {
            String[] objName = {""};
            int[] sdInfo = {0, 0, 0};
            int[] tmpDim = new int[HDFConstants.MAX_VAR_DIMS];
            HDFLibrary.SDgetinfo(srcdid, objName, tmpDim, sdInfo);
            int numberOfAttributes = sdInfo[2];
            log.trace("copyAttribute(): numberOfAttributes={}", numberOfAttributes);

            boolean b = false;
            String[] attrName = new String[1];
            int[] attrInfo = {0, 0};
            for (int i=0; i<numberOfAttributes; i++) {
                attrName[0] = "";
                try {
                    b = HDFLibrary.SDattrinfo(srcdid, i, attrName, attrInfo);
                }
                catch (HDFException ex) {
                    log.debug("copyAttribute(): attribute[{}] SDattrinfo failure: ", i, ex);
                    b = false;
                }

                if (!b) {
                    continue;
                }

                // read attribute data from source dataset
                byte[] attrBuff = new byte[attrInfo[1] * HDFLibrary.DFKNTsize(attrInfo[0])];
                try {
                    HDFLibrary.SDreadattr(srcdid, i, attrBuff);
                }
                catch (HDFException ex) {
                    log.debug("copyAttribute(): attribute[{}] SDreadattr failure: ", i, ex);
                    attrBuff = null;
                }

                if (attrBuff == null) {
                    log.debug("copyAttribute(): attrBuff[{}] is null", i);
                    log.trace("copyAttribute(): continue");
                    continue;
                }

                // attach attribute to the destination dataset
                HDFLibrary.SDsetattr(dstdid, attrName[0], attrInfo[0], attrInfo[1], attrBuff);
            } // for (int i=0; i<numberOfAttributes; i++)
        }
        catch (Exception ex) {
            log.debug("copyAttribute(): failure: ", ex);
        }

        log.trace("copyAttribute(): finish");
    }

    // Implementing DataFormat
    @SuppressWarnings("rawtypes")
    public List getMetadata(int... attrPropList) throws Exception {
        throw new UnsupportedOperationException("getMetadata(int... attrPropList) is not supported");
    }
}