/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the file COPYING.                     *
 * COPYING can be found at the root of the source code distribution tree.    *
 * If you do not have access to this file, you may request a copy from       *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.h4;

import java.util.List;
import java.util.Vector;

import hdf.hdflib.HDFChunkInfo;
import hdf.hdflib.HDFCompInfo;
import hdf.hdflib.HDFConstants;
import hdf.hdflib.HDFDeflateCompInfo;
import hdf.hdflib.HDFException;
import hdf.hdflib.HDFJPEGCompInfo;
import hdf.hdflib.HDFLibrary;
import hdf.hdflib.HDFNBITCompInfo;
import hdf.hdflib.HDFSKPHUFFCompInfo;
import hdf.hdflib.HDFSZIPCompInfo;
import hdf.object.Attribute;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.ScalarDS;

/**
 * H4SDS describes HDF4 Scientific Data Sets (SDS) and operations performed on
 * the SDS. An SDS is a group of data structures used to store and describe
 * multidimensional arrays of scientific data.
 * <p>
 * The data contained in an SDS array has a data type associated with it. The
 * standard data types supported by the SD interface include 32- and 64-bit
 * floating-point numbers, 8-, 16- and 32-bit signed integers, 8-, 16- and
 * 32-bit unsigned integers, and 8-bit characters.
 * <p>
 * <b>How to Select a Subset</b>
 * <p>
 * Dataset defines APIs for reading, writing and subsetting a dataset. No function
 * is defined to select a subset of a data array; the selection is done implicitly.
 * Methods that return dimension information, such as getSelectedDims(), return a
 * reference to the array held inside the dataset object, so changes made to that
 * array outside the dataset object directly change the selection, much like
 * pointers in C.
 * <p>
 *
 * The following is an example of how to make a subset. In the example, the dataset
 * is a 4-dimensional array of size [200][100][50][10], i.e.
 * dims[0]=200; dims[1]=100; dims[2]=50; dims[3]=10; <br>
 * We want to select every other data point in dims[1] and dims[2].
 * <pre>
     int rank = dataset.getRank();   // number of dimensions of the dataset
     long[] dims = dataset.getDims(); // the dimension sizes of the dataset
     long[] selected = dataset.getSelectedDims(); // the selected size of the dataset
     long[] start = dataset.getStartDims(); // the offset of the selection
     long[] stride = dataset.getStride(); // the stride of the dataset
     int[]  selectedIndex = dataset.getSelectedIndex(); // the selected dimensions for display

     // select dim1 and dim2 as 2D data for display, and slice through dim0
     selectedIndex[0] = 1;
     selectedIndex[1] = 2;
     selectedIndex[2] = 0;

     // reset the selection arrays
     for (int i=0; i&lt;rank; i++) {
         start[i] = 0;
         selected[i] = 1;
         stride[i] = 1;
     }

     // set stride to 2 on dim1 and dim2 so that every other data point is selected.
     stride[1] = 2;
     stride[2] = 2;

     // set the selection size of dim1 and dim2
     selected[1] = dims[1]/stride[1];
     selected[2] = dims[2]/stride[2];

     // when dataset.read() is called, the selection above will be used because
     // the dimension arrays are passed by reference. Changes made to these arrays
     // outside the dataset object directly change the values of the arrays
     // in the dataset object.

 * </pre>
 *
 * @version 1.1 9/4/2007
 * @author Peter X. Cao
 */
public class H4SDS extends ScalarDS
{
    private static final long serialVersionUID = 2557157923292438696L;

    private final static org.slf4j.Logger   log = org.slf4j.LoggerFactory.getLogger(H4SDS.class);

    /** tag for netCDF datasets.
     *  The HDF4 library supports netCDF version 2.3.2; only the SDS APIs are supported.
     */
    // magic number for netCDF: the decimal codes of "C(67) D(68) F(70) '\001'" concatenated
    public static final int                 DFTAG_NDG_NETCDF = 67687001;

    /**
     * The list of attributes of this data object. Members of the list are
     * instances of Attribute.
     */
    @SuppressWarnings("rawtypes")
    private List                            attributeList;

    /**
     * The SD interface identifier obtained from SDstart(filename, access)
     */
    private int sdid;

    /** the datatype identifier */
    private int datatypeID = -1;

    /** the number of attributes of this dataset; -1 if not yet retrieved */
    private int                             nAttributes = -1;

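    /**
     * Creates an H4SDS object with a specific name and path, and a null
     * object identifier; a convenience form of the four-argument constructor.
     *
     * @param theFile the HDF file.
     * @param name the name of this H4SDS.
     * @param path the full path of this H4SDS.
     */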
    public H4SDS(FileFormat theFile, String name, String path)
    {
        this(theFile, name, path, null);
    }

    /**
     * Creates an H4SDS object with specific name and path.
     *
     * @param theFile the HDF file.
     * @param name the name of this H4SDS.
     * @param path the full path of this H4SDS.
     * @param oid the unique identifier of this data object.
     */
    public H4SDS(
        FileFormat theFile,
        String name,
        String path,
        long[] oid)
    {
        super(theFile, name, path, oid);
        unsignedConverted = false;
        sdid = ((H4File)getFileFormat()).getSDAccessID();
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.DataFormat#hasAttribute()
     */
    public boolean hasAttribute()
    {
        if (nAttributes < 0) {
            sdid = ((H4File)getFileFormat()).getSDAccessID();

            int id = open();

            if (id >= 0) {
                try { // retrieve attributes of the dataset
                    String[] objName = {""};
                    int[] sdInfo = {0, 0, 0};
                    int[] tmpDim = new int[HDFConstants.MAX_VAR_DIMS];
                    HDFLibrary.SDgetinfo(id, objName, tmpDim, sdInfo);
                    nAttributes = sdInfo[2];
                }
                catch (Exception ex) {
                    log.debug("hasAttribute(): failure: ", ex);
                    nAttributes = 0;
                }

                log.trace("hasAttribute(): nAttributes={}", nAttributes);

                close(id);
            }
        }

        return (nAttributes > 0);
    }

    // ***** need to implement from ScalarDS *****
    @Override
    public byte[][] readPalette(int idx) { return null; }

    // ***** need to implement from ScalarDS *****
    @Override
    public byte[] getPaletteRefs() { return null; }

    // implementing Dataset
    @Override
    public Datatype getDatatype()
    {
        if (datatype == null)
        {
            datatype = new H4Datatype(datatypeID);
        }

        return datatype;
    }

    // To do: Implementing Dataset
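    /**
     * Copies this dataset into the given parent group, optionally subsetting
     * the extent and supplying the data to write.
     *
     * @param pgroup the parent group of the copy.
     * @param dname the name of the copy; null to reuse this dataset's name.
     * @param dims the dimension sizes of the copy; null to copy the full extent.
     * @param buff the data to write into the copy; null to read it from the source.
     *
     * @return the new dataset if successful; otherwise null.
     *
     * @throws Exception if the copy fails.
     */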
    @Override
    public Dataset copy(Group pgroup, String dname, long[] dims, Object buff)
    throws Exception
    {
        log.trace("copy(): start: parentGroup={} datasetName={}", pgroup, dname);

        Dataset dataset = null;
        int srcdid=-1, dstdid=-1, tid=-1, size=1, theRank=2;
        String path=null;
        int[] count=null, start=null;

        if (pgroup == null) {
            log.debug("copy(): Parent group is null");
            log.trace("copy(): finish");
            return null;
        }

        if (dname == null) {
            dname = getName();
        }

        if (pgroup.isRoot()) {
            path = HObject.separator;
        }
        else {
            path = pgroup.getPath()+pgroup.getName()+HObject.separator;
        }
        log.trace("copy(): path={}", path);

        srcdid = open();
        if (srcdid < 0) {
            log.debug("copy(): Invalid source SDID");
            log.trace("copy(): finish");
            return null;
        }

        if (dims == null) {
            theRank = getRank();
            if (theRank <= 0) {
                init();
            }
            theRank = getRank();

            dims = getDims();
        }
        else {
            theRank = dims.length;
        }

        start = new int[theRank];
        count = new int[theRank];
        for (int i=0; i<theRank; i++) {
            start[i] = 0;
            count[i] = (int)dims[i];
            size *= count[i];
        }
        log.trace("copy(): theRank={} with size={}", theRank, size);

        // create the new dataset and attach it to the parent group
        tid = datatypeID;
        dstdid = HDFLibrary.SDcreate(
            ((H4File)pgroup.getFileFormat()).getSDAccessID(),
            dname, tid, theRank, count);
        if (dstdid < 0) {
            log.debug("copy(): Invalid dest SDID");
            log.trace("copy(): finish");
            return null;
        }

        int ref = HDFLibrary.SDidtoref(dstdid);
        if (!pgroup.isRoot()) {
            int vgid = pgroup.open();
            HDFLibrary.Vaddtagref(vgid, HDFConstants.DFTAG_NDG, ref);
            pgroup.close(vgid);
        }

        // copy attributes from one object to the new object
        log.trace("copy(): copy attributes");
        copyAttribute(srcdid, dstdid);

        // read data from the source dataset
        log.trace("copy(): read data from the source dataset");
        if (buff == null) {
            buff = new byte[size * HDFLibrary.DFKNTsize(tid)];
            HDFLibrary.SDreaddata(srcdid, start, null, count, buff);
        }

        // write the data into the destination dataset
        log.trace("copy(): write the data into the destination dataset");
        HDFLibrary.SDwritedata(dstdid, start, null, count, buff);

        long[] oid = {HDFConstants.DFTAG_NDG, ref};
        dataset = new H4SDS(pgroup.getFileFormat(), dname, path, oid);

        pgroup.addToMemberList(dataset);

        close(srcdid);

        try {
            HDFLibrary.SDendaccess(dstdid);
        }
        catch (HDFException ex) {
            log.debug("copy(): SDendaccess failure: ", ex);
        }

        log.trace("copy(): finish");
        return dataset;
    }

    // Implementing Dataset
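    /**
     * Reads the currently selected subset of this dataset as a raw byte
     * array, without conversion to the matching Java type.
     *
     * @return the raw bytes of the selected data, or null on failure.
     *
     * @throws HDFException if the HDF library reports an error.
     */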
    @Override
    public byte[] readBytes() throws HDFException
    {
        log.trace("readBytes(): start");

        byte[] theData = null;

        if (rank <= 0) {
            init();
        }

        int id = open();
        if (id < 0) {
            log.debug("readBytes(): Invalid SDID");
            log.trace("readBytes(): finish");
            return null;
        }

        int datasize = 1;
        int[] select = new int[rank];
        int[] start = new int[rank];
        for (int i=0; i<rank; i++) {
            datasize *= (int)selectedDims[i];
            select[i] = (int)selectedDims[i];
            start[i] = (int)startDims[i];
        }

        int[] stride = null;
        if (selectedStride != null) {
            stride = new int[rank];
            for (int i=0; i<rank; i++) {
                stride[i] = (int)selectedStride[i];
            }
        }

        try {
            int size = HDFLibrary.DFKNTsize(datatypeID)*datasize;
            theData = new byte[size];
            HDFLibrary.SDreaddata(id, start, stride, select, theData);
        }
        catch (Exception ex) {
            log.debug("readBytes(): failure: ", ex);
        }
        finally {
            close(id);
        }

        log.trace("readBytes(): finish");
        return theData;
    }

    // Implementing DataFormat
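    /**
     * Reads the currently selected subset (see the class comment on how to
     * select a subset) into a one-dimensional Java array whose element type
     * matches the SDS datatype. Character data is converted to strings.
     *
     * @return the data read, or null on failure.
     *
     * @throws HDFException if the HDF library reports an error.
     */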
    @Override
    public Object read() throws HDFException
    {
        log.trace("read(): start");

        Object theData = null;

        if (rank <= 0) {
            init();
        }

        int id = open();
        if (id < 0) {
            log.debug("read(): Invalid SDID");
            log.trace("read(): finish");
            return null;
        }

        int datasize = 1;
        int[] select = new int[rank];
        int[] start = new int[rank];
        for (int i=0; i<rank; i++) {
            datasize *= (int)selectedDims[i];
            select[i] = (int)selectedDims[i];
            start[i] = (int)startDims[i];
        }

        int[] stride = null;
        if (selectedStride != null) {
            stride = new int[rank];
            for (int i=0; i<rank; i++) {
                stride[i] = (int)selectedStride[i];
            }
        }

        try {
            theData = H4Datatype.allocateArray(datatypeID, datasize);

            if (theData != null) {
                // assume external data files are located in the same directory as the main file.
                HDFLibrary.HXsetdir(getFileFormat().getParent());

                HDFLibrary.SDreaddata(id, start, stride, select, theData);

                if (isText) {
                    theData = byteToString((byte[])theData, select[0]);
                }
            }
        }
        catch (Exception ex) {
            log.debug("read(): failure: ", ex);
        }
        finally {
            close(id);
        }

        if (fillValue == null && isImageDisplay) {
            try {
                getMetadata(); // need to set fillValue for images
            }
            catch (Exception ex) {
                log.debug("read(): getMetadata failure: ", ex);
            }
        }

        if ((rank > 1) && (selectedIndex[0] > selectedIndex[1]))
            isDefaultImageOrder = false;
        else
            isDefaultImageOrder = true;

        log.trace("read(): isDefaultImageOrder={}", isDefaultImageOrder);
        log.trace("read(): finish");
        return theData;
    }

    // Implementing DataFormat
    @Override
    public void write(Object buf) throws HDFException
    {
        log.trace("write(): start");

        if (buf == null) {
            log.debug("write(): Object is null");
            log.trace("write(): finish");
            return;
        }

        int id = open();
        if (id < 0) {
            log.debug("write(): Invalid SDID");
            log.trace("write(): finish");
            return;
        }

        int[] select = new int[rank];
        int[] start = new int[rank];
        for (int i=0; i<rank; i++) {
            select[i] = (int)selectedDims[i];
            start[i] = (int)startDims[i];
        }

        int[] stride = null;
        if (selectedStride != null) {
            stride = new int[rank];
            for (int i=0; i<rank; i++) {
                stride[i] = (int)selectedStride[i];
            }
        }

        Object tmpData = buf;
        try {
            if (isUnsigned && unsignedConverted) {
                tmpData = convertToUnsignedC(buf);
            }
            // assume external data files are located in the same directory as the main file.
            HDFLibrary.HXsetdir(getFileFormat().getParent());

            HDFLibrary.SDwritedata(id, start, stride, select, tmpData);
        }
        catch (Exception ex) {
            log.debug("write(): failure: ", ex);
        }
        finally {
            tmpData = null;
            close(id);
        }

        log.trace("write(): finish");
    }

    // Implementing DataFormat
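    /**
     * Retrieves the list of attributes attached to this dataset. The list is
     * read from the file once and cached; a fill value found in an attribute
     * named "fillValue" or "_FillValue" (case-insensitive) is also recorded.
     *
     * @return the list of Attribute objects, or null if the dataset has none.
     *
     * @throws HDFException if the HDF library reports an error.
     */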
    @SuppressWarnings({"rawtypes", "unchecked"})
    public List getMetadata() throws HDFException
    {
        log.trace("getMetadata(): start");

        if (attributeList != null) {
            log.trace("getMetadata(): attributeList != null");
            log.trace("getMetadata(): finish");
            return attributeList;
        }

        int id = open();
        String[] objName = {""};
        int[] sdInfo = {0, 0, 0};
        try {
            // retrieve attributes of the dataset
            int[] tmpDim = new int[HDFConstants.MAX_VAR_DIMS];
            HDFLibrary.SDgetinfo(id, objName, tmpDim, sdInfo);
            int n = sdInfo[2];

            if ((attributeList == null) && (n > 0)) {
                attributeList = new Vector(n, 5);
            }

            boolean b = false;
            String[] attrName = new String[1];
            int[] attrInfo = {0, 0};
            for (int i=0; i<n; i++) {
                attrName[0] = "";
                try {
                    b = HDFLibrary.SDattrinfo(id, i, attrName, attrInfo);
                    // mask off the litend bit
                    attrInfo[0] = attrInfo[0] & (~HDFConstants.DFNT_LITEND);
                }
                catch (HDFException ex) {
                    log.debug("getMetadata(): attribute[{}] SDattrinfo failure: ", i, ex);
                    b = false;
                }

                if (!b) {
                    continue;
                }

                long[] attrDims = {attrInfo[1]};
                Attribute attr = new Attribute(attrName[0], new H4Datatype(attrInfo[0]), attrDims);
                attributeList.add(attr);

                Object buf = H4Datatype.allocateArray(attrInfo[0], attrInfo[1]);
                try {
                    HDFLibrary.SDreadattr(id, i, buf);
                }
                catch (HDFException ex) {
                    log.debug("getMetadata(): attribute[{}] SDreadattr failure: ", i, ex);
                    buf = null;
                }

                if (buf != null) {
                    if ((attrInfo[0] == HDFConstants.DFNT_CHAR) ||
                        (attrInfo[0] == HDFConstants.DFNT_UCHAR8)) {
                        buf = Dataset.byteToString((byte[])buf, attrInfo[1]);
                    }
                    else if (attrName[0].equalsIgnoreCase("fillValue") ||
                            attrName[0].equalsIgnoreCase("_fillValue")) {
                        fillValue = buf;
                    }

                    attr.setValue(buf);
                }

            } // for (int i=0; i<n; i++)

            // retrieve attributes of the dimensions
            // BUG !! HDFLibrary.SDgetdimstrs(dimID, argv, 80) does not return anything
/*
            for (int i=0; i< rank; i++) {
                int dimID = HDFLibrary.SDgetdimid(id, i);
                String[] argv = {" ", " ", " "};
                HDFLibrary.SDgetdimstrs(dimID, argv, 80);
            }
*/
        }
        catch (Exception ex) {
            log.debug("getMetadata(): failure: ", ex);
        }
        finally {
            close(id);
        }

        log.trace("getMetadata(): finish");
        return attributeList;
    }

    // To do: implementing DataFormat
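    /**
     * Writes the given metadata into the file and appends it to the cached
     * attribute list. Only Attribute objects are supported.
     *
     * @param info the Attribute to write.
     *
     * @throws Exception if the attribute cannot be written.
     */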
    @SuppressWarnings({"rawtypes", "unchecked"})
    public void writeMetadata(Object info) throws Exception
    {
        log.trace("writeMetadata(): start");

        // only attribute metadata is supported.
        if (!(info instanceof Attribute)) {
            log.debug("writeMetadata(): Object is not an Attribute");
            log.trace("writeMetadata(): finish");
            return;
        }

        try {
            getFileFormat().writeAttribute(this, (Attribute)info, true);

            if (attributeList == null) {
                attributeList = new Vector();
            }

            attributeList.add(info);
            nAttributes = attributeList.size();
        }
        catch (Exception ex) {
            log.debug("writeMetadata(): failure: ", ex);
        }

        log.trace("writeMetadata(): finish");
    }

    // To do: implementing DataFormat
    public void removeMetadata(Object info) throws HDFException {
        log.trace("removeMetadata(): disabled");
    }

    // implementing DataFormat
    public void updateMetadata(Object info) throws Exception {
        log.trace("updateMetadata(): disabled");
    }

    // Implementing HObject
    @Override
    public int open()
    {
        log.trace("open(): start");

        int id = -1;

        try {
            int index = 0;
            int tag = (int)oid[0];

            log.trace("open(): tag={}", tag);
            if (tag == H4SDS.DFTAG_NDG_NETCDF) {
                index = (int)oid[1]; // HDFLibrary.SDidtoref(id) fails for netCDF
            }
            else {
                index = HDFLibrary.SDreftoindex(sdid, (int)oid[1]);
            }

            id = HDFLibrary.SDselect(sdid, index);
        }
        catch (HDFException ex) {
            log.debug("open(): failure: ", ex);
            id = -1;
        }

        log.trace("open(): finish");
        return id;
    }

    // Implementing HObject
    @Override
    public void close(int id)
    {
        try { HDFLibrary.SDendaccess(id); }
        catch (HDFException ex) { log.debug("close(): failure: ", ex); }
    }

    /**
     * Initializes the H4SDS, such as the dimension sizes of this dataset.
     */
    @Override
    public void init()
    {
        log.trace("init(): start");

        if (rank > 0) {
            log.trace("init(): Already initialized");
            log.trace("init(): finish");
            return; // already called. Initialize only once
        }

        int id = open();
        String[] objName = {""};
        String[] dimName = {""};
        int[] dimInfo = {0, 0, 0};
        int[] sdInfo = {0, 0, 0};
        boolean isUnlimited = false;

        int[] idims = new int[HDFConstants.MAX_VAR_DIMS];
        try {
            HDFLibrary.SDgetinfo(id, objName, idims, sdInfo);
            // mask off the litend bit
            sdInfo[1] = sdInfo[1] & (~HDFConstants.DFNT_LITEND);
            nAttributes = sdInfo[2];
            rank = sdInfo[0];

            if (rank <= 0) {
                rank = 1;
                idims[0] = 1;
            }

            isUnlimited = HDFLibrary.SDisrecord(id);
            log.trace("init(): isUnlimited={}", isUnlimited);

            datatypeID = sdInfo[1];
            isText = ((datatypeID == HDFConstants.DFNT_CHAR) || (datatypeID == HDFConstants.DFNT_UCHAR8));

            // get the dimension names
            try {
                dimNames = new String[rank];
                for (int i=0; i<rank; i++) {
                    int dimid = HDFLibrary.SDgetdimid(id, i);
                    HDFLibrary.SDdiminfo(dimid, dimName, dimInfo);
                    dimNames[i] = dimName[0];
                }
            }
            catch (Exception ex) {
                log.debug("init(): get the dimension names: ", ex);
            }

            // get compression information
            try {
                HDFCompInfo compInfo = new HDFCompInfo();

                HDFLibrary.SDgetcompinfo(id, compInfo);
                if (compInfo.ctype == HDFConstants.COMP_CODE_DEFLATE) {
                    HDFDeflateCompInfo comp = new HDFDeflateCompInfo();
                    HDFLibrary.SDgetcompinfo(id, comp);
                    compression = "GZIP(level="+comp.level+")";
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_SZIP) {
                    HDFSZIPCompInfo comp = new HDFSZIPCompInfo();
                    HDFLibrary.SDgetcompinfo(id, comp);
                    compression = "SZIP(bits_per_pixel="+comp.bits_per_pixel+",options_mask="+comp.options_mask+
                                  ",pixels="+comp.pixels+",pixels_per_block="+comp.pixels_per_block+
                                  ",pixels_per_scanline="+comp.pixels_per_scanline+")";
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_JPEG) {
                    HDFJPEGCompInfo comp = new HDFJPEGCompInfo();
                    HDFLibrary.SDgetcompinfo(id, comp);
                    compression = "JPEG(quality="+comp.quality+
                                  ",force_baseline="+comp.force_baseline+")";
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_SKPHUFF) {
                    HDFSKPHUFFCompInfo comp = new HDFSKPHUFFCompInfo();
                    HDFLibrary.SDgetcompinfo(id, comp);
                    compression = "SKPHUFF(skp_size="+comp.skp_size+")";
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_RLE) {
                    compression = "RLE";
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_NBIT) {
                    HDFNBITCompInfo comp = new HDFNBITCompInfo();
                    HDFLibrary.SDgetcompinfo(id, comp);
                    compression = "NBIT(nt="+comp.nt+",bit_len="+comp.bit_len+",ctype="+comp.ctype+
                                  ",fill_one="+comp.fill_one+",sign_ext="+comp.sign_ext+
                                  ",start_bit="+comp.start_bit+")";
                }
            }
            catch (Exception ex) {
                log.debug("init(): get compression information failure: ", ex);
            }

            // get chunk information
            try {
                HDFChunkInfo chunkInfo = new HDFChunkInfo();
                int[] cflag = {HDFConstants.HDF_NONE};

                try {
                    HDFLibrary.SDgetchunkinfo(id, chunkInfo, cflag);
                }
                catch (Throwable ex) {
                    log.debug("init(): SDgetchunkinfo failure: ", ex);
                }

                if (cflag[0] == HDFConstants.HDF_NONE) {
                    chunkSize = null;
                    storage_layout = "NONE";
                }
                else {
                    chunkSize = new long[rank];
                    for (int i=0; i<rank; i++) {
                        chunkSize[i] = chunkInfo.chunk_lengths[i];
                    }
                    storage_layout = "CHUNKED: " + String.valueOf(chunkSize[0]);
                    for (int i = 1; i < rank; i++) {
                        storage_layout += " X " + chunkSize[i];
                    }
                }
            }
            catch (Exception ex) {
                log.debug("init(): get chunk information failure: ", ex);
            }

        }
        catch (HDFException ex) {
            log.debug("init(): failure: ", ex);
        }
        finally {
            close(id);
        }
        isUnsigned = H4Datatype.isUnsigned(datatypeID);

        if (idims == null) {
            log.debug("init(): idims is null");
            log.trace("init(): finish");
            return;
        }

        dims = new long[rank];
        maxDims = new long[rank];
        startDims = new long[rank];
        selectedDims = new long[rank];

        for (int i=0; i<rank; i++) {
            startDims[i] = 0;
            selectedDims[i] = 1;
            dims[i] = maxDims[i] = idims[i];
        }

        if (isUnlimited)
            maxDims[0] = -1;

        selectedIndex[0] = 0;
        selectedIndex[1] = 1;
        selectedIndex[2] = 2;

        // select no more than two dimensions at a time
        if (rank == 1) {
            selectedDims[0] = dims[0];
        }

        if (rank > 1) {
            selectedDims[0] = dims[0];
            if (isText) {
                selectedDims[1] = 1;
            }
            else {
                selectedDims[1] = dims[1];
            }
        }

        log.trace("init(): finish");
    }

    // Implementing ScalarDS
    @Override
    public byte[][] getPalette()
    {
        return palette;
    }

    /**
     * Creates a new dataset.
     *
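     * <p>
     * A minimal usage sketch is shown below. It assumes an open H4File and a
     * parent group <code>pgroup</code> obtained from it; the names and values
     * are illustrative only.
     * <pre>
     *     long[] dims = {20, 10};
     *     int[] data = new int[20*10]; // values to write
     *     // a 4-byte signed integer datatype; see hdf.object.Datatype for the constants
     *     Datatype dtype = new H4Datatype(Datatype.CLASS_INTEGER, 4, Datatype.NATIVE, Datatype.SIGN_2);
     *     H4SDS sds = H4SDS.create("sds1", pgroup, dtype, dims, null, null, 0, data);
     * </pre>
     *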
     * @param name the name of the dataset to create.
     * @param pgroup the parent group of the new dataset.
     * @param type the datatype of the dataset.
     * @param dims the dimension sizes of the dataset.
     * @param maxdims the max dimension sizes of the dataset.
     * @param chunks the chunk sizes of the dataset.
     * @param gzip the level of the gzip compression.
     * @param fillValue the default value.
     * @param data the array of data values.
     *
     * @return the new dataset if successful. Otherwise returns null.
     *
     * @throws Exception if the dataset cannot be created
     */
    public static H4SDS create(
        String name,
        Group pgroup,
        Datatype type,
        long[] dims,
        long[] maxdims,
        long[] chunks,
        int gzip,
        Object fillValue,
        Object data) throws Exception
    {
        log.trace("create(): start");

        H4SDS dataset = null;
        if ((pgroup == null) ||
            (name == null)||
            (dims == null)) {
            log.debug("create(): Parent group, name or dims is null");
            log.trace("create(): finish");
            return null;
        }

        H4File file = (H4File)pgroup.getFileFormat();

        if (file == null) {
            log.debug("create(): Parent group FileFormat is null");
            log.trace("create(): finish");
            return null;
        }

        String path = HObject.separator;
        if (!pgroup.isRoot()) {
            path = pgroup.getPath()+pgroup.getName()+HObject.separator;
        }
        // prepare the dataspace
        // int tsize = 1;
        int rank = dims.length;
        int idims[] = new int[rank];
        int start[] = new int[rank];
        for (int i=0; i<rank; i++) {
            idims[i] = (int)dims[i];
            start[i] = 0;
            // tsize *= idims[i];
        }

        // only the first element of the SDcreate parameter dim_sizes (i.e.,
        // the dimension of the lowest rank or the slowest-changing dimension)
        // can be assigned the value SD_UNLIMITED (or 0) to make the first
        // dimension unlimited.
        if ((maxdims != null) && (maxdims[0]<=0)) {
            idims[0] = 0; // set to unlimited dimension.
        }

        int ichunks[] = null;
        if (chunks != null) {
            ichunks = new int[rank];
            for (int i=0; i<rank; i++) {
                ichunks[i] = (int)chunks[i];
            }
        }

        // unlimited cannot be used with chunking or compression for HDF 4.2.6 or earlier.
        if (idims[0] == 0 && (ichunks != null || gzip>0)) {
            log.debug("create(): Unlimited cannot be used with chunking or compression");
            log.trace("create(): finish");
            throw new HDFException("Unlimited cannot be used with chunking or compression");
        }

        int sdid = file.getSDAccessID();
        int sdsid = -1;
        int vgid = -1;
        // datatype
        int tid = type.toNative();

        if (tid >= 0) {
            try {
                sdsid = HDFLibrary.SDcreate(sdid, name, tid, rank, idims);
                // set fill value to zero.
                int vsize = HDFLibrary.DFKNTsize(tid);
                byte[] fill = new byte[vsize];
                for (int i=0; i<vsize; i++) {
                    fill[i] = 0;
                }
                HDFLibrary.SDsetfillvalue(sdsid, fill);

                // when we create a new dataset with an unlimited dimension,
                // we have to write some data into the dataset, otherwise
                // the dataset has zero dimension size.

                // the following lines are commented out because SDwritedata fails
                // when writing into a zero-dimension array (05/25/05); it is not
                // clear why the code was added in the first place.
                /**
                if (idims[0] == 0 && data == null)
                {
                    idims[0] = (int)dims[0];
                    data = new byte[tsize*vsize];
                }
                */

            }
            catch (Exception ex) {
                log.debug("create(): failure: ", ex);
                log.trace("create(): finish");
                throw (ex);
            }
        }

        if (sdsid < 0) {
            log.debug("create(): Dataset creation failed");
            log.trace("create(): finish");
            throw (new HDFException("Unable to create the new dataset."));
        }

        HDFDeflateCompInfo compInfo = null;
        if (gzip > 0) {
            // set compression
            compInfo = new HDFDeflateCompInfo();
            compInfo.level = gzip;
            if (chunks == null)
                HDFLibrary.SDsetcompress(sdsid, HDFConstants.COMP_CODE_DEFLATE, compInfo);
        }

        if (chunks != null) {
            // set chunk
            HDFChunkInfo chunkInfo = new HDFChunkInfo(ichunks);
            int flag = HDFConstants.HDF_CHUNK;

            if (gzip > 0) {
                flag = HDFConstants.HDF_CHUNK | HDFConstants.HDF_COMP;
                chunkInfo = new HDFChunkInfo(ichunks, HDFConstants.COMP_CODE_DEFLATE, compInfo);
            }

            try {
                HDFLibrary.SDsetchunk(sdsid, chunkInfo, flag);
            }
            catch (Throwable err) {
                log.debug("create(): SDsetchunk failure: ", err);
                log.trace("create(): finish");
                throw new HDFException("SDsetchunk failed.");
            }
        }

        if ((sdsid > 0) && (data != null)) {
            HDFLibrary.SDwritedata(sdsid, start, null, idims, data);
        }

        int ref = HDFLibrary.SDidtoref(sdsid);

        if (!pgroup.isRoot()) {
            // add the dataset to the parent group
            vgid = pgroup.open();
            if (vgid < 0)
            {
                if (sdsid > 0) {
                    HDFLibrary.SDendaccess(sdsid);
                }
                log.debug("create(): Invalid Parent Group ID");
                log.trace("create(): finish");
                throw (new HDFException("Unable to open the parent group."));
            }

            HDFLibrary.Vaddtagref(vgid, HDFConstants.DFTAG_NDG, ref);

            pgroup.close(vgid);
        }

        try {
            if (sdsid > 0) {
                HDFLibrary.SDendaccess(sdsid);
            }
        }
        catch (Exception ex) {
            log.debug("create(): SDendaccess failure: ", ex);
        }

        long[] oid = {HDFConstants.DFTAG_NDG, ref};
        dataset = new H4SDS(file, name, path, oid);

        pgroup.addToMemberList(dataset);

        log.trace("create(): finish");
        return dataset;
    }

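    /**
     * Convenience overload of
     * {@link #create(String, Group, Datatype, long[], long[], long[], int, Object, Object)}
     * that passes a null fillValue.
     *
     * @return the new dataset if successful; otherwise null.
     *
     * @throws Exception if the dataset cannot be created.
     */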
    public static H4SDS create(
            String name,
            Group pgroup,
            Datatype type,
            long[] dims,
            long[] maxdims,
            long[] chunks,
            int gzip,
            Object data) throws Exception
    {
        return create(name, pgroup, type, dims, maxdims, chunks, gzip, null, data);
    }

    /**
     * Copies all attributes from one SDS to another.
     *
     * @param srcdid the identifier of the source SDS.
     * @param dstdid the identifier of the destination SDS.
     */
    private void copyAttribute(int srcdid, int dstdid)
    {
        log.trace("copyAttribute(): start: srcdid={} dstdid={}", srcdid, dstdid);
        try {
            String[] objName = {""};
            int[] sdInfo = {0, 0, 0};
            int[] tmpDim = new int[HDFConstants.MAX_VAR_DIMS];
            HDFLibrary.SDgetinfo(srcdid, objName, tmpDim, sdInfo);
            int numberOfAttributes = sdInfo[2];
            log.trace("copyAttribute(): numberOfAttributes={}", numberOfAttributes);

            boolean b = false;
            String[] attrName = new String[1];
            int[] attrInfo = {0, 0};
            for (int i=0; i<numberOfAttributes; i++) {
                attrName[0] = "";
                try {
                    b = HDFLibrary.SDattrinfo(srcdid, i, attrName, attrInfo);
                }
                catch (HDFException ex) {
                    log.debug("copyAttribute(): attribute[{}] SDattrinfo failure: ", i, ex);
                    b = false;
                }

                if (!b) {
                    continue;
                }

                // read attribute data from the source dataset
                byte[] attrBuff = new byte[attrInfo[1] * HDFLibrary.DFKNTsize(attrInfo[0])];
                try {
                    HDFLibrary.SDreadattr(srcdid, i, attrBuff);
                }
                catch (HDFException ex) {
                    log.debug("copyAttribute(): attribute[{}] SDreadattr failure: ", i, ex);
                    attrBuff = null;
                }

                if (attrBuff == null) {
                    log.debug("copyAttribute(): attrBuff[{}] is null", i);
                    log.trace("copyAttribute(): continue");
                    continue;
                }

                // attach the attribute to the destination dataset
                HDFLibrary.SDsetattr(dstdid, attrName[0], attrInfo[0], attrInfo[1], attrBuff);
            } // for (int i=0; i<numberOfAttributes; i++)
        }
        catch (Exception ex) {
            log.debug("copyAttribute(): failure: ", ex);
        }

        log.trace("copyAttribute(): finish");
    }

    // Implementing DataFormat
    @SuppressWarnings("rawtypes")
    public List getMetadata(int... attrPropList) throws Exception {
        throw new UnsupportedOperationException("getMetadata(int... attrPropList) is not supported");
    }
}