/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the files COPYING and Copyright.html. *
 * COPYING can be found at the root of the source code distribution tree.    *
 * Or, see https://support.hdfgroup.org/products/licenses.html               *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.h4;

import java.util.List;
import java.util.Vector;

import hdf.hdflib.HDFChunkInfo;
import hdf.hdflib.HDFCompInfo;
import hdf.hdflib.HDFConstants;
import hdf.hdflib.HDFDeflateCompInfo;
import hdf.hdflib.HDFException;
import hdf.hdflib.HDFJPEGCompInfo;
import hdf.hdflib.HDFLibrary;
import hdf.hdflib.HDFNBITCompInfo;
import hdf.hdflib.HDFSKPHUFFCompInfo;
import hdf.hdflib.HDFSZIPCompInfo;
import hdf.object.Attribute;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.ScalarDS;
/**
 * H4SDS describes HDF4 Scientific Data Sets (SDS) and operations performed on
 * the SDS. An SDS is a group of data structures used to store and describe
 * multidimensional arrays of scientific data.
 * <p>
 * The data contained in an SDS array has a data type associated with it. The
 * standard data types supported by the SD interface include 32- and 64-bit
 * floating-point numbers, 8-, 16- and 32-bit signed integers, 8-, 16- and
 * 32-bit unsigned integers, and 8-bit characters.
 * <p>
 * <b>How to Select a Subset</b>
 * <p>
 * Dataset defines APIs for reading, writing and subsetting a dataset. No function
 * is defined to select a subset of a data array. The selection is done in an
 * implicit way. Function calls to dimension information such as getSelectedDims()
 * return an array of dimension values, which is a reference to the array in the
 * dataset object. Changes to the array outside the dataset object directly change
 * the values of the array in the dataset object. It works like a pointer in C.
 * <p>
 *
 * The following is an example of how to make a subset. In the example, the dataset
 * is a 4-dimensional array of size [200][100][50][10], i.e.
 * dims[0]=200; dims[1]=100; dims[2]=50; dims[3]=10; <br>
 * We want to select every other data point in dims[1] and dims[2].
 * <pre>
     int rank = dataset.getRank();   // number of dimensions of the dataset
     long[] dims = dataset.getDims(); // the dimension sizes of the dataset
     long[] selected = dataset.getSelectedDims(); // the selected size of the dataset
     long[] start = dataset.getStartDims(); // the offset of the selection
     long[] stride = dataset.getStride(); // the stride of the dataset
     int[]  selectedIndex = dataset.getSelectedIndex(); // the selected dimensions for display

     // select dim1 and dim2 as 2D data for display, and slice through dim0
     selectedIndex[0] = 1;
     selectedIndex[1] = 2;
     selectedIndex[2] = 0;

     // reset the selection arrays
     for (int i=0; i&lt;rank; i++) {
         start[i] = 0;
         selected[i] = 1;
         stride[i] = 1;
     }

     // set the stride to 2 on dim1 and dim2 so that every other data point is selected.
     stride[1] = 2;
     stride[2] = 2;

     // set the selection size of dim1 and dim2
     selected[1] = dims[1]/stride[1];
     selected[2] = dims[2]/stride[2];

     // when dataset.read() is called, the selection above will be used since
     // the dimension arrays are passed by reference. Changes to these arrays
     // outside the dataset object directly change the values of these arrays
     // in the dataset object.

 * </pre>
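 *
 * A minimal sketch of reading the subset selected above (the file name and
 * dataset path here are illustrative assumptions, not part of this class):
 * <pre>
     H4File file = new H4File("hdf4_test.hdf", FileFormat.READ);
     file.open();
     H4SDS dataset = (H4SDS) file.get("/ndg");
     dataset.init();               // builds rank, dims and the default selection
     // ... adjust start, stride and selected as shown above ...
     Object data = dataset.read(); // reads only the selected subset
     file.close();
 * </pre>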
 *
 * <p>
 * @version 1.1 9/4/2007
 * @author Peter X. Cao
 */
public class H4SDS extends ScalarDS
{
    private static final long serialVersionUID = 2557157923292438696L;

    private static final org.slf4j.Logger   log = org.slf4j.LoggerFactory.getLogger(H4SDS.class);

    /** tag for netCDF datasets.
     *  HDF4 library supports netCDF version 2.3.2. It only supports SDS APIs.
     */
    // magic number for netCDF: "C(67) D(68) F(70) '\001'"
    public static final int                 DFTAG_NDG_NETCDF = 67687001;

    /**
     * The list of attributes of this data object. Members of the list are
     * instances of Attribute.
     */
    @SuppressWarnings("rawtypes")
    private List                            attributeList;

    /**
     * The SDS interface identifier obtained from SDstart(filename, access)
     */
    private long                            sdid;

    /** the datatype identifier */
    private long                            datatypeID = -1;

    private int                             nAttributes = -1;

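    /**
     * Creates an H4SDS object with specific name and path, without a unique
     * object identifier (oid).
     *
     * @param theFile
     *            the HDF file.
     * @param name
     *            the name of this H4SDS.
     * @param path
     *            the full path of this H4SDS.
     */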
    public H4SDS(FileFormat theFile, String name, String path)
    {
        this(theFile, name, path, null);
    }

    /**
     * Creates an H4SDS object with specific name and path.
     *
     * @param theFile
     *            the HDF file.
     * @param name
     *            the name of this H4SDS.
     * @param path
     *            the full path of this H4SDS.
     * @param oid
     *            the unique identifier of this data object.
     */
    @SuppressWarnings("deprecation")
    public H4SDS(FileFormat theFile, String name, String path, long[] oid)
    {
        super(theFile, name, path, oid);
        unsignedConverted = false;
        sdid = ((H4File)getFileFormat()).getSDAccessID();
    }
    /*
     * (non-Javadoc)
     * @see hdf.object.DataFormat#hasAttribute()
     */
    @Override
    public boolean hasAttribute ()
    {
        if (nAttributes < 0) {
            sdid = ((H4File)getFileFormat()).getSDAccessID();

            long id = open();

            if (id >= 0) {
                try { // retrieve attributes of the dataset
                    String[] objName = {""};
                    int[] sdInfo = {0, 0, 0};
                    int[] tmpDim = new int[HDFConstants.MAX_VAR_DIMS];
                    HDFLibrary.SDgetinfo(id, objName, tmpDim, sdInfo);
                    nAttributes = sdInfo[2];
                }
                catch (Exception ex) {
                    log.debug("hasAttribute(): failure: ", ex);
                    nAttributes = 0;
                }

                log.trace("hasAttribute(): nAttributes={}", nAttributes);

                close(id);
            }
        }

        return (nAttributes > 0);
    }

    // ***** need to implement from ScalarDS *****
    @Override
    public byte[][] readPalette(int idx) { return null; }

    // ***** need to implement from ScalarDS *****
    @Override
    public byte[] getPaletteRefs() { return null; }

    // implementing Dataset
    @Override
    public Datatype getDatatype()
    {
        if (!inited)
            init();

        if (datatype == null) {
            try {
                datatype = new H4Datatype(datatypeID);
            }
            catch (Exception ex) {
                log.debug("getDatatype(): failed to create datatype: ", ex);
                datatype = null;
            }
        }

        return datatype;
    }

    // To do: Implementing Dataset
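    /**
     * Copies this dataset (data and attributes) into the given parent group,
     * creating a new SDS named dname with the given dimension sizes.
     *
     * @param pgroup the parent group of the copy.
     * @param dname the name of the copy; if null, the name of this dataset is used.
     * @param dims the dimension sizes of the copy; if null, the dimensions of this dataset are used.
     * @param buff the data to write into the copy; if null, the data is read from this dataset.
     *
     * @return the new dataset if successful; otherwise returns null.
     *
     * @throws Exception if the copy fails
     */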
    @Override
    public Dataset copy(Group pgroup, String dname, long[] dims, Object buff)
            throws Exception
    {
        log.trace("copy(): start: parentGroup={} datasetName={}", pgroup, dname);

        Dataset dataset = null;
        long srcdid = -1;
        long dstdid = -1;
        long tid = -1;
        int size = 1;
        int theRank = 2;
        String path = null;
        int[] count = null;
        int[] start = null;

        if (pgroup == null) {
            log.debug("copy(): Parent group is null");
            return null;
        }

        if (dname == null) {
            dname = getName();
        }

        if (pgroup.isRoot()) {
            path = HObject.SEPARATOR;
        }
        else {
            path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR;
        }
        log.trace("copy(): path={}", path);

        srcdid = open();
        if (srcdid < 0) {
            log.debug("copy(): Invalid source SDID");
            return null;
        }

        if (dims == null) {
            if (!isInited())
                init();

            theRank = getRank();

            dims = getDims();
        }
        else {
            theRank = dims.length;
        }

        start = new int[theRank];
        count = new int[theRank];
        for (int i=0; i<theRank; i++) {
            start[i] = 0;
            count[i] = (int)dims[i];
            size *= count[i];
        }
        log.trace("copy(): theRank={} with size={}", theRank, size);

        // create the new dataset and attach it to the parent group
        tid = datatypeID;
        dstdid = HDFLibrary.SDcreate(
                ((H4File)pgroup.getFileFormat()).getSDAccessID(),
                dname, tid, theRank, count);
        if (dstdid < 0) {
            log.debug("copy(): Invalid dest SDID");
            return null;
        }

        int ref = HDFLibrary.SDidtoref(dstdid);
        if (!pgroup.isRoot()) {
            long vgid = pgroup.open();
            HDFLibrary.Vaddtagref(vgid, HDFConstants.DFTAG_NDG, ref);
            pgroup.close(vgid);
        }

        // copy attributes from one object to the new object
        log.trace("copy(): copy attributes");
        copyAttribute(srcdid, dstdid);

        // read data from the source dataset
        log.trace("copy(): read data from the source dataset");
        if (buff == null) {
            buff = new byte[size * HDFLibrary.DFKNTsize(tid)];
            HDFLibrary.SDreaddata(srcdid, start, null, count, buff);
        }

        // write the data into the destination dataset
        log.trace("copy(): write the data into the destination dataset");
        HDFLibrary.SDwritedata(dstdid, start, null, count, buff);

        long[] oid = {HDFConstants.DFTAG_NDG, ref};
        dataset = new H4SDS(pgroup.getFileFormat(), dname, path, oid);

        pgroup.addToMemberList(dataset);

        close(srcdid);

        try {
            HDFLibrary.SDendaccess(dstdid);
        }
        catch (HDFException ex) {
            log.debug("copy(): SDendaccess failure: ", ex);
        }

        return dataset;
    }

    // Implementing Dataset
    @Override
    public byte[] readBytes() throws HDFException
    {
        byte[] theData = null;

        if (!isInited())
            init();

        long id = open();
        if (id < 0) {
            log.debug("readBytes(): Invalid SDID");
            return null;
        }

        int datasize = 1;
        int[] select = new int[rank];
        int[] start = new int[rank];
        for (int i=0; i<rank; i++) {
            datasize *= (int)selectedDims[i];
            select[i] = (int)selectedDims[i];
            start[i] = (int)startDims[i];
        }

        int[] stride = null;
        if (selectedStride != null) {
            stride = new int[rank];
            for (int i=0; i<rank; i++) {
                stride[i] = (int)selectedStride[i];
            }
        }

        try {
            int size = HDFLibrary.DFKNTsize(datatypeID) * datasize;
            theData = new byte[size];
            HDFLibrary.SDreaddata(id, start, stride, select, theData);
        }
        catch (Exception ex) {
            log.debug("readBytes(): failure: ", ex);
        }
        finally {
            close(id);
        }

        return theData;
    }

    // Implementing DataFormat
    @Override
    public Object read() throws HDFException, OutOfMemoryError
    {
        Object theData = null;

        if (!isInited())
            init();

        long id = open();
        if (id < 0) {
            log.debug("read(): Invalid SDID");
            return null;
        }

        int datasize = 1;
        int[] select = new int[rank];
        int[] start = new int[rank];
        for (int i=0; i<rank; i++) {
            datasize *= (int)selectedDims[i];
            select[i] = (int)selectedDims[i];
            start[i] = (int)startDims[i];
        }

        int[] stride = null;
        if (selectedStride != null) {
            stride = new int[rank];
            for (int i=0; i<rank; i++) {
                stride[i] = (int)selectedStride[i];
            }
        }

        try {
            theData = H4Datatype.allocateArray(datatypeID, datasize);

            if (theData != null) {
                // assume external data files are located in the same directory as the main file.
                HDFLibrary.HXsetdir(getFileFormat().getParent());

                HDFLibrary.SDreaddata(id, start, stride, select, theData);

                if (isText) {
                    theData = byteToString((byte[])theData, select[0]);
                }
            }
        }
        catch (Exception ex) {
            log.debug("read(): failure: ", ex);
        }
        finally {
            close(id);
        }

        if (fillValue == null && isImageDisplay) {
            try {
                getMetadata(); // need to set fillValue for images
            }
            catch (Exception ex) {
                log.debug("read(): getMetadata failure: ", ex);
            }
        }

        if ((rank > 1) && (selectedIndex[0] > selectedIndex[1]))
            isDefaultImageOrder = false;
        else
            isDefaultImageOrder = true;

        log.trace("read(): isDefaultImageOrder={}", isDefaultImageOrder);
        return theData;
    }

    // Implementing DataFormat
    @SuppressWarnings("deprecation")
    @Override
    public void write(Object buf) throws HDFException
    {
        if (buf == null) {
            log.debug("write(): Object is null");
            return;
        }

        long id = open();
        if (id < 0) {
            log.debug("write(): Invalid SDID");
            return;
        }

        int[] select = new int[rank];
        int[] start = new int[rank];
        for (int i=0; i<rank; i++) {
            select[i] = (int)selectedDims[i];
            start[i] = (int)startDims[i];
        }

        int[] stride = null;
        if (selectedStride != null) {
            stride = new int[rank];
            for (int i=0; i<rank; i++) {
                stride[i] = (int)selectedStride[i];
            }
        }

        Object tmpData = buf;
        try {
            if (getDatatype().isUnsigned() && unsignedConverted) {
                tmpData = convertToUnsignedC(buf);
            }
            // assume external data files are located in the same directory as the main file.
            HDFLibrary.HXsetdir(getFileFormat().getParent());

            HDFLibrary.SDwritedata(id, start, stride, select, tmpData);
        }
        catch (Exception ex) {
            log.debug("write(): failure: ", ex);
        }
        finally {
            tmpData = null;
            close(id);
        }
    }

    // Implementing DataFormat
    @Override
    @SuppressWarnings({"rawtypes", "unchecked"})
    public List getMetadata() throws HDFException
    {
        if (attributeList != null) {
            log.trace("getMetadata(): attributeList != null");
            return attributeList;
        }

        long id = open();
        String[] objName = {""};
        int[] sdInfo = {0, 0, 0};
        try {
            // retrieve attributes of the dataset
            int[] tmpDim = new int[HDFConstants.MAX_VAR_DIMS];
            HDFLibrary.SDgetinfo(id, objName, tmpDim, sdInfo);
            int n = sdInfo[2];

            if ((attributeList == null) && (n > 0)) {
                attributeList = new Vector(n, 5);
            }

            boolean b = false;
            String[] attrName = new String[1];
            int[] attrInfo = {0, 0};
            for (int i=0; i<n; i++) {
                attrName[0] = "";
                try {
                    b = HDFLibrary.SDattrinfo(id, i, attrName, attrInfo);
                    // mask off the litend bit
                    attrInfo[0] = attrInfo[0] & (~HDFConstants.DFNT_LITEND);
                }
                catch (HDFException ex) {
                    log.debug("getMetadata(): attribute[{}] SDattrinfo failure: ", i, ex);
                    b = false;
                }

                if (!b) {
                    continue;
                }

                long[] attrDims = {attrInfo[1]};
                Attribute attr = new Attribute(this, attrName[0], new H4Datatype(attrInfo[0]), attrDims);
                attributeList.add(attr);

                Object buf = null;
                try {
                    buf = H4Datatype.allocateArray(attrInfo[0], attrInfo[1]);
                }
                catch (OutOfMemoryError e) {
                    log.debug("getMetadata(): out of memory: ", e);
                    buf = null;
                }

                try {
                    HDFLibrary.SDreadattr(id, i, buf);
                }
                catch (HDFException ex) {
                    log.debug("getMetadata(): attribute[{}] SDreadattr failure: ", i, ex);
                    buf = null;
                }

                if (buf != null) {
                    if ((attrInfo[0] == HDFConstants.DFNT_CHAR) ||
                        (attrInfo[0] == HDFConstants.DFNT_UCHAR8)) {
                        buf = Dataset.byteToString((byte[])buf, attrInfo[1]);
                    }
                    else if (attrName[0].equalsIgnoreCase("fillValue") ||
                             attrName[0].equalsIgnoreCase("_fillValue")) {
                        fillValue = buf;
                    }

                    attr.setData(buf);
                }
            } // (int i=0; i<n; i++)

            // retrieve attributes of the dimensions
            // BUG !! HDFLibrary.SDgetdimstrs(dimID, argv, 80) does not return anything
            // for (int i=0; i<rank; i++) {
            //     int dimID = HDFLibrary.SDgetdimid(id, i);
            //     String[] argv = {" ", " ", " "};
            //     HDFLibrary.SDgetdimstrs(dimID, argv, 80);
            // }
        }
        catch (Exception ex) {
            log.debug("getMetadata(): failure: ", ex);
        }
        finally {
            close(id);
        }

        return attributeList;
    }

    // To do: implementing DataFormat
    @Override
    @SuppressWarnings({"rawtypes", "unchecked"})
    public void writeMetadata(Object info) throws Exception
    {
        // only attribute metadata is supported.
        if (!(info instanceof Attribute)) {
            log.debug("writeMetadata(): Object not an Attribute");
            return;
        }

        try {
            getFileFormat().writeAttribute(this, (Attribute)info, true);

            if (attributeList == null) {
                attributeList = new Vector();
            }

            attributeList.add(info);
            nAttributes = attributeList.size();
        }
        catch (Exception ex) {
            log.trace("writeMetadata(): failure: ", ex);
        }
    }

    // To do: implementing DataFormat
    @Override
    public void removeMetadata(Object info) throws HDFException {
        log.trace("removeMetadata(): disabled");
    }

    // implementing DataFormat
    @Override
    public void updateMetadata(Object info) throws Exception {
        log.trace("updateMetadata(): disabled");
    }

    // Implementing HObject
    @Override
    public long open()
    {
        long id = -1;

        try {
            int index = 0;
            int tag = (int)oid[0];

            log.trace("open(): tag={}", tag);
            if (tag == H4SDS.DFTAG_NDG_NETCDF) {
                index = (int)oid[1]; // HDFLibrary.SDidtoref(id) fails for netCDF
            }
            else {
                index = HDFLibrary.SDreftoindex(sdid, (int)oid[1]);
            }

            id = HDFLibrary.SDselect(sdid, index);
        }
        catch (HDFException ex) {
            log.debug("open(): failure: ", ex);
            id = -1;
        }

        return id;
    }

    // Implementing HObject
    @Override
    public void close(long id)
    {
        try { HDFLibrary.SDendaccess(id); }
        catch (HDFException ex) { log.debug("close(): failure: ", ex); }
    }

    /**
     * Initializes the H4SDS, such as the dimension sizes of this dataset.
     */
    @SuppressWarnings("deprecation")
    @Override
    public void init()
    {
        if (inited) {
            log.trace("init(): Already initialized");
            return; // already called. Initialize only once
        }

        long id = open();
        String[] objName = {""};
        String[] dimName = {""};
        int[] dimInfo = {0, 0, 0};
        int[] sdInfo = {0, 0, 0};
        boolean isUnlimited = false;

        int[] idims = new int[HDFConstants.MAX_VAR_DIMS];
        try {
            HDFLibrary.SDgetinfo(id, objName, idims, sdInfo);
            // mask off the litend bit
            sdInfo[1] = sdInfo[1] & (~HDFConstants.DFNT_LITEND);
            nAttributes = sdInfo[2];
            rank = sdInfo[0];

            if (rank <= 0) {
                rank = 1;
                idims[0] = 1;
            }

            isUnlimited = HDFLibrary.SDisrecord(id);
            log.trace("init(): isUnlimited={}", isUnlimited);

            datatypeID = sdInfo[1];
            isText = ((datatypeID == HDFConstants.DFNT_CHAR) || (datatypeID == HDFConstants.DFNT_UCHAR8));

            // get the dimension names
            try {
                dimNames = new String[rank];
                for (int i=0; i<rank; i++) {
                    long dimid = HDFLibrary.SDgetdimid(id, i);
                    HDFLibrary.SDdiminfo(dimid, dimName, dimInfo);
                    dimNames[i] = dimName[0];
                }
            }
            catch (Exception ex) {
                log.debug("init(): failed to get the dimension names: ", ex);
            }

            // get compression information
            try {
                HDFCompInfo compInfo = new HDFCompInfo();
                HDFLibrary.SDgetcompinfo(id, compInfo);

                compression.setLength(0);

                if (compInfo.ctype == HDFConstants.COMP_CODE_DEFLATE) {
                    HDFDeflateCompInfo comp = new HDFDeflateCompInfo();
                    HDFLibrary.SDgetcompinfo(id, comp);
                    compression.append("GZIP(level=").append(comp.level).append(")");
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_SZIP) {
                    HDFSZIPCompInfo comp = new HDFSZIPCompInfo();
                    HDFLibrary.SDgetcompinfo(id, comp);
                    compression.append("SZIP(bits_per_pixel=").append(comp.bits_per_pixel).append(",options_mask=")
                            .append(comp.options_mask).append(",pixels=").append(comp.pixels).append(",pixels_per_block=")
                            .append(comp.pixels_per_block).append(",pixels_per_scanline=").append(comp.pixels_per_scanline).append(")");
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_JPEG) {
                    HDFJPEGCompInfo comp = new HDFJPEGCompInfo();
                    HDFLibrary.SDgetcompinfo(id, comp);
                    compression.append("JPEG(quality=").append(comp.quality)
                            .append(",force_baseline=").append(comp.force_baseline).append(")");
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_SKPHUFF) {
                    HDFSKPHUFFCompInfo comp = new HDFSKPHUFFCompInfo();
                    HDFLibrary.SDgetcompinfo(id, comp);
                    compression.append("SKPHUFF(skp_size=").append(comp.skp_size).append(")");
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_RLE) {
                    compression.append("RLE");
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_NBIT) {
                    HDFNBITCompInfo comp = new HDFNBITCompInfo();
                    HDFLibrary.SDgetcompinfo(id, comp);
                    compression.append("NBIT(nt=").append(comp.nt).append(",bit_len=").append(comp.bit_len)
                            .append(",ctype=").append(comp.ctype).append(",fill_one=").append(comp.fill_one)
                            .append(",sign_ext=").append(comp.sign_ext).append(",start_bit=").append(comp.start_bit).append(")");
                }

                if (compression.length() == 0)
                    compression.append("NONE");
            }
            catch (Exception ex) {
                log.debug("init(): get compression information failure: ", ex);
            }

            // get chunk information
            try {
                HDFChunkInfo chunkInfo = new HDFChunkInfo();
                int[] cflag = {HDFConstants.HDF_NONE};

                try {
                    HDFLibrary.SDgetchunkinfo(id, chunkInfo, cflag);
                }
                catch (Exception ex) {
                    log.debug("init(): SDgetchunkinfo failure: ", ex);
                }

                storageLayout.setLength(0);

                if (cflag[0] == HDFConstants.HDF_NONE) {
                    chunkSize = null;
                    storageLayout.append("NONE");
                }
                else {
                    chunkSize = new long[rank];
                    for (int i=0; i<rank; i++) {
                        chunkSize[i] = chunkInfo.chunk_lengths[i];
                    }
                    storageLayout.append("CHUNKED: ").append(chunkSize[0]);
                    for (int i = 1; i < rank; i++) {
                        storageLayout.append(" X ").append(chunkSize[i]);
                    }
                }
            }
            catch (Exception ex) {
                log.debug("init(): get chunk information failure: ", ex);
            }

            inited = true;
        }
        catch (HDFException ex) {
            log.debug("init(): failure: ", ex);
        }
        finally {
            close(id);
        }

        dims = new long[rank];
        maxDims = new long[rank];
        startDims = new long[rank];
        selectedDims = new long[rank];

        for (int i=0; i<rank; i++) {
            startDims[i] = 0;
            selectedDims[i] = 1;
            dims[i] = maxDims[i] = idims[i];
        }

        if (isUnlimited)
            maxDims[0] = -1;

        selectedIndex[0] = 0;
        selectedIndex[1] = 1;
        selectedIndex[2] = 2;

        // select only two dimensions at a time
        if (rank == 1) {
            selectedDims[0] = dims[0];
        }

        if (rank > 1) {
            selectedDims[0] = dims[0];
            if (isText) {
                selectedDims[1] = 1;
            }
            else {
                selectedDims[1] = dims[1];
            }
        }
    }

    // Implementing ScalarDS
    @Override
    public byte[][] getPalette()
    {
        return palette;
    }

    /**
     * Creates a new dataset.
     *
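     * <p>
     * A minimal usage sketch (a sketch only; the name and sizes below are
     * illustrative assumptions, and pgroup is a group in an open H4File):
     * <pre>
     long[] dims = {20, 10};
     Datatype dtype = new H4Datatype(Datatype.CLASS_INTEGER, 4, Datatype.NATIVE, Datatype.SIGN_2);
     H4SDS sds = H4SDS.create("ints", pgroup, dtype, dims, null, null, 0, null, null);
     * </pre>
     *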
     * @param name the name of the dataset to create.
     * @param pgroup the parent group of the new dataset.
     * @param type the datatype of the dataset.
     * @param dims the dimension size of the dataset.
     * @param maxdims the max dimension size of the dataset.
     * @param chunks the chunk size of the dataset.
     * @param gzip the level of the gzip compression.
     * @param fillValue the default value.
     * @param data the array of data values.
     *
     * @return the new dataset if successful. Otherwise returns null.
     *
     * @throws Exception if the dataset can not be created
     */
    public static H4SDS create(
        String name,
        Group pgroup,
        Datatype type,
        long[] dims,
        long[] maxdims,
        long[] chunks,
        int gzip,
        Object fillValue,
        Object data) throws Exception
    {
        H4SDS dataset = null;
        if ((pgroup == null) ||
            (name == null) ||
            (dims == null)) {
            log.trace("create(): Parent group, name or dims is null");
            return null;
        }

        H4File file = (H4File)pgroup.getFileFormat();

        if (file == null) {
            log.trace("create(): Parent group FileFormat is null");
            return null;
        }

        String path = HObject.SEPARATOR;
        if (!pgroup.isRoot()) {
            path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR;
        }

        // prepare the dataspace
        int rank = dims.length;
        int[] idims = new int[rank];
        int[] start = new int[rank];
        for (int i=0; i<rank; i++) {
            idims[i] = (int)dims[i];
            start[i] = 0;
        }

        // only the first element of the SDcreate parameter dim_sizes (i.e.,
        // the dimension of the lowest rank or the slowest-changing dimension)
        // can be assigned the value SD_UNLIMITED (or 0) to make the first
        // dimension unlimited.
        if ((maxdims != null) && (maxdims[0] <= 0)) {
            idims[0] = 0; // set to unlimited dimension.
        }

        int[] ichunks = null;
        if (chunks != null) {
            ichunks = new int[rank];
            for (int i=0; i<rank; i++) {
                ichunks[i] = (int)chunks[i];
            }
        }

        // unlimited cannot be used with chunking or compression for HDF 4.2.6 or earlier.
        if (idims[0] == 0 && (ichunks != null || gzip > 0)) {
            log.debug("create(): Unlimited cannot be used with chunking or compression");
            throw new HDFException("Unlimited cannot be used with chunking or compression");
        }

        long sdid = file.getSDAccessID();
        long sdsid = -1;
        long vgid = -1;
        long tid = type.createNative();

        if (tid >= 0) {
            try {
                sdsid = HDFLibrary.SDcreate(sdid, name, tid, rank, idims);

                // set the fill value to zero.
                int vsize = HDFLibrary.DFKNTsize(tid);
                byte[] fill = new byte[vsize];
                for (int i=0; i<vsize; i++) {
                    fill[i] = 0;
                }
                HDFLibrary.SDsetfillvalue(sdsid, fill);

                // when we create a new dataset with an unlimited dimension,
                // we have to write some data into the dataset; otherwise
                // the dataset has a zero dimension size.

                // the following lines are commented out because SDwritedata fails when
                // trying to write data into a zero-dimension array. 05/25/05
                // it is unclear why this code was put here in the first place.
                /**
                if (idims[0] == 0 && data == null)
                {
                    idims[0] = (int)dims[0];
                    data = new byte[tsize*vsize];
                }
                 */
            }
            catch (Exception ex) {
                log.debug("create(): failure: ", ex);
                throw (ex);
            }
        }

        if (sdsid < 0) {
            log.debug("create(): Dataset creation failed");
            throw (new HDFException("Unable to create the new dataset."));
        }

        HDFDeflateCompInfo compInfo = null;
        if (gzip > 0) {
            // set compression
            compInfo = new HDFDeflateCompInfo();
            compInfo.level = gzip;
            if (chunks == null)
                HDFLibrary.SDsetcompress(sdsid, HDFConstants.COMP_CODE_DEFLATE, compInfo);
        }

        if (chunks != null) {
            // set chunk
            HDFChunkInfo chunkInfo = new HDFChunkInfo(ichunks);
            int flag = HDFConstants.HDF_CHUNK;

            if (gzip > 0) {
                flag = HDFConstants.HDF_CHUNK | HDFConstants.HDF_COMP;
                chunkInfo = new HDFChunkInfo(ichunks, HDFConstants.COMP_CODE_DEFLATE, compInfo);
            }

            try {
                HDFLibrary.SDsetchunk(sdsid, chunkInfo, flag);
            }
            catch (Exception err) {
                log.debug("create(): SDsetchunk failure: ", err);
                throw new HDFException("SDsetchunk failed.");
            }
        }

        if ((sdsid > 0) && (data != null)) {
            HDFLibrary.SDwritedata(sdsid, start, null, idims, data);
        }

        int ref = HDFLibrary.SDidtoref(sdsid);

        if (!pgroup.isRoot()) {
            // add the dataset to the parent group
            vgid = pgroup.open();
            if (vgid < 0) {
                if (sdsid > 0) {
                    HDFLibrary.SDendaccess(sdsid);
                }
                log.debug("create(): Invalid Parent Group ID");
                throw (new HDFException("Unable to open the parent group."));
            }

            HDFLibrary.Vaddtagref(vgid, HDFConstants.DFTAG_NDG, ref);

            pgroup.close(vgid);
        }

        try {
            if (sdsid > 0) {
                HDFLibrary.SDendaccess(sdsid);
            }
        }
        catch (Exception ex) {
            log.debug("create(): SDendaccess failure: ", ex);
        }

        long[] oid = {HDFConstants.DFTAG_NDG, ref};
        dataset = new H4SDS(file, name, path, oid);

        pgroup.addToMemberList(dataset);

        return dataset;
    }

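    /**
     * Creates a new dataset with the default fill value.
     * <p>
     * This is a convenience overload of
     * {@link #create(String, Group, Datatype, long[], long[], long[], int, Object, Object)}
     * that passes null for the fill value.
     *
     * @param name the name of the dataset to create.
     * @param pgroup the parent group of the new dataset.
     * @param type the datatype of the dataset.
     * @param dims the dimension size of the dataset.
     * @param maxdims the max dimension size of the dataset.
     * @param chunks the chunk size of the dataset.
     * @param gzip the level of the gzip compression.
     * @param data the array of data values.
     *
     * @return the new dataset if successful. Otherwise returns null.
     *
     * @throws Exception if the dataset can not be created
     */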
    public static H4SDS create(
            String name,
            Group pgroup,
            Datatype type,
            long[] dims,
            long[] maxdims,
            long[] chunks,
            int gzip,
            Object data) throws Exception
    {
        return create(name, pgroup, type, dims, maxdims, chunks, gzip, null, data);
    }

    /**
     * Copies the attributes from one SDS to another SDS.
     *
     * @param srcdid the source SDS identifier
     * @param dstdid the destination SDS identifier
     */
    private void copyAttribute(long srcdid, long dstdid)
    {
        log.trace("copyAttribute(): start: srcdid={} dstdid={}", srcdid, dstdid);
        try {
            String[] objName = {""};
            int[] sdInfo = {0, 0, 0};
            int[] tmpDim = new int[HDFConstants.MAX_VAR_DIMS];
            HDFLibrary.SDgetinfo(srcdid, objName, tmpDim, sdInfo);
            int numberOfAttributes = sdInfo[2];
            log.trace("copyAttribute(): numberOfAttributes={}", numberOfAttributes);

            boolean b = false;
            String[] attrName = new String[1];
            int[] attrInfo = {0, 0};
            for (int i=0; i<numberOfAttributes; i++) {
                attrName[0] = "";
                try {
                    b = HDFLibrary.SDattrinfo(srcdid, i, attrName, attrInfo);
                }
                catch (HDFException ex) {
                    log.debug("copyAttribute(): attribute[{}] SDattrinfo failure: ", i, ex);
                    b = false;
                }

                if (!b) {
                    continue;
                }

                // read attribute data from the source dataset
                byte[] attrBuff = new byte[attrInfo[1] * HDFLibrary.DFKNTsize(attrInfo[0])];
                try {
                    HDFLibrary.SDreadattr(srcdid, i, attrBuff);
                }
                catch (HDFException ex) {
                    log.debug("copyAttribute(): attribute[{}] SDreadattr failure: ", i, ex);
                    attrBuff = null;
                }

                if (attrBuff == null) {
                    log.debug("copyAttribute(): attrBuff[{}] is null", i);
                    continue;
                }

                // attach the attribute to the destination dataset
                HDFLibrary.SDsetattr(dstdid, attrName[0], attrInfo[0], attrInfo[1], attrBuff);
            } // (int i=0; i<numberOfAttributes; i++)
        }
        catch (Exception ex) {
            log.debug("copyAttribute(): failure: ", ex);
        }
    }

    // Implementing DataFormat
    @SuppressWarnings("rawtypes")
    public List getMetadata(int... attrPropList) throws Exception {
        throw new UnsupportedOperationException("getMetadata(int... attrPropList) is not supported");
    }
}