/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the COPYING file, which can be found  *
 * at the root of the source code distribution tree,                         *
 * or in https://www.hdfgroup.org/licenses.                                  *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.h4;

import java.util.List;
import java.util.Vector;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import hdf.hdflib.HDFChunkInfo;
import hdf.hdflib.HDFCompInfo;
import hdf.hdflib.HDFConstants;
import hdf.hdflib.HDFDeflateCompInfo;
import hdf.hdflib.HDFException;
import hdf.hdflib.HDFJPEGCompInfo;
import hdf.hdflib.HDFLibrary;
import hdf.hdflib.HDFNBITCompInfo;
import hdf.hdflib.HDFSKPHUFFCompInfo;
import hdf.hdflib.HDFSZIPCompInfo;

import hdf.object.Attribute;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.MetaDataContainer;
import hdf.object.ScalarDS;

import hdf.object.h4.H4ScalarAttribute;

/**
 * H4SDS describes HDF4 Scientific Data Sets (SDS) and operations performed on
 * the SDS. An SDS is a group of data structures used to store and describe
 * multidimensional arrays of scientific data.
 *
 * The data contained in an SDS array has a data type associated with it. The
 * standard data types supported by the SD interface include 32- and 64-bit
 * floating-point numbers, 8-, 16- and 32-bit signed integers, 8-, 16- and
 * 32-bit unsigned integers, and 8-bit characters.
 *
 * <b>How to Select a Subset</b>
 *
 * Dataset defines APIs for reading, writing and subsetting a dataset. No function
 * is defined to select a subset of a data array. The selection is done in an implicit
 * way. Function calls to dimension information such as getSelectedDims() return an array
 * of dimension values, which is a reference to the array in the dataset object.
 * Changes to the array outside the dataset object directly change the values of
 * the array in the dataset object. It is like pointers in C.
 *
 * The following is an example of how to make a subset. In the example, the dataset
 * is a 4-dimensional array with size [200][100][50][10], i.e.
 * dims[0]=200; dims[1]=100; dims[2]=50; dims[3]=10; <br>
 * We want to select every other data point in dims[1] and dims[2]
 * <pre>
     int rank = dataset.getRank();   // number of dimensions of the dataset
     long[] dims = dataset.getDims(); // the dimension sizes of the dataset
     long[] selected = dataset.getSelectedDims(); // the selected size of the dataset
     long[] start = dataset.getStartDims(); // the offset of the selection
     long[] stride = dataset.getStride(); // the stride of the dataset
     int[]  selectedIndex = dataset.getSelectedIndex(); // the selected dimensions for display

     // select dim1 and dim2 as 2D data for display, and slice through dim0
     selectedIndex[0] = 1;
     selectedIndex[1] = 2;
     selectedIndex[2] = 0;

     // reset the selection arrays
     for (int i=0; i&lt;rank; i++) {
         start[i] = 0;
         selected[i] = 1;
         stride[i] = 1;
    }

    // set stride to 2 on dim1 and dim2 so that every other data point is selected.
    stride[1] = 2;
    stride[2] = 2;

    // set the selection size of dim1 and dim2
    selected[1] = dims[1]/stride[1];
    selected[2] = dims[2]/stride[2];

    // when dataset.read() is called, the selection above will be used since
    // the dimension arrays are passed by reference. Changes to these arrays
    // outside the dataset object directly change the values of these arrays
    // in the dataset object.

 * </pre>
 *
 * @version 1.1 9/4/2007
 * @author Peter X. Cao
 */
public class H4SDS extends ScalarDS implements MetaDataContainer
{
    private static final long serialVersionUID = 2557157923292438696L;

    private static final Logger   log = LoggerFactory.getLogger(H4SDS.class);

    /** tag for netCDF datasets.
     *  The HDF4 library supports netCDF version 2.3.2; netCDF objects are
     *  accessible only through the SDS APIs.
     */
    // magic number for netCDF: "C(67) D(68) F(70) '\001'"
    public static final int                 DFTAG_NDG_NETCDF = 67687001;

    /**
     * The list of attributes of this data object. Members of the list are
     * instances of Attribute.
     */
    @SuppressWarnings("rawtypes")
    private List                            attributeList;

    /**
     * The SDS interface identifier obtained from SDstart(filename, access)
     */
    private long                            sdid;

    /** the datatype identifier */
    private long                            datatypeID = -1;

    /** the number of attributes */
    private int                             nAttributes = -1;

    /**
     * Creates an H4SDS object with specific name and path.
     *
     * @param theFile
     *            the HDF file.
     * @param name
     *            the name of this H4SDS.
     * @param path
     *            the full path of this H4SDS.
     */
    public H4SDS(FileFormat theFile, String name, String path) {
        this(theFile, name, path, null);
    }

    /**
     * Creates an H4SDS object with specific name, path and oid.
     *
     * @param theFile
     *            the HDF file.
     * @param name
     *            the name of this H4SDS.
     * @param path
     *            the full path of this H4SDS.
     * @param oid
     *            the unique identifier of this data object.
     */
    @SuppressWarnings("deprecation")
    public H4SDS(FileFormat theFile, String name, String path, long[] oid) {
        super(theFile, name, path, oid);
        unsignedConverted = false;
        sdid = ((H4File)getFileFormat()).getSDAccessID();
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.DataFormat#hasAttribute()
     */
    @Override
    public boolean hasAttribute() {
        if (nAttributes < 0) {
            sdid = ((H4File)getFileFormat()).getSDAccessID();

            long id = open();

            if (id >= 0) {
                try { // retrieve attributes of the dataset
                    String[] objName = {""};
                    int[] sdInfo = {0, 0, 0};
                    int[] tmpDim = new int[HDFConstants.MAX_VAR_DIMS];
                    HDFLibrary.SDgetinfo(id, objName, tmpDim, sdInfo);
                    nAttributes = sdInfo[2];
                }
                catch (Exception ex) {
                    log.debug("hasAttribute(): failure: ", ex);
                    nAttributes = 0;
                }

                log.trace("hasAttribute(): nAttributes={}", nAttributes);

                close(id);
            }
        }

        return (nAttributes > 0);
    }

    // Implementing Dataset
    /**
     * Returns the datatype of the data object.
     *
     * @return the datatype of the data object.
     */
    @Override
    public Datatype getDatatype() {
        if (!inited)
            init();

        if (datatype == null) {
            try {
                datatype = new H4Datatype(datatypeID);
            }
            catch (Exception ex) {
                log.debug("getDatatype(): failed to create datatype: ", ex);
                datatype = null;
            }
        }

        return datatype;
    }

    // To do: Implementing Dataset
    @Override
    public Dataset copy(Group pgroup, String dname, long[] dims, Object buff) throws Exception {
        log.trace("copy(): start: parentGroup={} datasetName={}", pgroup, dname);

        Dataset dataset = null;
        long srcdid = -1;
        long dstdid = -1;
        long tid = -1;
        int size = 1;
        int theRank = 2;
        String path = null;
        int[] count = null;
        int[] start = null;

        if (pgroup == null) {
            log.debug("copy(): Parent group is null");
            return null;
        }

        if (dname == null)
            dname = getName();

        if (pgroup.isRoot())
            path = HObject.SEPARATOR;
        else
            path = pgroup.getPath()+pgroup.getName()+HObject.SEPARATOR;
        log.trace("copy(): path={}", path);

        srcdid = open();
        if (srcdid < 0) {
            log.debug("copy(): Invalid source SDID");
            return null;
        }

        if (dims == null) {
            if (!isInited())
                init();

            theRank = getRank();

            dims = getDims();
        }
        else {
            theRank = dims.length;
        }

        start = new int[theRank];
        count = new int[theRank];
        for (int i=0; i<theRank; i++) {
            start[i] = 0;
            count[i] = (int)dims[i];
            size *= count[i];
        }
        log.trace("copy(): theRank={} with size={}", theRank, size);

        // create the new dataset and attach it to the parent group
        tid = datatypeID;
        dstdid = HDFLibrary.SDcreate(((H4File)pgroup.getFileFormat()).getSDAccessID(), dname, tid, theRank, count);
        if (dstdid < 0) {
            log.debug("copy(): Invalid dest SDID");
            return null;
        }

        int ref = HDFLibrary.SDidtoref(dstdid);
        if (!pgroup.isRoot()) {
            long vgid = pgroup.open();
            HDFLibrary.Vaddtagref(vgid, HDFConstants.DFTAG_NDG, ref);
            pgroup.close(vgid);
        }

        // copy attributes from one object to the new object
        log.trace("copy(): copy attributes");
        copyAttribute(srcdid, dstdid);

        // read data from the source dataset
        log.trace("copy(): read data from the source dataset");
        if (buff == null) {
            buff = new byte[size * HDFLibrary.DFKNTsize(tid)];
            HDFLibrary.SDreaddata(srcdid, start, null, count, buff);
        }

        // write the data into the destination dataset
        log.trace("copy(): write the data into the destination dataset");
        HDFLibrary.SDwritedata(dstdid, start, null, count, buff);

        long[] oid = {HDFConstants.DFTAG_NDG, ref};
        dataset = new H4SDS(pgroup.getFileFormat(), dname, path, oid);

        pgroup.addToMemberList(dataset);

        close(srcdid);

        try {
            HDFLibrary.SDendaccess(dstdid);
        }
        catch (HDFException ex) {
            log.debug("copy(): SDendaccess failure: ", ex);
        }

        return dataset;
    }

    // Implementing Dataset
    @Override
    public byte[] readBytes() throws HDFException {
        byte[] theData = null;

        if (!isInited())
            init();

        long id = open();
        if (id < 0) {
            log.debug("readBytes(): Invalid SDID");
            return null;
        }

        int datasize = 1;
        int[] select = new int[rank];
        int[] start = new int[rank];
        for (int i=0; i<rank; i++) {
            datasize *= (int)selectedDims[i];
            select[i] = (int)selectedDims[i];
            start[i] = (int)startDims[i];
        }

        int[] stride = null;
        if (selectedStride != null) {
            stride = new int[rank];
            for (int i=0; i<rank; i++) {
                stride[i] = (int)selectedStride[i];
            }
        }

        try {
            int size = HDFLibrary.DFKNTsize(datatypeID)*datasize;
            theData = new byte[size];
            HDFLibrary.SDreaddata(id, start, stride, select, theData);
        }
        catch (Exception ex) {
            log.debug("readBytes(): failure: ", ex);
        }
        finally {
            close(id);
        }

        return theData;
    }

    // Implementing DataFormat
    /**
     * Reads the data from file.
     *
     * read() reads the data from file to a memory buffer and returns the memory
     * buffer. The dataset object does not hold the memory buffer. To store the
     * memory buffer in the dataset object, one must call getData().
     *
     * By default, the whole dataset is read into memory. Users can also select
     * a subset to read. Subsetting is done in an implicit way.
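     *
     * A minimal usage sketch; the file name and the dataset path "/mydata"
     * are hypothetical:
     *
     * <pre>
     * H4File file = new H4File("hdf4_test.hdf", FileFormat.READ);
     * file.open();
     * H4SDS sds = (H4SDS) file.get("/mydata");
     * Object data = sds.read(); // reads the current (default) selection
     * file.close();
     * </pre>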
     *
     * @return the data read from file.
     *
     * @see #getData()
     *
     * @throws HDFException
     *             if object can not be read
     * @throws OutOfMemoryError
     *             if memory is exhausted
     */
    @Override
    public Object read() throws HDFException, OutOfMemoryError {
        Object theData = null;

        if (!isInited())
            init();

        long id = open();
        if (id < 0) {
            log.debug("read(): Invalid SDID");
            return null;
        }

        int datasize = 1;
        int[] select = new int[rank];
        int[] start = new int[rank];
        for (int i=0; i<rank; i++) {
            datasize *= (int)selectedDims[i];
            select[i] = (int)selectedDims[i];
            start[i] = (int)startDims[i];
        }

        int[] stride = null;
        if (selectedStride != null) {
            stride = new int[rank];
            for (int i=0; i<rank; i++) {
                stride[i] = (int)selectedStride[i];
            }
        }

        try {
            theData = H4Datatype.allocateArray(datatypeID, datasize);

            if (theData != null) {
                // assume external data files are located in the same directory as the main file.
                HDFLibrary.HXsetdir(getFileFormat().getParent());

                HDFLibrary.SDreaddata(id, start, stride, select, theData);

                if (isText)
                    theData = byteToString((byte[])theData, select[0]);
            }
        }
        catch (Exception ex) {
            log.debug("read(): failure: ", ex);
        }
        finally {
            close(id);
        }

        if (fillValue == null && isImageDisplay) {
            try {
                getMetadata(); // need to set fillValue for images
            }
            catch (Exception ex) {
                log.debug("read(): getMetadata failure: ", ex);
            }
        }

        if ((rank > 1) && (selectedIndex[0] > selectedIndex[1]))
            isDefaultImageOrder = false;
        else
            isDefaultImageOrder = true;

        log.trace("read(): isDefaultImageOrder={}", isDefaultImageOrder);
        return theData;
    }

    // Implementing DataFormat
    /**
     * Writes a memory buffer to the object in the file.
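     *
     * A minimal sketch, assuming an open, writable H4File named file with an
     * integer SDS at the hypothetical path "/mydata":
     *
     * <pre>
     * H4SDS sds = (H4SDS) file.get("/mydata");
     * int[] values = (int[]) sds.read(); // read the current selection
     * values[0] = 99;                    // change the data in memory
     * sds.write(values);                 // write the selection back to the file
     * </pre>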
     *
     * @param buf
     *            the data to write
     *
     * @throws HDFException
     *             if data can not be written
     */
    @SuppressWarnings("deprecation")
    @Override
    public void write(Object buf) throws HDFException {
        if (buf == null) {
            log.debug("write(): Object is null");
            return;
        }

        long id = open();
        if (id < 0) {
            log.debug("write(): Invalid SDID");
            return;
        }

        int[] select = new int[rank];
        int[] start = new int[rank];
        for (int i=0; i<rank; i++) {
            select[i] = (int)selectedDims[i];
            start[i] = (int)startDims[i];
        }

        int[] stride = null;
        if (selectedStride != null) {
            stride = new int[rank];
            for (int i=0; i<rank; i++) {
                stride[i] = (int)selectedStride[i];
            }
        }

        Object tmpData = buf;
        try {
            if (getDatatype().isUnsigned() && unsignedConverted)
                tmpData = convertToUnsignedC(buf);
            // assume external data files are located in the same directory as the main file.
            HDFLibrary.HXsetdir(getFileFormat().getParent());

            HDFLibrary.SDwritedata(id, start, stride, select, tmpData);
        }
        catch (Exception ex) {
            log.debug("write(): failure: ", ex);
        }
        finally {
            tmpData = null;
            close(id);
        }
    }

    // Implementing DataFormat
    /**
     * Retrieves the object's metadata, such as attributes, from the file.
     *
     * Metadata, such as attributes, is stored in a List.
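     *
     * A minimal sketch of listing this dataset's attributes; the attribute
     * accessor names assume this package's Attribute interface:
     *
     * <pre>
     * List attrs = sds.getMetadata();
     * for (int i = 0; i &lt; attrs.size(); i++) {
     *     H4ScalarAttribute attr = (H4ScalarAttribute) attrs.get(i);
     *     System.out.println(attr.getAttributeName());
     * }
     * </pre>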
     *
     * @return the list of metadata objects.
     *
     * @throws HDFException
     *             if the metadata can not be retrieved
     */
    @Override
    @SuppressWarnings({"rawtypes", "unchecked"})
    public List getMetadata() throws HDFException {
        if (attributeList != null) {
            log.trace("getMetadata(): attributeList != null");
            return attributeList;
        }

        long id = open();
        String[] objName = {""};
        int[] sdInfo = {0, 0, 0};
        try {
            // retrieve attributes of the dataset
            int[] tmpDim = new int[HDFConstants.MAX_VAR_DIMS];
            HDFLibrary.SDgetinfo(id, objName, tmpDim, sdInfo);
            int n = sdInfo[2];

            if ((attributeList == null) && (n>0))
                attributeList = new Vector(n, 5);

            boolean b = false;
            String[] attrName = new String[1];
            int[] attrInfo = {0, 0};
            for (int i=0; i<n; i++) {
                attrName[0] = "";
                try {
                    b = HDFLibrary.SDattrinfo(id, i, attrName, attrInfo);
                    // mask off the litend bit
                    attrInfo[0] = attrInfo[0] & (~HDFConstants.DFNT_LITEND);
                }
                catch (HDFException ex) {
                    log.debug("getMetadata(): attribute[{}] SDattrinfo failure: ", i, ex);
                    b = false;
                }

                if (!b)
                    continue;

                long[] attrDims = {attrInfo[1]};
                H4ScalarAttribute attr = new H4ScalarAttribute(this, attrName[0], new H4Datatype(attrInfo[0]), attrDims);
                attributeList.add(attr);

                Object buf = null;
                try {
                    buf = H4Datatype.allocateArray(attrInfo[0], attrInfo[1]);
                }
                catch (OutOfMemoryError e) {
                    log.debug("getMetadata(): out of memory: ", e);
                    buf = null;
                }

                try {
                    HDFLibrary.SDreadattr(id, i, buf);
                }
                catch (HDFException ex) {
                    log.debug("getMetadata(): attribute[{}] SDreadattr failure: ", i, ex);
                    buf = null;
                }

                if (buf != null) {
                    if ((attrInfo[0] == HDFConstants.DFNT_CHAR) ||
                        (attrInfo[0] ==  HDFConstants.DFNT_UCHAR8)) {
                        buf = Dataset.byteToString((byte[])buf, attrInfo[1]);
                    }
                    else if (attrName[0].equalsIgnoreCase("fillValue") ||
                            attrName[0].equalsIgnoreCase("_fillValue")) {
                        fillValue = buf;
                    }

                    attr.setAttributeData(buf);
                }

            } // (int i=0; i<n; i++)

            // retrieve attribute of dimension
            // BUG !! HDFLibrary.SDgetdimstrs(dimID, argv, 80) does not return anything
            /**
             * for (int i=0; i< rank; i++) { int dimID = HDFLibrary.SDgetdimid(id, i); String[] argv = {" ", "
             * ", " "}; HDFLibrary.SDgetdimstrs(dimID, argv, 80); }
             */
        }
        catch (Exception ex) {
            log.debug("getMetadata(): failure: ", ex);
        }
        finally {
            close(id);
        }

        return attributeList;
    }

    // To do: implementing DataFormat
    /**
     * Writes a specific piece of metadata (such as an attribute) into the file.
     *
     * If an HDF(4&amp;5) attribute exists in the file, this method updates its
     * value. If the attribute does not exist in the file, it creates the
     * attribute in the file and attaches it to the object. It will fail to
     * write a new attribute to the object where an attribute with the same name
     * already exists. To update the value of an existing attribute in the file,
     * one needs to get the instance of the attribute by getMetadata(), change
     * its values, then use writeMetadata() to write the value.
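     *
     * A minimal update sketch; the replacement value is hypothetical and must
     * match the attribute's datatype and size:
     *
     * <pre>
     * List attrs = sds.getMetadata();
     * H4ScalarAttribute attr = (H4ScalarAttribute) attrs.get(0);
     * attr.setAttributeData(new int[] {1}); // hypothetical new value
     * sds.writeMetadata(attr);              // writes the new value to the file
     * </pre>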
     *
     * @param info
     *            the metadata to write.
     *
     * @throws Exception
     *             if the metadata can not be written
     */
    @Override
    @SuppressWarnings({"rawtypes", "unchecked"})
    public void writeMetadata(Object info) throws Exception {
        // only attribute metadata is supported.
        if (!(info instanceof H4ScalarAttribute)) {
            log.debug("writeMetadata(): Object not an H4ScalarAttribute");
            return;
        }

        try {
            getFileFormat().writeAttribute(this, (H4ScalarAttribute)info, true);

            if (attributeList == null)
                attributeList = new Vector();

            attributeList.add(info);
            nAttributes = attributeList.size();
        }
        catch (Exception ex) {
            log.trace("writeMetadata(): failure: ", ex);
        }
    }

    /**
     * Deletes an existing piece of metadata from this object.
     *
     * @param info
     *            the metadata to delete.
     *
     * @throws HDFException
     *             if the metadata can not be removed
     */
    @Override
    public void removeMetadata(Object info) throws HDFException {
        log.trace("removeMetadata(): disabled");
    }

    /**
     * Updates an existing piece of metadata attached to this object.
     *
     * @param info
     *            the metadata to update.
     *
     * @throws Exception
     *             if the metadata can not be updated
     */
    @Override
    public void updateMetadata(Object info) throws Exception {
        log.trace("updateMetadata(): disabled");
    }

    // Implementing HObject
    @Override
    public long open() {
        long id = -1;

        try {
            int index = 0;
            int tag = (int)oid[0];

            log.trace("open(): tag={}", tag);
            if (tag == H4SDS.DFTAG_NDG_NETCDF)
                index = (int)oid[1]; // HDFLibrary.SDidtoref(id) fails for netCDF
            else
                index = HDFLibrary.SDreftoindex(sdid, (int)oid[1]);

            id = HDFLibrary.SDselect(sdid, index);
        }
        catch (HDFException ex) {
            log.debug("open(): failure: ", ex);
            id = -1;
        }

        return id;
    }

    // Implementing HObject
    @Override
    public void close(long id) {
        try {
            HDFLibrary.SDendaccess(id);
        }
        catch (HDFException ex) {
            log.debug("close(): failure: ", ex);
        }
    }

    /**
     * Initializes the H4SDS such as dimension size of this dataset.
     */
    @SuppressWarnings("deprecation")
    @Override
    public void init() {
        if (inited) {
            log.trace("init(): Already initialized");
            return; // already called. Initialize only once
        }

        long id = open();
        String[] objName = {""};
        String[] dimName = {""};
        int[] dimInfo = {0, 0, 0};
        int[] sdInfo = {0, 0, 0};
        boolean isUnlimited = false;

        int[] idims = new int[HDFConstants.MAX_VAR_DIMS];
        try {
            HDFLibrary.SDgetinfo(id, objName, idims, sdInfo);
            // mask off the litend bit
            sdInfo[1] = sdInfo[1] & (~HDFConstants.DFNT_LITEND);
            nAttributes = sdInfo[2];
            rank = sdInfo[0];

            if (rank <= 0) {
                rank = 1;
                idims[0] = 1;
            }

            isUnlimited = HDFLibrary.SDisrecord(id);
            log.trace("init(): isUnlimited={}", isUnlimited);

            datatypeID = sdInfo[1];
            isText = ((datatypeID == HDFConstants.DFNT_CHAR) || (datatypeID == HDFConstants.DFNT_UCHAR8));

            // get the dimension names
            try {
                dimNames = new String[rank];
                for (int i=0; i<rank; i++) {
                    long dimid = HDFLibrary.SDgetdimid(id, i);
                    HDFLibrary.SDdiminfo(dimid, dimName, dimInfo);
                    dimNames[i] = dimName[0];
                }
            }
            catch (Exception ex) {
                log.debug("init(): get the dimension names: ", ex);
            }

            // get compression information
            try {
                HDFCompInfo compInfo = new HDFCompInfo();
                HDFLibrary.SDgetcompinfo(id, compInfo);

                compression.setLength(0);

                if (compInfo.ctype == HDFConstants.COMP_CODE_DEFLATE) {
                    HDFDeflateCompInfo comp = new HDFDeflateCompInfo();
                    HDFLibrary.SDgetcompinfo(id, comp);
                    compression.append("GZIP(level=").append(comp.level).append(")");
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_SZIP) {
                    HDFSZIPCompInfo comp = new HDFSZIPCompInfo();
                    HDFLibrary.SDgetcompinfo(id, comp);
                    compression.append("SZIP(bits_per_pixel=").append(comp.bits_per_pixel).append(",options_mask=")
                            .append(comp.options_mask).append(",pixels=").append(comp.pixels).append(",pixels_per_block=")
                            .append(comp.pixels_per_block).append(",pixels_per_scanline=").append(comp.pixels_per_scanline).append(")");
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_JPEG) {
                    HDFJPEGCompInfo comp = new HDFJPEGCompInfo();
                    HDFLibrary.SDgetcompinfo(id, comp);
                    compression.append("JPEG(quality=").append(comp.quality)
                            .append(",force_baseline=").append(comp.force_baseline).append(")");
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_SKPHUFF) {
                    HDFSKPHUFFCompInfo comp = new HDFSKPHUFFCompInfo();
                    HDFLibrary.SDgetcompinfo(id, comp);
                    compression.append("SKPHUFF(skp_size=").append(comp.skp_size).append(")");
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_RLE) {
                    compression.append("RLE");
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_NBIT) {
                    HDFNBITCompInfo comp = new HDFNBITCompInfo();
                    HDFLibrary.SDgetcompinfo(id, comp);
                    compression.append("NBIT(nt=").append(comp.nt).append(",bit_len=").append(comp.bit_len)
                            .append(",ctype=").append(comp.ctype).append(",fill_one=").append(comp.fill_one)
                            .append(",sign_ext=").append(comp.sign_ext).append(",start_bit=").append(comp.start_bit).append(")");
                }

                if (compression.length() == 0)
                    compression.append("NONE");
            }
            catch (Exception ex) {
                log.debug("init(): get compression information failure: ", ex);
            }

            // get chunk information
            try {
                HDFChunkInfo chunkInfo = new HDFChunkInfo();
                int[] cflag = {HDFConstants.HDF_NONE};

                try {
                    HDFLibrary.SDgetchunkinfo(id, chunkInfo, cflag);
                }
                catch (Exception ex) {
                    log.debug("init(): SDgetchunkinfo failure: ", ex);
                }

                storageLayout.setLength(0);

                if (cflag[0] == HDFConstants.HDF_NONE) {
                    chunkSize = null;
                    storageLayout.append("NONE");
                }
                else {
                    chunkSize = new long[rank];
                    for (int i=0; i<rank; i++)
                        chunkSize[i] = chunkInfo.chunk_lengths[i];
                    storageLayout.append("CHUNKED: ").append(chunkSize[0]);
                    for (int i = 1; i < rank; i++)
                        storageLayout.append(" X ").append(chunkSize[i]);
                }
            }
            catch (Exception ex) {
                log.debug("init(): get chunk information failure: ", ex);
            }

            inited = true;
        }
        catch (HDFException ex) {
            log.debug("init(): failure: ", ex);
        }
        finally {
            close(id);
        }

        dims = new long[rank];
        maxDims = new long[rank];
        startDims = new long[rank];
        selectedDims = new long[rank];

        for (int i=0; i<rank; i++) {
            startDims[i] = 0;
            selectedDims[i] = 1;
            dims[i] = maxDims[i] = idims[i];
        }

        if (isUnlimited)
            maxDims[0] = -1;

        selectedIndex[0] = 0;
        selectedIndex[1] = 1;
        selectedIndex[2] = 2;

        // select only two dimensions at a time
        if (rank == 1)
            selectedDims[0] = dims[0];

        if (rank > 1) {
            selectedDims[0] = dims[0];
            if (isText)
                selectedDims[1] = 1;
            else
                selectedDims[1] = dims[1];
        }
    }

    /**
     * Creates a new dataset.
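     *
     * A minimal creation sketch, assuming an open, writable H4File named
     * file; the dataset name and sizes are hypothetical:
     *
     * <pre>
     * long[] dims = {100, 50};
     * int[] data = new int[100 * 50];
     * Datatype dtype = file.createDatatype(Datatype.CLASS_INTEGER, 4,
     *         Datatype.NATIVE, Datatype.NATIVE);
     * H4SDS sds = H4SDS.create("mydata", (Group) file.getRootObject(), dtype,
     *         dims, null, null, 0, data);
     * </pre>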
     *
     * @param name the name of the dataset to create.
     * @param pgroup the parent group of the new dataset.
     * @param type the datatype of the dataset.
     * @param dims the dimension size of the dataset.
     * @param maxdims the max dimension size of the dataset.
     * @param chunks the chunk size of the dataset.
     * @param gzip the level of the gzip compression.
     * @param fillValue the default value.
     * @param data the array of data values.
     *
     * @return the new dataset if successful. Otherwise returns null.
     *
     * @throws Exception if the dataset can not be created
     */
    public static H4SDS create(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims,
            long[] chunks, int gzip, Object fillValue, Object data) throws Exception {
        H4SDS dataset = null;
        if ((pgroup == null) || (name == null) || (dims == null)) {
            log.trace("create(): Parent group, name or dims is null");
            return null;
        }

        H4File file = (H4File)pgroup.getFileFormat();

        if (file == null) {
            log.trace("create(): Parent group FileFormat is null");
            return null;
        }

        String path = HObject.SEPARATOR;
        if (!pgroup.isRoot())
            path = pgroup.getPath()+pgroup.getName()+HObject.SEPARATOR;
        // prepare the dataspace
        int rank = dims.length;
        int[] idims = new int[rank];
        int[] start = new int[rank];
        for (int i=0; i<rank; i++) {
            idims[i] = (int)dims[i];
            start[i] = 0;
        }

        // only the first element of the SDcreate parameter dim_sizes (i.e.,
        // the dimension of the lowest rank or the slowest-changing dimension)
        // can be assigned the value SD_UNLIMITED (or 0) to make the first
        // dimension unlimited.
        if ((maxdims != null) && (maxdims[0]<=0))
            idims[0] = 0; // set to unlimited dimension.

        int[] ichunks = null;
        if (chunks != null) {
            ichunks = new int[rank];
            for (int i=0; i<rank; i++)
                ichunks[i] = (int)chunks[i];
        }

        // unlimited cannot be used with chunking or compression for HDF 4.2.6 or earlier.
        if (idims[0] == 0 && (ichunks != null || gzip>0)) {
            log.debug("create(): Unlimited cannot be used with chunking or compression");
            throw new HDFException("Unlimited cannot be used with chunking or compression");
        }

        long sdid = (file).getSDAccessID();
        long sdsid = -1;
        long vgid = -1;
        long tid = type.createNative();

        if (tid >= 0) {
            try {
                sdsid = HDFLibrary.SDcreate(sdid, name, tid, rank, idims);
                // set fill value to zero.
                int vsize = HDFLibrary.DFKNTsize(tid);
                byte[] fill = new byte[vsize];
                for (int i=0; i<vsize; i++)
                    fill[i] = 0;
                HDFLibrary.SDsetfillvalue(sdsid, fill);

                // when we create a new dataset with unlimited dimension,
                // we have to write some data into the dataset or otherwise
                // the current dataset has zero dimension size.
            }
            catch (Exception ex) {
                log.debug("create(): failure: ", ex);
                throw (ex);
            }
        }

        if (sdsid < 0) {
            log.debug("create(): Dataset creation failed");
            throw (new HDFException("Unable to create the new dataset."));
        }

        HDFDeflateCompInfo compInfo = null;
        if (gzip > 0) {
            // set compression
            compInfo = new HDFDeflateCompInfo();
            compInfo.level = gzip;
            if (chunks == null)
                HDFLibrary.SDsetcompress(sdsid, HDFConstants.COMP_CODE_DEFLATE, compInfo);
        }

        if (chunks != null) {
            // set chunk
            HDFChunkInfo chunkInfo = new HDFChunkInfo(ichunks);
            int flag = HDFConstants.HDF_CHUNK;

            if (gzip > 0) {
                flag = HDFConstants.HDF_CHUNK | HDFConstants.HDF_COMP;
                chunkInfo = new HDFChunkInfo(ichunks, HDFConstants.COMP_CODE_DEFLATE, compInfo);
            }

            try {
                HDFLibrary.SDsetchunk(sdsid, chunkInfo, flag);
            }
            catch (Exception err) {
                log.debug("create(): SDsetchunk failure: ", err);
                throw new HDFException("SDsetchunk failed.");
            }
        }

        if ((sdsid > 0) && (data != null))
            HDFLibrary.SDwritedata(sdsid, start, null, idims, data);

        int ref = HDFLibrary.SDidtoref(sdsid);

        if (!pgroup.isRoot()) {
            // add the dataset to the parent group
            vgid = pgroup.open();
            if (vgid < 0) {
                if (sdsid > 0)
                    HDFLibrary.SDendaccess(sdsid);
                log.debug("create(): Invalid Parent Group ID");
                throw (new HDFException("Unable to open the parent group."));
            }

            HDFLibrary.Vaddtagref(vgid, HDFConstants.DFTAG_NDG, ref);

            pgroup.close(vgid);
        }

        try {
            if (sdsid > 0)
                HDFLibrary.SDendaccess(sdsid);
        }
        catch (Exception ex) {
            log.debug("create(): SDendaccess failure: ", ex);
        }

        long[] oid = {HDFConstants.DFTAG_NDG, ref};
        dataset = new H4SDS(file, name, path, oid);

        pgroup.addToMemberList(dataset);

        return dataset;
    }

    /**
     * Creates a new dataset.
     *
     * @param name the name of the dataset to create.
     * @param pgroup the parent group of the new dataset.
     * @param type the datatype of the dataset.
     * @param dims the dimension size of the dataset.
     * @param maxdims the max dimension size of the dataset.
     * @param chunks the chunk size of the dataset.
     * @param gzip the level of the gzip compression.
     * @param data the array of data values.
     *
     * @return the new dataset if successful. Otherwise returns null.
     *
     * @throws Exception if the dataset can not be created
     */
    public static H4SDS create(String name, Group pgroup, Datatype type,
            long[] dims, long[] maxdims, long[] chunks, int gzip, Object data) throws Exception {
        return create(name, pgroup, type, dims, maxdims, chunks, gzip, null, data);
    }

    /**
     * copy attributes from one SDS to another SDS
     */
    private void copyAttribute(long srcdid, long dstdid) {
        log.trace("copyAttribute(): start: srcdid={} dstdid={}", srcdid, dstdid);
        try {
            String[] objName = {""};
            int[] sdInfo = {0, 0, 0};
            int[] tmpDim = new int[HDFConstants.MAX_VAR_DIMS];
            HDFLibrary.SDgetinfo(srcdid, objName, tmpDim, sdInfo);
            int numberOfAttributes = sdInfo[2];
            log.trace("copyAttribute(): numberOfAttributes={}", numberOfAttributes);

            boolean b = false;
            String[] attrName = new String[1];
            int[] attrInfo = {0, 0};
            for (int i=0; i<numberOfAttributes; i++) {
                attrName[0] = "";
                try {
                    b = HDFLibrary.SDattrinfo(srcdid, i, attrName, attrInfo);
                }
                catch (HDFException ex) {
                    log.debug("copyAttribute(): attribute[{}] SDattrinfo failure: ", i, ex);
                    b = false;
                }

                if (!b)
                    continue;

                // read attribute data from source dataset
                byte[] attrBuff = new byte[attrInfo[1] * HDFLibrary.DFKNTsize(attrInfo[0])];
                try {
                    HDFLibrary.SDreadattr(srcdid, i, attrBuff);
                }
                catch (HDFException ex) {
                    log.debug("copyAttribute(): attribute[{}] SDreadattr failure: ", i, ex);
                    attrBuff = null;
                }

                if (attrBuff == null) {
                    log.debug("copyAttribute(): attrBuff[{}] is null", i);
                    continue;
                }

                // attach attribute to the destination dataset
                HDFLibrary.SDsetattr(dstdid, attrName[0], attrInfo[0], attrInfo[1], attrBuff);
            } // (int i=0; i<numberOfAttributes; i++)
        }
        catch (Exception ex) {
            log.debug("copyAttribute(): failure: ", ex);
        }
    }

    // Implementing DataFormat
    /**
     * Retrieves the object's metadata, such as attributes, from the file.
     *
     * Metadata, such as attributes, is stored in a List.
     *
     * @param attrPropList
     *             the list of properties to get
     *
     * @return the list of metadata objects.
     *
     * @throws Exception
     *             if the metadata can not be retrieved
     */
    @SuppressWarnings("rawtypes")
    public List getMetadata(int... attrPropList) throws Exception {
        throw new UnsupportedOperationException("getMetadata(int... attrPropList) is not supported");
    }
}