001/*****************************************************************************
002 * Copyright by The HDF Group.                                               *
003 * Copyright by the Board of Trustees of the University of Illinois.         *
004 * All rights reserved.                                                      *
005 *                                                                           *
006 * This file is part of the HDF Java Products distribution.                  *
007 * The full copyright notice, including terms governing use, modification,   *
008 * and redistribution, is contained in the files COPYING and Copyright.html. *
009 * COPYING can be found at the root of the source code distribution tree.    *
010 * Or, see https://support.hdfgroup.org/products/licenses.html               *
011 * If you do not have access to either file, you may request a copy from     *
012 * help@hdfgroup.org.                                                        *
013 ****************************************************************************/
014
015package hdf.object.h4;
016
017import java.util.List;
018import java.util.Vector;
019
020import hdf.hdflib.HDFChunkInfo;
021import hdf.hdflib.HDFCompInfo;
022import hdf.hdflib.HDFConstants;
023import hdf.hdflib.HDFDeflateCompInfo;
024import hdf.hdflib.HDFException;
025import hdf.hdflib.HDFJPEGCompInfo;
026import hdf.hdflib.HDFLibrary;
027import hdf.hdflib.HDFNBITCompInfo;
028import hdf.hdflib.HDFSKPHUFFCompInfo;
029import hdf.hdflib.HDFSZIPCompInfo;
030
031import hdf.object.Attribute;
032import hdf.object.Dataset;
033import hdf.object.Datatype;
034import hdf.object.FileFormat;
035import hdf.object.Group;
036import hdf.object.HObject;
037import hdf.object.ScalarDS;
038import hdf.object.MetaDataContainer;
039
040import hdf.object.h4.H4ScalarAttribute;
041
042/**
 * H4SDS describes HDF4 Scientific Data Sets (SDS) and operations performed on
 * the SDS. An SDS is a group of data structures used to store and describe
 * multidimensional arrays of scientific data.
046 *
047 * The data contained in an SDS array has a data type associated with it. The
048 * standard data types supported by the SD interface include 32- and 64-bit
049 * floating-point numbers, 8-, 16- and 32-bit signed integers, 8-, 16- and
050 * 32-bit unsigned integers, and 8-bit characters.
051 *
052 * <b>How to Select a Subset</b>
053 *
 * Dataset defines APIs for reading, writing and subsetting a dataset. No function
 * is defined for selecting a subset of a data array; the selection is done in an
 * implicit way. Function calls for dimension information, such as getSelectedDims(),
 * return an array of dimension values that is a reference to the array held in the
 * dataset object. Changes made to that array outside the dataset object directly
 * change the values inside the dataset object, much like a pointer in C.
060 *
 * The following is an example of how to make a subset. In the example, the dataset
 * is a 4-dimensional array of size [200][100][50][10], i.e.,
 * dims[0]=200; dims[1]=100; dims[2]=50; dims[3]=10; <br>
 * We want to select every other data point in dims[1] and dims[2].
065 * <pre>
066     int rank = dataset.getRank();   // number of dimensions of the dataset
067     long[] dims = dataset.getDims(); // the dimension sizes of the dataset
     long[] selected = dataset.getSelectedDims(); // the selected size of the dataset
069     long[] start = dataset.getStartDims(); // the offset of the selection
070     long[] stride = dataset.getStride(); // the stride of the dataset
071     int[]  selectedIndex = dataset.getSelectedIndex(); // the selected dimensions for display
072
     // select dim1 and dim2 as 2D data for display, and slice through dim0
     selectedIndex[0] = 1;
     selectedIndex[1] = 2;
     selectedIndex[2] = 0;
077
078     // reset the selection arrays
079     for (int i=0; i&lt;rank; i++) {
080         start[i] = 0;
081         selected[i] = 1;
082         stride[i] = 1;
083    }
084
085    // set stride to 2 on dim1 and dim2 so that every other data point is selected.
086    stride[1] = 2;
087    stride[2] = 2;
088
089    // set the selection size of dim1 and dim2
    selected[1] = dims[1]/stride[1];
    selected[2] = dims[2]/stride[2];
092
    // when dataset.read() is called, the selection above will be used since
    // the dimension arrays are passed by reference. Changes to these arrays
    // outside the dataset object directly change the values of these arrays
    // in the dataset object.
097
098 * </pre>
099 *
100 * @version 1.1 9/4/2007
101 * @author Peter X. Cao
102 */
103public class H4SDS extends ScalarDS implements MetaDataContainer
104{
105    private static final long serialVersionUID = 2557157923292438696L;
106
107    private static final org.slf4j.Logger   log = org.slf4j.LoggerFactory.getLogger(H4SDS.class);
108
    /** tag for netCDF datasets.
     *  The HDF4 library supports netCDF version 2.3.2, but only through the SDS APIs.
     */
112    // magic number for netCDF: "C(67) D(68) F(70) '\001'"
113    public static final int                 DFTAG_NDG_NETCDF = 67687001;
114
115    /**
     * The list of attributes of this data object. Members of the list are
     * instances of Attribute.
118     */
119    @SuppressWarnings("rawtypes")
120    private List                            attributeList;
121
122    /**
123     * The SDS interface identifier obtained from SDstart(filename, access)
124     */
125    private long                            sdid;
126
127    /** the datatype identifier */
128    private long                            datatypeID = -1;
129
130    /** the number of attributes */
131    private int                             nAttributes = -1;
132
133    /**
     * Creates an H4SDS object with a specific name and path.
135     *
136     * @param theFile
137     *            the HDF file.
138     * @param name
139     *            the name of this H4SDS.
140     * @param path
141     *            the full path of this H4SDS.
142     */
143    public H4SDS(FileFormat theFile, String name, String path) {
144        this(theFile, name, path, null);
145    }
146
147    /**
     * Creates an H4SDS object with a specific name, path and oid.
149     *
150     * @param theFile
151     *            the HDF file.
152     * @param name
153     *            the name of this H4SDS.
154     * @param path
155     *            the full path of this H4SDS.
156     * @param oid
157     *            the unique identifier of this data object.
158     */
159    @SuppressWarnings("deprecation")
160    public H4SDS(FileFormat theFile, String name, String path, long[] oid) {
161        super(theFile, name, path, oid);
162        unsignedConverted = false;
163        sdid = ((H4File)getFileFormat()).getSDAccessID();
164    }
165
166    /*
167     * (non-Javadoc)
168     * @see hdf.object.DataFormat#hasAttribute()
169     */
170    @Override
171    public boolean hasAttribute() {
172        if (nAttributes < 0) {
173            sdid = ((H4File)getFileFormat()).getSDAccessID();
174
175            long id = open();
176
177            if (id >= 0) {
178                try { // retrieve attributes of the dataset
179                    String[] objName = {""};
180                    int[] sdInfo = {0, 0, 0};
181                    int[] tmpDim = new int[HDFConstants.MAX_VAR_DIMS];
182                    HDFLibrary.SDgetinfo(id, objName, tmpDim, sdInfo);
183                    nAttributes = sdInfo[2];
184                }
185                catch (Exception ex) {
186                    log.debug("hasAttribute(): failure: ", ex);
187                    nAttributes=0;
188                }
189
190                log.trace("hasAttribute(): nAttributes={}", nAttributes);
191
192                close(id);
193            }
194        }
195
196        return (nAttributes>0);
197    }
198
199    // implementing Dataset
200    /**
201     * Returns the datatype of the data object.
202     *
203     * @return the datatype of the data object.
204     */
205    @Override
206    public Datatype getDatatype() {
207        if (!inited)
208            init();
209
210        if (datatype == null) {
211            try {
212                datatype = new H4Datatype(datatypeID);
213            }
214            catch (Exception ex) {
215                log.debug("getDatatype(): failed to create datatype: ", ex);
216                datatype = null;
217            }
218        }
219
220        return datatype;
221    }
222
    // Implementing Dataset
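    /**
     * Copies this dataset, including its attributes and data, to a new dataset
     * in a given parent group.
     *
     * @param pgroup the group the new dataset is copied into.
     * @param dname the name of the new dataset; null to reuse this dataset's name.
     * @param dims the dimension sizes of the new dataset; null to use the current dimensions.
     * @param buff the data to write into the new dataset; null to copy the data from this dataset.
     *
     * @return the new dataset if successful; otherwise returns null.
     *
     * @throws Exception if the dataset can not be copied
     */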
224    @Override
225    public Dataset copy(Group pgroup, String dname, long[] dims, Object buff) throws Exception {
226        log.trace("copy(): start: parentGroup={} datasetName={}", pgroup, dname);
227
228        Dataset dataset = null;
229        long srcdid = -1;
230        long dstdid = -1;
231        long tid = -1;
232        int size = 1;
233        int theRank = 2;
234        String path = null;
235        int[] count = null;
236        int[] start = null;
237
238        if (pgroup == null) {
239            log.debug("copy(): Parent group is null");
240            return null;
241        }
242
243        if (dname == null)
244            dname = getName();
245
246        if (pgroup.isRoot())
247            path = HObject.SEPARATOR;
248        else
249            path = pgroup.getPath()+pgroup.getName()+HObject.SEPARATOR;
250        log.trace("copy(): path={}", path);
251
252        srcdid = open();
253        if (srcdid < 0) {
254            log.debug("copy(): Invalid source SDID");
255            return null;
256        }
257
258        if (dims == null) {
259            if (!isInited())
260                init();
261
262            theRank = getRank();
263
264            dims = getDims();
265        }
266        else {
267            theRank = dims.length;
268        }
269
270        start = new int[theRank];
271        count = new int[theRank];
272        for (int i=0; i<theRank; i++) {
273            start[i] = 0;
274            count[i] = (int)dims[i];
275            size *= count[i];
276        }
277        log.trace("copy(): theRank={} with size={}", theRank, size);
278
279        // create the new dataset and attach it to the parent group
280        tid = datatypeID;
281        dstdid = HDFLibrary.SDcreate(((H4File)pgroup.getFileFormat()).getSDAccessID(), dname, tid, theRank, count);
282        if (dstdid < 0) {
283            log.debug("copy(): Invalid dest SDID");
284            return null;
285        }
286
287        int ref = HDFLibrary.SDidtoref(dstdid);
288        if (!pgroup.isRoot()) {
289            long vgid = pgroup.open();
290            HDFLibrary.Vaddtagref(vgid, HDFConstants.DFTAG_NDG, ref);
291            pgroup.close(vgid);
292        }
293
294        // copy attributes from one object to the new object
295        log.trace("copy(): copy attributes");
296        copyAttribute(srcdid, dstdid);
297
298        // read data from the source dataset
299        log.trace("copy(): read data from the source dataset");
300        if (buff == null) {
301            buff = new byte[size * HDFLibrary.DFKNTsize(tid)];
302            HDFLibrary.SDreaddata(srcdid, start, null, count, buff);
303        }
304
305        // write the data into the destination dataset
306        log.trace("copy(): write the data into the destination dataset");
307        HDFLibrary.SDwritedata(dstdid, start, null, count, buff);
308
309        long[] oid = {HDFConstants.DFTAG_NDG, ref};
310        dataset = new H4SDS(pgroup.getFileFormat(), dname, path, oid);
311
312        pgroup.addToMemberList(dataset);
313
314        close(srcdid);
315
316        try {
317            HDFLibrary.SDendaccess(dstdid);
318        }
319        catch (HDFException ex) {
320            log.debug("copy(): SDendaccess failure: ", ex);
321        }
322
323        return dataset;
324    }
325
326    // Implementing Dataset
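    /**
     * Reads the raw bytes of the current selection (start, stride and selected
     * dimensions) from the file, without converting them to a typed Java array.
     *
     * @return the bytes read; null if the read fails.
     *
     * @throws HDFException if the data can not be read
     */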
327    @Override
328    public byte[] readBytes() throws HDFException {
329        byte[] theData = null;
330
331        if (!isInited())
332            init();
333
334        long id = open();
335        if (id < 0) {
336            log.debug("readBytes(): Invalid SDID");
337            return null;
338        }
339
340        int datasize = 1;
341        int[] select = new int[rank];
342        int[] start = new int[rank];
343        for (int i=0; i<rank; i++) {
344            datasize *= (int)selectedDims[i];
345            select[i] = (int)selectedDims[i];
346            start[i] = (int)startDims[i];
347        }
348
349        int[] stride = null;
350        if (selectedStride != null) {
351            stride = new int[rank];
352            for (int i=0; i<rank; i++) {
353                stride[i] = (int)selectedStride[i];
354            }
355        }
356
357        try {
358            int size = HDFLibrary.DFKNTsize(datatypeID)*datasize;
359            theData = new byte[size];
360            HDFLibrary.SDreaddata(id, start, stride, select, theData);
361        }
362        catch (Exception ex) {
363            log.debug("readBytes(): failure: ", ex);
364        }
365        finally {
366            close(id);
367        }
368
369        return theData;
370    }
371
372    // Implementing DataFormat
373    /**
374     * Reads the data from file.
375     *
376     * read() reads the data from file to a memory buffer and returns the memory
377     * buffer. The dataset object does not hold the memory buffer. To store the
378     * memory buffer in the dataset object, one must call getData().
379     *
380     * By default, the whole dataset is read into memory. Users can also select
381     * a subset to read. Subsetting is done in an implicit way.
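     *
     * A minimal sketch of a full read; the file name and dataset path below are
     * illustrative assumptions, not part of this API.
     * <pre>
     * H4File file = new H4File("hdf4_test.hdf", FileFormat.READ);
     * file.open();
     * H4SDS sds = (H4SDS)file.get("/mySDS");
     * Object data = sds.read(); // by default, the whole dataset is read
     * file.close();
     * </pre>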
382     *
383     * @return the data read from file.
384     *
385     * @see #getData()
386     *
387     * @throws HDFException
388     *             if object can not be read
389     * @throws OutOfMemoryError
390     *             if memory is exhausted
391     */
392    @Override
393    public Object read() throws HDFException, OutOfMemoryError {
394        Object theData = null;
395
396        if (!isInited())
397            init();
398
399        long id = open();
400        if (id < 0) {
401            log.debug("read(): Invalid SDID");
402            return null;
403        }
404
405        int datasize = 1;
406        int[] select = new int[rank];
407        int[] start = new int[rank];
408        for (int i=0; i<rank; i++) {
409            datasize *= (int)selectedDims[i];
410            select[i] = (int)selectedDims[i];
411            start[i] = (int)startDims[i];
412        }
413
414        int[] stride = null;
415        if (selectedStride != null) {
416            stride = new int[rank];
417            for (int i=0; i<rank; i++) {
418                stride[i] = (int)selectedStride[i];
419            }
420        }
421
422        try {
423            theData = H4Datatype.allocateArray(datatypeID, datasize);
424
425            if (theData != null) {
426                // assume external data files are located in the same directory as the main file.
427                HDFLibrary.HXsetdir(getFileFormat().getParent());
428
429                HDFLibrary.SDreaddata(id, start, stride, select, theData);
430
431                if (isText)
432                    theData = byteToString((byte[])theData, select[0]);
433            }
434        }
435        catch (Exception ex) {
436            log.debug("read(): failure: ", ex);
437        }
438        finally {
439            close(id);
440        }
441
        if ((fillValue == null) && isImageDisplay) {
            try {
                getMetadata(); // need to set fillValue for images
            }
446            catch (Exception ex) {
447                log.debug("read(): getMetadata failure: ", ex);
448            }
449        }
450
451        if ((rank > 1) && (selectedIndex[0] > selectedIndex[1]))
452            isDefaultImageOrder = false;
453        else
454            isDefaultImageOrder = true;
455
456        log.trace("read(): isDefaultImageOrder={}", isDefaultImageOrder);
457        return theData;
458    }
459
460    // Implementing DataFormat
461    /**
462     * Writes a memory buffer to the object in the file.
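     *
     * A short sketch of a read-modify-write cycle, assuming the dataset holds
     * 32-bit integers:
     * <pre>
     * int[] values = (int[])sds.read(); // read the current selection
     * values[0] = 99;                   // modify the buffer in memory
     * sds.write(values);                // write it back to the file
     * </pre>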
463     *
464     * @param buf
465     *            the data to write
466     *
467     * @throws HDFException
468     *             if data can not be written
469     */
470    @SuppressWarnings("deprecation")
471    @Override
472    public void write(Object buf) throws HDFException {
473        if (buf == null) {
474            log.debug("write(): Object is null");
475            return;
476        }
477
478        long id = open();
479        if (id < 0) {
480            log.debug("write(): Invalid SDID");
481            return;
482        }
483
484        int[] select = new int[rank];
485        int[] start = new int[rank];
486        for (int i=0; i<rank; i++) {
487            select[i] = (int)selectedDims[i];
488            start[i] = (int)startDims[i];
489        }
490
491        int[] stride = null;
492        if (selectedStride != null) {
493            stride = new int[rank];
494            for (int i=0; i<rank; i++) {
495                stride[i] = (int)selectedStride[i];
496            }
497        }
498
499        Object tmpData = buf;
500        try {
501            if (getDatatype().isUnsigned() && unsignedConverted)
502                tmpData = convertToUnsignedC(buf);
503            // assume external data files are located in the same directory as the main file.
504            HDFLibrary.HXsetdir(getFileFormat().getParent());
505
506            HDFLibrary.SDwritedata(id, start, stride, select, tmpData);
507        }
508        catch (Exception ex) {
509            log.debug("write(): failure: ", ex);
510        }
511        finally {
512            tmpData = null;
513            close(id);
514        }
515    }
516
517    // Implementing DataFormat
518    /**
519     * Retrieves the object's metadata, such as attributes, from the file.
520     *
521     * Metadata, such as attributes, is stored in a List.
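     *
     * A sketch of listing the attribute names; the getAttributeName() accessor
     * on H4ScalarAttribute is assumed here:
     * <pre>
     * List attrs = sds.getMetadata();
     * for (int i = 0; i &lt; attrs.size(); i++) {
     *     H4ScalarAttribute attr = (H4ScalarAttribute)attrs.get(i);
     *     System.out.println(attr.getAttributeName());
     * }
     * </pre>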
522     *
523     * @return the list of metadata objects.
524     *
525     * @throws HDFException
526     *             if the metadata can not be retrieved
527     */
528    @Override
529    @SuppressWarnings({"rawtypes", "unchecked"})
530    public List getMetadata() throws HDFException {
531        if (attributeList != null) {
532            log.trace("getMetdata(): attributeList != null");
533            return attributeList;
534        }
535
536        long id = open();
537        String[] objName = {""};
538        int[] sdInfo = {0, 0, 0};
539        try {
540            // retrieve attributes of the dataset
541            int[] tmpDim = new int[HDFConstants.MAX_VAR_DIMS];
542            HDFLibrary.SDgetinfo(id, objName, tmpDim, sdInfo);
543            int n = sdInfo[2];
544
545            if ((attributeList == null) && (n>0))
546                attributeList = new Vector(n, 5);
547
548            boolean b = false;
549            String[] attrName = new String[1];
550            int[] attrInfo = {0, 0};
551            for (int i=0; i<n; i++) {
552                attrName[0] = "";
553                try {
554                    b = HDFLibrary.SDattrinfo(id, i, attrName, attrInfo);
555                    // mask off the litend bit
556                    attrInfo[0] = attrInfo[0] & (~HDFConstants.DFNT_LITEND);
557                }
558                catch (HDFException ex) {
559                    log.debug("getMetadata(): attribute[{}] SDattrinfo failure: ", i, ex);
560                    b = false;
561                }
562
563                if (!b)
564                    continue;
565
566                long[] attrDims = {attrInfo[1]};
567                H4ScalarAttribute attr = new H4ScalarAttribute(this, attrName[0], new H4Datatype(attrInfo[0]), attrDims);
568                attributeList.add(attr);
569
570                Object buf = null;
571                try {
572                    buf = H4Datatype.allocateArray(attrInfo[0], attrInfo[1]);
573                }
574                catch (OutOfMemoryError e) {
575                    log.debug("getMetadata(): out of memory: ", e);
576                    buf = null;
577                }
578
579                try {
580                    HDFLibrary.SDreadattr(id, i, buf);
581                }
582                catch (HDFException ex) {
583                    log.debug("getMetadata(): attribute[{}] SDreadattr failure: ", i, ex);
584                    buf = null;
585                }
586
587                if (buf != null) {
588                    if ((attrInfo[0] == HDFConstants.DFNT_CHAR) ||
589                        (attrInfo[0] ==  HDFConstants.DFNT_UCHAR8)) {
590                        buf = Dataset.byteToString((byte[])buf, attrInfo[1]);
591                    }
592                    else if (attrName[0].equalsIgnoreCase("fillValue") ||
593                            attrName[0].equalsIgnoreCase("_fillValue")) {
594                        fillValue = buf;
595                    }
596
597                    attr.setAttributeData(buf);
598                }
599
600            } // (int i=0; i<n; i++)
601
602            // retrieve attribute of dimension
603            // BUG !! HDFLibrary.SDgetdimstrs(dimID, argv, 80) does not return anything
604            /**
605             * for (int i=0; i< rank; i++) { int dimID = HDFLibrary.SDgetdimid(id, i); String[] argv = {" ", "
606             * ", " "}; HDFLibrary.SDgetdimstrs(dimID, argv, 80); }
607             */
608        }
609        catch (Exception ex) {
610            log.debug("getMetadata(): failure: ", ex);
611        }
612        finally {
613            close(id);
614        }
615
616        return attributeList;
617    }
618
    // Implementing DataFormat
620    /**
621     * Writes a specific piece of metadata (such as an attribute) into the file.
622     *
623     * If an HDF(4&amp;5) attribute exists in the file, this method updates its
624     * value. If the attribute does not exist in the file, it creates the
625     * attribute in the file and attaches it to the object. It will fail to
626     * write a new attribute to the object where an attribute with the same name
627     * already exists. To update the value of an existing attribute in the file,
628     * one needs to get the instance of the attribute by getMetadata(), change
629     * its values, then use writeMetadata() to write the value.
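     *
     * A sketch of updating an existing attribute; newData stands for a
     * hypothetical in-memory array matching the attribute's datatype:
     * <pre>
     * List attrs = sds.getMetadata();
     * H4ScalarAttribute attr = (H4ScalarAttribute)attrs.get(0);
     * attr.setAttributeData(newData);
     * sds.writeMetadata(attr);
     * </pre>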
630     *
631     * @param info
632     *            the metadata to write.
633     *
634     * @throws Exception
635     *             if the metadata can not be written
636     */
637    @Override
638    @SuppressWarnings({"rawtypes", "unchecked"})
639    public void writeMetadata(Object info) throws Exception {
640        // only attribute metadata is supported.
641        if (!(info instanceof Attribute)) {
642            log.debug("writeMetadata(): Object not an H4ScalarAttribute");
643            return;
644        }
645
646        try {
647            getFileFormat().writeAttribute(this, (H4ScalarAttribute)info, true);
648
649            if (attributeList == null)
650                attributeList = new Vector();
651
652            attributeList.add(info);
653            nAttributes = attributeList.size();
654        }
655        catch (Exception ex) {
656            log.trace("writeMetadata(): failure: ", ex);
657        }
658    }
659
660    /**
661     * Deletes an existing piece of metadata from this object.
662     *
663     * @param info
664     *            the metadata to delete.
665     *
666     * @throws HDFException
667     *             if the metadata can not be removed
668     */
669    @Override
670    public void removeMetadata(Object info) throws HDFException {
671        log.trace("removeMetadata(): disabled");
672    }
673
674    /**
675     * Updates an existing piece of metadata attached to this object.
676     *
677     * @param info
678     *            the metadata to update.
679     *
680     * @throws Exception
681     *             if the metadata can not be updated
682     */
683    @Override
684    public void updateMetadata(Object info) throws Exception {
685        log.trace("updateMetadata(): disabled");
686    }
687
688    // Implementing HObject
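    /**
     * Opens this SDS for access.
     *
     * @return the SDS identifier obtained from SDselect(); -1 on failure.
     */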
689    @Override
690    public long open() {
691        long id=-1;
692
693        try {
694            int index = 0;
695            int tag = (int)oid[0];
696
697            log.trace("open(): tag={}", tag);
698            if (tag == H4SDS.DFTAG_NDG_NETCDF)
699                index = (int)oid[1]; //HDFLibrary.SDidtoref(id) fails for netCDF
700            else
701                index = HDFLibrary.SDreftoindex(sdid, (int)oid[1]);
702
703            id = HDFLibrary.SDselect(sdid,index);
704        }
705        catch (HDFException ex) {
706            log.debug("open(): failure: ", ex);
707            id = -1;
708        }
709
710        return id;
711    }
712
713    // Implementing HObject
714    @Override
715    public void close(long id) {
716        try {
717            HDFLibrary.SDendaccess(id);
718        }
719        catch (HDFException ex) {
720            log.debug("close(): failure: ", ex);
721        }
722    }
723
724    /**
     * Initializes the H4SDS, such as the dimension sizes of this dataset.
726     */
727    @SuppressWarnings("deprecation")
728    @Override
729    public void init() {
730        if (inited) {
731            log.trace("init(): Already initialized");
732            return; // already called. Initialize only once
733        }
734
735        long id = open();
736        String[] objName = {""};
737        String[] dimName = {""};
738        int[] dimInfo = {0, 0, 0};
739        int[] sdInfo = {0, 0, 0};
740        boolean isUnlimited = false;
741
742        int[] idims = new int[HDFConstants.MAX_VAR_DIMS];
743        try {
744            HDFLibrary.SDgetinfo(id, objName, idims, sdInfo);
745            // mask off the litend bit
746            sdInfo[1] = sdInfo[1] & (~HDFConstants.DFNT_LITEND);
747            nAttributes = sdInfo[2];
748            rank = sdInfo[0];
749
750            if (rank <= 0) {
751                rank = 1;
752                idims[0] = 1;
753            }
754
755            isUnlimited = HDFLibrary.SDisrecord(id);
756            log.trace("init(): isUnlimited={}", isUnlimited);
757
758            datatypeID = sdInfo[1];
759            isText = ((datatypeID == HDFConstants.DFNT_CHAR) || (datatypeID == HDFConstants.DFNT_UCHAR8));
760
761            // get the dimension names
762            try {
763                dimNames = new String[rank];
764                for (int i=0; i<rank; i++) {
765                    long dimid = HDFLibrary.SDgetdimid(id, i);
766                    HDFLibrary.SDdiminfo(dimid, dimName, dimInfo);
767                    dimNames[i] = dimName[0];
768                }
769            }
770            catch (Exception ex) {
771                log.debug("init(): get the dimension names: ", ex);
772            }
773
774            // get compression information
775            try {
776                HDFCompInfo compInfo = new HDFCompInfo();
777                HDFLibrary.SDgetcompinfo(id, compInfo);
778
779                compression.setLength(0);
780
781                if (compInfo.ctype == HDFConstants.COMP_CODE_DEFLATE) {
782                    HDFDeflateCompInfo comp = new HDFDeflateCompInfo();
783                    HDFLibrary.SDgetcompinfo(id, comp);
784                    compression.append("GZIP(level=").append(comp.level).append(")");
785                }
786                else if (compInfo.ctype == HDFConstants.COMP_CODE_SZIP) {
787                    HDFSZIPCompInfo comp = new HDFSZIPCompInfo();
788                    HDFLibrary.SDgetcompinfo(id, comp);
789                    compression.append("SZIP(bits_per_pixel=").append(comp.bits_per_pixel).append(",options_mask=")
790                            .append(comp.options_mask).append(",pixels=").append(comp.pixels).append(",pixels_per_block=")
791                            .append(comp.pixels_per_block).append(",pixels_per_scanline=").append(comp.pixels_per_scanline).append(")");
792                }
793                else if (compInfo.ctype == HDFConstants.COMP_CODE_JPEG) {
794                    HDFJPEGCompInfo comp = new HDFJPEGCompInfo();
795                    HDFLibrary.SDgetcompinfo(id, comp);
796                    compression.append("JPEG(quality=").append(comp.quality).append(",options_mask=")
797                            .append(",force_baseline=").append(comp.force_baseline).append(")");
798                }
799                else if (compInfo.ctype == HDFConstants.COMP_CODE_SKPHUFF) {
800                    HDFSKPHUFFCompInfo comp = new HDFSKPHUFFCompInfo();
801                    HDFLibrary.SDgetcompinfo(id, comp);
802                    compression.append("SKPHUFF(skp_size=").append(comp.skp_size).append(")");
803                }
804                else if (compInfo.ctype == HDFConstants.COMP_CODE_RLE) {
805                    compression.append("RLE");
806                }
807                else if (compInfo.ctype == HDFConstants.COMP_CODE_NBIT) {
808                    HDFNBITCompInfo comp = new HDFNBITCompInfo();
809                    HDFLibrary.SDgetcompinfo(id, comp);
810                    compression.append("NBIT(nt=").append(comp.nt).append(",bit_len=").append(comp.bit_len)
811                            .append(",ctype=").append(comp.ctype).append(",fill_one=").append(comp.fill_one)
812                            .append(",sign_ext=").append(comp.sign_ext).append(",start_bit=").append(comp.start_bit).append(")");
813                }
814
815                if (compression.length() == 0)
816                    compression.append("NONE");
817            }
818            catch (Exception ex) {
819                log.debug("init(): get compression information failure: ", ex);
820            }
821
822            // get chunk information
823            try {
824                HDFChunkInfo chunkInfo = new HDFChunkInfo();
825                int[] cflag = {HDFConstants.HDF_NONE};
826
827                try {
828                    HDFLibrary.SDgetchunkinfo(id, chunkInfo, cflag);
829                }
                catch (Exception ex) {
                    log.debug("init(): SDgetchunkinfo failure: ", ex);
                }
833
834                storageLayout.setLength(0);
835
836                if (cflag[0] == HDFConstants.HDF_NONE) {
837                    chunkSize = null;
838                    storageLayout.append("NONE");
839                }
840                else {
841                    chunkSize = new long[rank];
842                    for (int i=0; i<rank; i++)
843                        chunkSize[i] = chunkInfo.chunk_lengths[i];
844                    storageLayout.append("CHUNKED: ").append(chunkSize[0]);
845                    for (int i = 1; i < rank; i++)
846                        storageLayout.append(" X ").append(chunkSize[i]);
847                }
848            }
849            catch (Exception ex) {
850                log.debug("init(): get chunk information failure: ", ex);
851            }
852
853            inited = true;
854        }
855        catch (HDFException ex) {
856            log.debug("init(): failure: ", ex);
857        }
858        finally {
859            close(id);
860        }
861
862        dims = new long[rank];
863        maxDims = new long[rank];
864        startDims = new long[rank];
865        selectedDims = new long[rank];
866
867        for (int i=0; i<rank; i++) {
868            startDims[i] = 0;
869            selectedDims[i] = 1;
870            dims[i] = maxDims[i] = idims[i];
871        }
872
873        if (isUnlimited)
874            maxDims[0] = -1;
875
876        selectedIndex[0] = 0;
877        selectedIndex[1] = 1;
878        selectedIndex[2] = 2;
879
        // select only two dimensions at a time
881        if (rank == 1)
882            selectedDims[0] = dims[0];
883
884        if (rank > 1) {
885            selectedDims[0] = dims[0];
886            if (isText)
887                selectedDims[1] = 1;
888            else
889                selectedDims[1] = dims[1];
890        }
891    }
892
893    /**
894     * Creates a new dataset.
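     *
     * A sketch of creating a 2D 32-bit float SDS under the root group; the file
     * handle, dataset name and sizes are illustrative:
     * <pre>
     * long[] dims = {100, 50};
     * Datatype dtype = file.createDatatype(Datatype.CLASS_FLOAT, 4, Datatype.NATIVE, Datatype.NATIVE);
     * H4SDS sds = H4SDS.create("mySDS", (Group)file.getRootObject(), dtype,
     *                          dims, null, null, 0, null, null);
     * </pre>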
895     *
896     * @param name the name of the dataset to create.
897     * @param pgroup the parent group of the new dataset.
898     * @param type the datatype of the dataset.
899     * @param dims the dimension size of the dataset.
900     * @param maxdims the max dimension size of the dataset.
901     * @param chunks the chunk size of the dataset.
902     * @param gzip the level of the gzip compression.
903     * @param fillValue the default value.
904     * @param data the array of data values.
905     *
906     * @return the new dataset if successful. Otherwise returns null.
907     *
908     * @throws Exception if the dataset can not be created
909     */
910    public static H4SDS create(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims,
911            long[] chunks, int gzip, Object fillValue, Object data) throws Exception {
912        H4SDS dataset = null;
913        if ((pgroup == null) || (name == null)|| (dims == null)) {
914            log.trace("create(): Parent group, name or dims is null");
915            return null;
916        }
917
918        H4File file = (H4File)pgroup.getFileFormat();
919
920        if (file == null) {
921            log.trace("create(): Parent group FileFormat is null");
922            return null;
923        }
924
925        String path = HObject.SEPARATOR;
926        if (!pgroup.isRoot())
927            path = pgroup.getPath()+pgroup.getName()+HObject.SEPARATOR;
928        // prepare the dataspace
929        int rank = dims.length;
930        int[] idims = new int[rank];
931        int[] start = new int[rank];
932        for (int i=0; i<rank; i++) {
933            idims[i] = (int)dims[i];
934            start[i] = 0;
935        }
936
937        // only the first element of the SDcreate parameter dim_sizes (i.e.,
938        // the dimension of the lowest rank or the slowest-changing dimension)
939        // can be assigned the value SD_UNLIMITED (or 0) to make the first
940        // dimension unlimited.
941        if ((maxdims != null) && (maxdims[0]<=0))
942            idims[0] = 0; // set to unlimited dimension.
943
944        int[] ichunks = null;
945        if (chunks != null) {
946            ichunks = new int[rank];
947            for (int i=0; i<rank; i++)
948                ichunks[i] = (int)chunks[i];
949        }
950
951        // unlimited cannot be used with chunking or compression for HDF 4.2.6 or earlier.
952        if (idims[0] == 0 && (ichunks != null || gzip>0)) {
953            log.debug("create(): Unlimited cannot be used with chunking or compression");
954            throw new HDFException("Unlimited cannot be used with chunking or compression");
955        }
956
957        long sdid = (file).getSDAccessID();
958        long sdsid = -1;
959        long vgid = -1;
960        long tid = type.createNative();
961
        if (tid >= 0) {
963            try {
                sdsid = HDFLibrary.SDcreate(sdid, name, tid, rank, idims);
                // set the fill value to zero (a new Java byte array is already zero-filled)
                int vsize = HDFLibrary.DFKNTsize(tid);
                byte[] fill = new byte[vsize];
                HDFLibrary.SDsetfillvalue(sdsid, fill);
971
                // when we create a new dataset with an unlimited dimension,
                // we have to write some data into the dataset, otherwise
                // the current dataset has a zero dimension size.
975            }
976            catch (Exception ex) {
977                log.debug("create(): failure: ", ex);
978                throw (ex);
979            }
980        }
981
982        if (sdsid < 0) {
983            log.debug("create(): Dataset creation failed");
984            throw (new HDFException("Unable to create the new dataset."));
985        }
986
987        HDFDeflateCompInfo compInfo = null;
988        if (gzip > 0) {
989            // set compression
990            compInfo = new HDFDeflateCompInfo();
991            compInfo.level = gzip;
992            if (chunks == null)
993                HDFLibrary.SDsetcompress(sdsid, HDFConstants.COMP_CODE_DEFLATE, compInfo);
994        }
995
996        if (chunks != null) {
997            // set chunk
998            HDFChunkInfo chunkInfo = new HDFChunkInfo(ichunks);
999            int flag = HDFConstants.HDF_CHUNK;
1000
1001            if (gzip > 0) {
1002                flag = HDFConstants.HDF_CHUNK | HDFConstants.HDF_COMP;
1003                chunkInfo = new HDFChunkInfo(ichunks, HDFConstants.COMP_CODE_DEFLATE, compInfo);
1004            }
1005
            try {
                HDFLibrary.SDsetchunk(sdsid, chunkInfo, flag);
            }
            catch (Exception err) {
                log.debug("create(): SDsetchunk failure: ", err);
                throw new HDFException("SDsetchunk failed.");
            }
1014        }
1015
1016        if ((sdsid > 0) && (data != null))
1017            HDFLibrary.SDwritedata(sdsid, start, null, idims, data);
1018
1019        int ref = HDFLibrary.SDidtoref(sdsid);
1020
1021        if (!pgroup.isRoot()) {
1022            // add the dataset to the parent group
1023            vgid = pgroup.open();
1024            if (vgid < 0) {
1025                if (sdsid > 0)
1026                    HDFLibrary.SDendaccess(sdsid);
1027                log.debug("create(): Invalid Parent Group ID");
1028                throw (new HDFException("Unable to open the parent group."));
1029            }
1030
1031            HDFLibrary.Vaddtagref(vgid, HDFConstants.DFTAG_NDG, ref);
1032
1033            pgroup.close(vgid);
1034        }
1035
1036        try {
1037            if (sdsid > 0)
1038                HDFLibrary.SDendaccess(sdsid);
1039        }
1040        catch (Exception ex) {
1041            log.debug("create(): SDendaccess failure: ", ex);
1042        }
1043
1044        long[] oid = {HDFConstants.DFTAG_NDG, ref};
1045        dataset = new H4SDS(file, name, path, oid);
1046
        pgroup.addToMemberList(dataset);
1049
1050        return dataset;
1051    }
1052
1053    /**
1054     * Creates a new dataset.
1055     *
1056     * @param name the name of the dataset to create.
1057     * @param pgroup the parent group of the new dataset.
1058     * @param type the datatype of the dataset.
1059     * @param dims the dimension size of the dataset.
1060     * @param maxdims the max dimension size of the dataset.
1061     * @param chunks the chunk size of the dataset.
1062     * @param gzip the level of the gzip compression.
1063     * @param data the array of data values.
1064     *
1065     * @return the new dataset if successful. Otherwise returns null.
1066     *
1067     * @throws Exception if the dataset can not be created
1068     */
1069    public static H4SDS create(String name, Group pgroup, Datatype type,
1070            long[] dims, long[] maxdims, long[] chunks, int gzip, Object data) throws Exception {
1071        return create(name, pgroup, type, dims, maxdims, chunks, gzip, null, data);
1072    }
1073
1074    /**
1075     * copy attributes from one SDS to another SDS
1076     */
1077    private void copyAttribute(long srcdid, long dstdid) {
1078        log.trace("copyAttribute(): start: srcdid={} dstdid={}", srcdid, dstdid);
1079        try {
1080            String[] objName = {""};
1081            int[] sdInfo = {0, 0, 0};
1082            int[] tmpDim = new int[HDFConstants.MAX_VAR_DIMS];
1083            HDFLibrary.SDgetinfo(srcdid, objName, tmpDim, sdInfo);
1084            int numberOfAttributes = sdInfo[2];
1085            log.trace("copyAttribute(): numberOfAttributes={}", numberOfAttributes);
1086
1087            boolean b = false;
1088            String[] attrName = new String[1];
1089            int[] attrInfo = {0, 0};
1090            for (int i=0; i<numberOfAttributes; i++) {
1091                attrName[0] = "";
1092                try {
1093                    b = HDFLibrary.SDattrinfo(srcdid, i, attrName, attrInfo);
1094                }
1095                catch (HDFException ex) {
1096                    log.debug("copyAttribute(): attribute[{}] SDattrinfo failure: ", i, ex);
1097                    b = false;
1098                }
1099
1100                if (!b)
1101                    continue;
1102
1103                // read attribute data from source dataset
1104                byte[] attrBuff = new byte[attrInfo[1] * HDFLibrary.DFKNTsize(attrInfo[0])];
1105                try {
1106                    HDFLibrary.SDreadattr(srcdid, i, attrBuff);
1107                }
1108                catch (HDFException ex) {
1109                    log.debug("copyAttribute(): attribute[{}] SDreadattr failure: ", i, ex);
1110                    attrBuff = null;
1111                }
1112
1113                if (attrBuff == null) {
1114                    log.debug("copyAttribute(): attrBuff[{}] is null", i);
1115                    continue;
1116                }
1117
1118                // attach attribute to the destination dataset
1119                HDFLibrary.SDsetattr(dstdid, attrName[0], attrInfo[0], attrInfo[1], attrBuff);
1120            } // (int i=0; i<numberOfAttributes; i++)
1121        }
1122        catch (Exception ex) {
1123            log.debug("copyAttribute(): failure: ", ex);
1124        }
1125    }
1126
    // Implementing DataFormat
1128    /**
1129     * Retrieves the object's metadata, such as attributes, from the file.
1130     *
1131     * Metadata, such as attributes, is stored in a List.
1132     *
1133     * @param attrPropList
1134     *             the list of properties to get
1135     *
1136     * @return the list of metadata objects.
1137     *
     * @throws UnsupportedOperationException
     *             always, since this operation is not supported for HDF4 objects
1140     */
1141    @SuppressWarnings("rawtypes")
1142    public List getMetadata(int... attrPropList) throws Exception {
1143        throw new UnsupportedOperationException("getMetadata(int... attrPropList) is not supported");
1144    }
1145}