/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the COPYING file, which can be found  *
 * at the root of the source code distribution tree,                         *
 * or in https://www.hdfgroup.org/licenses.                                  *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.h4;

import java.util.List;
import java.util.Vector;

import hdf.hdflib.HDFChunkInfo;
import hdf.hdflib.HDFCompInfo;
import hdf.hdflib.HDFConstants;
import hdf.hdflib.HDFDeflateCompInfo;
import hdf.hdflib.HDFException;
import hdf.hdflib.HDFJPEGCompInfo;
import hdf.hdflib.HDFLibrary;
import hdf.hdflib.HDFNBITCompInfo;
import hdf.hdflib.HDFSKPHUFFCompInfo;
import hdf.hdflib.HDFSZIPCompInfo;
import hdf.object.Attribute;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.MetaDataContainer;
import hdf.object.ScalarDS;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * H4SDS describes HDF4 Scientific Data Sets (SDS) and operations performed on
 * the SDS. An SDS is a group of data structures used to store and describe
 * multidimensional arrays of scientific data.
 *
 * The data contained in an SDS array has a data type associated with it. The
 * standard data types supported by the SD interface include 32- and 64-bit
 * floating-point numbers, 8-, 16- and 32-bit signed integers, 8-, 16- and
 * 32-bit unsigned integers, and 8-bit characters.
 *
 * <b>How to Select a Subset</b>
 *
 * Dataset defines APIs for reading, writing and subsetting a dataset. No function
 * is defined to select a subset of a data array. The selection is done in an implicit
 * way. Function calls to dimension information such as getSelectedDims() return an array
 * of dimension values, which is a reference to the array in the dataset object.
 * Changes to the array outside the dataset object directly change the values of
 * the array in the dataset object, much like a pointer in C.
 *
 * The following is an example of how to make a subset. In the example, the dataset
 * is a 4-dimensional array of size [200][100][50][10], i.e.,
 * dims[0]=200; dims[1]=100; dims[2]=50; dims[3]=10; <br>
 * We want to select every other data point in dims[1] and dims[2].
 * <pre>
     int rank = dataset.getRank();   // number of dimensions of the dataset
     long[] dims = dataset.getDims(); // the dimension sizes of the dataset
     long[] selected = dataset.getSelectedDims(); // the selected size of the dataset
     long[] start = dataset.getStartDims(); // the offset of the selection
     long[] stride = dataset.getStride(); // the stride of the dataset
     int[]  selectedIndex = dataset.getSelectedIndex(); // the selected dimensions for display

     // select dim1 and dim2 as 2D data for display, and slice through dim0
     selectedIndex[0] = 1;
     selectedIndex[1] = 2;
     selectedIndex[2] = 0;

     // reset the selection arrays
     for (int i=0; i&lt;rank; i++) {
         start[i] = 0;
         selected[i] = 1;
         stride[i] = 1;
     }

     // set stride to 2 on dim1 and dim2 so that every other data point is selected.
     stride[1] = 2;
     stride[2] = 2;

     // set the selection size of dim1 and dim2
     selected[1] = dims[1]/stride[1];
     selected[2] = dims[2]/stride[2];

     // when dataset.read() is called, the selection above will be used since
     // the dimension arrays are passed by reference. Changes to these arrays
     // outside the dataset object directly change the values of these arrays
     // in the dataset object.

 * </pre>
 *
 * @version 1.1 9/4/2007
 * @author Peter X. Cao
 */
public class H4SDS extends ScalarDS implements MetaDataContainer {
    private static final long serialVersionUID = 2557157923292438696L;

    private static final Logger log = LoggerFactory.getLogger(H4SDS.class);

    /**
     * Tag for netCDF datasets.
     * The HDF4 library supports netCDF version 2.3.2; only the SDS APIs are supported.
     */
    // magic number for netCDF: "C(67) D(68) F(70) '\001'"
    public static final int DFTAG_NDG_NETCDF = 67687001;

    /**
     * The list of attributes of this data object. Members of the list are
     * instances of Attribute.
     */
    @SuppressWarnings("rawtypes")
    private List attributeList;

    /**
     * The SDS interface identifier obtained from SDstart(filename, access)
     */
    private long sdid;

    /** the datatype identifier */
    private long datatypeID = -1;

    /** the number of attributes */
    private int nAttributes = -1;

    /**
     * Creates an H4SDS object with specific name and path.
     *
     * @param theFile
     *            the HDF file.
     * @param name
     *            the name of this H4SDS.
     * @param path
     *            the full path of this H4SDS.
     */
    public H4SDS(FileFormat theFile, String name, String path) { this(theFile, name, path, null); }

    /**
     * Creates an H4SDS object with specific name, path and oid.
     *
     * @param theFile
     *            the HDF file.
     * @param name
     *            the name of this H4SDS.
     * @param path
     *            the full path of this H4SDS.
     * @param oid
     *            the unique identifier of this data object.
     */
    @SuppressWarnings("deprecation")
    public H4SDS(FileFormat theFile, String name, String path, long[] oid)
    {
        super(theFile, name, path, oid);
        unsignedConverted = false;
        sdid              = ((H4File)getFileFormat()).getSDAccessID();
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.DataFormat#hasAttribute()
     */
    @Override
    public boolean hasAttribute()
    {
        if (nAttributes < 0) {
            sdid = ((H4File)getFileFormat()).getSDAccessID();

            long id = open();

            if (id >= 0) {
                try { // retrieve attributes of the dataset
                    String[] objName = {""};
                    int[] sdInfo     = {0, 0, 0};
                    int[] tmpDim     = new int[HDFConstants.MAX_VAR_DIMS];
                    HDFLibrary.SDgetinfo(id, objName, tmpDim, sdInfo);
                    nAttributes = sdInfo[2];
                }
                catch (Exception ex) {
                    log.debug("hasAttribute(): failure: ", ex);
                    nAttributes = 0;
                }

                log.trace("hasAttribute(): nAttributes={}", nAttributes);

                close(id);
            }
        }

        return (nAttributes > 0);
    }

    // implementing Dataset
    /**
     * Returns the datatype of the data object.
     *
     * @return the datatype of the data object.
     */
    @Override
    public Datatype getDatatype()
    {
        if (!inited)
            init();

        if (datatype == null) {
            try {
                datatype = new H4Datatype(datatypeID);
            }
            catch (Exception ex) {
                log.debug("getDatatype(): failed to create datatype: ", ex);
                datatype = null;
            }
        }

        return datatype;
    }

    // TODO: implementing Dataset
    @Override
    public Dataset copy(Group pgroup, String dname, long[] dims, Object buff) throws Exception
    {
        log.trace("copy(): start: parentGroup={} datasetName={}", pgroup, dname);

        Dataset dataset = null;
        long srcdid     = -1;
        long dstdid     = -1;
        long tid        = -1;
        int size        = 1;
        int theRank     = 2;
        String path     = null;
        int[] count     = null;
        int[] start     = null;

        if (pgroup == null) {
            log.debug("copy(): Parent group is null");
            return null;
        }

        if (dname == null)
            dname = getName();

        if (pgroup.isRoot())
            path = HObject.SEPARATOR;
        else
            path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR;
        log.trace("copy(): path={}", path);

        srcdid = open();
        if (srcdid < 0) {
            log.debug("copy(): Invalid source SDID");
            return null;
        }

        if (dims == null) {
            if (!isInited())
                init();

            theRank = getRank();

            dims = getDims();
        }
        else {
            theRank = dims.length;
        }

        start = new int[theRank];
        count = new int[theRank];
        for (int i = 0; i < theRank; i++) {
            start[i] = 0;
            count[i] = (int)dims[i];
            size *= count[i];
        }
        log.trace("copy(): theRank={} with size={}", theRank, size);

        // create the new dataset and attach it to the parent group
        tid = datatypeID;
        dstdid =
            HDFLibrary.SDcreate(((H4File)pgroup.getFileFormat()).getSDAccessID(), dname, tid, theRank, count);
        if (dstdid < 0) {
            log.debug("copy(): Invalid dest SDID");
            return null;
        }

        int ref = HDFLibrary.SDidtoref(dstdid);
        if (!pgroup.isRoot()) {
            long vgid = pgroup.open();
            HDFLibrary.Vaddtagref(vgid, HDFConstants.DFTAG_NDG, ref);
            pgroup.close(vgid);
        }

        // copy attributes from one object to the new object
        log.trace("copy(): copy attributes");
        copyAttribute(srcdid, dstdid);

        // read data from the source dataset
        log.trace("copy(): read data from the source dataset");
        if (buff == null) {
            buff = new byte[size * HDFLibrary.DFKNTsize(tid)];
            HDFLibrary.SDreaddata(srcdid, start, null, count, buff);
        }

        // write the data into the destination dataset
        log.trace("copy(): write the data into the destination dataset");
        HDFLibrary.SDwritedata(dstdid, start, null, count, buff);

        long[] oid = {HDFConstants.DFTAG_NDG, ref};
        dataset    = new H4SDS(pgroup.getFileFormat(), dname, path, oid);

        pgroup.addToMemberList(dataset);

        close(srcdid);

        try {
            HDFLibrary.SDendaccess(dstdid);
        }
        catch (HDFException ex) {
            log.debug("copy(): SDendaccess failure: ", ex);
        }

        return dataset;
    }

    // Implementing Dataset
    @Override
    public byte[] readBytes() throws HDFException
    {
        byte[] theData = null;

        if (!isInited())
            init();

        long id = open();
        if (id < 0) {
            log.debug("readBytes(): Invalid SDID");
            return null;
        }

        int datasize = 1;
        int[] select = new int[rank];
        int[] start  = new int[rank];
        for (int i = 0; i < rank; i++) {
            datasize *= (int)selectedDims[i];
            select[i] = (int)selectedDims[i];
            start[i]  = (int)startDims[i];
        }

        int[] stride = null;
        if (selectedStride != null) {
            stride = new int[rank];
            for (int i = 0; i < rank; i++) {
                stride[i] = (int)selectedStride[i];
            }
        }

        try {
            int size = HDFLibrary.DFKNTsize(datatypeID) * datasize;
            theData  = new byte[size];
            HDFLibrary.SDreaddata(id, start, stride, select, theData);
        }
        catch (Exception ex) {
            log.debug("readBytes(): failure: ", ex);
        }
        finally {
            close(id);
        }

        return theData;
    }

    // Implementing DataFormat
    /**
     * Reads the data from file.
     *
     * read() reads the data from file to a memory buffer and returns the memory
     * buffer. The dataset object does not hold the memory buffer. To store the
     * memory buffer in the dataset object, one must call getData().
     *
     * By default, the whole dataset is read into memory. Users can also select
     * a subset to read. Subsetting is done in an implicit way.
     *
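     * A minimal sketch of reading a whole dataset; h4File is assumed to be an
     * open H4File, and the dataset path and the cast to int[] are illustrative
     * assumptions, not part of this API:
     * <pre>
     H4SDS sds = (H4SDS) h4File.get("/mySDS"); // look up the dataset by path
     sds.init();                               // set up the default selection
     int[] values = (int[]) sds.read();        // read the current selection
     * </pre>
     *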
     * @return the data read from file.
     *
     * @see #getData()
     *
     * @throws HDFException
     *             if object can not be read
     * @throws OutOfMemoryError
     *             if memory is exhausted
     */
    @Override
    public Object read() throws HDFException, OutOfMemoryError
    {
        Object theData = null;

        if (!isInited())
            init();

        long id = open();
        if (id < 0) {
            log.debug("read(): Invalid SDID");
            return null;
        }

        int datasize = 1;
        int[] select = new int[rank];
        int[] start  = new int[rank];
        for (int i = 0; i < rank; i++) {
            datasize *= (int)selectedDims[i];
            select[i] = (int)selectedDims[i];
            start[i]  = (int)startDims[i];
        }

        int[] stride = null;
        if (selectedStride != null) {
            stride = new int[rank];
            for (int i = 0; i < rank; i++) {
                stride[i] = (int)selectedStride[i];
            }
        }

        try {
            theData = H4Datatype.allocateArray(datatypeID, datasize);

            if (theData != null) {
                // assume external data files are located in the same directory as the main file.
                HDFLibrary.HXsetdir(getFileFormat().getParent());

                HDFLibrary.SDreaddata(id, start, stride, select, theData);

                if (isText)
                    theData = byteToString((byte[])theData, select[0]);
            }
        }
        catch (Exception ex) {
            log.debug("read(): failure: ", ex);
        }
        finally {
            close(id);
        }

        if (fillValue == null && isImageDisplay) {
            try {
                getMetadata(); // need to set fillValue for images
            }
            catch (Exception ex) {
                log.debug("read(): getMetadata failure: ", ex);
            }
        }

        if ((rank > 1) && (selectedIndex[0] > selectedIndex[1]))
            isDefaultImageOrder = false;
        else
            isDefaultImageOrder = true;

        log.trace("read(): isDefaultImageOrder={}", isDefaultImageOrder);
        return theData;
    }

    // Implementing DataFormat
    /**
     * Writes a memory buffer to the object in the file.
     *
     * @param buf
     *            the data to write
     *
     * @throws HDFException
     *             if data can not be written
     */
    @SuppressWarnings("deprecation")
    @Override
    public void write(Object buf) throws HDFException
    {
        if (buf == null) {
            log.debug("write(): Object is null");
            return;
        }

        long id = open();
        if (id < 0) {
            log.debug("write(): Invalid SDID");
            return;
        }

        int[] select = new int[rank];
        int[] start  = new int[rank];
        for (int i = 0; i < rank; i++) {
            select[i] = (int)selectedDims[i];
            start[i]  = (int)startDims[i];
        }

        int[] stride = null;
        if (selectedStride != null) {
            stride = new int[rank];
            for (int i = 0; i < rank; i++) {
                stride[i] = (int)selectedStride[i];
            }
        }

        Object tmpData = buf;
        try {
            if (getDatatype().isUnsigned() && unsignedConverted)
                tmpData = convertToUnsignedC(buf);
            // assume external data files are located in the same directory as the main file.
            HDFLibrary.HXsetdir(getFileFormat().getParent());

            HDFLibrary.SDwritedata(id, start, stride, select, tmpData);
        }
        catch (Exception ex) {
            log.debug("write(): failure: ", ex);
        }
        finally {
            tmpData = null;
            close(id);
        }
    }

    // Implementing DataFormat
    /**
     * Retrieves the object's metadata, such as attributes, from the file.
     *
     * Metadata, such as attributes, is stored in a List.
     *
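     * A minimal sketch of listing the attributes of an H4SDS; printing the
     * attribute names is illustrative:
     * <pre>
     List attrList = sds.getMetadata();
     for (int i = 0; i &lt; attrList.size(); i++) {
         H4ScalarAttribute attr = (H4ScalarAttribute) attrList.get(i);
         System.out.println(attr.getAttributeName());
     }
     * </pre>
     *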
     * @return the list of metadata objects.
     *
     * @throws HDFException
     *             if the metadata can not be retrieved
     */
    @Override
    @SuppressWarnings({"rawtypes", "unchecked"})
    public List getMetadata() throws HDFException
    {
        if (attributeList != null) {
            log.trace("getMetadata(): attributeList != null");
            return attributeList;
        }

        long id          = open();
        String[] objName = {""};
        int[] sdInfo     = {0, 0, 0};
        try {
            // retrieve attributes of the dataset
            int[] tmpDim = new int[HDFConstants.MAX_VAR_DIMS];
            HDFLibrary.SDgetinfo(id, objName, tmpDim, sdInfo);
            int n = sdInfo[2];

            if ((attributeList == null) && (n > 0))
                attributeList = new Vector(n, 5);

            boolean b         = false;
            String[] attrName = new String[1];
            int[] attrInfo    = {0, 0};
            for (int i = 0; i < n; i++) {
                attrName[0] = "";
                try {
                    b = HDFLibrary.SDattrinfo(id, i, attrName, attrInfo);
                    // mask off the litend bit
                    attrInfo[0] = attrInfo[0] & (~HDFConstants.DFNT_LITEND);
                }
                catch (HDFException ex) {
                    log.debug("getMetadata(): attribute[{}] SDattrinfo failure: ", i, ex);
                    b = false;
                }

                if (!b)
                    continue;

                long[] attrDims = {attrInfo[1]};
                H4ScalarAttribute attr =
                    new H4ScalarAttribute(this, attrName[0], new H4Datatype(attrInfo[0]), attrDims);
                attributeList.add(attr);

                Object buf = null;
                try {
                    buf = H4Datatype.allocateArray(attrInfo[0], attrInfo[1]);
                }
                catch (OutOfMemoryError e) {
                    log.debug("getMetadata(): out of memory: ", e);
                    buf = null;
                }

                try {
                    HDFLibrary.SDreadattr(id, i, buf);
                }
                catch (HDFException ex) {
                    log.debug("getMetadata(): attribute[{}] SDreadattr failure: ", i, ex);
                    buf = null;
                }

                if (buf != null) {
                    if ((attrInfo[0] == HDFConstants.DFNT_CHAR) ||
                        (attrInfo[0] == HDFConstants.DFNT_UCHAR8)) {
                        buf = Dataset.byteToString((byte[])buf, attrInfo[1]);
                    }
                    else if (attrName[0].equalsIgnoreCase("fillValue") ||
                             attrName[0].equalsIgnoreCase("_fillValue")) {
                        fillValue = buf;
                    }

                    attr.setAttributeData(buf);
                }

            } // (int i=0; i<n; i++)

            // retrieve attribute of dimension
            // BUG !! HDFLibrary.SDgetdimstrs(dimID, argv, 80) does not return anything
            /**
             * for (int i=0; i< rank; i++) { int dimID = HDFLibrary.SDgetdimid(id, i); String[] argv = {" ", "
             * ", " "}; HDFLibrary.SDgetdimstrs(dimID, argv, 80); }
             */
        }
        catch (Exception ex) {
            log.debug("getMetadata(): failure: ", ex);
        }
        finally {
            close(id);
        }

        return attributeList;
    }

    // TODO: implementing DataFormat
    /**
     * Writes a specific piece of metadata (such as an attribute) into the file.
     *
     * If an HDF(4&amp;5) attribute exists in the file, this method updates its
     * value. If the attribute does not exist in the file, it creates the
     * attribute in the file and attaches it to the object. It will fail to
     * write a new attribute to the object where an attribute with the same name
     * already exists. To update the value of an existing attribute in the file,
     * one needs to get the instance of the attribute by getMetadata(), change
     * its value, then use writeMetadata() to write the new value back.
     *
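     * A minimal sketch of updating an existing attribute; the attribute index
     * and the new value are illustrative assumptions:
     * <pre>
     List attrList = sds.getMetadata();
     H4ScalarAttribute attr = (H4ScalarAttribute) attrList.get(0);
     attr.setAttributeData(new int[] {10}); // change the value in memory
     sds.writeMetadata(attr);               // write the new value to the file
     * </pre>
     *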
     * @param info
     *            the metadata to write.
     *
     * @throws Exception
     *             if the metadata can not be written
     */
    @Override
    @SuppressWarnings({"rawtypes", "unchecked"})
    public void writeMetadata(Object info) throws Exception
    {
        // only attribute metadata is supported.
        if (!(info instanceof Attribute)) {
            log.debug("writeMetadata(): Object not an H4ScalarAttribute");
            return;
        }

        try {
            getFileFormat().writeAttribute(this, (H4ScalarAttribute)info, true);

            if (attributeList == null)
                attributeList = new Vector();

            attributeList.add(info);
            nAttributes = attributeList.size();
        }
        catch (Exception ex) {
            log.trace("writeMetadata(): failure: ", ex);
        }
    }

    /**
     * Deletes an existing piece of metadata from this object.
     *
     * @param info
     *            the metadata to delete.
     *
     * @throws HDFException
     *             if the metadata can not be removed
     */
    @Override
    public void removeMetadata(Object info) throws HDFException
    {
        log.trace("removeMetadata(): disabled");
    }

    /**
     * Updates an existing piece of metadata attached to this object.
     *
     * @param info
     *            the metadata to update.
     *
     * @throws Exception
     *             if the metadata can not be updated
     */
    @Override
    public void updateMetadata(Object info) throws Exception
    {
        log.trace("updateMetadata(): disabled");
    }

    // Implementing HObject
    @Override
    public long open()
    {
        long id = -1;

        try {
            int index = 0;
            int tag   = (int)oid[0];

            log.trace("open(): tag={}", tag);
            if (tag == H4SDS.DFTAG_NDG_NETCDF)
                index = (int)oid[1]; // HDFLibrary.SDidtoref(id) fails for netCDF
            else
                index = HDFLibrary.SDreftoindex(sdid, (int)oid[1]);

            id = HDFLibrary.SDselect(sdid, index);
        }
        catch (HDFException ex) {
            log.debug("open(): failure: ", ex);
            id = -1;
        }

        return id;
    }

    // Implementing HObject
    @Override
    public void close(long id)
    {
        try {
            HDFLibrary.SDendaccess(id);
        }
        catch (HDFException ex) {
            log.debug("close(): failure: ", ex);
        }
    }

    /**
     * Initializes the H4SDS, such as the dimension sizes of this dataset.
     */
    @SuppressWarnings("deprecation")
    @Override
    public void init()
    {
        if (inited) {
            log.trace("init(): Already initialized");
            return; // already called. Initialize only once
        }

        long id             = open();
        String[] objName    = {""};
        String[] dimName    = {""};
        int[] dimInfo       = {0, 0, 0};
        int[] sdInfo        = {0, 0, 0};
        boolean isUnlimited = false;

        int[] idims = new int[HDFConstants.MAX_VAR_DIMS];
        try {
            HDFLibrary.SDgetinfo(id, objName, idims, sdInfo);
            // mask off the litend bit
            sdInfo[1]   = sdInfo[1] & (~HDFConstants.DFNT_LITEND);
            nAttributes = sdInfo[2];
            rank        = sdInfo[0];

            if (rank <= 0) {
                rank     = 1;
                idims[0] = 1;
            }

            isUnlimited = HDFLibrary.SDisrecord(id);
            log.trace("init(): isUnlimited={}", isUnlimited);

            datatypeID = sdInfo[1];
            isText     = ((datatypeID == HDFConstants.DFNT_CHAR) || (datatypeID == HDFConstants.DFNT_UCHAR8));

            // get the dimension names
            try {
                dimNames = new String[rank];
                for (int i = 0; i < rank; i++) {
                    long dimid = HDFLibrary.SDgetdimid(id, i);
                    HDFLibrary.SDdiminfo(dimid, dimName, dimInfo);
                    dimNames[i] = dimName[0];
                }
            }
            catch (Exception ex) {
                log.debug("init(): get the dimension names: ", ex);
            }

            // get compression information
            try {
                HDFCompInfo compInfo = new HDFCompInfo();
                HDFLibrary.SDgetcompinfo(id, compInfo);

                compression.setLength(0);

                if (compInfo.ctype == HDFConstants.COMP_CODE_DEFLATE) {
                    HDFDeflateCompInfo comp = new HDFDeflateCompInfo();
                    HDFLibrary.SDgetcompinfo(id, comp);
                    compression.append("GZIP(level=").append(comp.level).append(")");
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_SZIP) {
                    HDFSZIPCompInfo comp = new HDFSZIPCompInfo();
                    HDFLibrary.SDgetcompinfo(id, comp);
                    compression.append("SZIP(bits_per_pixel=")
                        .append(comp.bits_per_pixel)
                        .append(",options_mask=")
                        .append(comp.options_mask)
                        .append(",pixels=")
                        .append(comp.pixels)
                        .append(",pixels_per_block=")
                        .append(comp.pixels_per_block)
                        .append(",pixels_per_scanline=")
                        .append(comp.pixels_per_scanline)
                        .append(")");
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_JPEG) {
                    HDFJPEGCompInfo comp = new HDFJPEGCompInfo();
                    HDFLibrary.SDgetcompinfo(id, comp);
                    compression.append("JPEG(quality=")
                        .append(comp.quality)
                        .append(",force_baseline=")
                        .append(comp.force_baseline)
                        .append(")");
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_SKPHUFF) {
                    HDFSKPHUFFCompInfo comp = new HDFSKPHUFFCompInfo();
                    HDFLibrary.SDgetcompinfo(id, comp);
                    compression.append("SKPHUFF(skp_size=").append(comp.skp_size).append(")");
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_RLE) {
                    compression.append("RLE");
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_NBIT) {
                    HDFNBITCompInfo comp = new HDFNBITCompInfo();
                    HDFLibrary.SDgetcompinfo(id, comp);
                    compression.append("NBIT(nt=")
                        .append(comp.nt)
                        .append(",bit_len=")
                        .append(comp.bit_len)
                        .append(",ctype=")
                        .append(comp.ctype)
                        .append(",fill_one=")
                        .append(comp.fill_one)
                        .append(",sign_ext=")
                        .append(comp.sign_ext)
                        .append(",start_bit=")
                        .append(comp.start_bit)
                        .append(")");
                }

                if (compression.length() == 0)
                    compression.append("NONE");
            }
            catch (Exception ex) {
                log.debug("init(): get compression information failure: ", ex);
            }

            // get chunk information
            try {
                HDFChunkInfo chunkInfo = new HDFChunkInfo();
                int[] cflag            = {HDFConstants.HDF_NONE};

                try {
                    HDFLibrary.SDgetchunkinfo(id, chunkInfo, cflag);
                }
                catch (Exception ex) {
                    log.debug("init(): SDgetchunkinfo failure: ", ex);
                }

                storageLayout.setLength(0);

                if (cflag[0] == HDFConstants.HDF_NONE) {
                    chunkSize = null;
                    storageLayout.append("NONE");
                }
                else {
                    chunkSize = new long[rank];
                    for (int i = 0; i < rank; i++)
                        chunkSize[i] = chunkInfo.chunk_lengths[i];
                    storageLayout.append("CHUNKED: ").append(chunkSize[0]);
                    for (int i = 1; i < rank; i++)
                        storageLayout.append(" X ").append(chunkSize[i]);
                }
            }
            catch (Exception ex) {
                log.debug("init(): get chunk information failure: ", ex);
            }

            inited = true;
        }
        catch (HDFException ex) {
            log.debug("init(): failure: ", ex);
        }
        finally {
            close(id);
        }

        dims         = new long[rank];
        maxDims      = new long[rank];
        startDims    = new long[rank];
        selectedDims = new long[rank];

        for (int i = 0; i < rank; i++) {
            startDims[i]    = 0;
            selectedDims[i] = 1;
            dims[i] = maxDims[i] = idims[i];
        }

        if (isUnlimited)
            maxDims[0] = -1;

        selectedIndex[0] = 0;
        selectedIndex[1] = 1;
        selectedIndex[2] = 2;

        // select only two dimensions at a time
        if (rank == 1)
            selectedDims[0] = dims[0];

        if (rank > 1) {
            selectedDims[0] = dims[0];
            if (isText)
                selectedDims[1] = 1;
            else
                selectedDims[1] = dims[1];
        }
    }

    /**
     * Creates a new dataset.
     *
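     * A minimal sketch of creating a 100x50 32-bit integer SDS under the root
     * group, chunked and gzip-compressed; the file and dataset names and the
     * sizes are illustrative assumptions:
     * <pre>
     FileFormat fmt = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF4);
     H4File file = (H4File) fmt.createFile("test.hdf", FileFormat.FILE_CREATE_DELETE);
     file.open();
     Group root = (Group) file.getRootObject();
     Datatype dtype = file.createDatatype(Datatype.CLASS_INTEGER, 4, Datatype.NATIVE, Datatype.SIGN_2);
     long[] dims   = {100, 50};
     long[] chunks = {10, 10};
     H4SDS sds = H4SDS.create("mySDS", root, dtype, dims, null, chunks, 6, null, null);
     * </pre>
     *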
     * @param name the name of the dataset to create.
     * @param pgroup the parent group of the new dataset.
     * @param type the datatype of the dataset.
     * @param dims the dimension size of the dataset.
     * @param maxdims the max dimension size of the dataset.
     * @param chunks the chunk size of the dataset.
     * @param gzip the level of the gzip compression.
     * @param fillValue the default value.
     * @param data the array of data values.
     *
     * @return the new dataset if successful. Otherwise returns null.
     *
     * @throws Exception if the dataset can not be created
     */
    public static H4SDS create(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims,
                               long[] chunks, int gzip, Object fillValue, Object data) throws Exception
    {
        H4SDS dataset = null;
        if ((pgroup == null) || (name == null) || (dims == null)) {
            log.trace("create(): Parent group, name or dims is null");
            return null;
        }

        H4File file = (H4File)pgroup.getFileFormat();

        if (file == null) {
            log.trace("create(): Parent group FileFormat is null");
            return null;
        }

        String path = HObject.SEPARATOR;
        if (!pgroup.isRoot())
            path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR;
        // prepare the dataspace
        int rank    = dims.length;
        int[] idims = new int[rank];
        int[] start = new int[rank];
        for (int i = 0; i < rank; i++) {
            idims[i] = (int)dims[i];
            start[i] = 0;
        }

        // only the first element of the SDcreate parameter dim_sizes (i.e.,
        // the dimension of the lowest rank or the slowest-changing dimension)
        // can be assigned the value SD_UNLIMITED (or 0) to make the first
        // dimension unlimited.
        if ((maxdims != null) && (maxdims[0] <= 0))
            idims[0] = 0; // set to unlimited dimension.

        int[] ichunks = null;
        if (chunks != null) {
            ichunks = new int[rank];
            for (int i = 0; i < rank; i++)
                ichunks[i] = (int)chunks[i];
        }

        // unlimited cannot be used with chunking or compression for HDF 4.2.6 or earlier.
        if (idims[0] == 0 && (ichunks != null || gzip > 0)) {
            log.debug("create(): Unlimited cannot be used with chunking or compression");
            throw new HDFException("Unlimited cannot be used with chunking or compression");
        }

        long sdid  = file.getSDAccessID();
        long sdsid = -1;
        long vgid  = -1;
        long tid   = type.createNative();

        if (tid >= 0) {
            try {
                sdsid = HDFLibrary.SDcreate(sdid, name, tid, rank, idims);
                // set fill value to zero.
                int vsize   = HDFLibrary.DFKNTsize(tid);
                byte[] fill = new byte[vsize];
                for (int i = 0; i < vsize; i++)
                    fill[i] = 0;
                HDFLibrary.SDsetfillvalue(sdsid, fill);

                // when we create a new dataset with an unlimited dimension,
                // we have to write some data into the dataset; otherwise
                // the new dataset has zero dimension size.
            }
            catch (Exception ex) {
                log.debug("create(): failure: ", ex);
                throw(ex);
            }
        }

        if (sdsid < 0) {
            log.debug("create(): Dataset creation failed");
            throw(new HDFException("Unable to create the new dataset."));
        }

        HDFDeflateCompInfo compInfo = null;
        if (gzip > 0) {
            // set compression
            compInfo       = new HDFDeflateCompInfo();
            compInfo.level = gzip;
            if (chunks == null)
                HDFLibrary.SDsetcompress(sdsid, HDFConstants.COMP_CODE_DEFLATE, compInfo);
        }

        if (chunks != null) {
            // set chunk
            HDFChunkInfo chunkInfo = new HDFChunkInfo(ichunks);
            int flag               = HDFConstants.HDF_CHUNK;

            if (gzip > 0) {
                flag      = HDFConstants.HDF_CHUNK | HDFConstants.HDF_COMP;
                chunkInfo = new HDFChunkInfo(ichunks, HDFConstants.COMP_CODE_DEFLATE, compInfo);
            }

            try {
                HDFLibrary.SDsetchunk(sdsid, chunkInfo, flag);
            }
            catch (Exception err) {
                log.debug("create(): SDsetchunk failure: ", err);
                throw new HDFException("SDsetchunk failed.");
            }
        }

        if ((sdsid > 0) && (data != null))
            HDFLibrary.SDwritedata(sdsid, start, null, idims, data);

        int ref = HDFLibrary.SDidtoref(sdsid);

        if (!pgroup.isRoot()) {
            // add the dataset to the parent group
            vgid = pgroup.open();
            if (vgid < 0) {
                if (sdsid > 0)
                    HDFLibrary.SDendaccess(sdsid);
                log.debug("create(): Invalid Parent Group ID");
                throw(new HDFException("Unable to open the parent group."));
            }

            HDFLibrary.Vaddtagref(vgid, HDFConstants.DFTAG_NDG, ref);

            pgroup.close(vgid);
        }

        try {
            if (sdsid > 0)
                HDFLibrary.SDendaccess(sdsid);
        }
        catch (Exception ex) {
            log.debug("create(): SDendaccess failure: ", ex);
        }

        long[] oid = {HDFConstants.DFTAG_NDG, ref};
        dataset    = new H4SDS(file, name, path, oid);

        pgroup.addToMemberList(dataset);

        return dataset;
    }

    /**
     * Creates a new dataset.
     *
     * @param name the name of the dataset to create.
     * @param pgroup the parent group of the new dataset.
     * @param type the datatype of the dataset.
     * @param dims the dimension size of the dataset.
     * @param maxdims the max dimension size of the dataset.
     * @param chunks the chunk size of the dataset.
     * @param gzip the level of the gzip compression.
     * @param data the array of data values.
     *
     * @return the new dataset if successful. Otherwise returns null.
     *
     * @throws Exception if the dataset can not be created
     */
    public static H4SDS create(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims,
                               long[] chunks, int gzip, Object data) throws Exception
    {
        return create(name, pgroup, type, dims, maxdims, chunks, gzip, null, data);
    }

    /**
     * Copies attributes from one SDS to another SDS.
     */
    private void copyAttribute(long srcdid, long dstdid)
    {
        log.trace("copyAttribute(): start: srcdid={} dstdid={}", srcdid, dstdid);
        try {
            String[] objName = {""};
            int[] sdInfo     = {0, 0, 0};
            int[] tmpDim     = new int[HDFConstants.MAX_VAR_DIMS];
            HDFLibrary.SDgetinfo(srcdid, objName, tmpDim, sdInfo);
            int numberOfAttributes = sdInfo[2];
            log.trace("copyAttribute(): numberOfAttributes={}", numberOfAttributes);

            boolean b         = false;
            String[] attrName = new String[1];
            int[] attrInfo    = {0, 0};
            for (int i = 0; i < numberOfAttributes; i++) {
                attrName[0] = "";
                try {
                    b = HDFLibrary.SDattrinfo(srcdid, i, attrName, attrInfo);
                }
                catch (HDFException ex) {
                    log.debug("copyAttribute(): attribute[{}] SDattrinfo failure: ", i, ex);
                    b = false;
                }

                if (!b)
                    continue;

                // read attribute data from source dataset
                byte[] attrBuff = new byte[attrInfo[1] * HDFLibrary.DFKNTsize(attrInfo[0])];
                try {
                    HDFLibrary.SDreadattr(srcdid, i, attrBuff);
                }
                catch (HDFException ex) {
                    log.debug("copyAttribute(): attribute[{}] SDreadattr failure: ", i, ex);
                    attrBuff = null;
                }

                if (attrBuff == null) {
                    log.debug("copyAttribute(): attrBuff[{}] is null", i);
                    continue;
                }

                // attach attribute to the destination dataset
                HDFLibrary.SDsetattr(dstdid, attrName[0], attrInfo[0], attrInfo[1], attrBuff);
            } // (int i=0; i<numberOfAttributes; i++)
        }
        catch (Exception ex) {
            log.debug("copyAttribute(): failure: ", ex);
        }
    }

    // Implementing DataFormat
    /**
     * Retrieves the object's metadata, such as attributes, from the file.
     *
     * Metadata, such as attributes, is stored in a List.
     *
     * @param attrPropList
     *             the list of properties to get
     *
     * @return the list of metadata objects.
     *
     * @throws Exception
     *             if the metadata can not be retrieved
     */
    @SuppressWarnings("rawtypes")
    public List getMetadata(int... attrPropList) throws Exception
    {
        throw new UnsupportedOperationException("getMetadata(int... attrPropList) is not supported");
    }
}