/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the files COPYING and Copyright.html. *
 * COPYING can be found at the root of the source code distribution tree.    *
 * Or, see https://support.hdfgroup.org/products/licenses.html               *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.h4;

import java.util.List;
import java.util.Vector;

import hdf.hdflib.HDFChunkInfo;
import hdf.hdflib.HDFCompInfo;
import hdf.hdflib.HDFConstants;
import hdf.hdflib.HDFDeflateCompInfo;
import hdf.hdflib.HDFException;
import hdf.hdflib.HDFJPEGCompInfo;
import hdf.hdflib.HDFLibrary;
import hdf.hdflib.HDFNBITCompInfo;
import hdf.hdflib.HDFSKPHUFFCompInfo;
import hdf.hdflib.HDFSZIPCompInfo;

import hdf.object.Attribute;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.ScalarDS;
import hdf.object.MetaDataContainer;

import hdf.object.h4.H4ScalarAttribute;

/**
 * H4SDS describes HDF4 Scientific Data Sets (SDS) and operations performed on
 * the SDS. An SDS is a group of data structures used to store and describe
 * multidimensional arrays of scientific data.
 *
 * The data contained in an SDS array has a data type associated with it. The
 * standard data types supported by the SD interface include 32- and 64-bit
 * floating-point numbers, 8-, 16- and 32-bit signed integers, 8-, 16- and
 * 32-bit unsigned integers, and 8-bit characters.
 *
 * <b>How to Select a Subset</b>
 *
 * Dataset defines APIs for reading, writing and subsetting a dataset. No function
 * is defined to select a subset of a data array. The selection is done in an implicit
 * way. Function calls for dimension information, such as getSelectedDims(), return an
 * array of dimension values that is a reference to the array held in the dataset object.
 * Changes made to that array outside the dataset object directly change the values of
 * the array in the dataset object. It works like pointers in C.
 *
 * The following example shows how to make a subset. In the example, the dataset
 * is a 4-dimensional array of size [200][100][50][10], i.e.,
 * dims[0]=200; dims[1]=100; dims[2]=50; dims[3]=10; <br>
 * We want to select every other data point in dims[1] and dims[2].
 * <pre>
    int rank = dataset.getRank();   // number of dimensions of the dataset
    long[] dims = dataset.getDims(); // the dimension sizes of the dataset
    long[] selected = dataset.getSelectedDims(); // the selected size of the dataset
    long[] start = dataset.getStartDims(); // the offset of the selection
    long[] stride = dataset.getStride(); // the stride of the dataset
    int[]  selectedIndex = dataset.getSelectedIndex(); // the selected dimensions for display

    // select dim1 and dim2 as 2D data for display, and slice through dim0
    selectedIndex[0] = 1;
    selectedIndex[1] = 2;
    selectedIndex[2] = 0;

    // reset the selection arrays
    for (int i=0; i&lt;rank; i++) {
        start[i] = 0;
        selected[i] = 1;
        stride[i] = 1;
    }

    // set stride to 2 on dim1 and dim2 so that every other data point is selected.
    stride[1] = 2;
    stride[2] = 2;

    // set the selection size of dim1 and dim2
    selected[1] = dims[1]/stride[1];
    selected[2] = dims[2]/stride[2];

    // when dataset.read() is called, the selection above will be used since
    // the dimension arrays are passed by reference. Changes to these arrays
    // outside the dataset object directly change the values of these arrays
    // in the dataset object.

 * </pre>
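 *
 * Once the selection is set up, a subsequent read picks it up automatically.
 * A minimal continuation of the sketch above (the variable name "dataset" is
 * the same assumption):
 * <pre>
    // reads only the subset selected above
    Object subset = dataset.read();
 * </pre>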
 *
 * @version 1.1 9/4/2007
 * @author Peter X. Cao
 */
public class H4SDS extends ScalarDS implements MetaDataContainer
{
    private static final long serialVersionUID = 2557157923292438696L;

    private static final org.slf4j.Logger   log = org.slf4j.LoggerFactory.getLogger(H4SDS.class);

    /** Tag for netCDF datasets.
     *  The HDF4 library supports netCDF version 2.3.2, and only through the SDS APIs.
     */
    // magic number for netCDF: "C(67) D(68) F(70) '\001'"
    public static final int                 DFTAG_NDG_NETCDF = 67687001;

    /**
     * The list of attributes of this data object. Members of the list are
     * instances of Attribute.
     */
    @SuppressWarnings("rawtypes")
    private List                            attributeList;

    /**
     * The SDS interface identifier obtained from SDstart(filename, access)
     */
    private long                            sdid;

    /** the datatype identifier */
    private long                            datatypeID = -1;

    /** the number of attributes */
    private int                             nAttributes = -1;

    /**
     * Creates an H4SDS object with specific name and path.
     *
     * @param theFile
     *            the HDF file.
     * @param name
     *            the name of this H4SDS.
     * @param path
     *            the full path of this H4SDS.
     */
    public H4SDS(FileFormat theFile, String name, String path) {
        this(theFile, name, path, null);
    }

    /**
     * Creates an H4SDS object with specific name, path and oid.
     *
     * @param theFile
     *            the HDF file.
     * @param name
     *            the name of this H4SDS.
     * @param path
     *            the full path of this H4SDS.
     * @param oid
     *            the unique identifier of this data object.
     */
    @SuppressWarnings("deprecation")
    public H4SDS(FileFormat theFile, String name, String path, long[] oid) {
        super(theFile, name, path, oid);
        unsignedConverted = false;
        sdid = ((H4File)getFileFormat()).getSDAccessID();
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.DataFormat#hasAttribute()
     */
    @Override
    public boolean hasAttribute() {
        if (nAttributes < 0) {
            sdid = ((H4File)getFileFormat()).getSDAccessID();

            long id = open();

            if (id >= 0) {
                try { // retrieve attributes of the dataset
                    String[] objName = {""};
                    int[] sdInfo = {0, 0, 0};
                    int[] tmpDim = new int[HDFConstants.MAX_VAR_DIMS];
                    HDFLibrary.SDgetinfo(id, objName, tmpDim, sdInfo);
                    nAttributes = sdInfo[2];
                }
                catch (Exception ex) {
                    log.debug("hasAttribute(): failure: ", ex);
                    nAttributes = 0;
                }

                log.trace("hasAttribute(): nAttributes={}", nAttributes);

                close(id);
            }
        }

        return (nAttributes > 0);
    }

    /* Implement abstract ScalarDS */

    /*
     * (non-Javadoc)
     * @see hdf.object.ScalarDS#readPalette(int)
     */
    @Override
    public byte[][] readPalette(int idx) {
        return null;
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.ScalarDS#getPaletteRefs()
     */
    @Override
    public byte[] getPaletteRefs() {
        return null;
    }

    // implementing Dataset
    /**
     * Returns the datatype of the data object.
     *
     * @return the datatype of the data object.
     */
    @Override
    public Datatype getDatatype() {
        if (!inited)
            init();

        if (datatype == null) {
            try {
                datatype = new H4Datatype(datatypeID);
            }
            catch (Exception ex) {
                log.debug("getDatatype(): failed to create datatype: ", ex);
                datatype = null;
            }
        }

        return datatype;
    }

    // To do: Implementing Dataset
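    /**
     * Copies this dataset to a new dataset in the given parent group.
     *
     * If dname is null, the name of this dataset is used. If dims is null, the
     * dataset is initialized if necessary and its full extent is copied. If
     * buff is null, the data is read from this dataset before being written to
     * the destination.
     *
     * @param pgroup the parent group of the new dataset.
     * @param dname the name of the new dataset.
     * @param dims the dimension sizes of the new dataset.
     * @param buff the data to write, or null to copy the source data.
     *
     * @return the new dataset if successful; otherwise returns null.
     *
     * @throws Exception if the dataset can not be copied
     */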
    @Override
    public Dataset copy(Group pgroup, String dname, long[] dims, Object buff) throws Exception {
        log.trace("copy(): start: parentGroup={} datasetName={}", pgroup, dname);

        Dataset dataset = null;
        long srcdid = -1;
        long dstdid = -1;
        long tid = -1;
        int size = 1;
        int theRank = 2;
        String path = null;
        int[] count = null;
        int[] start = null;

        if (pgroup == null) {
            log.debug("copy(): Parent group is null");
            return null;
        }

        if (dname == null)
            dname = getName();

        if (pgroup.isRoot())
            path = HObject.SEPARATOR;
        else
            path = pgroup.getPath()+pgroup.getName()+HObject.SEPARATOR;
        log.trace("copy(): path={}", path);

        srcdid = open();
        if (srcdid < 0) {
            log.debug("copy(): Invalid source SDID");
            return null;
        }

        if (dims == null) {
            if (!isInited())
                init();

            theRank = getRank();

            dims = getDims();
        }
        else {
            theRank = dims.length;
        }

        start = new int[theRank];
        count = new int[theRank];
        for (int i=0; i<theRank; i++) {
            start[i] = 0;
            count[i] = (int)dims[i];
            size *= count[i];
        }
        log.trace("copy(): theRank={} with size={}", theRank, size);

        // create the new dataset and attach it to the parent group
        tid = datatypeID;
        dstdid = HDFLibrary.SDcreate(((H4File)pgroup.getFileFormat()).getSDAccessID(), dname, tid, theRank, count);
        if (dstdid < 0) {
            log.debug("copy(): Invalid dest SDID");
            return null;
        }

        int ref = HDFLibrary.SDidtoref(dstdid);
        if (!pgroup.isRoot()) {
            long vgid = pgroup.open();
            HDFLibrary.Vaddtagref(vgid, HDFConstants.DFTAG_NDG, ref);
            pgroup.close(vgid);
        }

        // copy attributes from one object to the new object
        log.trace("copy(): copy attributes");
        copyAttribute(srcdid, dstdid);

        // read data from the source dataset
        log.trace("copy(): read data from the source dataset");
        if (buff == null) {
            buff = new byte[size * HDFLibrary.DFKNTsize(tid)];
            HDFLibrary.SDreaddata(srcdid, start, null, count, buff);
        }

        // write the data into the destination dataset
        log.trace("copy(): write the data into the destination dataset");
        HDFLibrary.SDwritedata(dstdid, start, null, count, buff);

        long[] oid = {HDFConstants.DFTAG_NDG, ref};
        dataset = new H4SDS(pgroup.getFileFormat(), dname, path, oid);

        pgroup.addToMemberList(dataset);

        close(srcdid);

        try {
            HDFLibrary.SDendaccess(dstdid);
        }
        catch (HDFException ex) {
            log.debug("copy(): SDendaccess failure: ", ex);
        }

        return dataset;
    }

    // Implementing Dataset
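    /**
     * Reads the raw bytes of the current selection from the file.
     *
     * A hedged usage sketch (the variable name "dataset" is an assumption):
     * <pre>
    byte[] raw = dataset.readBytes(); // untyped bytes of the selected subset
     * </pre>
     *
     * @return the bytes read from the file, or null on failure.
     *
     * @throws HDFException if the data can not be read
     */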
    @Override
    public byte[] readBytes() throws HDFException {
        byte[] theData = null;

        if (!isInited())
            init();

        long id = open();
        if (id < 0) {
            log.debug("readBytes(): Invalid SDID");
            return null;
        }

        int datasize = 1;
        int[] select = new int[rank];
        int[] start = new int[rank];
        for (int i=0; i<rank; i++) {
            datasize *= (int)selectedDims[i];
            select[i] = (int)selectedDims[i];
            start[i] = (int)startDims[i];
        }

        int[] stride = null;
        if (selectedStride != null) {
            stride = new int[rank];
            for (int i=0; i<rank; i++) {
                stride[i] = (int)selectedStride[i];
            }
        }

        try {
            int size = HDFLibrary.DFKNTsize(datatypeID)*datasize;
            theData = new byte[size];
            HDFLibrary.SDreaddata(id, start, stride, select, theData);
        }
        catch (Exception ex) {
            log.debug("readBytes(): failure: ", ex);
        }
        finally {
            close(id);
        }

        return theData;
    }

    // Implementing DataFormat
    /**
     * Reads the data from file.
     *
     * read() reads the data from file to a memory buffer and returns the memory
     * buffer. The dataset object does not hold the memory buffer. To store the
     * memory buffer in the dataset object, one must call getData().
     *
     * By default, the whole dataset is read into memory. Users can also select
     * a subset to read. Subsetting is done in an implicit way.
     *
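     * A hedged sketch of a plain read (the variable name "dataset" is an
     * assumption):
     * <pre>
    Object data = dataset.read(); // an int[], float[], etc., depending on the SDS datatype
     * </pre>
     *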
     * @return the data read from file.
     *
     * @see #getData()
     *
     * @throws HDFException
     *             if object can not be read
     * @throws OutOfMemoryError
     *             if memory is exhausted
     */
    @Override
    public Object read() throws HDFException, OutOfMemoryError {
        Object theData = null;

        if (!isInited())
            init();

        long id = open();
        if (id < 0) {
            log.debug("read(): Invalid SDID");
            return null;
        }

        int datasize = 1;
        int[] select = new int[rank];
        int[] start = new int[rank];
        for (int i=0; i<rank; i++) {
            datasize *= (int)selectedDims[i];
            select[i] = (int)selectedDims[i];
            start[i] = (int)startDims[i];
        }

        int[] stride = null;
        if (selectedStride != null) {
            stride = new int[rank];
            for (int i=0; i<rank; i++) {
                stride[i] = (int)selectedStride[i];
            }
        }

        try {
            theData = H4Datatype.allocateArray(datatypeID, datasize);

            if (theData != null) {
                // assume external data files are located in the same directory as the main file.
                HDFLibrary.HXsetdir(getFileFormat().getParent());

                HDFLibrary.SDreaddata(id, start, stride, select, theData);

                if (isText)
                    theData = byteToString((byte[])theData, select[0]);
            }
        }
        catch (Exception ex) {
            log.debug("read(): failure: ", ex);
        }
        finally {
            close(id);
        }

        if (fillValue == null && isImageDisplay) {
            // need to set fillValue for images
            try {
                getMetadata();
            }
            catch (Exception ex) {
                log.debug("read(): getMetadata failure: ", ex);
            }
        }

        if ((rank > 1) && (selectedIndex[0] > selectedIndex[1]))
            isDefaultImageOrder = false;
        else
            isDefaultImageOrder = true;

        log.trace("read(): isDefaultImageOrder={}", isDefaultImageOrder);
        return theData;
    }

    // Implementing DataFormat
    /**
     * Writes a memory buffer to the object in the file.
     *
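     * A hedged read-modify-write sketch (assuming "dataset" is an H4SDS of
     * 32-bit integers, so read() returns an int[]):
     * <pre>
    int[] values = (int[]) dataset.read();
    values[0] = 99;        // modify the buffer in memory
    dataset.write(values); // write it back over the same selection
     * </pre>
     *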
     * @param buf
     *            the data to write
     *
     * @throws HDFException
     *             if data can not be written
     */
    @SuppressWarnings("deprecation")
    @Override
    public void write(Object buf) throws HDFException {
        if (buf == null) {
            log.debug("write(): Object is null");
            return;
        }

        long id = open();
        if (id < 0) {
            log.debug("write(): Invalid SDID");
            return;
        }

        int[] select = new int[rank];
        int[] start = new int[rank];
        for (int i=0; i<rank; i++) {
            select[i] = (int)selectedDims[i];
            start[i] = (int)startDims[i];
        }

        int[] stride = null;
        if (selectedStride != null) {
            stride = new int[rank];
            for (int i=0; i<rank; i++) {
                stride[i] = (int)selectedStride[i];
            }
        }

        Object tmpData = buf;
        try {
            if (getDatatype().isUnsigned() && unsignedConverted)
                tmpData = convertToUnsignedC(buf);
            // assume external data files are located in the same directory as the main file.
            HDFLibrary.HXsetdir(getFileFormat().getParent());

            HDFLibrary.SDwritedata(id, start, stride, select, tmpData);
        }
        catch (Exception ex) {
            log.debug("write(): failure: ", ex);
        }
        finally {
            tmpData = null;
            close(id);
        }
    }

    // Implementing DataFormat
    /**
     * Retrieves the object's metadata, such as attributes, from the file.
     *
     * Metadata, such as attributes, is stored in a List.
     *
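     * A hedged sketch of listing attribute names (the variable name "dataset"
     * and the Attribute accessor getAttributeName() are assumptions):
     * <pre>
    List attrs = dataset.getMetadata();
    for (Object a : attrs)
        System.out.println(((Attribute) a).getAttributeName());
     * </pre>
     *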
     * @return the list of metadata objects.
     *
     * @throws HDFException
     *             if the metadata can not be retrieved
     */
    @Override
    @SuppressWarnings({"rawtypes", "unchecked"})
    public List getMetadata() throws HDFException {
        if (attributeList != null) {
            log.trace("getMetadata(): attributeList != null");
            return attributeList;
        }

        long id = open();
        String[] objName = {""};
        int[] sdInfo = {0, 0, 0};
        try {
            // retrieve attributes of the dataset
            int[] tmpDim = new int[HDFConstants.MAX_VAR_DIMS];
            HDFLibrary.SDgetinfo(id, objName, tmpDim, sdInfo);
            int n = sdInfo[2];

            if ((attributeList == null) && (n > 0))
                attributeList = new Vector(n, 5);

            boolean b = false;
            String[] attrName = new String[1];
            int[] attrInfo = {0, 0};
            for (int i=0; i<n; i++) {
                attrName[0] = "";
                try {
                    b = HDFLibrary.SDattrinfo(id, i, attrName, attrInfo);
                    // mask off the litend bit
                    attrInfo[0] = attrInfo[0] & (~HDFConstants.DFNT_LITEND);
                }
                catch (HDFException ex) {
                    log.debug("getMetadata(): attribute[{}] SDattrinfo failure: ", i, ex);
                    b = false;
                }

                if (!b)
                    continue;

                long[] attrDims = {attrInfo[1]};
                H4ScalarAttribute attr = new H4ScalarAttribute(this, attrName[0], new H4Datatype(attrInfo[0]), attrDims);
                attributeList.add(attr);

                Object buf = null;
                try {
                    buf = H4Datatype.allocateArray(attrInfo[0], attrInfo[1]);
                }
                catch (OutOfMemoryError e) {
                    log.debug("getMetadata(): out of memory: ", e);
                    buf = null;
                }

                try {
                    HDFLibrary.SDreadattr(id, i, buf);
                }
                catch (HDFException ex) {
                    log.debug("getMetadata(): attribute[{}] SDreadattr failure: ", i, ex);
                    buf = null;
                }

                if (buf != null) {
                    if ((attrInfo[0] == HDFConstants.DFNT_CHAR) ||
                        (attrInfo[0] ==  HDFConstants.DFNT_UCHAR8)) {
                        buf = Dataset.byteToString((byte[])buf, attrInfo[1]);
                    }
                    else if (attrName[0].equalsIgnoreCase("fillValue") ||
                            attrName[0].equalsIgnoreCase("_fillValue")) {
                        fillValue = buf;
                    }

                    attr.setAttributeData(buf);
                }

            } // (int i=0; i<n; i++)

            // retrieve attribute of dimension
            // BUG !! HDFLibrary.SDgetdimstrs(dimID, argv, 80) does not return anything
            /**
             * for (int i=0; i< rank; i++) { int dimID = HDFLibrary.SDgetdimid(id, i); String[] argv = {" ", "
             * ", " "}; HDFLibrary.SDgetdimstrs(dimID, argv, 80); }
             */
        }
        catch (Exception ex) {
            log.debug("getMetadata(): failure: ", ex);
        }
        finally {
            close(id);
        }

        return attributeList;
    }

    // To do: implementing DataFormat
    /**
     * Writes a specific piece of metadata (such as an attribute) into the file.
     *
     * If an HDF(4&amp;5) attribute exists in the file, this method updates its
     * value. If the attribute does not exist in the file, it creates the
     * attribute in the file and attaches it to the object. It will fail to
     * write a new attribute to the object where an attribute with the same name
     * already exists. To update the value of an existing attribute in the file,
     * one needs to get the instance of the attribute by getMetadata(), change
     * its values, then use writeMetadata() to write the value.
     *
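     * A hedged sketch of the update workflow described above (the variable
     * names are assumptions; setAttributeData() is the same accessor this
     * class uses in getMetadata()):
     * <pre>
    List attrs = dataset.getMetadata();
    H4ScalarAttribute attr = (H4ScalarAttribute) attrs.get(0);
    attr.setAttributeData(newValue); // newValue: replacement data (assumed variable)
    dataset.writeMetadata(attr);     // write the new value to the file
     * </pre>
     *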
     * @param info
     *            the metadata to write.
     *
     * @throws Exception
     *             if the metadata can not be written
     */
    @Override
    @SuppressWarnings({"rawtypes", "unchecked"})
    public void writeMetadata(Object info) throws Exception {
        // only attribute metadata is supported.
        if (!(info instanceof Attribute)) {
            log.debug("writeMetadata(): Object not an H4ScalarAttribute");
            return;
        }

        try {
            getFileFormat().writeAttribute(this, (H4ScalarAttribute)info, true);

            if (attributeList == null)
                attributeList = new Vector();

            attributeList.add(info);
            nAttributes = attributeList.size();
        }
        catch (Exception ex) {
            log.trace("writeMetadata(): failure: ", ex);
        }
    }

    /**
     * Deletes an existing piece of metadata from this object.
     *
     * @param info
     *            the metadata to delete.
     *
     * @throws HDFException
     *             if the metadata can not be removed
     */
    @Override
    public void removeMetadata(Object info) throws HDFException {
        log.trace("removeMetadata(): disabled");
    }

    /**
     * Updates an existing piece of metadata attached to this object.
     *
     * @param info
     *            the metadata to update.
     *
     * @throws Exception
     *             if the metadata can not be updated
     */
    @Override
    public void updateMetadata(Object info) throws Exception {
        log.trace("updateMetadata(): disabled");
    }

    // Implementing HObject
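    /**
     * Opens access to this SDS through SDselect and returns the SDS identifier.
     *
     * For netCDF datasets the reference stored in the OID is used directly as
     * the index, since SDidtoref() fails for netCDF files.
     *
     * @return the SDS identifier if successful; otherwise -1.
     */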
    @Override
    public long open() {
        long id = -1;

        try {
            int index = 0;
            int tag = (int)oid[0];

            log.trace("open(): tag={}", tag);
            if (tag == H4SDS.DFTAG_NDG_NETCDF)
                index = (int)oid[1]; // HDFLibrary.SDidtoref(id) fails for netCDF
            else
                index = HDFLibrary.SDreftoindex(sdid, (int)oid[1]);

            id = HDFLibrary.SDselect(sdid, index);
        }
        catch (HDFException ex) {
            log.debug("open(): failure: ", ex);
            id = -1;
        }

        return id;
    }

    // Implementing HObject
    @Override
    public void close(long id) {
        try {
            HDFLibrary.SDendaccess(id);
        }
        catch (HDFException ex) {
            log.debug("close(): failure: ", ex);
        }
    }

    /**
     * Initializes the H4SDS, for example, the dimension sizes of this dataset.
     */
    @SuppressWarnings("deprecation")
    @Override
    public void init() {
        if (inited) {
            log.trace("init(): Already initialized");
            return; // already called. Initialize only once
        }

        long id = open();
        String[] objName = {""};
        String[] dimName = {""};
        int[] dimInfo = {0, 0, 0};
        int[] sdInfo = {0, 0, 0};
        boolean isUnlimited = false;

        int[] idims = new int[HDFConstants.MAX_VAR_DIMS];
        try {
            HDFLibrary.SDgetinfo(id, objName, idims, sdInfo);
            // mask off the litend bit
            sdInfo[1] = sdInfo[1] & (~HDFConstants.DFNT_LITEND);
            nAttributes = sdInfo[2];
            rank = sdInfo[0];

            if (rank <= 0) {
                rank = 1;
                idims[0] = 1;
            }

            isUnlimited = HDFLibrary.SDisrecord(id);
            log.trace("init(): isUnlimited={}", isUnlimited);

            datatypeID = sdInfo[1];
            isText = ((datatypeID == HDFConstants.DFNT_CHAR) || (datatypeID == HDFConstants.DFNT_UCHAR8));

            // get the dimension names
            try {
                dimNames = new String[rank];
                for (int i=0; i<rank; i++) {
                    long dimid = HDFLibrary.SDgetdimid(id, i);
                    HDFLibrary.SDdiminfo(dimid, dimName, dimInfo);
                    dimNames[i] = dimName[0];
                }
            }
            catch (Exception ex) {
                log.debug("init(): get the dimension names: ", ex);
            }

            // get compression information
            try {
                HDFCompInfo compInfo = new HDFCompInfo();
                HDFLibrary.SDgetcompinfo(id, compInfo);

                compression.setLength(0);

                if (compInfo.ctype == HDFConstants.COMP_CODE_DEFLATE) {
                    HDFDeflateCompInfo comp = new HDFDeflateCompInfo();
                    HDFLibrary.SDgetcompinfo(id, comp);
                    compression.append("GZIP(level=").append(comp.level).append(")");
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_SZIP) {
                    HDFSZIPCompInfo comp = new HDFSZIPCompInfo();
                    HDFLibrary.SDgetcompinfo(id, comp);
                    compression.append("SZIP(bits_per_pixel=").append(comp.bits_per_pixel).append(",options_mask=")
                            .append(comp.options_mask).append(",pixels=").append(comp.pixels).append(",pixels_per_block=")
                            .append(comp.pixels_per_block).append(",pixels_per_scanline=").append(comp.pixels_per_scanline).append(")");
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_JPEG) {
                    HDFJPEGCompInfo comp = new HDFJPEGCompInfo();
                    HDFLibrary.SDgetcompinfo(id, comp);
                    compression.append("JPEG(quality=").append(comp.quality)
                            .append(",force_baseline=").append(comp.force_baseline).append(")");
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_SKPHUFF) {
                    HDFSKPHUFFCompInfo comp = new HDFSKPHUFFCompInfo();
                    HDFLibrary.SDgetcompinfo(id, comp);
                    compression.append("SKPHUFF(skp_size=").append(comp.skp_size).append(")");
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_RLE) {
                    compression.append("RLE");
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_NBIT) {
                    HDFNBITCompInfo comp = new HDFNBITCompInfo();
                    HDFLibrary.SDgetcompinfo(id, comp);
                    compression.append("NBIT(nt=").append(comp.nt).append(",bit_len=").append(comp.bit_len)
                            .append(",ctype=").append(comp.ctype).append(",fill_one=").append(comp.fill_one)
                            .append(",sign_ext=").append(comp.sign_ext).append(",start_bit=").append(comp.start_bit).append(")");
                }

                if (compression.length() == 0)
                    compression.append("NONE");
            }
            catch (Exception ex) {
                log.debug("init(): get compression information failure: ", ex);
            }

            // get chunk information
            try {
                HDFChunkInfo chunkInfo = new HDFChunkInfo();
                int[] cflag = {HDFConstants.HDF_NONE};

                try {
                    HDFLibrary.SDgetchunkinfo(id, chunkInfo, cflag);
                }
                catch (Exception ex) {
                    log.debug("init(): SDgetchunkinfo failure: ", ex);
                }

                storageLayout.setLength(0);

                if (cflag[0] == HDFConstants.HDF_NONE) {
                    chunkSize = null;
                    storageLayout.append("NONE");
                }
                else {
                    chunkSize = new long[rank];
                    for (int i=0; i<rank; i++)
                        chunkSize[i] = chunkInfo.chunk_lengths[i];
                    storageLayout.append("CHUNKED: ").append(chunkSize[0]);
                    for (int i = 1; i < rank; i++)
                        storageLayout.append(" X ").append(chunkSize[i]);
                }
            }
            catch (Exception ex) {
                log.debug("init(): get chunk information failure: ", ex);
            }

            inited = true;
        }
        catch (HDFException ex) {
            log.debug("init(): failure: ", ex);
        }
        finally {
            close(id);
        }

        dims = new long[rank];
        maxDims = new long[rank];
        startDims = new long[rank];
        selectedDims = new long[rank];

        for (int i=0; i<rank; i++) {
            startDims[i] = 0;
            selectedDims[i] = 1;
            dims[i] = maxDims[i] = idims[i];
        }

        if (isUnlimited)
            maxDims[0] = -1;

        selectedIndex[0] = 0;
        selectedIndex[1] = 1;
        selectedIndex[2] = 2;

        // select only two dimensions at a time
        if (rank == 1)
            selectedDims[0] = dims[0];

        if (rank > 1) {
            selectedDims[0] = dims[0];
            if (isText)
                selectedDims[1] = 1;
            else
                selectedDims[1] = dims[1];
        }
    }

    // Implementing ScalarDS
    @Override
    public byte[][] getPalette() {
        return palette;
    }

    /**
     * Creates a new dataset.
     *
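     * A hedged creation sketch (assuming "file" is an open, writable H4File
     * and "root" is its root group; the 2D 32-bit float layout is an example):
     * <pre>
    long[] dims2 = {20, 10};
    Datatype dtype = file.createDatatype(Datatype.CLASS_FLOAT, 4, Datatype.NATIVE, Datatype.NATIVE);
    float[] data = new float[20 * 10];
    H4SDS sds = H4SDS.create("float_2d", root, dtype, dims2, null, null, 0, null, data);
     * </pre>
     *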
     * @param name the name of the dataset to create.
     * @param pgroup the parent group of the new dataset.
     * @param type the datatype of the dataset.
     * @param dims the dimension size of the dataset.
     * @param maxdims the max dimension size of the dataset.
     * @param chunks the chunk size of the dataset.
     * @param gzip the level of the gzip compression.
     * @param fillValue the default value.
     * @param data the array of data values.
     *
     * @return the new dataset if successful. Otherwise returns null.
     *
     * @throws Exception if the dataset can not be created
     */
    public static H4SDS create(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims,
            long[] chunks, int gzip, Object fillValue, Object data) throws Exception {
        H4SDS dataset = null;
        if ((pgroup == null) || (name == null) || (dims == null)) {
            log.trace("create(): Parent group, name or dims is null");
            return null;
        }

        H4File file = (H4File)pgroup.getFileFormat();

        if (file == null) {
            log.trace("create(): Parent group FileFormat is null");
            return null;
        }

        String path = HObject.SEPARATOR;
        if (!pgroup.isRoot())
            path = pgroup.getPath()+pgroup.getName()+HObject.SEPARATOR;
        // prepare the dataspace
        int rank = dims.length;
        int[] idims = new int[rank];
        int[] start = new int[rank];
        for (int i=0; i<rank; i++) {
            idims[i] = (int)dims[i];
            start[i] = 0;
        }

        // only the first element of the SDcreate parameter dim_sizes (i.e.,
        // the dimension of the lowest rank or the slowest-changing dimension)
        // can be assigned the value SD_UNLIMITED (or 0) to make the first
        // dimension unlimited.
        if ((maxdims != null) && (maxdims[0] <= 0))
            idims[0] = 0; // set to unlimited dimension.

        int[] ichunks = null;
        if (chunks != null) {
            ichunks = new int[rank];
            for (int i=0; i<rank; i++)
                ichunks[i] = (int)chunks[i];
        }

        // unlimited cannot be used with chunking or compression for HDF 4.2.6 or earlier.
        if (idims[0] == 0 && (ichunks != null || gzip > 0)) {
            log.debug("create(): Unlimited cannot be used with chunking or compression");
            throw new HDFException("Unlimited cannot be used with chunking or compression");
        }

        long sdid = file.getSDAccessID();
        long sdsid = -1;
        long vgid = -1;
        long tid = type.createNative();

        if (tid >= 0) {
            try {
                sdsid = HDFLibrary.SDcreate(sdid, name, tid, rank, idims);
                // set fill value to zero.
                int vsize = HDFLibrary.DFKNTsize(tid);
                byte[] fill = new byte[vsize];
                for (int i=0; i<vsize; i++)
                    fill[i] = 0;
                HDFLibrary.SDsetfillvalue(sdsid, fill);

                // when we create a new dataset with an unlimited dimension,
                // we have to write some data into the dataset, or otherwise
                // the current dataset has zero dimension size.
            }
            catch (Exception ex) {
                log.debug("create(): failure: ", ex);
                throw (ex);
            }
        }

        if (sdsid < 0) {
            log.debug("create(): Dataset creation failed");
            throw (new HDFException("Unable to create the new dataset."));
        }

        HDFDeflateCompInfo compInfo = null;
        if (gzip > 0) {
            // set compression
            compInfo = new HDFDeflateCompInfo();
            compInfo.level = gzip;
            if (chunks == null)
                HDFLibrary.SDsetcompress(sdsid, HDFConstants.COMP_CODE_DEFLATE, compInfo);
        }

        if (chunks != null) {
            // set chunk
            HDFChunkInfo chunkInfo = new HDFChunkInfo(ichunks);
            int flag = HDFConstants.HDF_CHUNK;

            if (gzip > 0) {
                flag = HDFConstants.HDF_CHUNK | HDFConstants.HDF_COMP;
                chunkInfo = new HDFChunkInfo(ichunks, HDFConstants.COMP_CODE_DEFLATE, compInfo);
            }

            try {
                HDFLibrary.SDsetchunk(sdsid, chunkInfo, flag);
            }
            catch (Exception err) {
                log.debug("create(): SDsetchunk failure: ", err);
                throw new HDFException("SDsetchunk failed.");
            }
        }

        if ((sdsid > 0) && (data != null))
            HDFLibrary.SDwritedata(sdsid, start, null, idims, data);

        int ref = HDFLibrary.SDidtoref(sdsid);

        if (!pgroup.isRoot()) {
            // add the dataset to the parent group
            vgid = pgroup.open();
            if (vgid < 0) {
                if (sdsid > 0)
                    HDFLibrary.SDendaccess(sdsid);
                log.debug("create(): Invalid Parent Group ID");
                throw (new HDFException("Unable to open the parent group."));
            }

            HDFLibrary.Vaddtagref(vgid, HDFConstants.DFTAG_NDG, ref);

            pgroup.close(vgid);
        }

        try {
            if (sdsid > 0)
                HDFLibrary.SDendaccess(sdsid);
        }
        catch (Exception ex) {
            log.debug("create(): SDendaccess failure: ", ex);
        }

        long[] oid = {HDFConstants.DFTAG_NDG, ref};
        dataset = new H4SDS(file, name, path, oid);
        pgroup.addToMemberList(dataset);

        return dataset;
    }

    /**
     * Creates a new dataset.
     *
     * @param name the name of the dataset to create.
     * @param pgroup the parent group of the new dataset.
     * @param type the datatype of the dataset.
     * @param dims the dimension size of the dataset.
     * @param maxdims the max dimension size of the dataset.
     * @param chunks the chunk size of the dataset.
     * @param gzip the level of the gzip compression.
     * @param data the array of data values.
     *
     * @return the new dataset if successful. Otherwise returns null.
     *
     * @throws Exception if the dataset can not be created
     */
    public static H4SDS create(String name, Group pgroup, Datatype type,
            long[] dims, long[] maxdims, long[] chunks, int gzip, Object data) throws Exception {
        return create(name, pgroup, type, dims, maxdims, chunks, gzip, null, data);
    }

    /**
     * Copies all attributes from one SDS to another SDS.
     *
     * @param srcdid the identifier of the source SDS.
     * @param dstdid the identifier of the destination SDS.
     */
    private void copyAttribute(long srcdid, long dstdid) {
        log.trace("copyAttribute(): start: srcdid={} dstdid={}", srcdid, dstdid);
        try {
            String[] objName = {""};
            int[] sdInfo = {0, 0, 0};
            int[] tmpDim = new int[HDFConstants.MAX_VAR_DIMS];
            HDFLibrary.SDgetinfo(srcdid, objName, tmpDim, sdInfo);
            int numberOfAttributes = sdInfo[2];
            log.trace("copyAttribute(): numberOfAttributes={}", numberOfAttributes);

            boolean b = false;
            String[] attrName = new String[1];
            int[] attrInfo = {0, 0};
            for (int i=0; i<numberOfAttributes; i++) {
                attrName[0] = "";
                try {
                    b = HDFLibrary.SDattrinfo(srcdid, i, attrName, attrInfo);
                }
                catch (HDFException ex) {
                    log.debug("copyAttribute(): attribute[{}] SDattrinfo failure: ", i, ex);
                    b = false;
                }

                if (!b)
                    continue;

                // read attribute data from source dataset
                byte[] attrBuff = new byte[attrInfo[1] * HDFLibrary.DFKNTsize(attrInfo[0])];
                try {
                    HDFLibrary.SDreadattr(srcdid, i, attrBuff);
                }
                catch (HDFException ex) {
                    log.debug("copyAttribute(): attribute[{}] SDreadattr failure: ", i, ex);
                    attrBuff = null;
                }

                if (attrBuff == null) {
                    log.debug("copyAttribute(): attrBuff[{}] is null", i);
                    continue;
                }

                // attach attribute to the destination dataset
                HDFLibrary.SDsetattr(dstdid, attrName[0], attrInfo[0], attrInfo[1], attrBuff);
            } // (int i=0; i<numberOfAttributes; i++)
        }
        catch (Exception ex) {
            log.debug("copyAttribute(): failure: ", ex);
        }
    }

    // Implementing DataFormat
    /**
     * Retrieves the object's metadata, such as attributes, from the file.
     *
     * Metadata, such as attributes, is stored in a List.
     *
     * @param attrPropList
     *             the list of properties to get
     *
     * @return the list of metadata objects.
     *
     * @throws Exception
     *             if the metadata can not be retrieved
     */
    @SuppressWarnings("rawtypes")
    public List getMetadata(int... attrPropList) throws Exception {
        throw new UnsupportedOperationException("getMetadata(int... attrPropList) is not supported");
    }
}