001/*****************************************************************************
002 * Copyright by The HDF Group.                                               *
003 * Copyright by the Board of Trustees of the University of Illinois.         *
004 * All rights reserved.                                                      *
005 *                                                                           *
006 * This file is part of the HDF Java Products distribution.                  *
007 * The full copyright notice, including terms governing use, modification,   *
008 * and redistribution, is contained in the files COPYING and Copyright.html. *
009 * COPYING can be found at the root of the source code distribution tree.    *
010 * Or, see https://support.hdfgroup.org/products/licenses.html               *
011 * If you do not have access to either file, you may request a copy from     *
012 * help@hdfgroup.org.                                                        *
013 ****************************************************************************/
014
015package hdf.object.h5;
016
017import java.lang.reflect.Array;
018import java.text.DecimalFormat;
019import java.util.List;
020import java.util.Vector;
021
022import hdf.hdf5lib.H5;
023import hdf.hdf5lib.HDF5Constants;
024import hdf.hdf5lib.HDFNativeData;
025import hdf.hdf5lib.exceptions.HDF5DataFiltersException;
026import hdf.hdf5lib.exceptions.HDF5Exception;
027import hdf.hdf5lib.exceptions.HDF5LibraryException;
028import hdf.hdf5lib.structs.H5O_info_t;
029import hdf.object.Attribute;
030import hdf.object.Dataset;
031import hdf.object.Datatype;
032import hdf.object.FileFormat;
033import hdf.object.Group;
034import hdf.object.HObject;
035import hdf.object.ScalarDS;
036
037/**
 * H5ScalarDS describes a multi-dimensional array of HDF5 scalar or atomic datatypes, such as byte, int, short, long,
 * float, double and string, and the operations performed on the scalar dataset.
040 * <p>
041 * The library predefines a modest number of datatypes. For details,
042 * read <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/HDF5_Users_Guide/Datatypes/HDF5_Datatypes.htm">HDF5 Datatypes</a>
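 * <p>
 * A minimal usage sketch for reading a scalar dataset (the file name and dataset
 * path below are hypothetical):
 *
 * <pre>
 * H5File h5file = new H5File("hypothetical.h5", FileFormat.READ);
 * h5file.open();
 *
 * H5ScalarDS dset = (H5ScalarDS) h5file.get("/arrays/dset");
 * dset.init();                  // load datatype and dataspace information
 * Object data = dset.getData(); // read the current selection into memory
 *
 * h5file.close();
 * </pre>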
043 *
044 * @version 1.1 9/4/2007
045 * @author Peter X. Cao
046 */
047public class H5ScalarDS extends ScalarDS {
048    private static final long serialVersionUID = 2887517608230611642L;
049
050    private final static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5ScalarDS.class);
051
052    /**
     * The list of attributes of this data object. Members of the list are instances of Attribute.
054     */
055    private List<Attribute> attributeList;
056
057    private int nAttributes = -1;
058
059    private H5O_info_t obj_info;
060
061    /**
     * The byte array containing references of palettes. Each reference requires eight bytes of storage, so the
     * array length is 8*numberOfPalettes.
064     */
065    private byte[] paletteRefs;
066
067    /** flag to indicate if the dataset is an external dataset */
068    private boolean isExternal = false;
069
    /** flag to indicate if the dataset is a virtual dataset */
071    private boolean isVirtual = false;
072    private List<String> virtualNameList;
073
074    /**
     * flag to indicate if the datatype in file is the same as the datatype in memory
076     */
077    private boolean isNativeDatatype = false;
078
079    /**
     * Constructs an instance of an H5 scalar dataset with given file, dataset name and path.
081     * <p>
082     * For example, in H5ScalarDS(h5file, "dset", "/arrays/"), "dset" is the name of the dataset, "/arrays" is the group
083     * path of the dataset.
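     * <p>
     * A minimal sketch of constructing and initializing the dataset object (the file
     * name below is hypothetical):
     *
     * <pre>
     * H5File h5file = new H5File("hypothetical.h5", FileFormat.READ);
     * H5ScalarDS dset = new H5ScalarDS(h5file, "dset", "/arrays/");
     * dset.init();
     * </pre>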
084     *
085     * @param theFile
086     *            the file that contains the data object.
087     * @param theName
088     *            the name of the data object, e.g. "dset".
089     * @param thePath
090     *            the full path of the data object, e.g. "/arrays/".
091     */
092    public H5ScalarDS(FileFormat theFile, String theName, String thePath) {
093        this(theFile, theName, thePath, null);
094    }
095
096    /**
     * @deprecated Not for public use in the future.<br>
     *             Use {@link #H5ScalarDS(FileFormat, String, String)} instead.
099     *
100     * @param theFile
101     *            the file that contains the data object.
102     * @param theName
103     *            the name of the data object, e.g. "dset".
104     * @param thePath
105     *            the full path of the data object, e.g. "/arrays/".
106     * @param oid
107     *            the oid of the data object.
108     */
109    @Deprecated
110    public H5ScalarDS(FileFormat theFile, String theName, String thePath, long[] oid) {
111        super(theFile, theName, thePath, oid);
112        unsignedConverted = false;
113        paletteRefs = null;
114        obj_info = new H5O_info_t(-1L, -1L, 0, 0, -1L, 0L, 0L, 0L, 0L, null, null, null);
115
116        if ((oid == null) && (theFile != null)) {
117            // retrieve the object ID
118            try {
119                byte[] ref_buf = H5.H5Rcreate(theFile.getFID(), this.getFullName(), HDF5Constants.H5R_OBJECT, -1);
120                this.oid = new long[1];
121                this.oid[0] = HDFNativeData.byteToLong(ref_buf, 0);
122            }
123            catch (Exception ex) {
124                log.debug("constructor ID {} for {} failed H5Rcreate", theFile.getFID(), this.getFullName());
125            }
126        }
127    }
128
129    /*
130     * (non-Javadoc)
131     *
132     * @see hdf.object.HObject#open()
133     */
134    @Override
135    public long open() {
136        log.trace("open(): start");
137        long did = -1;
138
139        try {
140            did = H5.H5Dopen(getFID(), getPath() + getName(), HDF5Constants.H5P_DEFAULT);
141            log.trace("open(): did={}", did);
142        }
143        catch (HDF5Exception ex) {
144            log.debug("open(): Failed to open dataset {}", getPath() + getName(), ex);
145            did = -1;
146        }
147
148        log.trace("open(): finish");
149        return did;
150    }
151
152    /*
153     * (non-Javadoc)
154     *
155     * @see hdf.object.HObject#close(int)
156     */
157    @Override
158    public void close(long did) {
159        log.trace("close(): start");
160
161        if (did >= 0) {
162            try {
163                H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_LOCAL);
164            }
165            catch (Exception ex) {
166                log.debug("close(): H5Fflush(did {}) failure: ", did, ex);
167            }
168            try {
169                H5.H5Dclose(did);
170            }
171            catch (HDF5Exception ex) {
172                log.debug("close(): H5Dclose(did {}) failure: ", did, ex);
173            }
174        }
175
176        log.trace("close(): finish");
177    }
178
179    /**
180     * Retrieves datatype and dataspace information from file and sets the dataset
181     * in memory.
182     * <p>
     * init() is designed to support lazy operation in a dataset object. When a
     * data object is retrieved from file, the datatype, dataspace and raw data are
     * not loaded into memory. When the raw data is first read from file, init() is
     * called to get the datatype and dataspace information, and then the raw data
     * is loaded from file.
     * <p>
     * init() is also used to reset the selection of a dataset (start, stride and
     * count) to the default, which is the entire dataset for 1D or 2D datasets. In
     * the following example, init() at step 1) retrieves datatype and dataspace
     * information from file. getData() at step 3) reads only one data point. init()
     * at step 4) resets the selection to the whole dataset. getData() at step 6)
     * reads the values of the whole dataset into memory.
195     *
196     * <pre>
197     * dset = (Dataset) file.get(NAME_DATASET);
198     *
199     * // 1) get datatype and dataspace information from file
200     * dset.init();
201     * rank = dset.getRank(); // rank = 2, a 2D dataset
202     * count = dset.getSelectedDims();
203     * start = dset.getStartDims();
204     * dims = dset.getDims();
205     *
206     * // 2) select only one data point
207     * for (int i = 0; i &lt; rank; i++) {
     *     start[i] = 0;
209     *     count[i] = 1;
210     * }
211     *
212     * // 3) read one data point
213     * data = dset.getData();
214     *
215     * // 4) reset selection to the whole dataset
216     * dset.init();
217     *
218     * // 5) clean the memory data buffer
219     * dset.clearData();
220     *
221     * // 6) Read the whole dataset
222     * data = dset.getData();
223     * </pre>
224     */
225    @Override
226    public void init() {
227        log.trace("init(): start");
228
229        if (inited) {
230            resetSelection();
            log.trace("init(): Dataset already initialized");
232            log.trace("init(): finish");
233            return; // already called. Initialize only once
234        }
235
236        long did = -1;
237        long tid = -1;
238        long sid = -1;
239
240        did = open();
241        if (did >= 0) {
242            // check if it is an external or virtual dataset
243            long pid = -1;
244            try {
245                pid = H5.H5Dget_create_plist(did);
246                try {
247                    int nfiles = H5.H5Pget_external_count(pid);
248                    isExternal = (nfiles > 0);
249                    int layout_type = H5.H5Pget_layout(pid);
250                    if (isVirtual = (layout_type == HDF5Constants.H5D_VIRTUAL)) {
251                        try {
252                            long vmaps = H5.H5Pget_virtual_count(pid);
253                            if (vmaps > 0) {
254                                virtualNameList = new Vector<>();
255                                for (long next = 0; next < vmaps; next++) {
256                                    try {
257                                        String fname = H5.H5Pget_virtual_filename(pid, next);
258                                        virtualNameList.add(fname);
259                                        log.trace("init(): virtualNameList[{}]={}", next, fname);
260                                    }
261                                    catch (Throwable err) {
262                                        log.trace("init(): vds[{}] continue", next);
263                                        continue;
264                                    }
265                                }
266                            }
267                        }
268                        catch (Throwable err) {
269                            log.debug("init(): vds count error: ", err);
270                        }
271                    }
272                    log.trace("init(): pid={} nfiles={} isExternal={} isVirtual={}", pid, nfiles, isExternal, isVirtual);
273                }
274                catch (Exception ex) {
275                    log.debug("init(): check if it is an external or virtual dataset: ", ex);
276                }
277            }
278            catch (Exception ex) {
279                log.debug("init(): H5Dget_create_plist: ", ex);
280            }
281            finally {
282                try {
283                    H5.H5Pclose(pid);
284                }
285                catch (Exception ex) {
286                    log.debug("init(): H5Pclose(pid {}) failure: ", pid, ex);
287                }
288            }
289
290            paletteRefs = getPaletteRefs(did);
291
292            try {
293                sid = H5.H5Dget_space(did);
294                rank = H5.H5Sget_simple_extent_ndims(sid);
295                tid = H5.H5Dget_type(did);
296
297                // Check if the datatype in the file is the native datatype
298                long tmptid = -1;
299                try {
300                    tmptid = H5.H5Tget_native_type(tid);
301                    isNativeDatatype = H5.H5Tequal(tid, tmptid);
302                    log.trace("init(): isNativeDatatype={}", isNativeDatatype);
303                }
304                catch (Exception ex) {
305                    log.debug("init(): check if native type failure: ", ex);
306                }
307                finally {
308                    try {
309                        H5.H5Tclose(tmptid);
310                    }
311                    catch (Exception ex) {
312                        log.debug("init(): H5.H5Tclose(tmptid {}) failure: ", tmptid, ex);
313                    }
314                }
315
316                log.trace("init(): tid={} sid={} rank={}", tid, sid, rank);
317                datatype = new H5Datatype(tid);
318
319                log.trace("init(): tid={} is tclass={} has isText={} : isVLEN={} : isEnum={} : isUnsigned={} : isRegRef={}",
320                        tid, datatype.getDatatypeClass(), ((H5Datatype) datatype).isText(), datatype.isVLEN(), datatype.isEnum(), datatype.isUnsigned(),
321                        ((H5Datatype) datatype).isRegRef());
322
323                /* see if fill value is defined */
324                try {
325                    pid = H5.H5Dget_create_plist(did);
326                    int[] fillStatus = { 0 };
327                    if (H5.H5Pfill_value_defined(pid, fillStatus) >= 0) {
328                        if (fillStatus[0] == HDF5Constants.H5D_FILL_VALUE_USER_DEFINED) {
329                            try {
330                                fillValue = ((H5Datatype) getDatatype()).allocateArray(1);
331                            }
332                            catch (OutOfMemoryError e) {
333                                log.debug("init(): out of memory: ", e);
334                                fillValue = null;
335                            }
336
337                            log.trace("init(): fillValue={}", fillValue);
338                            try {
                                H5.H5Pget_fill_value(pid, tid, fillValue);
340                                log.trace("init(): H5Pget_fill_value={}", fillValue);
341                                if (fillValue != null) {
342                                    if (isFillValueConverted)
343                                        fillValue = ScalarDS.convertToUnsignedC(fillValue, null);
344
345                                    int n = Array.getLength(fillValue);
346                                    for (int i = 0; i < n; i++)
347                                        addFilteredImageValue((Number) Array.get(fillValue, i));
348                                }
349                            }
350                            catch (Exception ex2) {
351                                log.debug("init(): fill value was defined: ", ex2);
352                                fillValue = null;
353                            }
354                        }
355                    }
356                }
357                catch (HDF5Exception ex) {
358                    log.debug("init(): check if fill value is defined failure: ", ex);
359                }
360                finally {
361                    try {
362                        H5.H5Pclose(pid);
363                    }
364                    catch (Exception ex) {
365                        log.debug("init(): H5Pclose(pid {}) failure: ", pid, ex);
366                    }
367                }
368
369                if (rank == 0) {
370                    // a scalar data point
371                    rank = 1;
372                    dims = new long[1];
373                    dims[0] = 1;
374                    log.trace("init(): rank is a scalar data point");
375                }
376                else {
377                    dims = new long[rank];
378                    maxDims = new long[rank];
379                    H5.H5Sget_simple_extent_dims(sid, dims, maxDims);
380                    log.trace("init(): rank={}, dims={}, maxDims={}", rank, dims, maxDims);
381                }
382
383                inited = true;
384            }
385            catch (HDF5Exception ex) {
386                log.debug("init(): ", ex);
387            }
388            finally {
389                try {
390                    H5.H5Tclose(tid);
391                }
392                catch (HDF5Exception ex2) {
393                    log.debug("init(): H5Tclose(tid {}) failure: ", tid, ex2);
394                }
395                try {
396                    H5.H5Sclose(sid);
397                }
398                catch (HDF5Exception ex2) {
399                    log.debug("init(): H5Sclose(sid {}) failure: ", sid, ex2);
400                }
401            }
402
            // check for the type of image and interlace mode
            // it is a true color image when IMAGE_SUBCLASS = IMAGE_TRUECOLOR;
            // the interlace mode is either INTERLACE_PIXEL (default) or
            // INTERLACE_PLANE, as given by the INTERLACE_MODE attribute
408            if ((rank >= 3) && isImage) {
409                interlace = -1;
410                isTrueColor = isStringAttributeOf(did, "IMAGE_SUBCLASS", "IMAGE_TRUECOLOR");
411
412                if (isTrueColor) {
413                    interlace = INTERLACE_PIXEL;
414                    if (isStringAttributeOf(did, "INTERLACE_MODE", "INTERLACE_PLANE")) {
415                        interlace = INTERLACE_PLANE;
416                    }
417                }
418            }
419
420            log.trace("init(): close dataset");
421            close(did);
422
423            startDims = new long[rank];
424            selectedDims = new long[rank];
425
426            resetSelection();
427        }
428        else {
429            log.debug("init(): failed to open dataset");
430        }
431        log.trace("init(): rank={}, startDims={}, selectedDims={}", rank, startDims, selectedDims);
432        log.trace("init(): finish");
433    }
434
435    /*
436     * (non-Javadoc)
437     *
438     * @see hdf.object.DataFormat#hasAttribute()
439     */
440    @Override
441    public boolean hasAttribute() {
442        obj_info.num_attrs = nAttributes;
443
444        if (obj_info.num_attrs < 0) {
445            long did = open();
446            if (did >= 0) {
447                long tid = -1;
448                obj_info.num_attrs = 0;
449
450                try {
451                    obj_info = H5.H5Oget_info(did);
452                    nAttributes = (int) obj_info.num_attrs;
453
454                    tid = H5.H5Dget_type(did);
455                    H5Datatype DSdatatype = new H5Datatype(tid);
456
457                    log.trace("hasAttribute(): dataclass type: isText={},isVLEN={},isEnum={}", DSdatatype.isText(), DSdatatype.isVLEN(), DSdatatype.isEnum());
458                }
459                catch (Exception ex) {
460                    obj_info.num_attrs = 0;
461                    log.debug("hasAttribute(): get object info: ", ex);
462                }
463                finally {
464                    try {
465                        H5.H5Tclose(tid);
466                    }
467                    catch (HDF5Exception ex) {
468                        log.debug("hasAttribute(): H5Tclose(tid {}) failure: ", tid, ex);
469                    }
470                }
471
472                if(nAttributes > 0) {
                    // check if it is an image
475                    Object avalue = getAttrValue(did, "CLASS");
476                    if (avalue != null) {
477                        try {
478                            isImageDisplay = isImage = "IMAGE".equalsIgnoreCase(new String((byte[]) avalue).trim());
479                            log.trace("hasAttribute(): isImageDisplay dataset: {} with value = {}", isImageDisplay, avalue);
480                        }
481                        catch (Throwable err) {
482                            log.debug("hasAttribute(): check image: ", err);
483                        }
484                    }
485
486                    // retrieve the IMAGE_MINMAXRANGE
487                    avalue = getAttrValue(did, "IMAGE_MINMAXRANGE");
488                    if (avalue != null) {
489                        double x0 = 0, x1 = 0;
490                        try {
491                            x0 = Double.valueOf(java.lang.reflect.Array.get(avalue, 0).toString()).doubleValue();
492                            x1 = Double.valueOf(java.lang.reflect.Array.get(avalue, 1).toString()).doubleValue();
493                        }
494                        catch (Exception ex2) {
495                            x0 = x1 = 0;
496                        }
497                        if (x1 > x0) {
498                            imageDataRange = new double[2];
499                            imageDataRange[0] = x0;
500                            imageDataRange[1] = x1;
501                        }
502                    }
503
504                    try {
505                        checkCFconvention(did);
506                    }
507                    catch (Exception ex) {
508                        log.debug("hasAttribute(): checkCFconvention(did {}):", did, ex);
509                    }
510                }
511                close(did);
512            }
513            else {
514                log.debug("hasAttribute(): could not open dataset");
515            }
516        }
517
518        log.trace("hasAttribute(): nAttributes={}", obj_info.num_attrs);
519        return (obj_info.num_attrs > 0);
520    }
521
522    /*
523     * (non-Javadoc)
524     *
525     * @see hdf.object.Dataset#getDatatype()
526     */
527    @Override
528    public Datatype getDatatype() {
529        log.trace("getDatatype(): start");
530        if (datatype == null) {
531            log.trace("getDatatype(): datatype == null");
532            long did = -1;
533            long tid = -1;
534
535            did = open();
536            if (did >= 0) {
537                try {
538                    tid = H5.H5Dget_type(did);
539                    log.trace("getDatatype(): isNativeDatatype={}", isNativeDatatype);
540                    if (!isNativeDatatype) {
541                        long tmptid = -1;
542                        try {
543                            tmptid = H5Datatype.toNative(tid);
544                            if (tmptid >= 0) {
545                                try {
546                                    H5.H5Tclose(tid);
547                                }
548                                catch (Exception ex2) {
549                                    log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex2);
550                                }
551                                tid = tmptid;
552                            }
553                        }
554                        catch (Exception ex) {
555                            log.debug("getDatatype(): toNative: ", ex);
556                        }
557                    }
558                    datatype = new H5Datatype(tid);
559                }
560                catch (Exception ex) {
561                    log.debug("getDatatype(): get datatype failure: ", ex);
562                }
563                finally {
564                    try {
565                        H5.H5Tclose(tid);
566                    }
567                    catch (HDF5Exception ex) {
568                        log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex);
569                    }
570                    try {
571                        H5.H5Dclose(did);
572                    }
573                    catch (HDF5Exception ex) {
574                        log.debug("getDatatype(): H5Dclose(did {}) failure: ", did, ex);
575                    }
576                }
577            }
578        }
579
580        log.trace("getDatatype(): finish");
581        return datatype;
582    }
583
584    /*
585     * (non-Javadoc)
586     *
587     * @see hdf.object.Dataset#clear()
588     */
589    @Override
590    public void clear() {
591        super.clear();
592
593        if (attributeList != null) {
594            ((Vector<Attribute>) attributeList).setSize(0);
595        }
596    }
597
598    /*
599     * (non-Javadoc)
600     *
601     * @see hdf.object.Dataset#readBytes()
602     */
603    @Override
604    public byte[] readBytes() throws HDF5Exception {
605        log.trace("readBytes(): start");
606
607        byte[] theData = null;
608
609        if (!isInited())
610            init();
611
612        long did = open();
613        if (did >= 0) {
614            long fspace = -1;
615            long mspace = -1;
616            long tid = -1;
617
618            try {
619                long[] lsize = { 1 };
620                for (int j = 0; j < selectedDims.length; j++) {
621                    lsize[0] *= selectedDims[j];
622                }
623
624                fspace = H5.H5Dget_space(did);
625                mspace = H5.H5Screate_simple(rank, selectedDims, null);
626
627                // set the rectangle selection
628                // HDF5 bug: for scalar dataset, H5Sselect_hyperslab gives core dump
629                if (rank * dims[0] > 1) {
630                    H5.H5Sselect_hyperslab(fspace, HDF5Constants.H5S_SELECT_SET, startDims, selectedStride,
631                            selectedDims, null); // set block to 1
632                }
633
634                tid = H5.H5Dget_type(did);
635                long size = H5.H5Tget_size(tid) * lsize[0];
636                log.trace("readBytes(): size = {}", size);
637
638                if (size < Integer.MIN_VALUE || size > Integer.MAX_VALUE) throw new Exception("Invalid int size");
639
640                theData = new byte[(int)size];
641
642                log.trace("readBytes(): H5Dread: did={} tid={} fspace={} mspace={}", did, tid, fspace, mspace);
643                H5.H5Dread(did, tid, mspace, fspace, HDF5Constants.H5P_DEFAULT, theData);
644            }
645            catch (Exception ex) {
646                log.debug("readBytes(): failed to read data: ", ex);
647            }
648            finally {
649                try {
650                    H5.H5Sclose(fspace);
651                }
652                catch (Exception ex2) {
653                    log.debug("readBytes(): H5Sclose(fspace {}) failure: ", fspace, ex2);
654                }
655                try {
656                    H5.H5Sclose(mspace);
657                }
658                catch (Exception ex2) {
659                    log.debug("readBytes(): H5Sclose(mspace {}) failure: ", mspace, ex2);
660                }
661                try {
662                    H5.H5Tclose(tid);
663                }
664                catch (HDF5Exception ex2) {
665                    log.debug("readBytes(): H5Tclose(tid {}) failure: ", tid, ex2);
666                }
667                close(did);
668            }
669        }
670
671        log.trace("readBytes(): finish");
672        return theData;
673    }
674
675    /**
676     * Reads the data from file.
677     * <p>
678     * read() reads the data from file to a memory buffer and returns the memory
679     * buffer. The dataset object does not hold the memory buffer. To store the
680     * memory buffer in the dataset object, one must call getData().
681     * <p>
682     * By default, the whole dataset is read into memory. Users can also select
683     * a subset to read. Subsetting is done in an implicit way.
684     * <p>
685     * <b>How to Select a Subset</b>
686     * <p>
687     * A selection is specified by three arrays: start, stride and count.
688     * <ol>
689     * <li>start: offset of a selection
690     * <li>stride: determines how many elements to move in each dimension
691     * <li>count: number of elements to select in each dimension
692     * </ol>
     * getStartDims(), getStride() and getSelectedDims() return the start,
694     * stride and count arrays respectively. Applications can make a selection
695     * by changing the values of the arrays.
696     * <p>
697     * The following example shows how to make a subset. In the example, the
698     * dataset is a 4-dimensional array of [200][100][50][10], i.e. dims[0]=200;
699     * dims[1]=100; dims[2]=50; dims[3]=10; <br>
700     * We want to select every other data point in dims[1] and dims[2]
701     *
702     * <pre>
703     * int rank = dataset.getRank(); // number of dimensions of the dataset
704     * long[] dims = dataset.getDims(); // the dimension sizes of the dataset
705     * long[] selected = dataset.getSelectedDims(); // the selected size of the
706     *                                              // dataset
707     * long[] start = dataset.getStartDims(); // the offset of the selection
708     * long[] stride = dataset.getStride(); // the stride of the dataset
709     * int[] selectedIndex = dataset.getSelectedIndex(); // the selected
710     *                                                   // dimensions for
711     *                                                   // display
712     *
713     * // select dim1 and dim2 as 2D data for display, and slice through dim0
714     * selectedIndex[0] = 1;
715     * selectedIndex[1] = 2;
     * selectedIndex[2] = 0;
717     *
718     * // reset the selection arrays
719     * for (int i = 0; i &lt; rank; i++) {
720     *     start[i] = 0;
721     *     selected[i] = 1;
722     *     stride[i] = 1;
723     * }
724     *
725     * // set stride to 2 on dim1 and dim2 so that every other data point is
726     * // selected.
727     * stride[1] = 2;
728     * stride[2] = 2;
729     *
730     * // set the selection size of dim1 and dim2
731     * selected[1] = dims[1] / stride[1];
     * selected[2] = dims[2] / stride[2];
733     *
     * // when dataset.getData() is called, the selection above will be used
     * // since the dimension arrays are passed by reference. Changes of these
     * // arrays outside the dataset object directly change the values of the
     * // arrays in the dataset object.
739     * </pre>
740     * <p>
741     * For ScalarDS, the memory data buffer is a one-dimensional array of byte,
742     * short, int, float, double or String type based on the datatype of the
743     * dataset.
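     * For example, if the dataset holds signed 32-bit integers, the buffer can be
     * cast directly (a minimal sketch):
     *
     * <pre>
     * dset.init();
     * int[] values = (int[]) dset.getData();
     * </pre>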
744     * <p>
     * For CompoundDS, the memory data object is a java.util.List object. Each
746     * element of the list is a data array that corresponds to a compound field.
747     * <p>
748     * For example, if compound dataset "comp" has the following nested
     * structure, with member datatypes
750     *
751     * <pre>
752     * comp --&gt; m01 (int)
753     * comp --&gt; m02 (float)
754     * comp --&gt; nest1 --&gt; m11 (char)
755     * comp --&gt; nest1 --&gt; m12 (String)
756     * comp --&gt; nest1 --&gt; nest2 --&gt; m21 (long)
757     * comp --&gt; nest1 --&gt; nest2 --&gt; m22 (double)
758     * </pre>
759     *
760     * getData() returns a list of six arrays: {int[], float[], char[],
761     * String[], long[] and double[]}.
762     *
763     * @return the data read from file.
764     *
765     * @see #getData()
766     * @see hdf.object.DataFormat#read()
767     *
768     * @throws Exception
     *             if the object cannot be read
770     */
771    @Override
772    public Object read() throws Exception {
773        log.trace("read(): start");
774
775        Object theData = null;
776        H5Datatype DSdatatype = null;
777
778        if (!isInited())
779            init();
780
781        try {
782            DSdatatype = (H5Datatype) this.getDatatype();
783        }
784        catch (Exception ex) {
785            log.debug("read(): get datatype: ", ex);
786        }
787
788        /*
789         * Check for any unsupported datatypes and fail early before
790         * attempting to read the dataset
791         */
792        if (DSdatatype.isArray()) {
793            H5Datatype baseType = (H5Datatype) DSdatatype.getDatatypeBase();
794
795            if (baseType == null) {
796                log.debug("read(): ARRAY datatype has no base type");
797                throw new Exception("Dataset's datatype (ARRAY) has no base datatype");
798            }
799        }
800
801        if (isExternal) {
802            String pdir = this.getFileFormat().getAbsoluteFile().getParent();
803
804            if (pdir == null) {
805                pdir = ".";
806            }
807            System.setProperty("user.dir", pdir);// H5.H5Dchdir_ext(pdir);
808            log.trace("read(): External dataset: user.dir={}", pdir);
809        }
810
811        log.trace("read(): open dataset");
812
813        long did = open();
814        if (did >= 0) {
815            long[] spaceIDs = { -1, -1 }; // spaceIDs[0]=mspace, spaceIDs[1]=fspace
816
817            try {
818                long totalSelectedSpacePoints = selectHyperslab(did, spaceIDs);
819
820                log.trace("read(): selected {} points in dataset dataspace", totalSelectedSpacePoints);
821
822                if (totalSelectedSpacePoints == 0) {
823                    log.debug("read(): No data to read. Dataset or selected subset is empty.");
824                    log.trace("read(): finish");
825                    throw new HDF5Exception("No data to read.\nEither the dataset or the selected subset is empty.");
826                }
827
828                if (totalSelectedSpacePoints < Integer.MIN_VALUE || totalSelectedSpacePoints > Integer.MAX_VALUE) {
829                    log.debug("read(): totalSelectedSpacePoints outside valid Java int range; unsafe cast");
830                    log.trace("read(): finish");
831                    throw new HDF5Exception("Invalid int size");
832                }
833
834                if (log.isDebugEnabled()) {
                    // check if storage space is allocated
836                    try {
837                        long ssize = H5.H5Dget_storage_size(did);
838                        log.trace("read(): Storage space allocated = {}.", ssize);
839                    }
840                    catch (Exception ex) {
841                        log.debug("read(): check if storage space is allocated:", ex);
842                    }
843                }
844
845                log.trace("read(): originalBuf={} isText={} isREF={} totalSelectedSpacePoints={} nPoints={}", originalBuf, DSdatatype.isText(), DSdatatype.isRefObj(), totalSelectedSpacePoints, nPoints);
846                if ((originalBuf == null) || DSdatatype.isEnum() || DSdatatype.isText() || DSdatatype.isRefObj() || ((originalBuf != null) && (totalSelectedSpacePoints != nPoints))) {
847                    try {
848                        theData = DSdatatype.allocateArray((int) totalSelectedSpacePoints);
849                    }
850                    catch (OutOfMemoryError err) {
851                        log.debug("read(): Out of memory.");
852                        log.trace("read(): finish");
853                        throw new HDF5Exception("Out Of Memory.");
854                    }
855                }
856                else {
857                    theData = originalBuf; // reuse the buffer if the size is the same
858                }
859
860                /*
861                 * Actually read the data now that everything has been setup
862                 */
863                if (theData != null) {
864                    long tid = -1;
865                    try {
866                        tid = DSdatatype.createNative();
867
868                        if (DSdatatype.isVLEN() || (DSdatatype.isArray() && DSdatatype.getDatatypeBase().isVLEN())) {
869                            log.trace("read(): H5DreadVL did={} tid={} spaceIDs[0]={} spaceIDs[1]={}", did, tid, spaceIDs[0], spaceIDs[1]);
870                            H5.H5DreadVL(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, (Object[]) theData);
871                        }
872                        else {
873                            log.trace("read(): H5Dread did={} tid={} spaceIDs[0]={} spaceIDs[1]={}", did, tid, spaceIDs[0], spaceIDs[1]);
874                            H5.H5Dread(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, theData);
875                        }
876
877                        /*
878                         * Perform any necessary data conversions
879                         */
880                        try {
881                            if (DSdatatype.isText() && convertByteToString && theData instanceof byte[]) {
882                                log.trace("read(): isText: converting byte array to string array");
883                                theData = byteToString((byte[]) theData, (int) DSdatatype.getDatatypeSize());
884                            }
885                            else if (DSdatatype.isRefObj()) {
886                                log.trace("read(): isREF: converting byte array to long array");
887                                theData = HDFNativeData.byteToLong((byte[]) theData);
888                            }
889                        }
890                        catch (Exception ex) {
891                            log.debug("read(): data conversion failure: ", ex);
892                        }
893                    }
894                    catch (HDF5DataFiltersException exfltr) {
895                        log.debug("read(): read failure:", exfltr);
896                        log.trace("read(): finish");
897                        throw new Exception("Filter not available exception: " + exfltr.getMessage(), exfltr);
898                    }
899                    catch (Exception ex) {
900                        log.debug("read(): read failure: ", ex);
901                        log.trace("read(): finish");
902                        throw new HDF5Exception(ex.getMessage());
903                    }
904                    finally {
905                        DSdatatype.close(tid);
906                    }
907                } // if (theData != null)
908            }
909            finally {
910                if (HDF5Constants.H5S_ALL != spaceIDs[0]) {
911                    try {
912                        H5.H5Sclose(spaceIDs[0]);
913                    }
914                    catch (Exception ex) {
915                        log.debug("read(): H5Sclose(spaceIDs[0] {}) failure: ", spaceIDs[0], ex);
916                    }
917                }
918
919                if (HDF5Constants.H5S_ALL != spaceIDs[1]) {
920                    try {
921                        H5.H5Sclose(spaceIDs[1]);
922                    }
923                    catch (Exception ex) {
924                        log.debug("read(): H5Sclose(spaceIDs[1] {}) failure: ", spaceIDs[1], ex);
925                    }
926                }
927
928                close(did);
929            }
930        }
931
932        log.trace("read(): finish");
933        return theData;
934    }
935
936    /**
937     * Writes the given data buffer into this dataset in a file.
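     * <p>
     * A minimal sketch, assuming the dataset was created as a one-dimensional array
     * of ten signed 32-bit integers:
     *
     * <pre>
     * dset.init(); // select the whole dataset
     * int[] values = new int[10];
     * for (int i = 0; i &lt; values.length; i++)
     *     values[i] = i;
     * dset.write(values);
     * </pre>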
938     *
939     * @param buf
940     *            The buffer that contains the data values.
941     *
942     * @throws HDF5Exception
943     *             If there is an error at the HDF5 library level.
944     */
945    @Override
946    public void write(Object buf) throws HDF5Exception {
947        log.trace("write(): start");
948
949        Object tmpData = null;
950        H5Datatype DSdatatype = null;
951
952        if (buf == null) {
953            log.debug("write(): buf is null");
954            log.trace("write(): finish");
955            return;
956        }
957
958        if (!isInited())
959            init();
960
961        try {
962            DSdatatype = (H5Datatype) this.getDatatype();
963        }
964        catch (Exception ex) {
965            log.debug("write(): get datatype: ", ex);
966        }
967
968        /*
969         * Check for any unsupported datatypes and fail early before
970         * attempting to write to the dataset
971         */
972        if (DSdatatype.isVLEN() && !DSdatatype.isText()) {
973            log.debug("write(): Cannot write non-string variable-length data");
974            log.trace("write(): finish");
975            throw (new HDF5Exception("Writing non-string variable-length data is not supported"));
976        }
977        if (DSdatatype.isRegRef()) {
978            log.debug("write(): Cannot write region reference data");
979            log.trace("write(): finish");
980            throw (new HDF5Exception("Writing region reference data is not supported"));
981        }
982
983        log.trace("write(): open dataset");
984
985        long did = open();
986        if (did >= 0) {
987            long spaceIDs[] = { -1, -1 }; // spaceIDs[0]=mspace, spaceIDs[1]=fspace
988
989            try {
990                long totalSelectedSpacePoints = selectHyperslab(did, spaceIDs);
991
992                log.trace("write(): selected {} points in dataset dataspace", totalSelectedSpacePoints);
993
994                if (totalSelectedSpacePoints == 0) {
995                    log.debug("write(): No data to write. Selected subset is empty.");
996                    log.trace("write(): finish");
997                    throw new HDF5Exception("No data to write.\nThe selected subset is empty.");
998                }
999
1000                if (totalSelectedSpacePoints < Integer.MIN_VALUE || totalSelectedSpacePoints > Integer.MAX_VALUE) {
1001                    log.debug("write(): totalSelectedSpacePoints outside valid Java int range; unsafe cast");
1002                    log.trace("write(): finish");
1003                    throw new HDF5Exception("Invalid int size");
1004                }
1005
1006                /*
1007                 * Perform any necessary data conversions before writing the data.
1008                 *
1009                 * Note that v-len strings do not get converted, regardless of
1010                 * conversion request type
1011                 */
1012                try {
1013                    // Check if we need to convert integer data
1014                    int tsize = (int) DSdatatype.getDatatypeSize();
1015                    String cname = buf.getClass().getName();
1016                    char dname = cname.charAt(cname.lastIndexOf("[") + 1);
1017                    boolean doIntConversion = (((tsize == 1) && (dname == 'S')) || ((tsize == 2) && (dname == 'I'))
1018                            || ((tsize == 4) && (dname == 'J')) || (DSdatatype.isUnsigned() && unsignedConverted));
1019
1020                    tmpData = buf;
1021
1022                    if (doIntConversion) {
1023                        log.trace("write(): converting integer data to unsigned C-type integers");
1024                        tmpData = convertToUnsignedC(buf, null);
1025                    }
1026                    else if (DSdatatype.isText() && convertByteToString && !DSdatatype.isVarStr()) {
1027                        log.trace("write(): converting string array to byte array");
1028                        tmpData = stringToByte((String[]) buf, tsize);
1029                    }
1030                    else if (DSdatatype.isEnum() && (Array.get(buf, 0) instanceof String)) {
1031                        log.trace("write(): converting enum names to values");
1032                        tmpData = DSdatatype.convertEnumNameToValue((String[]) buf);
1033                    }
1034                }
1035                catch (Exception ex) {
1036                    log.debug("write(): data conversion failure: ", ex);
1037                    tmpData = null;
1038                }
1039
1040                /*
1041                 * Actually write the data now that everything has been setup
1042                 */
1043                if (tmpData != null) {
1044                    long tid = -1;
1045                    try {
1046                        tid = DSdatatype.createNative();
1047
1048                        if (DSdatatype.isVLEN() || (DSdatatype.isArray() && DSdatatype.getDatatypeBase().isVLEN())) {
1049                            log.trace("write(): H5DwriteVL did={} tid={} spaceIDs[0]={} spaceIDs[1]={}", did, tid, spaceIDs[0], spaceIDs[1]);
1050                            H5.H5DwriteVL(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, (Object[]) tmpData);
1051                        }
1052                        else {
1053                            log.trace("write(): H5Dwrite did={} tid={} spaceIDs[0]={} spaceIDs[1]={}", did, tid, spaceIDs[0], spaceIDs[1]);
1054                            H5.H5Dwrite(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, tmpData);
1055                        }
1056                    }
1057                    catch (Exception ex) {
1058                        log.debug("write(): write failure: ", ex);
1059                        log.trace("write(): finish");
1060                        throw new HDF5Exception(ex.getMessage());
1061                    }
1062                    finally {
1063                        DSdatatype.close(tid);
1064                    }
1065                }
1066            }
1067            finally {
1068                tmpData = null;
1069
1070                if (HDF5Constants.H5S_ALL != spaceIDs[0]) {
1071                    try {
1072                        H5.H5Sclose(spaceIDs[0]);
1073                    }
1074                    catch (Exception ex) {
1075                        log.debug("write(): H5Sclose(spaceIDs[0] {}) failure: ", spaceIDs[0], ex);
1076                    }
1077                }
1078
1079                if (HDF5Constants.H5S_ALL != spaceIDs[1]) {
1080                    try {
1081                        H5.H5Sclose(spaceIDs[1]);
1082                    }
1083                    catch (Exception ex) {
1084                        log.debug("write(): H5Sclose(spaceIDs[1] {}) failure: ", spaceIDs[1], ex);
1085                    }
1086                }
1087
1088                close(did);
1089            }
1090        }
1091
1092        log.trace("write(): finish");
1093    }
1094
1095    /**
     * Sets up the hyperslab selection.
1097     *
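     * <p>
     * A sketch of how the read and write methods in this class use it:
     *
     * <pre>
     * long[] spaceIDs = { -1, -1 };
     * long totalPoints = selectHyperslab(did, spaceIDs);
     * // spaceIDs[0] = memory space, spaceIDs[1] = file space
     * </pre>
     *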
1098     * @param did
1099     *            IN dataset ID
1100     * @param spaceIDs
1101     *            IN/OUT memory and file space IDs -- spaceIDs[0]=mspace, spaceIDs[1]=fspace
1102     *
     * @return the total number of data points selected
1104     *
1105     * @throws HDF5Exception
1106     *             If there is an error at the HDF5 library level.
1107     */
1108    private long selectHyperslab(long did, long[] spaceIDs) throws HDF5Exception {
1109        log.trace("selectHyperslab(): start");
1110        long lsize = 1;
1111
1112        boolean isAllSelected = true;
1113        for (int i = 0; i < rank; i++) {
1114            lsize *= selectedDims[i];
1115            if (selectedDims[i] < dims[i]) {
1116                isAllSelected = false;
1117            }
1118        }
1119        log.trace("selectHyperslab(): isAllSelected={}", isAllSelected);
1120
1121        if (isAllSelected) {
1122            spaceIDs[0] = HDF5Constants.H5S_ALL;
1123            spaceIDs[1] = HDF5Constants.H5S_ALL;
1124        }
1125        else {
1126            spaceIDs[1] = H5.H5Dget_space(did);
1127
            // When a 1D dataspace is used with a chunked dataset, reading is very slow.
            // This is a known problem in the HDF5 library for chunked datasets.
            // mspace = H5.H5Screate_simple(1, lsize, null);
1132            spaceIDs[0] = H5.H5Screate_simple(rank, selectedDims, null);
1133            H5.H5Sselect_hyperslab(spaceIDs[1], HDF5Constants.H5S_SELECT_SET, startDims, selectedStride, selectedDims,
1134                    null);
1135        }
1136
1137        if ((rank > 1) && (selectedIndex[0] > selectedIndex[1]))
1138            isDefaultImageOrder = false;
1139        else
1140            isDefaultImageOrder = true;
1141
1142        log.trace("selectHyperslab(): isDefaultImageOrder={}", isDefaultImageOrder);
1143        log.trace("selectHyperslab(): finish");
1144        return lsize;
1145    }
1146
1147    /*
1148     * (non-Javadoc)
1149     *
1150     * @see hdf.object.DataFormat#getMetadata()
1151     */
1152    @Override
1153    public List<Attribute> getMetadata() throws HDF5Exception {
1154        return this.getMetadata(fileFormat.getIndexType(null), fileFormat.getIndexOrder(null));
1155    }
1156
1157    /*
1158     * (non-Javadoc)
1159     *
1160     * @see hdf.object.DataFormat#getMetadata(int...)
1161     */
1162    public List<Attribute> getMetadata(int... attrPropList) throws HDF5Exception {
1163        log.trace("getMetadata(): start");
1164
1165        if (!isInited()) {
1166            init();
1167            log.trace("getMetadata(): inited");
1168        }
1169
1170        try {
1171            this.linkTargetObjName = H5File.getLinkTargetName(this);
1172        }
1173        catch (Exception ex) {
1174            log.debug("getMetadata(): getLinkTargetName failed: ", ex);
1175        }
1176
1177        if (attributeList != null) {
1178            log.trace("getMetadata(): attributeList != null");
1179            log.trace("getMetadata(): finish");
1180            return attributeList;
1181        }
1182
1183        long did = -1;
1184        long pcid = -1;
1185        long paid = -1;
1186        int indxType = fileFormat.getIndexType(null);
1187        int order = fileFormat.getIndexOrder(null);
1188
1189        // load attributes first
1190        if (attrPropList.length > 0) {
1191            indxType = attrPropList[0];
1192            if (attrPropList.length > 1) {
1193                order = attrPropList[1];
1194            }
1195        }
1196
1197        attributeList = H5File.getAttribute(this, indxType, order);
1198        log.trace("getMetadata(): attributeList loaded");
1199
1200        log.trace("getMetadata(): open dataset");
1201        did = open();
1202        if (did >= 0) {
1203            log.trace("getMetadata(): dataset opened");
1204            try {
1205                compression = "";
1206
1207                // get the compression and chunk information
1208                pcid = H5.H5Dget_create_plist(did);
1209                paid = H5.H5Dget_access_plist(did);
1210                long storage_size = H5.H5Dget_storage_size(did);
1211                int nfilt = H5.H5Pget_nfilters(pcid);
1212                int layout_type = H5.H5Pget_layout(pcid);
1213                if (layout_type == HDF5Constants.H5D_CHUNKED) {
1214                    chunkSize = new long[rank];
1215                    H5.H5Pget_chunk(pcid, rank, chunkSize);
1216                    int n = chunkSize.length;
1217                    storage_layout = "CHUNKED: " + String.valueOf(chunkSize[0]);
1218                    for (int i = 1; i < n; i++) {
1219                        storage_layout += " X " + chunkSize[i];
1220                    }
1221
1222                    if (nfilt > 0) {
1223                        long nelmts = 1;
1224                        long uncomp_size;
1225                        long datum_size = getDatatype().getDatatypeSize();
1226
1227                        if (datum_size < 0) {
1228                            long tmptid = -1;
1229                            try {
1230                                tmptid = H5.H5Dget_type(did);
1231                                datum_size = H5.H5Tget_size(tmptid);
1232                            }
1233                            finally {
1234                                try {H5.H5Tclose(tmptid);}
1235                                catch (Exception ex2) {log.debug("getMetadata(): H5Tclose(tmptid {}) failure: ", tmptid, ex2);}
1236                            }
1237                        }
1238
1239                        for (int i = 0; i < rank; i++) {
1240                            nelmts *= dims[i];
1241                        }
1242                        uncomp_size = nelmts * datum_size;
1243
1244                        /* compression ratio = uncompressed size / compressed size */
1245
1246                        if (storage_size != 0) {
1247                            double ratio = (double) uncomp_size / (double) storage_size;
1248                            DecimalFormat df = new DecimalFormat();
1249                            df.setMinimumFractionDigits(3);
1250                            df.setMaximumFractionDigits(3);
1251                            compression += df.format(ratio) + ":1";
1252                        }
1253                    }
1254                }
1255                else if (layout_type == HDF5Constants.H5D_COMPACT) {
1256                    storage_layout = "COMPACT";
1257                }
1258                else if (layout_type == HDF5Constants.H5D_CONTIGUOUS) {
1259                    storage_layout = "CONTIGUOUS";
1260                    if (H5.H5Pget_external_count(pcid) > 0)
1261                        storage_layout += " - EXTERNAL ";
1262                }
1263                else if (layout_type == HDF5Constants.H5D_VIRTUAL) {
1264                    storage_layout = "VIRTUAL - ";
1265                    try {
1266                        long vmaps = H5.H5Pget_virtual_count(pcid);
1267                        try {
1268                            int virt_view = H5.H5Pget_virtual_view(paid);
1269                            long virt_gap = H5.H5Pget_virtual_printf_gap(paid);
1270                            if (virt_view == HDF5Constants.H5D_VDS_FIRST_MISSING)
1271                                storage_layout += "First Missing";
1272                            else
1273                                storage_layout += "Last Available";
1274                            storage_layout += "\nGAP : " + String.valueOf(virt_gap);
1275                        }
1276                        catch (Throwable err) {
1277                            log.debug("getMetadata(): vds error: ", err);
1278                            storage_layout += "ERROR";
1279                        }
1280                        storage_layout += "\nMAPS : " + String.valueOf(vmaps);
1281                        if (vmaps > 0) {
1282                            for (long next = 0; next < vmaps; next++) {
1283                                try {
1284                                    H5.H5Pget_virtual_vspace(pcid, next);
1285                                    H5.H5Pget_virtual_srcspace(pcid, next);
1286                                    String fname = H5.H5Pget_virtual_filename(pcid, next);
1287                                    String dsetname = H5.H5Pget_virtual_dsetname(pcid, next);
1288                                    storage_layout += "\n" + fname + " : " + dsetname;
1289                                }
1290                                catch (Throwable err) {
1291                                    log.debug("getMetadata(): vds space[{}] error: ", next, err);
1292                                    log.trace("getMetadata(): vds[{}] continue", next);
1293                                    storage_layout += "ERROR";
1294                                    continue;
1295                                }
1296                            }
1297                        }
1298                    }
1299                    catch (Throwable err) {
1300                        log.debug("getMetadata(): vds count error: ", err);
1301                        storage_layout += "ERROR";
1302                    }
1303                }
1304                else {
1305                    chunkSize = null;
1306                    storage_layout = "NONE";
1307                }
1308
1309                int[] flags = { 0, 0 };
1310                long[] cd_nelmts = { 20 };
1311                int[] cd_values = new int[(int) cd_nelmts[0]];
1312                String[] cd_name = { "", "" };
1313                log.trace("getMetadata(): {} filters in pipeline", nfilt);
1314                int filter = -1;
1315                int[] filter_config = { 1 };
1316                filters = "";
1317
1318                for (int i = 0, k = 0; i < nfilt; i++) {
1319                    log.trace("getMetadata(): filter[{}]", i);
1320                    if (i > 0) {
1321                        filters += ", ";
1322                    }
1323                    if (k > 0) {
1324                        compression += ", ";
1325                    }
1326
1327                    try {
1328                        cd_nelmts[0] = 20;
                        cd_values = new int[(int) cd_nelmts[0]];
1331                        filter = H5.H5Pget_filter(pcid, i, flags, cd_nelmts, cd_values, 120, cd_name, filter_config);
1332                        log.trace("getMetadata(): filter[{}] is {} has {} elements ", i, cd_name[0], cd_nelmts[0]);
1333                        for (int j = 0; j < cd_nelmts[0]; j++) {
1334                            log.trace("getMetadata(): filter[{}] element {} = {}", i, j, cd_values[j]);
1335                        }
1336                    }
1337                    catch (Throwable err) {
1338                        log.debug("getMetadata(): filter[{}] error: ", i, err);
1339                        log.trace("getMetadata(): filter[{}] continue", i);
1340                        filters += "ERROR";
1341                        continue;
1342                    }
1343
1344                    if (filter == HDF5Constants.H5Z_FILTER_NONE) {
1345                        filters += "NONE";
1346                    }
1347                    else if (filter == HDF5Constants.H5Z_FILTER_DEFLATE) {
1348                        filters += "GZIP";
1349                        compression += compression_gzip_txt + cd_values[0];
1350                        k++;
1351                    }
1352                    else if (filter == HDF5Constants.H5Z_FILTER_FLETCHER32) {
1353                        filters += "Error detection filter";
1354                    }
1355                    else if (filter == HDF5Constants.H5Z_FILTER_SHUFFLE) {
1356                        filters += "SHUFFLE: Nbytes = " + cd_values[0];
1357                    }
1358                    else if (filter == HDF5Constants.H5Z_FILTER_NBIT) {
1359                        filters += "NBIT";
1360                    }
1361                    else if (filter == HDF5Constants.H5Z_FILTER_SCALEOFFSET) {
1362                        filters += "SCALEOFFSET: MIN BITS = " + cd_values[0];
1363                    }
1364                    else if (filter == HDF5Constants.H5Z_FILTER_SZIP) {
1365                        filters += "SZIP";
1366                        compression += "SZIP: Pixels per block = " + cd_values[1];
1367                        k++;
1368                        int flag = -1;
1369                        try {
1370                            flag = H5.H5Zget_filter_info(filter);
1371                        }
1372                        catch (Exception ex) {
1373                            log.debug("getMetadata(): H5Zget_filter_info failure: ", ex);
1374                            flag = -1;
1375                        }
1376                        if (flag == HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) {
1377                            compression += ": H5Z_FILTER_CONFIG_DECODE_ENABLED";
1378                        }
1379                        else if ((flag == HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED)
1380                                || (flag >= (HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED
1381                                        + HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED))) {
1382                            compression += ": H5Z_FILTER_CONFIG_ENCODE_ENABLED";
1383                        }
1384                    }
1385                    else {
1386                        filters += "USERDEFINED " + cd_name[0] + "(" + filter + "): ";
1387                        for (int j = 0; j < cd_nelmts[0]; j++) {
1388                            if (j > 0)
1389                                filters += ", ";
1390                            filters += cd_values[j];
1391                        }
1392                        log.debug("getMetadata(): filter[{}] is user defined compression", i);
1393                    }
1394                } // for (int i=0; i<nfilt; i++)
1395
1396                if (compression.length() == 0) {
1397                    compression = "NONE";
1398                }
1399                log.trace("getMetadata(): filter compression={}", compression);
1400
1401                if (filters.length() == 0) {
1402                    filters = "NONE";
1403                }
1404                log.trace("getMetadata(): filter information={}", filters);
1405
1406                storage = "SIZE: " + storage_size;
1407                try {
1408                    int[] at = { 0 };
1409                    H5.H5Pget_alloc_time(pcid, at);
1410                    storage += ", allocation time: ";
1411                    if (at[0] == HDF5Constants.H5D_ALLOC_TIME_EARLY) {
1412                        storage += "Early";
1413                    }
1414                    else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_INCR) {
1415                        storage += "Incremental";
1416                    }
1417                    else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_LATE) {
1418                        storage += "Late";
1419                    }
1420                }
1421                catch (Exception ex) {
1422                    log.debug("getMetadata(): Storage allocation time:", ex);
1423                }
1424                if (storage.length() == 0) {
1425                    storage = "NONE";
1426                }
1427                log.trace("getMetadata(): storage={}", storage);
1428            }
1429            finally {
1430                try {
1431                    H5.H5Pclose(paid);
1432                }
1433                catch (Exception ex) {
1434                    log.debug("getMetadata(): H5Pclose(paid {}) failure: ", paid, ex);
1435                }
1436                try {
1437                    H5.H5Pclose(pcid);
1438                }
1439                catch (Exception ex) {
1440                    log.debug("getMetadata(): H5Pclose(pcid {}) failure: ", pcid, ex);
1441                }
1442                close(did);
1443            }
1444        }
1445
1446        log.trace("getMetadata(): finish");
1447        return attributeList;
1448    }
1449
1450    /*
1451     * (non-Javadoc)
1452     *
1453     * @see hdf.object.DataFormat#writeMetadata(java.lang.Object)
1454     */
1455    @Override
1456    public void writeMetadata(Object info) throws Exception {
1457        log.trace("writeMetadata(): start");
1458        // only attribute metadata is supported.
1459        if (!(info instanceof Attribute)) {
1460            log.debug("writeMetadata(): Object not an Attribute");
1461            log.trace("writeMetadata(): finish");
1462            return;
1463        }
1464
1465        boolean attrExisted = false;
1466        Attribute attr = (Attribute) info;
1467        log.trace("writeMetadata(): {}", attr.getName());
1468
1469        if (attributeList == null) {
1470            this.getMetadata();
1471        }
1472
1473        if (attributeList != null)
1474            attrExisted = attributeList.contains(attr);
1475
1476        getFileFormat().writeAttribute(this, attr, attrExisted);
1477        // add the new attribute into attribute list
1478        if (!attrExisted) {
1479            attributeList.add(attr);
1480            nAttributes = attributeList.size();
1481        }
1482        log.trace("writeMetadata(): finish");
1483    }
1484
1485    /*
1486     * (non-Javadoc)
1487     *
1488     * @see hdf.object.DataFormat#removeMetadata(java.lang.Object)
1489     */
1490    @Override
1491    public void removeMetadata(Object info) throws HDF5Exception {
1492        log.trace("removeMetadata(): start");
1493        // only attribute metadata is supported.
1494        if (!(info instanceof Attribute)) {
1495            log.debug("removeMetadata(): Object not an Attribute");
1496            log.trace("removeMetadata(): finish");
1497            return;
1498        }
1499
1500        Attribute attr = (Attribute) info;
1501        log.trace("removeMetadata(): {}", attr.getName());
1502        long did = open();
1503        if (did >= 0) {
1504            try {
1505                H5.H5Adelete(did, attr.getName());
1506                List<Attribute> attrList = getMetadata();
1507                attrList.remove(attr);
1508                nAttributes = attrList.size();
1509            }
1510            finally {
1511                close(did);
1512            }
1513        }
1514        log.trace("removeMetadata(): finish");
1515    }
1516
1517    /*
1518     * (non-Javadoc)
1519     *
1520     * @see hdf.object.DataFormat#updateMetadata(java.lang.Object)
1521     */
1522    @Override
1523    public void updateMetadata(Object info) throws HDF5Exception {
1524        log.trace("updateMetadata(): start");
1525        // only attribute metadata is supported.
1526        if (!(info instanceof Attribute)) {
1527            log.debug("updateMetadata(): Object not an Attribute");
1528            log.trace("updateMetadata(): finish");
1529            return;
1530        }
1531
1532        nAttributes = -1;
1533        log.trace("updateMetadata(): finish");
1534    }
1535
1536    /*
1537     * (non-Javadoc)
1538     *
1539     * @see hdf.object.HObject#setName(java.lang.String)
1540     */
1541    @Override
1542    public void setName(String newName) throws Exception {
1543        H5File.renameObject(this, newName);
1544        super.setName(newName);
1545    }
1546
1547    /**
1548     * Resets the dataspace selection to its default, based on the dataset rank and, for true-color images, the interlace mode.
1549     */
1550    private void resetSelection() {
1551        log.trace("resetSelection(): start");
1552
1553        for (int i = 0; i < rank; i++) {
1554            startDims[i] = 0;
1555            selectedDims[i] = 1;
1556            if (selectedStride != null) {
1557                selectedStride[i] = 1;
1558            }
1559        }
1560
1561        if (interlace == INTERLACE_PIXEL) {
1562            // 24-bit TRUE color image
1563            // [height][width][pixel components]
1564            selectedDims[2] = 3;
1565            selectedDims[0] = dims[0];
1566            selectedDims[1] = dims[1];
1567            selectedIndex[0] = 0; // index for height
1568            selectedIndex[1] = 1; // index for width
1569            selectedIndex[2] = 2; // index for depth
1570        }
1571        else if (interlace == INTERLACE_PLANE) {
1572            // 24-bit TRUE color image
1573            // [pixel components][height][width]
1574            selectedDims[0] = 3;
1575            selectedDims[1] = dims[1];
1576            selectedDims[2] = dims[2];
1577            selectedIndex[0] = 1; // index for height
1578            selectedIndex[1] = 2; // index for width
1579            selectedIndex[2] = 0; // index for depth
1580        }
1581        else if (rank == 1) {
1582            selectedIndex[0] = 0;
1583            selectedDims[0] = dims[0];
1584        }
1585        else if (rank == 2) {
1586            selectedIndex[0] = 0;
1587            selectedIndex[1] = 1;
1588            selectedDims[0] = dims[0];
1589            selectedDims[1] = dims[1];
1590        }
1591        else if (rank > 2) {
1592            // // hdf-java 2.5 version: 3D dataset is arranged in the order of
1593            // [frame][height][width] by default
1594            // selectedIndex[1] = rank-1; // width, the fastest dimension
1595            // selectedIndex[0] = rank-2; // height
1596            // selectedIndex[2] = rank-3; // frames
1597
1598            //
1599            // (5/4/09) Modified the default dimension order. See bug#1379
1600            // We changed the default order to the following. In most situations,
1601            // users want to use the natural order of
1602            // selectedIndex[0] = 0
1603            // selectedIndex[1] = 1
1604            // selectedIndex[2] = 2
1605            // Most NPOESS data is in the order above.
1606
1607            if (isImage) {
1608                // 3D dataset is arranged in the order of [frame][height][width]
1609                selectedIndex[1] = rank - 1; // width, the fastest dimension
1610                selectedIndex[0] = rank - 2; // height
1611                selectedIndex[2] = rank - 3; // frames
1612            }
1613            else {
1614                selectedIndex[0] = 0; // width, the fastest dimension
1615                selectedIndex[1] = 1; // height
1616                selectedIndex[2] = 2; // frames
1617            }
1618
1619            selectedDims[selectedIndex[0]] = dims[selectedIndex[0]];
1620            selectedDims[selectedIndex[1]] = dims[selectedIndex[1]];
1621            selectedDims[selectedIndex[2]] = dims[selectedIndex[2]];
1622        }
1623
1624        isDataLoaded = false;
1625        isDefaultImageOrder = true;
1626        log.trace("resetSelection(): finish");
1627    }
1628
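    /**
     * Creates a scalar dataset in a file with/without chunking and compression, using the library default fill value.
     * <p>
     * This convenience method delegates to
     * {@link #create(String, Group, Datatype, long[], long[], long[], int, Object, Object)} with a null fill value;
     * see that method for a description of the parameters and an example.
     *
     * @return the new scalar dataset if successful; otherwise returns null.
     *
     * @throws Exception if there is a failure.
     */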
1629    public static Dataset create(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims,
1630            long[] chunks, int gzip, Object data) throws Exception {
1631        return create(name, pgroup, type, dims, maxdims, chunks, gzip, null, data);
1632    }
1633
1634    /**
1635     * Creates a scalar dataset in a file with/without chunking and compression.
1636     * <p>
1637     * The following example shows how to create a string dataset using this function.
1638     *
1639     * <pre>
1640     * H5File file = new H5File(&quot;test.h5&quot;, H5File.CREATE);
1641     * int max_str_len = 120;
1642     * Datatype strType = new H5Datatype(Datatype.CLASS_STRING, max_str_len, -1, -1);
1643     * int size = 10000;
1644     * long dims[] = { size };
1645     * long chunks[] = { 1000 };
1646     * int gzip = 9;
1647     * String strs[] = new String[size];
1648     *
1649     * for (int i = 0; i &lt; size; i++)
1650     *     strs[i] = String.valueOf(i);
1651     *
1652     * file.open();
1653     * file.createScalarDS(&quot;/1D scalar strings&quot;, null, strType, dims, null, chunks, gzip, strs);
1654     *
1655     * try {
1656     *     file.close();
1657     * }
1658     * catch (Exception ex) {
1659     * }
1660     * </pre>
1661     *
1662     * @param name
1663     *            the name of the dataset to create.
1664     * @param pgroup
1665     *            parent group where the new dataset is created.
1666     * @param type
1667     *            the datatype of the dataset.
1668     * @param dims
1669     *            the dimension size of the dataset.
1670     * @param maxdims
1671     *            the max dimension size of the dataset. maxdims is set to dims if maxdims = null.
1672     * @param chunks
1673     *            the chunk size of the dataset. No chunking if chunks = null.
1674     * @param gzip
1675     *            GZIP compression level (1 to 9). No compression if gzip&lt;=0.
1676     * @param fillValue
1677     *            the default data value.
1678     * @param data
1679     *            the array of data values.
1680     *
1681     * @return the new scalar dataset if successful; otherwise returns null.
1682     *
1683     * @throws Exception if there is a failure.
1684     */
1685    public static Dataset create(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims,
1686            long[] chunks, int gzip, Object fillValue, Object data) throws Exception {
1687        log.trace("create(): start");
1688
1689        H5ScalarDS dataset = null;
1690        String fullPath = null;
1691        long did = -1;
1692        long plist = -1;
1693        long sid = -1;
1694        long tid = -1;
1695
1696        if ((pgroup == null) || (name == null) || (dims == null) || ((gzip > 0) && (chunks == null))) {
1697            log.debug("create(): one or more parameters are null");
1698            log.trace("create(): finish");
1699            return null;
1700        }
1701
1702        H5File file = (H5File) pgroup.getFileFormat();
1703        if (file == null) {
1704            log.debug("create(): parent group FileFormat is null");
1705            log.trace("create(): finish");
1706            return null;
1707        }
1708
1709        String path = HObject.separator;
1710        if (!pgroup.isRoot()) {
1711            path = pgroup.getPath() + pgroup.getName() + HObject.separator;
1712            if (name.endsWith("/")) {
1713                name = name.substring(0, name.length() - 1);
1714            }
1715            int idx = name.lastIndexOf("/");
1716            if (idx >= 0) {
1717                name = name.substring(idx + 1);
1718            }
1719        }
1720
1721        fullPath = path + name;
1722        log.trace("create(): fullPath={}", fullPath);
1723
1724        // setup chunking and compression
1725        boolean isExtentable = false;
1726        if (maxdims != null) {
1727            for (int i = 0; i < maxdims.length; i++) {
1728                if (maxdims[i] == 0) {
1729                    maxdims[i] = dims[i];
1730                }
1731                else if (maxdims[i] < 0) {
1732                    maxdims[i] = HDF5Constants.H5S_UNLIMITED;
1733                }
1734
1735                if (maxdims[i] != dims[i]) {
1736                    isExtentable = true;
1737                }
1738            }
1739        }
1740
1741        // HDF5 requires you to use chunking in order to define extendible
1742        // datasets. Chunking makes it possible to extend datasets efficiently,
1743        // without having to reorganize storage excessively. If no chunk size is
1744        // given, default to min(dim, 64) in each dimension, which generally performs well.
1745        if ((chunks == null) && isExtentable) {
1746            chunks = new long[dims.length];
1747            for (int i = 0; i < dims.length; i++)
1748                chunks[i] = Math.min(dims[i], 64);
1749        }
1750
1751        // prepare the dataspace and datatype
1752        int rank = dims.length;
1753        log.trace("create(): rank={}", rank);
1754
1755        if ((tid = type.createNative()) >= 0) {
1756            log.trace("create(): createNative={}", tid);
1757            try {
1758                sid = H5.H5Screate_simple(rank, dims, maxdims);
1759                log.trace("create(): H5Screate_simple={}", sid);
1760
1761                // figure out creation properties
1762                plist = HDF5Constants.H5P_DEFAULT;
1763
1764                byte[] val_fill = null;
1765                try {
1766                    val_fill = parseFillValue(type, fillValue);
1767                }
1768                catch (Exception ex) {
1769                    log.debug("create(): parse fill value: ", ex);
1770                }
1771                log.trace("create(): parseFillValue={}", val_fill);
1772
1773                if (chunks != null || val_fill != null) {
1774                    plist = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
1775
1776                    if (chunks != null) {
1777                        H5.H5Pset_layout(plist, HDF5Constants.H5D_CHUNKED);
1778                        H5.H5Pset_chunk(plist, rank, chunks);
1779
1780                        // compression requires chunking
1781                        if (gzip > 0) {
1782                            H5.H5Pset_deflate(plist, gzip);
1783                        }
1784                    }
1785
1786                    if (val_fill != null) {
1787                        H5.H5Pset_fill_value(plist, tid, val_fill);
1788                    }
1789                }
1790
1791                long fid = file.getFID();
1792
1793                log.trace("create(): create dataset fid={}", fid);
1794                did = H5.H5Dcreate(fid, fullPath, tid, sid, HDF5Constants.H5P_DEFAULT, plist, HDF5Constants.H5P_DEFAULT);
1795                log.trace("create(): create dataset did={}", did);
1796                dataset = new H5ScalarDS(file, name, path);
1797            }
1798            finally {
1799                try {
1800                    H5.H5Pclose(plist);
1801                }
1802                catch (HDF5Exception ex) {
1803                    log.debug("create(): H5Pclose(plist {}) failure: ", plist, ex);
1804                }
1805                try {
1806                    H5.H5Sclose(sid);
1807                }
1808                catch (HDF5Exception ex) {
1809                    log.debug("create(): H5Sclose(sid {}) failure: ", sid, ex);
1810                }
1811                try {
1812                    H5.H5Tclose(tid);
1813                }
1814                catch (HDF5Exception ex) {
1815                    log.debug("create(): H5Tclose(tid {}) failure: ", tid, ex);
1816                }
1817                try {
1818                    H5.H5Dclose(did);
1819                }
1820                catch (HDF5Exception ex) {
1821                    log.debug("create(): H5Dclose(did {}) failure: ", did, ex);
1822                }
1823            }
1824        }
1825
1826        log.trace("create(): dataset created");
1827
1828        if (dataset != null) {
1829            pgroup.addToMemberList(dataset);
1830            if (data != null) {
1831                dataset.init();
1832                long selected[] = dataset.getSelectedDims();
1833                for (int i = 0; i < rank; i++) {
1834                    selected[i] = dims[i];
1835                }
1836                dataset.write(data);
1837            }
1838        }
1839        log.trace("create(): finish");
1840        return dataset;
1841    }
1842
1843    // Check the CF convention attributes (_FillValue, valid_min, valid_max, valid_range) to set the filtered image values and the image data range.
1844    private void checkCFconvention(long oid) throws Exception {
1845        log.trace("checkCFconvention(): start");
1846
1847        Object avalue = getAttrValue(oid, "_FillValue");
1848
1849        if (avalue != null) {
1850            int n = Array.getLength(avalue);
1851            for (int i = 0; i < n; i++)
1852                addFilteredImageValue((Number) Array.get(avalue, i));
1853        }
1854
1855        if (imageDataRange == null || imageDataRange[1] <= imageDataRange[0]) {
1856            double x0 = 0, x1 = 0;
1857            avalue = getAttrValue(oid, "valid_range");
1858            if (avalue != null) {
1859                try {
1860                    x0 = Double.valueOf(java.lang.reflect.Array.get(avalue, 0).toString()).doubleValue();
1861                    x1 = Double.valueOf(java.lang.reflect.Array.get(avalue, 1).toString()).doubleValue();
1862                    imageDataRange = new double[2];
1863                    imageDataRange[0] = x0;
1864                    imageDataRange[1] = x1;
1865                    return;
1866                }
1867                catch (Exception ex) {
1868                    log.debug("checkCFconvention(): valid_range: ", ex);
1869                }
1870            }
1871
1872            avalue = getAttrValue(oid, "valid_min");
1873            if (avalue != null) {
1874                try {
1875                    x0 = Double.valueOf(java.lang.reflect.Array.get(avalue, 0).toString()).doubleValue();
1876                }
1877                catch (Exception ex) {
1878                    log.debug("checkCFconvention(): valid_min: ", ex);
1879                }
1880                avalue = getAttrValue(oid, "valid_max");
1881                if (avalue != null) {
1882                    try {
1883                        x1 = Double.valueOf(java.lang.reflect.Array.get(avalue, 0).toString()).doubleValue();
1884                        imageDataRange = new double[2];
1885                        imageDataRange[0] = x0;
1886                        imageDataRange[1] = x1;
1887                    }
1888                    catch (Exception ex) {
1889                        log.debug("checkCFconvention(): valid_max:", ex);
1890                    }
1891                }
1892            }
1893        } // if (imageDataRange==null || imageDataRange[1]<=imageDataRange[0])
1894        log.trace("checkCFconvention(): finish");
1895    }
1896
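    /**
     * Reads the value of the named attribute attached to the given object identifier. Returns null if the attribute
     * does not exist or cannot be read; unsigned integer values are converted with convertFromUnsignedC.
     */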
1897    private Object getAttrValue(long oid, String aname) {
1898        log.trace("getAttrValue(): start: name={}", aname);
1899
1900        long aid = -1;
1901        long atid = -1;
1902        long asid = -1;
1903        Object avalue = null;
1904
1905        try {
1906            // try to find attribute name
1907            if(H5.H5Aexists_by_name(oid, ".", aname, HDF5Constants.H5P_DEFAULT))
1908                aid = H5.H5Aopen_by_name(oid, ".", aname, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
1909        }
1910        catch (HDF5LibraryException ex5) {
1911            log.debug("getAttrValue(): Failed to find attribute {} : Expected", aname);
1912        }
1913        catch (Exception ex) {
1914            log.debug("getAttrValue(): try to find attribute {}:", aname, ex);
1915        }
1916        if (aid > 0) {
1917            try {
1918                atid = H5.H5Aget_type(aid);
1919                long tmptid = atid;
1920                atid = H5.H5Tget_native_type(tmptid);
1921                try {
1922                    H5.H5Tclose(tmptid);
1923                }
1924                catch (Exception ex) {
1925                    log.debug("getAttrValue(): H5Tclose(tmptid {}) failure: ", tmptid, ex);
1926                }
1927                H5Datatype DSdatatype = new H5Datatype(atid);
1928
1929                asid = H5.H5Aget_space(aid);
1930                long adims[] = null;
1931
1932                int arank = H5.H5Sget_simple_extent_ndims(asid);
1933                if (arank > 0) {
1934                    adims = new long[arank];
1935                    H5.H5Sget_simple_extent_dims(asid, adims, null);
1936                }
1937                log.trace("getAttrValue(): adims={}", adims);
1938
1939                // retrieve the attribute value
1940                long lsize = 1;
1941                if (adims != null) {
1942                    for (int j = 0; j < adims.length; j++) {
1943                        lsize *= adims[j];
1944                    }
1945                }
1946                log.trace("getAttrValue(): lsize={}", lsize);
1947
1948                if (lsize < Integer.MIN_VALUE || lsize > Integer.MAX_VALUE) throw new Exception("Invalid int size");
1949
1950                try {
1951                    avalue = DSdatatype.allocateArray((int) lsize);
1952                }
1953                catch (OutOfMemoryError e) {
1954                    log.debug("getAttrValue(): out of memory: ", e);
1955                    avalue = null;
1956                }
1957
1958                if (avalue != null) {
1959                    log.trace("getAttrValue(): read attribute id {} of size={}", atid, lsize);
1960                    H5.H5Aread(aid, atid, avalue);
1961
1962                    if (DSdatatype.isUnsigned()) {
1963                        log.trace("getAttrValue(): id {} is unsigned", atid);
1964                        avalue = convertFromUnsignedC(avalue, null);
1965                    }
1966                }
1967            }
1968            catch (Exception ex) {
1969                log.debug("getAttrValue(): try to get value for attribute {}: ", aname, ex);
1970            }
1971            finally {
1972                try {
1973                    H5.H5Tclose(atid);
1974                }
1975                catch (HDF5Exception ex) {
1976                    log.debug("getAttrValue(): H5Tclose(atid {}) failure: ", atid, ex);
1977                }
1978                try {
1979                    H5.H5Sclose(asid);
1980                }
1981                catch (HDF5Exception ex) {
1982                    log.debug("getAttrValue(): H5Sclose(asid {}) failure: ", asid, ex);
1983                }
1984                try {
1985                    H5.H5Aclose(aid);
1986                }
1987                catch (HDF5Exception ex) {
1988                    log.debug("getAttrValue(): H5Aclose(aid {}) failure: ", aid, ex);
1989                }
1990            }
1991        } // if (aid > 0)
1992
1993        log.trace("getAttrValue(): finish");
1994        return avalue;
1995    }
1996
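    /**
     * Checks whether the named attribute of the given object exists and is a string that matches the given value
     * (case-insensitive).
     */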
1997    private boolean isStringAttributeOf(long objID, String name, String value) {
1998        boolean retValue = false;
1999        long aid = -1;
2000        long atid = -1;
2001
2002        try {
2003            // open the named attribute and compare its string value (case-insensitive)
2004            aid = H5.H5Aopen_by_name(objID, ".", name, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
2005            atid = H5.H5Aget_type(aid);
2006            int size = (int)H5.H5Tget_size(atid);
2007            byte[] attrValue = new byte[size];
2008            H5.H5Aread(aid, atid, attrValue);
2009            String strValue = new String(attrValue).trim();
2010            retValue = strValue.equalsIgnoreCase(value);
2011        }
2012        catch (Exception ex) {
2013            log.debug("isStringAttributeOf(): try to find out interlace mode:", ex);
2014        }
2015        finally {
2016            try {
2017                H5.H5Tclose(atid);
2018            }
2019            catch (HDF5Exception ex) {
2020                log.debug("isStringAttributeOf(): H5Tclose(atid {}) failure: ", atid, ex);
2021            }
2022            try {
2023                H5.H5Aclose(aid);
2024            }
2025            catch (HDF5Exception ex) {
2026                log.debug("isStringAttributeOf(): H5Aclose(aid {}) failure: ", aid, ex);
2027            }
2028        }
2029
2030        return retValue;
2031    }
2032
2033    /*
2034     * (non-Javadoc)
2035     *
2036     * @see hdf.object.Dataset#copy(hdf.object.Group, java.lang.String, long[], java.lang.Object)
2037     */
2038    @Override
2039    public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff) throws Exception {
2040        log.trace("copy(): start");
2041        // must give a location to copy
2042        if (pgroup == null) {
2043            log.debug("copy(): Parent group is null");
2044            log.trace("copy(): finish");
2045            return null;
2046        }
2047
2048        Dataset dataset = null;
2049        long srcdid = -1;
2050        long dstdid = -1;
2051        long plist = -1;
2052        long tid = -1;
2053        long sid = -1;
2054        String dname = null, path = null;
2055
2056        if (pgroup.isRoot()) {
2057            path = HObject.separator;
2058        }
2059        else {
2060            path = pgroup.getPath() + pgroup.getName() + HObject.separator;
2061        }
2062        dname = path + dstName;
2063
2064        srcdid = open();
2065        if (srcdid >= 0) {
2066            try {
2067                tid = H5.H5Dget_type(srcdid);
2068                sid = H5.H5Screate_simple(dims.length, dims, null);
2069                plist = H5.H5Dget_create_plist(srcdid);
2070
2071                long[] chunks = new long[dims.length];
2072                boolean setChunkFlag = false;
2073                try {
2074                    H5.H5Pget_chunk(plist, dims.length, chunks);
2075                    for (int i = 0; i < dims.length; i++) {
2076                        if (dims[i] < chunks[i]) {
2077                            setChunkFlag = true;
2078                            if (dims[i] == 1)
2079                                chunks[i] = 1;
2080                            else
2081                                chunks[i] = dims[i] / 2;
2082                        }
2083                    }
2084                }
2085                catch (Exception ex) {
2086                    log.debug("copy(): chunk: ", ex);
2087                }
2088
2089                if (setChunkFlag)
2090                    H5.H5Pset_chunk(plist, dims.length, chunks);
2091
2092                try {
2093                    dstdid = H5.H5Dcreate(pgroup.getFID(), dname, tid, sid, HDF5Constants.H5P_DEFAULT, plist,
2094                            HDF5Constants.H5P_DEFAULT);
2095                }
2096                catch (Exception e) {
2097                    log.debug("copy(): H5Dcreate: ", e);
2098                }
2099                finally {
2100                    try {
2101                        H5.H5Dclose(dstdid);
2102                    }
2103                    catch (Exception ex2) {
2104                        log.debug("copy(): H5Dclose(dstdid {}) failure: ", dstdid, ex2);
2105                    }
2106                }
2107
2108                dataset = new H5ScalarDS(pgroup.getFileFormat(), dstName, path);
2109                if (buff != null) {
2110                    dataset.init();
2111                    dataset.write(buff);
2112                }
2113
2114                dstdid = dataset.open();
2115                if (dstdid >= 0) {
2116                    try {
2117                        H5File.copyAttributes(srcdid, dstdid);
2118                    }
2119                    finally {
2120                        try {
2121                            H5.H5Dclose(dstdid);
2122                        }
2123                        catch (Exception ex) {
2124                            log.debug("copy(): H5Dclose(dstdid {}) failure: ", dstdid, ex);
2125                        }
2126                    }
2127                }
2128            }
2129            finally {
2130                try {
2131                    H5.H5Pclose(plist);
2132                }
2133                catch (Exception ex) {
2134                    log.debug("copy(): H5Pclose(plist {}) failure: ", plist, ex);
2135                }
2136                try {
2137                    H5.H5Sclose(sid);
2138                }
2139                catch (Exception ex) {
2140                    log.debug("copy(): H5Sclose(sid {}) failure: ", sid, ex);
2141                }
2142                try {
2143                    H5.H5Tclose(tid);
2144                }
2145                catch (Exception ex) {
2146                    log.debug("copy(): H5Tclose(tid {}) failure: ", tid, ex);
2147                }
2148                try {
2149                    H5.H5Dclose(srcdid);
2150                }
2151                catch (Exception ex) {
2152                    log.debug("copy(): H5Dclose(srcdid {}) failure: ", srcdid, ex);
2153                }
2154            }
2155        }
2156
2157        pgroup.addToMemberList(dataset);
2158
2159        ((ScalarDS) dataset).setIsImage(isImage);
2160
2161        log.trace("copy(): finish");
2162        return dataset;
2163    }
2164
2165    /*
2166     * (non-Javadoc)
2167     *
2168     * @see hdf.object.ScalarDS#getPalette()
2169     */
2170    @Override
2171    public byte[][] getPalette() {
2172        if (palette == null) {
2173            palette = readPalette(0);
2174        }
2175
2176        return palette;
2177    }
2178
2179    /*
2180     * (non-Javadoc)
2181     *
2182     * @see hdf.object.ScalarDS#getPaletteName(int)
2183     */
2184    @Override
2185    public String getPaletteName(int idx) {
2186        log.trace("getPaletteName(): start");
2187
2188        byte[] refs = getPaletteRefs();
2189        long did = -1;
2190        long pal_id = -1;
2191        String paletteName = null;
2192
2193        if (refs == null) {
2194            log.debug("getPaletteName(): refs is null");
2195            log.trace("getPaletteName(): finish");
2196            return null;
2197        }
2198
2199        byte[] ref_buf = new byte[8];
2200
2201        try {
2202            System.arraycopy(refs, idx * 8, ref_buf, 0, 8);
2203        }
2204        catch (Throwable err) {
2205            log.debug("getPaletteName(): arraycopy failure: ", err);
2206            log.trace("getPaletteName(): finish");
2207            return null;
2208        }
2209
2210        did = open();
2211        if (did >= 0) {
2212            try {
2213                pal_id = H5.H5Rdereference(getFID(), HDF5Constants.H5P_DEFAULT, HDF5Constants.H5R_OBJECT, ref_buf);
2214                paletteName = H5.H5Iget_name(pal_id);
2215            }
2216            catch (Exception ex) {
2217                log.debug("getPaletteName(): H5Rdereference failure: ", ex);
2218            }
2219            finally {
2220                close(pal_id);
2221                close(did);
2222            }
2223        }
2224
2225        log.trace("getPaletteName(): finish");
2226        return paletteName;
2227    }
2228
2229    /*
2230     * (non-Javadoc)
2231     *
2232     * @see hdf.object.ScalarDS#readPalette(int)
2233     */
2234    @Override
2235    public byte[][] readPalette(int idx) {
2236        log.trace("readPalette(): start");
2237
2238        byte[][] thePalette = null;
2239        byte[] refs = getPaletteRefs();
2240        long did = -1;
2241        long pal_id = -1;
2242        long tid = -1;
2243
2244        if (refs == null) {
2245            log.debug("readPalette(): refs is null");
2246            log.trace("readPalette(): finish");
2247            return null;
2248        }
2249
2250        byte[] p = null;
2251        byte[] ref_buf = new byte[8];
2252
2253        try {
2254            System.arraycopy(refs, idx * 8, ref_buf, 0, 8);
2255        }
2256        catch (Throwable err) {
2257            log.debug("readPalette(): arraycopy failure: ", err);
2258            log.trace("readPalette(): failure");
2259            return null;
2260        }
2261
2262        did = open();
2263        if (did >= 0) {
2264            try {
2265                pal_id = H5.H5Rdereference(getFID(), HDF5Constants.H5P_DEFAULT, HDF5Constants.H5R_OBJECT, ref_buf);
2266                log.trace("readPalette(): H5Rdereference: {}", pal_id);
2267                tid = H5.H5Dget_type(pal_id);
2268
2269                // support only 3*256 byte palette data
2270                if (H5.H5Dget_storage_size(pal_id) <= 768) {
2271                    p = new byte[3 * 256];
2272                    H5.H5Dread(pal_id, tid, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, p);
2273                }
2274            }
2275            catch (HDF5Exception ex) {
2276                log.debug("readPalette(): failure: ", ex);
2277                p = null;
2278            }
2279            finally {
2280                try {
2281                    H5.H5Tclose(tid);
2282                }
2283                catch (HDF5Exception ex2) {
2284                    log.debug("readPalette(): H5Tclose(tid {}) failure: ", tid, ex2);
2285                }
2286                close(pal_id);
2287                close(did);
2288            }
2289        }
2290
2291        if (p != null) {
2292            thePalette = new byte[3][256];
2293            for (int i = 0; i < 256; i++) {
2294                thePalette[0][i] = p[i * 3];
2295                thePalette[1][i] = p[i * 3 + 1];
2296                thePalette[2][i] = p[i * 3 + 2];
2297            }
2298        }
2299
2300        log.trace("readPalette(): finish");
2301        return thePalette;
2302    }
2303
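    /**
     * Converts the given fill value into the byte layout expected by H5Pset_fill_value for the given datatype. The
     * fill value may be a String or an array, in which case only the first element is used. Returns null if the value
     * cannot be parsed for the datatype.
     */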
2304    private static byte[] parseFillValue(Datatype type, Object fillValue) throws Exception {
2305        log.trace("parseFillValue(): start");
2306
2307        byte[] data = null;
2308
2309        if (type == null || fillValue == null) {
2310            log.debug("parseFillValue(): datatype or fill value is null");
2311            log.trace("parseFillValue(): finish");
2312            return null;
2313        }
2314
2315        int datatypeClass = type.getDatatypeClass();
2316        int datatypeSize = (int)type.getDatatypeSize();
2317
2318        double val_dbl = 0;
2319        String val_str = null;
2320
2321        if (fillValue instanceof String) {
2322            val_str = (String) fillValue;
2323        }
2324        else if (fillValue.getClass().isArray()) {
2325            val_str = Array.get(fillValue, 0).toString();
2326        }
2327
2328        if (!type.isString()) {
2329            try {
2330                val_dbl = Double.parseDouble(val_str);
2331            }
2332            catch (NumberFormatException ex) {
2333                log.debug("parseFillValue(): parse error: ", ex);
2334                log.trace("parseFillValue(): finish");
2335                return null;
2336            }
2337        }
2338
2339        try {
2340            switch (datatypeClass) {
2341                case Datatype.CLASS_INTEGER:
2342                case Datatype.CLASS_ENUM:
2343                case Datatype.CLASS_CHAR:
2344                    log.trace("parseFillValue(): class CLASS_INT-ENUM-CHAR");
2345                    if (datatypeSize == 1) {
2346                        data = new byte[] { (byte) val_dbl };
2347                    }
2348                    else if (datatypeSize == 2) {
2349                        data = HDFNativeData.shortToByte((short) val_dbl);
2350                    }
2351                    else if (datatypeSize == 8) {
2352                        data = HDFNativeData.longToByte((long) val_dbl);
2353                    }
2354                    else {
2355                        data = HDFNativeData.intToByte((int) val_dbl);
2356                    }
2357                    break;
2358                case Datatype.CLASS_FLOAT:
2359                    log.trace("parseFillValue(): class CLASS_FLOAT");
2360                    if (datatypeSize == 8) {
2361                        data = HDFNativeData.doubleToByte(val_dbl);
2362                    }
2363                    else {
2364                        data = HDFNativeData.floatToByte((float) val_dbl);
2366                    }
2367                    break;
2368                case Datatype.CLASS_STRING:
2369                    log.trace("parseFillValue(): class CLASS_STRING");
2370                    data = val_str.getBytes();
2371                    break;
2372                case Datatype.CLASS_REFERENCE:
2373                    log.trace("parseFillValue(): class CLASS_REFERENCE");
2374                    data = HDFNativeData.longToByte((long) val_dbl);
2375                    break;
2376                default:
2377                    log.debug("parseFillValue(): datatypeClass unknown");
2378                    break;
2379            } // switch (datatypeClass)
2380        }
2381        catch (Exception ex) {
2382            log.debug("parseFillValue(): failure: ", ex);
2383            data = null;
2384        }
2385
2386        log.trace("parseFillValue(): finish");
2387        return data;
2388    }
2389
2390    /*
2391     * (non-Javadoc)
2392     *
2393     * @see hdf.object.ScalarDS#getPaletteRefs()
2394     */
2395    @Override
2396    public byte[] getPaletteRefs() {
2397        if (!isInited())
2398            init(); // init will be called to get refs
2399
2400        return paletteRefs;
2401    }
2402
2403    /**
2404     * Reads the palette references into a byte array. Each reference requires eight bytes of storage; therefore, the
2405     * array length is 8*numberOfPalettes.
2406     */
2407    private byte[] getPaletteRefs(long did) {
2408        log.trace("getPaletteRefs(): start");
2409
2410        long aid = -1;
2411        long sid = -1;
2412        long atype = -1;
2413        int size = 0, rank = 0;
2414        byte[] ref_buf = null;
2415
2416        try {
2417            if(H5.H5Aexists_by_name(did, ".", "PALETTE", HDF5Constants.H5P_DEFAULT)) {
2418                aid = H5.H5Aopen_by_name(did, ".", "PALETTE", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
2419                sid = H5.H5Aget_space(aid);
2420                rank = H5.H5Sget_simple_extent_ndims(sid);
2421                size = 1;
2422                if (rank > 0) {
2423                    long[] dims = new long[rank];
2424                    H5.H5Sget_simple_extent_dims(sid, dims, null);
2425                    log.trace("getPaletteRefs(): rank={}, dims={}", rank, dims);
2426                    for (int i = 0; i < rank; i++) {
2427                        size *= (int) dims[i];
2428                    }
2429                }
2430
2431                if ((long) size * 8 < Integer.MIN_VALUE || (long) size * 8 > Integer.MAX_VALUE) throw new HDF5Exception("Invalid int size");
2432
2433                ref_buf = new byte[size * 8];
2434                atype = H5.H5Aget_type(aid);
2435
2436                H5.H5Aread(aid, atype, ref_buf);
2437            }
2438        }
2439        catch (HDF5Exception ex) {
2440            log.debug("getPaletteRefs(): Palette attribute search failed: Expected", ex);
2441            ref_buf = null;
2442        }
2443        finally {
2444            try {
2445                H5.H5Tclose(atype);
2446            }
2447            catch (HDF5Exception ex2) {
2448                log.debug("getPaletteRefs(): H5Tclose(atype {}) failure: ", atype, ex2);
2449            }
2450            try {
2451                H5.H5Sclose(sid);
2452            }
2453            catch (HDF5Exception ex2) {
2454                log.debug("getPaletteRefs(): H5Sclose(sid {}) failure: ", sid, ex2);
2455            }
2456            try {
2457                H5.H5Aclose(aid);
2458            }
2459            catch (HDF5Exception ex2) {
2460                log.debug("getPaletteRefs(): H5Aclose(aid {}) failure: ", aid, ex2);
2461            }
2462        }
2463
2464        log.trace("getPaletteRefs(): finish");
2465        return ref_buf;
2466    }
2467
2468    /**
2469     * Verifies that the dataset is at least of the given size, extending it with H5Dset_extent if necessary. The
2470     * dimensionality of newDims is the same as that of the dataspace of the dataset being changed.
2471     * <p>
2472     * This method can be applied to the following datasets: 1) any dataset with unlimited dimensions, or 2) a dataset
2473     * with fixed dimensions if the current dimension sizes are less than the maximum sizes set with maxdims (see
2474     * H5Screate_simple).
2475     *
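     * <p>
     * A minimal usage sketch (assuming file is an open H5File and the dataset was created with unlimited or
     * sufficiently large maxdims; the dataset path is illustrative):
     *
     * <pre>
     * H5ScalarDS dset = (H5ScalarDS) file.get(&quot;/extendible1D&quot;);
     * dset.init();
     * long[] newDims = { 2 * dset.getDims()[0] };
     * dset.extend(newDims);
     * </pre>
     *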
2476     * @param newDims the target dimension sizes
2477     *
2478     * @throws HDF5Exception
2479     *             If there is an error at the HDF5 library level.
2480     */
2481    public void extend(long[] newDims) throws HDF5Exception {
2482        long did = -1;
2483        long sid = -1;
2484
2485        did = open();
2486        if (did >= 0) {
2487            try {
2488                H5.H5Dset_extent(did, newDims);
2489                H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_GLOBAL);
2490                sid = H5.H5Dget_space(did);
2491                long[] checkDims = new long[rank];
2492                H5.H5Sget_simple_extent_dims(sid, checkDims, null);
2493                log.trace("extend(): rank={}, checkDims={}", rank, checkDims);
2494                for (int i = 0; i < rank; i++) {
2495                    if (checkDims[i] != newDims[i]) {
2496                        log.debug("extend(): error extending dataset");
2497                        throw new HDF5Exception("error extending dataset " + getName());
2498                    }
2499                }
2500                dims = checkDims;
2501            }
2502            catch (Exception e) {
2503                log.debug("extend(): failure: ", e);
2504                throw new HDF5Exception(e.getMessage());
2505            }
2506            finally {
2507                if (sid > 0)
2508                    H5.H5Sclose(sid);
2509
2510                close(did);
2511            }
2512        }
2513    }
2514
2515    /*
2516     * (non-Javadoc)
2517     *
2518     * @see hdf.object.Dataset#isVirtual()
2519     */
2520    @Override
2521    public boolean isVirtual() {
2522        return isVirtual;
2523    }
2524
2525    /*
2526     * (non-Javadoc)
2527     *
2528     * @see hdf.object.Dataset#getVirtualFilename(int)
2529     */
2530    @Override
2531    public String getVirtualFilename(int index) {
2532        if(isVirtual)
2533            return virtualNameList.get(index);
2534        else
2535            return null;
2536    }
2537
2538    /*
2539     * (non-Javadoc)
2540     *
2541     * @see hdf.object.Dataset#getVirtualMaps()
2542     */
2543    @Override
2544    public int getVirtualMaps() {
2545        if(isVirtual)
2546            return virtualNameList.size();
2547        else
2548            return -1;
2549    }
2550}