001/*****************************************************************************
002 * Copyright by The HDF Group.                                               *
003 * Copyright by the Board of Trustees of the University of Illinois.         *
004 * All rights reserved.                                                      *
005 *                                                                           *
006 * This file is part of the HDF Java Products distribution.                  *
007 * The full copyright notice, including terms governing use, modification,   *
008 * and redistribution, is contained in the files COPYING and Copyright.html. *
009 * COPYING can be found at the root of the source code distribution tree.    *
010 * Or, see https://support.hdfgroup.org/products/licenses.html               *
011 * If you do not have access to either file, you may request a copy from     *
012 * help@hdfgroup.org.                                                        *
013 ****************************************************************************/
014
015package hdf.object.h5;
016
017import java.lang.reflect.Array;
018import java.math.BigDecimal;
019import java.text.DecimalFormat;
020import java.util.List;
021import java.util.Vector;
022
023import hdf.hdf5lib.H5;
024import hdf.hdf5lib.HDF5Constants;
025import hdf.hdf5lib.HDFArray;
026import hdf.hdf5lib.HDFNativeData;
027import hdf.hdf5lib.exceptions.HDF5DataFiltersException;
028import hdf.hdf5lib.exceptions.HDF5Exception;
029import hdf.hdf5lib.exceptions.HDF5LibraryException;
030import hdf.hdf5lib.structs.H5O_info_t;
031import hdf.hdf5lib.structs.H5O_token_t;
032
033import hdf.object.Attribute;
034import hdf.object.Dataset;
035import hdf.object.Datatype;
036import hdf.object.FileFormat;
037import hdf.object.Group;
038import hdf.object.HObject;
039import hdf.object.MetaDataContainer;
040import hdf.object.ScalarDS;
041
042import hdf.object.h5.H5Attribute;
043import hdf.object.h5.H5MetaDataContainer;
044import hdf.object.h5.H5ReferenceType;
045
046/**
047 * H5ScalarDS describes a multi-dimension array of HDF5 scalar or atomic data types, such as byte, int, short, long,
048 * float, double and string, and operations performed on the scalar dataset.
049 *
050 * The library predefines a modest number of datatypes. For details,
051 * read <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/HDF5_Users_Guide/Datatypes/HDF5_Datatypes.htm">HDF5 Datatypes</a>
052 *
053 * @version 1.1 9/4/2007
054 * @author Peter X. Cao
055 */
056public class H5ScalarDS extends ScalarDS implements MetaDataContainer
057{
058    private static final long serialVersionUID = 2887517608230611642L;
059
060    private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5ScalarDS.class);
061
062    /**
063     * The metadata object for this data object. Members of the metadata are instances of Attribute.
064     */
065    private H5MetaDataContainer objMetadata;
066
067    /** the object properties */
068    private H5O_info_t objInfo;
069
070    /** the number of palettes */
071    private int NumberOfPalettes;
072
073    /** flag to indicate if the dataset is an external dataset */
074    private boolean isExternal = false;
075
076    /** flag to indicate if the dataset is a virtual dataset */
077    private boolean isVirtual = false;
078    /** the list of virtual names */
079    private List<String> virtualNameList;
080
081    /**
082     * flag to indicate if the dataset buffers should be refreshed.
083     */
084    protected boolean refresh = false;
085
086    /**
087     * flag to indicate if the datatype in file is the same as dataype in memory
088     */
089    protected boolean isNativeDatatype = false;
090
091    /**
092     * Constructs an instance of a H5 scalar dataset with given file, dataset name and path.
093     *
094     * For example, in H5ScalarDS(h5file, "dset", "/arrays/"), "dset" is the name of the dataset, "/arrays" is the group
095     * path of the dataset.
096     *
097     * @param theFile
098     *            the file that contains the data object.
099     * @param theName
100     *            the name of the data object, e.g. "dset".
101     * @param thePath
102     *            the full path of the data object, e.g. "/arrays/".
103     */
104    public H5ScalarDS(FileFormat theFile, String theName, String thePath) {
105        this(theFile, theName, thePath, null);
106    }
107
108    /**
109     * @deprecated Not for public use in the future.<br>
110     *             Using {@link #H5ScalarDS(FileFormat, String, String)}
111     *
112     * @param theFile
113     *            the file that contains the data object.
114     * @param theName
115     *            the name of the data object, e.g. "dset".
116     * @param thePath
117     *            the full path of the data object, e.g. "/arrays/".
118     * @param oid
119     *            the oid of the data object.
120     */
121    @Deprecated
122    public H5ScalarDS(FileFormat theFile, String theName, String thePath, long[] oid) {
123        super(theFile, theName, thePath, oid);
124        unsignedConverted = false;
125        NumberOfPalettes = 0;
126        objMetadata = new H5MetaDataContainer(theFile, theName, thePath, this);
127
128        if (theFile != null) {
129            if (oid == null) {
130                // retrieve the object ID
131                byte[] refBuf = null;
132                try {
133                    refBuf = H5.H5Rcreate_object(theFile.getFID(), this.getFullName(), HDF5Constants.H5P_DEFAULT);
134                    this.oid = HDFNativeData.byteToLong(refBuf);
135                    log.trace("constructor REF {} to OID {}", refBuf, this.oid);
136                }
137                catch (Exception ex) {
138                    log.debug("constructor ID {} for {} failed H5Rcreate_object", theFile.getFID(), this.getFullName());
139                }
140                finally {
141                    if (refBuf != null)
142                        H5.H5Rdestroy(refBuf);
143                }
144            }
145            log.trace("constructor OID {}", this.oid);
146            try {
147                objInfo = H5.H5Oget_info_by_name(theFile.getFID(), this.getFullName(), HDF5Constants.H5O_INFO_BASIC, HDF5Constants.H5P_DEFAULT);
148            }
149            catch (Exception ex) {
150                objInfo = new H5O_info_t(-1L, null, 0, 0, 0L, 0L, 0L, 0L, 0L);
151            }
152        }
153        else {
154            this.oid = null;
155            objInfo = new H5O_info_t(-1L, null, 0, 0, 0L, 0L, 0L, 0L, 0L);
156        }
157    }
158
159    /*
160     * (non-Javadoc)
161     *
162     * @see hdf.object.HObject#open()
163     */
164    @Override
165    public long open() {
166        long did = HDF5Constants.H5I_INVALID_HID;
167
168        if (getFID() < 0)
169            log.trace("open(): file id for:{} is invalid", getPath() + getName());
170        else {
171            try {
172                did = H5.H5Dopen(getFID(), getPath() + getName(), HDF5Constants.H5P_DEFAULT);
173                log.trace("open(): did={}", did);
174            }
175            catch (HDF5Exception ex) {
176                log.debug("open(): Failed to open dataset {}", getPath() + getName(), ex);
177                did = HDF5Constants.H5I_INVALID_HID;
178            }
179        }
180
181        return did;
182    }
183
184    /*
185     * (non-Javadoc)
186     *
187     * @see hdf.object.HObject#close(int)
188     */
189    @Override
190    public void close(long did) {
191        if (did >= 0) {
192            try {
193                H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_LOCAL);
194            }
195            catch (Exception ex) {
196                log.debug("close(): H5Fflush(did {}) failure: ", did, ex);
197            }
198            try {
199                H5.H5Dclose(did);
200            }
201            catch (HDF5Exception ex) {
202                log.debug("close(): H5Dclose(did {}) failure: ", did, ex);
203            }
204        }
205    }
206
207    /**
208     * Retrieves datatype and dataspace information from file and sets the dataset
209     * in memory.
210     *
211     * The init() is designed to support lazy operation in a dataset object. When a
212     * data object is retrieved from file, the datatype, dataspace and raw data are
213     * not loaded into memory. When it is asked to read the raw data from file,
214     * init() is first called to get the datatype and dataspace information, then
215     * load the raw data from file.
216     *
217     * init() is also used to reset the selection of a dataset (start, stride and
218     * count) to the default, which is the entire dataset for 1D or 2D datasets. In
219     * the following example, init() at step 1) retrieves datatype and dataspace
220     * information from file. getData() at step 3) reads only one data point. init()
221     * at step 4) resets the selection to the whole dataset. getData() at step 4)
222     * reads the values of whole dataset into memory.
223     *
224     * <pre>
225     * dset = (Dataset) file.get(NAME_DATASET);
226     *
227     * // 1) get datatype and dataspace information from file
228     * dset.init();
229     * rank = dset.getRank(); // rank = 2, a 2D dataset
230     * count = dset.getSelectedDims();
231     * start = dset.getStartDims();
232     * dims = dset.getDims();
233     *
234     * // 2) select only one data point
235     * for (int i = 0; i &lt; rank; i++) {
236     *     start[0] = 0;
237     *     count[i] = 1;
238     * }
239     *
240     * // 3) read one data point
241     * data = dset.getData();
242     *
243     * // 4) reset selection to the whole dataset
244     * dset.init();
245     *
246     * // 5) clean the memory data buffer
247     * dset.clearData();
248     *
249     * // 6) Read the whole dataset
250     * data = dset.getData();
251     * </pre>
252     */
253    @Override
254    public void init() {
255        if (inited) {
256            resetSelection();
257            log.trace("init(): Dataset already initialized");
258            return; // already called. Initialize only once
259        }
260
261        long did = HDF5Constants.H5I_INVALID_HID;
262        long tid = HDF5Constants.H5I_INVALID_HID;
263        long sid = HDF5Constants.H5I_INVALID_HID;
264        long nativeTID = HDF5Constants.H5I_INVALID_HID;
265
266        did = open();
267        if (did >= 0) {
268            try {
269                H5.H5Drefresh(did);
270            }
271            catch (Exception ex) {
272                log.debug("H5Drefresh(): ", ex);
273            }
274            // check if it is an external or virtual dataset
275            long pid = HDF5Constants.H5I_INVALID_HID;
276            try {
277                pid = H5.H5Dget_create_plist(did);
278                try {
279                    int nfiles = H5.H5Pget_external_count(pid);
280                    isExternal = (nfiles > 0);
281                    int layoutType = H5.H5Pget_layout(pid);
282                    if (isVirtual = (layoutType == HDF5Constants.H5D_VIRTUAL)) {
283                        try {
284                            long vmaps = H5.H5Pget_virtual_count(pid);
285                            if (vmaps > 0) {
286                                virtualNameList = new Vector<>();
287                                for (long next = 0; next < vmaps; next++) {
288                                    try {
289                                        String fname = H5.H5Pget_virtual_filename(pid, next);
290                                        virtualNameList.add(fname);
291                                        log.trace("init(): virtualNameList[{}]={}", next, fname);
292                                    }
293                                    catch (Exception err) {
294                                        log.trace("init(): vds[{}] continue", next);
295                                    }
296                                }
297                            }
298                        }
299                        catch (Exception err) {
300                            log.debug("init(): vds count error: ", err);
301                        }
302                    }
303                    log.trace("init(): pid={} nfiles={} isExternal={} isVirtual={}", pid, nfiles, isExternal, isVirtual);
304                }
305                catch (Exception ex) {
306                    log.debug("init(): check if it is an external or virtual dataset: ", ex);
307                }
308            }
309            catch (Exception ex) {
310                log.debug("init(): H5Dget_create_plist() failure: ", ex);
311            }
312            finally {
313                try {
314                    H5.H5Pclose(pid);
315                }
316                catch (Exception ex) {
317                    log.debug("init(): H5Pclose(pid {}) failure: ", pid, ex);
318                }
319            }
320
321            NumberOfPalettes = readNumberOfPalette(did);
322
323            try {
324                sid = H5.H5Dget_space(did);
325                rank = H5.H5Sget_simple_extent_ndims(sid);
326                space_type = H5.H5Sget_simple_extent_type(sid);
327                tid = H5.H5Dget_type(did);
328                log.trace("init(): tid={} sid={} rank={} space_type={} ", tid, sid, rank, space_type);
329
330                if (rank == 0) {
331                    // a scalar data point
332                    isScalar = true;
333                    rank = 1;
334                    dims = new long[] { 1 };
335                    log.trace("init(): rank is a scalar data point");
336                }
337                else {
338                    isScalar = false;
339                    dims = new long[rank];
340                    maxDims = new long[rank];
341                    H5.H5Sget_simple_extent_dims(sid, dims, maxDims);
342                    log.trace("init(): rank={}, dims={}, maxDims={}", rank, dims, maxDims);
343                }
344
345                try {
346                    int nativeClass = H5.H5Tget_class(tid);
347                    if (nativeClass == HDF5Constants.H5T_REFERENCE) {
348                        long lsize = 1;
349                        if (rank > 0) {
350                            log.trace("init():rank={}, dims={}", rank, dims);
351                            for (int j = 0; j < dims.length; j++) {
352                                lsize *= dims[j];
353                            }
354                        }
355                        datatype = new H5ReferenceType(getFileFormat(), lsize, tid);
356                    }
357                    else
358                        datatype = new H5Datatype(getFileFormat(), tid);
359
360                    log.trace("init(): tid={} is tclass={} has isText={} : isNamed={} :  isVLEN={} : isEnum={} : isUnsigned={} : isStdRef={} : isRegRef={}",
361                            tid, datatype.getDatatypeClass(), ((H5Datatype) datatype).isText(), datatype.isNamed(), datatype.isVLEN(),
362                            datatype.isEnum(), datatype.isUnsigned(), ((H5Datatype) datatype).isStdRef(), ((H5Datatype) datatype).isRegRef());
363                }
364                catch (Exception ex) {
365                    log.debug("init(): failed to create datatype for dataset: ", ex);
366                    datatype = null;
367                }
368
369                // Check if the datatype in the file is the native datatype
370                try {
371                    nativeTID = H5.H5Tget_native_type(tid);
372                    isNativeDatatype = H5.H5Tequal(tid, nativeTID);
373                    log.trace("init(): isNativeDatatype={}", isNativeDatatype);
374                }
375                catch (Exception ex) {
376                    log.debug("init(): check if native type failure: ", ex);
377                }
378
379                try {
380                    pid = H5.H5Dget_create_plist(did);
381                    int[] fillStatus = { 0 };
382                    if (H5.H5Pfill_value_defined(pid, fillStatus) >= 0) {
383                        // Check if fill value is user-defined before retrieving it.
384                        if (fillStatus[0] == HDF5Constants.H5D_FILL_VALUE_USER_DEFINED) {
385                            try {
386                                fillValue = H5Datatype.allocateArray((H5Datatype) datatype, 1);
387                            }
388                            catch (OutOfMemoryError e) {
389                                log.debug("init(): out of memory: ", e);
390                                fillValue = null;
391                            }
392                            catch (Exception ex) {
393                                log.debug("init(): allocate fill value buffer failed: ", ex);
394                                fillValue = null;
395                            }
396
397                            log.trace("init(): fillValue={}", fillValue);
398                            try {
399                                H5.H5Pget_fill_value(pid, nativeTID, fillValue);
400                                log.trace("init(): H5Pget_fill_value={}", fillValue);
401                                if (fillValue != null) {
402                                    if (datatype.isUnsigned() && !isFillValueConverted) {
403                                        fillValue = ScalarDS.convertFromUnsignedC(fillValue, null);
404                                        isFillValueConverted = true;
405                                    }
406
407                                    int n = Array.getLength(fillValue);
408                                    for (int i = 0; i < n; i++)
409                                        addFilteredImageValue((Number) Array.get(fillValue, i));
410                                }
411                            }
412                            catch (Exception ex2) {
413                                log.debug("init(): fill value was defined: ", ex2);
414                                fillValue = null;
415                            }
416                        }
417                    }
418                }
419                catch (HDF5Exception ex) {
420                    log.debug("init(): check if fill value is defined failure: ", ex);
421                }
422                finally {
423                    try {
424                        H5.H5Pclose(pid);
425                    }
426                    catch (Exception ex) {
427                        log.debug("init(): H5Pclose(pid {}) failure: ", pid, ex);
428                    }
429                }
430
431                inited = true;
432            }
433            catch (HDF5Exception ex) {
434                log.debug("init(): ", ex);
435            }
436            finally {
437                try {
438                    H5.H5Tclose(nativeTID);
439                }
440                catch (Exception ex2) {
441                    log.debug("init(): H5Tclose(nativeTID {}) failure: ", nativeTID, ex2);
442                }
443                try {
444                    H5.H5Tclose(tid);
445                }
446                catch (HDF5Exception ex2) {
447                    log.debug("init(): H5Tclose(tid {}) failure: ", tid, ex2);
448                }
449                try {
450                    H5.H5Sclose(sid);
451                }
452                catch (HDF5Exception ex2) {
453                    log.debug("init(): H5Sclose(sid {}) failure: ", sid, ex2);
454                }
455            }
456
457            // check for the type of image and interlace mode
458            // it is a true color image at one of three cases:
459            // 1) IMAGE_SUBCLASS = IMAGE_TRUECOLOR,
460            // 2) INTERLACE_MODE = INTERLACE_PIXEL,
461            // 3) INTERLACE_MODE = INTERLACE_PLANE
462            if ((rank >= 3) && isImage) {
463                interlace = -1;
464                isTrueColor = isStringAttributeOf(did, "IMAGE_SUBCLASS", "IMAGE_TRUECOLOR");
465
466                if (isTrueColor) {
467                    interlace = INTERLACE_PIXEL;
468                    if (isStringAttributeOf(did, "INTERLACE_MODE", "INTERLACE_PLANE")) {
469                        interlace = INTERLACE_PLANE;
470                    }
471                }
472            }
473
474            close(did);
475
476            startDims = new long[rank];
477            selectedDims = new long[rank];
478
479            resetSelection();
480        }
481        else {
482            log.debug("init(): failed to open dataset");
483        }
484        refresh = false;
485    }
486
487    /**
488     * Get the token for this object.
489     *
490     * @return true if it has any attributes, false otherwise.
491     */
492    public long[] getToken() {
493        H5O_token_t token = objInfo.token;
494        return HDFNativeData.byteToLong(token.data);
495    }
496
497    /**
498     * Check if the object has any attributes attached.
499     *
500     * @return true if it has any attributes, false otherwise.
501     */
502    @Override
503    public boolean hasAttribute() {
504        objInfo.num_attrs = objMetadata.getObjectAttributeSize();
505
506        if (objInfo.num_attrs < 0) {
507            long did = open();
508            if (did >= 0) {
509                objInfo.num_attrs = 0;
510
511                try {
512                    objInfo = H5.H5Oget_info(did);
513
514                    if(objInfo.num_attrs > 0) {
515                        // test if it is an image
516                        // check image
517                        Object avalue = getAttrValue(did, "CLASS");
518                        if (avalue != null) {
519                            try {
520                                isImageDisplay = isImage = "IMAGE".equalsIgnoreCase(new String((byte[]) avalue).trim());
521                                log.trace("hasAttribute(): isImageDisplay dataset: {} with value = {}", isImageDisplay, avalue);
522                            }
523                            catch (Exception err) {
524                                log.debug("hasAttribute(): check image: ", err);
525                            }
526                        }
527
528                        // retrieve the IMAGE_MINMAXRANGE
529                        avalue = getAttrValue(did, "IMAGE_MINMAXRANGE");
530                        if (avalue != null) {
531                            double x0 = 0;
532                            double x1 = 0;
533                            try {
534                                x0 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 0).toString());
535                                x1 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 1).toString());
536                            }
537                            catch (Exception ex2) {
538                                x0 = x1 = 0;
539                            }
540                            if (x1 > x0) {
541                                imageDataRange = new double[2];
542                                imageDataRange[0] = x0;
543                                imageDataRange[1] = x1;
544                            }
545                        }
546
547                        try {
548                            checkCFconvention(did);
549                        }
550                        catch (Exception ex) {
551                            log.debug("hasAttribute(): checkCFconvention(did {}):", did, ex);
552                        }
553                    }
554                }
555                catch (Exception ex) {
556                    objInfo.num_attrs = 0;
557                    log.debug("hasAttribute(): get object info failure: ", ex);
558                }
559                finally {
560                    close(did);
561                }
562                objMetadata.setObjectAttributeSize((int) objInfo.num_attrs);
563            }
564            else {
565                log.debug("hasAttribute(): could not open dataset");
566            }
567        }
568
569        log.trace("hasAttribute(): nAttributes={}", objInfo.num_attrs);
570        return (objInfo.num_attrs > 0);
571    }
572
573    /**
574     * Returns the datatype of the data object.
575     *
576     * @return the datatype of the data object.
577     */
578    @Override
579    public Datatype getDatatype() {
580        if (!inited)
581            init();
582
583        if (datatype == null) {
584            long did = HDF5Constants.H5I_INVALID_HID;
585            long tid = HDF5Constants.H5I_INVALID_HID;
586
587            did = open();
588            if (did >= 0) {
589                try {
590                    tid = H5.H5Dget_type(did);
591                    log.trace("getDatatype(): isNativeDatatype={}", isNativeDatatype);
592                    if (!isNativeDatatype) {
593                        long tmptid = -1;
594                        try {
595                            tmptid = H5Datatype.toNative(tid);
596                            if (tmptid >= 0) {
597                                try {
598                                    H5.H5Tclose(tid);
599                                }
600                                catch (Exception ex2) {
601                                    log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex2);
602                                }
603                                tid = tmptid;
604                            }
605                        }
606                        catch (Exception ex) {
607                            log.debug("getDatatype(): toNative: ", ex);
608                        }
609                    }
610                    int nativeClass = H5.H5Tget_class(tid);
611                    if (nativeClass == HDF5Constants.H5T_REFERENCE) {
612                        long lsize = 1;
613                        if (rank > 0) {
614                            log.trace("getDatatype(): rank={}, dims={}", rank, dims);
615                            for (int j = 0; j < dims.length; j++) {
616                                lsize *= dims[j];
617                            }
618                        }
619                        datatype = new H5ReferenceType(getFileFormat(), lsize, tid);
620                    }
621                    else
622                        datatype = new H5Datatype(getFileFormat(), tid);
623                }
624                catch (Exception ex) {
625                    log.debug("getDatatype(): ", ex);
626                }
627                finally {
628                    try {
629                        H5.H5Tclose(tid);
630                    }
631                    catch (HDF5Exception ex) {
632                        log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex);
633                    }
634                    try {
635                        H5.H5Dclose(did);
636                    }
637                    catch (HDF5Exception ex) {
638                        log.debug("getDatatype(): H5Dclose(did {}) failure: ", did, ex);
639                    }
640                }
641            }
642        }
643
644        if (isExternal) {
645            String pdir = this.getFileFormat().getAbsoluteFile().getParent();
646
647            if (pdir == null) {
648                pdir = ".";
649            }
650            System.setProperty("user.dir", pdir);
651            log.trace("getDatatype(): External dataset: user.dir={}", pdir);
652        }
653
654        return datatype;
655    }
656
657    /**
658     * Refreshes the dataset before re-read of data.
659     */
660    @Override
661    public Object refreshData() {
662        inited = false;
663        refresh = true;
664
665        init();
666        return super.refreshData();
667    }
668
669    /**
670     * Removes all of the elements from metadata list.
671     * The list should be empty after this call returns.
672     */
673    @Override
674    public void clear() {
675        super.clear();
676        objMetadata.clear();
677    }
678
679    /*
680     * (non-Javadoc)
681     *
682     * @see hdf.object.Dataset#readBytes()
683     */
684    @Override
685    public byte[] readBytes() throws HDF5Exception {
686        byte[] theData = null;
687
688        if (!isInited())
689            init();
690
691        long did = open();
692        if (did >= 0) {
693            long fspace = HDF5Constants.H5I_INVALID_HID;
694            long mspace = HDF5Constants.H5I_INVALID_HID;
695            long tid = HDF5Constants.H5I_INVALID_HID;
696
697            try {
698                long[] lsize = { 1 };
699                for (int j = 0; j < selectedDims.length; j++)
700                    lsize[0] *= selectedDims[j];
701
702                fspace = H5.H5Dget_space(did);
703                mspace = H5.H5Screate_simple(rank, selectedDims, null);
704
705                // set the rectangle selection
706                // HDF5 bug: for scalar dataset, H5Sselect_hyperslab gives core dump
707                if (rank * dims[0] > 1)
708                    H5.H5Sselect_hyperslab(fspace, HDF5Constants.H5S_SELECT_SET, startDims, selectedStride, selectedDims, null); // set block to 1
709
710                tid = H5.H5Dget_type(did);
711                long size = H5.H5Tget_size(tid) * lsize[0];
712                log.trace("readBytes(): size = {}", size);
713
714                if (size < Integer.MIN_VALUE || size > Integer.MAX_VALUE)
715                    throw new Exception("Invalid int size");
716
717                theData = new byte[(int)size];
718
719                log.trace("readBytes(): H5Dread: did={} tid={} fspace={} mspace={}", did, tid, fspace, mspace);
720                H5.H5Dread(did, tid, mspace, fspace, HDF5Constants.H5P_DEFAULT, theData);
721            }
722            catch (Exception ex) {
723                log.debug("readBytes(): failed to read data: ", ex);
724            }
725            finally {
726                try {
727                    H5.H5Sclose(fspace);
728                }
729                catch (Exception ex2) {
730                    log.debug("readBytes(): H5Sclose(fspace {}) failure: ", fspace, ex2);
731                }
732                try {
733                    H5.H5Sclose(mspace);
734                }
735                catch (Exception ex2) {
736                    log.debug("readBytes(): H5Sclose(mspace {}) failure: ", mspace, ex2);
737                }
738                try {
739                    H5.H5Tclose(tid);
740                }
741                catch (HDF5Exception ex2) {
742                    log.debug("readBytes(): H5Tclose(tid {}) failure: ", tid, ex2);
743                }
744                close(did);
745            }
746        }
747
748        return theData;
749    }
750
751    /**
752     * Reads the data from file.
753     *
754     * read() reads the data from file to a memory buffer and returns the memory
755     * buffer. The dataset object does not hold the memory buffer. To store the
756     * memory buffer in the dataset object, one must call getData().
757     *
758     * By default, the whole dataset is read into memory. Users can also select
759     * a subset to read. Subsetting is done in an implicit way.
760     *
761     * <b>How to Select a Subset</b>
762     *
763     * A selection is specified by three arrays: start, stride and count.
764     * <ol>
765     * <li>start: offset of a selection
766     * <li>stride: determines how many elements to move in each dimension
767     * <li>count: number of elements to select in each dimension
768     * </ol>
769     * getStartDims(), getStride() and getSelectedDims() returns the start,
770     * stride and count arrays respectively. Applications can make a selection
771     * by changing the values of the arrays.
772     *
773     * The following example shows how to make a subset. In the example, the
774     * dataset is a 4-dimensional array of [200][100][50][10], i.e. dims[0]=200;
775     * dims[1]=100; dims[2]=50; dims[3]=10; <br>
776     * We want to select every other data point in dims[1] and dims[2]
777     *
778     * <pre>
779     * int rank = dataset.getRank(); // number of dimensions of the dataset
780     * long[] dims = dataset.getDims(); // the dimension sizes of the dataset
781     * long[] selected = dataset.getSelectedDims(); // the selected size of the
782     *                                              // dataset
783     * long[] start = dataset.getStartDims(); // the offset of the selection
784     * long[] stride = dataset.getStride(); // the stride of the dataset
785     * int[] selectedIndex = dataset.getSelectedIndex(); // the selected
786     *                                                   // dimensions for
787     *                                                   // display
788     *
789     * // select dim1 and dim2 as 2D data for display, and slice through dim0
790     * selectedIndex[0] = 1;
791     * selectedIndex[1] = 2;
792     * selectedIndex[1] = 0;
793     *
794     * // reset the selection arrays
795     * for (int i = 0; i &lt; rank; i++) {
796     *     start[i] = 0;
797     *     selected[i] = 1;
798     *     stride[i] = 1;
799     * }
800     *
801     * // set stride to 2 on dim1 and dim2 so that every other data point is
802     * // selected.
803     * stride[1] = 2;
804     * stride[2] = 2;
805     *
806     * // set the selection size of dim1 and dim2
807     * selected[1] = dims[1] / stride[1];
808     * selected[2] = dims[1] / stride[2];
809     *
810     * // when dataset.getData() is called, the selection above will be used
811     * // since
812     * // the dimension arrays are passed by reference. Changes of these arrays
813     * // outside the dataset object directly change the values of these array
814     * // in the dataset object.
815     * </pre>
816     *
817     * For ScalarDS, the memory data buffer is a one-dimensional array of byte,
818     * short, int, float, double or String type based on the datatype of the
819     * dataset.
820     *
821     * @return the data read from file.
822     *
823     * @see #getData()
824     * @see hdf.object.DataFormat#read()
825     *
826     * @throws Exception
827     *             if object can not be read
828     */
829    @Override
830    public Object read() throws Exception {
831        Object readData = null;
832
833        if (!isInited())
834            init();
835
836        try {
837            readData = scalarDatasetCommonIO(H5File.IO_TYPE.READ, null);
838        }
839        catch (Exception ex) {
840            log.debug("read(): failed to read scalar dataset: ", ex);
841            throw new Exception("failed to read scalar dataset: " + ex.getMessage(), ex);
842        }
843
844        return readData;
845    }
846
847    /**
848     * Writes the given data buffer into this dataset in a file.
849     *
850     * @param buf
851     *            The buffer that contains the data values.
852     *
853     * @throws Exception
854     *             If there is an error at the HDF5 library level.
855     */
856    @Override
857    public void write(Object buf) throws Exception {
858        if (this.getFileFormat().isReadOnly())
859            throw new Exception("cannot write to scalar dataset in file opened as read-only");
860
861        if (!isInited())
862            init();
863
864        try {
865            scalarDatasetCommonIO(H5File.IO_TYPE.WRITE, buf);
866        }
867        catch (Exception ex) {
868            log.debug("write(Object): failed to write to scalar dataset: ", ex);
869            throw new Exception("failed to write to scalar dataset: " + ex.getMessage(), ex);
870        }
871    }
872
873    private Object scalarDatasetCommonIO(H5File.IO_TYPE ioType, Object writeBuf) throws Exception {
874        H5Datatype dsDatatype = (H5Datatype) getDatatype();
875        Object theData = null;
876
877        /*
878         * I/O type-specific pre-initialization.
879         */
880        if (ioType == H5File.IO_TYPE.WRITE) {
881            if (writeBuf == null) {
882                log.debug("scalarDatasetCommonIO(): writeBuf is null");
883                throw new Exception("write buffer is null");
884            }
885            log.trace("scalarDatasetCommonIO(): check write unsupported datatype data");
886
887            /*
888             * Check for any unsupported datatypes and fail early before
889             * attempting to write to the dataset.
890             */
891            if (dsDatatype.isVLEN() && !dsDatatype.isText()) {
892                log.debug("scalarDatasetCommonIO(): Cannot write non-string variable-length data");
893                throw new HDF5Exception("Writing non-string variable-length data is not supported");
894            }
895        }
896
897        long did = open();
898        if (did >= 0) {
899            long[] spaceIDs = { HDF5Constants.H5I_INVALID_HID, HDF5Constants.H5I_INVALID_HID }; // spaceIDs[0]=mspace, spaceIDs[1]=fspace
900
901            try {
902                /*
903                 * NOTE: this call sets up a hyperslab selection in the file according to the
904                 * current selection in the dataset object.
905                 */
906                long totalSelectedSpacePoints = H5Utils.getTotalSelectedSpacePoints(did, dims, startDims,
907                        selectedStride, selectedDims, spaceIDs);
908
909                if (ioType == H5File.IO_TYPE.READ) {
910                    log.trace("scalarDatasetCommonIO():read ioType isNamed={} isEnum={} isText={} isRefObj={}", dsDatatype.isNamed(), dsDatatype.isEnum(), dsDatatype.isText(), dsDatatype.isRefObj());
911                    if ((originalBuf == null) || dsDatatype.isEnum() || dsDatatype.isText() || dsDatatype.isRefObj()
912                            || ((originalBuf != null) && (totalSelectedSpacePoints != nPoints))) {
913                        try {
914                            theData = H5Datatype.allocateArray(dsDatatype, (int)totalSelectedSpacePoints);
915                        }
916                        catch (OutOfMemoryError err) {
917                            log.debug("scalarDatasetCommonIO(): Out of memory");
918                            throw new HDF5Exception("Out Of Memory");
919                        }
920                    }
921                    else {
922                        // reuse the buffer if the size is the same
923                        log.trace("scalarDatasetCommonIO():read ioType reuse the buffer if the size is the same");
924                        theData = originalBuf;
925                    }
926
927                    if (theData != null) {
928                        /*
929                         * Actually read the data now that everything has been setup.
930                         */
931                        long tid = HDF5Constants.H5I_INVALID_HID;
932                        try {
933                            log.trace("scalarDatasetCommonIO():read ioType create native");
934                            tid = dsDatatype.createNative();
935
936                            if (dsDatatype.isVLEN() || (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isVLEN())) {
937                                log.trace("scalarDatasetCommonIO(): H5DreadVL did={} tid={} spaceIDs[0]={} spaceIDs[1]={}",
938                                        did, tid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
939                                        (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
940
941                                H5.H5DreadVL(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT,
942                                        (Object[]) theData);
943                            }
944                            else {
945                                log.trace("scalarDatasetCommonIO(): H5Dread did={} tid={} spaceIDs[0]={} spaceIDs[1]={}",
946                                        did, tid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
947                                        (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
948
949                                H5.H5Dread(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, theData);
950                            }
951                        }
952                        catch (HDF5DataFiltersException exfltr) {
953                            log.debug("scalarDatasetCommonIO(): read failure: ", exfltr);
954                            throw new Exception("Filter not available exception: " + exfltr.getMessage(), exfltr);
955                        }
956                        catch (Exception ex) {
957                            log.debug("scalarDatasetCommonIO(): read failure: ", ex);
958                            throw new Exception(ex.getMessage(), ex);
959                        }
960                        finally {
961                            dsDatatype.close(tid);
962                        }
963
964                        /*
965                         * Perform any necessary data conversions.
966                         */
967                        if (dsDatatype.isText() && convertByteToString && (theData instanceof byte[])) {
968                            log.trace("scalarDatasetCommonIO(): isText: converting byte array to string array");
969                            theData = byteToString((byte[]) theData, (int) dsDatatype.getDatatypeSize());
970                        }
971                        else if (dsDatatype.isFloat() && dsDatatype.getDatatypeSize() == 16) {
972                            log.trace("scalarDatasetCommonIO(): isFloat: converting byte array to BigDecimal array");
973                            theData = dsDatatype.byteToBigDecimal(0, (int)totalSelectedSpacePoints, (byte[]) theData);
974                        }
975                        else if (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isFloat() && dsDatatype.getDatatypeBase().getDatatypeSize() == 16) {
976                            log.trace("scalarDatasetCommonIO(): isArray and isFloat: converting byte array to BigDecimal array");
977                            long[] arrayDims = dsDatatype.getArrayDims();
978                            int asize = (int)totalSelectedSpacePoints;
979                            for (int j = 0; j < arrayDims.length; j++) {
980                                asize *= arrayDims[j];
981                            }
982                            theData = ((H5Datatype)dsDatatype.getDatatypeBase()).byteToBigDecimal(0, asize, (byte[]) theData);
983                        }
984                    }
985                } // H5File.IO_TYPE.READ
986                else {
987                    /*
988                     * Perform any necessary data conversions before writing the data.
989                     *
990                     * Note that v-len strings do not get converted, regardless of
991                     * conversion request type.
992                     */
993                    Object tmpData = writeBuf;
994                    try {
995                        // Check if we need to convert integer data
996                        int tsize = (int) dsDatatype.getDatatypeSize();
997                        String cname = writeBuf.getClass().getName();
998                        log.trace("scalarDatasetCommonIO(): cname={} of datatype size={}", cname, tsize);
999                        char dname = cname.charAt(cname.lastIndexOf("[") + 1);
1000                        boolean doIntConversion = (((tsize == 1) && (dname == 'S')) || ((tsize == 2) && (dname == 'I'))
1001                                || ((tsize == 4) && (dname == 'J')) || (dsDatatype.isUnsigned() && unsignedConverted));
1002
1003                        if (doIntConversion) {
1004                            log.trace("scalarDatasetCommonIO(): converting integer data to unsigned C-type integers");
1005                            tmpData = convertToUnsignedC(writeBuf, null);
1006                        }
1007                        else if (dsDatatype.isText() && !dsDatatype.isVarStr() && convertByteToString) {
1008                            log.trace("scalarDatasetCommonIO(): converting string array to byte array");
1009                            tmpData = stringToByte((String[]) writeBuf, tsize);
1010                        }
1011                        else if (dsDatatype.isEnum() && (Array.get(writeBuf, 0) instanceof String)) {
1012                            log.trace("scalarDatasetCommonIO(): converting enum names to values");
1013                            tmpData = dsDatatype.convertEnumNameToValue((String[]) writeBuf);
1014                        }
1015                        else if (dsDatatype.isFloat() && dsDatatype.getDatatypeSize() == 16) {
1016                            log.trace("scalarDatasetCommonIO(): isFloat: converting BigDecimal array to byte array");
1017                            throw new Exception("data conversion failure: cannot write BigDecimal values");
1018                            //tmpData = dsDatatype.bigDecimalToByte(0, (int)totalSelectedSpacePoints, (BigDecimal[]) writeBuf);
1019                        }
1020                    }
1021                    catch (Exception ex) {
1022                        log.debug("scalarDatasetCommonIO(): data conversion failure: ", ex);
1023                        throw new Exception("data conversion failure: " + ex.getMessage());
1024                    }
1025
1026                    /*
1027                     * Actually write the data now that everything has been setup.
1028                     */
1029                    long tid = HDF5Constants.H5I_INVALID_HID;
1030                    try {
1031                        tid = dsDatatype.createNative();
1032
1033                        if (dsDatatype.isVLEN() || (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isVLEN())) {
1034                            log.trace("scalarDatasetCommonIO(): H5DwriteVL did={} tid={} spaceIDs[0]={} spaceIDs[1]={}",
1035                                    did, tid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
1036                                    (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
1037
1038                            H5.H5DwriteVL(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, (Object[]) tmpData);
1039                        }
1040                        else {
1041                            log.trace("scalarDatasetCommonIO(): H5Dwrite did={} tid={} spaceIDs[0]={} spaceIDs[1]={}",
1042                                    did, tid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
1043                                    (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
1044
1045                            H5.H5Dwrite(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, tmpData);
1046                        }
1047                    }
1048                    catch (Exception ex) {
1049                        log.debug("scalarDatasetCommonIO(): write failure: ", ex);
1050                        throw new Exception(ex.getMessage());
1051                    }
1052                    finally {
1053                        dsDatatype.close(tid);
1054                    }
1055                } // H5File.IO_TYPE.WRITE
1056            }
1057            finally {
1058                if (HDF5Constants.H5S_ALL != spaceIDs[0]) {
1059                    try {
1060                        H5.H5Sclose(spaceIDs[0]);
1061                    }
1062                    catch (Exception ex) {
1063                        log.debug("scalarDatasetCommonIO(): H5Sclose(spaceIDs[0] {}) failure: ", spaceIDs[0], ex);
1064                    }
1065                }
1066
1067                if (HDF5Constants.H5S_ALL != spaceIDs[1]) {
1068                    try {
1069                        H5.H5Sclose(spaceIDs[1]);
1070                    }
1071                    catch (Exception ex) {
1072                        log.debug("scalarDatasetCommonIO(): H5Sclose(spaceIDs[1] {}) failure: ", spaceIDs[1], ex);
1073                    }
1074                }
1075
1076                close(did);
1077            }
1078        }
1079        else
1080            log.debug("scalarDatasetCommonIO(): failed to open dataset");
1081
1082        return theData;
1083    }
1084
1085    /**
1086     * Retrieves the object's metadata, such as attributes, from the file.
1087     *
1088     * Metadata, such as attributes, is stored in a List.
1089     *
1090     * @return the list of metadata objects.
1091     *
1092     * @throws HDF5Exception
1093     *             if the metadata can not be retrieved
1094     */
1095    @Override
1096    public List<Attribute> getMetadata() throws HDF5Exception {
1097        int gmIndexType = 0;
1098        int gmIndexOrder = 0;
1099
1100        try {
1101            gmIndexType = fileFormat.getIndexType(null);
1102        }
1103        catch (Exception ex) {
1104            log.debug("getMetadata(): getIndexType failed: ", ex);
1105        }
1106        try {
1107            gmIndexOrder = fileFormat.getIndexOrder(null);
1108        }
1109        catch (Exception ex) {
1110            log.debug("getMetadata(): getIndexOrder failed: ", ex);
1111        }
1112        return this.getMetadata(gmIndexType, gmIndexOrder);
1113    }
1114
1115    /**
1116     * Retrieves the object's metadata, such as attributes, from the file.
1117     *
1118     * Metadata, such as attributes, is stored in a List.
1119     *
1120     * @param attrPropList
1121     *             the list of properties to get
1122     *
1123     * @return the list of metadata objects.
1124     *
1125     * @throws HDF5Exception
1126     *             if the metadata can not be retrieved
1127     */
1128    public List<Attribute> getMetadata(int... attrPropList) throws HDF5Exception {
1129        if (!isInited())
1130            init();
1131
1132        try {
1133            this.linkTargetObjName = H5File.getLinkTargetName(this);
1134        }
1135        catch (Exception ex) {
1136            log.debug("getMetadata(): getLinkTargetName failed: ", ex);
1137        }
1138
1139        if (objMetadata.getAttributeList() == null) {
1140            long did = HDF5Constants.H5I_INVALID_HID;
1141            long pcid = HDF5Constants.H5I_INVALID_HID;
1142            long paid = HDF5Constants.H5I_INVALID_HID;
1143
1144            did = open();
1145            if (did >= 0) {
1146                try {
1147                    // get the compression and chunk information
1148                    pcid = H5.H5Dget_create_plist(did);
1149                    paid = H5.H5Dget_access_plist(did);
1150                    long storageSize = H5.H5Dget_storage_size(did);
1151                    int nfilt = H5.H5Pget_nfilters(pcid);
1152                    int layoutType = H5.H5Pget_layout(pcid);
1153
1154                    storageLayout.setLength(0);
1155                    compression.setLength(0);
1156
1157                    if (layoutType == HDF5Constants.H5D_CHUNKED) {
1158                        chunkSize = new long[rank];
1159                        H5.H5Pget_chunk(pcid, rank, chunkSize);
1160                        int n = chunkSize.length;
1161                        storageLayout.append("CHUNKED: ").append(chunkSize[0]);
1162                        for (int i = 1; i < n; i++)
1163                            storageLayout.append(" X ").append(chunkSize[i]);
1164
1165                        if (nfilt > 0) {
1166                            long nelmts = 1;
1167                            long uncompSize;
1168                            long datumSize = getDatatype().getDatatypeSize();
1169
1170                            if (datumSize < 0) {
1171                                long tmptid = HDF5Constants.H5I_INVALID_HID;
1172                                try {
1173                                    tmptid = H5.H5Dget_type(did);
1174                                    datumSize = H5.H5Tget_size(tmptid);
1175                                }
1176                                finally {
1177                                    try {
1178                                        H5.H5Tclose(tmptid);
1179                                    }
1180                                    catch (Exception ex2) {
1181                                        log.debug("getMetadata(): H5Tclose(tmptid {}) failure: ", tmptid, ex2);
1182                                    }
1183                                }
1184                            }
1185
1186                            for (int i = 0; i < rank; i++)
1187                                nelmts *= dims[i];
1188                            uncompSize = nelmts * datumSize;
1189
1190                            /* compression ratio = uncompressed size / compressed size */
1191
1192                            if (storageSize != 0) {
1193                                double ratio = (double) uncompSize / (double) storageSize;
1194                                DecimalFormat df = new DecimalFormat();
1195                                df.setMinimumFractionDigits(3);
1196                                df.setMaximumFractionDigits(3);
1197                                compression.append(df.format(ratio)).append(":1");
1198                            }
1199                        }
1200                    }
1201                    else if (layoutType == HDF5Constants.H5D_COMPACT) {
1202                        storageLayout.append("COMPACT");
1203                    }
1204                    else if (layoutType == HDF5Constants.H5D_CONTIGUOUS) {
1205                        storageLayout.append("CONTIGUOUS");
1206                        if (H5.H5Pget_external_count(pcid) > 0)
1207                            storageLayout.append(" - EXTERNAL ");
1208                    }
1209                    else if (layoutType == HDF5Constants.H5D_VIRTUAL) {
1210                        storageLayout.append("VIRTUAL - ");
1211                        try {
1212                            long vmaps = H5.H5Pget_virtual_count(pcid);
1213                            try {
1214                                int virtView = H5.H5Pget_virtual_view(paid);
1215                                long virtGap = H5.H5Pget_virtual_printf_gap(paid);
1216                                if (virtView == HDF5Constants.H5D_VDS_FIRST_MISSING)
1217                                    storageLayout.append("First Missing");
1218                                else
1219                                    storageLayout.append("Last Available");
1220                                storageLayout.append("\nGAP : ").append(virtGap);
1221                            }
1222                            catch (Exception err) {
1223                                log.debug("getMetadata(): vds error: ", err);
1224                                storageLayout.append("ERROR");
1225                            }
1226                            storageLayout.append("\nMAPS : ").append(vmaps);
1227                            if (vmaps > 0) {
1228                                for (long next = 0; next < vmaps; next++) {
1229                                    try {
1230                                        H5.H5Pget_virtual_vspace(pcid, next);
1231                                        H5.H5Pget_virtual_srcspace(pcid, next);
1232                                        String fname = H5.H5Pget_virtual_filename(pcid, next);
1233                                        String dsetname = H5.H5Pget_virtual_dsetname(pcid, next);
1234                                        storageLayout.append("\n").append(fname).append(" : ").append(dsetname);
1235                                    }
1236                                    catch (Exception err) {
1237                                        log.debug("getMetadata(): vds space[{}] error: ", next, err);
1238                                        storageLayout.append("ERROR");
1239                                    }
1240                                }
1241                            }
1242                        }
1243                        catch (Exception err) {
1244                            log.debug("getMetadata(): vds count error: ", err);
1245                            storageLayout.append("ERROR");
1246                        }
1247                    }
1248                    else {
1249                        chunkSize = null;
1250                        storageLayout.append("NONE");
1251                    }
1252
1253                    int[] flags = { 0, 0 };
1254                    long[] cdNelmts = { 20 };
1255                    int[] cdValues = new int[(int) cdNelmts[0]];
1256                    String[] cdName = { "", "" };
1257                    log.trace("getMetadata(): {} filters in pipeline", nfilt);
1258                    int filter = -1;
1259                    int[] filterConfig = { 1 };
1260
1261                    filters.setLength(0);
1262
1263                    if (nfilt == 0) {
1264                        filters.append("NONE");
1265                    }
1266                    else {
1267                        for (int i = 0, k = 0; i < nfilt; i++) {
1268                            log.trace("getMetadata(): filter[{}]", i);
1269                            if (i > 0)
1270                                filters.append(", ");
1271                            if (k > 0)
1272                                compression.append(", ");
1273
1274                            try {
1275                                cdNelmts[0] = 20;
1276                                cdValues = new int[(int) cdNelmts[0]];
1277                                cdValues = new int[(int) cdNelmts[0]];
1278                                filter = H5.H5Pget_filter(pcid, i, flags, cdNelmts, cdValues, 120, cdName, filterConfig);
1279                                log.trace("getMetadata(): filter[{}] is {} has {} elements ", i, cdName[0], cdNelmts[0]);
1280                                for (int j = 0; j < cdNelmts[0]; j++)
1281                                    log.trace("getMetadata(): filter[{}] element {} = {}", i, j, cdValues[j]);
1282                            }
1283                            catch (Exception err) {
1284                                log.debug("getMetadata(): filter[{}] error: ", i, err);
1285                                filters.append("ERROR");
1286                                continue;
1287                            }
1288
1289                            if (filter == HDF5Constants.H5Z_FILTER_NONE) {
1290                                filters.append("NONE");
1291                            }
1292                            else if (filter == HDF5Constants.H5Z_FILTER_DEFLATE) {
1293                                filters.append("GZIP");
1294                                compression.append(COMPRESSION_GZIP_TXT).append(cdValues[0]);
1295                                k++;
1296                            }
1297                            else if (filter == HDF5Constants.H5Z_FILTER_FLETCHER32) {
1298                                filters.append("Error detection filter");
1299                            }
1300                            else if (filter == HDF5Constants.H5Z_FILTER_SHUFFLE) {
1301                                filters.append("SHUFFLE: Nbytes = ").append(cdValues[0]);
1302                            }
1303                            else if (filter == HDF5Constants.H5Z_FILTER_NBIT) {
1304                                filters.append("NBIT");
1305                            }
1306                            else if (filter == HDF5Constants.H5Z_FILTER_SCALEOFFSET) {
1307                                filters.append("SCALEOFFSET: MIN BITS = ").append(cdValues[0]);
1308                            }
1309                            else if (filter == HDF5Constants.H5Z_FILTER_SZIP) {
1310                                filters.append("SZIP");
1311                                compression.append("SZIP: Pixels per block = ").append(cdValues[1]);
1312                                k++;
1313                                int flag = -1;
1314                                try {
1315                                    flag = H5.H5Zget_filter_info(filter);
1316                                }
1317                                catch (Exception ex) {
1318                                    log.debug("getMetadata(): H5Zget_filter_info failure: ", ex);
1319                                    flag = -1;
1320                                }
1321                                if (flag == HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED)
1322                                    compression.append(": H5Z_FILTER_CONFIG_DECODE_ENABLED");
1323                                else if ((flag == HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED)
1324                                        || (flag >= (HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED
1325                                                + HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED)))
1326                                    compression.append(": H5Z_FILTER_CONFIG_ENCODE_ENABLED");
1327                            }
1328                            else {
1329                                filters.append("USERDEFINED ").append(cdName[0]).append("(").append(filter).append("): ");
1330                                for (int j = 0; j < cdNelmts[0]; j++) {
1331                                    if (j > 0)
1332                                        filters.append(", ");
1333                                    filters.append(cdValues[j]);
1334                                }
1335                                log.debug("getMetadata(): filter[{}] is user defined compression", i);
1336                            }
1337                        } //  (int i=0; i<nfilt; i++)
1338                    }
1339
1340                    if (compression.length() == 0)
1341                        compression.append("NONE");
1342                    log.trace("getMetadata(): filter compression={}", compression);
1343                    log.trace("getMetadata(): filter information={}", filters);
1344
1345                    storage.setLength(0);
1346                    storage.append("SIZE: ").append(storageSize);
1347
1348                    try {
1349                        int[] at = { 0 };
1350                        H5.H5Pget_alloc_time(pcid, at);
1351                        storage.append(", allocation time: ");
1352                        if (at[0] == HDF5Constants.H5D_ALLOC_TIME_EARLY)
1353                            storage.append("Early");
1354                        else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_INCR)
1355                            storage.append("Incremental");
1356                        else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_LATE)
1357                            storage.append("Late");
1358                        else
1359                            storage.append("Default");
1360                    }
1361                    catch (Exception ex) {
1362                        log.debug("getMetadata(): Storage allocation time:", ex);
1363                    }
1364                    log.trace("getMetadata(): storage={}", storage);
1365                }
1366                finally {
1367                    try {
1368                        H5.H5Pclose(paid);
1369                    }
1370                    catch (Exception ex) {
1371                        log.debug("getMetadata(): H5Pclose(paid {}) failure: ", paid, ex);
1372                    }
1373                    try {
1374                        H5.H5Pclose(pcid);
1375                    }
1376                    catch (Exception ex) {
1377                        log.debug("getMetadata(): H5Pclose(pcid {}) failure: ", pcid, ex);
1378                    }
1379                    close(did);
1380                }
1381            }
1382        }
1383
1384        List<Attribute> attrlist = null;
1385        try {
1386            attrlist = objMetadata.getMetadata(attrPropList);
1387        }
1388        catch (Exception ex) {
1389            log.debug("getMetadata(): getMetadata failed: ", ex);
1390        }
1391        return attrlist;
1392    }
1393
1394    /**
1395     * Writes a specific piece of metadata (such as an attribute) into the file.
1396     *
1397     * If an HDF(4&amp;5) attribute exists in the file, this method updates its
1398     * value. If the attribute does not exist in the file, it creates the
1399     * attribute in the file and attaches it to the object. It will fail to
1400     * write a new attribute to the object where an attribute with the same name
1401     * already exists. To update the value of an existing attribute in the file,
1402     * one needs to get the instance of the attribute by getMetadata(), change
1403     * its values, then use writeMetadata() to write the value.
1404     *
1405     * @param info
1406     *            the metadata to write.
1407     *
1408     * @throws Exception
1409     *             if the metadata can not be written
1410     */
1411    @Override
1412    public void writeMetadata(Object info) throws Exception {
1413        try {
1414            objMetadata.writeMetadata(info);
1415        }
1416        catch (Exception ex) {
1417            log.debug("writeMetadata(): Object not an Attribute");
1418        }
1419    }
1420
1421    /**
1422     * Deletes an existing piece of metadata from this object.
1423     *
1424     * @param info
1425     *            the metadata to delete.
1426     *
1427     * @throws HDF5Exception
1428     *             if the metadata can not be removed
1429     */
1430    @Override
1431    public void removeMetadata(Object info) throws HDF5Exception {
1432        try {
1433            objMetadata.removeMetadata(info);
1434        }
1435        catch (Exception ex) {
1436            log.debug("removeMetadata(): Object not an Attribute");
1437            return;
1438        }
1439
1440        Attribute attr = (Attribute) info;
1441        log.trace("removeMetadata(): {}", attr.getAttributeName());
1442        long did = open();
1443        if (did >= 0) {
1444            try {
1445                H5.H5Adelete(did, attr.getAttributeName());
1446            }
1447            finally {
1448                close(did);
1449            }
1450        }
1451        else {
1452            log.debug("removeMetadata(): failed to open scalar dataset");
1453        }
1454    }
1455
1456    /**
1457     * Updates an existing piece of metadata attached to this object.
1458     *
1459     * @param info
1460     *            the metadata to update.
1461     *
1462     * @throws HDF5Exception
1463     *             if the metadata can not be updated
1464     */
1465    @Override
1466    public void updateMetadata(Object info) throws HDF5Exception {
1467        try {
1468            objMetadata.updateMetadata(info);
1469        }
1470        catch (Exception ex) {
1471            log.debug("updateMetadata(): Object not an Attribute");
1472            return;
1473        }
1474    }
1475
1476    /*
1477     * (non-Javadoc)
1478     *
1479     * @see hdf.object.HObject#setName(java.lang.String)
1480     */
1481    @Override
1482    public void setName(String newName) throws Exception {
1483        if (newName == null)
1484            throw new IllegalArgumentException("The new name is NULL");
1485
1486        H5File.renameObject(this, newName);
1487        super.setName(newName);
1488    }
1489
1490    /**
1491     * Resets selection of dataspace
1492     */
1493    protected void resetSelection() {
1494        super.resetSelection();
1495
1496        if (interlace == INTERLACE_PIXEL) {
1497            // 24-bit TRUE color image
1498            // [height][width][pixel components]
1499            selectedDims[2] = 3;
1500            selectedDims[0] = dims[0];
1501            selectedDims[1] = dims[1];
1502            selectedIndex[0] = 0; // index for height
1503            selectedIndex[1] = 1; // index for width
1504            selectedIndex[2] = 2; // index for depth
1505        }
1506        else if (interlace == INTERLACE_PLANE) {
1507            // 24-bit TRUE color image
1508            // [pixel components][height][width]
1509            selectedDims[0] = 3;
1510            selectedDims[1] = dims[1];
1511            selectedDims[2] = dims[2];
1512            selectedIndex[0] = 1; // index for height
1513            selectedIndex[1] = 2; // index for width
1514            selectedIndex[2] = 0; // index for depth
1515        }
1516
1517        if ((rank > 1) && (selectedIndex[0] > selectedIndex[1]))
1518            isDefaultImageOrder = false;
1519        else
1520            isDefaultImageOrder = true;
1521    }
1522
1523    /**
1524     * Creates a scalar dataset in a file with/without chunking and compression.
1525     *
1526     * @param name
1527     *            the name of the dataset to create.
1528     * @param pgroup
1529     *            parent group where the new dataset is created.
1530     * @param type
1531     *            the datatype of the dataset.
1532     * @param dims
1533     *            the dimension size of the dataset.
1534     * @param maxdims
1535     *            the max dimension size of the dataset. maxdims is set to dims if maxdims = null.
1536     * @param chunks
1537     *            the chunk size of the dataset. No chunking if chunk = null.
1538     * @param gzip
1539     *            GZIP compression level (1 to 9). No compression if gzip&lt;=0.
1540     * @param data
1541     *            the array of data values.
1542     *
1543     * @return the new scalar dataset if successful; otherwise returns null.
1544     *
1545     * @throws Exception if there is a failure.
1546     */
1547    public static Dataset create(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims,
1548            long[] chunks, int gzip, Object data) throws Exception {
1549        return create(name, pgroup, type, dims, maxdims, chunks, gzip, null, data);
1550    }
1551
1552    /**
1553     * Creates a scalar dataset in a file with/without chunking and compression.
1554     *
1555     * The following example shows how to create a string dataset using this function.
1556     *
1557     * <pre>
1558     * H5File file = new H5File(&quot;test.h5&quot;, H5File.CREATE);
1559     * int max_str_len = 120;
1560     * Datatype strType = new H5Datatype(Datatype.CLASS_STRING, max_str_len, Datatype.NATIVE, Datatype.NATIVE);
1561     * int size = 10000;
1562     * long dims[] = { size };
1563     * long chunks[] = { 1000 };
1564     * int gzip = 9;
1565     * String strs[] = new String[size];
1566     *
1567     * for (int i = 0; i &lt; size; i++)
1568     *     strs[i] = String.valueOf(i);
1569     *
1570     * file.open();
1571     * file.createScalarDS(&quot;/1D scalar strings&quot;, null, strType, dims, null, chunks, gzip, strs);
1572     *
1573     * try {
1574     *     file.close();
1575     * }
1576     * catch (Exception ex) {
1577     * }
1578     * </pre>
1579     *
1580     * @param name
1581     *            the name of the dataset to create.
1582     * @param pgroup
1583     *            parent group where the new dataset is created.
1584     * @param type
1585     *            the datatype of the dataset.
1586     * @param dims
1587     *            the dimension size of the dataset.
1588     * @param maxdims
1589     *            the max dimension size of the dataset. maxdims is set to dims if maxdims = null.
1590     * @param chunks
1591     *            the chunk size of the dataset. No chunking if chunk = null.
1592     * @param gzip
1593     *            GZIP compression level (1 to 9). No compression if gzip&lt;=0.
1594     * @param fillValue
1595     *            the default data value.
1596     * @param data
1597     *            the array of data values.
1598     *
1599     * @return the new scalar dataset if successful; otherwise returns null.
1600     *
1601     * @throws Exception if there is a failure.
1602     */
1603    public static Dataset create(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims,
1604            long[] chunks, int gzip, Object fillValue, Object data) throws Exception {
1605        H5ScalarDS dataset = null;
1606        String fullPath = null;
1607        long did = HDF5Constants.H5I_INVALID_HID;
1608        long plist = HDF5Constants.H5I_INVALID_HID;
1609        long sid = HDF5Constants.H5I_INVALID_HID;
1610        long tid = HDF5Constants.H5I_INVALID_HID;
1611
1612        if ((pgroup == null) || (name == null) || (dims == null) || ((gzip > 0) && (chunks == null))) {
1613            log.debug("create(): one or more parameters are null");
1614            return null;
1615        }
1616
1617        H5File file = (H5File) pgroup.getFileFormat();
1618        if (file == null) {
1619            log.debug("create(): parent group FileFormat is null");
1620            return null;
1621        }
1622
1623        String path = HObject.SEPARATOR;
1624        if (!pgroup.isRoot()) {
1625            path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR;
1626            if (name.endsWith("/"))
1627                name = name.substring(0, name.length() - 1);
1628            int idx = name.lastIndexOf('/');
1629            if (idx >= 0)
1630                name = name.substring(idx + 1);
1631        }
1632
1633        fullPath = path + name;
1634
1635        // setup chunking and compression
1636        boolean isExtentable = false;
1637        if (maxdims != null) {
1638            for (int i = 0; i < maxdims.length; i++) {
1639                if (maxdims[i] == 0)
1640                    maxdims[i] = dims[i];
1641                else if (maxdims[i] < 0)
1642                    maxdims[i] = HDF5Constants.H5S_UNLIMITED;
1643
1644                if (maxdims[i] != dims[i])
1645                    isExtentable = true;
1646            }
1647        }
1648
1649        // HDF5 requires you to use chunking in order to define extendible
1650        // datasets. Chunking makes it possible to extend datasets efficiently,
1651        // without having to reorganize storage excessively. Using default size
1652        // of 64x...which has good performance
1653        if ((chunks == null) && isExtentable) {
1654            chunks = new long[dims.length];
1655            for (int i = 0; i < dims.length; i++)
1656                chunks[i] = Math.min(dims[i], 64);
1657        }
1658
1659        // prepare the dataspace and datatype
1660        int rank = dims.length;
1661
1662        tid = type.createNative();
1663        if (tid >= 0) {
1664            try {
1665                sid = H5.H5Screate_simple(rank, dims, maxdims);
1666
1667                // figure out creation properties
1668                plist = HDF5Constants.H5P_DEFAULT;
1669
1670                byte[] valFill = null;
1671                try {
1672                    valFill = parseFillValue(type, fillValue);
1673                }
1674                catch (Exception ex) {
1675                    log.debug("create(): parse fill value: ", ex);
1676                }
1677                log.trace("create(): parseFillValue={}", valFill);
1678
1679                if (chunks != null || valFill != null) {
1680                    plist = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
1681
1682                    if (chunks != null) {
1683                        H5.H5Pset_layout(plist, HDF5Constants.H5D_CHUNKED);
1684                        H5.H5Pset_chunk(plist, rank, chunks);
1685
1686                        // compression requires chunking
1687                        if (gzip > 0) {
1688                            H5.H5Pset_deflate(plist, gzip);
1689                        }
1690                    }
1691
1692                    if (valFill != null)
1693                        H5.H5Pset_fill_value(plist, tid, valFill);
1694                }
1695
1696                long fid = file.getFID();
1697
1698                log.trace("create(): create dataset fid={}", fid);
1699                did = H5.H5Dcreate(fid, fullPath, tid, sid, HDF5Constants.H5P_DEFAULT, plist, HDF5Constants.H5P_DEFAULT);
1700                log.trace("create(): create dataset did={}", did);
1701                dataset = new H5ScalarDS(file, name, path);
1702            }
1703            finally {
1704                try {
1705                    H5.H5Pclose(plist);
1706                }
1707                catch (HDF5Exception ex) {
1708                    log.debug("create(): H5Pclose(plist {}) failure: ", plist, ex);
1709                }
1710                try {
1711                    H5.H5Sclose(sid);
1712                }
1713                catch (HDF5Exception ex) {
1714                    log.debug("create(): H5Sclose(sid {}) failure: ", sid, ex);
1715                }
1716                try {
1717                    H5.H5Tclose(tid);
1718                }
1719                catch (HDF5Exception ex) {
1720                    log.debug("create(): H5Tclose(tid {}) failure: ", tid, ex);
1721                }
1722                try {
1723                    H5.H5Dclose(did);
1724                }
1725                catch (HDF5Exception ex) {
1726                    log.debug("create(): H5Dclose(did {}) failure: ", did, ex);
1727                }
1728            }
1729        }
1730
1731        if (dataset != null) {
1732            pgroup.addToMemberList(dataset);
1733            if (data != null) {
1734                dataset.init();
1735                long[] selected = dataset.getSelectedDims();
1736                for (int i = 0; i < rank; i++)
1737                    selected[i] = dims[i];
1738                dataset.write(data);
1739            }
1740        }
1741
1742        return dataset;
1743    }
1744
1745    // check _FillValue, valid_min, valid_max, and valid_range
1746    private void checkCFconvention(long oid) throws Exception {
1747        Object avalue = getAttrValue(oid, "_FillValue");
1748
1749        if (avalue != null) {
1750            int n = Array.getLength(avalue);
1751            for (int i = 0; i < n; i++)
1752                addFilteredImageValue((Number) Array.get(avalue, i));
1753        }
1754
1755        if (imageDataRange == null || imageDataRange[1] <= imageDataRange[0]) {
1756            double x0 = 0;
1757            double x1 = 0;
1758            avalue = getAttrValue(oid, "valid_range");
1759            if (avalue != null) {
1760                try {
1761                    x0 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 0).toString());
1762                    x1 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 1).toString());
1763                    imageDataRange = new double[2];
1764                    imageDataRange[0] = x0;
1765                    imageDataRange[1] = x1;
1766                    return;
1767                }
1768                catch (Exception ex) {
1769                    log.debug("checkCFconvention(): valid_range: ", ex);
1770                }
1771            }
1772
1773            avalue = getAttrValue(oid, "valid_min");
1774            if (avalue != null) {
1775                try {
1776                    x0 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 0).toString());
1777                }
1778                catch (Exception ex) {
1779                    log.debug("checkCFconvention(): valid_min: ", ex);
1780                }
1781                avalue = getAttrValue(oid, "valid_max");
1782                if (avalue != null) {
1783                    try {
1784                        x1 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 0).toString());
1785                        imageDataRange = new double[2];
1786                        imageDataRange[0] = x0;
1787                        imageDataRange[1] = x1;
1788                    }
1789                    catch (Exception ex) {
1790                        log.debug("checkCFconvention(): valid_max:", ex);
1791                    }
1792                }
1793            }
1794        } // (imageDataRange==null || imageDataRange[1]<=imageDataRange[0])
1795    }
1796
1797    private Object getAttrValue(long oid, String aname) {
1798        log.trace("getAttrValue(): start: name={}", aname);
1799
1800        long aid = HDF5Constants.H5I_INVALID_HID;
1801        long atid = HDF5Constants.H5I_INVALID_HID;
1802        long asid = HDF5Constants.H5I_INVALID_HID;
1803        Object avalue = null;
1804
1805        try {
1806            // try to find attribute name
1807            if(H5.H5Aexists_by_name(oid, ".", aname, HDF5Constants.H5P_DEFAULT))
1808                aid = H5.H5Aopen_by_name(oid, ".", aname, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
1809        }
1810        catch (HDF5LibraryException ex5) {
1811            log.debug("getAttrValue(): Failed to find attribute {} : Expected", aname);
1812        }
1813        catch (Exception ex) {
1814            log.debug("getAttrValue(): try to find attribute {}:", aname, ex);
1815        }
1816        if (aid > 0) {
1817            try {
1818                atid = H5.H5Aget_type(aid);
1819                long tmptid = atid;
1820                atid = H5.H5Tget_native_type(tmptid);
1821                try {
1822                    H5.H5Tclose(tmptid);
1823                }
1824                catch (Exception ex) {
1825                    log.debug("getAttrValue(): H5Tclose(tmptid {}) failure: ", tmptid, ex);
1826                }
1827
1828                asid = H5.H5Aget_space(aid);
1829                long adims[] = null;
1830
1831                int arank = H5.H5Sget_simple_extent_ndims(asid);
1832                if (arank > 0) {
1833                    adims = new long[arank];
1834                    H5.H5Sget_simple_extent_dims(asid, adims, null);
1835                }
1836                log.trace("getAttrValue(): adims={}", adims);
1837
1838                // retrieve the attribute value
1839                long lsize = 1;
1840                if (adims != null) {
1841                    for (int j = 0; j < adims.length; j++) {
1842                        lsize *= adims[j];
1843                    }
1844                }
1845                log.trace("getAttrValue(): lsize={}", lsize);
1846
1847                if (lsize < Integer.MIN_VALUE || lsize > Integer.MAX_VALUE) throw new Exception("Invalid int size");
1848
1849                H5Datatype dsDatatype = null;
1850                int nativeClass = H5.H5Tget_class(atid);
1851                if (nativeClass == HDF5Constants.H5T_REFERENCE)
1852                    dsDatatype = new H5ReferenceType(getFileFormat(), lsize, atid);
1853                else
1854                    dsDatatype = new H5Datatype(getFileFormat(), atid);
1855
1856                try {
1857                    avalue = H5Datatype.allocateArray(dsDatatype, (int) lsize);
1858                }
1859                catch (OutOfMemoryError e) {
1860                    log.debug("getAttrValue(): out of memory: ", e);
1861                    avalue = null;
1862                }
1863
1864                if (avalue != null) {
1865                    log.trace("getAttrValue(): read attribute id {} of size={}", atid, lsize);
1866                    H5.H5Aread(aid, atid, avalue);
1867
1868                    if (dsDatatype.isUnsigned()) {
1869                        log.trace("getAttrValue(): id {} is unsigned", atid);
1870                        avalue = convertFromUnsignedC(avalue, null);
1871                    }
1872                    if (dsDatatype.isReference())
1873                        ((H5ReferenceType)dsDatatype).setData(avalue);
1874                }
1875            }
1876            catch (Exception ex) {
1877                log.debug("getAttrValue(): try to get value for attribute {}: ", aname, ex);
1878            }
1879            finally {
1880                try {
1881                    H5.H5Tclose(atid);
1882                }
1883                catch (HDF5Exception ex) {
1884                    log.debug("getAttrValue(): H5Tclose(atid {}) failure: ", atid, ex);
1885                }
1886                try {
1887                    H5.H5Sclose(asid);
1888                }
1889                catch (HDF5Exception ex) {
1890                    log.debug("getAttrValue(): H5Sclose(asid {}) failure: ", asid, ex);
1891                }
1892                try {
1893                    H5.H5Aclose(aid);
1894                }
1895                catch (HDF5Exception ex) {
1896                    log.debug("getAttrValue(): H5Aclose(aid {}) failure: ", aid, ex);
1897                }
1898            }
1899        } // (aid > 0)
1900
1901        return avalue;
1902    }
1903
1904    private boolean isStringAttributeOf(long objID, String name, String value) {
1905        boolean retValue = false;
1906        long aid = HDF5Constants.H5I_INVALID_HID;
1907        long atid = HDF5Constants.H5I_INVALID_HID;
1908
1909        try {
1910            if (H5.H5Aexists_by_name(objID, ".", name, HDF5Constants.H5P_DEFAULT)) {
1911                aid = H5.H5Aopen_by_name(objID, ".", name, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
1912                atid = H5.H5Aget_type(aid);
1913                int size = (int)H5.H5Tget_size(atid);
1914                byte[] attrValue = new byte[size];
1915                H5.H5Aread(aid, atid, attrValue);
1916                String strValue = new String(attrValue).trim();
1917                retValue = strValue.equalsIgnoreCase(value);
1918            }
1919        }
1920        catch (Exception ex) {
1921            log.debug("isStringAttributeOf(): try to find out interlace mode:", ex);
1922        }
1923        finally {
1924            try {
1925                H5.H5Tclose(atid);
1926            }
1927            catch (HDF5Exception ex) {
1928                log.debug("isStringAttributeOf(): H5Tclose(atid {}) failure: ", atid, ex);
1929            }
1930            try {
1931                H5.H5Aclose(aid);
1932            }
1933            catch (HDF5Exception ex) {
1934                log.debug("isStringAttributeOf(): H5Aclose(aid {}) failure: ", aid, ex);
1935            }
1936        }
1937
1938        return retValue;
1939    }
1940
1941    /*
1942     * (non-Javadoc)
1943     *
1944     * @see hdf.object.Dataset#copy(hdf.object.Group, java.lang.String, long[], java.lang.Object)
1945     */
1946    @Override
1947    public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff) throws Exception {
1948        // must give a location to copy
1949        if (pgroup == null) {
1950            log.debug("copy(): Parent group is null");
1951            return null;
1952        }
1953
1954        Dataset dataset = null;
1955        long srcdid = HDF5Constants.H5I_INVALID_HID;
1956        long dstdid = HDF5Constants.H5I_INVALID_HID;
1957        long plist = HDF5Constants.H5I_INVALID_HID;
1958        long tid = HDF5Constants.H5I_INVALID_HID;
1959        long sid = HDF5Constants.H5I_INVALID_HID;
1960        String dname = null;
1961        String path = null;
1962
1963        if (pgroup.isRoot())
1964            path = HObject.SEPARATOR;
1965        else
1966            path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR;
1967        dname = path + dstName;
1968
1969        srcdid = open();
1970        if (srcdid >= 0) {
1971            try {
1972                tid = H5.H5Dget_type(srcdid);
1973                sid = H5.H5Screate_simple(dims.length, dims, null);
1974                plist = H5.H5Dget_create_plist(srcdid);
1975
1976                long[] chunks = new long[dims.length];
1977                boolean setChunkFlag = false;
1978                try {
1979                    H5.H5Pget_chunk(plist, dims.length, chunks);
1980                    for (int i = 0; i < dims.length; i++) {
1981                        if (dims[i] < chunks[i]) {
1982                            setChunkFlag = true;
1983                            if (dims[i] == 1)
1984                                chunks[i] = 1;
1985                            else
1986                                chunks[i] = dims[i] / 2;
1987                        }
1988                    }
1989                }
1990                catch (Exception ex) {
1991                    log.debug("copy(): chunk: ", ex);
1992                }
1993
1994                if (setChunkFlag)
1995                    H5.H5Pset_chunk(plist, dims.length, chunks);
1996
1997                try {
1998                    dstdid = H5.H5Dcreate(pgroup.getFID(), dname, tid, sid, HDF5Constants.H5P_DEFAULT, plist,
1999                            HDF5Constants.H5P_DEFAULT);
2000                }
2001                catch (Exception e) {
2002                    log.debug("copy(): H5Dcreate: ", e);
2003                }
2004                finally {
2005                    try {
2006                        H5.H5Dclose(dstdid);
2007                    }
2008                    catch (Exception ex2) {
2009                        log.debug("copy(): H5Dclose(dstdid {}) failure: ", dstdid, ex2);
2010                    }
2011                }
2012
2013                dataset = new H5ScalarDS(pgroup.getFileFormat(), dstName, path);
2014                if (buff != null) {
2015                    dataset.init();
2016                    dataset.write(buff);
2017                }
2018
2019                dstdid = dataset.open();
2020                if (dstdid >= 0) {
2021                    try {
2022                        H5File.copyAttributes(srcdid, dstdid);
2023                    }
2024                    finally {
2025                        try {
2026                            H5.H5Dclose(dstdid);
2027                        }
2028                        catch (Exception ex) {
2029                            log.debug("copy(): H5Dclose(dstdid {}) failure: ", dstdid, ex);
2030                        }
2031                    }
2032                }
2033            }
2034            finally {
2035                try {
2036                    H5.H5Pclose(plist);
2037                }
2038                catch (Exception ex) {
2039                    log.debug("copy(): H5Pclose(plist {}) failure: ", plist, ex);
2040                }
2041                try {
2042                    H5.H5Sclose(sid);
2043                }
2044                catch (Exception ex) {
2045                    log.debug("copy(): H5Sclose(sid {}) failure: ", sid, ex);
2046                }
2047                try {
2048                    H5.H5Tclose(tid);
2049                }
2050                catch (Exception ex) {
2051                    log.debug("copy(): H5Tclose(tid {}) failure: ", tid, ex);
2052                }
2053                try {
2054                    H5.H5Dclose(srcdid);
2055                }
2056                catch (Exception ex) {
2057                    log.debug("copy(): H5Dclose(srcdid {}) failure: ", srcdid, ex);
2058                }
2059            }
2060        }
2061
2062        pgroup.addToMemberList(dataset);
2063
2064        if (dataset != null)
2065            ((ScalarDS) dataset).setIsImage(isImage);
2066
2067        return dataset;
2068    }
2069
2070    /**
2071     * Get the number of pallettes for this object.
2072     *
2073     * @return the number of palettes if it has any, 0 otherwise.
2074     */
2075    public int getNumberOfPalettes() {
2076        return NumberOfPalettes;
2077    }
2078
2079    /*
2080     * (non-Javadoc)
2081     *
2082     * @see hdf.object.ScalarDS#getPalette()
2083     */
2084    @Override
2085    public byte[][] getPalette() {
2086        log.trace("getPalette(): NumberOfPalettes={}", NumberOfPalettes);
2087        if (NumberOfPalettes > 0)
2088            if (palette == null)
2089                palette = readPalette(0);
2090
2091        return palette;
2092    }
2093
2094    /*
2095     * (non-Javadoc)
2096     *
2097     * @see hdf.object.ScalarDS#getPaletteName(int)
2098     */
2099    @Override
2100    public String getPaletteName(int idx) {
2101        int count = readNumberOfPalettes();
2102        long did = HDF5Constants.H5I_INVALID_HID;
2103        long palID = HDF5Constants.H5I_INVALID_HID;
2104        String paletteName = null;
2105
2106        if (count < 1) {
2107            log.debug("getPaletteName(): no palettes are attached");
2108            return null;
2109        }
2110
2111        byte[][] refBuf = null;
2112
2113        did = open();
2114        if (did >= 0) {
2115            try {
2116                refBuf = getPaletteRefs(did);
2117                palID = H5.H5Ropen_object(refBuf[idx], HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
2118                paletteName = H5.H5Iget_name(palID);
2119            }
2120            catch (Exception ex) {
2121                ex.printStackTrace();
2122            }
2123            finally {
2124                close(palID);
2125                for (int i = 0; i < count; i++)
2126                    H5.H5Rdestroy(refBuf[i]);
2127                close(did);
2128            }
2129        }
2130
2131        return paletteName;
2132    }
2133
2134    /*
2135     * (non-Javadoc)
2136     *
2137     * @see hdf.object.ScalarDS#readPalette(int)
2138     */
2139    @Override
2140    public byte[][] readPalette(int idx) {
2141        byte[][] thePalette = null;
2142        int count = readNumberOfPalettes();
2143        long did = HDF5Constants.H5I_INVALID_HID;
2144        long palID = HDF5Constants.H5I_INVALID_HID;
2145        long tid = HDF5Constants.H5I_INVALID_HID;
2146        log.trace("readPalette(): palette count={}", count);
2147
2148        if (count < 1) {
2149            log.debug("readPalette(): no palettes are attached");
2150            return null;
2151        }
2152
2153        byte[] p = null;
2154        byte[][] refBuf = null;
2155
2156        did = open();
2157        if (did >= 0) {
2158            try {
2159                refBuf = getPaletteRefs(did);
2160                palID = H5.H5Ropen_object(refBuf[idx], HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
2161                log.trace("readPalette(): H5Ropen_object: {}", palID);
2162                tid = H5.H5Dget_type(palID);
2163
2164                // support only 3*256 byte palette data
2165                if (H5.H5Dget_storage_size(palID) <= 768) {
2166                    p = new byte[3 * 256];
2167                    H5.H5Dread(palID, tid, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, p);
2168                }
2169            }
2170            catch (HDF5Exception ex) {
2171                log.debug("readPalette(): failure: ", ex);
2172                p = null;
2173            }
2174            finally {
2175                try {
2176                    H5.H5Tclose(tid);
2177                }
2178                catch (HDF5Exception ex2) {
2179                    log.debug("readPalette(): H5Tclose(tid {}) failure: ", tid, ex2);
2180                }
2181                close(palID);
2182                for (int i = 0; i < count; i++)
2183                    H5.H5Rdestroy(refBuf[i]);
2184                close(did);
2185            }
2186        }
2187
2188        if (p != null) {
2189            thePalette = new byte[3][256];
2190            for (int i = 0; i < 256; i++) {
2191                thePalette[0][i] = p[i * 3];
2192                thePalette[1][i] = p[i * 3 + 1];
2193                thePalette[2][i] = p[i * 3 + 2];
2194            }
2195        }
2196
2197        return thePalette;
2198    }
2199
2200    private static byte[] parseFillValue(Datatype type, Object fillValue) throws Exception {
2201        byte[] data = null;
2202
2203        if (type == null || fillValue == null) {
2204            log.debug("parseFillValue(): datatype or fill value is null");
2205            return null;
2206        }
2207
2208        int datatypeClass = type.getDatatypeClass();
2209        int datatypeSize = (int)type.getDatatypeSize();
2210
2211        double valDbl = 0;
2212        String valStr = null;
2213
2214        if (fillValue instanceof String)
2215            valStr = (String) fillValue;
2216        else if (fillValue.getClass().isArray())
2217            valStr = Array.get(fillValue, 0).toString();
2218
2219        if (!type.isString()) {
2220            try {
2221                valDbl = Double.parseDouble(valStr);
2222            }
2223            catch (NumberFormatException ex) {
2224                log.debug("parseFillValue(): parse error: ", ex);
2225                return null;
2226            }
2227        }
2228
2229        try {
2230            switch (datatypeClass) {
2231                case Datatype.CLASS_INTEGER:
2232                case Datatype.CLASS_ENUM:
2233                case Datatype.CLASS_CHAR:
2234                    log.trace("parseFillValue(): class CLASS_INT-ENUM-CHAR");
2235                    if (datatypeSize == 1)
2236                        data = new byte[] { (byte) valDbl };
2237                    else if (datatypeSize == 2)
2238                        data = HDFNativeData.shortToByte((short) valDbl);
2239                    else if (datatypeSize == 8)
2240                        data = HDFNativeData.longToByte((long) valDbl);
2241                    else
2242                        data = HDFNativeData.intToByte((int) valDbl);
2243                    break;
2244                case Datatype.CLASS_FLOAT:
2245                    log.trace("parseFillValue(): class CLASS_FLOAT");
2246                    if (datatypeSize > 8)
2247                        data =  valStr.getBytes();
2248                    else if (datatypeSize == 8)
2249                        data = HDFNativeData.doubleToByte(valDbl);
2250                    else
2251                        data = HDFNativeData.floatToByte((float) valDbl);
2252                    break;
2253                case Datatype.CLASS_STRING:
2254                    log.trace("parseFillValue(): class CLASS_STRING");
2255                    if (valStr != null)
2256                        data = valStr.getBytes();
2257                    break;
2258                case Datatype.CLASS_REFERENCE:
2259                    log.trace("parseFillValue(): class CLASS_REFERENCE");
2260                    data = HDFNativeData.longToByte((long) valDbl);
2261                    break;
2262                default:
2263                    log.debug("parseFillValue(): datatypeClass unknown");
2264                    break;
2265            } // (datatypeClass)
2266        }
2267        catch (Exception ex) {
2268            log.debug("parseFillValue(): failure: ", ex);
2269            data = null;
2270        }
2271
2272        return data;
2273    }
2274
2275    /**
2276     * reads references of palettes to count the numberOfPalettes.
2277     *
2278     * @return the number of palettes referenced.
2279     */
2280    public int readNumberOfPalettes() {
2281        log.trace("readNumberOfPalettes(): isInited={}", isInited());
2282        if (!isInited())
2283            init(); // init will be called to get refs
2284
2285        return NumberOfPalettes;
2286    }
2287
2288    /**
2289     * reads references of palettes to calculate the numberOfPalettes.
2290     */
2291    private int readNumberOfPalette(long did) {
2292        long aid = HDF5Constants.H5I_INVALID_HID;
2293        long sid = HDF5Constants.H5I_INVALID_HID;
2294        long atype = HDF5Constants.H5I_INVALID_HID;
2295        int size = 0;
2296        int rank = 0;
2297        byte[] refbuf = null;
2298        log.trace("readNumberOfPalette(): did={}", did);
2299
2300        try {
2301            if(H5.H5Aexists_by_name(did, ".", "PALETTE", HDF5Constants.H5P_DEFAULT)) {
2302                aid = H5.H5Aopen_by_name(did, ".", "PALETTE", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
2303                sid = H5.H5Aget_space(aid);
2304                rank = H5.H5Sget_simple_extent_ndims(sid);
2305                size = 1;
2306                if (rank > 0) {
2307                    long[] dims = new long[rank];
2308                    H5.H5Sget_simple_extent_dims(sid, dims, null);
2309                    log.trace("readNumberOfPalette(): rank={}, dims={}", rank, dims);
2310                    for (int i = 0; i < rank; i++)
2311                        size *= (int) dims[i];
2312                }
2313                log.trace("readNumberOfPalette(): size={}", size);
2314
2315                if ((size * HDF5Constants.H5R_REF_BUF_SIZE) < Integer.MIN_VALUE || (size * HDF5Constants.H5R_REF_BUF_SIZE) > Integer.MAX_VALUE)
2316                    throw new HDF5Exception("Invalid int size");
2317            }
2318        }
2319        catch (HDF5Exception ex) {
2320            log.debug("readNumberOfPalette(): Palette attribute search failed: Expected", ex);
2321            refbuf = null;
2322        }
2323        finally {
2324            try {
2325                H5.H5Tclose(atype);
2326            }
2327            catch (HDF5Exception ex2) {
2328                log.debug("readNumberOfPalette(): H5Tclose(atype {}) failure: ", atype, ex2);
2329            }
2330            try {
2331                H5.H5Sclose(sid);
2332            }
2333            catch (HDF5Exception ex2) {
2334                log.debug("readNumberOfPalette(): H5Sclose(sid {}) failure: ", sid, ex2);
2335            }
2336            try {
2337                H5.H5Aclose(aid);
2338            }
2339            catch (HDF5Exception ex2) {
2340                log.debug("readNumberOfPalette(): H5Aclose(aid {}) failure: ", aid, ex2);
2341            }
2342        }
2343
2344        return size;
2345    }
2346
2347    /**
2348     * reads references of palettes into a byte array Each reference requires eight bytes storage. Therefore, the array
2349     * length is 8*numberOfPalettes.
2350     */
2351    private byte[][] getPaletteRefs(long did) {
2352        long aid = HDF5Constants.H5I_INVALID_HID;
2353        long sid = HDF5Constants.H5I_INVALID_HID;
2354        long atype = HDF5Constants.H5I_INVALID_HID;
2355        int size = 0;
2356        int rank = 0;
2357        byte[][] refBuf = null;
2358        log.trace("getPaletteRefs(): did={}", did);
2359
2360        try {
2361            if(H5.H5Aexists_by_name(did, ".", "PALETTE", HDF5Constants.H5P_DEFAULT)) {
2362                aid = H5.H5Aopen_by_name(did, ".", "PALETTE", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
2363                sid = H5.H5Aget_space(aid);
2364                rank = H5.H5Sget_simple_extent_ndims(sid);
2365                size = 1;
2366                if (rank > 0) {
2367                    long[] dims = new long[rank];
2368                    H5.H5Sget_simple_extent_dims(sid, dims, null);
2369                    log.trace("getPaletteRefs(): rank={}, dims={}", rank, dims);
2370                    for (int i = 0; i < rank; i++)
2371                        size *= (int) dims[i];
2372                }
2373                log.trace("getPaletteRefs(): size={}", size);
2374
2375                if ((size * HDF5Constants.H5R_REF_BUF_SIZE) < Integer.MIN_VALUE || (size * HDF5Constants.H5R_REF_BUF_SIZE) > Integer.MAX_VALUE)
2376                    throw new HDF5Exception("Invalid int size");
2377                refBuf = new byte[size][HDF5Constants.H5R_REF_BUF_SIZE];
2378
2379                H5.H5Aread(aid, HDF5Constants.H5T_STD_REF, refBuf);
2380            }
2381        }
2382        catch (HDF5Exception ex) {
2383            log.debug("getPaletteRefs(): Palette attribute search failed: Expected", ex);
2384            refBuf = null;
2385        }
2386        finally {
2387            try {
2388                H5.H5Sclose(sid);
2389            }
2390            catch (HDF5Exception ex2) {
2391                log.debug("getPaletteRefs(): H5Sclose(sid {}) failure: ", sid, ex2);
2392            }
2393            try {
2394                H5.H5Aclose(aid);
2395            }
2396            catch (HDF5Exception ex2) {
2397                log.debug("getPaletteRefs(): H5Aclose(aid {}) failure: ", aid, ex2);
2398            }
2399        }
2400
2401        return refBuf;
2402    }
2403
2404    /**
2405     * H5Dset_extent verifies that the dataset is at least of size size, extending it if necessary. The dimensionality
2406     * of size is the same as that of the dataspace of the dataset being changed.
2407     *
2408     * This function can be applied to the following datasets: 1) Any dataset with unlimited dimensions 2) A dataset
2409     * with fixed dimensions if the current dimension sizes are less than the maximum sizes set with maxdims (see
2410     * H5Screate_simple)
2411     *
2412     * @param newDims the dimension target size
2413     *
2414     * @throws HDF5Exception
2415     *             If there is an error at the HDF5 library level.
2416     */
2417    public void extend(long[] newDims) throws HDF5Exception {
2418        long did = HDF5Constants.H5I_INVALID_HID;
2419        long sid = HDF5Constants.H5I_INVALID_HID;
2420
2421        did = open();
2422        if (did >= 0) {
2423            try {
2424                H5.H5Dset_extent(did, newDims);
2425                H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_GLOBAL);
2426                sid = H5.H5Dget_space(did);
2427                long[] checkDims = new long[rank];
2428                H5.H5Sget_simple_extent_dims(sid, checkDims, null);
2429                log.trace("extend(): rank={}, checkDims={}", rank, checkDims);
2430                for (int i = 0; i < rank; i++) {
2431                    if (checkDims[i] != newDims[i]) {
2432                        log.debug("extend(): error extending dataset");
2433                        throw new HDF5Exception("error extending dataset " + getName());
2434                    }
2435                }
2436                dims = checkDims;
2437            }
2438            catch (Exception e) {
2439                log.debug("extend(): failure: ", e);
2440                throw new HDF5Exception(e.getMessage());
2441            }
2442            finally {
2443                if (sid > 0)
2444                    H5.H5Sclose(sid);
2445
2446                close(did);
2447            }
2448        }
2449    }
2450
2451    /*
2452     * (non-Javadoc)
2453     *
2454     * @see hdf.object.Dataset#isVirtual()
2455     */
2456    @Override
2457    public boolean isVirtual() {
2458        return isVirtual;
2459    }
2460
2461    /*
2462     * (non-Javadoc)
2463     *
2464     * @see hdf.object.Dataset#getVirtualFilename(int)
2465     */
2466    @Override
2467    public String getVirtualFilename(int index) {
2468        if(isVirtual)
2469            return virtualNameList.get(index);
2470        else
2471            return null;
2472    }
2473
2474    /*
2475     * (non-Javadoc)
2476     *
2477     * @see hdf.object.Dataset#getVirtualMaps()
2478     */
2479    @Override
2480    public int getVirtualMaps() {
2481        if(isVirtual)
2482            return virtualNameList.size();
2483        else
2484            return -1;
2485    }
2486
2487    /*
2488     * (non-Javadoc)
2489     *
2490     * @see hdf.object.Dataset#toString(String delimiter, int maxItems)
2491     */
2492    @Override
2493    public String toString(String delimiter, int maxItems) {
2494        Object theData = originalBuf;
2495        if (theData == null) {
2496            log.debug("toString: value is null");
2497            return null;
2498        }
2499
2500        if (theData instanceof List<?>) {
2501            log.trace("toString: value is list");
2502            return null;
2503        }
2504
2505        Class<? extends Object> valClass = theData.getClass();
2506
2507        if (!valClass.isArray()) {
2508            log.trace("toString: finish - not array");
2509            String strValue = theData.toString();
2510            if (maxItems > 0 && strValue.length() > maxItems)
2511                // truncate the extra characters
2512                strValue = strValue.substring(0, maxItems);
2513            return strValue;
2514        }
2515
2516        // value is an array
2517        StringBuilder sb = new StringBuilder();
2518        long lsize = 1;
2519        for (int j = 0; j < dims.length; j++)
2520            lsize *= dims[j];
2521
2522        log.trace("toString: isStdRef={} Array.getLength={}", ((H5Datatype) getDatatype()).isStdRef(), Array.getLength(theData));
2523        if (((H5Datatype) getDatatype()).isStdRef()) {
2524            String cname = valClass.getName();
2525            char dname = cname.charAt(cname.lastIndexOf('[') + 1);
2526            log.trace("toString: isStdRef with cname={} dname={}", cname, dname);
2527            for (int i = 0; i < (int)lsize; i++) {
2528                int refIndex = HDF5Constants.H5R_REF_BUF_SIZE * i;
2529                byte[] refarr = new byte[(int) HDF5Constants.H5R_REF_BUF_SIZE];
2530                System.arraycopy((byte[])theData, refIndex, refarr, 0, (int)HDF5Constants.H5R_REF_BUF_SIZE);
2531                String ref_str = ((H5ReferenceType) getDatatype()).getReferenceRegion(refarr, false);
2532                log.trace("toString: ref_str[{}]={}", i, ref_str);
2533                if (i > 0)
2534                    sb.append(", ");
2535                sb.append(ref_str);
2536
2537//                int n = ref_str.length();
2538//                if (maxItems > 0) {
2539//                    if (n > maxItems)
2540//                        break;
2541//                    else
2542//                        maxItems -= n;
2543//                }
2544            }
2545            return sb.toString();
2546        }
2547        return super.toString(delimiter, maxItems);
2548    }
2549
2550}