001/*****************************************************************************
002 * Copyright by The HDF Group.                                               *
003 * Copyright by the Board of Trustees of the University of Illinois.         *
004 * All rights reserved.                                                      *
005 *                                                                           *
006 * This file is part of the HDF Java Products distribution.                  *
007 * The full copyright notice, including terms governing use, modification,   *
008 * and redistribution, is contained in the files COPYING and Copyright.html. *
009 * COPYING can be found at the root of the source code distribution tree.    *
010 * Or, see https://support.hdfgroup.org/products/licenses.html               *
011 * If you do not have access to either file, you may request a copy from     *
012 * help@hdfgroup.org.                                                        *
013 ****************************************************************************/
014
015package hdf.object.h5;
016
017import java.lang.reflect.Array;
018import java.math.BigDecimal;
019import java.text.DecimalFormat;
020import java.util.ArrayList;
021import java.util.List;
022import java.util.Vector;
023
024import org.slf4j.Logger;
025import org.slf4j.LoggerFactory;
026
027import hdf.hdf5lib.H5;
028import hdf.hdf5lib.HDF5Constants;
029import hdf.hdf5lib.HDFArray;
030import hdf.hdf5lib.HDFNativeData;
031import hdf.hdf5lib.exceptions.HDF5DataFiltersException;
032import hdf.hdf5lib.exceptions.HDF5Exception;
033import hdf.hdf5lib.exceptions.HDF5LibraryException;
034import hdf.hdf5lib.structs.H5O_info_t;
035import hdf.hdf5lib.structs.H5O_token_t;
036
037import hdf.object.Attribute;
038import hdf.object.Dataset;
039import hdf.object.Datatype;
040import hdf.object.FileFormat;
041import hdf.object.Group;
042import hdf.object.HObject;
043import hdf.object.MetaDataContainer;
044import hdf.object.ScalarDS;
045
046import hdf.object.h5.H5Attribute;
047import hdf.object.h5.H5MetaDataContainer;
048import hdf.object.h5.H5ReferenceType;
049
050/**
051 * H5ScalarDS describes a multi-dimension array of HDF5 scalar or atomic data types, such as byte, int, short, long,
052 * float, double and string, and operations performed on the scalar dataset.
053 *
054 * The library predefines a modest number of datatypes. For details,
055 * read <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/HDF5_Users_Guide/Datatypes/HDF5_Datatypes.htm">HDF5 Datatypes</a>
056 *
057 * @version 1.1 9/4/2007
058 * @author Peter X. Cao
059 */
060public class H5ScalarDS extends ScalarDS implements MetaDataContainer
061{
062    private static final long serialVersionUID = 2887517608230611642L;
063
064    private static final Logger log = LoggerFactory.getLogger(H5ScalarDS.class);
065
066    /**
067     * The metadata object for this data object. Members of the metadata are instances of Attribute.
068     */
069    private H5MetaDataContainer objMetadata;
070
071    /** the object properties */
072    private H5O_info_t objInfo;
073
074    /** the number of palettes */
075    private int NumberOfPalettes;
076
077    /** flag to indicate if the dataset is an external dataset */
078    private boolean isExternal = false;
079
080    /** flag to indicate if the dataset is a virtual dataset */
081    private boolean isVirtual = false;
082    /** the list of virtual names */
083    private List<String> virtualNameList;
084
085    /**
086     * flag to indicate if the dataset buffers should be refreshed.
087     */
088    protected boolean refresh = false;
089
090    /**
091     * flag to indicate if the datatype in file is the same as dataype in memory
092     */
093    protected boolean isNativeDatatype = false;
094
095    /**
096     * Constructs an instance of a H5 scalar dataset with given file, dataset name and path.
097     *
098     * For example, in H5ScalarDS(h5file, "dset", "/arrays/"), "dset" is the name of the dataset, "/arrays" is the group
099     * path of the dataset.
100     *
101     * @param theFile
102     *            the file that contains the data object.
103     * @param theName
104     *            the name of the data object, e.g. "dset".
105     * @param thePath
106     *            the full path of the data object, e.g. "/arrays/".
107     */
108    public H5ScalarDS(FileFormat theFile, String theName, String thePath) {
109        this(theFile, theName, thePath, null);
110    }
111
112    /**
113     * @deprecated Not for public use in the future.<br>
114     *             Using {@link #H5ScalarDS(FileFormat, String, String)}
115     *
116     * @param theFile
117     *            the file that contains the data object.
118     * @param theName
119     *            the name of the data object, e.g. "dset".
120     * @param thePath
121     *            the full path of the data object, e.g. "/arrays/".
122     * @param oid
123     *            the oid of the data object.
124     */
125    @Deprecated
126    public H5ScalarDS(FileFormat theFile, String theName, String thePath, long[] oid) {
127        super(theFile, theName, thePath, oid);
128        unsignedConverted = false;
129        NumberOfPalettes = 0;
130        objMetadata = new H5MetaDataContainer(theFile, theName, thePath, this);
131
132        if (theFile != null) {
133            if (oid == null) {
134                // retrieve the object ID
135                byte[] refBuf = null;
136                try {
137                    refBuf = H5.H5Rcreate_object(theFile.getFID(), this.getFullName(), HDF5Constants.H5P_DEFAULT);
138                    this.oid = HDFNativeData.byteToLong(refBuf);
139                    log.trace("constructor REF {} to OID {}", refBuf, this.oid);
140                }
141                catch (Exception ex) {
142                    log.debug("constructor ID {} for {} failed H5Rcreate_object", theFile.getFID(), this.getFullName());
143                }
144                finally {
145                    if (refBuf != null)
146                        H5.H5Rdestroy(refBuf);
147                }
148            }
149            log.trace("constructor OID {}", this.oid);
150            try {
151                objInfo = H5.H5Oget_info_by_name(theFile.getFID(), this.getFullName(), HDF5Constants.H5O_INFO_BASIC, HDF5Constants.H5P_DEFAULT);
152            }
153            catch (Exception ex) {
154                objInfo = new H5O_info_t(-1L, null, 0, 0, 0L, 0L, 0L, 0L, 0L);
155            }
156        }
157        else {
158            this.oid = null;
159            objInfo = new H5O_info_t(-1L, null, 0, 0, 0L, 0L, 0L, 0L, 0L);
160        }
161    }
162
163    /*
164     * (non-Javadoc)
165     *
166     * @see hdf.object.HObject#open()
167     */
168    @Override
169    public long open() {
170        long did = HDF5Constants.H5I_INVALID_HID;
171
172        if (getFID() < 0)
173            log.trace("open(): file id for:{} is invalid", getPath() + getName());
174        else {
175            try {
176                did = H5.H5Dopen(getFID(), getPath() + getName(), HDF5Constants.H5P_DEFAULT);
177                log.trace("open(): did={}", did);
178            }
179            catch (HDF5Exception ex) {
180                log.debug("open(): Failed to open dataset {}", getPath() + getName(), ex);
181                did = HDF5Constants.H5I_INVALID_HID;
182            }
183        }
184
185        return did;
186    }
187
188    /*
189     * (non-Javadoc)
190     *
191     * @see hdf.object.HObject#close(int)
192     */
193    @Override
194    public void close(long did) {
195        if (did >= 0) {
196            try {
197                H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_LOCAL);
198            }
199            catch (Exception ex) {
200                log.debug("close(): H5Fflush(did {}) failure: ", did, ex);
201            }
202            try {
203                H5.H5Dclose(did);
204            }
205            catch (HDF5Exception ex) {
206                log.debug("close(): H5Dclose(did {}) failure: ", did, ex);
207            }
208        }
209    }
210
211    /**
212     * Retrieves datatype and dataspace information from file and sets the dataset
213     * in memory.
214     *
215     * The init() is designed to support lazy operation in a dataset object. When a
216     * data object is retrieved from file, the datatype, dataspace and raw data are
217     * not loaded into memory. When it is asked to read the raw data from file,
218     * init() is first called to get the datatype and dataspace information, then
219     * load the raw data from file.
220     *
221     * init() is also used to reset the selection of a dataset (start, stride and
222     * count) to the default, which is the entire dataset for 1D or 2D datasets. In
223     * the following example, init() at step 1) retrieves datatype and dataspace
224     * information from file. getData() at step 3) reads only one data point. init()
225     * at step 4) resets the selection to the whole dataset. getData() at step 4)
226     * reads the values of whole dataset into memory.
227     *
228     * <pre>
229     * dset = (Dataset) file.get(NAME_DATASET);
230     *
231     * // 1) get datatype and dataspace information from file
232     * dset.init();
233     * rank = dset.getRank(); // rank = 2, a 2D dataset
234     * count = dset.getSelectedDims();
235     * start = dset.getStartDims();
236     * dims = dset.getDims();
237     *
238     * // 2) select only one data point
239     * for (int i = 0; i &lt; rank; i++) {
240     *     start[0] = 0;
241     *     count[i] = 1;
242     * }
243     *
244     * // 3) read one data point
245     * data = dset.getData();
246     *
247     * // 4) reset selection to the whole dataset
248     * dset.init();
249     *
250     * // 5) clean the memory data buffer
251     * dset.clearData();
252     *
253     * // 6) Read the whole dataset
254     * data = dset.getData();
255     * </pre>
256     */
257    @Override
258    public void init() {
259        if (inited) {
260            // already called. Initialize only once
261            resetSelection();
262            log.trace("init(): H5ScalarDS already initialized");
263            return;
264        }
265
266        long did = HDF5Constants.H5I_INVALID_HID;
267        long tid = HDF5Constants.H5I_INVALID_HID;
268        long sid = HDF5Constants.H5I_INVALID_HID;
269        long nativeTID = HDF5Constants.H5I_INVALID_HID;
270
271        did = open();
272        if (did >= 0) {
273            try {
274                H5.H5Drefresh(did);
275            }
276            catch (Exception ex) {
277                log.debug("H5Drefresh(): ", ex);
278            }
279            // check if it is an external or virtual dataset
280            long pid = HDF5Constants.H5I_INVALID_HID;
281            try {
282                pid = H5.H5Dget_create_plist(did);
283                try {
284                    int nfiles = H5.H5Pget_external_count(pid);
285                    isExternal = (nfiles > 0);
286                    int layoutType = H5.H5Pget_layout(pid);
287                    if (isVirtual = (layoutType == HDF5Constants.H5D_VIRTUAL)) {
288                        try {
289                            long vmaps = H5.H5Pget_virtual_count(pid);
290                            if (vmaps > 0) {
291                                virtualNameList = new Vector<>();
292                                for (long next = 0; next < vmaps; next++) {
293                                    try {
294                                        String fname = H5.H5Pget_virtual_filename(pid, next);
295                                        virtualNameList.add(fname);
296                                        log.trace("init(): virtualNameList[{}]={}", next, fname);
297                                    }
298                                    catch (Exception err) {
299                                        log.trace("init(): vds[{}] continue", next);
300                                    }
301                                }
302                            }
303                        }
304                        catch (Exception err) {
305                            log.debug("init(): vds count error: ", err);
306                        }
307                    }
308                    log.trace("init(): pid={} nfiles={} isExternal={} isVirtual={}", pid, nfiles, isExternal, isVirtual);
309                }
310                catch (Exception ex) {
311                    log.debug("init(): check if it is an external or virtual dataset: ", ex);
312                }
313            }
314            catch (Exception ex) {
315                log.debug("init(): H5Dget_create_plist() failure: ", ex);
316            }
317            finally {
318                try {
319                    H5.H5Pclose(pid);
320                }
321                catch (Exception ex) {
322                    log.debug("init(): H5Pclose(pid {}) failure: ", pid, ex);
323                }
324            }
325
326            NumberOfPalettes = readNumberOfPalette(did);
327
328            try {
329                sid = H5.H5Dget_space(did);
330                rank = H5.H5Sget_simple_extent_ndims(sid);
331                space_type = H5.H5Sget_simple_extent_type(sid);
332                tid = H5.H5Dget_type(did);
333                log.trace("init(): tid={} sid={} rank={} space_type={} ", tid, sid, rank, space_type);
334
335                if (rank == 0) {
336                    // a scalar data point
337                    isScalar = true;
338                    rank = 1;
339                    dims = new long[] { 1 };
340                    log.trace("init(): rank is a scalar data point");
341                }
342                else {
343                    isScalar = false;
344                    dims = new long[rank];
345                    maxDims = new long[rank];
346                    H5.H5Sget_simple_extent_dims(sid, dims, maxDims);
347                    log.trace("init(): rank={}, dims={}, maxDims={}", rank, dims, maxDims);
348                }
349
350                try {
351                    int nativeClass = H5.H5Tget_class(tid);
352                    if (nativeClass == HDF5Constants.H5T_REFERENCE) {
353                        long lsize = 1;
354                        if (rank > 0) {
355                            log.trace("init():rank={}, dims={}", rank, dims);
356                            for (int j = 0; j < dims.length; j++) {
357                                lsize *= dims[j];
358                            }
359                        }
360                        datatype = new H5ReferenceType(getFileFormat(), lsize, tid);
361                    }
362                    else
363                        datatype = new H5Datatype(getFileFormat(), tid);
364
365                    log.trace("init(): tid={} is tclass={} has isText={} : isNamed={} :  isVLEN={} : isEnum={} : isUnsigned={} : isStdRef={} : isRegRef={}",
366                            tid, datatype.getDatatypeClass(), ((H5Datatype) datatype).isText(), datatype.isNamed(), datatype.isVLEN(),
367                            datatype.isEnum(), datatype.isUnsigned(), ((H5Datatype) datatype).isStdRef(), ((H5Datatype) datatype).isRegRef());
368                }
369                catch (Exception ex) {
370                    log.debug("init(): failed to create datatype for dataset: ", ex);
371                    datatype = null;
372                }
373
374                // Check if the datatype in the file is the native datatype
375                try {
376                    nativeTID = H5.H5Tget_native_type(tid);
377                    isNativeDatatype = H5.H5Tequal(tid, nativeTID);
378                    log.trace("init(): isNativeDatatype={}", isNativeDatatype);
379                }
380                catch (Exception ex) {
381                    log.debug("init(): check if native type failure: ", ex);
382                }
383
384                try {
385                    pid = H5.H5Dget_create_plist(did);
386                    int[] fillStatus = { 0 };
387                    if (H5.H5Pfill_value_defined(pid, fillStatus) >= 0) {
388                        // Check if fill value is user-defined before retrieving it.
389                        if (fillStatus[0] == HDF5Constants.H5D_FILL_VALUE_USER_DEFINED) {
390                            try {
391                                fillValue = H5Datatype.allocateArray((H5Datatype) datatype, 1);
392                            }
393                            catch (OutOfMemoryError e) {
394                                log.debug("init(): out of memory: ", e);
395                                fillValue = null;
396                            }
397                            catch (Exception ex) {
398                                log.debug("init(): allocate fill value buffer failed: ", ex);
399                                fillValue = null;
400                            }
401
402                            log.trace("init(): fillValue={}", fillValue);
403                            try {
404                                H5.H5Pget_fill_value(pid, nativeTID, fillValue);
405                                log.trace("init(): H5Pget_fill_value={}", fillValue);
406                                if (fillValue != null) {
407                                    if (datatype.isUnsigned() && !isFillValueConverted) {
408                                        fillValue = ScalarDS.convertFromUnsignedC(fillValue, null);
409                                        isFillValueConverted = true;
410                                    }
411
412                                    int n = Array.getLength(fillValue);
413                                    for (int i = 0; i < n; i++)
414                                        addFilteredImageValue((Number) Array.get(fillValue, i));
415                                }
416                            }
417                            catch (Exception ex2) {
418                                log.debug("init(): fill value was defined: ", ex2);
419                                fillValue = null;
420                            }
421                        }
422                    }
423                }
424                catch (HDF5Exception ex) {
425                    log.debug("init(): check if fill value is defined failure: ", ex);
426                }
427                finally {
428                    try {
429                        H5.H5Pclose(pid);
430                    }
431                    catch (Exception ex) {
432                        log.debug("init(): H5Pclose(pid {}) failure: ", pid, ex);
433                    }
434                }
435
436                inited = true;
437            }
438            catch (HDF5Exception ex) {
439                log.debug("init(): ", ex);
440            }
441            finally {
442                try {
443                    H5.H5Tclose(nativeTID);
444                }
445                catch (Exception ex2) {
446                    log.debug("init(): H5Tclose(nativeTID {}) failure: ", nativeTID, ex2);
447                }
448                try {
449                    H5.H5Tclose(tid);
450                }
451                catch (HDF5Exception ex2) {
452                    log.debug("init(): H5Tclose(tid {}) failure: ", tid, ex2);
453                }
454                try {
455                    H5.H5Sclose(sid);
456                }
457                catch (HDF5Exception ex2) {
458                    log.debug("init(): H5Sclose(sid {}) failure: ", sid, ex2);
459                }
460            }
461
462            // check for the type of image and interlace mode
463            // it is a true color image at one of three cases:
464            // 1) IMAGE_SUBCLASS = IMAGE_TRUECOLOR,
465            // 2) INTERLACE_MODE = INTERLACE_PIXEL,
466            // 3) INTERLACE_MODE = INTERLACE_PLANE
467            if ((rank >= 3) && isImage) {
468                interlace = -1;
469                isTrueColor = isStringAttributeOf(did, "IMAGE_SUBCLASS", "IMAGE_TRUECOLOR");
470
471                if (isTrueColor) {
472                    interlace = INTERLACE_PIXEL;
473                    if (isStringAttributeOf(did, "INTERLACE_MODE", "INTERLACE_PLANE")) {
474                        interlace = INTERLACE_PLANE;
475                    }
476                }
477            }
478
479            close(did);
480
481            startDims = new long[rank];
482            selectedDims = new long[rank];
483
484            resetSelection();
485        }
486        else {
487            log.debug("init(): failed to open dataset");
488        }
489        refresh = false;
490    }
491
492    /**
493     * Get the token for this object.
494     *
495     * @return true if it has any attributes, false otherwise.
496     */
497    public long[] getToken() {
498        H5O_token_t token = objInfo.token;
499        return HDFNativeData.byteToLong(token.data);
500    }
501
502    /**
503     * Check if the object has any attributes attached.
504     *
505     * @return true if it has any attributes, false otherwise.
506     */
507    @Override
508    public boolean hasAttribute() {
509        objInfo.num_attrs = objMetadata.getObjectAttributeSize();
510
511        if (objInfo.num_attrs < 0) {
512            long did = open();
513            if (did >= 0) {
514                objInfo.num_attrs = 0;
515
516                try {
517                    objInfo = H5.H5Oget_info(did);
518
519                    if(objInfo.num_attrs > 0) {
520                        // test if it is an image
521                        // check image
522                        Object avalue = getAttrValue(did, "CLASS");
523                        if (avalue != null) {
524                            try {
525                                isImageDisplay = isImage = "IMAGE".equalsIgnoreCase(new String((byte[]) avalue).trim());
526                                log.trace("hasAttribute(): isImageDisplay dataset: {} with value = {}", isImageDisplay, avalue);
527                            }
528                            catch (Exception err) {
529                                log.debug("hasAttribute(): check image: ", err);
530                            }
531                        }
532
533                        // retrieve the IMAGE_MINMAXRANGE
534                        avalue = getAttrValue(did, "IMAGE_MINMAXRANGE");
535                        if (avalue != null) {
536                            double x0 = 0;
537                            double x1 = 0;
538                            try {
539                                x0 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 0).toString());
540                                x1 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 1).toString());
541                            }
542                            catch (Exception ex2) {
543                                x0 = x1 = 0;
544                            }
545                            if (x1 > x0) {
546                                imageDataRange = new double[2];
547                                imageDataRange[0] = x0;
548                                imageDataRange[1] = x1;
549                            }
550                        }
551
552                        try {
553                            checkCFconvention(did);
554                        }
555                        catch (Exception ex) {
556                            log.debug("hasAttribute(): checkCFconvention(did {}):", did, ex);
557                        }
558                    }
559                }
560                catch (Exception ex) {
561                    objInfo.num_attrs = 0;
562                    log.debug("hasAttribute(): get object info failure: ", ex);
563                }
564                finally {
565                    close(did);
566                }
567                objMetadata.setObjectAttributeSize((int) objInfo.num_attrs);
568            }
569            else {
570                log.debug("hasAttribute(): could not open dataset");
571            }
572        }
573
574        log.trace("hasAttribute(): nAttributes={}", objInfo.num_attrs);
575        return (objInfo.num_attrs > 0);
576    }
577
578    /**
579     * Returns the datatype of the data object.
580     *
581     * @return the datatype of the data object.
582     */
583    @Override
584    public Datatype getDatatype() {
585        if (!inited)
586            init();
587
588        if (datatype == null) {
589            long did = HDF5Constants.H5I_INVALID_HID;
590            long tid = HDF5Constants.H5I_INVALID_HID;
591
592            did = open();
593            if (did >= 0) {
594                try {
595                    tid = H5.H5Dget_type(did);
596                    log.trace("getDatatype(): isNativeDatatype={}", isNativeDatatype);
597                    if (!isNativeDatatype) {
598                        long tmptid = -1;
599                        try {
600                            tmptid = H5Datatype.toNative(tid);
601                            if (tmptid >= 0) {
602                                try {
603                                    H5.H5Tclose(tid);
604                                }
605                                catch (Exception ex2) {
606                                    log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex2);
607                                }
608                                tid = tmptid;
609                            }
610                        }
611                        catch (Exception ex) {
612                            log.debug("getDatatype(): toNative: ", ex);
613                        }
614                    }
615                    int nativeClass = H5.H5Tget_class(tid);
616                    if (nativeClass == HDF5Constants.H5T_REFERENCE) {
617                        long lsize = 1;
618                        if (rank > 0) {
619                            log.trace("getDatatype(): rank={}, dims={}", rank, dims);
620                            for (int j = 0; j < dims.length; j++) {
621                                lsize *= dims[j];
622                            }
623                        }
624                        datatype = new H5ReferenceType(getFileFormat(), lsize, tid);
625                    }
626                    else
627                        datatype = new H5Datatype(getFileFormat(), tid);
628                }
629                catch (Exception ex) {
630                    log.debug("getDatatype(): ", ex);
631                }
632                finally {
633                    try {
634                        H5.H5Tclose(tid);
635                    }
636                    catch (HDF5Exception ex) {
637                        log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex);
638                    }
639                    try {
640                        H5.H5Dclose(did);
641                    }
642                    catch (HDF5Exception ex) {
643                        log.debug("getDatatype(): H5Dclose(did {}) failure: ", did, ex);
644                    }
645                }
646            }
647        }
648
649        if (isExternal) {
650            String pdir = this.getFileFormat().getAbsoluteFile().getParent();
651
652            if (pdir == null) {
653                pdir = ".";
654            }
655            System.setProperty("user.dir", pdir);
656            log.trace("getDatatype(): External dataset: user.dir={}", pdir);
657        }
658
659        return datatype;
660    }
661
662    /**
663     * Refreshes the dataset before re-read of data.
664     */
665    @Override
666    public Object refreshData() {
667        inited = false;
668        refresh = true;
669
670        init();
671        return super.refreshData();
672    }
673
674    /**
675     * Removes all of the elements from metadata list.
676     * The list should be empty after this call returns.
677     */
678    @Override
679    public void clear() {
680        super.clear();
681        objMetadata.clear();
682    }
683
684    /*
685     * (non-Javadoc)
686     *
687     * @see hdf.object.Dataset#readBytes()
688     */
689    @Override
690    public byte[] readBytes() throws HDF5Exception {
691        byte[] theData = null;
692
693        if (!isInited())
694            init();
695
696        long did = open();
697        if (did >= 0) {
698            long fspace = HDF5Constants.H5I_INVALID_HID;
699            long mspace = HDF5Constants.H5I_INVALID_HID;
700            long tid = HDF5Constants.H5I_INVALID_HID;
701
702            try {
703                long[] lsize = { 1 };
704                for (int j = 0; j < selectedDims.length; j++)
705                    lsize[0] *= selectedDims[j];
706
707                fspace = H5.H5Dget_space(did);
708                mspace = H5.H5Screate_simple(rank, selectedDims, null);
709
710                // set the rectangle selection
711                // HDF5 bug: for scalar dataset, H5Sselect_hyperslab gives core dump
712                if (rank * dims[0] > 1)
713                    H5.H5Sselect_hyperslab(fspace, HDF5Constants.H5S_SELECT_SET, startDims, selectedStride, selectedDims, null); // set block to 1
714
715                tid = H5.H5Dget_type(did);
716                long size = H5.H5Tget_size(tid) * lsize[0];
717                log.trace("readBytes(): size = {}", size);
718
719                if (size < Integer.MIN_VALUE || size > Integer.MAX_VALUE)
720                    throw new Exception("Invalid int size");
721
722                theData = new byte[(int)size];
723
724                log.trace("readBytes(): H5Dread: did={} tid={} fspace={} mspace={}", did, tid, fspace, mspace);
725                H5.H5Dread(did, tid, mspace, fspace, HDF5Constants.H5P_DEFAULT, theData);
726            }
727            catch (Exception ex) {
728                log.debug("readBytes(): failed to read data: ", ex);
729            }
730            finally {
731                try {
732                    H5.H5Sclose(fspace);
733                }
734                catch (Exception ex2) {
735                    log.debug("readBytes(): H5Sclose(fspace {}) failure: ", fspace, ex2);
736                }
737                try {
738                    H5.H5Sclose(mspace);
739                }
740                catch (Exception ex2) {
741                    log.debug("readBytes(): H5Sclose(mspace {}) failure: ", mspace, ex2);
742                }
743                try {
744                    H5.H5Tclose(tid);
745                }
746                catch (HDF5Exception ex2) {
747                    log.debug("readBytes(): H5Tclose(tid {}) failure: ", tid, ex2);
748                }
749                close(did);
750            }
751        }
752
753        return theData;
754    }
755
756    /**
757     * Reads the data from file.
758     *
759     * read() reads the data from file to a memory buffer and returns the memory
760     * buffer. The dataset object does not hold the memory buffer. To store the
761     * memory buffer in the dataset object, one must call getData().
762     *
763     * By default, the whole dataset is read into memory. Users can also select
764     * a subset to read. Subsetting is done in an implicit way.
765     *
766     * <b>How to Select a Subset</b>
767     *
768     * A selection is specified by three arrays: start, stride and count.
769     * <ol>
770     * <li>start: offset of a selection
771     * <li>stride: determines how many elements to move in each dimension
772     * <li>count: number of elements to select in each dimension
773     * </ol>
774     * getStartDims(), getStride() and getSelectedDims() returns the start,
775     * stride and count arrays respectively. Applications can make a selection
776     * by changing the values of the arrays.
777     *
778     * The following example shows how to make a subset. In the example, the
779     * dataset is a 4-dimensional array of [200][100][50][10], i.e. dims[0]=200;
780     * dims[1]=100; dims[2]=50; dims[3]=10; <br>
781     * We want to select every other data point in dims[1] and dims[2]
782     *
783     * <pre>
784     * int rank = dataset.getRank(); // number of dimensions of the dataset
785     * long[] dims = dataset.getDims(); // the dimension sizes of the dataset
786     * long[] selected = dataset.getSelectedDims(); // the selected size of the
787     *                                              // dataset
788     * long[] start = dataset.getStartDims(); // the offset of the selection
789     * long[] stride = dataset.getStride(); // the stride of the dataset
790     * int[] selectedIndex = dataset.getSelectedIndex(); // the selected
791     *                                                   // dimensions for
792     *                                                   // display
793     *
794     * // select dim1 and dim2 as 2D data for display, and slice through dim0
795     * selectedIndex[0] = 1;
796     * selectedIndex[1] = 2;
797     * selectedIndex[1] = 0;
798     *
799     * // reset the selection arrays
800     * for (int i = 0; i &lt; rank; i++) {
801     *     start[i] = 0;
802     *     selected[i] = 1;
803     *     stride[i] = 1;
804     * }
805     *
806     * // set stride to 2 on dim1 and dim2 so that every other data point is
807     * // selected.
808     * stride[1] = 2;
809     * stride[2] = 2;
810     *
811     * // set the selection size of dim1 and dim2
812     * selected[1] = dims[1] / stride[1];
813     * selected[2] = dims[1] / stride[2];
814     *
815     * // when dataset.getData() is called, the selection above will be used
816     * // since
817     * // the dimension arrays are passed by reference. Changes of these arrays
818     * // outside the dataset object directly change the values of these array
819     * // in the dataset object.
820     * </pre>
821     *
822     * For ScalarDS, the memory data buffer is a one-dimensional array of byte,
823     * short, int, float, double or String type based on the datatype of the
824     * dataset.
825     *
826     * @return the data read from file.
827     *
828     * @see #getData()
829     * @see hdf.object.DataFormat#read()
830     *
831     * @throws Exception
832     *             if object can not be read
833     */
834    @Override
835    public Object read() throws Exception {
836        Object readData = null;
837
838        if (!isInited())
839            init();
840
841        try {
842            readData = scalarDatasetCommonIO(H5File.IO_TYPE.READ, null);
843        }
844        catch (Exception ex) {
845            log.debug("read(): failed to read scalar dataset: ", ex);
846            throw new Exception("failed to read scalar dataset: " + ex.getMessage(), ex);
847        }
848
849        return readData;
850    }
851
852    /**
853     * Writes the given data buffer into this dataset in a file.
854     *
855     * @param buf
856     *            The buffer that contains the data values.
857     *
858     * @throws Exception
859     *             If there is an error at the HDF5 library level.
860     */
861    @Override
862    public void write(Object buf) throws Exception {
863        if (this.getFileFormat().isReadOnly())
864            throw new Exception("cannot write to scalar dataset in file opened as read-only");
865
866        if (!isInited())
867            init();
868
869        try {
870            scalarDatasetCommonIO(H5File.IO_TYPE.WRITE, buf);
871        }
872        catch (Exception ex) {
873            log.debug("write(Object): failed to write to scalar dataset: ", ex);
874            throw new Exception("failed to write to scalar dataset: " + ex.getMessage(), ex);
875        }
876    }
877
878    private Object scalarDatasetCommonIO(H5File.IO_TYPE ioType, Object writeBuf) throws Exception {
879        H5Datatype dsDatatype = (H5Datatype) getDatatype();
880        H5Datatype dsBaseDatatype = (H5Datatype) getDatatype().getDatatypeBase();
881        boolean BDTisRef = false;
882        if (dsBaseDatatype != null)
883            BDTisRef = dsBaseDatatype.isStdRef();
884        Object theData = null;
885
886        /*
887         * I/O type-specific pre-initialization.
888         */
889        if (ioType == H5File.IO_TYPE.WRITE) {
890            if (writeBuf == null) {
891                log.debug("scalarDatasetCommonIO(): writeBuf is null");
892                throw new Exception("write buffer is null");
893            }
894        }
895
896        long did = open();
897        if (did >= 0) {
898            long[] spaceIDs = { HDF5Constants.H5I_INVALID_HID, HDF5Constants.H5I_INVALID_HID }; // spaceIDs[0]=mspace, spaceIDs[1]=fspace
899
900            try {
901                /*
902                 * NOTE: this call sets up a hyperslab selection in the file according to the
903                 * current selection in the dataset object.
904                 */
905                long totalSelectedSpacePoints = H5Utils.getTotalSelectedSpacePoints(did, dims, startDims,
906                        selectedStride, selectedDims, spaceIDs);
907
908                if (ioType == H5File.IO_TYPE.READ) {
909                    log.trace("scalarDatasetCommonIO():read ioType isNamed={} isEnum={} isText={} isRefObj={}", dsDatatype.isNamed(), dsDatatype.isEnum(), dsDatatype.isText(), dsDatatype.isRefObj());
910                    if (dsDatatype.isVarStr()) {
911                        try {
912                            theData = H5Datatype.allocateArray(dsDatatype, (int)totalSelectedSpacePoints);
913                        }
914                        catch (OutOfMemoryError err) {
915                            log.debug("scalarDatasetCommonIO(): Out of memory");
916                            throw new HDF5Exception("Out Of Memory");
917                        }
918                    }
919                    else if (dsDatatype.isVLEN()) {
920                        theData = new ArrayList[(int)totalSelectedSpacePoints];
921                        for (int j = 0; j < (int)totalSelectedSpacePoints; j++)
922                            ((ArrayList[])theData)[j] = new ArrayList<byte[]>();
923                    }
924                    else if ((originalBuf == null) || dsDatatype.isEnum() || dsDatatype.isText() || dsDatatype.isRefObj()
925                            || ((originalBuf != null) && (totalSelectedSpacePoints != nPoints))) {
926                        try {
927                            theData = H5Datatype.allocateArray(dsDatatype, (int)totalSelectedSpacePoints);
928                        }
929                        catch (OutOfMemoryError err) {
930                            log.debug("scalarDatasetCommonIO(): Out of memory");
931                            throw new HDF5Exception("Out Of Memory");
932                        }
933                    }
934                    else {
935                        // reuse the buffer if the size is the same
936                        log.trace("scalarDatasetCommonIO():read ioType reuse the buffer if the size is the same");
937                        theData = originalBuf;
938                    }
939
940                    if (theData != null) {
941                        /*
942                         * Actually read the data now that everything has been setup.
943                         */
944                        long tid = HDF5Constants.H5I_INVALID_HID;
945                        try {
946                            log.trace("scalarDatasetCommonIO():read ioType create native");
947                            tid = dsDatatype.createNative();
948
949                            if (dsDatatype.isVarStr()) {
950                                log.trace("scalarDatasetCommonIO(): H5Dread_VLStrings did={} tid={} spaceIDs[0]={} spaceIDs[1]={}",
951                                        did, tid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
952                                                (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
953
954                                H5.H5Dread_VLStrings(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT,
955                                        (Object[]) theData);
956                            }
957                            else if (dsDatatype.isVLEN() || (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isVLEN())) {
958                                log.trace("scalarDatasetCommonIO(): H5DreadVL did={} tid={} spaceIDs[0]={} spaceIDs[1]={}",
959                                        did, tid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
960                                                (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
961
962                                H5.H5DreadVL(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT,
963                                        (Object[]) theData);
964                            }
965                            else {
966                                log.trace("scalarDatasetCommonIO(): H5Dread did={} tid={} spaceIDs[0]={} spaceIDs[1]={}",
967                                        did, tid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
968                                                (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
969
970                                H5.H5Dread(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, theData);
971                            }
972                        }
973                        catch (HDF5DataFiltersException exfltr) {
974                            log.debug("scalarDatasetCommonIO(): read failure: ", exfltr);
975                            throw new Exception("Filter not available exception: " + exfltr.getMessage(), exfltr);
976                        }
977                        catch (Exception ex) {
978                            log.debug("scalarDatasetCommonIO(): read failure: ", ex);
979                            throw new Exception(ex.getMessage(), ex);
980                        }
981                        finally {
982                            dsDatatype.close(tid);
983                        }
984
985                        /*
986                         * Perform any necessary data conversions.
987                         */
988                        if (dsDatatype.isText() && convertByteToString && (theData instanceof byte[])) {
989                            log.trace("scalarDatasetCommonIO(): isText: converting byte array to string array");
990                            theData = byteToString((byte[]) theData, (int) dsDatatype.getDatatypeSize());
991                        }
992                        else if (dsDatatype.isFloat() && dsDatatype.getDatatypeSize() == 16) {
993                            log.trace("scalarDatasetCommonIO(): isFloat: converting byte array to BigDecimal array");
994                            theData = dsDatatype.byteToBigDecimal(0, (int)totalSelectedSpacePoints, (byte[]) theData);
995                        }
996                        else if (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isFloat() && dsDatatype.getDatatypeBase().getDatatypeSize() == 16) {
997                            log.trace("scalarDatasetCommonIO(): isArray and isFloat: converting byte array to BigDecimal array");
998                            long[] arrayDims = dsDatatype.getArrayDims();
999                            int asize = (int)totalSelectedSpacePoints;
1000                            for (int j = 0; j < arrayDims.length; j++) {
1001                                asize *= arrayDims[j];
1002                            }
1003                            theData = ((H5Datatype)dsDatatype.getDatatypeBase()).byteToBigDecimal(0, asize, (byte[]) theData);
1004                        }
1005                        else if (dsDatatype.isRef() && (theData instanceof byte[])) {
1006                            log.trace("AttributeCommonIO():read ioType isRef: converting byte array to List of bytes");
1007                            ArrayList<byte[]> theListData = new ArrayList<>((int)totalSelectedSpacePoints);
1008                            for (int m = 0; m < (int) totalSelectedSpacePoints; m++) {
1009                                byte[] curBytes = new byte[(int)dsDatatype.getDatatypeSize()];
1010                                try {
1011                                    System.arraycopy(theData, m * (int)dsDatatype.getDatatypeSize(), curBytes, 0, (int)dsDatatype.getDatatypeSize());
1012                                    theListData.add(curBytes);
1013                                }
1014                                catch (Exception err) {
1015                                    log.trace("AttributeCommonIO(): arraycopy failure: ", err);
1016                                }
1017                            }
1018                            theData = theListData;
1019                        }
1020                    }
1021                } // H5File.IO_TYPE.READ
1022                else {
1023                    /*
1024                     * Perform any necessary data conversions before writing the data.
1025                     *
1026                     * Note that v-len strings do not get converted, regardless of
1027                     * conversion request type.
1028                     */
1029                    Object tmpData = writeBuf;
1030                    try {
1031                        // Check if we need to convert integer data
1032                        int tsize = (int) dsDatatype.getDatatypeSize();
1033                        String cname = writeBuf.getClass().getName();
1034                        log.trace("scalarDatasetCommonIO(): cname={} of datatype size={}", cname, tsize);
1035                        char dname = cname.charAt(cname.lastIndexOf("[") + 1);
1036                        boolean doIntConversion = (((tsize == 1) && (dname == 'S')) || ((tsize == 2) && (dname == 'I'))
1037                                || ((tsize == 4) && (dname == 'J')) || (dsDatatype.isUnsigned() && unsignedConverted));
1038
1039                        if (doIntConversion) {
1040                            log.trace("scalarDatasetCommonIO(): converting integer data to unsigned C-type integers");
1041                            tmpData = convertToUnsignedC(writeBuf, null);
1042                        }
1043                        else if (dsDatatype.isText() && !dsDatatype.isVarStr() && convertByteToString && !(writeBuf instanceof byte[])) {
1044                            log.trace("scalarDatasetCommonIO(): converting string array to byte array");
1045                            tmpData = stringToByte((String[]) writeBuf, tsize);
1046                        }
1047                        else if (dsDatatype.isEnum() && (Array.get(writeBuf, 0) instanceof String)) {
1048                            log.trace("scalarDatasetCommonIO(): converting enum names to values");
1049                            tmpData = dsDatatype.convertEnumNameToValue((String[]) writeBuf);
1050                        }
1051                        else if (dsDatatype.isFloat() && dsDatatype.getDatatypeSize() == 16) {
1052                            log.trace("scalarDatasetCommonIO(): isFloat: converting BigDecimal array to byte array");
1053                            throw new Exception("data conversion failure: cannot write BigDecimal values");
1054                            //tmpData = dsDatatype.bigDecimalToByte(0, (int)totalSelectedSpacePoints, (BigDecimal[]) writeBuf);
1055                        }
1056                    }
1057                    catch (Exception ex) {
1058                        log.debug("scalarDatasetCommonIO(): data conversion failure: ", ex);
1059                        throw new Exception("data conversion failure: " + ex.getMessage());
1060                    }
1061
1062                    /*
1063                     * Actually write the data now that everything has been setup.
1064                     */
1065                    long tid = HDF5Constants.H5I_INVALID_HID;
1066                    try {
1067                        tid = dsDatatype.createNative();
1068
1069                        if (dsDatatype.isVarStr()) {
1070                            log.trace("scalarDatasetCommonIO(): H5Dwrite_VLStrings did={} tid={} spaceIDs[0]={} spaceIDs[1]={}",
1071                                    did, tid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
1072                                            (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
1073
1074                            H5.H5Dwrite_VLStrings(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, (Object[]) tmpData);
1075                        }
1076                        else if (dsDatatype.isVLEN() || (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isVLEN())) {
1077                            log.trace("scalarDatasetCommonIO(): H5DwriteVL did={} tid={} spaceIDs[0]={} spaceIDs[1]={}",
1078                                    did, tid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
1079                                            (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
1080
1081                            H5.H5DwriteVL(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, (Object[]) tmpData);
1082                        }
1083                        else {
1084                            log.trace("scalarDatasetCommonIO(): H5Dwrite did={} tid={} spaceIDs[0]={} spaceIDs[1]={}",
1085                                    did, tid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
1086                                            (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
1087
1088                            H5.H5Dwrite(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, tmpData);
1089                        }
1090                    }
1091                    catch (Exception ex) {
1092                        log.debug("scalarDatasetCommonIO(): write failure: ", ex);
1093                        throw new Exception(ex.getMessage());
1094                    }
1095                    finally {
1096                        dsDatatype.close(tid);
1097                    }
1098                } // H5File.IO_TYPE.WRITE
1099            }
1100            finally {
1101                if (HDF5Constants.H5S_ALL != spaceIDs[0]) {
1102                    try {
1103                        H5.H5Sclose(spaceIDs[0]);
1104                    }
1105                    catch (Exception ex) {
1106                        log.debug("scalarDatasetCommonIO(): H5Sclose(spaceIDs[0] {}) failure: ", spaceIDs[0], ex);
1107                    }
1108                }
1109
1110                if (HDF5Constants.H5S_ALL != spaceIDs[1]) {
1111                    try {
1112                        H5.H5Sclose(spaceIDs[1]);
1113                    }
1114                    catch (Exception ex) {
1115                        log.debug("scalarDatasetCommonIO(): H5Sclose(spaceIDs[1] {}) failure: ", spaceIDs[1], ex);
1116                    }
1117                }
1118
1119                close(did);
1120            }
1121        }
1122        else
1123            log.debug("scalarDatasetCommonIO(): failed to open dataset");
1124
1125        return theData;
1126    }
1127
1128    /**
1129     * Retrieves the object's metadata, such as attributes, from the file.
1130     *
1131     * Metadata, such as attributes, is stored in a List.
1132     *
1133     * @return the list of metadata objects.
1134     *
1135     * @throws HDF5Exception
1136     *             if the metadata can not be retrieved
1137     */
1138    @Override
1139    public List<Attribute> getMetadata() throws HDF5Exception {
1140        int gmIndexType = 0;
1141        int gmIndexOrder = 0;
1142
1143        try {
1144            gmIndexType = fileFormat.getIndexType(null);
1145        }
1146        catch (Exception ex) {
1147            log.debug("getMetadata(): getIndexType failed: ", ex);
1148        }
1149        try {
1150            gmIndexOrder = fileFormat.getIndexOrder(null);
1151        }
1152        catch (Exception ex) {
1153            log.debug("getMetadata(): getIndexOrder failed: ", ex);
1154        }
1155        return this.getMetadata(gmIndexType, gmIndexOrder);
1156    }
1157
1158    /**
1159     * Retrieves the object's metadata, such as attributes, from the file.
1160     *
1161     * Metadata, such as attributes, is stored in a List.
1162     *
1163     * @param attrPropList
1164     *             the list of properties to get
1165     *
1166     * @return the list of metadata objects.
1167     *
1168     * @throws HDF5Exception
1169     *             if the metadata can not be retrieved
1170     */
1171    public List<Attribute> getMetadata(int... attrPropList) throws HDF5Exception {
1172        if (!isInited())
1173            init();
1174
1175        try {
1176            this.linkTargetObjName = H5File.getLinkTargetName(this);
1177        }
1178        catch (Exception ex) {
1179            log.debug("getMetadata(): getLinkTargetName failed: ", ex);
1180        }
1181
1182        if (objMetadata.getAttributeList() == null) {
1183            long did = HDF5Constants.H5I_INVALID_HID;
1184            long pcid = HDF5Constants.H5I_INVALID_HID;
1185            long paid = HDF5Constants.H5I_INVALID_HID;
1186
1187            did = open();
1188            if (did >= 0) {
1189                try {
1190                    // get the compression and chunk information
1191                    pcid = H5.H5Dget_create_plist(did);
1192                    paid = H5.H5Dget_access_plist(did);
1193                    long storageSize = H5.H5Dget_storage_size(did);
1194                    int nfilt = H5.H5Pget_nfilters(pcid);
1195                    int layoutType = H5.H5Pget_layout(pcid);
1196
1197                    storageLayout.setLength(0);
1198                    compression.setLength(0);
1199
1200                    if (layoutType == HDF5Constants.H5D_CHUNKED) {
1201                        chunkSize = new long[rank];
1202                        H5.H5Pget_chunk(pcid, rank, chunkSize);
1203                        int n = chunkSize.length;
1204                        storageLayout.append("CHUNKED: ").append(chunkSize[0]);
1205                        for (int i = 1; i < n; i++)
1206                            storageLayout.append(" X ").append(chunkSize[i]);
1207
1208                        if (nfilt > 0) {
1209                            long nelmts = 1;
1210                            long uncompSize;
1211                            long datumSize = getDatatype().getDatatypeSize();
1212
1213                            if (datumSize < 0) {
1214                                long tmptid = HDF5Constants.H5I_INVALID_HID;
1215                                try {
1216                                    tmptid = H5.H5Dget_type(did);
1217                                    datumSize = H5.H5Tget_size(tmptid);
1218                                }
1219                                finally {
1220                                    try {
1221                                        H5.H5Tclose(tmptid);
1222                                    }
1223                                    catch (Exception ex2) {
1224                                        log.debug("getMetadata(): H5Tclose(tmptid {}) failure: ", tmptid, ex2);
1225                                    }
1226                                }
1227                            }
1228
1229                            for (int i = 0; i < rank; i++)
1230                                nelmts *= dims[i];
1231                            uncompSize = nelmts * datumSize;
1232
1233                            /* compression ratio = uncompressed size / compressed size */
1234
1235                            if (storageSize != 0) {
1236                                double ratio = (double) uncompSize / (double) storageSize;
1237                                DecimalFormat df = new DecimalFormat();
1238                                df.setMinimumFractionDigits(3);
1239                                df.setMaximumFractionDigits(3);
1240                                compression.append(df.format(ratio)).append(":1");
1241                            }
1242                        }
1243                    }
1244                    else if (layoutType == HDF5Constants.H5D_COMPACT) {
1245                        storageLayout.append("COMPACT");
1246                    }
1247                    else if (layoutType == HDF5Constants.H5D_CONTIGUOUS) {
1248                        storageLayout.append("CONTIGUOUS");
1249                        if (H5.H5Pget_external_count(pcid) > 0)
1250                            storageLayout.append(" - EXTERNAL ");
1251                    }
1252                    else if (layoutType == HDF5Constants.H5D_VIRTUAL) {
1253                        storageLayout.append("VIRTUAL - ");
1254                        try {
1255                            long vmaps = H5.H5Pget_virtual_count(pcid);
1256                            try {
1257                                int virtView = H5.H5Pget_virtual_view(paid);
1258                                long virtGap = H5.H5Pget_virtual_printf_gap(paid);
1259                                if (virtView == HDF5Constants.H5D_VDS_FIRST_MISSING)
1260                                    storageLayout.append("First Missing");
1261                                else
1262                                    storageLayout.append("Last Available");
1263                                storageLayout.append("\nGAP : ").append(virtGap);
1264                            }
1265                            catch (Exception err) {
1266                                log.debug("getMetadata(): vds error: ", err);
1267                                storageLayout.append("ERROR");
1268                            }
1269                            storageLayout.append("\nMAPS : ").append(vmaps);
1270                            if (vmaps > 0) {
1271                                for (long next = 0; next < vmaps; next++) {
1272                                    try {
1273                                        H5.H5Pget_virtual_vspace(pcid, next);
1274                                        H5.H5Pget_virtual_srcspace(pcid, next);
1275                                        String fname = H5.H5Pget_virtual_filename(pcid, next);
1276                                        String dsetname = H5.H5Pget_virtual_dsetname(pcid, next);
1277                                        storageLayout.append("\n").append(fname).append(" : ").append(dsetname);
1278                                    }
1279                                    catch (Exception err) {
1280                                        log.debug("getMetadata(): vds space[{}] error: ", next, err);
1281                                        storageLayout.append("ERROR");
1282                                    }
1283                                }
1284                            }
1285                        }
1286                        catch (Exception err) {
1287                            log.debug("getMetadata(): vds count error: ", err);
1288                            storageLayout.append("ERROR");
1289                        }
1290                    }
1291                    else {
1292                        chunkSize = null;
1293                        storageLayout.append("NONE");
1294                    }
1295
1296                    int[] flags = { 0, 0 };
1297                    long[] cdNelmts = { 20 };
1298                    int[] cdValues = new int[(int) cdNelmts[0]];
1299                    String[] cdName = { "", "" };
1300                    log.trace("getMetadata(): {} filters in pipeline", nfilt);
1301                    int filter = -1;
1302                    int[] filterConfig = { 1 };
1303
1304                    filters.setLength(0);
1305
1306                    if (nfilt == 0) {
1307                        filters.append("NONE");
1308                    }
1309                    else {
1310                        for (int i = 0, k = 0; i < nfilt; i++) {
1311                            log.trace("getMetadata(): filter[{}]", i);
1312                            if (i > 0)
1313                                filters.append(", ");
1314                            if (k > 0)
1315                                compression.append(", ");
1316
1317                            try {
1318                                cdNelmts[0] = 20;
1319                                cdValues = new int[(int) cdNelmts[0]];
1320                                cdValues = new int[(int) cdNelmts[0]];
1321                                filter = H5.H5Pget_filter(pcid, i, flags, cdNelmts, cdValues, 120, cdName, filterConfig);
1322                                log.trace("getMetadata(): filter[{}] is {} has {} elements ", i, cdName[0], cdNelmts[0]);
1323                                for (int j = 0; j < cdNelmts[0]; j++)
1324                                    log.trace("getMetadata(): filter[{}] element {} = {}", i, j, cdValues[j]);
1325                            }
1326                            catch (Exception err) {
1327                                log.debug("getMetadata(): filter[{}] error: ", i, err);
1328                                filters.append("ERROR");
1329                                continue;
1330                            }
1331
1332                            if (filter == HDF5Constants.H5Z_FILTER_NONE) {
1333                                filters.append("NONE");
1334                            }
1335                            else if (filter == HDF5Constants.H5Z_FILTER_DEFLATE) {
1336                                filters.append("GZIP");
1337                                compression.append(COMPRESSION_GZIP_TXT).append(cdValues[0]);
1338                                k++;
1339                            }
1340                            else if (filter == HDF5Constants.H5Z_FILTER_FLETCHER32) {
1341                                filters.append("Error detection filter");
1342                            }
1343                            else if (filter == HDF5Constants.H5Z_FILTER_SHUFFLE) {
1344                                filters.append("SHUFFLE: Nbytes = ").append(cdValues[0]);
1345                            }
1346                            else if (filter == HDF5Constants.H5Z_FILTER_NBIT) {
1347                                filters.append("NBIT");
1348                            }
1349                            else if (filter == HDF5Constants.H5Z_FILTER_SCALEOFFSET) {
1350                                filters.append("SCALEOFFSET: MIN BITS = ").append(cdValues[0]);
1351                            }
1352                            else if (filter == HDF5Constants.H5Z_FILTER_SZIP) {
1353                                filters.append("SZIP");
1354                                compression.append("SZIP: Pixels per block = ").append(cdValues[1]);
1355                                k++;
1356                                int flag = -1;
1357                                try {
1358                                    flag = H5.H5Zget_filter_info(filter);
1359                                }
1360                                catch (Exception ex) {
1361                                    log.debug("getMetadata(): H5Zget_filter_info failure: ", ex);
1362                                    flag = -1;
1363                                }
1364                                if (flag == HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED)
1365                                    compression.append(": H5Z_FILTER_CONFIG_DECODE_ENABLED");
1366                                else if ((flag == HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED)
1367                                        || (flag >= (HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED
1368                                                + HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED)))
1369                                    compression.append(": H5Z_FILTER_CONFIG_ENCODE_ENABLED");
1370                            }
1371                            else {
1372                                filters.append("USERDEFINED ").append(cdName[0]).append("(").append(filter).append("): ");
1373                                for (int j = 0; j < cdNelmts[0]; j++) {
1374                                    if (j > 0)
1375                                        filters.append(", ");
1376                                    filters.append(cdValues[j]);
1377                                }
1378                                log.debug("getMetadata(): filter[{}] is user defined compression", i);
1379                            }
1380                        } //  (int i=0; i<nfilt; i++)
1381                    }
1382
1383                    if (compression.length() == 0)
1384                        compression.append("NONE");
1385                    log.trace("getMetadata(): filter compression={}", compression);
1386                    log.trace("getMetadata(): filter information={}", filters);
1387
1388                    storage.setLength(0);
1389                    storage.append("SIZE: ").append(storageSize);
1390
1391                    try {
1392                        int[] at = { 0 };
1393                        H5.H5Pget_alloc_time(pcid, at);
1394                        storage.append(", allocation time: ");
1395                        if (at[0] == HDF5Constants.H5D_ALLOC_TIME_EARLY)
1396                            storage.append("Early");
1397                        else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_INCR)
1398                            storage.append("Incremental");
1399                        else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_LATE)
1400                            storage.append("Late");
1401                        else
1402                            storage.append("Default");
1403                    }
1404                    catch (Exception ex) {
1405                        log.debug("getMetadata(): Storage allocation time:", ex);
1406                    }
1407                    log.trace("getMetadata(): storage={}", storage);
1408                }
1409                finally {
1410                    try {
1411                        H5.H5Pclose(paid);
1412                    }
1413                    catch (Exception ex) {
1414                        log.debug("getMetadata(): H5Pclose(paid {}) failure: ", paid, ex);
1415                    }
1416                    try {
1417                        H5.H5Pclose(pcid);
1418                    }
1419                    catch (Exception ex) {
1420                        log.debug("getMetadata(): H5Pclose(pcid {}) failure: ", pcid, ex);
1421                    }
1422                    close(did);
1423                }
1424            }
1425        }
1426
1427        List<Attribute> attrlist = null;
1428        try {
1429            attrlist = objMetadata.getMetadata(attrPropList);
1430        }
1431        catch (Exception ex) {
1432            log.debug("getMetadata(): getMetadata failed: ", ex);
1433        }
1434        return attrlist;
1435    }
1436
1437    /**
1438     * Writes a specific piece of metadata (such as an attribute) into the file.
1439     *
1440     * If an HDF(4&amp;5) attribute exists in the file, this method updates its
1441     * value. If the attribute does not exist in the file, it creates the
1442     * attribute in the file and attaches it to the object. It will fail to
1443     * write a new attribute to the object where an attribute with the same name
1444     * already exists. To update the value of an existing attribute in the file,
1445     * one needs to get the instance of the attribute by getMetadata(), change
1446     * its values, then use writeMetadata() to write the value.
1447     *
1448     * @param info
1449     *            the metadata to write.
1450     *
1451     * @throws Exception
1452     *             if the metadata can not be written
1453     */
1454    @Override
1455    public void writeMetadata(Object info) throws Exception {
1456        try {
1457            objMetadata.writeMetadata(info);
1458        }
1459        catch (Exception ex) {
1460            log.debug("writeMetadata(): Object not an Attribute");
1461        }
1462    }
1463
1464    /**
1465     * Deletes an existing piece of metadata from this object.
1466     *
1467     * @param info
1468     *            the metadata to delete.
1469     *
1470     * @throws HDF5Exception
1471     *             if the metadata can not be removed
1472     */
1473    @Override
1474    public void removeMetadata(Object info) throws HDF5Exception {
1475        try {
1476            objMetadata.removeMetadata(info);
1477        }
1478        catch (Exception ex) {
1479            log.debug("removeMetadata(): Object not an Attribute");
1480            return;
1481        }
1482
1483        Attribute attr = (Attribute) info;
1484        log.trace("removeMetadata(): {}", attr.getAttributeName());
1485        long did = open();
1486        if (did >= 0) {
1487            try {
1488                H5.H5Adelete(did, attr.getAttributeName());
1489            }
1490            finally {
1491                close(did);
1492            }
1493        }
1494        else {
1495            log.debug("removeMetadata(): failed to open scalar dataset");
1496        }
1497    }
1498
1499    /**
1500     * Updates an existing piece of metadata attached to this object.
1501     *
1502     * @param info
1503     *            the metadata to update.
1504     *
1505     * @throws HDF5Exception
1506     *             if the metadata can not be updated
1507     */
1508    @Override
1509    public void updateMetadata(Object info) throws HDF5Exception {
1510        try {
1511            objMetadata.updateMetadata(info);
1512        }
1513        catch (Exception ex) {
1514            log.debug("updateMetadata(): Object not an Attribute");
1515            return;
1516        }
1517    }
1518
1519    /*
1520     * (non-Javadoc)
1521     *
1522     * @see hdf.object.HObject#setName(java.lang.String)
1523     */
1524    @Override
1525    public void setName(String newName) throws Exception {
1526        if (newName == null)
1527            throw new IllegalArgumentException("The new name is NULL");
1528
1529        H5File.renameObject(this, newName);
1530        super.setName(newName);
1531    }
1532
1533    /**
1534     * Resets selection of dataspace
1535     */
1536    @Override
1537    protected void resetSelection() {
1538        super.resetSelection();
1539
1540        if (interlace == INTERLACE_PIXEL) {
1541            // 24-bit TRUE color image
1542            // [height][width][pixel components]
1543            selectedDims[2] = 3;
1544            selectedDims[0] = dims[0];
1545            selectedDims[1] = dims[1];
1546            selectedIndex[0] = 0; // index for height
1547            selectedIndex[1] = 1; // index for width
1548            selectedIndex[2] = 2; // index for depth
1549        }
1550        else if (interlace == INTERLACE_PLANE) {
1551            // 24-bit TRUE color image
1552            // [pixel components][height][width]
1553            selectedDims[0] = 3;
1554            selectedDims[1] = dims[1];
1555            selectedDims[2] = dims[2];
1556            selectedIndex[0] = 1; // index for height
1557            selectedIndex[1] = 2; // index for width
1558            selectedIndex[2] = 0; // index for depth
1559        }
1560
1561        if ((rank > 1) && (selectedIndex[0] > selectedIndex[1]))
1562            isDefaultImageOrder = false;
1563        else
1564            isDefaultImageOrder = true;
1565    }
1566
1567    /**
1568     * Creates a scalar dataset in a file with/without chunking and compression.
1569     *
1570     * @param name
1571     *            the name of the dataset to create.
1572     * @param pgroup
1573     *            parent group where the new dataset is created.
1574     * @param type
1575     *            the datatype of the dataset.
1576     * @param dims
1577     *            the dimension size of the dataset.
1578     * @param maxdims
1579     *            the max dimension size of the dataset. maxdims is set to dims if maxdims = null.
1580     * @param chunks
1581     *            the chunk size of the dataset. No chunking if chunk = null.
1582     * @param gzip
1583     *            GZIP compression level (1 to 9). No compression if gzip&lt;=0.
1584     * @param data
1585     *            the array of data values.
1586     *
1587     * @return the new scalar dataset if successful; otherwise returns null.
1588     *
1589     * @throws Exception if there is a failure.
1590     */
1591    public static Dataset create(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims,
1592            long[] chunks, int gzip, Object data) throws Exception {
1593        return create(name, pgroup, type, dims, maxdims, chunks, gzip, null, data);
1594    }
1595
1596    /**
1597     * Creates a scalar dataset in a file with/without chunking and compression.
1598     *
1599     * The following example shows how to create a string dataset using this function.
1600     *
1601     * <pre>
1602     * H5File file = new H5File(&quot;test.h5&quot;, H5File.CREATE);
1603     * int max_str_len = 120;
1604     * Datatype strType = new H5Datatype(Datatype.CLASS_STRING, max_str_len, Datatype.NATIVE, Datatype.NATIVE);
1605     * int size = 10000;
1606     * long dims[] = { size };
1607     * long chunks[] = { 1000 };
1608     * int gzip = 9;
1609     * String strs[] = new String[size];
1610     *
1611     * for (int i = 0; i &lt; size; i++)
1612     *     strs[i] = String.valueOf(i);
1613     *
1614     * file.open();
1615     * file.createScalarDS(&quot;/1D scalar strings&quot;, null, strType, dims, null, chunks, gzip, strs);
1616     *
1617     * try {
1618     *     file.close();
1619     * }
1620     * catch (Exception ex) {
1621     * }
1622     * </pre>
1623     *
1624     * @param name
1625     *            the name of the dataset to create.
1626     * @param pgroup
1627     *            parent group where the new dataset is created.
1628     * @param type
1629     *            the datatype of the dataset.
1630     * @param dims
1631     *            the dimension size of the dataset.
1632     * @param maxdims
1633     *            the max dimension size of the dataset. maxdims is set to dims if maxdims = null.
1634     * @param chunks
1635     *            the chunk size of the dataset. No chunking if chunk = null.
1636     * @param gzip
1637     *            GZIP compression level (1 to 9). No compression if gzip&lt;=0.
1638     * @param fillValue
1639     *            the default data value.
1640     * @param data
1641     *            the array of data values.
1642     *
1643     * @return the new scalar dataset if successful; otherwise returns null.
1644     *
1645     * @throws Exception if there is a failure.
1646     */
1647    public static Dataset create(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims,
1648            long[] chunks, int gzip, Object fillValue, Object data) throws Exception {
1649        H5ScalarDS dataset = null;
1650        String fullPath = null;
1651        long did = HDF5Constants.H5I_INVALID_HID;
1652        long plist = HDF5Constants.H5I_INVALID_HID;
1653        long sid = HDF5Constants.H5I_INVALID_HID;
1654        long tid = HDF5Constants.H5I_INVALID_HID;
1655
1656        if ((pgroup == null) || (name == null) || (dims == null) || ((gzip > 0) && (chunks == null))) {
1657            log.debug("create(): one or more parameters are null");
1658            return null;
1659        }
1660
1661        H5File file = (H5File) pgroup.getFileFormat();
1662        if (file == null) {
1663            log.debug("create(): parent group FileFormat is null");
1664            return null;
1665        }
1666
1667        String path = HObject.SEPARATOR;
1668        if (!pgroup.isRoot()) {
1669            path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR;
1670            if (name.endsWith("/"))
1671                name = name.substring(0, name.length() - 1);
1672            int idx = name.lastIndexOf('/');
1673            if (idx >= 0)
1674                name = name.substring(idx + 1);
1675        }
1676
1677        fullPath = path + name;
1678
1679        // setup chunking and compression
1680        boolean isExtentable = false;
1681        if (maxdims != null) {
1682            for (int i = 0; i < maxdims.length; i++) {
1683                if (maxdims[i] == 0)
1684                    maxdims[i] = dims[i];
1685                else if (maxdims[i] < 0)
1686                    maxdims[i] = HDF5Constants.H5S_UNLIMITED;
1687
1688                if (maxdims[i] != dims[i])
1689                    isExtentable = true;
1690            }
1691        }
1692
1693        // HDF5 requires you to use chunking in order to define extendible
1694        // datasets. Chunking makes it possible to extend datasets efficiently,
1695        // without having to reorganize storage excessively. Using default size
1696        // of 64x...which has good performance
1697        if ((chunks == null) && isExtentable) {
1698            chunks = new long[dims.length];
1699            for (int i = 0; i < dims.length; i++)
1700                chunks[i] = Math.min(dims[i], 64);
1701        }
1702
1703        // prepare the dataspace and datatype
1704        int rank = dims.length;
1705
1706        tid = type.createNative();
1707        if (tid >= 0) {
1708            try {
1709                sid = H5.H5Screate_simple(rank, dims, maxdims);
1710
1711                // figure out creation properties
1712                plist = HDF5Constants.H5P_DEFAULT;
1713
1714                byte[] valFill = null;
1715                try {
1716                    valFill = parseFillValue(type, fillValue);
1717                }
1718                catch (Exception ex) {
1719                    log.debug("create(): parse fill value: ", ex);
1720                }
1721                log.trace("create(): parseFillValue={}", valFill);
1722
1723                if (chunks != null || valFill != null) {
1724                    plist = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
1725
1726                    if (chunks != null) {
1727                        H5.H5Pset_layout(plist, HDF5Constants.H5D_CHUNKED);
1728                        H5.H5Pset_chunk(plist, rank, chunks);
1729
1730                        // compression requires chunking
1731                        if (gzip > 0) {
1732                            H5.H5Pset_deflate(plist, gzip);
1733                        }
1734                    }
1735
1736                    if (valFill != null)
1737                        H5.H5Pset_fill_value(plist, tid, valFill);
1738                }
1739
1740                long fid = file.getFID();
1741
1742                log.trace("create(): create dataset fid={}", fid);
1743                did = H5.H5Dcreate(fid, fullPath, tid, sid, HDF5Constants.H5P_DEFAULT, plist, HDF5Constants.H5P_DEFAULT);
1744                log.trace("create(): create dataset did={}", did);
1745                dataset = new H5ScalarDS(file, name, path);
1746            }
1747            finally {
1748                try {
1749                    H5.H5Pclose(plist);
1750                }
1751                catch (HDF5Exception ex) {
1752                    log.debug("create(): H5Pclose(plist {}) failure: ", plist, ex);
1753                }
1754                try {
1755                    H5.H5Sclose(sid);
1756                }
1757                catch (HDF5Exception ex) {
1758                    log.debug("create(): H5Sclose(sid {}) failure: ", sid, ex);
1759                }
1760                try {
1761                    H5.H5Tclose(tid);
1762                }
1763                catch (HDF5Exception ex) {
1764                    log.debug("create(): H5Tclose(tid {}) failure: ", tid, ex);
1765                }
1766                try {
1767                    H5.H5Dclose(did);
1768                }
1769                catch (HDF5Exception ex) {
1770                    log.debug("create(): H5Dclose(did {}) failure: ", did, ex);
1771                }
1772            }
1773        }
1774
1775        if (dataset != null) {
1776            pgroup.addToMemberList(dataset);
1777            if (data != null) {
1778                dataset.init();
1779                long[] selected = dataset.getSelectedDims();
1780                for (int i = 0; i < rank; i++)
1781                    selected[i] = dims[i];
1782                dataset.write(data);
1783            }
1784        }
1785
1786        return dataset;
1787    }
1788
1789    // check _FillValue, valid_min, valid_max, and valid_range
1790    private void checkCFconvention(long oid) throws Exception {
1791        Object avalue = getAttrValue(oid, "_FillValue");
1792
1793        if (avalue != null) {
1794            int n = Array.getLength(avalue);
1795            for (int i = 0; i < n; i++)
1796                addFilteredImageValue((Number) Array.get(avalue, i));
1797        }
1798
1799        if (imageDataRange == null || imageDataRange[1] <= imageDataRange[0]) {
1800            double x0 = 0;
1801            double x1 = 0;
1802            avalue = getAttrValue(oid, "valid_range");
1803            if (avalue != null) {
1804                try {
1805                    x0 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 0).toString());
1806                    x1 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 1).toString());
1807                    imageDataRange = new double[2];
1808                    imageDataRange[0] = x0;
1809                    imageDataRange[1] = x1;
1810                    return;
1811                }
1812                catch (Exception ex) {
1813                    log.debug("checkCFconvention(): valid_range: ", ex);
1814                }
1815            }
1816
1817            avalue = getAttrValue(oid, "valid_min");
1818            if (avalue != null) {
1819                try {
1820                    x0 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 0).toString());
1821                }
1822                catch (Exception ex) {
1823                    log.debug("checkCFconvention(): valid_min: ", ex);
1824                }
1825                avalue = getAttrValue(oid, "valid_max");
1826                if (avalue != null) {
1827                    try {
1828                        x1 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 0).toString());
1829                        imageDataRange = new double[2];
1830                        imageDataRange[0] = x0;
1831                        imageDataRange[1] = x1;
1832                    }
1833                    catch (Exception ex) {
1834                        log.debug("checkCFconvention(): valid_max:", ex);
1835                    }
1836                }
1837            }
1838        } // (imageDataRange==null || imageDataRange[1]<=imageDataRange[0])
1839    }
1840
1841    private Object getAttrValue(long oid, String aname) {
1842        log.trace("getAttrValue(): start: name={}", aname);
1843
1844        long aid = HDF5Constants.H5I_INVALID_HID;
1845        long atid = HDF5Constants.H5I_INVALID_HID;
1846        long asid = HDF5Constants.H5I_INVALID_HID;
1847        Object avalue = null;
1848
1849        try {
1850            // try to find attribute name
1851            if(H5.H5Aexists_by_name(oid, ".", aname, HDF5Constants.H5P_DEFAULT))
1852                aid = H5.H5Aopen_by_name(oid, ".", aname, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
1853        }
1854        catch (HDF5LibraryException ex5) {
1855            log.debug("getAttrValue(): Failed to find attribute {} : Expected", aname);
1856        }
1857        catch (Exception ex) {
1858            log.debug("getAttrValue(): try to find attribute {}:", aname, ex);
1859        }
1860        if (aid > 0) {
1861            try {
1862                atid = H5.H5Aget_type(aid);
1863                long tmptid = atid;
1864                atid = H5.H5Tget_native_type(tmptid);
1865                try {
1866                    H5.H5Tclose(tmptid);
1867                }
1868                catch (Exception ex) {
1869                    log.debug("getAttrValue(): H5Tclose(tmptid {}) failure: ", tmptid, ex);
1870                }
1871
1872                asid = H5.H5Aget_space(aid);
1873                long adims[] = null;
1874
1875                int arank = H5.H5Sget_simple_extent_ndims(asid);
1876                if (arank > 0) {
1877                    adims = new long[arank];
1878                    H5.H5Sget_simple_extent_dims(asid, adims, null);
1879                }
1880                log.trace("getAttrValue(): adims={}", adims);
1881
1882                // retrieve the attribute value
1883                long lsize = 1;
1884                if (adims != null) {
1885                    for (int j = 0; j < adims.length; j++) {
1886                        lsize *= adims[j];
1887                    }
1888                }
1889                log.trace("getAttrValue(): lsize={}", lsize);
1890
1891                if (lsize < Integer.MIN_VALUE || lsize > Integer.MAX_VALUE) throw new Exception("Invalid int size");
1892
1893                H5Datatype dsDatatype = null;
1894                int nativeClass = H5.H5Tget_class(atid);
1895                if (nativeClass == HDF5Constants.H5T_REFERENCE)
1896                    dsDatatype = new H5ReferenceType(getFileFormat(), lsize, atid);
1897                else
1898                    dsDatatype = new H5Datatype(getFileFormat(), atid);
1899
1900                try {
1901                    avalue = H5Datatype.allocateArray(dsDatatype, (int) lsize);
1902                }
1903                catch (OutOfMemoryError e) {
1904                    log.debug("getAttrValue(): out of memory: ", e);
1905                    avalue = null;
1906                }
1907
1908                if (avalue != null) {
1909                    log.trace("getAttrValue(): read attribute id {} of size={}", atid, lsize);
1910                    H5.H5Aread(aid, atid, avalue);
1911
1912                    if (dsDatatype.isUnsigned()) {
1913                        log.trace("getAttrValue(): id {} is unsigned", atid);
1914                        avalue = convertFromUnsignedC(avalue, null);
1915                    }
1916                    if (dsDatatype.isRef() && (avalue instanceof byte[]))
1917                        ((H5ReferenceType)dsDatatype).setData((ArrayList<byte[]>)avalue);
1918                    else if (dsDatatype.isRef())
1919                        ((H5ReferenceType)dsDatatype).setData(avalue);
1920                }
1921            }
1922            catch (Exception ex) {
1923                log.debug("getAttrValue(): try to get value for attribute {}: ", aname, ex);
1924            }
1925            finally {
1926                try {
1927                    H5.H5Tclose(atid);
1928                }
1929                catch (HDF5Exception ex) {
1930                    log.debug("getAttrValue(): H5Tclose(atid {}) failure: ", atid, ex);
1931                }
1932                try {
1933                    H5.H5Sclose(asid);
1934                }
1935                catch (HDF5Exception ex) {
1936                    log.debug("getAttrValue(): H5Sclose(asid {}) failure: ", asid, ex);
1937                }
1938                try {
1939                    H5.H5Aclose(aid);
1940                }
1941                catch (HDF5Exception ex) {
1942                    log.debug("getAttrValue(): H5Aclose(aid {}) failure: ", aid, ex);
1943                }
1944            }
1945        } // (aid > 0)
1946
1947        return avalue;
1948    }
1949
1950    private boolean isStringAttributeOf(long objID, String name, String value) {
1951        boolean retValue = false;
1952        long aid = HDF5Constants.H5I_INVALID_HID;
1953        long atid = HDF5Constants.H5I_INVALID_HID;
1954
1955        try {
1956            if (H5.H5Aexists_by_name(objID, ".", name, HDF5Constants.H5P_DEFAULT)) {
1957                aid = H5.H5Aopen_by_name(objID, ".", name, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
1958                atid = H5.H5Aget_type(aid);
1959                int size = (int)H5.H5Tget_size(atid);
1960                byte[] attrValue = new byte[size];
1961                H5.H5Aread(aid, atid, attrValue);
1962                String strValue = new String(attrValue).trim();
1963                retValue = strValue.equalsIgnoreCase(value);
1964            }
1965        }
1966        catch (Exception ex) {
1967            log.debug("isStringAttributeOf(): try to find out interlace mode:", ex);
1968        }
1969        finally {
1970            try {
1971                H5.H5Tclose(atid);
1972            }
1973            catch (HDF5Exception ex) {
1974                log.debug("isStringAttributeOf(): H5Tclose(atid {}) failure: ", atid, ex);
1975            }
1976            try {
1977                H5.H5Aclose(aid);
1978            }
1979            catch (HDF5Exception ex) {
1980                log.debug("isStringAttributeOf(): H5Aclose(aid {}) failure: ", aid, ex);
1981            }
1982        }
1983
1984        return retValue;
1985    }
1986
1987    /*
1988     * (non-Javadoc)
1989     *
1990     * @see hdf.object.Dataset#copy(hdf.object.Group, java.lang.String, long[], java.lang.Object)
1991     */
1992    @Override
1993    public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff) throws Exception {
1994        // must give a location to copy
1995        if (pgroup == null) {
1996            log.debug("copy(): Parent group is null");
1997            return null;
1998        }
1999
2000        Dataset dataset = null;
2001        long srcdid = HDF5Constants.H5I_INVALID_HID;
2002        long dstdid = HDF5Constants.H5I_INVALID_HID;
2003        long plist = HDF5Constants.H5I_INVALID_HID;
2004        long tid = HDF5Constants.H5I_INVALID_HID;
2005        long sid = HDF5Constants.H5I_INVALID_HID;
2006        String dname = null;
2007        String path = null;
2008
2009        if (pgroup.isRoot())
2010            path = HObject.SEPARATOR;
2011        else
2012            path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR;
2013        dname = path + dstName;
2014
2015        srcdid = open();
2016        if (srcdid >= 0) {
2017            try {
2018                tid = H5.H5Dget_type(srcdid);
2019                sid = H5.H5Screate_simple(dims.length, dims, null);
2020                plist = H5.H5Dget_create_plist(srcdid);
2021
2022                long[] chunks = new long[dims.length];
2023                boolean setChunkFlag = false;
2024                try {
2025                    H5.H5Pget_chunk(plist, dims.length, chunks);
2026                    for (int i = 0; i < dims.length; i++) {
2027                        if (dims[i] < chunks[i]) {
2028                            setChunkFlag = true;
2029                            if (dims[i] == 1)
2030                                chunks[i] = 1;
2031                            else
2032                                chunks[i] = dims[i] / 2;
2033                        }
2034                    }
2035                }
2036                catch (Exception ex) {
2037                    log.debug("copy(): chunk: ", ex);
2038                }
2039
2040                if (setChunkFlag)
2041                    H5.H5Pset_chunk(plist, dims.length, chunks);
2042
2043                try {
2044                    dstdid = H5.H5Dcreate(pgroup.getFID(), dname, tid, sid, HDF5Constants.H5P_DEFAULT, plist,
2045                            HDF5Constants.H5P_DEFAULT);
2046                }
2047                catch (Exception e) {
2048                    log.debug("copy(): H5Dcreate: ", e);
2049                }
2050                finally {
2051                    try {
2052                        H5.H5Dclose(dstdid);
2053                    }
2054                    catch (Exception ex2) {
2055                        log.debug("copy(): H5Dclose(dstdid {}) failure: ", dstdid, ex2);
2056                    }
2057                }
2058
2059                dataset = new H5ScalarDS(pgroup.getFileFormat(), dstName, path);
2060                if (buff != null) {
2061                    dataset.init();
2062                    dataset.write(buff);
2063                }
2064
2065                dstdid = dataset.open();
2066                if (dstdid >= 0) {
2067                    try {
2068                        H5File.copyAttributes(srcdid, dstdid);
2069                    }
2070                    finally {
2071                        try {
2072                            H5.H5Dclose(dstdid);
2073                        }
2074                        catch (Exception ex) {
2075                            log.debug("copy(): H5Dclose(dstdid {}) failure: ", dstdid, ex);
2076                        }
2077                    }
2078                }
2079            }
2080            finally {
2081                try {
2082                    H5.H5Pclose(plist);
2083                }
2084                catch (Exception ex) {
2085                    log.debug("copy(): H5Pclose(plist {}) failure: ", plist, ex);
2086                }
2087                try {
2088                    H5.H5Sclose(sid);
2089                }
2090                catch (Exception ex) {
2091                    log.debug("copy(): H5Sclose(sid {}) failure: ", sid, ex);
2092                }
2093                try {
2094                    H5.H5Tclose(tid);
2095                }
2096                catch (Exception ex) {
2097                    log.debug("copy(): H5Tclose(tid {}) failure: ", tid, ex);
2098                }
2099                try {
2100                    H5.H5Dclose(srcdid);
2101                }
2102                catch (Exception ex) {
2103                    log.debug("copy(): H5Dclose(srcdid {}) failure: ", srcdid, ex);
2104                }
2105            }
2106        }
2107
2108        pgroup.addToMemberList(dataset);
2109
2110        if (dataset != null)
2111            ((ScalarDS) dataset).setIsImage(isImage);
2112
2113        return dataset;
2114    }
2115
2116    /**
2117     * Get the number of pallettes for this object.
2118     *
2119     * @return the number of palettes if it has any, 0 otherwise.
2120     */
2121    public int getNumberOfPalettes() {
2122        return NumberOfPalettes;
2123    }
2124
2125    /*
2126     * (non-Javadoc)
2127     *
2128     * @see hdf.object.ScalarDS#getPalette()
2129     */
2130    @Override
2131    public byte[][] getPalette() {
2132        log.trace("getPalette(): NumberOfPalettes={}", NumberOfPalettes);
2133        if (NumberOfPalettes > 0)
2134            if (palette == null)
2135                palette = readPalette(0);
2136
2137        return palette;
2138    }
2139
2140    /*
2141     * (non-Javadoc)
2142     *
2143     * @see hdf.object.ScalarDS#getPaletteName(int)
2144     */
2145    @Override
2146    public String getPaletteName(int idx) {
2147        int count = readNumberOfPalettes();
2148        long did = HDF5Constants.H5I_INVALID_HID;
2149        long palID = HDF5Constants.H5I_INVALID_HID;
2150        String paletteName = null;
2151
2152        if (count < 1) {
2153            log.debug("getPaletteName(): no palettes are attached");
2154            return null;
2155        }
2156
2157        byte[][] refBuf = null;
2158
2159        did = open();
2160        if (did >= 0) {
2161            try {
2162                refBuf = getPaletteRefs(did);
2163                palID = H5.H5Ropen_object(refBuf[idx], HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
2164                paletteName = H5.H5Iget_name(palID);
2165            }
2166            catch (Exception ex) {
2167                ex.printStackTrace();
2168            }
2169            finally {
2170                close(palID);
2171                for (int i = 0; i < count; i++)
2172                    H5.H5Rdestroy(refBuf[i]);
2173                close(did);
2174            }
2175        }
2176
2177        return paletteName;
2178    }
2179
2180    /*
2181     * (non-Javadoc)
2182     *
2183     * @see hdf.object.ScalarDS#readPalette(int)
2184     */
2185    @Override
2186    public byte[][] readPalette(int idx) {
2187        byte[][] thePalette = null;
2188        int count = readNumberOfPalettes();
2189        long did = HDF5Constants.H5I_INVALID_HID;
2190        long palID = HDF5Constants.H5I_INVALID_HID;
2191        long tid = HDF5Constants.H5I_INVALID_HID;
2192        log.trace("readPalette(): palette count={}", count);
2193
2194        if (count < 1) {
2195            log.debug("readPalette(): no palettes are attached");
2196            return null;
2197        }
2198
2199        byte[] p = null;
2200        byte[][] refBuf = null;
2201
2202        did = open();
2203        if (did >= 0) {
2204            try {
2205                refBuf = getPaletteRefs(did);
2206                palID = H5.H5Ropen_object(refBuf[idx], HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
2207                log.trace("readPalette(): H5Ropen_object: {}", palID);
2208                tid = H5.H5Dget_type(palID);
2209
2210                // support only 3*256 byte palette data
2211                if (H5.H5Dget_storage_size(palID) <= 768) {
2212                    p = new byte[3 * 256];
2213                    H5.H5Dread(palID, tid, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, p);
2214                }
2215            }
2216            catch (HDF5Exception ex) {
2217                log.debug("readPalette(): failure: ", ex);
2218                p = null;
2219            }
2220            finally {
2221                try {
2222                    H5.H5Tclose(tid);
2223                }
2224                catch (HDF5Exception ex2) {
2225                    log.debug("readPalette(): H5Tclose(tid {}) failure: ", tid, ex2);
2226                }
2227                close(palID);
2228                for (int i = 0; i < count; i++)
2229                    H5.H5Rdestroy(refBuf[i]);
2230                close(did);
2231            }
2232        }
2233
2234        if (p != null) {
2235            thePalette = new byte[3][256];
2236            for (int i = 0; i < 256; i++) {
2237                thePalette[0][i] = p[i * 3];
2238                thePalette[1][i] = p[i * 3 + 1];
2239                thePalette[2][i] = p[i * 3 + 2];
2240            }
2241        }
2242
2243        return thePalette;
2244    }
2245
2246    private static byte[] parseFillValue(Datatype type, Object fillValue) throws Exception {
2247        byte[] data = null;
2248
2249        if (type == null || fillValue == null) {
2250            log.debug("parseFillValue(): datatype or fill value is null");
2251            return null;
2252        }
2253
2254        int datatypeClass = type.getDatatypeClass();
2255        int datatypeSize = (int)type.getDatatypeSize();
2256
2257        double valDbl = 0;
2258        String valStr = null;
2259
2260        if (fillValue instanceof String)
2261            valStr = (String) fillValue;
2262        else if (fillValue.getClass().isArray())
2263            valStr = Array.get(fillValue, 0).toString();
2264
2265        if (!type.isString()) {
2266            try {
2267                valDbl = Double.parseDouble(valStr);
2268            }
2269            catch (NumberFormatException ex) {
2270                log.debug("parseFillValue(): parse error: ", ex);
2271                return null;
2272            }
2273        }
2274
2275        try {
2276            switch (datatypeClass) {
2277            case Datatype.CLASS_INTEGER:
2278            case Datatype.CLASS_ENUM:
2279            case Datatype.CLASS_CHAR:
2280                log.trace("parseFillValue(): class CLASS_INT-ENUM-CHAR");
2281                if (datatypeSize == 1)
2282                    data = new byte[] { (byte) valDbl };
2283                else if (datatypeSize == 2)
2284                    data = HDFNativeData.shortToByte((short) valDbl);
2285                else if (datatypeSize == 8)
2286                    data = HDFNativeData.longToByte((long) valDbl);
2287                else
2288                    data = HDFNativeData.intToByte((int) valDbl);
2289                break;
2290            case Datatype.CLASS_FLOAT:
2291                log.trace("parseFillValue(): class CLASS_FLOAT");
2292                if (datatypeSize > 8)
2293                    data =  valStr.getBytes();
2294                else if (datatypeSize == 8)
2295                    data = HDFNativeData.doubleToByte(valDbl);
2296                else
2297                    data = HDFNativeData.floatToByte((float) valDbl);
2298                break;
2299            case Datatype.CLASS_STRING:
2300                log.trace("parseFillValue(): class CLASS_STRING");
2301                if (valStr != null)
2302                    data = valStr.getBytes();
2303                break;
2304            case Datatype.CLASS_REFERENCE:
2305                log.trace("parseFillValue(): class CLASS_REFERENCE");
2306                data = HDFNativeData.longToByte((long) valDbl);
2307                break;
2308            default:
2309                log.debug("parseFillValue(): datatypeClass unknown");
2310                break;
2311            } // (datatypeClass)
2312        }
2313        catch (Exception ex) {
2314            log.debug("parseFillValue(): failure: ", ex);
2315            data = null;
2316        }
2317
2318        return data;
2319    }
2320
2321    /**
2322     * reads references of palettes to count the numberOfPalettes.
2323     *
2324     * @return the number of palettes referenced.
2325     */
2326    public int readNumberOfPalettes() {
2327        log.trace("readNumberOfPalettes(): isInited={}", isInited());
2328        if (!isInited())
2329            init(); // init will be called to get refs
2330
2331        return NumberOfPalettes;
2332    }
2333
2334    /**
2335     * reads references of palettes to calculate the numberOfPalettes.
2336     */
2337    private int readNumberOfPalette(long did) {
2338        long aid = HDF5Constants.H5I_INVALID_HID;
2339        long sid = HDF5Constants.H5I_INVALID_HID;
2340        long atype = HDF5Constants.H5I_INVALID_HID;
2341        int size = 0;
2342        int rank = 0;
2343        byte[] refbuf = null;
2344        log.trace("readNumberOfPalette(): did={}", did);
2345
2346        try {
2347            if(H5.H5Aexists_by_name(did, ".", "PALETTE", HDF5Constants.H5P_DEFAULT)) {
2348                aid = H5.H5Aopen_by_name(did, ".", "PALETTE", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
2349                sid = H5.H5Aget_space(aid);
2350                rank = H5.H5Sget_simple_extent_ndims(sid);
2351                size = 1;
2352                if (rank > 0) {
2353                    long[] dims = new long[rank];
2354                    H5.H5Sget_simple_extent_dims(sid, dims, null);
2355                    log.trace("readNumberOfPalette(): rank={}, dims={}", rank, dims);
2356                    for (int i = 0; i < rank; i++)
2357                        size *= (int) dims[i];
2358                }
2359                log.trace("readNumberOfPalette(): size={}", size);
2360
2361                if ((size * HDF5Constants.H5R_REF_BUF_SIZE) < Integer.MIN_VALUE || (size * HDF5Constants.H5R_REF_BUF_SIZE) > Integer.MAX_VALUE)
2362                    throw new HDF5Exception("Invalid int size");
2363            }
2364        }
2365        catch (HDF5Exception ex) {
2366            log.debug("readNumberOfPalette(): Palette attribute search failed: Expected", ex);
2367            refbuf = null;
2368        }
2369        finally {
2370            try {
2371                H5.H5Tclose(atype);
2372            }
2373            catch (HDF5Exception ex2) {
2374                log.debug("readNumberOfPalette(): H5Tclose(atype {}) failure: ", atype, ex2);
2375            }
2376            try {
2377                H5.H5Sclose(sid);
2378            }
2379            catch (HDF5Exception ex2) {
2380                log.debug("readNumberOfPalette(): H5Sclose(sid {}) failure: ", sid, ex2);
2381            }
2382            try {
2383                H5.H5Aclose(aid);
2384            }
2385            catch (HDF5Exception ex2) {
2386                log.debug("readNumberOfPalette(): H5Aclose(aid {}) failure: ", aid, ex2);
2387            }
2388        }
2389
2390        return size;
2391    }
2392
2393    /**
2394     * reads references of palettes into a byte array Each reference requires eight bytes storage. Therefore, the array
2395     * length is 8*numberOfPalettes.
2396     */
2397    private byte[][] getPaletteRefs(long did) {
2398        long aid = HDF5Constants.H5I_INVALID_HID;
2399        long sid = HDF5Constants.H5I_INVALID_HID;
2400        long atype = HDF5Constants.H5I_INVALID_HID;
2401        int size = 0;
2402        int rank = 0;
2403        byte[][] refBuf = null;
2404        log.trace("getPaletteRefs(): did={}", did);
2405
2406        try {
2407            if(H5.H5Aexists_by_name(did, ".", "PALETTE", HDF5Constants.H5P_DEFAULT)) {
2408                aid = H5.H5Aopen_by_name(did, ".", "PALETTE", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
2409                sid = H5.H5Aget_space(aid);
2410                rank = H5.H5Sget_simple_extent_ndims(sid);
2411                size = 1;
2412                if (rank > 0) {
2413                    long[] dims = new long[rank];
2414                    H5.H5Sget_simple_extent_dims(sid, dims, null);
2415                    log.trace("getPaletteRefs(): rank={}, dims={}", rank, dims);
2416                    for (int i = 0; i < rank; i++)
2417                        size *= (int) dims[i];
2418                }
2419                log.trace("getPaletteRefs(): size={}", size);
2420
2421                if ((size * HDF5Constants.H5R_REF_BUF_SIZE) < Integer.MIN_VALUE || (size * HDF5Constants.H5R_REF_BUF_SIZE) > Integer.MAX_VALUE)
2422                    throw new HDF5Exception("Invalid int size");
2423                refBuf = new byte[size][HDF5Constants.H5R_REF_BUF_SIZE];
2424
2425                H5.H5Aread(aid, HDF5Constants.H5T_STD_REF, refBuf);
2426            }
2427        }
2428        catch (HDF5Exception ex) {
2429            log.debug("getPaletteRefs(): Palette attribute search failed: Expected", ex);
2430            refBuf = null;
2431        }
2432        finally {
2433            try {
2434                H5.H5Sclose(sid);
2435            }
2436            catch (HDF5Exception ex2) {
2437                log.debug("getPaletteRefs(): H5Sclose(sid {}) failure: ", sid, ex2);
2438            }
2439            try {
2440                H5.H5Aclose(aid);
2441            }
2442            catch (HDF5Exception ex2) {
2443                log.debug("getPaletteRefs(): H5Aclose(aid {}) failure: ", aid, ex2);
2444            }
2445        }
2446
2447        return refBuf;
2448    }
2449
2450    /**
2451     * H5Dset_extent verifies that the dataset is at least of size size, extending it if necessary. The dimensionality
2452     * of size is the same as that of the dataspace of the dataset being changed.
2453     *
2454     * This function can be applied to the following datasets: 1) Any dataset with unlimited dimensions 2) A dataset
2455     * with fixed dimensions if the current dimension sizes are less than the maximum sizes set with maxdims (see
2456     * H5Screate_simple)
2457     *
2458     * @param newDims the dimension target size
2459     *
2460     * @throws HDF5Exception
2461     *             If there is an error at the HDF5 library level.
2462     */
2463    public void extend(long[] newDims) throws HDF5Exception {
2464        long did = HDF5Constants.H5I_INVALID_HID;
2465        long sid = HDF5Constants.H5I_INVALID_HID;
2466
2467        did = open();
2468        if (did >= 0) {
2469            try {
2470                H5.H5Dset_extent(did, newDims);
2471                H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_GLOBAL);
2472                sid = H5.H5Dget_space(did);
2473                long[] checkDims = new long[rank];
2474                H5.H5Sget_simple_extent_dims(sid, checkDims, null);
2475                log.trace("extend(): rank={}, checkDims={}", rank, checkDims);
2476                for (int i = 0; i < rank; i++) {
2477                    if (checkDims[i] != newDims[i]) {
2478                        log.debug("extend(): error extending dataset");
2479                        throw new HDF5Exception("error extending dataset " + getName());
2480                    }
2481                }
2482                dims = checkDims;
2483            }
2484            catch (Exception e) {
2485                log.debug("extend(): failure: ", e);
2486                throw new HDF5Exception(e.getMessage());
2487            }
2488            finally {
2489                if (sid > 0)
2490                    H5.H5Sclose(sid);
2491
2492                close(did);
2493            }
2494        }
2495    }
2496
2497    /*
2498     * (non-Javadoc)
2499     *
2500     * @see hdf.object.Dataset#isVirtual()
2501     */
2502    @Override
2503    public boolean isVirtual() {
2504        return isVirtual;
2505    }
2506
2507    /*
2508     * (non-Javadoc)
2509     *
2510     * @see hdf.object.Dataset#getVirtualFilename(int)
2511     */
2512    @Override
2513    public String getVirtualFilename(int index) {
2514        if(isVirtual)
2515            return virtualNameList.get(index);
2516        else
2517            return null;
2518    }
2519
2520    /*
2521     * (non-Javadoc)
2522     *
2523     * @see hdf.object.Dataset#getVirtualMaps()
2524     */
2525    @Override
2526    public int getVirtualMaps() {
2527        if(isVirtual)
2528            return virtualNameList.size();
2529        else
2530            return -1;
2531    }
2532
2533    /*
2534     * (non-Javadoc)
2535     *
2536     * @see hdf.object.Dataset#toString(String delimiter, int maxItems)
2537     */
2538    @Override
2539    public String toString(String delimiter, int maxItems) {
2540        Object theData = originalBuf;
2541        if (theData == null) {
2542            log.debug("toString: value is null");
2543            return null;
2544        }
2545
2546        if (theData instanceof List<?>) {
2547            log.trace("toString: value is list");
2548            return null;
2549        }
2550
2551        Class<? extends Object> valClass = theData.getClass();
2552
2553        if (!valClass.isArray()) {
2554            log.trace("toString: finish - not array");
2555            String strValue = theData.toString();
2556            if (maxItems > 0 && strValue.length() > maxItems)
2557                // truncate the extra characters
2558                strValue = strValue.substring(0, maxItems);
2559            return strValue;
2560        }
2561
2562        // value is an array
2563        StringBuilder sb = new StringBuilder();
2564        long lsize = 1;
2565        for (int j = 0; j < dims.length; j++)
2566            lsize *= dims[j];
2567
2568        log.trace("toString: isStdRef={} Array.getLength={}", ((H5Datatype) getDatatype()).isStdRef(), Array.getLength(theData));
2569        if (((H5Datatype) getDatatype()).isStdRef()) {
2570            String cname = valClass.getName();
2571            char dname = cname.charAt(cname.lastIndexOf('[') + 1);
2572            log.trace("toString: isStdRef with cname={} dname={}", cname, dname);
2573            for (int i = 0; i < (int)lsize; i++) {
2574                int refIndex = HDF5Constants.H5R_REF_BUF_SIZE * i;
2575                byte[] refarr = new byte[(int) HDF5Constants.H5R_REF_BUF_SIZE];
2576                System.arraycopy(theData, refIndex, refarr, 0, (int)HDF5Constants.H5R_REF_BUF_SIZE);
2577                String ref_str = ((H5ReferenceType) getDatatype()).getReferenceRegion(refarr, false);
2578                log.trace("toString: ref_str[{}]={}", i, ref_str);
2579                if (i > 0)
2580                    sb.append(", ");
2581                sb.append(ref_str);
2582
2583//                int n = ref_str.length();
2584//                if (maxItems > 0) {
2585//                    if (n > maxItems)
2586//                        break;
2587//                    else
2588//                        maxItems -= n;
2589//                }
2590            }
2591            return sb.toString();
2592        }
2593        return super.toString(delimiter, maxItems);
2594    }
2595
2596}