/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the COPYING file, which can be found  *
 * at the root of the source code distribution tree,                         *
 * or in https://www.hdfgroup.org/licenses.                                  *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.h5;

import java.lang.reflect.Array;
import java.math.BigDecimal;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.Vector;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
import hdf.hdf5lib.HDFArray;
import hdf.hdf5lib.HDFNativeData;
import hdf.hdf5lib.exceptions.HDF5DataFiltersException;
import hdf.hdf5lib.exceptions.HDF5Exception;
import hdf.hdf5lib.exceptions.HDF5LibraryException;
import hdf.hdf5lib.structs.H5O_info_t;
import hdf.hdf5lib.structs.H5O_token_t;

import hdf.object.Attribute;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.MetaDataContainer;
import hdf.object.ScalarDS;

import hdf.object.h5.H5Attribute;
import hdf.object.h5.H5MetaDataContainer;
import hdf.object.h5.H5ReferenceType;

/**
 * H5ScalarDS describes a multi-dimensional array of HDF5 scalar or atomic datatypes, such as byte, int, short, long,
 * float, double and string, and the operations performed on the scalar dataset.
 *
 * The library predefines a modest number of datatypes. For details, see
 * <a href="https://hdfgroup.github.io/hdf5/_h5_t__u_g.html#sec_datatype">HDF5 Datatypes in the HDF5 User Guide</a>.
 *
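 * A minimal usage sketch is shown below. The file name "test.h5" and the dataset path "/arrays/dset" are
 * illustrative placeholders, not part of this API:
 *
 * <pre>
 * H5File file = new H5File("test.h5", FileFormat.READ);
 * file.open();
 * H5ScalarDS dset = (H5ScalarDS) file.get("/arrays/dset");
 * dset.init();                  // load datatype and dataspace information
 * Object data = dset.getData(); // read the current selection into memory
 * file.close();
 * </pre>
 *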
 * @version 1.1 9/4/2007
 * @author Peter X. Cao
 */
public class H5ScalarDS extends ScalarDS implements MetaDataContainer
{
    private static final long serialVersionUID = 2887517608230611642L;

    private static final Logger log = LoggerFactory.getLogger(H5ScalarDS.class);

    /**
     * The metadata object for this data object. Members of the metadata are instances of Attribute.
     */
    private H5MetaDataContainer objMetadata;

    /** the object properties */
    private H5O_info_t objInfo;

    /** the number of palettes */
    private int NumberOfPalettes;

    /** flag to indicate if the dataset is an external dataset */
    private boolean isExternal = false;

    /** flag to indicate if the dataset is a virtual dataset */
    private boolean isVirtual = false;
    /** the list of virtual names */
    private List<String> virtualNameList;

    /**
     * flag to indicate if the dataset buffers should be refreshed.
     */
    protected boolean refresh = false;

    /**
     * flag to indicate if the datatype in the file is the same as the datatype in memory
     */
    protected boolean isNativeDatatype = false;

    /**
     * Constructs an instance of an H5 scalar dataset with a given file, dataset name and path.
     *
     * For example, in H5ScalarDS(h5file, "dset", "/arrays/"), "dset" is the name of the dataset and "/arrays/" is the
     * group path of the dataset.
     *
     * @param theFile
     *            the file that contains the data object.
     * @param theName
     *            the name of the data object, e.g. "dset".
     * @param thePath
     *            the full path of the data object, e.g. "/arrays/".
     */
    public H5ScalarDS(FileFormat theFile, String theName, String thePath) {
        this(theFile, theName, thePath, null);
    }

    /**
     * @deprecated Not for public use in the future.<br>
     *             Use {@link #H5ScalarDS(FileFormat, String, String)} instead.
     *
     * @param theFile
     *            the file that contains the data object.
     * @param theName
     *            the name of the data object, e.g. "dset".
     * @param thePath
     *            the full path of the data object, e.g. "/arrays/".
     * @param oid
     *            the oid of the data object.
     */
    @Deprecated
    public H5ScalarDS(FileFormat theFile, String theName, String thePath, long[] oid) {
        super(theFile, theName, thePath, oid);
        unsignedConverted = false;
        NumberOfPalettes = 0;
        objMetadata = new H5MetaDataContainer(theFile, theName, thePath, this);

        if (theFile != null) {
            if (oid == null) {
                // retrieve the object ID
                byte[] refBuf = null;
                try {
                    refBuf = H5.H5Rcreate_object(theFile.getFID(), this.getFullName(), HDF5Constants.H5P_DEFAULT);
                    this.oid = HDFNativeData.byteToLong(refBuf);
                    log.trace("constructor REF {} to OID {}", refBuf, this.oid);
                }
                catch (Exception ex) {
                    log.debug("constructor ID {} for {} failed H5Rcreate_object", theFile.getFID(), this.getFullName());
                }
                finally {
                    if (refBuf != null)
                        H5.H5Rdestroy(refBuf);
                }
            }
            log.trace("constructor OID {}", this.oid);
            try {
                objInfo = H5.H5Oget_info_by_name(theFile.getFID(), this.getFullName(), HDF5Constants.H5O_INFO_BASIC, HDF5Constants.H5P_DEFAULT);
            }
            catch (Exception ex) {
                objInfo = new H5O_info_t(-1L, null, 0, 0, 0L, 0L, 0L, 0L, 0L);
            }
        }
        else {
            this.oid = null;
            objInfo = new H5O_info_t(-1L, null, 0, 0, 0L, 0L, 0L, 0L, 0L);
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#open()
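     *
     * The caller owns the returned dataset identifier and must release it with close(did).
     * A minimal sketch of the open/close pairing this class itself uses:
     *
     *   long did = dset.open();
     *   if (did >= 0) {
     *       try {
     *           // ... H5.H5D* calls using did ...
     *       }
     *       finally {
     *           dset.close(did);
     *       }
     *   }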
     */
    @Override
    public long open() {
        long did = HDF5Constants.H5I_INVALID_HID;

        if (getFID() < 0)
            log.trace("open(): file id for:{} is invalid", getPath() + getName());
        else {
            try {
                did = H5.H5Dopen(getFID(), getPath() + getName(), HDF5Constants.H5P_DEFAULT);
                log.trace("open(): did={}", did);
            }
            catch (HDF5Exception ex) {
                log.debug("open(): Failed to open dataset {}", getPath() + getName(), ex);
                did = HDF5Constants.H5I_INVALID_HID;
            }
        }

        return did;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#close(int)
     */
    @Override
    public void close(long did) {
        if (did >= 0) {
            try {
                H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_LOCAL);
            }
            catch (Exception ex) {
                log.debug("close(): H5Fflush(did {}) failure: ", did, ex);
            }
            try {
                H5.H5Dclose(did);
            }
            catch (HDF5Exception ex) {
                log.debug("close(): H5Dclose(did {}) failure: ", did, ex);
            }
        }
    }

    /**
     * Retrieves datatype and dataspace information from file and sets the dataset
     * in memory.
     *
     * init() is designed to support lazy operation in a dataset object. When a
     * data object is retrieved from file, the datatype, dataspace and raw data are
     * not loaded into memory. When it is asked to read the raw data from file,
     * init() is first called to get the datatype and dataspace information, then
     * the raw data is loaded from file.
     *
     * init() is also used to reset the selection of a dataset (start, stride and
     * count) to the default, which is the entire dataset for 1D or 2D datasets. In
     * the following example, init() at step 1) retrieves datatype and dataspace
     * information from file. getData() at step 3) reads only one data point. init()
     * at step 4) resets the selection to the whole dataset. getData() at step 6)
     * reads the values of the whole dataset into memory.
     *
     * <pre>
     * dset = (Dataset) file.get(NAME_DATASET);
     *
     * // 1) get datatype and dataspace information from file
     * dset.init();
     * rank = dset.getRank(); // rank = 2, a 2D dataset
     * count = dset.getSelectedDims();
     * start = dset.getStartDims();
     * dims = dset.getDims();
     *
     * // 2) select only one data point
     * for (int i = 0; i &lt; rank; i++) {
     *     start[i] = 0;
     *     count[i] = 1;
     * }
     *
     * // 3) read one data point
     * data = dset.getData();
     *
     * // 4) reset selection to the whole dataset
     * dset.init();
     *
     * // 5) clean the memory data buffer
     * dset.clearData();
     *
     * // 6) read the whole dataset
     * data = dset.getData();
     * </pre>
     */
    @Override
    public void init() {
        if (inited) {
            // already called. Initialize only once
            resetSelection();
            log.trace("init(): H5ScalarDS already initialized");
            return;
        }

        long did = HDF5Constants.H5I_INVALID_HID;
        long tid = HDF5Constants.H5I_INVALID_HID;
        long sid = HDF5Constants.H5I_INVALID_HID;
        long nativeTID = HDF5Constants.H5I_INVALID_HID;

        did = open();
        if (did >= 0) {
            try {
                H5.H5Drefresh(did);
            }
            catch (Exception ex) {
                log.debug("H5Drefresh(): ", ex);
            }
            // check if it is an external or virtual dataset
            long pid = HDF5Constants.H5I_INVALID_HID;
            try {
                pid = H5.H5Dget_create_plist(did);
                try {
                    int nfiles = H5.H5Pget_external_count(pid);
                    isExternal = (nfiles > 0);
                    int layoutType = H5.H5Pget_layout(pid);
                    isVirtual = (layoutType == HDF5Constants.H5D_VIRTUAL);
                    if (isVirtual) {
                        try {
                            long vmaps = H5.H5Pget_virtual_count(pid);
                            if (vmaps > 0) {
                                virtualNameList = new Vector<>();
                                for (long next = 0; next < vmaps; next++) {
                                    try {
                                        String fname = H5.H5Pget_virtual_filename(pid, next);
                                        virtualNameList.add(fname);
                                        log.trace("init(): virtualNameList[{}]={}", next, fname);
                                    }
                                    catch (Exception err) {
                                        log.trace("init(): vds[{}] continue", next);
                                    }
                                }
                            }
                        }
                        catch (Exception err) {
                            log.debug("init(): vds count error: ", err);
                        }
                    }
                    log.trace("init(): pid={} nfiles={} isExternal={} isVirtual={}", pid, nfiles, isExternal, isVirtual);
                }
                catch (Exception ex) {
                    log.debug("init(): check if it is an external or virtual dataset: ", ex);
                }
            }
            catch (Exception ex) {
                log.debug("init(): H5Dget_create_plist() failure: ", ex);
            }
            finally {
                try {
                    H5.H5Pclose(pid);
                }
                catch (Exception ex) {
                    log.debug("init(): H5Pclose(pid {}) failure: ", pid, ex);
                }
            }

            NumberOfPalettes = readNumberOfPalette(did);

            try {
                sid = H5.H5Dget_space(did);
                rank = H5.H5Sget_simple_extent_ndims(sid);
                space_type = H5.H5Sget_simple_extent_type(sid);
                isNULL = (space_type == HDF5Constants.H5S_NULL);
                tid = H5.H5Dget_type(did);
                log.trace("init(): tid={} sid={} rank={} space_type={} ", tid, sid, rank, space_type);

                if (rank == 0) {
                    // a scalar data point
                    isScalar = true;
                    rank = 1;
                    dims = new long[] { 1 };
                    log.trace("init(): rank is a scalar data point");
                }
                else {
                    isScalar = false;
                    dims = new long[rank];
                    maxDims = new long[rank];
                    H5.H5Sget_simple_extent_dims(sid, dims, maxDims);
                    log.trace("init(): rank={}, dims={}, maxDims={}", rank, dims, maxDims);
                }

                try {
                    int nativeClass = H5.H5Tget_class(tid);
                    if (nativeClass == HDF5Constants.H5T_REFERENCE) {
                        long lsize = 1;
                        if (rank > 0) {
                            log.trace("init(): rank={}, dims={}", rank, dims);
                            for (int j = 0; j < dims.length; j++) {
                                lsize *= dims[j];
                            }
                        }
                        datatype = new H5ReferenceType(getFileFormat(), lsize, tid);
                    }
                    else
                        datatype = new H5Datatype(getFileFormat(), tid);

                    log.trace("init(): tid={} is tclass={} has isText={} : isNamed={} :  isVLEN={} : isEnum={} : isUnsigned={} : isStdRef={} : isRegRef={}",
                            tid, datatype.getDatatypeClass(), ((H5Datatype) datatype).isText(), datatype.isNamed(), datatype.isVLEN(),
                            datatype.isEnum(), datatype.isUnsigned(), ((H5Datatype) datatype).isStdRef(), ((H5Datatype) datatype).isRegRef());
                }
                catch (Exception ex) {
                    log.debug("init(): failed to create datatype for dataset: ", ex);
                    datatype = null;
                }

                // Check if the datatype in the file is the native datatype
                try {
                    nativeTID = H5.H5Tget_native_type(tid);
                    isNativeDatatype = H5.H5Tequal(tid, nativeTID);
                    log.trace("init(): isNativeDatatype={}", isNativeDatatype);
                }
                catch (Exception ex) {
                    log.debug("init(): check if native type failure: ", ex);
                }

                try {
                    pid = H5.H5Dget_create_plist(did);
                    int[] fillStatus = { 0 };
                    if (H5.H5Pfill_value_defined(pid, fillStatus) >= 0) {
                        // Check if fill value is user-defined before retrieving it.
                        if (fillStatus[0] == HDF5Constants.H5D_FILL_VALUE_USER_DEFINED) {
                            try {
                                fillValue = H5Datatype.allocateArray((H5Datatype) datatype, 1);
                            }
                            catch (OutOfMemoryError e) {
                                log.debug("init(): out of memory: ", e);
                                fillValue = null;
                            }
                            catch (Exception ex) {
                                log.debug("init(): allocate fill value buffer failed: ", ex);
                                fillValue = null;
                            }

                            log.trace("init(): fillValue={}", fillValue);
                            try {
                                H5.H5Pget_fill_value(pid, nativeTID, fillValue);
                                log.trace("init(): H5Pget_fill_value={}", fillValue);
                                if (fillValue != null) {
                                    if (datatype.isUnsigned() && !isFillValueConverted) {
                                        fillValue = ScalarDS.convertFromUnsignedC(fillValue, null);
                                        isFillValueConverted = true;
                                    }

                                    int n = Array.getLength(fillValue);
                                    for (int i = 0; i < n; i++)
                                        addFilteredImageValue((Number) Array.get(fillValue, i));
                                }
                            }
                            catch (Exception ex2) {
                                log.debug("init(): fill value was defined: ", ex2);
                                fillValue = null;
                            }
                        }
                    }
                }
                catch (HDF5Exception ex) {
                    log.debug("init(): check if fill value is defined failure: ", ex);
                }
                finally {
                    try {
                        H5.H5Pclose(pid);
                    }
                    catch (Exception ex) {
                        log.debug("init(): H5Pclose(pid {}) failure: ", pid, ex);
                    }
                }

                inited = true;
            }
            catch (HDF5Exception ex) {
                log.debug("init(): ", ex);
            }
            finally {
                try {
                    H5.H5Tclose(nativeTID);
                }
                catch (Exception ex2) {
                    log.debug("init(): H5Tclose(nativeTID {}) failure: ", nativeTID, ex2);
                }
                try {
                    H5.H5Tclose(tid);
                }
                catch (HDF5Exception ex2) {
                    log.debug("init(): H5Tclose(tid {}) failure: ", tid, ex2);
                }
                try {
                    H5.H5Sclose(sid);
                }
                catch (HDF5Exception ex2) {
                    log.debug("init(): H5Sclose(sid {}) failure: ", sid, ex2);
                }
            }

            // check for the type of image and interlace mode
            // it is a true color image in one of three cases:
            // 1) IMAGE_SUBCLASS = IMAGE_TRUECOLOR,
            // 2) INTERLACE_MODE = INTERLACE_PIXEL,
            // 3) INTERLACE_MODE = INTERLACE_PLANE
            if ((rank >= 3) && isImage) {
                interlace = -1;
                isTrueColor = isStringAttributeOf(did, "IMAGE_SUBCLASS", "IMAGE_TRUECOLOR");

                if (isTrueColor) {
                    interlace = INTERLACE_PIXEL;
                    if (isStringAttributeOf(did, "INTERLACE_MODE", "INTERLACE_PLANE")) {
                        interlace = INTERLACE_PLANE;
                    }
                }
            }

            close(did);

            startDims = new long[rank];
            selectedDims = new long[rank];

            resetSelection();
        }
        else {
            log.debug("init(): failed to open dataset");
        }
        refresh = false;
    }

    /**
     * Get the token for this object.
     *
     * @return the object token, as an array of longs.
     */
    public long[] getToken() {
        H5O_token_t token = objInfo.token;
        return HDFNativeData.byteToLong(token.data);
    }

    /**
     * Check if the object has any attributes attached.
     *
     * @return true if it has any attributes, false otherwise.
     */
    @Override
    public boolean hasAttribute() {
        objInfo.num_attrs = objMetadata.getObjectAttributeSize();

        if (objInfo.num_attrs < 0) {
            long did = open();
            if (did >= 0) {
                objInfo.num_attrs = 0;

                try {
                    objInfo = H5.H5Oget_info(did);

                    if (objInfo.num_attrs > 0) {
                        // test if it is an image
                        // check image
                        Object avalue = getAttrValue(did, "CLASS");
                        if (avalue != null) {
                            try {
                                isImageDisplay = isImage = "IMAGE".equalsIgnoreCase(new String((byte[]) avalue).trim());
                                log.trace("hasAttribute(): isImageDisplay dataset: {} with value = {}", isImageDisplay, avalue);
                            }
                            catch (Exception err) {
                                log.debug("hasAttribute(): check image: ", err);
                            }
                        }

                        // retrieve the IMAGE_MINMAXRANGE
                        avalue = getAttrValue(did, "IMAGE_MINMAXRANGE");
                        if (avalue != null) {
                            double x0 = 0;
                            double x1 = 0;
                            try {
                                x0 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 0).toString());
                                x1 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 1).toString());
                            }
                            catch (Exception ex2) {
                                x0 = x1 = 0;
                            }
                            if (x1 > x0) {
                                imageDataRange = new double[2];
                                imageDataRange[0] = x0;
                                imageDataRange[1] = x1;
                            }
                        }

                        try {
                            checkCFconvention(did);
                        }
                        catch (Exception ex) {
                            log.debug("hasAttribute(): checkCFconvention(did {}):", did, ex);
                        }
                    }
                }
                catch (Exception ex) {
                    objInfo.num_attrs = 0;
                    log.debug("hasAttribute(): get object info failure: ", ex);
                }
                finally {
                    close(did);
                }
                objMetadata.setObjectAttributeSize((int) objInfo.num_attrs);
            }
            else {
                log.debug("hasAttribute(): could not open dataset");
            }
        }

        log.trace("hasAttribute(): nAttributes={}", objInfo.num_attrs);
        return (objInfo.num_attrs > 0);
    }

    /**
     * Returns the datatype of the data object.
     *
     * @return the datatype of the data object.
     */
    @Override
    public Datatype getDatatype() {
        if (!inited)
            init();

        if (datatype == null) {
            long did = HDF5Constants.H5I_INVALID_HID;
            long tid = HDF5Constants.H5I_INVALID_HID;

            did = open();
            if (did >= 0) {
                try {
                    tid = H5.H5Dget_type(did);
                    log.trace("getDatatype(): isNativeDatatype={}", isNativeDatatype);
                    if (!isNativeDatatype) {
                        long tmptid = -1;
                        try {
                            tmptid = H5Datatype.toNative(tid);
                            if (tmptid >= 0) {
                                try {
                                    H5.H5Tclose(tid);
                                }
                                catch (Exception ex2) {
                                    log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex2);
                                }
                                tid = tmptid;
                            }
                        }
                        catch (Exception ex) {
                            log.debug("getDatatype(): toNative: ", ex);
                        }
                    }
                    int nativeClass = H5.H5Tget_class(tid);
                    if (nativeClass == HDF5Constants.H5T_REFERENCE) {
                        long lsize = 1;
                        if (rank > 0) {
                            log.trace("getDatatype(): rank={}, dims={}", rank, dims);
                            for (int j = 0; j < dims.length; j++) {
                                lsize *= dims[j];
                            }
                        }
                        datatype = new H5ReferenceType(getFileFormat(), lsize, tid);
                    }
                    else
                        datatype = new H5Datatype(getFileFormat(), tid);
                }
                catch (Exception ex) {
                    log.debug("getDatatype(): ", ex);
                }
                finally {
                    try {
                        H5.H5Tclose(tid);
                    }
                    catch (HDF5Exception ex) {
                        log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex);
                    }
                    try {
                        H5.H5Dclose(did);
                    }
                    catch (HDF5Exception ex) {
                        log.debug("getDatatype(): H5Dclose(did {}) failure: ", did, ex);
                    }
                }
            }
        }

        if (isExternal) {
            String pdir = this.getFileFormat().getAbsoluteFile().getParent();

            if (pdir == null) {
                pdir = ".";
            }
            System.setProperty("user.dir", pdir);
            log.trace("getDatatype(): External dataset: user.dir={}", pdir);
        }

        return datatype;
    }

    /**
     * Refreshes the dataset before a re-read of the data.
     */
    @Override
    public Object refreshData() {
        inited = false;
        refresh = true;

        init();
        return super.refreshData();
    }

    /**
     * Removes all of the elements from the metadata list.
     * The list should be empty after this call returns.
     */
    @Override
    public void clear() {
        super.clear();
        objMetadata.clear();
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#readBytes()
     */
    @Override
    public byte[] readBytes() throws HDF5Exception {
        byte[] theData = null;

        if (!isInited())
            init();

        long did = open();
        if (did >= 0) {
            long fspace = HDF5Constants.H5I_INVALID_HID;
            long mspace = HDF5Constants.H5I_INVALID_HID;
            long tid = HDF5Constants.H5I_INVALID_HID;

            try {
                long[] lsize = { 1 };
                for (int j = 0; j < selectedDims.length; j++)
                    lsize[0] *= selectedDims[j];

                fspace = H5.H5Dget_space(did);
                mspace = H5.H5Screate_simple(rank, selectedDims, null);

                // set the rectangle selection
                // HDF5 bug: for scalar dataset, H5Sselect_hyperslab gives core dump
                if (rank * dims[0] > 1)
                    H5.H5Sselect_hyperslab(fspace, HDF5Constants.H5S_SELECT_SET, startDims, selectedStride, selectedDims, null); // set block to 1

                tid = H5.H5Dget_type(did);
                long size = H5.H5Tget_size(tid) * lsize[0];
                log.trace("readBytes(): size = {}", size);

                if (size < Integer.MIN_VALUE || size > Integer.MAX_VALUE)
                    throw new Exception("Invalid int size");

                theData = new byte[(int)size];

                log.trace("readBytes(): H5Dread: did={} tid={} fspace={} mspace={}", did, tid, fspace, mspace);
                H5.H5Dread(did, tid, mspace, fspace, HDF5Constants.H5P_DEFAULT, theData);
            }
            catch (Exception ex) {
                log.debug("readBytes(): failed to read data: ", ex);
            }
            finally {
                try {
                    H5.H5Sclose(fspace);
                }
                catch (Exception ex2) {
                    log.debug("readBytes(): H5Sclose(fspace {}) failure: ", fspace, ex2);
                }
                try {
                    H5.H5Sclose(mspace);
                }
                catch (Exception ex2) {
                    log.debug("readBytes(): H5Sclose(mspace {}) failure: ", mspace, ex2);
                }
                try {
                    H5.H5Tclose(tid);
                }
                catch (HDF5Exception ex2) {
                    log.debug("readBytes(): H5Tclose(tid {}) failure: ", tid, ex2);
                }
                close(did);
            }
        }

        return theData;
    }

    /**
     * Reads the data from file.
     *
     * read() reads the data from file to a memory buffer and returns the memory
     * buffer. The dataset object does not hold the memory buffer. To store the
     * memory buffer in the dataset object, one must call getData().
     *
     * By default, the whole dataset is read into memory. Users can also select
     * a subset to read. Subsetting is done in an implicit way.
     *
     * <b>How to Select a Subset</b>
     *
     * A selection is specified by three arrays: start, stride and count.
     * <ol>
     * <li>start: offset of a selection
     * <li>stride: determines how many elements to move in each dimension
     * <li>count: number of elements to select in each dimension
     * </ol>
     * getStartDims(), getStride() and getSelectedDims() return the start,
     * stride and count arrays respectively. Applications can make a selection
     * by changing the values of the arrays.
     *
     * The following example shows how to make a subset. In the example, the
     * dataset is a 4-dimensional array of [200][100][50][10], i.e. dims[0]=200;
     * dims[1]=100; dims[2]=50; dims[3]=10; <br>
     * We want to select every other data point in dims[1] and dims[2]
     *
     * <pre>
     * int rank = dataset.getRank(); // number of dimensions of the dataset
     * long[] dims = dataset.getDims(); // the dimension sizes of the dataset
     * long[] selected = dataset.getSelectedDims(); // the selected size of the
     *                                              // dataset
     * long[] start = dataset.getStartDims(); // the offset of the selection
     * long[] stride = dataset.getStride(); // the stride of the dataset
     * int[] selectedIndex = dataset.getSelectedIndex(); // the selected
     *                                                   // dimensions for
     *                                                   // display
     *
     * // select dim1 and dim2 as 2D data for display, and slice through dim0
     * selectedIndex[0] = 1;
     * selectedIndex[1] = 2;
     * selectedIndex[2] = 0;
     *
     * // reset the selection arrays
     * for (int i = 0; i &lt; rank; i++) {
     *     start[i] = 0;
     *     selected[i] = 1;
     *     stride[i] = 1;
     * }
     *
     * // set stride to 2 on dim1 and dim2 so that every other data point is
     * // selected.
     * stride[1] = 2;
     * stride[2] = 2;
     *
     * // set the selection size of dim1 and dim2
     * selected[1] = dims[1] / stride[1];
     * selected[2] = dims[2] / stride[2];
     *
     * // when dataset.getData() is called, the selection above will be used since
     * // the dimension arrays are passed by reference. Changes of these arrays
     * // outside the dataset object directly change the values of these arrays
     * // in the dataset object.
     * </pre>
     *
     * For ScalarDS, the memory data buffer is a one-dimensional array of byte,
     * short, int, float, double or String type based on the datatype of the
     * dataset.
     *
     * @return the data read from file.
     *
     * @see #getData()
     * @see hdf.object.DataFormat#read()
     *
     * @throws Exception
     *             if the object cannot be read
     */
    @Override
    public Object read() throws Exception {
        Object readData = null;

        if (!isInited())
            init();

        try {
            readData = scalarDatasetCommonIO(H5File.IO_TYPE.READ, null);
        }
        catch (Exception ex) {
            log.debug("read(): failed to read scalar dataset: ", ex);
            throw new Exception("failed to read scalar dataset: " + ex.getMessage(), ex);
        }

        return readData;
    }

    /**
     * Writes the given data buffer into this dataset in a file.
     *
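     * A minimal sketch is shown below; it assumes the dataset holds 32-bit integers and has been
     * initialized, so the buffer returned by getData() matches the current selection:
     *
     * <pre>
     * int[] values = (int[]) dset.getData(); // read the current selection
     * values[0] = 42;                        // modify the buffer in memory
     * dset.write(values);                    // write the buffer back to the file
     * </pre>
     *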
     * @param buf
     *            The buffer that contains the data values.
     *
     * @throws Exception
     *             If there is an error at the HDF5 library level.
     */
    @Override
    public void write(Object buf) throws Exception {
        if (this.getFileFormat().isReadOnly())
            throw new Exception("cannot write to scalar dataset in file opened as read-only");

        if (!isInited())
            init();

        try {
            scalarDatasetCommonIO(H5File.IO_TYPE.WRITE, buf);
        }
        catch (Exception ex) {
            log.debug("write(Object): failed to write to scalar dataset: ", ex);
            throw new Exception("failed to write to scalar dataset: " + ex.getMessage(), ex);
        }
    }

    private Object scalarDatasetCommonIO(H5File.IO_TYPE ioType, Object writeBuf) throws Exception {
        H5Datatype dsDatatype = (H5Datatype) getDatatype();
        H5Datatype dsBaseDatatype = (H5Datatype) getDatatype().getDatatypeBase();
        boolean BDTisRef = false;
        if (dsBaseDatatype != null)
            BDTisRef = dsBaseDatatype.isStdRef();
        Object theData = null;

        /*
         * I/O type-specific pre-initialization.
         */
        if (ioType == H5File.IO_TYPE.WRITE) {
            if (writeBuf == null) {
                log.debug("scalarDatasetCommonIO(): writeBuf is null");
                throw new Exception("write buffer is null");
            }
        }

        long did = open();
        if (did >= 0) {
            long[] spaceIDs = { HDF5Constants.H5I_INVALID_HID, HDF5Constants.H5I_INVALID_HID }; // spaceIDs[0]=mspace, spaceIDs[1]=fspace

            try {
                /*
                 * NOTE: this call sets up a hyperslab selection in the file according to the
                 * current selection in the dataset object.
                 */
                long totalSelectedSpacePoints = H5Utils.getTotalSelectedSpacePoints(did, dims, startDims,
                        selectedStride, selectedDims, spaceIDs);

                if (ioType == H5File.IO_TYPE.READ) {
                    log.trace("scalarDatasetCommonIO():read ioType isNamed={} isEnum={} isText={} isRefObj={}", dsDatatype.isNamed(), dsDatatype.isEnum(), dsDatatype.isText(), dsDatatype.isRefObj());
                    if (dsDatatype.isVarStr()) {
                        try {
                            theData = H5Datatype.allocateArray(dsDatatype, (int)totalSelectedSpacePoints);
                        }
                        catch (OutOfMemoryError err) {
                            log.debug("scalarDatasetCommonIO(): Out of memory");
                            throw new HDF5Exception("Out Of Memory");
                        }
                    }
                    else if (dsDatatype.isVLEN()) {
                        theData = new ArrayList[(int)totalSelectedSpacePoints];
                        for (int j = 0; j < (int)totalSelectedSpacePoints; j++)
                            ((ArrayList[])theData)[j] = new ArrayList<byte[]>();
                    }
                    else if ((originalBuf == null) || dsDatatype.isEnum() || dsDatatype.isText() || dsDatatype.isRefObj()
                            || ((originalBuf != null) && (totalSelectedSpacePoints != nPoints))) {
                        try {
                            theData = H5Datatype.allocateArray(dsDatatype, (int)totalSelectedSpacePoints);
                        }
                        catch (OutOfMemoryError err) {
                            log.debug("scalarDatasetCommonIO(): Out of memory");
                            throw new HDF5Exception("Out Of Memory");
                        }
                    }
                    else {
                        // reuse the buffer if the size is the same
                        log.trace("scalarDatasetCommonIO():read ioType reuse the buffer if the size is the same");
                        theData = originalBuf;
                    }

                    if (theData != null) {
                        /*
                         * Actually read the data now that everything has been setup.
                         */
                        long tid = HDF5Constants.H5I_INVALID_HID;
                        try {
                            log.trace("scalarDatasetCommonIO():read ioType create native");
                            tid = dsDatatype.createNative();

                            if (dsDatatype.isVarStr()) {
                                log.trace("scalarDatasetCommonIO(): H5Dread_VLStrings did={} tid={} spaceIDs[0]={} spaceIDs[1]={}",
                                        did, tid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
                                                (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);

                                H5.H5Dread_VLStrings(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT,
                                        (Object[]) theData);
                            }
                            else if (dsDatatype.isVLEN() || (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isVLEN())) {
                                log.trace("scalarDatasetCommonIO(): H5DreadVL did={} tid={} spaceIDs[0]={} spaceIDs[1]={}",
                                        did, tid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
                                                (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);

                                H5.H5DreadVL(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT,
                                        (Object[]) theData);
                            }
                            else {
                                log.trace("scalarDatasetCommonIO(): H5Dread did={} tid={} spaceIDs[0]={} spaceIDs[1]={}",
                                        did, tid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
                                                (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);

                                H5.H5Dread(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, theData);
                            }
                        }
                        catch (HDF5DataFiltersException exfltr) {
                            log.debug("scalarDatasetCommonIO(): read failure: ", exfltr);
                            throw new Exception("Filter not available exception: " + exfltr.getMessage(), exfltr);
                        }
                        catch (Exception ex) {
                            log.debug("scalarDatasetCommonIO(): read failure: ", ex);
                            throw new Exception(ex.getMessage(), ex);
                        }
                        finally {
                            dsDatatype.close(tid);
                        }

                        /*
                         * Perform any necessary data conversions.
                         */
                        if (dsDatatype.isText() && convertByteToString && (theData instanceof byte[])) {
                            log.trace("scalarDatasetCommonIO(): isText: converting byte array to string array");
                            theData = byteToString((byte[]) theData, (int) dsDatatype.getDatatypeSize());
                        }
                        else if (dsDatatype.isFloat() && dsDatatype.getDatatypeSize() == 16) {
                            log.trace("scalarDatasetCommonIO(): isFloat: converting byte array to BigDecimal array");
                            theData = dsDatatype.byteToBigDecimal(0, (int)totalSelectedSpacePoints, (byte[]) theData);
                        }
                        else if (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isFloat() && dsDatatype.getDatatypeBase().getDatatypeSize() == 16) {
                            log.trace("scalarDatasetCommonIO(): isArray and isFloat: converting byte array to BigDecimal array");
                            long[] arrayDims = dsDatatype.getArrayDims();
                            int asize = (int)totalSelectedSpacePoints;
                            for (int j = 0; j < arrayDims.length; j++) {
                                asize *= arrayDims[j];
                            }
                            theData = ((H5Datatype)dsDatatype.getDatatypeBase()).byteToBigDecimal(0, asize, (byte[]) theData);
                        }
                        else if (dsDatatype.isRef() && (theData instanceof byte[])) {
                            log.trace("scalarDatasetCommonIO():read ioType isRef: converting byte array to List of bytes");
                            ArrayList<byte[]> theListData = new ArrayList<>((int)totalSelectedSpacePoints);
                            for (int m = 0; m < (int) totalSelectedSpacePoints; m++) {
                                byte[] curBytes = new byte[(int)dsDatatype.getDatatypeSize()];
                                try {
                                    System.arraycopy(theData, m * (int)dsDatatype.getDatatypeSize(), curBytes, 0, (int)dsDatatype.getDatatypeSize());
                                    theListData.add(curBytes);
                                }
                                catch (Exception err) {
                                    log.trace("scalarDatasetCommonIO(): arraycopy failure: ", err);
                                }
                            }
                            theData = theListData;
                        }
                    }
                } // H5File.IO_TYPE.READ
                else {
                    /*
                     * Perform any necessary data conversions before writing the data.
                     *
                     * Note that v-len strings do not get converted, regardless of
                     * conversion request type.
                     */
                    Object tmpData = writeBuf;
                    try {
                        // Check if we need to convert integer data
                        int tsize = (int) dsDatatype.getDatatypeSize();
                        String cname = writeBuf.getClass().getName();
                        log.trace("scalarDatasetCommonIO(): cname={} of datatype size={}", cname, tsize);
                        char dname = cname.charAt(cname.lastIndexOf("[") + 1);
                        boolean doIntConversion = (((tsize == 1) && (dname == 'S')) || ((tsize == 2) && (dname == 'I'))
                                || ((tsize == 4) && (dname == 'J')) || (dsDatatype.isUnsigned() && unsignedConverted));

                        if (doIntConversion) {
                            log.trace("scalarDatasetCommonIO(): converting integer data to unsigned C-type integers");
                            tmpData = convertToUnsignedC(writeBuf, null);
                        }
                        else if (dsDatatype.isText() && !dsDatatype.isVarStr() && convertByteToString && !(writeBuf instanceof byte[])) {
                            log.trace("scalarDatasetCommonIO(): converting string array to byte array");
                            tmpData = stringToByte((String[]) writeBuf, tsize);
                        }
                        else if (dsDatatype.isEnum() && (Array.get(writeBuf, 0) instanceof String)) {
                            log.trace("scalarDatasetCommonIO(): converting enum names to values");
                            tmpData = dsDatatype.convertEnumNameToValue((String[]) writeBuf);
                        }
                        else if (dsDatatype.isFloat() && dsDatatype.getDatatypeSize() == 16) {
                            log.trace("scalarDatasetCommonIO(): isFloat: converting BigDecimal array to byte array");
                            throw new Exception("data conversion failure: cannot write BigDecimal values");
                            //tmpData = dsDatatype.bigDecimalToByte(0, (int)totalSelectedSpacePoints, (BigDecimal[]) writeBuf);
                        }
                    }
                    catch (Exception ex) {
                        log.debug("scalarDatasetCommonIO(): data conversion failure: ", ex);
                        throw new Exception("data conversion failure: " + ex.getMessage());
                    }

                    /*
                     * Actually write the data now that everything has been setup.
                     */
                    long tid = HDF5Constants.H5I_INVALID_HID;
                    try {
                        tid = dsDatatype.createNative();

                        if (dsDatatype.isVarStr()) {
                            log.trace("scalarDatasetCommonIO(): H5Dwrite_VLStrings did={} tid={} spaceIDs[0]={} spaceIDs[1]={}",
                                    did, tid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
                                            (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);

                            H5.H5Dwrite_VLStrings(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, (Object[]) tmpData);
                        }
                        else if (dsDatatype.isVLEN() || (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isVLEN())) {
                            log.trace("scalarDatasetCommonIO(): H5DwriteVL did={} tid={} spaceIDs[0]={} spaceIDs[1]={}",
                                    did, tid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
                                            (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);

                            H5.H5DwriteVL(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, (Object[]) tmpData);
                        }
                        else {
                            log.trace("scalarDatasetCommonIO(): H5Dwrite did={} tid={} spaceIDs[0]={} spaceIDs[1]={}",
                                    did, tid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
                                            (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);

                            H5.H5Dwrite(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, tmpData);
                        }
                    }
                    catch (Exception ex) {
                        log.debug("scalarDatasetCommonIO(): write failure: ", ex);
                        throw new Exception(ex.getMessage());
                    }
                    finally {
                        dsDatatype.close(tid);
                    }
                } // H5File.IO_TYPE.WRITE
            }
            finally {
                if (HDF5Constants.H5S_ALL != spaceIDs[0]) {
                    try {
                        H5.H5Sclose(spaceIDs[0]);
                    }
                    catch (Exception ex) {
                        log.debug("scalarDatasetCommonIO(): H5Sclose(spaceIDs[0] {}) failure: ", spaceIDs[0], ex);
                    }
                }

                if (HDF5Constants.H5S_ALL != spaceIDs[1]) {
                    try {
                        H5.H5Sclose(spaceIDs[1]);
                    }
                    catch (Exception ex) {
                        log.debug("scalarDatasetCommonIO(): H5Sclose(spaceIDs[1] {}) failure: ", spaceIDs[1], ex);
                    }
                }

                close(did);
            }
        }
        else
            log.debug("scalarDatasetCommonIO(): failed to open dataset");

        return theData;
    }

    /**
     * Retrieves the object's metadata, such as attributes, from the file.
     *
     * Metadata, such as attributes, is stored in a List.
     *
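     * A minimal sketch of listing the attributes is shown below; it assumes the
     * {@link hdf.object.Attribute#getAttributeName()} accessor:
     *
     * <pre>
     * List&lt;Attribute&gt; attrs = dset.getMetadata();
     * for (Attribute attr : attrs)
     *     System.out.println(attr.getAttributeName());
     * </pre>
     *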
     * @return the list of metadata objects.
     *
     * @throws HDF5Exception
     *             if the metadata cannot be retrieved
     */
    @Override
    public List<Attribute> getMetadata() throws HDF5Exception {
        int gmIndexType = 0;
        int gmIndexOrder = 0;

        try {
            gmIndexType = fileFormat.getIndexType(null);
        }
        catch (Exception ex) {
            log.debug("getMetadata(): getIndexType failed: ", ex);
        }
        try {
            gmIndexOrder = fileFormat.getIndexOrder(null);
        }
        catch (Exception ex) {
            log.debug("getMetadata(): getIndexOrder failed: ", ex);
        }
        return this.getMetadata(gmIndexType, gmIndexOrder);
    }

    /**
     * Retrieves the object's metadata, such as attributes, from the file.
     *
     * Metadata, such as attributes, is stored in a List.
     *
     * @param attrPropList
     *             the list of properties to get
     *
     * @return the list of metadata objects.
     *
     * @throws HDF5Exception
     *             if the metadata cannot be retrieved
     */
    public List<Attribute> getMetadata(int... attrPropList) throws HDF5Exception {
        if (!isInited())
            init();

        try {
            this.linkTargetObjName = H5File.getLinkTargetName(this);
        }
        catch (Exception ex) {
            log.debug("getMetadata(): getLinkTargetName failed: ", ex);
        }

        if (objMetadata.getAttributeList() == null) {
            long did = HDF5Constants.H5I_INVALID_HID;
            long pcid = HDF5Constants.H5I_INVALID_HID;
            long paid = HDF5Constants.H5I_INVALID_HID;

            did = open();
            if (did >= 0) {
                try {
                    // get the compression and chunk information
                    pcid = H5.H5Dget_create_plist(did);
                    paid = H5.H5Dget_access_plist(did);
                    long storageSize = H5.H5Dget_storage_size(did);
                    int nfilt = H5.H5Pget_nfilters(pcid);
                    int layoutType = H5.H5Pget_layout(pcid);

                    storageLayout.setLength(0);
                    compression.setLength(0);

                    if (layoutType == HDF5Constants.H5D_CHUNKED) {
                        chunkSize = new long[rank];
                        H5.H5Pget_chunk(pcid, rank, chunkSize);
                        int n = chunkSize.length;
                        storageLayout.append("CHUNKED: ").append(chunkSize[0]);
                        for (int i = 1; i < n; i++)
                            storageLayout.append(" X ").append(chunkSize[i]);

                        if (nfilt > 0) {
                            long nelmts = 1;
                            long uncompSize;
                            long datumSize = getDatatype().getDatatypeSize();

                            if (datumSize < 0) {
                                long tmptid = HDF5Constants.H5I_INVALID_HID;
                                try {
                                    tmptid = H5.H5Dget_type(did);
                                    datumSize = H5.H5Tget_size(tmptid);
                                }
                                finally {
                                    try {
                                        H5.H5Tclose(tmptid);
                                    }
                                    catch (Exception ex2) {
                                        log.debug("getMetadata(): H5Tclose(tmptid {}) failure: ", tmptid, ex2);
                                    }
                                }
                            }

                            for (int i = 0; i < rank; i++)
                                nelmts *= dims[i];
                            uncompSize = nelmts * datumSize;

                            /* compression ratio = uncompressed size / compressed size */

                            if (storageSize != 0) {
                                double ratio = (double) uncompSize / (double) storageSize;
                                DecimalFormat df = new DecimalFormat();
                                df.setMinimumFractionDigits(3);
                                df.setMaximumFractionDigits(3);
                                compression.append(df.format(ratio)).append(":1");
                            }
                        }
                    }
                    else if (layoutType == HDF5Constants.H5D_COMPACT) {
                        storageLayout.append("COMPACT");
                    }
                    else if (layoutType == HDF5Constants.H5D_CONTIGUOUS) {
                        storageLayout.append("CONTIGUOUS");
                        if (H5.H5Pget_external_count(pcid) > 0)
                            storageLayout.append(" - EXTERNAL ");
                    }
                    else if (layoutType == HDF5Constants.H5D_VIRTUAL) {
                        storageLayout.append("VIRTUAL - ");
                        try {
                            long vmaps = H5.H5Pget_virtual_count(pcid);
                            try {
                                int virtView = H5.H5Pget_virtual_view(paid);
                                long virtGap = H5.H5Pget_virtual_printf_gap(paid);
                                if (virtView == HDF5Constants.H5D_VDS_FIRST_MISSING)
                                    storageLayout.append("First Missing");
                                else
                                    storageLayout.append("Last Available");
                                storageLayout.append("\nGAP : ").append(virtGap);
                            }
                            catch (Exception err) {
                                log.debug("getMetadata(): vds error: ", err);
                                storageLayout.append("ERROR");
                            }
                            storageLayout.append("\nMAPS : ").append(vmaps);
                            if (vmaps > 0) {
                                for (long next = 0; next < vmaps; next++) {
                                    try {
                                        H5.H5Pget_virtual_vspace(pcid, next);
                                        H5.H5Pget_virtual_srcspace(pcid, next);
                                        String fname = H5.H5Pget_virtual_filename(pcid, next);
                                        String dsetname = H5.H5Pget_virtual_dsetname(pcid, next);
                                        storageLayout.append("\n").append(fname).append(" : ").append(dsetname);
                                    }
                                    catch (Exception err) {
                                        log.debug("getMetadata(): vds space[{}] error: ", next, err);
                                        storageLayout.append("ERROR");
                                    }
                                }
                            }
                        }
                        catch (Exception err) {
                            log.debug("getMetadata(): vds count error: ", err);
                            storageLayout.append("ERROR");
                        }
                    }
                    else {
                        chunkSize = null;
                        storageLayout.append("NONE");
                    }

                    int[] flags = { 0, 0 };
                    long[] cdNelmts = { 20 };
                    int[] cdValues = new int[(int) cdNelmts[0]];
                    String[] cdName = { "", "" };
                    log.trace("getMetadata(): {} filters in pipeline", nfilt);
                    int filter = -1;
                    int[] filterConfig = { 1 };
1307
1308                    filters.setLength(0);
1309
1310                    if (nfilt == 0) {
1311                        filters.append("NONE");
1312                    }
1313                    else {
1314                        for (int i = 0, k = 0; i < nfilt; i++) {
1315                            log.trace("getMetadata(): filter[{}]", i);
1316                            if (i > 0)
1317                                filters.append(", ");
1318                            if (k > 0)
1319                                compression.append(", ");
1320
1321                            try {
1322                                cdNelmts[0] = 20;
1323                                cdValues = new int[(int) cdNelmts[0]];
                                filter = H5.H5Pget_filter(pcid, i, flags, cdNelmts, cdValues, 120, cdName, filterConfig);
                                log.trace("getMetadata(): filter[{}] is {} has {} elements ", i, cdName[0], cdNelmts[0]);
                                for (int j = 0; j < cdNelmts[0]; j++)
                                    log.trace("getMetadata(): filter[{}] element {} = {}", i, j, cdValues[j]);
                            }
                            catch (Exception err) {
                                log.debug("getMetadata(): filter[{}] error: ", i, err);
                                filters.append("ERROR");
                                continue;
                            }

                            if (filter == HDF5Constants.H5Z_FILTER_NONE) {
                                filters.append("NONE");
                            }
                            else if (filter == HDF5Constants.H5Z_FILTER_DEFLATE) {
                                filters.append("GZIP");
                                compression.append(COMPRESSION_GZIP_TXT).append(cdValues[0]);
                                k++;
                            }
                            else if (filter == HDF5Constants.H5Z_FILTER_FLETCHER32) {
                                filters.append("Error detection filter");
                            }
                            else if (filter == HDF5Constants.H5Z_FILTER_SHUFFLE) {
                                filters.append("SHUFFLE: Nbytes = ").append(cdValues[0]);
                            }
                            else if (filter == HDF5Constants.H5Z_FILTER_NBIT) {
                                filters.append("NBIT");
                            }
                            else if (filter == HDF5Constants.H5Z_FILTER_SCALEOFFSET) {
                                filters.append("SCALEOFFSET: MIN BITS = ").append(cdValues[0]);
                            }
                            else if (filter == HDF5Constants.H5Z_FILTER_SZIP) {
                                filters.append("SZIP");
                                compression.append("SZIP: Pixels per block = ").append(cdValues[1]);
                                k++;
                                int flag = -1;
                                try {
                                    flag = H5.H5Zget_filter_info(filter);
                                }
                                catch (Exception ex) {
                                    log.debug("getMetadata(): H5Zget_filter_info failure: ", ex);
                                    flag = -1;
                                }
                                if (flag == HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED)
                                    compression.append(": H5Z_FILTER_CONFIG_DECODE_ENABLED");
                                else if ((flag == HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED)
                                        || (flag >= (HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED
                                                + HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED)))
                                    compression.append(": H5Z_FILTER_CONFIG_ENCODE_ENABLED");
                            }
                            else {
                                filters.append("USERDEFINED ").append(cdName[0]).append("(").append(filter).append("): ");
                                for (int j = 0; j < cdNelmts[0]; j++) {
                                    if (j > 0)
                                        filters.append(", ");
                                    filters.append(cdValues[j]);
                                }
                                log.debug("getMetadata(): filter[{}] is user defined compression", i);
                            }
                        } //  (int i=0; i<nfilt; i++)
                    }

                    if (compression.length() == 0)
                        compression.append("NONE");
                    log.trace("getMetadata(): filter compression={}", compression);
                    log.trace("getMetadata(): filter information={}", filters);

                    storage.setLength(0);
                    storage.append("SIZE: ").append(storageSize);

                    try {
                        int[] at = { 0 };
                        H5.H5Pget_alloc_time(pcid, at);
                        storage.append(", allocation time: ");
                        if (at[0] == HDF5Constants.H5D_ALLOC_TIME_EARLY)
                            storage.append("Early");
                        else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_INCR)
                            storage.append("Incremental");
                        else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_LATE)
                            storage.append("Late");
                        else
                            storage.append("Default");
                    }
                    catch (Exception ex) {
                        log.debug("getMetadata(): Storage allocation time: ", ex);
                    }
                    log.trace("getMetadata(): storage={}", storage);
                }
                finally {
                    try {
                        H5.H5Pclose(paid);
                    }
                    catch (Exception ex) {
                        log.debug("getMetadata(): H5Pclose(paid {}) failure: ", paid, ex);
                    }
                    try {
                        H5.H5Pclose(pcid);
                    }
                    catch (Exception ex) {
                        log.debug("getMetadata(): H5Pclose(pcid {}) failure: ", pcid, ex);
                    }
                    close(did);
                }
            }
        }

        List<Attribute> attrlist = null;
        try {
            attrlist = objMetadata.getMetadata(attrPropList);
        }
        catch (Exception ex) {
            log.debug("getMetadata(): getMetadata failed: ", ex);
        }
        return attrlist;
    }

    /**
     * Writes a specific piece of metadata (such as an attribute) into the file.
     *
     * If an HDF(4&amp;5) attribute exists in the file, this method updates its
     * value. If the attribute does not exist, it creates the attribute and
     * attaches it to the object. Writing a new attribute fails if an attribute
     * with the same name is already attached to the object. To update the value
     * of an existing attribute, retrieve it with getMetadata(), change its
     * value, then call writeMetadata() to write the new value back.
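     *
     * For example, a minimal sketch of the get-modify-write cycle (the
     * attribute name &quot;units&quot; is illustrative):
     *
     * <pre>
     * List&lt;Attribute&gt; attrs = dset.getMetadata();
     * for (Attribute attr : attrs) {
     *     if (&quot;units&quot;.equals(attr.getAttributeName())) {
     *         // ... modify the attribute's value here ...
     *         dset.writeMetadata(attr); // updates the existing attribute
     *     }
     * }
     * </pre>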
     *
     * @param info
     *            the metadata to write.
     *
     * @throws Exception
     *             if the metadata cannot be written
     */
    @Override
    public void writeMetadata(Object info) throws Exception {
        try {
            objMetadata.writeMetadata(info);
        }
        catch (Exception ex) {
            log.debug("writeMetadata(): Object not an Attribute: ", ex);
        }
    }

    /**
     * Deletes an existing piece of metadata from this object.
     *
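     * For example, a minimal sketch that removes an attribute by name (the
     * attribute name &quot;units&quot; is illustrative):
     *
     * <pre>
     * for (Attribute attr : dset.getMetadata()) {
     *     if (&quot;units&quot;.equals(attr.getAttributeName()))
     *         dset.removeMetadata(attr);
     * }
     * </pre>
     *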
     * @param info
     *            the metadata to delete.
     *
     * @throws HDF5Exception
     *             if the metadata cannot be removed
     */
    @Override
    public void removeMetadata(Object info) throws HDF5Exception {
        try {
            objMetadata.removeMetadata(info);
        }
        catch (Exception ex) {
            log.debug("removeMetadata(): Object not an Attribute: ", ex);
            return;
        }

        Attribute attr = (Attribute) info;
        log.trace("removeMetadata(): {}", attr.getAttributeName());
        long did = open();
        if (did >= 0) {
            try {
                H5.H5Adelete(did, attr.getAttributeName());
            }
            finally {
                close(did);
            }
        }
        else {
            log.debug("removeMetadata(): failed to open scalar dataset");
        }
    }

    /**
     * Updates an existing piece of metadata attached to this object.
     *
     * @param info
     *            the metadata to update.
     *
     * @throws HDF5Exception
     *             if the metadata cannot be updated
     */
    @Override
    public void updateMetadata(Object info) throws HDF5Exception {
        try {
            objMetadata.updateMetadata(info);
        }
        catch (Exception ex) {
            log.debug("updateMetadata(): Object not an Attribute: ", ex);
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#setName(java.lang.String)
     */
    @Override
    public void setName(String newName) throws Exception {
        if (newName == null)
            throw new IllegalArgumentException("The new name is NULL");

        H5File.renameObject(this, newName);
        super.setName(newName);
    }

    /**
     * Resets selection of dataspace
     */
    @Override
    protected void resetSelection() {
        super.resetSelection();

        if (interlace == INTERLACE_PIXEL) {
            // 24-bit TRUE color image
            // [height][width][pixel components]
            selectedDims[2] = 3;
            selectedDims[0] = dims[0];
            selectedDims[1] = dims[1];
            selectedIndex[0] = 0; // index for height
            selectedIndex[1] = 1; // index for width
            selectedIndex[2] = 2; // index for depth
        }
        else if (interlace == INTERLACE_PLANE) {
            // 24-bit TRUE color image
            // [pixel components][height][width]
            selectedDims[0] = 3;
            selectedDims[1] = dims[1];
            selectedDims[2] = dims[2];
            selectedIndex[0] = 1; // index for height
            selectedIndex[1] = 2; // index for width
            selectedIndex[2] = 0; // index for depth
        }

        if ((rank > 1) && (selectedIndex[0] > selectedIndex[1]))
            isDefaultImageOrder = false;
        else
            isDefaultImageOrder = true;
    }

    /**
     * Creates a scalar dataset in a file with/without chunking and compression.
     *
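     * For example, a minimal sketch that creates a chunked, GZIP-compressed
     * 2-D integer dataset in the root group (the names and sizes are
     * illustrative):
     *
     * <pre>
     * long[] dims = { 100, 50 };
     * long[] chunks = { 50, 50 };
     * Datatype dtype = new H5Datatype(Datatype.CLASS_INTEGER, 4, Datatype.NATIVE, Datatype.NATIVE);
     * int[] data = new int[100 * 50];
     * Dataset d = H5ScalarDS.create(&quot;ints&quot;, (Group) file.get(&quot;/&quot;), dtype, dims, null, chunks, 6, data);
     * </pre>
     *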
     * @param name
     *            the name of the dataset to create.
     * @param pgroup
     *            parent group where the new dataset is created.
     * @param type
     *            the datatype of the dataset.
     * @param dims
     *            the dimension size of the dataset.
     * @param maxdims
     *            the max dimension size of the dataset. maxdims is set to dims if maxdims = null.
     * @param chunks
     *            the chunk size of the dataset. No chunking if chunk = null.
     * @param gzip
     *            GZIP compression level (1 to 9). No compression if gzip&lt;=0.
     * @param data
     *            the array of data values.
     *
     * @return the new scalar dataset if successful; otherwise returns null.
     *
     * @throws Exception if there is a failure.
     */
    public static Dataset create(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims,
            long[] chunks, int gzip, Object data) throws Exception {
        return create(name, pgroup, type, dims, maxdims, chunks, gzip, null, data);
    }

    /**
     * Creates a scalar dataset in a file with/without chunking and compression.
     *
     * The following example shows how to create a string dataset using this function.
     *
     * <pre>
     * H5File file = new H5File(&quot;test.h5&quot;, H5File.CREATE);
     * int max_str_len = 120;
     * Datatype strType = new H5Datatype(Datatype.CLASS_STRING, max_str_len, Datatype.NATIVE, Datatype.NATIVE);
     * int size = 10000;
     * long dims[] = { size };
     * long chunks[] = { 1000 };
     * int gzip = 9;
     * String strs[] = new String[size];
     *
     * for (int i = 0; i &lt; size; i++)
     *     strs[i] = String.valueOf(i);
     *
     * file.open();
     * file.createScalarDS(&quot;/1D scalar strings&quot;, null, strType, dims, null, chunks, gzip, strs);
     *
     * try {
     *     file.close();
     * }
     * catch (Exception ex) {
     * }
     * </pre>
     *
     * @param name
     *            the name of the dataset to create.
     * @param pgroup
     *            parent group where the new dataset is created.
     * @param type
     *            the datatype of the dataset.
     * @param dims
     *            the dimension size of the dataset.
     * @param maxdims
     *            the max dimension size of the dataset. maxdims is set to dims if maxdims = null.
     * @param chunks
     *            the chunk size of the dataset. No chunking if chunk = null.
     * @param gzip
     *            GZIP compression level (1 to 9). No compression if gzip&lt;=0.
     * @param fillValue
     *            the default data value.
     * @param data
     *            the array of data values.
     *
     * @return the new scalar dataset if successful; otherwise returns null.
     *
     * @throws Exception if there is a failure.
     */
    public static Dataset create(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims,
            long[] chunks, int gzip, Object fillValue, Object data) throws Exception {
        H5ScalarDS dataset = null;
        String fullPath = null;
        long did = HDF5Constants.H5I_INVALID_HID;
        long plist = HDF5Constants.H5I_INVALID_HID;
        long sid = HDF5Constants.H5I_INVALID_HID;
        long tid = HDF5Constants.H5I_INVALID_HID;

        if ((pgroup == null) || (name == null) || (dims == null) || ((gzip > 0) && (chunks == null))) {
            log.debug("create(): one or more parameters are null");
            return null;
        }

        H5File file = (H5File) pgroup.getFileFormat();
        if (file == null) {
            log.debug("create(): parent group FileFormat is null");
            return null;
        }

        String path = HObject.SEPARATOR;
        if (!pgroup.isRoot()) {
            path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR;
            if (name.endsWith("/"))
                name = name.substring(0, name.length() - 1);
            int idx = name.lastIndexOf('/');
            if (idx >= 0)
                name = name.substring(idx + 1);
        }

        fullPath = path + name;

        // setup chunking and compression
        boolean isExtentable = false;
        if (maxdims != null) {
            for (int i = 0; i < maxdims.length; i++) {
                if (maxdims[i] == 0)
                    maxdims[i] = dims[i];
                else if (maxdims[i] < 0)
                    maxdims[i] = HDF5Constants.H5S_UNLIMITED;

                if (maxdims[i] != dims[i])
                    isExtentable = true;
            }
        }
        // HDF5 requires chunking in order to define extendible datasets.
        // Chunking makes it possible to extend datasets efficiently without
        // having to reorganize storage excessively. Default to a chunk size of
        // min(dim, 64) in each dimension, which generally performs well.
        if ((chunks == null) && isExtentable) {
            chunks = new long[dims.length];
            for (int i = 0; i < dims.length; i++)
                chunks[i] = Math.min(dims[i], 64);
        }

        // prepare the dataspace and datatype
        int rank = dims.length;

        tid = type.createNative();
        if (tid >= 0) {
            try {
                sid = H5.H5Screate_simple(rank, dims, maxdims);

                // figure out creation properties
                plist = HDF5Constants.H5P_DEFAULT;

                byte[] valFill = null;
                try {
                    valFill = parseFillValue(type, fillValue);
                }
                catch (Exception ex) {
                    log.debug("create(): parse fill value: ", ex);
                }
                log.trace("create(): parseFillValue={}", valFill);

                if (chunks != null || valFill != null) {
                    plist = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);

                    if (chunks != null) {
                        H5.H5Pset_layout(plist, HDF5Constants.H5D_CHUNKED);
                        H5.H5Pset_chunk(plist, rank, chunks);

                        // compression requires chunking
                        if (gzip > 0) {
                            H5.H5Pset_deflate(plist, gzip);
                        }
                    }

                    if (valFill != null)
                        H5.H5Pset_fill_value(plist, tid, valFill);
                }

                long fid = file.getFID();

                log.trace("create(): create dataset fid={}", fid);
                did = H5.H5Dcreate(fid, fullPath, tid, sid, HDF5Constants.H5P_DEFAULT, plist, HDF5Constants.H5P_DEFAULT);
                log.trace("create(): create dataset did={}", did);
                dataset = new H5ScalarDS(file, name, path);
            }
            finally {
                try {
                    H5.H5Pclose(plist);
                }
                catch (HDF5Exception ex) {
                    log.debug("create(): H5Pclose(plist {}) failure: ", plist, ex);
                }
                try {
                    H5.H5Sclose(sid);
                }
                catch (HDF5Exception ex) {
                    log.debug("create(): H5Sclose(sid {}) failure: ", sid, ex);
                }
                try {
                    H5.H5Tclose(tid);
                }
                catch (HDF5Exception ex) {
                    log.debug("create(): H5Tclose(tid {}) failure: ", tid, ex);
                }
                try {
                    H5.H5Dclose(did);
                }
                catch (HDF5Exception ex) {
                    log.debug("create(): H5Dclose(did {}) failure: ", did, ex);
                }
            }
        }

        if (dataset != null) {
            pgroup.addToMemberList(dataset);
            if (data != null) {
                dataset.init();
                long[] selected = dataset.getSelectedDims();
                for (int i = 0; i < rank; i++)
                    selected[i] = dims[i];
                dataset.write(data);
            }
        }

        return dataset;
    }

    // check _FillValue, valid_min, valid_max, and valid_range
    private void checkCFconvention(long oid) throws Exception {
        Object avalue = getAttrValue(oid, "_FillValue");

        if (avalue != null) {
            int n = Array.getLength(avalue);
            for (int i = 0; i < n; i++)
                addFilteredImageValue((Number) Array.get(avalue, i));
        }

        if (imageDataRange == null || imageDataRange[1] <= imageDataRange[0]) {
            double x0 = 0;
            double x1 = 0;
            avalue = getAttrValue(oid, "valid_range");
            if (avalue != null) {
                try {
                    x0 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 0).toString());
                    x1 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 1).toString());
                    imageDataRange = new double[2];
                    imageDataRange[0] = x0;
                    imageDataRange[1] = x1;
                    return;
                }
                catch (Exception ex) {
                    log.debug("checkCFconvention(): valid_range: ", ex);
                }
            }

            avalue = getAttrValue(oid, "valid_min");
            if (avalue != null) {
                try {
                    x0 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 0).toString());
                }
                catch (Exception ex) {
                    log.debug("checkCFconvention(): valid_min: ", ex);
                }
                avalue = getAttrValue(oid, "valid_max");
                if (avalue != null) {
                    try {
                        x1 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 0).toString());
                        imageDataRange = new double[2];
                        imageDataRange[0] = x0;
                        imageDataRange[1] = x1;
                    }
                    catch (Exception ex) {
                        log.debug("checkCFconvention(): valid_max: ", ex);
                    }
                }
            }
        } // (imageDataRange==null || imageDataRange[1]<=imageDataRange[0])
    }

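    /**
     * Reads the value of the named attribute attached to the given object
     * identifier and returns it as a Java array (converted from unsigned C
     * types when necessary), or null if the attribute does not exist or
     * cannot be read.
     */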
    private Object getAttrValue(long oid, String aname) {
        log.trace("getAttrValue(): start: name={}", aname);

        long aid = HDF5Constants.H5I_INVALID_HID;
        long atid = HDF5Constants.H5I_INVALID_HID;
        long asid = HDF5Constants.H5I_INVALID_HID;
        Object avalue = null;

        try {
            // try to find attribute name
            if (H5.H5Aexists_by_name(oid, ".", aname, HDF5Constants.H5P_DEFAULT))
                aid = H5.H5Aopen_by_name(oid, ".", aname, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
        }
        catch (HDF5LibraryException ex5) {
            log.debug("getAttrValue(): Failed to find attribute {} : Expected", aname);
        }
        catch (Exception ex) {
            log.debug("getAttrValue(): try to find attribute {}:", aname, ex);
        }
        if (aid > 0) {
            try {
                atid = H5.H5Aget_type(aid);
                long tmptid = atid;
                atid = H5.H5Tget_native_type(tmptid);
                try {
                    H5.H5Tclose(tmptid);
                }
                catch (Exception ex) {
                    log.debug("getAttrValue(): H5Tclose(tmptid {}) failure: ", tmptid, ex);
                }

                asid = H5.H5Aget_space(aid);
                long adims[] = null;

                int arank = H5.H5Sget_simple_extent_ndims(asid);
                if (arank > 0) {
                    adims = new long[arank];
                    H5.H5Sget_simple_extent_dims(asid, adims, null);
                }
                log.trace("getAttrValue(): adims={}", adims);

                // retrieve the attribute value
                long lsize = 1;
                if (adims != null) {
                    for (int j = 0; j < adims.length; j++) {
                        lsize *= adims[j];
                    }
                }
                log.trace("getAttrValue(): lsize={}", lsize);

                if (lsize < Integer.MIN_VALUE || lsize > Integer.MAX_VALUE)
                    throw new Exception("Invalid int size");

                H5Datatype dsDatatype = null;
                int nativeClass = H5.H5Tget_class(atid);
                if (nativeClass == HDF5Constants.H5T_REFERENCE)
                    dsDatatype = new H5ReferenceType(getFileFormat(), lsize, atid);
                else
                    dsDatatype = new H5Datatype(getFileFormat(), atid);

                try {
                    avalue = H5Datatype.allocateArray(dsDatatype, (int) lsize);
                }
                catch (OutOfMemoryError e) {
                    log.debug("getAttrValue(): out of memory: ", e);
                    avalue = null;
                }

                if (avalue != null) {
                    log.trace("getAttrValue(): read attribute id {} of size={}", atid, lsize);
                    H5.H5Aread(aid, atid, avalue);

                    if (dsDatatype.isUnsigned()) {
                        log.trace("getAttrValue(): id {} is unsigned", atid);
                        avalue = convertFromUnsignedC(avalue, null);
                    }
                    if (dsDatatype.isRef() && (avalue instanceof byte[]))
                        ((H5ReferenceType) dsDatatype).setData((ArrayList<byte[]>) avalue);
                    else if (dsDatatype.isRef())
                        ((H5ReferenceType) dsDatatype).setData(avalue);
                }
            }
            catch (Exception ex) {
                log.debug("getAttrValue(): try to get value for attribute {}: ", aname, ex);
            }
            finally {
                try {
                    H5.H5Tclose(atid);
                }
                catch (HDF5Exception ex) {
                    log.debug("getAttrValue(): H5Tclose(atid {}) failure: ", atid, ex);
                }
                try {
                    H5.H5Sclose(asid);
                }
                catch (HDF5Exception ex) {
                    log.debug("getAttrValue(): H5Sclose(asid {}) failure: ", asid, ex);
                }
                try {
                    H5.H5Aclose(aid);
                }
                catch (HDF5Exception ex) {
                    log.debug("getAttrValue(): H5Aclose(aid {}) failure: ", aid, ex);
                }
            }
        } // (aid > 0)

        return avalue;
    }

    private boolean isStringAttributeOf(long objID, String name, String value) {
        boolean retValue = false;
        long aid = HDF5Constants.H5I_INVALID_HID;
        long atid = HDF5Constants.H5I_INVALID_HID;

        try {
            if (H5.H5Aexists_by_name(objID, ".", name, HDF5Constants.H5P_DEFAULT)) {
                aid = H5.H5Aopen_by_name(objID, ".", name, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
                atid = H5.H5Aget_type(aid);
                int size = (int) H5.H5Tget_size(atid);
                byte[] attrValue = new byte[size];
                H5.H5Aread(aid, atid, attrValue);
                String strValue = new String(attrValue).trim();
                retValue = strValue.equalsIgnoreCase(value);
            }
        }
        catch (Exception ex) {
            log.debug("isStringAttributeOf(): try to find out interlace mode:", ex);
        }
        finally {
            try {
                H5.H5Tclose(atid);
            }
            catch (HDF5Exception ex) {
                log.debug("isStringAttributeOf(): H5Tclose(atid {}) failure: ", atid, ex);
            }
            try {
                H5.H5Aclose(aid);
            }
            catch (HDF5Exception ex) {
                log.debug("isStringAttributeOf(): H5Aclose(aid {}) failure: ", aid, ex);
            }
        }

        return retValue;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#copy(hdf.object.Group, java.lang.String, long[], java.lang.Object)
     */
    @Override
    public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff) throws Exception {
        // must give a location to copy
        if (pgroup == null) {
            log.debug("copy(): Parent group is null");
            return null;
        }

        Dataset dataset = null;
        long srcdid = HDF5Constants.H5I_INVALID_HID;
        long dstdid = HDF5Constants.H5I_INVALID_HID;
        long plist = HDF5Constants.H5I_INVALID_HID;
        long tid = HDF5Constants.H5I_INVALID_HID;
        long sid = HDF5Constants.H5I_INVALID_HID;
        String dname = null;
        String path = null;

        if (pgroup.isRoot())
            path = HObject.SEPARATOR;
        else
            path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR;
        dname = path + dstName;

        srcdid = open();
        if (srcdid >= 0) {
            try {
                tid = H5.H5Dget_type(srcdid);
                sid = H5.H5Screate_simple(dims.length, dims, null);
                plist = H5.H5Dget_create_plist(srcdid);

                long[] chunks = new long[dims.length];
                boolean setChunkFlag = false;
                try {
                    H5.H5Pget_chunk(plist, dims.length, chunks);
                    for (int i = 0; i < dims.length; i++) {
                        if (dims[i] < chunks[i]) {
                            setChunkFlag = true;
                            if (dims[i] == 1)
                                chunks[i] = 1;
                            else
                                chunks[i] = dims[i] / 2;
                        }
                    }
                }
                catch (Exception ex) {
                    log.debug("copy(): chunk: ", ex);
                }

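                // the destination dataspace above is created with maxdims == dims, so
                // chunk dimensions must not exceed dims; apply the shrunken chunks here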
                if (setChunkFlag)
                    H5.H5Pset_chunk(plist, dims.length, chunks);

                try {
                    dstdid = H5.H5Dcreate(pgroup.getFID(), dname, tid, sid, HDF5Constants.H5P_DEFAULT, plist,
                            HDF5Constants.H5P_DEFAULT);
                }
                catch (Exception e) {
                    log.debug("copy(): H5Dcreate: ", e);
                }
                finally {
                    try {
                        H5.H5Dclose(dstdid);
                    }
                    catch (Exception ex2) {
                        log.debug("copy(): H5Dclose(dstdid {}) failure: ", dstdid, ex2);
                    }
                }

                dataset = new H5ScalarDS(pgroup.getFileFormat(), dstName, path);
                if (buff != null) {
                    dataset.init();
                    dataset.write(buff);
                }

                dstdid = dataset.open();
                if (dstdid >= 0) {
                    try {
                        H5File.copyAttributes(srcdid, dstdid);
                    }
                    finally {
                        try {
                            H5.H5Dclose(dstdid);
                        }
                        catch (Exception ex) {
                            log.debug("copy(): H5Dclose(dstdid {}) failure: ", dstdid, ex);
                        }
                    }
                }
            }
            finally {
                try {
                    H5.H5Pclose(plist);
                }
                catch (Exception ex) {
                    log.debug("copy(): H5Pclose(plist {}) failure: ", plist, ex);
                }
                try {
                    H5.H5Sclose(sid);
                }
                catch (Exception ex) {
                    log.debug("copy(): H5Sclose(sid {}) failure: ", sid, ex);
                }
                try {
                    H5.H5Tclose(tid);
                }
                catch (Exception ex) {
                    log.debug("copy(): H5Tclose(tid {}) failure: ", tid, ex);
                }
                try {
                    H5.H5Dclose(srcdid);
                }
                catch (Exception ex) {
                    log.debug("copy(): H5Dclose(srcdid {}) failure: ", srcdid, ex);
                }
            }
        }

        if (dataset != null) {
            pgroup.addToMemberList(dataset);
            ((ScalarDS) dataset).setIsImage(isImage);
        }

        return dataset;
    }

    /**
     * Get the number of palettes attached to this object.
     *
     * @return the number of palettes if it has any, 0 otherwise.
     */
    public int getNumberOfPalettes() {
        return NumberOfPalettes;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.ScalarDS#getPalette()
     */
    @Override
    public byte[][] getPalette() {
        log.trace("getPalette(): NumberOfPalettes={}", NumberOfPalettes);
        if (NumberOfPalettes > 0)
            if (palette == null)
                palette = readPalette(0);

        return palette;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.ScalarDS#getPaletteName(int)
     */
    @Override
    public String getPaletteName(int idx) {
        int count = readNumberOfPalettes();
        long did = HDF5Constants.H5I_INVALID_HID;
        long palID = HDF5Constants.H5I_INVALID_HID;
        String paletteName = null;

        if (count < 1) {
            log.debug("getPaletteName(): no palettes are attached");
            return null;
        }

        byte[][] refBuf = null;

        did = open();
        if (did >= 0) {
            try {
                refBuf = getPaletteRefs(did);
                palID = H5.H5Ropen_object(refBuf[idx], HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
                paletteName = H5.H5Iget_name(palID);
            }
            catch (Exception ex) {
                log.debug("getPaletteName(): failure: ", ex);
            }
            finally {
                close(palID);
                for (int i = 0; i < count; i++)
                    H5.H5Rdestroy(refBuf[i]);
                close(did);
            }
        }

        return paletteName;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.ScalarDS#readPalette(int)
     */
    @Override
    public byte[][] readPalette(int idx) {
        byte[][] thePalette = null;
        int count = readNumberOfPalettes();
        long did = HDF5Constants.H5I_INVALID_HID;
        long palID = HDF5Constants.H5I_INVALID_HID;
        long tid = HDF5Constants.H5I_INVALID_HID;
        log.trace("readPalette(): palette count={}", count);

        if (count < 1) {
            log.debug("readPalette(): no palettes are attached");
            return null;
        }

        byte[] p = null;
        byte[][] refBuf = null;

        did = open();
        if (did >= 0) {
            try {
                refBuf = getPaletteRefs(did);
                palID = H5.H5Ropen_object(refBuf[idx], HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
                log.trace("readPalette(): H5Ropen_object: {}", palID);
                tid = H5.H5Dget_type(palID);

                // support only 3*256 byte palette data
                if (H5.H5Dget_storage_size(palID) <= 768) {
                    p = new byte[3 * 256];
                    H5.H5Dread(palID, tid, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, p);
                }
            }
            catch (HDF5Exception ex) {
                log.debug("readPalette(): failure: ", ex);
                p = null;
            }
            finally {
                try {
                    H5.H5Tclose(tid);
                }
                catch (HDF5Exception ex2) {
                    log.debug("readPalette(): H5Tclose(tid {}) failure: ", tid, ex2);
                }
                close(palID);
                for (int i = 0; i < count; i++)
                    H5.H5Rdestroy(refBuf[i]);
                close(did);
            }
        }

        if (p != null) {
            thePalette = new byte[3][256];
            for (int i = 0; i < 256; i++) {
                thePalette[0][i] = p[i * 3];
                thePalette[1][i] = p[i * 3 + 1];
                thePalette[2][i] = p[i * 3 + 2];
            }
        }

        return thePalette;
    }

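    /**
     * Converts a fill value, given as a String or as the first element of an
     * array, into the raw byte representation of the given datatype; returns
     * null if the value cannot be parsed or the datatype class is not
     * supported.
     */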
    private static byte[] parseFillValue(Datatype type, Object fillValue) throws Exception {
        byte[] data = null;

        if (type == null || fillValue == null) {
            log.debug("parseFillValue(): datatype or fill value is null");
            return null;
        }

        int datatypeClass = type.getDatatypeClass();
        int datatypeSize = (int) type.getDatatypeSize();

        double valDbl = 0;
        String valStr = null;

        if (fillValue instanceof String)
            valStr = (String) fillValue;
        else if (fillValue.getClass().isArray())
            valStr = Array.get(fillValue, 0).toString();

        if (valStr == null) {
            log.debug("parseFillValue(): fill value is neither a string nor an array");
            return null;
        }

        if (!type.isString()) {
            try {
                valDbl = Double.parseDouble(valStr);
            }
            catch (NumberFormatException ex) {
                log.debug("parseFillValue(): parse error: ", ex);
                return null;
            }
        }

        try {
            switch (datatypeClass) {
            case Datatype.CLASS_INTEGER:
            case Datatype.CLASS_ENUM:
            case Datatype.CLASS_CHAR:
                log.trace("parseFillValue(): class CLASS_INT-ENUM-CHAR");
                if (datatypeSize == 1)
                    data = new byte[] { (byte) valDbl };
                else if (datatypeSize == 2)
                    data = HDFNativeData.shortToByte((short) valDbl);
                else if (datatypeSize == 8)
                    data = HDFNativeData.longToByte((long) valDbl);
                else
                    data = HDFNativeData.intToByte((int) valDbl);
                break;
            case Datatype.CLASS_FLOAT:
                log.trace("parseFillValue(): class CLASS_FLOAT");
                if (datatypeSize > 8)
                    data = valStr.getBytes();
                else if (datatypeSize == 8)
                    data = HDFNativeData.doubleToByte(valDbl);
                else
                    data = HDFNativeData.floatToByte((float) valDbl);
                break;
            case Datatype.CLASS_STRING:
                log.trace("parseFillValue(): class CLASS_STRING");
                data = valStr.getBytes();
                break;
            case Datatype.CLASS_REFERENCE:
                log.trace("parseFillValue(): class CLASS_REFERENCE");
                data = HDFNativeData.longToByte((long) valDbl);
                break;
            default:
                log.debug("parseFillValue(): datatypeClass unknown");
                break;
            } // (datatypeClass)
        }
        catch (Exception ex) {
            log.debug("parseFillValue(): failure: ", ex);
            data = null;
        }

        return data;
    }

    /**
     * Reads the palette references, if any, to determine the number of palettes
     * attached to this dataset.
     *
     * @return the number of palettes referenced.
     */
    public int readNumberOfPalettes() {
        log.trace("readNumberOfPalettes(): isInited={}", isInited());
        if (!isInited())
            init(); // init will be called to get refs

        return NumberOfPalettes;
    }

    /**
     * Counts the palette references attached to this dataset by reading the
     * dataspace of the PALETTE attribute.
     */
    private int readNumberOfPalette(long did) {
        long aid = HDF5Constants.H5I_INVALID_HID;
        long sid = HDF5Constants.H5I_INVALID_HID;
        int size = 0;
        int rank = 0;
        log.trace("readNumberOfPalette(): did={}", did);

        try {
            if (H5.H5Aexists_by_name(did, ".", "PALETTE", HDF5Constants.H5P_DEFAULT)) {
                aid = H5.H5Aopen_by_name(did, ".", "PALETTE", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
                sid = H5.H5Aget_space(aid);
                rank = H5.H5Sget_simple_extent_ndims(sid);
                size = 1;
                if (rank > 0) {
                    long[] dims = new long[rank];
                    H5.H5Sget_simple_extent_dims(sid, dims, null);
                    log.trace("readNumberOfPalette(): rank={}, dims={}", rank, dims);
                    for (int i = 0; i < rank; i++)
                        size *= (int) dims[i];
                }
                log.trace("readNumberOfPalette(): size={}", size);

                // guard against int overflow of the reference buffer size
                if (((long) size * HDF5Constants.H5R_REF_BUF_SIZE) > Integer.MAX_VALUE)
                    throw new HDF5Exception("Invalid int size");
            }
        }
        catch (HDF5Exception ex) {
            log.debug("readNumberOfPalette(): palette attribute search failed (expected if none): ", ex);
            size = 0;
        }
        finally {
            try {
                H5.H5Sclose(sid);
            }
            catch (HDF5Exception ex2) {
                log.debug("readNumberOfPalette(): H5Sclose(sid {}) failure: ", sid, ex2);
            }
            try {
                H5.H5Aclose(aid);
            }
            catch (HDF5Exception ex2) {
                log.debug("readNumberOfPalette(): H5Aclose(aid {}) failure: ", aid, ex2);
            }
        }

        return size;
    }

    /**
     * Reads the object references of palettes into a 2-D byte array. Each
     * reference occupies HDF5Constants.H5R_REF_BUF_SIZE bytes, so the array is
     * sized [numberOfPalettes][H5R_REF_BUF_SIZE]. The caller is responsible
     * for releasing the references with H5Rdestroy.
     */
    private byte[][] getPaletteRefs(long did) {
        long aid = HDF5Constants.H5I_INVALID_HID;
        long sid = HDF5Constants.H5I_INVALID_HID;
        int size = 0;
        int rank = 0;
        byte[][] refBuf = null;
        log.trace("getPaletteRefs(): did={}", did);

        try {
            if (H5.H5Aexists_by_name(did, ".", "PALETTE", HDF5Constants.H5P_DEFAULT)) {
                aid = H5.H5Aopen_by_name(did, ".", "PALETTE", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
                sid = H5.H5Aget_space(aid);
                rank = H5.H5Sget_simple_extent_ndims(sid);
                size = 1;
                if (rank > 0) {
                    long[] dims = new long[rank];
                    H5.H5Sget_simple_extent_dims(sid, dims, null);
                    log.trace("getPaletteRefs(): rank={}, dims={}", rank, dims);
                    for (int i = 0; i < rank; i++)
                        size *= (int) dims[i];
                }
                log.trace("getPaletteRefs(): size={}", size);

                // guard against int overflow of the reference buffer size
                if (((long) size * HDF5Constants.H5R_REF_BUF_SIZE) > Integer.MAX_VALUE)
                    throw new HDF5Exception("Invalid int size");
                refBuf = new byte[size][HDF5Constants.H5R_REF_BUF_SIZE];

                H5.H5Aread(aid, HDF5Constants.H5T_STD_REF, refBuf);
            }
        }
        catch (HDF5Exception ex) {
            log.debug("getPaletteRefs(): palette attribute search failed (expected if none): ", ex);
            refBuf = null;
        }
        finally {
            try {
                H5.H5Sclose(sid);
            }
            catch (HDF5Exception ex2) {
                log.debug("getPaletteRefs(): H5Sclose(sid {}) failure: ", sid, ex2);
            }
            try {
                H5.H5Aclose(aid);
            }
            catch (HDF5Exception ex2) {
                log.debug("getPaletteRefs(): H5Aclose(aid {}) failure: ", aid, ex2);
            }
        }

        return refBuf;
    }

    /**
     * H5Dset_extent verifies that the dataset is at least of size newDims,
     * extending it if necessary. The dimensionality of newDims is the same as
     * that of the dataspace of the dataset being changed.
     *
     * This function can be applied to the following datasets:
     * 1) any dataset with unlimited dimensions, and
     * 2) a dataset with fixed dimensions if the current dimension sizes are
     *    less than the maximum sizes set with maxdims (see H5Screate_simple).
     *
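     * For example, a minimal sketch that doubles the first dimension of an
     * extendible dataset (the variable dset is illustrative):
     *
     * <pre>
     * long[] newDims = dset.getDims().clone();
     * newDims[0] *= 2;
     * dset.extend(newDims);
     * </pre>
     *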
     * @param newDims the dimension target size
     *
     * @throws HDF5Exception
     *             If there is an error at the HDF5 library level.
     */
    public void extend(long[] newDims) throws HDF5Exception {
        long did = HDF5Constants.H5I_INVALID_HID;
        long sid = HDF5Constants.H5I_INVALID_HID;

        did = open();
        if (did >= 0) {
            try {
                H5.H5Dset_extent(did, newDims);
                H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_GLOBAL);
                sid = H5.H5Dget_space(did);
                long[] checkDims = new long[rank];
                H5.H5Sget_simple_extent_dims(sid, checkDims, null);
                log.trace("extend(): rank={}, checkDims={}", rank, checkDims);
                for (int i = 0; i < rank; i++) {
                    if (checkDims[i] != newDims[i]) {
                        log.debug("extend(): error extending dataset");
                        throw new HDF5Exception("error extending dataset " + getName());
                    }
                }
                dims = checkDims;
            }
            catch (Exception e) {
                log.debug("extend(): failure: ", e);
                throw new HDF5Exception(e.getMessage());
            }
            finally {
                if (sid > 0)
                    H5.H5Sclose(sid);

                close(did);
            }
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#isVirtual()
     */
    @Override
    public boolean isVirtual() {
        return isVirtual;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#getVirtualFilename(int)
     */
    @Override
    public String getVirtualFilename(int index) {
        if (isVirtual)
            return virtualNameList.get(index);
        else
            return null;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#getVirtualMaps()
     */
    @Override
    public int getVirtualMaps() {
        if (isVirtual)
            return virtualNameList.size();
        else
            return -1;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#toString(String delimiter, int maxItems)
     */
    @Override
    public String toString(String delimiter, int maxItems) {
        Object theData = originalBuf;
        if (theData == null) {
            log.debug("toString: value is null");
            return null;
        }

        if (theData instanceof List<?>) {
            log.trace("toString: value is list");
            return null;
        }

        Class<? extends Object> valClass = theData.getClass();

        if (!valClass.isArray()) {
            log.trace("toString: finish - not array");
            String strValue = theData.toString();
            if (maxItems > 0 && strValue.length() > maxItems)
                // truncate the extra characters
                strValue = strValue.substring(0, maxItems);
            return strValue;
        }

        // value is an array
        StringBuilder sb = new StringBuilder();
        long lsize = 1;
        for (int j = 0; j < dims.length; j++)
            lsize *= dims[j];

        log.trace("toString: isStdRef={} Array.getLength={}", ((H5Datatype) getDatatype()).isStdRef(), Array.getLength(theData));
        if (((H5Datatype) getDatatype()).isStdRef()) {
            String cname = valClass.getName();
            char dname = cname.charAt(cname.lastIndexOf('[') + 1);
            log.trace("toString: isStdRef with cname={} dname={}", cname, dname);
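            // theData holds the references as one flat byte array, packed
            // H5R_REF_BUF_SIZE bytes per reference; unpack one at a time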
            for (int i = 0; i < (int) lsize; i++) {
                int refIndex = HDF5Constants.H5R_REF_BUF_SIZE * i;
                byte[] refarr = new byte[(int) HDF5Constants.H5R_REF_BUF_SIZE];
                System.arraycopy(theData, refIndex, refarr, 0, (int) HDF5Constants.H5R_REF_BUF_SIZE);
                String ref_str = ((H5ReferenceType) getDatatype()).getReferenceRegion(refarr, false);
                log.trace("toString: ref_str[{}]={}", i, ref_str);
                if (i > 0)
                    sb.append(", ");
                sb.append(ref_str);
            }
            return sb.toString();
        }
        return super.toString(delimiter, maxItems);
    }

}