001/*****************************************************************************
002 * Copyright by The HDF Group.                                               *
003 * Copyright by the Board of Trustees of the University of Illinois.         *
004 * All rights reserved.                                                      *
005 *                                                                           *
006 * This file is part of the HDF Java Products distribution.                  *
007 * The full copyright notice, including terms governing use, modification,   *
008 * and redistribution, is contained in the COPYING file, which can be found  *
009 * at the root of the source code distribution tree,                         *
010 * or in https://www.hdfgroup.org/licenses.                                  *
011 * If you do not have access to either file, you may request a copy from     *
012 * help@hdfgroup.org.                                                        *
013 ****************************************************************************/
014
015package hdf.object.h5;
016
017import java.lang.reflect.Array;
018import java.math.BigDecimal;
019import java.text.DecimalFormat;
020import java.util.ArrayList;
021import java.util.List;
022import java.util.Vector;
023
024import hdf.object.Attribute;
025import hdf.object.Dataset;
026import hdf.object.Datatype;
027import hdf.object.FileFormat;
028import hdf.object.Group;
029import hdf.object.HObject;
030import hdf.object.MetaDataContainer;
031import hdf.object.ScalarDS;
032import hdf.object.h5.H5Attribute;
033import hdf.object.h5.H5MetaDataContainer;
034import hdf.object.h5.H5ReferenceType;
035
036import hdf.hdf5lib.H5;
037import hdf.hdf5lib.HDF5Constants;
038import hdf.hdf5lib.HDFArray;
039import hdf.hdf5lib.HDFNativeData;
040import hdf.hdf5lib.exceptions.HDF5DataFiltersException;
041import hdf.hdf5lib.exceptions.HDF5Exception;
042import hdf.hdf5lib.exceptions.HDF5LibraryException;
043import hdf.hdf5lib.structs.H5O_info_t;
044import hdf.hdf5lib.structs.H5O_token_t;
045
046import org.slf4j.Logger;
047import org.slf4j.LoggerFactory;
048
049/**
 * H5ScalarDS describes a multi-dimensional array of HDF5 scalar or atomic data types, such as byte, int,
 * short, long, float, double and String, and the operations performed on the scalar dataset.
052 *
053 * The library predefines a modest number of datatypes. For details, read
054 * <a href="https://hdfgroup.github.io/hdf5/_h5_t__u_g.html#sec_datatype">HDF5 Datatypes in HDF5 User
055 * Guide</a>
056 *
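 * A minimal usage sketch; the file name and dataset path below are placeholders for an
 * existing HDF5 file that contains a scalar dataset:
 *
 * <pre>
 * H5File file = new H5File("sample.h5", FileFormat.READ);
 * file.open();
 * H5ScalarDS dset = (H5ScalarDS) file.get("/arrays/dset");
 * dset.init();                  // load datatype and dataspace information, reset selection
 * Object data = dset.getData(); // read the current selection into memory
 * file.close();
 * </pre>
 *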
057 * @version 1.1 9/4/2007
058 * @author Peter X. Cao
059 */
060public class H5ScalarDS extends ScalarDS implements MetaDataContainer {
061    private static final long serialVersionUID = 2887517608230611642L;
062
063    private static final Logger log = LoggerFactory.getLogger(H5ScalarDS.class);
064
065    /**
066     * The metadata object for this data object. Members of the metadata are instances of Attribute.
067     */
068    private H5MetaDataContainer objMetadata;
069
070    /** the object properties */
071    private H5O_info_t objInfo;
072
073    /** the number of palettes */
074    private int NumberOfPalettes;
075
076    /** flag to indicate if the dataset is an external dataset */
077    private boolean isExternal = false;
078
079    /** flag to indicate if the dataset is a virtual dataset */
080    private boolean isVirtual = false;
081    /** the list of virtual names */
082    private List<String> virtualNameList;
083
084    /**
085     * flag to indicate if the dataset buffers should be refreshed.
086     */
087    protected boolean refresh = false;
088
089    /**
     * flag to indicate if the datatype in the file is the same as the datatype in memory
091     */
092    protected boolean isNativeDatatype = false;
093
094    /**
     * Constructs an instance of an H5 scalar dataset with the given file, dataset name and path.
     *
     * For example, in H5ScalarDS(h5file, "dset", "/arrays/"), "dset" is the name of the dataset and
     * "/arrays/" is the group path of the dataset.
     *
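     * A minimal sketch, assuming h5file is an H5File instance that has already been opened:
     *
     * <pre>
     * H5ScalarDS dset = new H5ScalarDS(h5file, "dset", "/arrays/");
     * dset.init(); // load datatype and dataspace information from the file
     * </pre>
     *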
100     * @param theFile
101     *            the file that contains the data object.
102     * @param theName
103     *            the name of the data object, e.g. "dset".
104     * @param thePath
105     *            the full path of the data object, e.g. "/arrays/".
106     */
107    public H5ScalarDS(FileFormat theFile, String theName, String thePath)
108    {
109        this(theFile, theName, thePath, null);
110    }
111
112    /**
     * @deprecated Not for public use in the future.<br>
     *             Use {@link #H5ScalarDS(FileFormat, String, String)} instead.
115     *
116     * @param theFile
117     *            the file that contains the data object.
118     * @param theName
119     *            the name of the data object, e.g. "dset".
120     * @param thePath
121     *            the full path of the data object, e.g. "/arrays/".
122     * @param oid
123     *            the oid of the data object.
124     */
125    @Deprecated
126    public H5ScalarDS(FileFormat theFile, String theName, String thePath, long[] oid)
127    {
128        super(theFile, theName, thePath, oid);
129        unsignedConverted = false;
130        NumberOfPalettes  = 0;
131        objMetadata       = new H5MetaDataContainer(theFile, theName, thePath, this);
132
133        if (theFile != null) {
134            if (oid == null) {
135                // retrieve the object ID
136                byte[] refBuf = null;
137                try {
138                    refBuf =
139                        H5.H5Rcreate_object(theFile.getFID(), this.getFullName(), HDF5Constants.H5P_DEFAULT);
140                    this.oid = HDFNativeData.byteToLong(refBuf);
141                    log.trace("constructor REF {} to OID {}", refBuf, this.oid);
142                }
143                catch (Exception ex) {
144                    log.debug("constructor ID {} for {} failed H5Rcreate_object", theFile.getFID(),
145                              this.getFullName());
146                }
147                finally {
148                    if (refBuf != null)
149                        H5.H5Rdestroy(refBuf);
150                }
151            }
152            log.trace("constructor OID {}", this.oid);
153            try {
154                objInfo = H5.H5Oget_info_by_name(theFile.getFID(), this.getFullName(),
155                                                 HDF5Constants.H5O_INFO_BASIC, HDF5Constants.H5P_DEFAULT);
156            }
157            catch (Exception ex) {
158                objInfo = new H5O_info_t(-1L, null, 0, 0, 0L, 0L, 0L, 0L, 0L);
159            }
160        }
161        else {
162            this.oid = null;
163            objInfo  = new H5O_info_t(-1L, null, 0, 0, 0L, 0L, 0L, 0L, 0L);
164        }
165    }
166
167    /*
168     * (non-Javadoc)
169     *
170     * @see hdf.object.HObject#open()
171     */
172    @Override
173    public long open()
174    {
175        long did = HDF5Constants.H5I_INVALID_HID;
176
177        if (getFID() < 0)
178            log.trace("open(): file id for:{} is invalid", getPath() + getName());
179        else {
180            try {
181                did = H5.H5Dopen(getFID(), getPath() + getName(), HDF5Constants.H5P_DEFAULT);
182                log.trace("open(): did={}", did);
183            }
184            catch (HDF5Exception ex) {
185                log.debug("open(): Failed to open dataset {}", getPath() + getName(), ex);
186                did = HDF5Constants.H5I_INVALID_HID;
187            }
188        }
189
190        return did;
191    }
192
193    /*
194     * (non-Javadoc)
195     *
196     * @see hdf.object.HObject#close(int)
197     */
198    @Override
199    public void close(long did)
200    {
201        if (did >= 0) {
202            try {
203                H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_LOCAL);
204            }
205            catch (Exception ex) {
206                log.debug("close(): H5Fflush(did {}) failure: ", did, ex);
207            }
208            try {
209                H5.H5Dclose(did);
210            }
211            catch (HDF5Exception ex) {
212                log.debug("close(): H5Dclose(did {}) failure: ", did, ex);
213            }
214        }
215    }
216
217    /**
218     * Retrieves datatype and dataspace information from file and sets the dataset
219     * in memory.
220     *
     * init() is designed to support lazy operation in a dataset object. When a
     * data object is retrieved from file, the datatype, dataspace and raw data are
     * not loaded into memory. When the raw data is to be read from file,
     * init() is first called to get the datatype and dataspace information, and
     * then the raw data is loaded from file.
226     *
227     * init() is also used to reset the selection of a dataset (start, stride and
228     * count) to the default, which is the entire dataset for 1D or 2D datasets. In
     * the following example, init() at step 1) retrieves datatype and dataspace
     * information from file. getData() at step 3) reads only one data point. init()
     * at step 4) resets the selection to the whole dataset. getData() at step 6)
     * reads the values of the whole dataset into memory.
233     *
234     * <pre>
235     * dset = (Dataset) file.get(NAME_DATASET);
236     *
237     * // 1) get datatype and dataspace information from file
238     * dset.init();
239     * rank = dset.getRank(); // rank = 2, a 2D dataset
240     * count = dset.getSelectedDims();
241     * start = dset.getStartDims();
242     * dims = dset.getDims();
243     *
244     * // 2) select only one data point
245     * for (int i = 0; i &lt; rank; i++) {
     *     start[i] = 0;
247     *     count[i] = 1;
248     * }
249     *
250     * // 3) read one data point
251     * data = dset.getData();
252     *
253     * // 4) reset selection to the whole dataset
254     * dset.init();
255     *
256     * // 5) clean the memory data buffer
257     * dset.clearData();
258     *
259     * // 6) Read the whole dataset
260     * data = dset.getData();
261     * </pre>
262     */
263    @Override
264    public void init()
265    {
266        if (inited) {
267            // already called. Initialize only once
268            resetSelection();
269            log.trace("init(): H5ScalarDS already initialized");
270            return;
271        }
272
273        long did       = HDF5Constants.H5I_INVALID_HID;
274        long tid       = HDF5Constants.H5I_INVALID_HID;
275        long sid       = HDF5Constants.H5I_INVALID_HID;
276        long nativeTID = HDF5Constants.H5I_INVALID_HID;
277
278        did = open();
279        if (did >= 0) {
280            try {
281                H5.H5Drefresh(did);
282            }
283            catch (Exception ex) {
284                log.debug("H5Drefresh(): ", ex);
285            }
286            // check if it is an external or virtual dataset
287            long pid = HDF5Constants.H5I_INVALID_HID;
288            try {
289                pid = H5.H5Dget_create_plist(did);
290                try {
291                    int nfiles     = H5.H5Pget_external_count(pid);
292                    isExternal     = (nfiles > 0);
293                    int layoutType = H5.H5Pget_layout(pid);
294                    if (isVirtual = (layoutType == HDF5Constants.H5D_VIRTUAL)) {
295                        try {
296                            long vmaps = H5.H5Pget_virtual_count(pid);
297                            if (vmaps > 0) {
298                                virtualNameList = new Vector<>();
299                                for (long next = 0; next < vmaps; next++) {
300                                    try {
301                                        String fname = H5.H5Pget_virtual_filename(pid, next);
302                                        virtualNameList.add(fname);
303                                        log.trace("init(): virtualNameList[{}]={}", next, fname);
304                                    }
305                                    catch (Exception err) {
306                                        log.trace("init(): vds[{}] continue", next);
307                                    }
308                                }
309                            }
310                        }
311                        catch (Exception err) {
312                            log.debug("init(): vds count error: ", err);
313                        }
314                    }
315                    log.trace("init(): pid={} nfiles={} isExternal={} isVirtual={}", pid, nfiles, isExternal,
316                              isVirtual);
317                }
318                catch (Exception ex) {
319                    log.debug("init(): check if it is an external or virtual dataset: ", ex);
320                }
321            }
322            catch (Exception ex) {
323                log.debug("init(): H5Dget_create_plist() failure: ", ex);
324            }
325            finally {
326                try {
327                    H5.H5Pclose(pid);
328                }
329                catch (Exception ex) {
330                    log.debug("init(): H5Pclose(pid {}) failure: ", pid, ex);
331                }
332            }
333
334            NumberOfPalettes = readNumberOfPalette(did);
335
336            try {
337                sid        = H5.H5Dget_space(did);
338                rank       = H5.H5Sget_simple_extent_ndims(sid);
339                space_type = H5.H5Sget_simple_extent_type(sid);
340                if (space_type == HDF5Constants.H5S_NULL)
341                    isNULL = true;
342                else
343                    isNULL = false;
344                tid = H5.H5Dget_type(did);
345                log.trace("init(): tid={} sid={} rank={} space_type={} ", tid, sid, rank, space_type);
346
347                if (rank == 0) {
348                    // a scalar data point
349                    isScalar = true;
350                    rank     = 1;
351                    dims     = new long[] {1};
352                    log.trace("init(): rank is a scalar data point");
353                }
354                else {
355                    isScalar = false;
356                    dims     = new long[rank];
357                    maxDims  = new long[rank];
358                    H5.H5Sget_simple_extent_dims(sid, dims, maxDims);
359                    log.trace("init(): rank={}, dims={}, maxDims={}", rank, dims, maxDims);
360                }
361
362                try {
363                    int nativeClass = H5.H5Tget_class(tid);
364                    if (nativeClass == HDF5Constants.H5T_REFERENCE) {
365                        long lsize = 1;
366                        if (rank > 0) {
367                            log.trace("init():rank={}, dims={}", rank, dims);
368                            for (int j = 0; j < dims.length; j++) {
369                                lsize *= dims[j];
370                            }
371                        }
372                        datatype = new H5ReferenceType(getFileFormat(), lsize, tid);
373                    }
374                    else
375                        datatype = new H5Datatype(getFileFormat(), tid);
376
377                    log.trace(
378                        "init(): tid={} is tclass={} has isText={} : isNamed={} :  isVLEN={} : isEnum={} : isUnsigned={} : isStdRef={} : isRegRef={}",
379                        tid, datatype.getDatatypeClass(), ((H5Datatype)datatype).isText(), datatype.isNamed(),
380                        datatype.isVLEN(), datatype.isEnum(), datatype.isUnsigned(),
381                        ((H5Datatype)datatype).isStdRef(), ((H5Datatype)datatype).isRegRef());
382                }
383                catch (Exception ex) {
384                    log.debug("init(): failed to create datatype for dataset: ", ex);
385                    datatype = null;
386                }
387
388                // Check if the datatype in the file is the native datatype
389                try {
390                    nativeTID        = H5.H5Tget_native_type(tid);
391                    isNativeDatatype = H5.H5Tequal(tid, nativeTID);
392                    log.trace("init(): isNativeDatatype={}", isNativeDatatype);
393                }
394                catch (Exception ex) {
395                    log.debug("init(): check if native type failure: ", ex);
396                }
397
398                try {
399                    pid              = H5.H5Dget_create_plist(did);
400                    int[] fillStatus = {0};
401                    if (H5.H5Pfill_value_defined(pid, fillStatus) >= 0) {
402                        // Check if fill value is user-defined before retrieving it.
403                        if (fillStatus[0] == HDF5Constants.H5D_FILL_VALUE_USER_DEFINED) {
404                            try {
405                                fillValue = H5Datatype.allocateArray((H5Datatype)datatype, 1);
406                            }
407                            catch (OutOfMemoryError e) {
408                                log.debug("init(): out of memory: ", e);
409                                fillValue = null;
410                            }
411                            catch (Exception ex) {
412                                log.debug("init(): allocate fill value buffer failed: ", ex);
413                                fillValue = null;
414                            }
415
416                            log.trace("init(): fillValue={}", fillValue);
417                            try {
418                                H5.H5Pget_fill_value(pid, nativeTID, fillValue);
419                                log.trace("init(): H5Pget_fill_value={}", fillValue);
420                                if (fillValue != null) {
421                                    if (datatype.isUnsigned() && !isFillValueConverted) {
422                                        fillValue            = ScalarDS.convertFromUnsignedC(fillValue, null);
423                                        isFillValueConverted = true;
424                                    }
425
426                                    int n = Array.getLength(fillValue);
427                                    for (int i = 0; i < n; i++)
428                                        addFilteredImageValue((Number)Array.get(fillValue, i));
429                                }
430                            }
431                            catch (Exception ex2) {
432                                log.debug("init(): fill value was defined: ", ex2);
433                                fillValue = null;
434                            }
435                        }
436                    }
437                }
438                catch (HDF5Exception ex) {
439                    log.debug("init(): check if fill value is defined failure: ", ex);
440                }
441                finally {
442                    try {
443                        H5.H5Pclose(pid);
444                    }
445                    catch (Exception ex) {
446                        log.debug("init(): H5Pclose(pid {}) failure: ", pid, ex);
447                    }
448                }
449
450                inited = true;
451            }
452            catch (HDF5Exception ex) {
453                log.debug("init(): ", ex);
454            }
455            finally {
456                try {
457                    H5.H5Tclose(nativeTID);
458                }
459                catch (Exception ex2) {
460                    log.debug("init(): H5Tclose(nativeTID {}) failure: ", nativeTID, ex2);
461                }
462                try {
463                    H5.H5Tclose(tid);
464                }
465                catch (HDF5Exception ex2) {
466                    log.debug("init(): H5Tclose(tid {}) failure: ", tid, ex2);
467                }
468                try {
469                    H5.H5Sclose(sid);
470                }
471                catch (HDF5Exception ex2) {
472                    log.debug("init(): H5Sclose(sid {}) failure: ", sid, ex2);
473                }
474            }
475
476            // check for the type of image and interlace mode
            // it is a true color image in one of three cases:
478            // 1) IMAGE_SUBCLASS = IMAGE_TRUECOLOR,
479            // 2) INTERLACE_MODE = INTERLACE_PIXEL,
480            // 3) INTERLACE_MODE = INTERLACE_PLANE
481            if ((rank >= 3) && isImage) {
482                interlace   = -1;
483                isTrueColor = isStringAttributeOf(did, "IMAGE_SUBCLASS", "IMAGE_TRUECOLOR");
484
485                if (isTrueColor) {
486                    interlace = INTERLACE_PIXEL;
487                    if (isStringAttributeOf(did, "INTERLACE_MODE", "INTERLACE_PLANE")) {
488                        interlace = INTERLACE_PLANE;
489                    }
490                }
491            }
492
493            close(did);
494
495            startDims    = new long[rank];
496            selectedDims = new long[rank];
497
498            resetSelection();
499        }
500        else {
501            log.debug("init(): failed to open dataset");
502        }
503        refresh = false;
504    }
505
506    /**
507     * Get the token for this object.
508     *
     * @return the token for this object, as an array of long values.
510     */
511    public long[] getToken()
512    {
513        H5O_token_t token = objInfo.token;
514        return HDFNativeData.byteToLong(token.data);
515    }
516
517    /**
518     * Check if the object has any attributes attached.
519     *
520     * @return true if it has any attributes, false otherwise.
521     */
522    @Override
523    public boolean hasAttribute()
524    {
525        objInfo.num_attrs = objMetadata.getObjectAttributeSize();
526
527        if (objInfo.num_attrs < 0) {
528            long did = open();
529            if (did >= 0) {
530                objInfo.num_attrs = 0;
531
532                try {
533                    objInfo = H5.H5Oget_info(did);
534
535                    if (objInfo.num_attrs > 0) {
                        // test if it is an image
538                        Object avalue = getAttrValue(did, "CLASS");
539                        if (avalue != null) {
540                            try {
541                                isImageDisplay = isImage =
542                                    "IMAGE".equalsIgnoreCase(new String((byte[])avalue).trim());
543                                log.trace("hasAttribute(): isImageDisplay dataset: {} with value = {}",
544                                          isImageDisplay, avalue);
545                            }
546                            catch (Exception err) {
547                                log.debug("hasAttribute(): check image: ", err);
548                            }
549                        }
550
551                        // retrieve the IMAGE_MINMAXRANGE
552                        avalue = getAttrValue(did, "IMAGE_MINMAXRANGE");
553                        if (avalue != null) {
554                            double x0 = 0;
555                            double x1 = 0;
556                            try {
557                                x0 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 0).toString());
558                                x1 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 1).toString());
559                            }
560                            catch (Exception ex2) {
561                                x0 = x1 = 0;
562                            }
563                            if (x1 > x0) {
564                                imageDataRange    = new double[2];
565                                imageDataRange[0] = x0;
566                                imageDataRange[1] = x1;
567                            }
568                        }
569
570                        try {
571                            checkCFconvention(did);
572                        }
573                        catch (Exception ex) {
574                            log.debug("hasAttribute(): checkCFconvention(did {}):", did, ex);
575                        }
576                    }
577                }
578                catch (Exception ex) {
579                    objInfo.num_attrs = 0;
580                    log.debug("hasAttribute(): get object info failure: ", ex);
581                }
582                finally {
583                    close(did);
584                }
585                objMetadata.setObjectAttributeSize((int)objInfo.num_attrs);
586            }
587            else {
588                log.debug("hasAttribute(): could not open dataset");
589            }
590        }
591
592        log.trace("hasAttribute(): nAttributes={}", objInfo.num_attrs);
593        return (objInfo.num_attrs > 0);
594    }
595
596    /**
597     * Returns the datatype of the data object.
598     *
599     * @return the datatype of the data object.
600     */
601    @Override
602    public Datatype getDatatype()
603    {
604        if (!inited)
605            init();
606
607        if (datatype == null) {
608            long did = HDF5Constants.H5I_INVALID_HID;
609            long tid = HDF5Constants.H5I_INVALID_HID;
610
611            did = open();
612            if (did >= 0) {
613                try {
614                    tid = H5.H5Dget_type(did);
615                    log.trace("getDatatype(): isNativeDatatype={}", isNativeDatatype);
616                    if (!isNativeDatatype) {
617                        long tmptid = -1;
618                        try {
619                            tmptid = H5Datatype.toNative(tid);
620                            if (tmptid >= 0) {
621                                try {
622                                    H5.H5Tclose(tid);
623                                }
624                                catch (Exception ex2) {
625                                    log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex2);
626                                }
627                                tid = tmptid;
628                            }
629                        }
630                        catch (Exception ex) {
631                            log.debug("getDatatype(): toNative: ", ex);
632                        }
633                    }
634                    int nativeClass = H5.H5Tget_class(tid);
635                    if (nativeClass == HDF5Constants.H5T_REFERENCE) {
636                        long lsize = 1;
637                        if (rank > 0) {
638                            log.trace("getDatatype(): rank={}, dims={}", rank, dims);
639                            for (int j = 0; j < dims.length; j++) {
640                                lsize *= dims[j];
641                            }
642                        }
643                        datatype = new H5ReferenceType(getFileFormat(), lsize, tid);
644                    }
645                    else
646                        datatype = new H5Datatype(getFileFormat(), tid);
647                }
648                catch (Exception ex) {
649                    log.debug("getDatatype(): ", ex);
650                }
651                finally {
652                    try {
653                        H5.H5Tclose(tid);
654                    }
655                    catch (HDF5Exception ex) {
656                        log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex);
657                    }
658                    try {
659                        H5.H5Dclose(did);
660                    }
661                    catch (HDF5Exception ex) {
662                        log.debug("getDatatype(): H5Dclose(did {}) failure: ", did, ex);
663                    }
664                }
665            }
666        }
667
668        if (isExternal) {
669            String pdir = this.getFileFormat().getAbsoluteFile().getParent();
670
671            if (pdir == null) {
672                pdir = ".";
673            }
674            System.setProperty("user.dir", pdir);
675            log.trace("getDatatype(): External dataset: user.dir={}", pdir);
676        }
677
678        return datatype;
679    }
680
681    /**
     * Refreshes the dataset before re-reading its data.
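     *
     * A minimal sketch of re-reading a dataset whose contents may have changed in the file:
     *
     * <pre>
     * Object latest = dset.refreshData(); // re-initialize the dataset and read its data again
     * </pre>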
683     */
684    @Override
685    public Object refreshData()
686    {
687        inited  = false;
688        refresh = true;
689
690        init();
691        return super.refreshData();
692    }
693
694    /**
     * Removes all of the elements from the metadata list.
696     * The list should be empty after this call returns.
697     */
698    @Override
699    public void clear()
700    {
701        super.clear();
702        objMetadata.clear();
703    }
704
705    /*
706     * (non-Javadoc)
707     *
708     * @see hdf.object.Dataset#readBytes()
709     */
710    @Override
711    public byte[] readBytes() throws HDF5Exception
712    {
713        byte[] theData = null;
714
715        if (!isInited())
716            init();
717
718        long did = open();
719        if (did >= 0) {
720            long fspace = HDF5Constants.H5I_INVALID_HID;
721            long mspace = HDF5Constants.H5I_INVALID_HID;
722            long tid    = HDF5Constants.H5I_INVALID_HID;
723
724            try {
725                long[] lsize = {1};
726                for (int j = 0; j < selectedDims.length; j++)
727                    lsize[0] *= selectedDims[j];
728
729                fspace = H5.H5Dget_space(did);
730                mspace = H5.H5Screate_simple(rank, selectedDims, null);
731
732                // set the rectangle selection
733                // HDF5 bug: for scalar dataset, H5Sselect_hyperslab gives core dump
734                if (rank * dims[0] > 1)
735                    H5.H5Sselect_hyperslab(fspace, HDF5Constants.H5S_SELECT_SET, startDims, selectedStride,
736                                           selectedDims, null); // set block to 1
737
738                tid       = H5.H5Dget_type(did);
739                long size = H5.H5Tget_size(tid) * lsize[0];
740                log.trace("readBytes(): size = {}", size);
741
742                if (size < Integer.MIN_VALUE || size > Integer.MAX_VALUE)
743                    throw new Exception("Invalid int size");
744
745                theData = new byte[(int)size];
746
747                log.trace("readBytes(): H5Dread: did={} tid={} fspace={} mspace={}", did, tid, fspace,
748                          mspace);
749                H5.H5Dread(did, tid, mspace, fspace, HDF5Constants.H5P_DEFAULT, theData);
750            }
751            catch (Exception ex) {
752                log.debug("readBytes(): failed to read data: ", ex);
753            }
754            finally {
755                try {
756                    H5.H5Sclose(fspace);
757                }
758                catch (Exception ex2) {
759                    log.debug("readBytes(): H5Sclose(fspace {}) failure: ", fspace, ex2);
760                }
761                try {
762                    H5.H5Sclose(mspace);
763                }
764                catch (Exception ex2) {
765                    log.debug("readBytes(): H5Sclose(mspace {}) failure: ", mspace, ex2);
766                }
767                try {
768                    H5.H5Tclose(tid);
769                }
770                catch (HDF5Exception ex2) {
771                    log.debug("readBytes(): H5Tclose(tid {}) failure: ", tid, ex2);
772                }
773                close(did);
774            }
775        }
776
777        return theData;
778    }
779
780    /**
781     * Reads the data from file.
782     *
783     * read() reads the data from file to a memory buffer and returns the memory
784     * buffer. The dataset object does not hold the memory buffer. To store the
785     * memory buffer in the dataset object, one must call getData().
786     *
787     * By default, the whole dataset is read into memory. Users can also select
788     * a subset to read. Subsetting is done in an implicit way.
789     *
790     * <b>How to Select a Subset</b>
791     *
792     * A selection is specified by three arrays: start, stride and count.
793     * <ol>
794     * <li>start: offset of a selection
795     * <li>stride: determines how many elements to move in each dimension
796     * <li>count: number of elements to select in each dimension
797     * </ol>
     * getStartDims(), getStride() and getSelectedDims() return the start,
799     * stride and count arrays respectively. Applications can make a selection
800     * by changing the values of the arrays.
801     *
802     * The following example shows how to make a subset. In the example, the
803     * dataset is a 4-dimensional array of [200][100][50][10], i.e. dims[0]=200;
804     * dims[1]=100; dims[2]=50; dims[3]=10; <br>
805     * We want to select every other data point in dims[1] and dims[2]
806     *
807     * <pre>
808     * int rank = dataset.getRank(); // number of dimensions of the dataset
809     * long[] dims = dataset.getDims(); // the dimension sizes of the dataset
810     * long[] selected = dataset.getSelectedDims(); // the selected size of the
811     *                                              // dataset
812     * long[] start = dataset.getStartDims(); // the offset of the selection
813     * long[] stride = dataset.getStride(); // the stride of the dataset
814     * int[] selectedIndex = dataset.getSelectedIndex(); // the selected
815     *                                                   // dimensions for
816     *                                                   // display
817     *
818     * // select dim1 and dim2 as 2D data for display, and slice through dim0
819     * selectedIndex[0] = 1;
820     * selectedIndex[1] = 2;
     * selectedIndex[2] = 0;
822     *
823     * // reset the selection arrays
824     * for (int i = 0; i &lt; rank; i++) {
825     *     start[i] = 0;
826     *     selected[i] = 1;
827     *     stride[i] = 1;
828     * }
829     *
830     * // set stride to 2 on dim1 and dim2 so that every other data point is
831     * // selected.
832     * stride[1] = 2;
833     * stride[2] = 2;
834     *
835     * // set the selection size of dim1 and dim2
836     * selected[1] = dims[1] / stride[1];
     * selected[2] = dims[2] / stride[2];
838     *
     * // when dataset.getData() is called, the selection above will be used since
     * // the dimension arrays are passed by reference. Changes of these arrays
     * // outside the dataset object directly change the values of these arrays
     * // in the dataset object.
844     * </pre>
845     *
846     * For ScalarDS, the memory data buffer is a one-dimensional array of byte,
847     * short, int, float, double or String type based on the datatype of the
848     * dataset.
849     *
850     * @return the data read from file.
851     *
852     * @see #getData()
853     * @see hdf.object.DataFormat#read()
854     *
855     * @throws Exception
856     *             if object can not be read
857     */
858    @Override
859    public Object read() throws Exception
860    {
861        Object readData = null;
862
863        if (!isInited())
864            init();
865
866        try {
867            readData = scalarDatasetCommonIO(H5File.IO_TYPE.READ, null);
868        }
869        catch (Exception ex) {
870            log.debug("read(): failed to read scalar dataset: ", ex);
871            throw new Exception("failed to read scalar dataset: " + ex.getMessage(), ex);
872        }
873
874        return readData;
875    }
876
877    /**
878     * Writes the given data buffer into this dataset in a file.
879     *
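     * A minimal sketch, assuming the dataset has been initialized and its datatype maps to a
     * Java int array (adjust the cast to match the actual datatype):
     *
     * <pre>
     * int[] values = (int[]) dset.getData(); // read the current selection
     * values[0] = 99;                        // modify the buffer in memory
     * dset.write(values);                    // write the buffer back to the file
     * </pre>
     *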
880     * @param buf
881     *            The buffer that contains the data values.
882     *
883     * @throws Exception
884     *             If there is an error at the HDF5 library level.
885     */
886    @Override
887    public void write(Object buf) throws Exception
888    {
889        if (this.getFileFormat().isReadOnly())
890            throw new Exception("cannot write to scalar dataset in file opened as read-only");
891
892        if (!isInited())
893            init();
894
895        try {
896            scalarDatasetCommonIO(H5File.IO_TYPE.WRITE, buf);
897        }
898        catch (Exception ex) {
899            log.debug("write(Object): failed to write to scalar dataset: ", ex);
900            throw new Exception("failed to write to scalar dataset: " + ex.getMessage(), ex);
901        }
902    }
903
904    private Object scalarDatasetCommonIO(H5File.IO_TYPE ioType, Object writeBuf) throws Exception
905    {
906        H5Datatype dsDatatype     = (H5Datatype)getDatatype();
907        H5Datatype dsBaseDatatype = (H5Datatype)getDatatype().getDatatypeBase();
908        boolean BDTisRef          = false;
909        if (dsBaseDatatype != null)
910            BDTisRef = dsBaseDatatype.isStdRef();
911        Object theData = null;
912
913        /*
914         * I/O type-specific pre-initialization.
915         */
916        if (ioType == H5File.IO_TYPE.WRITE) {
917            if (writeBuf == null) {
918                log.debug("scalarDatasetCommonIO(): writeBuf is null");
919                throw new Exception("write buffer is null");
920            }
921        }
922
923        long did = open();
924        if (did >= 0) {
925            long[] spaceIDs = {HDF5Constants.H5I_INVALID_HID,
926                               HDF5Constants.H5I_INVALID_HID}; // spaceIDs[0]=mspace, spaceIDs[1]=fspace
927
928            try {
929                /*
930                 * NOTE: this call sets up a hyperslab selection in the file according to the
931                 * current selection in the dataset object.
932                 */
933                long totalSelectedSpacePoints = H5Utils.getTotalSelectedSpacePoints(
934                    did, dims, startDims, selectedStride, selectedDims, spaceIDs);
935
936                if (ioType == H5File.IO_TYPE.READ) {
937                    log.trace(
938                        "scalarDatasetCommonIO():read ioType isNamed={} isEnum={} isText={} isRefObj={}",
939                        dsDatatype.isNamed(), dsDatatype.isEnum(), dsDatatype.isText(),
940                        dsDatatype.isRefObj());
941                    if (dsDatatype.isVarStr()) {
942                        try {
943                            theData = H5Datatype.allocateArray(dsDatatype, (int)totalSelectedSpacePoints);
944                        }
945                        catch (OutOfMemoryError err) {
946                            log.debug("scalarDatasetCommonIO(): Out of memory");
947                            throw new HDF5Exception("Out Of Memory");
948                        }
949                    }
950                    else if (dsDatatype.isVLEN()) {
951                        theData = new ArrayList[(int)totalSelectedSpacePoints];
952                        for (int j = 0; j < (int)totalSelectedSpacePoints; j++)
953                            ((ArrayList[])theData)[j] = new ArrayList<byte[]>();
954                    }
955                    else if ((originalBuf == null) || dsDatatype.isEnum() || dsDatatype.isText() ||
956                             dsDatatype.isRefObj() ||
957                             ((originalBuf != null) && (totalSelectedSpacePoints != nPoints))) {
958                        try {
959                            theData = H5Datatype.allocateArray(dsDatatype, (int)totalSelectedSpacePoints);
960                        }
961                        catch (OutOfMemoryError err) {
962                            log.debug("scalarDatasetCommonIO(): Out of memory");
963                            throw new HDF5Exception("Out Of Memory");
964                        }
965                    }
966                    else {
967                        // reuse the buffer if the size is the same
968                        log.trace(
969                            "scalarDatasetCommonIO():read ioType reuse the buffer if the size is the same");
970                        theData = originalBuf;
971                    }
972
973                    if (theData != null) {
974                        /*
975                         * Actually read the data now that everything has been setup.
976                         */
977                        long tid = HDF5Constants.H5I_INVALID_HID;
978                        try {
979                            log.trace("scalarDatasetCommonIO():read ioType create native");
980                            tid = dsDatatype.createNative();
981
982                            if (dsDatatype.isVarStr()) {
983                                log.trace(
984                                    "scalarDatasetCommonIO(): H5Dread_VLStrings did={} tid={} spaceIDs[0]={} spaceIDs[1]={}",
985                                    did, tid,
986                                    (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
987                                    (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
988
989                                H5.H5Dread_VLStrings(did, tid, spaceIDs[0], spaceIDs[1],
990                                                     HDF5Constants.H5P_DEFAULT, (Object[])theData);
991                            }
992                            else if (dsDatatype.isVLEN() ||
993                                     (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isVLEN())) {
994                                log.trace(
995                                    "scalarDatasetCommonIO(): H5DreadVL did={} tid={} spaceIDs[0]={} spaceIDs[1]={}",
996                                    did, tid,
997                                    (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
998                                    (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
999
1000                                H5.H5DreadVL(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT,
1001                                             (Object[])theData);
1002                            }
1003                            else {
1004                                log.trace(
1005                                    "scalarDatasetCommonIO(): H5Dread did={} tid={} spaceIDs[0]={} spaceIDs[1]={}",
1006                                    did, tid,
1007                                    (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
1008                                    (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
1009
1010                                H5.H5Dread(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT,
1011                                           theData);
1012                            }
1013                        }
1014                        catch (HDF5DataFiltersException exfltr) {
1015                            log.debug("scalarDatasetCommonIO(): read failure: ", exfltr);
1016                            throw new Exception("Filter not available exception: " + exfltr.getMessage(),
1017                                                exfltr);
1018                        }
1019                        catch (Exception ex) {
1020                            log.debug("scalarDatasetCommonIO(): read failure: ", ex);
1021                            throw new Exception(ex.getMessage(), ex);
1022                        }
1023                        finally {
1024                            dsDatatype.close(tid);
1025                        }
1026
1027                        /*
1028                         * Perform any necessary data conversions.
1029                         */
1030                        if (dsDatatype.isText() && convertByteToString && (theData instanceof byte[])) {
1031                            log.trace(
1032                                "scalarDatasetCommonIO(): isText: converting byte array to string array");
1033                            theData = byteToString((byte[])theData, (int)dsDatatype.getDatatypeSize());
1034                        }
1035                        else if (dsDatatype.isFloat() && dsDatatype.getDatatypeSize() == 16) {
1036                            log.trace(
1037                                "scalarDatasetCommonIO(): isFloat: converting byte array to BigDecimal array");
1038                            theData = dsDatatype.byteToBigDecimal(0, (int)totalSelectedSpacePoints,
1039                                                                  (byte[])theData);
1040                        }
1041                        else if (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isFloat() &&
1042                                 dsDatatype.getDatatypeBase().getDatatypeSize() == 16) {
1043                            log.trace(
1044                                "scalarDatasetCommonIO(): isArray and isFloat: converting byte array to BigDecimal array");
1045                            long[] arrayDims = dsDatatype.getArrayDims();
1046                            int asize        = (int)totalSelectedSpacePoints;
1047                            for (int j = 0; j < arrayDims.length; j++) {
1048                                asize *= arrayDims[j];
1049                            }
1050                            theData = ((H5Datatype)dsDatatype.getDatatypeBase())
1051                                          .byteToBigDecimal(0, asize, (byte[])theData);
1052                        }
1053                        else if (dsDatatype.isRef() && (theData instanceof byte[])) {
1054                            log.trace(
1055                                "scalarDatasetCommonIO():read ioType isRef: converting byte array to List of bytes");
1056                            ArrayList<byte[]> theListData = new ArrayList<>((int)totalSelectedSpacePoints);
1057                            for (int m = 0; m < (int)totalSelectedSpacePoints; m++) {
1058                                byte[] curBytes = new byte[(int)dsDatatype.getDatatypeSize()];
1059                                try {
1060                                    System.arraycopy(theData, m * (int)dsDatatype.getDatatypeSize(), curBytes,
1061                                                     0, (int)dsDatatype.getDatatypeSize());
1062                                    theListData.add(curBytes);
1063                                }
1064                                catch (Exception err) {
1065                                    log.trace("scalarDatasetCommonIO(): arraycopy failure: ", err);
1066                                }
1067                            }
1068                            theData = theListData;
1069                        }
1070                    }
1071                } // H5File.IO_TYPE.READ
1072                else {
1073                    /*
1074                     * Perform any necessary data conversions before writing the data.
1075                     *
1076                     * Note that v-len strings do not get converted, regardless of
1077                     * conversion request type.
1078                     */
1079                    Object tmpData = writeBuf;
1080                    try {
1081                        // Check if we need to convert integer data
1082                        int tsize    = (int)dsDatatype.getDatatypeSize();
1083                        String cname = writeBuf.getClass().getName();
1084                        log.trace("scalarDatasetCommonIO(): cname={} of datatype size={}", cname, tsize);
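                        // The character after the last '[' in the array class name is the JVM element
                        // descriptor ('S' = short, 'I' = int, 'J' = long). A conversion is needed when the
                        // Java element type is wider than the file datatype (e.g. short values written to a
                        // 1-byte type) or when unsigned values were converted when the data was read.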
1085                        char dname = cname.charAt(cname.lastIndexOf("[") + 1);
1086                        boolean doIntConversion =
1087                            (((tsize == 1) && (dname == 'S')) || ((tsize == 2) && (dname == 'I')) ||
1088                             ((tsize == 4) && (dname == 'J')) ||
1089                             (dsDatatype.isUnsigned() && unsignedConverted));
1090
1091                        if (doIntConversion) {
1092                            log.trace(
1093                                "scalarDatasetCommonIO(): converting integer data to unsigned C-type integers");
1094                            tmpData = convertToUnsignedC(writeBuf, null);
1095                        }
1096                        else if (dsDatatype.isText() && !dsDatatype.isVarStr() && convertByteToString &&
1097                                 !(writeBuf instanceof byte[])) {
1098                            log.trace("scalarDatasetCommonIO(): converting string array to byte array");
1099                            tmpData = stringToByte((String[])writeBuf, tsize);
1100                        }
1101                        else if (dsDatatype.isEnum() && (Array.get(writeBuf, 0) instanceof String)) {
1102                            log.trace("scalarDatasetCommonIO(): converting enum names to values");
1103                            tmpData = dsDatatype.convertEnumNameToValue((String[])writeBuf);
1104                        }
1105                        else if (dsDatatype.isFloat() && dsDatatype.getDatatypeSize() == 16) {
1106                            log.trace(
1107                                "scalarDatasetCommonIO(): isFloat: converting BigDecimal array to byte array");
1108                            throw new Exception("data conversion failure: cannot write BigDecimal values");
1109                            // tmpData = dsDatatype.bigDecimalToByte(0, (int)totalSelectedSpacePoints,
1110                            // (BigDecimal[]) writeBuf);
1111                        }
1112                    }
1113                    catch (Exception ex) {
1114                        log.debug("scalarDatasetCommonIO(): data conversion failure: ", ex);
1115                        throw new Exception("data conversion failure: " + ex.getMessage());
1116                    }
1117
1118                    /*
1119                     * Actually write the data now that everything has been setup.
1120                     */
1121                    long tid = HDF5Constants.H5I_INVALID_HID;
1122                    try {
1123                        tid = dsDatatype.createNative();
1124
1125                        if (dsDatatype.isVarStr()) {
1126                            log.trace(
1127                                "scalarDatasetCommonIO(): H5Dwrite_VLStrings did={} tid={} spaceIDs[0]={} spaceIDs[1]={}",
1128                                did, tid,
1129                                (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
1130                                (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
1131
1132                            H5.H5Dwrite_VLStrings(did, tid, spaceIDs[0], spaceIDs[1],
1133                                                  HDF5Constants.H5P_DEFAULT, (Object[])tmpData);
1134                        }
1135                        else if (dsDatatype.isVLEN() ||
1136                                 (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isVLEN())) {
1137                            log.trace(
1138                                "scalarDatasetCommonIO(): H5DwriteVL did={} tid={} spaceIDs[0]={} spaceIDs[1]={}",
1139                                did, tid,
1140                                (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
1141                                (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
1142
1143                            H5.H5DwriteVL(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT,
1144                                          (Object[])tmpData);
1145                        }
1146                        else {
1147                            log.trace(
1148                                "scalarDatasetCommonIO(): H5Dwrite did={} tid={} spaceIDs[0]={} spaceIDs[1]={}",
1149                                did, tid,
1150                                (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
1151                                (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
1152
1153                            H5.H5Dwrite(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT,
1154                                        tmpData);
1155                        }
1156                    }
1157                    catch (Exception ex) {
1158                        log.debug("scalarDatasetCommonIO(): write failure: ", ex);
1159                        throw new Exception(ex.getMessage());
1160                    }
1161                    finally {
1162                        dsDatatype.close(tid);
1163                    }
1164                } // H5File.IO_TYPE.WRITE
1165            }
1166            finally {
1167                if (HDF5Constants.H5S_ALL != spaceIDs[0]) {
1168                    try {
1169                        H5.H5Sclose(spaceIDs[0]);
1170                    }
1171                    catch (Exception ex) {
1172                        log.debug("scalarDatasetCommonIO(): H5Sclose(spaceIDs[0] {}) failure: ", spaceIDs[0],
1173                                  ex);
1174                    }
1175                }
1176
1177                if (HDF5Constants.H5S_ALL != spaceIDs[1]) {
1178                    try {
1179                        H5.H5Sclose(spaceIDs[1]);
1180                    }
1181                    catch (Exception ex) {
1182                        log.debug("scalarDatasetCommonIO(): H5Sclose(spaceIDs[1] {}) failure: ", spaceIDs[1],
1183                                  ex);
1184                    }
1185                }
1186
1187                close(did);
1188            }
1189        }
1190        else
1191            log.debug("scalarDatasetCommonIO(): failed to open dataset");
1192
1193        return theData;
1194    }
1195
1196    /**
1197     * Retrieves the object's metadata, such as attributes, from the file.
1198     *
1199     * Metadata, such as attributes, is stored in a List.
1200     *
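     * A minimal sketch of listing the attributes attached to this dataset; the output relies
     * on each Attribute's string representation:
     *
     * <pre>
     * List&lt;Attribute&gt; attrs = dset.getMetadata();
     * for (Attribute attr : attrs)
     *     System.out.println(attr);
     * </pre>
     *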
1201     * @return the list of metadata objects.
1202     *
1203     * @throws HDF5Exception
1204     *             if the metadata can not be retrieved
1205     */
1206    @Override
1207    public List<Attribute> getMetadata() throws HDF5Exception
1208    {
1209        int gmIndexType  = 0;
1210        int gmIndexOrder = 0;
1211
1212        try {
1213            gmIndexType = fileFormat.getIndexType(null);
1214        }
1215        catch (Exception ex) {
1216            log.debug("getMetadata(): getIndexType failed: ", ex);
1217        }
1218        try {
1219            gmIndexOrder = fileFormat.getIndexOrder(null);
1220        }
1221        catch (Exception ex) {
1222            log.debug("getMetadata(): getIndexOrder failed: ", ex);
1223        }
1224        return this.getMetadata(gmIndexType, gmIndexOrder);
1225    }
1226
1227    /**
1228     * Retrieves the object's metadata, such as attributes, from the file.
1229     *
1230     * Metadata, such as attributes, is stored in a List.
1231     *
1232     * @param attrPropList
1233     *             the list of properties to get
1234     *
1235     * @return the list of metadata objects.
1236     *
1237     * @throws HDF5Exception
1238     *             if the metadata can not be retrieved
1239     */
1240    public List<Attribute> getMetadata(int... attrPropList) throws HDF5Exception
1241    {
1242        if (!isInited())
1243            init();
1244
1245        try {
1246            this.linkTargetObjName = H5File.getLinkTargetName(this);
1247        }
1248        catch (Exception ex) {
1249            log.debug("getMetadata(): getLinkTargetName failed: ", ex);
1250        }
1251
1252        if (objMetadata.getAttributeList() == null) {
1253            long did  = HDF5Constants.H5I_INVALID_HID;
1254            long pcid = HDF5Constants.H5I_INVALID_HID;
1255            long paid = HDF5Constants.H5I_INVALID_HID;
1256
1257            did = open();
1258            if (did >= 0) {
1259                try {
1260                    // get the compression and chunk information
1261                    pcid             = H5.H5Dget_create_plist(did);
1262                    paid             = H5.H5Dget_access_plist(did);
1263                    long storageSize = H5.H5Dget_storage_size(did);
1264                    int nfilt        = H5.H5Pget_nfilters(pcid);
1265                    int layoutType   = H5.H5Pget_layout(pcid);
1266
1267                    storageLayout.setLength(0);
1268                    compression.setLength(0);
1269
1270                    if (layoutType == HDF5Constants.H5D_CHUNKED) {
1271                        chunkSize = new long[rank];
1272                        H5.H5Pget_chunk(pcid, rank, chunkSize);
1273                        int n = chunkSize.length;
1274                        storageLayout.append("CHUNKED: ").append(chunkSize[0]);
1275                        for (int i = 1; i < n; i++)
1276                            storageLayout.append(" X ").append(chunkSize[i]);
1277
1278                        if (nfilt > 0) {
1279                            long nelmts = 1;
1280                            long uncompSize;
1281                            long datumSize = getDatatype().getDatatypeSize();
1282
1283                            if (datumSize < 0) {
1284                                long tmptid = HDF5Constants.H5I_INVALID_HID;
1285                                try {
1286                                    tmptid    = H5.H5Dget_type(did);
1287                                    datumSize = H5.H5Tget_size(tmptid);
1288                                }
1289                                finally {
1290                                    try {
1291                                        H5.H5Tclose(tmptid);
1292                                    }
1293                                    catch (Exception ex2) {
1294                                        log.debug("getMetadata(): H5Tclose(tmptid {}) failure: ", tmptid,
1295                                                  ex2);
1296                                    }
1297                                }
1298                            }
1299
1300                            for (int i = 0; i < rank; i++)
1301                                nelmts *= dims[i];
1302                            uncompSize = nelmts * datumSize;
1303
1304                            /* compression ratio = uncompressed size / compressed size */
1305
1306                            if (storageSize != 0) {
1307                                double ratio     = (double)uncompSize / (double)storageSize;
1308                                DecimalFormat df = new DecimalFormat();
1309                                df.setMinimumFractionDigits(3);
1310                                df.setMaximumFractionDigits(3);
1311                                compression.append(df.format(ratio)).append(":1");
1312                            }
1313                        }
1314                    }
1315                    else if (layoutType == HDF5Constants.H5D_COMPACT) {
1316                        storageLayout.append("COMPACT");
1317                    }
1318                    else if (layoutType == HDF5Constants.H5D_CONTIGUOUS) {
1319                        storageLayout.append("CONTIGUOUS");
1320                        if (H5.H5Pget_external_count(pcid) > 0)
1321                            storageLayout.append(" - EXTERNAL ");
1322                    }
1323                    else if (layoutType == HDF5Constants.H5D_VIRTUAL) {
1324                        storageLayout.append("VIRTUAL - ");
1325                        try {
1326                            long vmaps = H5.H5Pget_virtual_count(pcid);
1327                            try {
1328                                int virtView = H5.H5Pget_virtual_view(paid);
1329                                long virtGap = H5.H5Pget_virtual_printf_gap(paid);
1330                                if (virtView == HDF5Constants.H5D_VDS_FIRST_MISSING)
1331                                    storageLayout.append("First Missing");
1332                                else
1333                                    storageLayout.append("Last Available");
1334                                storageLayout.append("\nGAP : ").append(virtGap);
1335                            }
1336                            catch (Exception err) {
1337                                log.debug("getMetadata(): vds error: ", err);
1338                                storageLayout.append("ERROR");
1339                            }
1340                            storageLayout.append("\nMAPS : ").append(vmaps);
1341                            if (vmaps > 0) {
1342                                for (long next = 0; next < vmaps; next++) {
1343                                    try {
1344                                        H5.H5Pget_virtual_vspace(pcid, next);
1345                                        H5.H5Pget_virtual_srcspace(pcid, next);
1346                                        String fname    = H5.H5Pget_virtual_filename(pcid, next);
1347                                        String dsetname = H5.H5Pget_virtual_dsetname(pcid, next);
1348                                        storageLayout.append("\n").append(fname).append(" : ").append(
1349                                            dsetname);
1350                                    }
1351                                    catch (Exception err) {
1352                                        log.debug("getMetadata(): vds space[{}] error: ", next, err);
1353                                        storageLayout.append("ERROR");
1354                                    }
1355                                }
1356                            }
1357                        }
1358                        catch (Exception err) {
1359                            log.debug("getMetadata(): vds count error: ", err);
1360                            storageLayout.append("ERROR");
1361                        }
1362                    }
1363                    else {
1364                        chunkSize = null;
1365                        storageLayout.append("NONE");
1366                    }
1367
1368                    int[] flags     = {0, 0};
1369                    long[] cdNelmts = {20};
1370                    int[] cdValues  = new int[(int)cdNelmts[0]];
1371                    String[] cdName = {"", ""};
1372                    log.trace("getMetadata(): {} filters in pipeline", nfilt);
1373                    int filter         = -1;
1374                    int[] filterConfig = {1};
1375
1376                    filters.setLength(0);
1377
1378                    if (nfilt == 0) {
1379                        filters.append("NONE");
1380                    }
1381                    else {
1382                        for (int i = 0, k = 0; i < nfilt; i++) {
1383                            log.trace("getMetadata(): filter[{}]", i);
1384                            if (i > 0)
1385                                filters.append(", ");
1386                            if (k > 0)
1387                                compression.append(", ");
1388
1389                            try {
1390                                cdNelmts[0] = 20;
1391                                cdValues    = new int[(int)cdNelmts[0]];
1393                                filter = H5.H5Pget_filter(pcid, i, flags, cdNelmts, cdValues, 120, cdName,
1394                                                          filterConfig);
1395                                log.trace("getMetadata(): filter[{}] is {} has {} elements ", i, cdName[0],
1396                                          cdNelmts[0]);
1397                                for (int j = 0; j < cdNelmts[0]; j++)
1398                                    log.trace("getMetadata(): filter[{}] element {} = {}", i, j, cdValues[j]);
1399                            }
1400                            catch (Exception err) {
1401                                log.debug("getMetadata(): filter[{}] error: ", i, err);
1402                                filters.append("ERROR");
1403                                continue;
1404                            }
1405
1406                            if (filter == HDF5Constants.H5Z_FILTER_NONE) {
1407                                filters.append("NONE");
1408                            }
1409                            else if (filter == HDF5Constants.H5Z_FILTER_DEFLATE) {
1410                                filters.append("GZIP");
1411                                compression.append(COMPRESSION_GZIP_TXT).append(cdValues[0]);
1412                                k++;
1413                            }
1414                            else if (filter == HDF5Constants.H5Z_FILTER_FLETCHER32) {
1415                                filters.append("Error detection filter");
1416                            }
1417                            else if (filter == HDF5Constants.H5Z_FILTER_SHUFFLE) {
1418                                filters.append("SHUFFLE: Nbytes = ").append(cdValues[0]);
1419                            }
1420                            else if (filter == HDF5Constants.H5Z_FILTER_NBIT) {
1421                                filters.append("NBIT");
1422                            }
1423                            else if (filter == HDF5Constants.H5Z_FILTER_SCALEOFFSET) {
1424                                filters.append("SCALEOFFSET: MIN BITS = ").append(cdValues[0]);
1425                            }
1426                            else if (filter == HDF5Constants.H5Z_FILTER_SZIP) {
1427                                filters.append("SZIP");
1428                                compression.append("SZIP: Pixels per block = ").append(cdValues[1]);
1429                                k++;
1430                                int flag = -1;
1431                                try {
1432                                    flag = H5.H5Zget_filter_info(filter);
1433                                }
1434                                catch (Exception ex) {
1435                                    log.debug("getMetadata(): H5Zget_filter_info failure: ", ex);
1436                                    flag = -1;
1437                                }
1438                                if (flag == HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED)
1439                                    compression.append(": H5Z_FILTER_CONFIG_DECODE_ENABLED");
1440                                else if ((flag == HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) ||
1441                                         (flag >= (HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED +
1442                                                   HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED)))
1443                                    compression.append(": H5Z_FILTER_CONFIG_ENCODE_ENABLED");
1444                            }
1445                            else {
1446                                filters.append("USERDEFINED ")
1447                                    .append(cdName[0])
1448                                    .append("(")
1449                                    .append(filter)
1450                                    .append("): ");
1451                                for (int j = 0; j < cdNelmts[0]; j++) {
1452                                    if (j > 0)
1453                                        filters.append(", ");
1454                                    filters.append(cdValues[j]);
1455                                }
1456                                log.debug("getMetadata(): filter[{}] is user defined compression", i);
1457                            }
1458                        } //  (int i=0; i<nfilt; i++)
1459                    }
1460
1461                    if (compression.length() == 0)
1462                        compression.append("NONE");
1463                    log.trace("getMetadata(): filter compression={}", compression);
1464                    log.trace("getMetadata(): filter information={}", filters);
1465
1466                    storage.setLength(0);
1467                    storage.append("SIZE: ").append(storageSize);
1468
1469                    try {
1470                        int[] at = {0};
1471                        H5.H5Pget_alloc_time(pcid, at);
1472                        storage.append(", allocation time: ");
1473                        if (at[0] == HDF5Constants.H5D_ALLOC_TIME_EARLY)
1474                            storage.append("Early");
1475                        else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_INCR)
1476                            storage.append("Incremental");
1477                        else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_LATE)
1478                            storage.append("Late");
1479                        else
1480                            storage.append("Default");
1481                    }
1482                    catch (Exception ex) {
1483                        log.debug("getMetadata(): Storage allocation time:", ex);
1484                    }
1485                    log.trace("getMetadata(): storage={}", storage);
1486                }
1487                finally {
1488                    try {
1489                        H5.H5Pclose(paid);
1490                    }
1491                    catch (Exception ex) {
1492                        log.debug("getMetadata(): H5Pclose(paid {}) failure: ", paid, ex);
1493                    }
1494                    try {
1495                        H5.H5Pclose(pcid);
1496                    }
1497                    catch (Exception ex) {
1498                        log.debug("getMetadata(): H5Pclose(pcid {}) failure: ", pcid, ex);
1499                    }
1500                    close(did);
1501                }
1502            }
1503        }
1504
1505        List<Attribute> attrlist = null;
1506        try {
1507            attrlist = objMetadata.getMetadata(attrPropList);
1508        }
1509        catch (Exception ex) {
1510            log.debug("getMetadata(): getMetadata failed: ", ex);
1511        }
1512        return attrlist;
1513    }
1514
1515    /**
1516     * Writes a specific piece of metadata (such as an attribute) into the file.
1517     *
1518     * If an HDF(4&amp;5) attribute exists in the file, this method updates its
1519     * value. If the attribute does not exist in the file, it creates the
1520     * attribute in the file and attaches it to the object. It will fail to
1521     * write a new attribute to the object where an attribute with the same name
1522     * already exists. To update the value of an existing attribute in the file,
1523     * one needs to get the instance of the attribute by getMetadata(), change
1524     * its values, then use writeMetadata() to write the value.
1525     *
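     * A minimal sketch of that update workflow (assuming an H5ScalarDS instance named
     * &quot;dset&quot; that already has at least one attribute):
     *
     * <pre>
     * List&lt;Attribute&gt; attrs = dset.getMetadata();
     * Attribute attr = attrs.get(0);   // pick the attribute to update
     * // ... modify the attribute's in-memory value here ...
     * dset.writeMetadata(attr);        // write the changed value back to the file
     * </pre>
     *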
1526     * @param info
1527     *            the metadata to write.
1528     *
1529     * @throws Exception
1530     *             if the metadata can not be written
1531     */
1532    @Override
1533    public void writeMetadata(Object info) throws Exception
1534    {
1535        try {
1536            objMetadata.writeMetadata(info);
1537        }
1538        catch (Exception ex) {
1539            log.debug("writeMetadata(): Object not an Attribute");
1540        }
1541    }
1542
1543    /**
1544     * Deletes an existing piece of metadata from this object.
1545     *
1546     * @param info
1547     *            the metadata to delete.
1548     *
1549     * @throws HDF5Exception
1550     *             if the metadata can not be removed
1551     */
1552    @Override
1553    public void removeMetadata(Object info) throws HDF5Exception
1554    {
1555        try {
1556            objMetadata.removeMetadata(info);
1557        }
1558        catch (Exception ex) {
1559            log.debug("removeMetadata(): Object not an Attribute");
1560            return;
1561        }
1562
1563        Attribute attr = (Attribute)info;
1564        log.trace("removeMetadata(): {}", attr.getAttributeName());
1565        long did = open();
1566        if (did >= 0) {
1567            try {
1568                H5.H5Adelete(did, attr.getAttributeName());
1569            }
1570            finally {
1571                close(did);
1572            }
1573        }
1574        else {
1575            log.debug("removeMetadata(): failed to open scalar dataset");
1576        }
1577    }
1578
1579    /**
1580     * Updates an existing piece of metadata attached to this object.
1581     *
1582     * @param info
1583     *            the metadata to update.
1584     *
1585     * @throws HDF5Exception
1586     *             if the metadata can not be updated
1587     */
1588    @Override
1589    public void updateMetadata(Object info) throws HDF5Exception
1590    {
1591        try {
1592            objMetadata.updateMetadata(info);
1593        }
1594        catch (Exception ex) {
1595            log.debug("updateMetadata(): Object not an Attribute");
1596            return;
1597        }
1598    }
1599
1600    /*
1601     * (non-Javadoc)
1602     *
1603     * @see hdf.object.HObject#setName(java.lang.String)
1604     */
1605    @Override
1606    public void setName(String newName) throws Exception
1607    {
1608        if (newName == null)
1609            throw new IllegalArgumentException("The new name is NULL");
1610
1611        H5File.renameObject(this, newName);
1612        super.setName(newName);
1613    }
1614
1615    /**
     * Resets the selection of the dataspace.
1617     */
1618    @Override
1619    protected void resetSelection()
1620    {
1621        super.resetSelection();
1622
1623        if (interlace == INTERLACE_PIXEL) {
1624            // 24-bit TRUE color image
1625            // [height][width][pixel components]
1626            selectedDims[2]  = 3;
1627            selectedDims[0]  = dims[0];
1628            selectedDims[1]  = dims[1];
1629            selectedIndex[0] = 0; // index for height
1630            selectedIndex[1] = 1; // index for width
1631            selectedIndex[2] = 2; // index for depth
1632        }
1633        else if (interlace == INTERLACE_PLANE) {
1634            // 24-bit TRUE color image
1635            // [pixel components][height][width]
1636            selectedDims[0]  = 3;
1637            selectedDims[1]  = dims[1];
1638            selectedDims[2]  = dims[2];
1639            selectedIndex[0] = 1; // index for height
1640            selectedIndex[1] = 2; // index for width
1641            selectedIndex[2] = 0; // index for depth
1642        }
1643
1644        if ((rank > 1) && (selectedIndex[0] > selectedIndex[1]))
1645            isDefaultImageOrder = false;
1646        else
1647            isDefaultImageOrder = true;
1648    }
1649
1650    /**
1651     * Creates a scalar dataset in a file with/without chunking and compression.
1652     *
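     * A minimal sketch (assuming a parent Group instance named &quot;pgroup&quot; from an open
     * H5File) that creates a chunked, GZIP-compressed 2D integer dataset:
     *
     * <pre>
     * long[] dims   = {100, 50};
     * long[] chunks = {10, 50};
     * Datatype intType = new H5Datatype(Datatype.CLASS_INTEGER, 4, Datatype.NATIVE, Datatype.NATIVE);
     * Dataset ds = H5ScalarDS.create(&quot;/2D int data&quot;, pgroup, intType, dims, null, chunks, 6, null);
     * </pre>
     *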
1653     * @param name
1654     *            the name of the dataset to create.
1655     * @param pgroup
1656     *            parent group where the new dataset is created.
1657     * @param type
1658     *            the datatype of the dataset.
1659     * @param dims
1660     *            the dimension size of the dataset.
1661     * @param maxdims
1662     *            the max dimension size of the dataset. maxdims is set to dims if maxdims = null.
1663     * @param chunks
1664     *            the chunk size of the dataset. No chunking if chunk = null.
1665     * @param gzip
1666     *            GZIP compression level (1 to 9). No compression if gzip&lt;=0.
1667     * @param data
1668     *            the array of data values.
1669     *
1670     * @return the new scalar dataset if successful; otherwise returns null.
1671     *
1672     * @throws Exception if there is a failure.
1673     */
1674    public static Dataset create(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims,
1675                                 long[] chunks, int gzip, Object data) throws Exception
1676    {
1677        return create(name, pgroup, type, dims, maxdims, chunks, gzip, null, data);
1678    }
1679
1680    /**
1681     * Creates a scalar dataset in a file with/without chunking and compression.
1682     *
1683     * The following example shows how to create a string dataset using this function.
1684     *
1685     * <pre>
1686     * H5File file = new H5File(&quot;test.h5&quot;, H5File.CREATE);
1687     * int max_str_len = 120;
     * Datatype strType = new H5Datatype(Datatype.CLASS_STRING, max_str_len, Datatype.NATIVE, Datatype.NATIVE);
     * int size = 10000;
     * long dims[] = { size };
     * long chunks[] = { 1000 };
     * int gzip = 9;
1690     * String strs[] = new String[size];
1691     *
1692     * for (int i = 0; i &lt; size; i++)
1693     *     strs[i] = String.valueOf(i);
1694     *
1695     * file.open();
1696     * file.createScalarDS(&quot;/1D scalar strings&quot;, null, strType, dims, null, chunks, gzip, strs);
1697     *
1698     * try {
1699     *     file.close();
1700     * }
1701     * catch (Exception ex) {
1702     * }
1703     * </pre>
1704     *
1705     * @param name
1706     *            the name of the dataset to create.
1707     * @param pgroup
1708     *            parent group where the new dataset is created.
1709     * @param type
1710     *            the datatype of the dataset.
1711     * @param dims
1712     *            the dimension size of the dataset.
1713     * @param maxdims
1714     *            the max dimension size of the dataset. maxdims is set to dims if maxdims = null.
1715     * @param chunks
1716     *            the chunk size of the dataset. No chunking if chunk = null.
1717     * @param gzip
1718     *            GZIP compression level (1 to 9). No compression if gzip&lt;=0.
1719     * @param fillValue
1720     *            the default data value.
1721     * @param data
1722     *            the array of data values.
1723     *
1724     * @return the new scalar dataset if successful; otherwise returns null.
1725     *
1726     * @throws Exception if there is a failure.
1727     */
1728    public static Dataset create(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims,
1729                                 long[] chunks, int gzip, Object fillValue, Object data) throws Exception
1730    {
1731        H5ScalarDS dataset = null;
1732        String fullPath    = null;
1733        long did           = HDF5Constants.H5I_INVALID_HID;
1734        long plist         = HDF5Constants.H5I_INVALID_HID;
1735        long sid           = HDF5Constants.H5I_INVALID_HID;
1736        long tid           = HDF5Constants.H5I_INVALID_HID;
1737
1738        if ((pgroup == null) || (name == null) || (dims == null) || ((gzip > 0) && (chunks == null))) {
1739            log.debug("create(): one or more parameters are null");
1740            return null;
1741        }
1742
1743        H5File file = (H5File)pgroup.getFileFormat();
1744        if (file == null) {
1745            log.debug("create(): parent group FileFormat is null");
1746            return null;
1747        }
1748
1749        String path = HObject.SEPARATOR;
1750        if (!pgroup.isRoot()) {
1751            path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR;
1752            if (name.endsWith("/"))
1753                name = name.substring(0, name.length() - 1);
1754            int idx = name.lastIndexOf('/');
1755            if (idx >= 0)
1756                name = name.substring(idx + 1);
1757        }
1758
1759        fullPath = path + name;
1760
1761        // setup chunking and compression
1762        boolean isExtentable = false;
1763        if (maxdims != null) {
1764            for (int i = 0; i < maxdims.length; i++) {
1765                if (maxdims[i] == 0)
1766                    maxdims[i] = dims[i];
1767                else if (maxdims[i] < 0)
1768                    maxdims[i] = HDF5Constants.H5S_UNLIMITED;
1769
1770                if (maxdims[i] != dims[i])
1771                    isExtentable = true;
1772            }
1773        }
1774
        // HDF5 requires chunking in order to define extendible datasets.
        // Chunking makes it possible to extend datasets efficiently, without
        // having to reorganize storage excessively. If no chunk size was given,
        // default to 64 (or the dimension size, if smaller) in each dimension,
        // which generally performs well.
1779        if ((chunks == null) && isExtentable) {
1780            chunks = new long[dims.length];
1781            for (int i = 0; i < dims.length; i++)
1782                chunks[i] = Math.min(dims[i], 64);
1783        }
1784
1785        // prepare the dataspace and datatype
1786        int rank = dims.length;
1787
1788        tid = type.createNative();
1789        if (tid >= 0) {
1790            try {
1791                sid = H5.H5Screate_simple(rank, dims, maxdims);
1792
1793                // figure out creation properties
1794                plist = HDF5Constants.H5P_DEFAULT;
1795
1796                byte[] valFill = null;
1797                try {
1798                    valFill = parseFillValue(type, fillValue);
1799                }
1800                catch (Exception ex) {
1801                    log.debug("create(): parse fill value: ", ex);
1802                }
1803                log.trace("create(): parseFillValue={}", valFill);
1804
1805                if (chunks != null || valFill != null) {
1806                    plist = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
1807
1808                    if (chunks != null) {
1809                        H5.H5Pset_layout(plist, HDF5Constants.H5D_CHUNKED);
1810                        H5.H5Pset_chunk(plist, rank, chunks);
1811
1812                        // compression requires chunking
1813                        if (gzip > 0) {
1814                            H5.H5Pset_deflate(plist, gzip);
1815                        }
1816                    }
1817
1818                    if (valFill != null)
1819                        H5.H5Pset_fill_value(plist, tid, valFill);
1820                }
1821
1822                long fid = file.getFID();
1823
1824                log.trace("create(): create dataset fid={}", fid);
1825                did = H5.H5Dcreate(fid, fullPath, tid, sid, HDF5Constants.H5P_DEFAULT, plist,
1826                                   HDF5Constants.H5P_DEFAULT);
1827                log.trace("create(): create dataset did={}", did);
1828                dataset = new H5ScalarDS(file, name, path);
1829            }
1830            finally {
1831                try {
1832                    H5.H5Pclose(plist);
1833                }
1834                catch (HDF5Exception ex) {
1835                    log.debug("create(): H5Pclose(plist {}) failure: ", plist, ex);
1836                }
1837                try {
1838                    H5.H5Sclose(sid);
1839                }
1840                catch (HDF5Exception ex) {
1841                    log.debug("create(): H5Sclose(sid {}) failure: ", sid, ex);
1842                }
1843                try {
1844                    H5.H5Tclose(tid);
1845                }
1846                catch (HDF5Exception ex) {
1847                    log.debug("create(): H5Tclose(tid {}) failure: ", tid, ex);
1848                }
1849                try {
1850                    H5.H5Dclose(did);
1851                }
1852                catch (HDF5Exception ex) {
1853                    log.debug("create(): H5Dclose(did {}) failure: ", did, ex);
1854                }
1855            }
1856        }
1857
1858        if (dataset != null) {
1859            pgroup.addToMemberList(dataset);
1860            if (data != null) {
1861                dataset.init();
1862                long[] selected = dataset.getSelectedDims();
1863                for (int i = 0; i < rank; i++)
1864                    selected[i] = dims[i];
1865                dataset.write(data);
1866            }
1867        }
1868
1869        return dataset;
1870    }
1871
1872    // check _FillValue, valid_min, valid_max, and valid_range
1873    private void checkCFconvention(long oid) throws Exception
1874    {
1875        Object avalue = getAttrValue(oid, "_FillValue");
1876
1877        if (avalue != null) {
1878            int n = Array.getLength(avalue);
1879            for (int i = 0; i < n; i++)
1880                addFilteredImageValue((Number)Array.get(avalue, i));
1881        }
1882
1883        if (imageDataRange == null || imageDataRange[1] <= imageDataRange[0]) {
1884            double x0 = 0;
1885            double x1 = 0;
1886            avalue    = getAttrValue(oid, "valid_range");
1887            if (avalue != null) {
1888                try {
1889                    x0                = Double.parseDouble(java.lang.reflect.Array.get(avalue, 0).toString());
1890                    x1                = Double.parseDouble(java.lang.reflect.Array.get(avalue, 1).toString());
1891                    imageDataRange    = new double[2];
1892                    imageDataRange[0] = x0;
1893                    imageDataRange[1] = x1;
1894                    return;
1895                }
1896                catch (Exception ex) {
1897                    log.debug("checkCFconvention(): valid_range: ", ex);
1898                }
1899            }
1900
1901            avalue = getAttrValue(oid, "valid_min");
1902            if (avalue != null) {
1903                try {
1904                    x0 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 0).toString());
1905                }
1906                catch (Exception ex) {
1907                    log.debug("checkCFconvention(): valid_min: ", ex);
1908                }
1909                avalue = getAttrValue(oid, "valid_max");
1910                if (avalue != null) {
1911                    try {
1912                        x1 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 0).toString());
1913                        imageDataRange    = new double[2];
1914                        imageDataRange[0] = x0;
1915                        imageDataRange[1] = x1;
1916                    }
1917                    catch (Exception ex) {
1918                        log.debug("checkCFconvention(): valid_max:", ex);
1919                    }
1920                }
1921            }
1922        } // (imageDataRange==null || imageDataRange[1]<=imageDataRange[0])
1923    }
1924
1925    private Object getAttrValue(long oid, String aname)
1926    {
1927        log.trace("getAttrValue(): start: name={}", aname);
1928
1929        long aid      = HDF5Constants.H5I_INVALID_HID;
1930        long atid     = HDF5Constants.H5I_INVALID_HID;
1931        long asid     = HDF5Constants.H5I_INVALID_HID;
1932        Object avalue = null;
1933
1934        try {
1935            // try to find attribute name
1936            if (H5.H5Aexists_by_name(oid, ".", aname, HDF5Constants.H5P_DEFAULT))
1937                aid =
1938                    H5.H5Aopen_by_name(oid, ".", aname, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
1939        }
1940        catch (HDF5LibraryException ex5) {
1941            log.debug("getAttrValue(): Failed to find attribute {} : Expected", aname);
1942        }
1943        catch (Exception ex) {
1944            log.debug("getAttrValue(): try to find attribute {}:", aname, ex);
1945        }
1946        if (aid > 0) {
1947            try {
1948                atid        = H5.H5Aget_type(aid);
1949                long tmptid = atid;
1950                atid        = H5.H5Tget_native_type(tmptid);
1951                try {
1952                    H5.H5Tclose(tmptid);
1953                }
1954                catch (Exception ex) {
1955                    log.debug("getAttrValue(): H5Tclose(tmptid {}) failure: ", tmptid, ex);
1956                }
1957
1958                asid         = H5.H5Aget_space(aid);
1959                long adims[] = null;
1960
1961                int arank = H5.H5Sget_simple_extent_ndims(asid);
1962                if (arank > 0) {
1963                    adims = new long[arank];
1964                    H5.H5Sget_simple_extent_dims(asid, adims, null);
1965                }
1966                log.trace("getAttrValue(): adims={}", adims);
1967
1968                // retrieve the attribute value
1969                long lsize = 1;
1970                if (adims != null) {
1971                    for (int j = 0; j < adims.length; j++) {
1972                        lsize *= adims[j];
1973                    }
1974                }
1975                log.trace("getAttrValue(): lsize={}", lsize);
1976
1977                if (lsize < Integer.MIN_VALUE || lsize > Integer.MAX_VALUE)
1978                    throw new Exception("Invalid int size");
1979
1980                H5Datatype dsDatatype = null;
1981                int nativeClass       = H5.H5Tget_class(atid);
1982                if (nativeClass == HDF5Constants.H5T_REFERENCE)
1983                    dsDatatype = new H5ReferenceType(getFileFormat(), lsize, atid);
1984                else
1985                    dsDatatype = new H5Datatype(getFileFormat(), atid);
1986
1987                try {
1988                    avalue = H5Datatype.allocateArray(dsDatatype, (int)lsize);
1989                }
1990                catch (OutOfMemoryError e) {
1991                    log.debug("getAttrValue(): out of memory: ", e);
1992                    avalue = null;
1993                }
1994
1995                if (avalue != null) {
1996                    log.trace("getAttrValue(): read attribute id {} of size={}", atid, lsize);
1997                    H5.H5Aread(aid, atid, avalue);
1998
1999                    if (dsDatatype.isUnsigned()) {
2000                        log.trace("getAttrValue(): id {} is unsigned", atid);
2001                        avalue = convertFromUnsignedC(avalue, null);
2002                    }
2003                    if (dsDatatype.isRef() && (avalue instanceof byte[]))
2004                        ((H5ReferenceType)dsDatatype).setData((ArrayList<byte[]>)avalue);
2005                    else if (dsDatatype.isRef())
2006                        ((H5ReferenceType)dsDatatype).setData(avalue);
2007                }
2008            }
2009            catch (Exception ex) {
2010                log.debug("getAttrValue(): try to get value for attribute {}: ", aname, ex);
2011            }
2012            finally {
2013                try {
2014                    H5.H5Tclose(atid);
2015                }
2016                catch (HDF5Exception ex) {
2017                    log.debug("getAttrValue(): H5Tclose(atid {}) failure: ", atid, ex);
2018                }
2019                try {
2020                    H5.H5Sclose(asid);
2021                }
2022                catch (HDF5Exception ex) {
2023                    log.debug("getAttrValue(): H5Sclose(asid {}) failure: ", asid, ex);
2024                }
2025                try {
2026                    H5.H5Aclose(aid);
2027                }
2028                catch (HDF5Exception ex) {
2029                    log.debug("getAttrValue(): H5Aclose(aid {}) failure: ", aid, ex);
2030                }
2031            }
2032        } // (aid > 0)
2033
2034        return avalue;
2035    }
2036
2037    private boolean isStringAttributeOf(long objID, String name, String value)
2038    {
2039        boolean retValue = false;
2040        long aid         = HDF5Constants.H5I_INVALID_HID;
2041        long atid        = HDF5Constants.H5I_INVALID_HID;
2042
2043        try {
2044            if (H5.H5Aexists_by_name(objID, ".", name, HDF5Constants.H5P_DEFAULT)) {
2045                aid              = H5.H5Aopen_by_name(objID, ".", name, HDF5Constants.H5P_DEFAULT,
2046                                                      HDF5Constants.H5P_DEFAULT);
2047                atid             = H5.H5Aget_type(aid);
2048                int size         = (int)H5.H5Tget_size(atid);
2049                byte[] attrValue = new byte[size];
2050                H5.H5Aread(aid, atid, attrValue);
2051                String strValue = new String(attrValue).trim();
2052                retValue        = strValue.equalsIgnoreCase(value);
2053            }
2054        }
2055        catch (Exception ex) {
2056            log.debug("isStringAttributeOf(): try to find out interlace mode:", ex);
2057        }
2058        finally {
2059            try {
2060                H5.H5Tclose(atid);
2061            }
2062            catch (HDF5Exception ex) {
2063                log.debug("isStringAttributeOf(): H5Tclose(atid {}) failure: ", atid, ex);
2064            }
2065            try {
2066                H5.H5Aclose(aid);
2067            }
2068            catch (HDF5Exception ex) {
2069                log.debug("isStringAttributeOf(): H5Aclose(aid {}) failure: ", aid, ex);
2070            }
2071        }
2072
2073        return retValue;
2074    }
2075
2076    /*
2077     * (non-Javadoc)
2078     *
2079     * @see hdf.object.Dataset#copy(hdf.object.Group, java.lang.String, long[], java.lang.Object)
2080     */
2081    @Override
2082    public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff) throws Exception
2083    {
2084        // must give a location to copy
2085        if (pgroup == null) {
2086            log.debug("copy(): Parent group is null");
2087            return null;
2088        }
2089
2090        Dataset dataset = null;
2091        long srcdid     = HDF5Constants.H5I_INVALID_HID;
2092        long dstdid     = HDF5Constants.H5I_INVALID_HID;
2093        long plist      = HDF5Constants.H5I_INVALID_HID;
2094        long tid        = HDF5Constants.H5I_INVALID_HID;
2095        long sid        = HDF5Constants.H5I_INVALID_HID;
2096        String dname    = null;
2097        String path     = null;
2098
2099        if (pgroup.isRoot())
2100            path = HObject.SEPARATOR;
2101        else
2102            path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR;
2103        dname = path + dstName;
2104
2105        srcdid = open();
2106        if (srcdid >= 0) {
2107            try {
2108                tid   = H5.H5Dget_type(srcdid);
2109                sid   = H5.H5Screate_simple(dims.length, dims, null);
2110                plist = H5.H5Dget_create_plist(srcdid);
2111
2112                long[] chunks        = new long[dims.length];
2113                boolean setChunkFlag = false;
2114                try {
2115                    H5.H5Pget_chunk(plist, dims.length, chunks);
2116                    for (int i = 0; i < dims.length; i++) {
2117                        if (dims[i] < chunks[i]) {
2118                            setChunkFlag = true;
2119                            if (dims[i] == 1)
2120                                chunks[i] = 1;
2121                            else
2122                                chunks[i] = dims[i] / 2;
2123                        }
2124                    }
2125                }
2126                catch (Exception ex) {
2127                    log.debug("copy(): chunk: ", ex);
2128                }
2129
2130                if (setChunkFlag)
2131                    H5.H5Pset_chunk(plist, dims.length, chunks);
2132
2133                try {
2134                    dstdid = H5.H5Dcreate(pgroup.getFID(), dname, tid, sid, HDF5Constants.H5P_DEFAULT, plist,
2135                                          HDF5Constants.H5P_DEFAULT);
2136                }
2137                catch (Exception e) {
2138                    log.debug("copy(): H5Dcreate: ", e);
2139                }
2140                finally {
2141                    try {
2142                        H5.H5Dclose(dstdid);
2143                    }
2144                    catch (Exception ex2) {
2145                        log.debug("copy(): H5Dclose(dstdid {}) failure: ", dstdid, ex2);
2146                    }
2147                }
2148
2149                dataset = new H5ScalarDS(pgroup.getFileFormat(), dstName, path);
2150                if (buff != null) {
2151                    dataset.init();
2152                    dataset.write(buff);
2153                }
2154
2155                dstdid = dataset.open();
2156                if (dstdid >= 0) {
2157                    try {
2158                        H5File.copyAttributes(srcdid, dstdid);
2159                    }
2160                    finally {
2161                        try {
2162                            H5.H5Dclose(dstdid);
2163                        }
2164                        catch (Exception ex) {
2165                            log.debug("copy(): H5Dclose(dstdid {}) failure: ", dstdid, ex);
2166                        }
2167                    }
2168                }
2169            }
2170            finally {
2171                try {
2172                    H5.H5Pclose(plist);
2173                }
2174                catch (Exception ex) {
2175                    log.debug("copy(): H5Pclose(plist {}) failure: ", plist, ex);
2176                }
2177                try {
2178                    H5.H5Sclose(sid);
2179                }
2180                catch (Exception ex) {
2181                    log.debug("copy(): H5Sclose(sid {}) failure: ", sid, ex);
2182                }
2183                try {
2184                    H5.H5Tclose(tid);
2185                }
2186                catch (Exception ex) {
2187                    log.debug("copy(): H5Tclose(tid {}) failure: ", tid, ex);
2188                }
2189                try {
2190                    H5.H5Dclose(srcdid);
2191                }
2192                catch (Exception ex) {
2193                    log.debug("copy(): H5Dclose(srcdid {}) failure: ", srcdid, ex);
2194                }
2195            }
2196        }
2197
2198        pgroup.addToMemberList(dataset);
2199
2200        if (dataset != null)
2201            ((ScalarDS)dataset).setIsImage(isImage);
2202
2203        return dataset;
2204    }
2205
2206    /**
     * Get the number of palettes for this object.
2208     *
2209     * @return the number of palettes if it has any, 0 otherwise.
2210     */
2211    public int getNumberOfPalettes() { return NumberOfPalettes; }
2212
2213    /*
2214     * (non-Javadoc)
2215     *
2216     * @see hdf.object.ScalarDS#getPalette()
2217     */
2218    @Override
2219    public byte[][] getPalette()
2220    {
2221        log.trace("getPalette(): NumberOfPalettes={}", NumberOfPalettes);
2222        if (NumberOfPalettes > 0)
2223            if (palette == null)
2224                palette = readPalette(0);
2225
2226        return palette;
2227    }
2228
2229    /*
2230     * (non-Javadoc)
2231     *
2232     * @see hdf.object.ScalarDS#getPaletteName(int)
2233     */
2234    @Override
2235    public String getPaletteName(int idx)
2236    {
2237        int count          = readNumberOfPalettes();
2238        long did           = HDF5Constants.H5I_INVALID_HID;
2239        long palID         = HDF5Constants.H5I_INVALID_HID;
2240        String paletteName = null;
2241
2242        if (count < 1) {
2243            log.debug("getPaletteName(): no palettes are attached");
2244            return null;
2245        }
2246
2247        byte[][] refBuf = null;
2248
2249        did = open();
2250        if (did >= 0) {
2251            try {
2252                refBuf = getPaletteRefs(did);
2253                palID  = H5.H5Ropen_object(refBuf[idx], HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
2254                paletteName = H5.H5Iget_name(palID);
2255            }
2256            catch (Exception ex) {
2257                ex.printStackTrace();
2258            }
2259            finally {
2260                close(palID);
                // destroy the references only if they were successfully read
                if (refBuf != null) {
                    for (int i = 0; i < count; i++)
                        H5.H5Rdestroy(refBuf[i]);
                }
2263                close(did);
2264            }
2265        }
2266
2267        return paletteName;
2268    }
2269
2270    /*
2271     * (non-Javadoc)
2272     *
2273     * @see hdf.object.ScalarDS#readPalette(int)
2274     */
2275    @Override
2276    public byte[][] readPalette(int idx)
2277    {
2278        byte[][] thePalette = null;
2279        int count           = readNumberOfPalettes();
2280        long did            = HDF5Constants.H5I_INVALID_HID;
2281        long palID          = HDF5Constants.H5I_INVALID_HID;
2282        long tid            = HDF5Constants.H5I_INVALID_HID;
2283        log.trace("readPalette(): palette count={}", count);
2284
2285        if (count < 1) {
2286            log.debug("readPalette(): no palettes are attached");
2287            return null;
2288        }
2289
2290        byte[] p        = null;
2291        byte[][] refBuf = null;
2292
2293        did = open();
2294        if (did >= 0) {
2295            try {
2296                refBuf = getPaletteRefs(did);
2297                palID  = H5.H5Ropen_object(refBuf[idx], HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
2298                log.trace("readPalette(): H5Ropen_object: {}", palID);
2299                tid = H5.H5Dget_type(palID);
2300
2301                // support only 3*256 byte palette data
2302                if (H5.H5Dget_storage_size(palID) <= 768) {
2303                    p = new byte[3 * 256];
2304                    H5.H5Dread(palID, tid, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
2305                               HDF5Constants.H5P_DEFAULT, p);
2306                }
2307            }
2308            catch (HDF5Exception ex) {
2309                log.debug("readPalette(): failure: ", ex);
2310                p = null;
2311            }
2312            finally {
2313                try {
2314                    H5.H5Tclose(tid);
2315                }
2316                catch (HDF5Exception ex2) {
2317                    log.debug("readPalette(): H5Tclose(tid {}) failure: ", tid, ex2);
2318                }
2319                close(palID);
                // destroy the references only if they were successfully read
                if (refBuf != null) {
                    for (int i = 0; i < count; i++)
                        H5.H5Rdestroy(refBuf[i]);
                }
2322                close(did);
2323            }
2324        }
2325
2326        if (p != null) {
2327            thePalette = new byte[3][256];
2328            for (int i = 0; i < 256; i++) {
2329                thePalette[0][i] = p[i * 3];
2330                thePalette[1][i] = p[i * 3 + 1];
2331                thePalette[2][i] = p[i * 3 + 2];
2332            }
2333        }
2334
2335        return thePalette;
2336    }
2337
2338    private static byte[] parseFillValue(Datatype type, Object fillValue) throws Exception
2339    {
2340        byte[] data = null;
2341
2342        if (type == null || fillValue == null) {
2343            log.debug("parseFillValue(): datatype or fill value is null");
2344            return null;
2345        }
2346
2347        int datatypeClass = type.getDatatypeClass();
2348        int datatypeSize  = (int)type.getDatatypeSize();
2349
2350        double valDbl = 0;
2351        String valStr = null;
2352
2353        if (fillValue instanceof String)
2354            valStr = (String)fillValue;
2355        else if (fillValue.getClass().isArray())
2356            valStr = Array.get(fillValue, 0).toString();
2357
2358        if (!type.isString()) {
2359            try {
2360                valDbl = Double.parseDouble(valStr);
2361            }
2362            catch (NumberFormatException ex) {
2363                log.debug("parseFillValue(): parse error: ", ex);
2364                return null;
2365            }
2366        }
2367
2368        try {
2369            switch (datatypeClass) {
2370            case Datatype.CLASS_INTEGER:
2371            case Datatype.CLASS_ENUM:
2372            case Datatype.CLASS_CHAR:
2373                log.trace("parseFillValue(): class CLASS_INT-ENUM-CHAR");
2374                if (datatypeSize == 1)
2375                    data = new byte[] {(byte)valDbl};
2376                else if (datatypeSize == 2)
2377                    data = HDFNativeData.shortToByte((short)valDbl);
2378                else if (datatypeSize == 8)
2379                    data = HDFNativeData.longToByte((long)valDbl);
2380                else
2381                    data = HDFNativeData.intToByte((int)valDbl);
2382                break;
2383            case Datatype.CLASS_FLOAT:
2384                log.trace("parseFillValue(): class CLASS_FLOAT");
2385                if (datatypeSize > 8)
2386                    data = valStr.getBytes();
2387                else if (datatypeSize == 8)
2388                    data = HDFNativeData.doubleToByte(valDbl);
2389                else if (datatypeSize == 4)
2390                    data = HDFNativeData.floatToByte((float)valDbl);
2391                else
2392                    data = HDFNativeData.shortToByte((short)Float.floatToFloat16((float)valDbl));
2393                break;
2394            case Datatype.CLASS_STRING:
2395                log.trace("parseFillValue(): class CLASS_STRING");
2396                if (valStr != null)
2397                    data = valStr.getBytes();
2398                break;
2399            case Datatype.CLASS_REFERENCE:
2400                log.trace("parseFillValue(): class CLASS_REFERENCE");
2401                data = HDFNativeData.longToByte((long)valDbl);
2402                break;
2403            default:
2404                log.debug("parseFillValue(): datatypeClass unknown");
2405                break;
2406            } // (datatypeClass)
2407        }
2408        catch (Exception ex) {
2409            log.debug("parseFillValue(): failure: ", ex);
2410            data = null;
2411        }
2412
2413        return data;
2414    }
2415
2416    /**
     * Reads the palette references to count the number of palettes attached to this dataset.
2418     *
2419     * @return the number of palettes referenced.
2420     */
2421    public int readNumberOfPalettes()
2422    {
2423        log.trace("readNumberOfPalettes(): isInited={}", isInited());
2424        if (!isInited())
2425            init(); // init will be called to get refs
2426
2427        return NumberOfPalettes;
2428    }
2429
2430    /**
     * Reads the dataspace of the PALETTE attribute to determine the number of palette references.
2432     */
2433    private int readNumberOfPalette(long did)
2434    {
2435        long aid      = HDF5Constants.H5I_INVALID_HID;
2436        long sid      = HDF5Constants.H5I_INVALID_HID;
2437        long atype    = HDF5Constants.H5I_INVALID_HID;
2438        int size      = 0;
2439        int rank      = 0;
2440        byte[] refbuf = null;
2441        log.trace("readNumberOfPalette(): did={}", did);
2442
2443        try {
2444            if (H5.H5Aexists_by_name(did, ".", "PALETTE", HDF5Constants.H5P_DEFAULT)) {
2445                aid  = H5.H5Aopen_by_name(did, ".", "PALETTE", HDF5Constants.H5P_DEFAULT,
2446                                          HDF5Constants.H5P_DEFAULT);
2447                sid  = H5.H5Aget_space(aid);
2448                rank = H5.H5Sget_simple_extent_ndims(sid);
2449                size = 1;
2450                if (rank > 0) {
2451                    long[] dims = new long[rank];
2452                    H5.H5Sget_simple_extent_dims(sid, dims, null);
2453                    log.trace("readNumberOfPalette(): rank={}, dims={}", rank, dims);
2454                    for (int i = 0; i < rank; i++)
2455                        size *= (int)dims[i];
2456                }
2457                log.trace("readNumberOfPalette(): size={}", size);
2458
2459                if ((size * HDF5Constants.H5R_REF_BUF_SIZE) < Integer.MIN_VALUE ||
2460                    (size * HDF5Constants.H5R_REF_BUF_SIZE) > Integer.MAX_VALUE)
2461                    throw new HDF5Exception("Invalid int size");
2462            }
2463        }
2464        catch (HDF5Exception ex) {
2465            log.debug("readNumberOfPalette(): Palette attribute search failed: Expected", ex);
2466            refbuf = null;
2467        }
2468        finally {
2469            try {
2470                H5.H5Tclose(atype);
2471            }
2472            catch (HDF5Exception ex2) {
2473                log.debug("readNumberOfPalette(): H5Tclose(atype {}) failure: ", atype, ex2);
2474            }
2475            try {
2476                H5.H5Sclose(sid);
2477            }
2478            catch (HDF5Exception ex2) {
2479                log.debug("readNumberOfPalette(): H5Sclose(sid {}) failure: ", sid, ex2);
2480            }
2481            try {
2482                H5.H5Aclose(aid);
2483            }
2484            catch (HDF5Exception ex2) {
2485                log.debug("readNumberOfPalette(): H5Aclose(aid {}) failure: ", aid, ex2);
2486            }
2487        }
2488
2489        return size;
2490    }
2491
2492    /**
     * Reads the palette references into a two-dimensional byte array. Each reference occupies
     * H5R_REF_BUF_SIZE bytes, so the array is sized [numberOfPalettes][H5R_REF_BUF_SIZE].
2495     */
2496    private byte[][] getPaletteRefs(long did)
2497    {
2498        long aid        = HDF5Constants.H5I_INVALID_HID;
2499        long sid        = HDF5Constants.H5I_INVALID_HID;
2500        long atype      = HDF5Constants.H5I_INVALID_HID;
2501        int size        = 0;
2502        int rank        = 0;
2503        byte[][] refBuf = null;
2504        log.trace("getPaletteRefs(): did={}", did);
2505
2506        try {
2507            if (H5.H5Aexists_by_name(did, ".", "PALETTE", HDF5Constants.H5P_DEFAULT)) {
2508                aid  = H5.H5Aopen_by_name(did, ".", "PALETTE", HDF5Constants.H5P_DEFAULT,
2509                                          HDF5Constants.H5P_DEFAULT);
2510                sid  = H5.H5Aget_space(aid);
2511                rank = H5.H5Sget_simple_extent_ndims(sid);
2512                size = 1;
2513                if (rank > 0) {
2514                    long[] dims = new long[rank];
2515                    H5.H5Sget_simple_extent_dims(sid, dims, null);
2516                    log.trace("getPaletteRefs(): rank={}, dims={}", rank, dims);
2517                    for (int i = 0; i < rank; i++)
2518                        size *= (int)dims[i];
2519                }
2520                log.trace("getPaletteRefs(): size={}", size);
2521
2522                if ((size * HDF5Constants.H5R_REF_BUF_SIZE) < Integer.MIN_VALUE ||
2523                    (size * HDF5Constants.H5R_REF_BUF_SIZE) > Integer.MAX_VALUE)
2524                    throw new HDF5Exception("Invalid int size");
2525                refBuf = new byte[size][HDF5Constants.H5R_REF_BUF_SIZE];
2526
2527                H5.H5Aread(aid, HDF5Constants.H5T_STD_REF, refBuf);
2528            }
2529        }
2530        catch (HDF5Exception ex) {
2531            log.debug("getPaletteRefs(): Palette attribute search failed: Expected", ex);
2532            refBuf = null;
2533        }
2534        finally {
2535            try {
2536                H5.H5Sclose(sid);
2537            }
2538            catch (HDF5Exception ex2) {
2539                log.debug("getPaletteRefs(): H5Sclose(sid {}) failure: ", sid, ex2);
2540            }
2541            try {
2542                H5.H5Aclose(aid);
2543            }
2544            catch (HDF5Exception ex2) {
2545                log.debug("getPaletteRefs(): H5Aclose(aid {}) failure: ", aid, ex2);
2546            }
2547        }
2548
2549        return refBuf;
2550    }
2551
2552    /**
     * Extends the dataset to the sizes given in newDims by calling H5Dset_extent. The
     * dimensionality of newDims must be the same as that of the dataspace of the dataset being changed.
     *
     * This method can be applied to the following datasets: 1) any dataset with unlimited dimensions;
     * 2) a dataset with fixed dimensions whose current dimension sizes are less than the maximum sizes
     * set with maxdims (see H5Screate_simple).
2559     *
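     * A minimal sketch (assuming a chunked, extendible H5ScalarDS instance named &quot;dset&quot;
     * whose maximum dimensions allow growth) that adds 100 more rows along the first dimension:
     *
     * <pre>
     * long[] newDims = dset.getDims().clone();
     * newDims[0] += 100;
     * dset.extend(newDims);
     * </pre>
     *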
2560     * @param newDims the dimension target size
2561     *
2562     * @throws HDF5Exception
2563     *             If there is an error at the HDF5 library level.
2564     */
2565    public void extend(long[] newDims) throws HDF5Exception
2566    {
2567        long did = HDF5Constants.H5I_INVALID_HID;
2568        long sid = HDF5Constants.H5I_INVALID_HID;
2569
2570        did = open();
2571        if (did >= 0) {
2572            try {
2573                H5.H5Dset_extent(did, newDims);
2574                H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_GLOBAL);
2575                sid              = H5.H5Dget_space(did);
2576                long[] checkDims = new long[rank];
2577                H5.H5Sget_simple_extent_dims(sid, checkDims, null);
2578                log.trace("extend(): rank={}, checkDims={}", rank, checkDims);
2579                for (int i = 0; i < rank; i++) {
2580                    if (checkDims[i] != newDims[i]) {
2581                        log.debug("extend(): error extending dataset");
2582                        throw new HDF5Exception("error extending dataset " + getName());
2583                    }
2584                }
2585                dims = checkDims;
2586            }
2587            catch (Exception e) {
2588                log.debug("extend(): failure: ", e);
2589                throw new HDF5Exception(e.getMessage());
2590            }
2591            finally {
2592                if (sid > 0)
2593                    H5.H5Sclose(sid);
2594
2595                close(did);
2596            }
2597        }
2598    }
2599
2600    /*
2601     * (non-Javadoc)
2602     *
2603     * @see hdf.object.Dataset#isVirtual()
2604     */
2605    @Override
2606    public boolean isVirtual()
2607    {
2608        return isVirtual;
2609    }
2610
2611    /*
2612     * (non-Javadoc)
2613     *
2614     * @see hdf.object.Dataset#getVirtualFilename(int)
2615     */
2616    @Override
2617    public String getVirtualFilename(int index)
2618    {
2619        if (isVirtual)
2620            return virtualNameList.get(index);
2621        else
2622            return null;
2623    }
2624
2625    /*
2626     * (non-Javadoc)
2627     *
2628     * @see hdf.object.Dataset#getVirtualMaps()
2629     */
2630    @Override
2631    public int getVirtualMaps()
2632    {
2633        if (isVirtual)
2634            return virtualNameList.size();
2635        else
2636            return -1;
2637    }
2638
2639    /*
2640     * (non-Javadoc)
2641     *
2642     * @see hdf.object.Dataset#toString(String delimiter, int maxItems)
2643     */
2644    @Override
2645    public String toString(String delimiter, int maxItems)
2646    {
2647        Object theData = originalBuf;
2648        if (theData == null) {
2649            log.debug("toString: value is null");
2650            return null;
2651        }
2652
2653        if (theData instanceof List<?>) {
2654            log.trace("toString: value is list");
2655            return null;
2656        }
2657
2658        Class<? extends Object> valClass = theData.getClass();
2659
2660        if (!valClass.isArray()) {
2661            log.trace("toString: finish - not array");
2662            String strValue = theData.toString();
2663            if (maxItems > 0 && strValue.length() > maxItems)
2664                // truncate the extra characters
2665                strValue = strValue.substring(0, maxItems);
2666            return strValue;
2667        }
2668
2669        // value is an array
2670        StringBuilder sb = new StringBuilder();
2671        long lsize       = 1;
2672        for (int j = 0; j < dims.length; j++)
2673            lsize *= dims[j];
2674
2675        log.trace("toString: isStdRef={} Array.getLength={}", ((H5Datatype)getDatatype()).isStdRef(),
2676                  Array.getLength(theData));
2677        if (((H5Datatype)getDatatype()).isStdRef()) {
2678            String cname = valClass.getName();
2679            char dname   = cname.charAt(cname.lastIndexOf('[') + 1);
2680            log.trace("toString: isStdRef with cname={} dname={}", cname, dname);
2681            for (int i = 0; i < (int)lsize; i++) {
2682                int refIndex  = HDF5Constants.H5R_REF_BUF_SIZE * i;
2683                byte[] refarr = new byte[(int)HDF5Constants.H5R_REF_BUF_SIZE];
2684                System.arraycopy(theData, refIndex, refarr, 0, (int)HDF5Constants.H5R_REF_BUF_SIZE);
2685                String ref_str = ((H5ReferenceType)getDatatype()).getReferenceRegion(refarr, false);
2686                log.trace("toString: ref_str[{}]={}", i, ref_str);
2687                if (i > 0)
2688                    sb.append(", ");
2689                sb.append(ref_str);
2690
2691                //                int n = ref_str.length();
2692                //                if (maxItems > 0) {
2693                //                    if (n > maxItems)
2694                //                        break;
2695                //                    else
2696                //                        maxItems -= n;
2697                //                }
2698            }
2699            return sb.toString();
2700        }
2701        return super.toString(delimiter, maxItems);
2702    }
2703}