001/*****************************************************************************
002 * Copyright by The HDF Group.                                               *
003 * Copyright by the Board of Trustees of the University of Illinois.         *
004 * All rights reserved.                                                      *
005 *                                                                           *
006 * This file is part of the HDF Java Products distribution.                  *
007 * The full copyright notice, including terms governing use, modification,   *
008 * and redistribution, is contained in the files COPYING and Copyright.html. *
009 * COPYING can be found at the root of the source code distribution tree.    *
010 * Or, see https://support.hdfgroup.org/products/licenses.html               *
011 * If you do not have access to either file, you may request a copy from     *
012 * help@hdfgroup.org.                                                        *
013 ****************************************************************************/
014
015package hdf.object.h5;
016
017import java.lang.reflect.Array;
018import java.math.BigDecimal;
019import java.text.DecimalFormat;
020import java.util.List;
021import java.util.Vector;
022
023import hdf.hdf5lib.H5;
024import hdf.hdf5lib.HDF5Constants;
025import hdf.hdf5lib.HDFNativeData;
026import hdf.hdf5lib.exceptions.HDF5DataFiltersException;
027import hdf.hdf5lib.exceptions.HDF5Exception;
028import hdf.hdf5lib.exceptions.HDF5LibraryException;
029import hdf.hdf5lib.structs.H5O_info_t;
030
031import hdf.object.Attribute;
032import hdf.object.Dataset;
033import hdf.object.Datatype;
034import hdf.object.FileFormat;
035import hdf.object.Group;
036import hdf.object.HObject;
037import hdf.object.MetaDataContainer;
038import hdf.object.ScalarDS;
039
040import hdf.object.h5.H5Attribute;
041import hdf.object.h5.H5MetaDataContainer;
042
043/**
044 * H5ScalarDS describes a multi-dimensional array of an HDF5 scalar or atomic datatype, such as byte, int, short, long,
045 * float, double or string, and the operations performed on the scalar dataset.
046 *
047 * The HDF5 library predefines a modest number of datatypes. For details, see
048 * <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/HDF5_Users_Guide/Datatypes/HDF5_Datatypes.htm">HDF5 Datatypes</a>.
049 *
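 * For example, a minimal usage sketch (the file name "hdf5_test.h5" and the dataset
 * path "/arrays/dset" are illustrative, not part of this class):
 *
 * <pre>
 * // open an existing HDF5 file and retrieve a scalar dataset
 * H5File h5file = new H5File("hdf5_test.h5", FileFormat.READ);
 * h5file.open();
 * H5ScalarDS dset = (H5ScalarDS) h5file.get("/arrays/dset");
 * dset.init();                  // load datatype and dataspace information
 * Object data = dset.getData(); // read the selected raw data into memory
 * h5file.close();
 * </pre>
 *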
050 * @version 1.1 9/4/2007
051 * @author Peter X. Cao
052 */
053public class H5ScalarDS extends ScalarDS implements MetaDataContainer
054{
055    private static final long serialVersionUID = 2887517608230611642L;
056
057    private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5ScalarDS.class);
058
059    /**
060     * The metadata object for this data object. Members of the metadata are instances of Attribute.
061     */
062    private H5MetaDataContainer objMetadata;
063
064    /** the object properties */
065    private H5O_info_t objInfo;
066
067    /**
068     * The byte array containing references of palettes. Each reference requires eight bytes of storage. Therefore, the
069     * array length is 8*numberOfPalettes.
070     */
071    private byte[] paletteRefs;
072
073    /** flag to indicate if the dataset is an external dataset */
074    private boolean isExternal = false;
075
076    /** flag to indicate if the dataset is a virtual dataset */
077    private boolean isVirtual = false;
078    /** the list of virtual names */
079    private List<String> virtualNameList;
080
081    /**
082     * flag to indicate if the datatype in the file is the same as the datatype in memory
083     */
084    protected boolean isNativeDatatype = false;
085
086    /**
087     * Constructs an instance of an H5 scalar dataset with the given file, dataset name and path.
088     *
089     * For example, in H5ScalarDS(h5file, "dset", "/arrays/"), "dset" is the name of the dataset, "/arrays" is the group
090     * path of the dataset.
091     *
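     * A minimal sketch of that example (the file name is illustrative):
     *
     * <pre>
     * H5File h5file = new H5File("hdf5_test.h5", FileFormat.READ);
     * H5ScalarDS dset = new H5ScalarDS(h5file, "dset", "/arrays/");
     * dset.init();
     * </pre>
     *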
092     * @param theFile
093     *            the file that contains the data object.
094     * @param theName
095     *            the name of the data object, e.g. "dset".
096     * @param thePath
097     *            the full path of the data object, e.g. "/arrays/".
098     */
099    public H5ScalarDS(FileFormat theFile, String theName, String thePath) {
100        this(theFile, theName, thePath, null);
101    }
102
103    /**
104     * @deprecated Not for public use in the future.<br>
105     *             Use {@link #H5ScalarDS(FileFormat, String, String)} instead.
106     *
107     * @param theFile
108     *            the file that contains the data object.
109     * @param theName
110     *            the name of the data object, e.g. "dset".
111     * @param thePath
112     *            the full path of the data object, e.g. "/arrays/".
113     * @param oid
114     *            the oid of the data object.
115     */
116    @Deprecated
117    public H5ScalarDS(FileFormat theFile, String theName, String thePath, long[] oid) {
118        super(theFile, theName, thePath, oid);
119        unsignedConverted = false;
120        paletteRefs = null;
121        objInfo = new H5O_info_t(-1L, -1L, 0, 0, -1L, 0L, 0L, 0L, 0L, null, null, null);
122        objMetadata = new H5MetaDataContainer(theFile, theName, thePath, this);
123
124        if (theFile != null) {
125            if (oid == null) {
126                // retrieve the object ID
127                try {
128                    byte[] refBuf = H5.H5Rcreate(theFile.getFID(), this.getFullName(), HDF5Constants.H5R_OBJECT, -1);
129                    this.oid = new long[1];
130                    this.oid[0] = HDFNativeData.byteToLong(refBuf, 0);
131                }
132                catch (Exception ex) {
133                    log.debug("constructor ID {} for {} failed H5Rcreate", theFile.getFID(), this.getFullName());
134                }
135            }
136        }
137        else
138            this.oid = null;
139    }
140
141    /*
142     * (non-Javadoc)
143     *
144     * @see hdf.object.HObject#open()
145     */
146    @Override
147    public long open() {
148        long did = HDF5Constants.H5I_INVALID_HID;
149
150        if (getFID() < 0)
151            log.trace("open(): file id for:{} is invalid", getPath() + getName());
152        else {
153            try {
154                did = H5.H5Dopen(getFID(), getPath() + getName(), HDF5Constants.H5P_DEFAULT);
155                log.trace("open(): did={}", did);
156            }
157            catch (HDF5Exception ex) {
158                log.debug("open(): Failed to open dataset {}", getPath() + getName(), ex);
159                did = HDF5Constants.H5I_INVALID_HID;
160            }
161        }
162
163        return did;
164    }
165
166    /*
167     * (non-Javadoc)
168     *
169     * @see hdf.object.HObject#close(int)
170     */
171    @Override
172    public void close(long did) {
173        if (did >= 0) {
174            try {
175                H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_LOCAL);
176            }
177            catch (Exception ex) {
178                log.debug("close(): H5Fflush(did {}) failure: ", did, ex);
179            }
180            try {
181                H5.H5Dclose(did);
182            }
183            catch (HDF5Exception ex) {
184                log.debug("close(): H5Dclose(did {}) failure: ", did, ex);
185            }
186        }
187    }
188
189    /**
190     * Retrieves datatype and dataspace information from file and sets the dataset
191     * in memory.
192     *
193     * init() is designed to support lazy operation in a dataset object. When a
194     * data object is retrieved from file, the datatype, dataspace and raw data are
195     * not loaded into memory. When it is asked to read the raw data from file,
196     * init() is first called to get the datatype and dataspace information, then
197     * load the raw data from file.
198     *
199     * init() is also used to reset the selection of a dataset (start, stride and
200     * count) to the default, which is the entire dataset for 1D or 2D datasets. In
201     * the following example, init() at step 1) retrieves datatype and dataspace
202     * information from file. getData() at step 3) reads only one data point. init()
203     * at step 4) resets the selection to the whole dataset. getData() at step 6)
204     * reads the values of the whole dataset into memory.
205     *
206     * <pre>
207     * dset = (Dataset) file.get(NAME_DATASET);
208     *
209     * // 1) get datatype and dataspace information from file
210     * dset.init();
211     * rank = dset.getRank(); // rank = 2, a 2D dataset
212     * count = dset.getSelectedDims();
213     * start = dset.getStartDims();
214     * dims = dset.getDims();
215     *
216     * // 2) select only one data point
217     * for (int i = 0; i &lt; rank; i++) {
218     *     start[i] = 0;
219     *     count[i] = 1;
220     * }
221     *
222     * // 3) read one data point
223     * data = dset.getData();
224     *
225     * // 4) reset selection to the whole dataset
226     * dset.init();
227     *
228     * // 5) clean the memory data buffer
229     * dset.clearData();
230     *
231     * // 6) Read the whole dataset
232     * data = dset.getData();
233     * </pre>
234     */
235    @Override
236    public void init() {
237        if (inited) {
238            resetSelection();
239            log.trace("init(): Dataset already initialized");
240            return; // already called. Initialize only once
241        }
242
243        long did = HDF5Constants.H5I_INVALID_HID;
244        long tid = HDF5Constants.H5I_INVALID_HID;
245        long sid = HDF5Constants.H5I_INVALID_HID;
246        long nativeTID = HDF5Constants.H5I_INVALID_HID;
247
248        did = open();
249        if (did >= 0) {
250            // check if it is an external or virtual dataset
251            long pid = HDF5Constants.H5I_INVALID_HID;
252            try {
253                pid = H5.H5Dget_create_plist(did);
254                try {
255                    int nfiles = H5.H5Pget_external_count(pid);
256                    isExternal = (nfiles > 0);
257                    int layoutType = H5.H5Pget_layout(pid);
258                    if (isVirtual = (layoutType == HDF5Constants.H5D_VIRTUAL)) {
259                        try {
260                            long vmaps = H5.H5Pget_virtual_count(pid);
261                            if (vmaps > 0) {
262                                virtualNameList = new Vector<>();
263                                for (long next = 0; next < vmaps; next++) {
264                                    try {
265                                        String fname = H5.H5Pget_virtual_filename(pid, next);
266                                        virtualNameList.add(fname);
267                                        log.trace("init(): virtualNameList[{}]={}", next, fname);
268                                    }
269                                    catch (Exception err) {
270                                        log.trace("init(): vds[{}] continue", next);
271                                    }
272                                }
273                            }
274                        }
275                        catch (Exception err) {
276                            log.debug("init(): vds count error: ", err);
277                        }
278                    }
279                    log.trace("init(): pid={} nfiles={} isExternal={} isVirtual={}", pid, nfiles, isExternal, isVirtual);
280                }
281                catch (Exception ex) {
282                    log.debug("init(): check if it is an external or virtual dataset: ", ex);
283                }
284            }
285            catch (Exception ex) {
286                log.debug("init(): H5Dget_create_plist() failure: ", ex);
287            }
288            finally {
289                try {
290                    H5.H5Pclose(pid);
291                }
292                catch (Exception ex) {
293                    log.debug("init(): H5Pclose(pid {}) failure: ", pid, ex);
294                }
295            }
296
297            paletteRefs = getPaletteRefs(did);
298
299            try {
300                sid = H5.H5Dget_space(did);
301                rank = H5.H5Sget_simple_extent_ndims(sid);
302                space_type = H5.H5Sget_simple_extent_type(sid);
303                tid = H5.H5Dget_type(did);
304                log.trace("init(): tid={} sid={} rank={} space_type={} ", tid, sid, rank, space_type);
305
306                if (rank == 0) {
307                    // a scalar data point
308                    isScalar = true;
309                    rank = 1;
310                    dims = new long[] { 1 };
311                    log.trace("init(): rank is a scalar data point");
312                }
313                else {
314                    isScalar = false;
315                    dims = new long[rank];
316                    maxDims = new long[rank];
317                    H5.H5Sget_simple_extent_dims(sid, dims, maxDims);
318                    log.trace("init(): rank={}, dims={}, maxDims={}", rank, dims, maxDims);
319                }
320
321                try {
322                    datatype = new H5Datatype(getFileFormat(), tid);
323
324                    log.trace("init(): tid={} is tclass={} has isText={} : isNamed={} :  isVLEN={} : isEnum={} : isUnsigned={} : isRegRef={}",
325                            tid, datatype.getDatatypeClass(), ((H5Datatype) datatype).isText(), datatype.isNamed(), datatype.isVLEN(),
326                            datatype.isEnum(), datatype.isUnsigned(), ((H5Datatype) datatype).isRegRef());
327                }
328                catch (Exception ex) {
329                    log.debug("init(): failed to create datatype for dataset: ", ex);
330                    datatype = null;
331                }
332
333                // Check if the datatype in the file is the native datatype
334                try {
335                    nativeTID = H5.H5Tget_native_type(tid);
336                    isNativeDatatype = H5.H5Tequal(tid, nativeTID);
337                    log.trace("init(): isNativeDatatype={}", isNativeDatatype);
338                }
339                catch (Exception ex) {
340                    log.debug("init(): check if native type failure: ", ex);
341                }
342
343                try {
344                    pid = H5.H5Dget_create_plist(did);
345                    int[] fillStatus = { 0 };
346                    if (H5.H5Pfill_value_defined(pid, fillStatus) >= 0) {
347                        // Check if fill value is user-defined before retrieving it.
348                        if (fillStatus[0] == HDF5Constants.H5D_FILL_VALUE_USER_DEFINED) {
349                            try {
350                                fillValue = H5Datatype.allocateArray((H5Datatype) datatype, 1);
351                            }
352                            catch (OutOfMemoryError e) {
353                                log.debug("init(): out of memory: ", e);
354                                fillValue = null;
355                            }
356                            catch (Exception ex) {
357                                log.debug("init(): allocate fill value buffer failed: ", ex);
358                                fillValue = null;
359                            }
360
361                            log.trace("init(): fillValue={}", fillValue);
362                            try {
363                                H5.H5Pget_fill_value(pid, nativeTID, fillValue);
364                                log.trace("init(): H5Pget_fill_value={}", fillValue);
365                                if (fillValue != null) {
366                                    if (datatype.isUnsigned() && !isFillValueConverted) {
367                                        fillValue = ScalarDS.convertFromUnsignedC(fillValue, null);
368                                        isFillValueConverted = true;
369                                    }
370
371                                    int n = Array.getLength(fillValue);
372                                    for (int i = 0; i < n; i++)
373                                        addFilteredImageValue((Number) Array.get(fillValue, i));
374                                }
375                            }
376                            catch (Exception ex2) {
377                                log.debug("init(): fill value was defined: ", ex2);
378                                fillValue = null;
379                            }
380                        }
381                    }
382                }
383                catch (HDF5Exception ex) {
384                    log.debug("init(): check if fill value is defined failure: ", ex);
385                }
386                finally {
387                    try {
388                        H5.H5Pclose(pid);
389                    }
390                    catch (Exception ex) {
391                        log.debug("init(): H5Pclose(pid {}) failure: ", pid, ex);
392                    }
393                }
394
395                inited = true;
396            }
397            catch (HDF5Exception ex) {
398                log.debug("init(): ", ex);
399            }
400            finally {
401                try {
402                    H5.H5Tclose(nativeTID);
403                }
404                catch (Exception ex2) {
405                    log.debug("init(): H5Tclose(nativeTID {}) failure: ", nativeTID, ex2);
406                }
407                try {
408                    H5.H5Tclose(tid);
409                }
410                catch (HDF5Exception ex2) {
411                    log.debug("init(): H5Tclose(tid {}) failure: ", tid, ex2);
412                }
413                try {
414                    H5.H5Sclose(sid);
415                }
416                catch (HDF5Exception ex2) {
417                    log.debug("init(): H5Sclose(sid {}) failure: ", sid, ex2);
418                }
419            }
420
421            // check for the type of image and interlace mode
422            // it is a true color image if IMAGE_SUBCLASS = IMAGE_TRUECOLOR;
423            // the interlace mode defaults to INTERLACE_PIXEL and is set to
424            // INTERLACE_PLANE only when the INTERLACE_MODE attribute
425            // has the value "INTERLACE_PLANE"
426            if ((rank >= 3) && isImage) {
427                interlace = -1;
428                isTrueColor = isStringAttributeOf(did, "IMAGE_SUBCLASS", "IMAGE_TRUECOLOR");
429
430                if (isTrueColor) {
431                    interlace = INTERLACE_PIXEL;
432                    if (isStringAttributeOf(did, "INTERLACE_MODE", "INTERLACE_PLANE")) {
433                        interlace = INTERLACE_PLANE;
434                    }
435                }
436            }
437
438            close(did);
439
440            startDims = new long[rank];
441            selectedDims = new long[rank];
442
443            resetSelection();
444        }
445        else {
446            log.debug("init(): failed to open dataset");
447        }
448    }
449
450    /**
451     * Checks whether the object has any attributes attached.
452     *
453     * @return true if it has any attributes, false otherwise.
454     */
455    @Override
456    public boolean hasAttribute() {
457        objInfo.num_attrs = objMetadata.getObjectAttributeSize();
458
459        if (objInfo.num_attrs < 0) {
460            long did = open();
461            if (did >= 0) {
462                objInfo.num_attrs = 0;
463
464                try {
465                    objInfo = H5.H5Oget_info(did);
466
467                    if(objInfo.num_attrs > 0) {
468                        // test if it is an image
469                        // check image
470                        Object avalue = getAttrValue(did, "CLASS");
471                        if (avalue != null) {
472                            try {
473                                isImageDisplay = isImage = "IMAGE".equalsIgnoreCase(new String((byte[]) avalue).trim());
474                                log.trace("hasAttribute(): isImageDisplay dataset: {} with value = {}", isImageDisplay, avalue);
475                            }
476                            catch (Exception err) {
477                                log.debug("hasAttribute(): check image: ", err);
478                            }
479                        }
480
481                        // retrieve the IMAGE_MINMAXRANGE
482                        avalue = getAttrValue(did, "IMAGE_MINMAXRANGE");
483                        if (avalue != null) {
484                            double x0 = 0;
485                            double x1 = 0;
486                            try {
487                                x0 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 0).toString());
488                                x1 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 1).toString());
489                            }
490                            catch (Exception ex2) {
491                                x0 = x1 = 0;
492                            }
493                            if (x1 > x0) {
494                                imageDataRange = new double[2];
495                                imageDataRange[0] = x0;
496                                imageDataRange[1] = x1;
497                            }
498                        }
499
500                        try {
501                            checkCFconvention(did);
502                        }
503                        catch (Exception ex) {
504                            log.debug("hasAttribute(): checkCFconvention(did {}):", did, ex);
505                        }
506                    }
507                }
508                catch (Exception ex) {
509                    objInfo.num_attrs = 0;
510                    log.debug("hasAttribute(): get object info failure: ", ex);
511                }
512                finally {
513                    close(did);
514                }
515                objMetadata.setObjectAttributeSize((int) objInfo.num_attrs);
516            }
517            else {
518                log.debug("hasAttribute(): could not open dataset");
519            }
520        }
521
522        log.trace("hasAttribute(): nAttributes={}", objInfo.num_attrs);
523        return (objInfo.num_attrs > 0);
524    }
525
526    /**
527     * Returns the datatype of the data object.
528     *
529     * @return the datatype of the data object.
530     */
531    @Override
532    public Datatype getDatatype() {
533        if (!inited)
534            init();
535
536        if (datatype == null) {
537            long did = HDF5Constants.H5I_INVALID_HID;
538            long tid = HDF5Constants.H5I_INVALID_HID;
539
540            did = open();
541            if (did >= 0) {
542                try {
543                    tid = H5.H5Dget_type(did);
544                    log.trace("getDatatype(): isNativeDatatype={}", isNativeDatatype);
545                    if (!isNativeDatatype) {
546                        long tmptid = -1;
547                        try {
548                            tmptid = H5Datatype.toNative(tid);
549                            if (tmptid >= 0) {
550                                try {
551                                    H5.H5Tclose(tid);
552                                }
553                                catch (Exception ex2) {
554                                    log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex2);
555                                }
556                                tid = tmptid;
557                            }
558                        }
559                        catch (Exception ex) {
560                            log.debug("getDatatype(): toNative: ", ex);
561                        }
562                    }
563                    datatype = new H5Datatype(getFileFormat(), tid);
564                }
565                catch (Exception ex) {
566                    log.debug("getDatatype(): ", ex);
567                }
568                finally {
569                    try {
570                        H5.H5Tclose(tid);
571                    }
572                    catch (HDF5Exception ex) {
573                        log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex);
574                    }
575                    try {
576                        H5.H5Dclose(did);
577                    }
578                    catch (HDF5Exception ex) {
579                        log.debug("getDatatype(): H5Dclose(did {}) failure: ", did, ex);
580                    }
581                }
582            }
583        }
584
585        if (isExternal) {
586            String pdir = this.getFileFormat().getAbsoluteFile().getParent();
587
588            if (pdir == null) {
589                pdir = ".";
590            }
591            System.setProperty("user.dir", pdir);
592            log.trace("getDatatype(): External dataset: user.dir={}", pdir);
593        }
594
595        return datatype;
596    }
597
598    /**
599     * Removes all of the elements from the metadata list.
600     * The list should be empty after this call returns.
601     */
602    @Override
603    public void clear() {
604        super.clear();
605        objMetadata.clear();
606    }
607
608    /*
609     * (non-Javadoc)
610     *
611     * @see hdf.object.Dataset#readBytes()
612     */
613    @Override
614    public byte[] readBytes() throws HDF5Exception {
615        byte[] theData = null;
616
617        if (!isInited())
618            init();
619
620        long did = open();
621        if (did >= 0) {
622            long fspace = HDF5Constants.H5I_INVALID_HID;
623            long mspace = HDF5Constants.H5I_INVALID_HID;
624            long tid = HDF5Constants.H5I_INVALID_HID;
625
626            try {
627                long[] lsize = { 1 };
628                for (int j = 0; j < selectedDims.length; j++)
629                    lsize[0] *= selectedDims[j];
630
631                fspace = H5.H5Dget_space(did);
632                mspace = H5.H5Screate_simple(rank, selectedDims, null);
633
634                // set the rectangle selection
635                // HDF5 bug: for scalar dataset, H5Sselect_hyperslab gives core dump
636                if (rank * dims[0] > 1)
637                    H5.H5Sselect_hyperslab(fspace, HDF5Constants.H5S_SELECT_SET, startDims, selectedStride, selectedDims, null); // set block to 1
638
639                tid = H5.H5Dget_type(did);
640                long size = H5.H5Tget_size(tid) * lsize[0];
641                log.trace("readBytes(): size = {}", size);
642
643                if (size < Integer.MIN_VALUE || size > Integer.MAX_VALUE)
644                    throw new Exception("Invalid int size");
645
646                theData = new byte[(int)size];
647
648                log.trace("readBytes(): H5Dread: did={} tid={} fspace={} mspace={}", did, tid, fspace, mspace);
649                H5.H5Dread(did, tid, mspace, fspace, HDF5Constants.H5P_DEFAULT, theData);
650            }
651            catch (Exception ex) {
652                log.debug("readBytes(): failed to read data: ", ex);
653            }
654            finally {
655                try {
656                    H5.H5Sclose(fspace);
657                }
658                catch (Exception ex2) {
659                    log.debug("readBytes(): H5Sclose(fspace {}) failure: ", fspace, ex2);
660                }
661                try {
662                    H5.H5Sclose(mspace);
663                }
664                catch (Exception ex2) {
665                    log.debug("readBytes(): H5Sclose(mspace {}) failure: ", mspace, ex2);
666                }
667                try {
668                    H5.H5Tclose(tid);
669                }
670                catch (HDF5Exception ex2) {
671                    log.debug("readBytes(): H5Tclose(tid {}) failure: ", tid, ex2);
672                }
673                close(did);
674            }
675        }
676
677        return theData;
678    }
679
680    /**
681     * Reads the data from file.
682     *
683     * read() reads the data from file to a memory buffer and returns the memory
684     * buffer. The dataset object does not hold the memory buffer. To store the
685     * memory buffer in the dataset object, one must call getData().
686     *
687     * By default, the whole dataset is read into memory. Users can also select
688     * a subset to read. Subsetting is done in an implicit way.
689     *
690     * <b>How to Select a Subset</b>
691     *
692     * A selection is specified by three arrays: start, stride and count.
693     * <ol>
694     * <li>start: offset of a selection
695     * <li>stride: determines how many elements to move in each dimension
696     * <li>count: number of elements to select in each dimension
697     * </ol>
698     * getStartDims(), getStride() and getSelectedDims() return the start,
699     * stride and count arrays respectively. Applications can make a selection
700     * by changing the values of the arrays.
701     *
702     * The following example shows how to make a subset. In the example, the
703     * dataset is a 4-dimensional array of [200][100][50][10], i.e. dims[0]=200;
704     * dims[1]=100; dims[2]=50; dims[3]=10; <br>
705     * We want to select every other data point in dims[1] and dims[2]
706     *
707     * <pre>
708     * int rank = dataset.getRank(); // number of dimensions of the dataset
709     * long[] dims = dataset.getDims(); // the dimension sizes of the dataset
710     * long[] selected = dataset.getSelectedDims(); // the selected size of the
711     *                                              // dataset
712     * long[] start = dataset.getStartDims(); // the offset of the selection
713     * long[] stride = dataset.getStride(); // the stride of the dataset
714     * int[] selectedIndex = dataset.getSelectedIndex(); // the selected
715     *                                                   // dimensions for
716     *                                                   // display
717     *
718     * // select dim1 and dim2 as 2D data for display, and slice through dim0
719     * selectedIndex[0] = 1;
720     * selectedIndex[1] = 2;
721     * selectedIndex[2] = 0;
722     *
723     * // reset the selection arrays
724     * for (int i = 0; i &lt; rank; i++) {
725     *     start[i] = 0;
726     *     selected[i] = 1;
727     *     stride[i] = 1;
728     * }
729     *
730     * // set stride to 2 on dim1 and dim2 so that every other data point is
731     * // selected.
732     * stride[1] = 2;
733     * stride[2] = 2;
734     *
735     * // set the selection size of dim1 and dim2
736     * selected[1] = dims[1] / stride[1];
737     * selected[2] = dims[2] / stride[2];
738     *
739     * // when dataset.getData() is called, the selection above will be used,
740     * // since the dimension arrays are passed by reference.
741     * // Changes made to these arrays outside the dataset object
742     * // directly change the values of the arrays
743     * // in the dataset object.
744     * </pre>
745     *
746     * For ScalarDS, the memory data buffer is a one-dimensional array of byte,
747     * short, int, float, double or String type based on the datatype of the
748     * dataset.
749     *
750     * @return the data read from file.
751     *
752     * @see #getData()
753     * @see hdf.object.DataFormat#read()
754     *
755     * @throws Exception
756     *             if object can not be read
757     */
758    @Override
759    public Object read() throws Exception {
760        Object readData = null;
761
762        if (!isInited())
763            init();
764
765        try {
766            readData = scalarDatasetCommonIO(H5File.IO_TYPE.READ, null);
767        }
768        catch (Exception ex) {
769            log.debug("read(): failed to read scalar dataset: ", ex);
770            throw new Exception("failed to read scalar dataset: " + ex.getMessage(), ex);
771        }
772
773        return readData;
774    }
775
776    /**
777     * Writes the given data buffer into this dataset in a file.
778     *
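     * A minimal sketch, assuming a one-dimensional 32-bit integer dataset; the
     * buffer length must match the current selection size:
     *
     * <pre>
     * dset.init();
     * long[] selected = dset.getSelectedDims();
     * int[] values = new int[(int) selected[0]];
     * // ... fill values ...
     * dset.write(values);
     * </pre>
     *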
779     * @param buf
780     *            The buffer that contains the data values.
781     *
782     * @throws Exception
783     *             If there is an error at the HDF5 library level.
784     */
785    @Override
786    public void write(Object buf) throws Exception {
787        if (this.getFileFormat().isReadOnly())
788            throw new Exception("cannot write to scalar dataset in file opened as read-only");
789
790        if (!isInited())
791            init();
792
793        try {
794            scalarDatasetCommonIO(H5File.IO_TYPE.WRITE, buf);
795        }
796        catch (Exception ex) {
797            log.debug("write(Object): failed to write to scalar dataset: ", ex);
798            throw new Exception("failed to write to scalar dataset: " + ex.getMessage(), ex);
799        }
800    }
801
802    private Object scalarDatasetCommonIO(H5File.IO_TYPE ioType, Object writeBuf) throws Exception {
803        H5Datatype dsDatatype = (H5Datatype) getDatatype();
804        Object theData = null;
805
806        /*
807         * I/O type-specific pre-initialization.
808         */
809        if (ioType == H5File.IO_TYPE.WRITE) {
810            if (writeBuf == null) {
811                log.debug("scalarDatasetCommonIO(): writeBuf is null");
812                throw new Exception("write buffer is null");
813            }
814            log.trace("scalarDatasetCommonIO(): check write unsupported datatype data");
815
816            /*
817             * Check for any unsupported datatypes and fail early before
818             * attempting to write to the dataset.
819             */
820            if (dsDatatype.isVLEN() && !dsDatatype.isText()) {
821                log.debug("scalarDatasetCommonIO(): Cannot write non-string variable-length data");
822                throw new HDF5Exception("Writing non-string variable-length data is not supported");
823            }
824
825            if (dsDatatype.isRegRef()) {
826                log.debug("scalarDatasetCommonIO(): Cannot write region reference data");
827                throw new HDF5Exception("Writing region reference data is not supported");
828            }
829        }
830
831        long did = open();
832        if (did >= 0) {
833            long[] spaceIDs = { HDF5Constants.H5I_INVALID_HID, HDF5Constants.H5I_INVALID_HID }; // spaceIDs[0]=mspace, spaceIDs[1]=fspace
834
835            try {
836                /*
837                 * NOTE: this call sets up a hyperslab selection in the file according to the
838                 * current selection in the dataset object.
839                 */
840                long totalSelectedSpacePoints = H5Utils.getTotalSelectedSpacePoints(did, dims, startDims,
841                        selectedStride, selectedDims, spaceIDs);
842
843                if (ioType == H5File.IO_TYPE.READ) {
844                    log.trace("scalarDatasetCommonIO():read ioType isNamed={} isEnum={} isText={} isRefObj={}", dsDatatype.isNamed(), dsDatatype.isEnum(), dsDatatype.isText(), dsDatatype.isRefObj());
845                    if ((originalBuf == null) || dsDatatype.isEnum() || dsDatatype.isText() || dsDatatype.isRefObj()
846                            || ((originalBuf != null) && (totalSelectedSpacePoints != nPoints))) {
847                        try {
848                            theData = H5Datatype.allocateArray(dsDatatype, (int)totalSelectedSpacePoints);
849                        }
850                        catch (OutOfMemoryError err) {
851                            log.debug("scalarDatasetCommonIO(): Out of memory");
852                            throw new HDF5Exception("Out Of Memory");
853                        }
854                    }
855                    else {
856                        // reuse the buffer if the size is the same
857                        log.trace("scalarDatasetCommonIO():read ioType reuse the buffer if the size is the same");
858                        theData = originalBuf;
859                    }
860
861                    if (theData != null) {
862                        /*
863                         * Actually read the data now that everything has been setup.
864                         */
865                        long tid = HDF5Constants.H5I_INVALID_HID;
866                        try {
867                            log.trace("scalarDatasetCommonIO():read ioType create native");
868                            tid = dsDatatype.createNative();
869
870                            if (dsDatatype.isVLEN() || (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isVLEN())) {
871                                log.trace("scalarDatasetCommonIO(): H5DreadVL did={} tid={} spaceIDs[0]={} spaceIDs[1]={}",
872                                        did, tid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
873                                        (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
874
875                                H5.H5DreadVL(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT,
876                                        (Object[]) theData);
877                            }
878                            else {
879                                log.trace("scalarDatasetCommonIO(): H5Dread did={} tid={} spaceIDs[0]={} spaceIDs[1]={}",
880                                        did, tid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
881                                        (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
882
883                                H5.H5Dread(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, theData);
884                            }
885                        }
886                        catch (HDF5DataFiltersException exfltr) {
887                            log.debug("scalarDatasetCommonIO(): read failure: ", exfltr);
888                            throw new Exception("Filter not available exception: " + exfltr.getMessage(), exfltr);
889                        }
890                        catch (Exception ex) {
891                            log.debug("scalarDatasetCommonIO(): read failure: ", ex);
892                            throw new Exception(ex.getMessage(), ex);
893                        }
894                        finally {
895                            dsDatatype.close(tid);
896                        }
897
898                        /*
899                         * Perform any necessary data conversions.
900                         */
901                        if (dsDatatype.isText() && convertByteToString && (theData instanceof byte[])) {
902                            log.trace("scalarDatasetCommonIO(): isText: converting byte array to string array");
903                            theData = byteToString((byte[]) theData, (int) dsDatatype.getDatatypeSize());
904                        }
905                        else if (dsDatatype.isFloat() && dsDatatype.getDatatypeSize() == 16) {
906                            log.trace("scalarDatasetCommonIO(): isFloat: converting byte array to BigDecimal array");
907                            theData = dsDatatype.byteToBigDecimal(0, (int)totalSelectedSpacePoints, (byte[]) theData);
908                        }
909                        else if (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isFloat() && dsDatatype.getDatatypeBase().getDatatypeSize() == 16) {
910                            log.trace("scalarDatasetCommonIO(): isArray and isFloat: converting byte array to BigDecimal array");
911                            long[] arrayDims = dsDatatype.getArrayDims();
912                            int asize = (int)totalSelectedSpacePoints;
913                            for (int j = 0; j < arrayDims.length; j++) {
914                                asize *= arrayDims[j];
915                            }
916                            theData = ((H5Datatype)dsDatatype.getDatatypeBase()).byteToBigDecimal(0, asize, (byte[]) theData);
917                        }
918                        else if (dsDatatype.isRefObj()) {
919                            log.trace("scalarDatasetCommonIO(): isREF: converting byte array to long array");
920                            theData = HDFNativeData.byteToLong((byte[]) theData);
921                        }
922                    }
923                } // H5File.IO_TYPE.READ
924                else {
925                    /*
926                     * Perform any necessary data conversions before writing the data.
927                     *
928                     * Note that v-len strings do not get converted, regardless of
929                     * conversion request type.
930                     */
931                    Object tmpData = writeBuf;
932                    try {
933                        // Check if we need to convert integer data
934                        int tsize = (int) dsDatatype.getDatatypeSize();
935                        String cname = writeBuf.getClass().getName();
936                        log.trace("scalarDatasetCommonIO(): cname={} of datatype size={}", cname, tsize);
937                        char dname = cname.charAt(cname.lastIndexOf("[") + 1);
938                        boolean doIntConversion = (((tsize == 1) && (dname == 'S')) || ((tsize == 2) && (dname == 'I'))
939                                || ((tsize == 4) && (dname == 'J')) || (dsDatatype.isUnsigned() && unsignedConverted));
940
941                        if (doIntConversion) {
942                            log.trace("scalarDatasetCommonIO(): converting integer data to unsigned C-type integers");
943                            tmpData = convertToUnsignedC(writeBuf, null);
944                        }
945                        else if (dsDatatype.isText() && !dsDatatype.isVarStr() && convertByteToString) {
946                            log.trace("scalarDatasetCommonIO(): converting string array to byte array");
947                            tmpData = stringToByte((String[]) writeBuf, tsize);
948                        }
949                        else if (dsDatatype.isEnum() && (Array.get(writeBuf, 0) instanceof String)) {
950                            log.trace("scalarDatasetCommonIO(): converting enum names to values");
951                            tmpData = dsDatatype.convertEnumNameToValue((String[]) writeBuf);
952                        }
953                        else if (dsDatatype.isFloat() && dsDatatype.getDatatypeSize() == 16) {
954                            log.trace("scalarDatasetCommonIO(): isFloat: converting BigDecimal array to byte array");
955                            throw new Exception("data conversion failure: cannot write BigDecimal values");
956                            //tmpData = dsDatatype.bigDecimalToByte(0, (int)totalSelectedSpacePoints, (BigDecimal[]) writeBuf);
957                        }
958                    }
959                    catch (Exception ex) {
960                        log.debug("scalarDatasetCommonIO(): data conversion failure: ", ex);
961                        throw new Exception("data conversion failure: " + ex.getMessage());
962                    }
963
964                    /*
965                     * Actually write the data now that everything has been setup.
966                     */
967                    long tid = HDF5Constants.H5I_INVALID_HID;
968                    try {
969                        tid = dsDatatype.createNative();
970
971                        if (dsDatatype.isVLEN() || (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isVLEN())) {
972                            log.trace("scalarDatasetCommonIO(): H5DwriteVL did={} tid={} spaceIDs[0]={} spaceIDs[1]={}",
973                                    did, tid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
974                                    (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
975
976                            H5.H5DwriteVL(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, (Object[]) tmpData);
977                        }
978                        else {
979                            log.trace("scalarDatasetCommonIO(): H5Dwrite did={} tid={} spaceIDs[0]={} spaceIDs[1]={}",
980                                    did, tid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
981                                    (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
982
983                            H5.H5Dwrite(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, tmpData);
984                        }
985                    }
986                    catch (Exception ex) {
987                        log.debug("scalarDatasetCommonIO(): write failure: ", ex);
988                        throw new Exception(ex.getMessage());
989                    }
990                    finally {
991                        dsDatatype.close(tid);
992                    }
993                } // H5File.IO_TYPE.WRITE
994            }
995            finally {
996                if (HDF5Constants.H5S_ALL != spaceIDs[0]) {
997                    try {
998                        H5.H5Sclose(spaceIDs[0]);
999                    }
1000                    catch (Exception ex) {
1001                        log.debug("scalarDatasetCommonIO(): H5Sclose(spaceIDs[0] {}) failure: ", spaceIDs[0], ex);
1002                    }
1003                }
1004
1005                if (HDF5Constants.H5S_ALL != spaceIDs[1]) {
1006                    try {
1007                        H5.H5Sclose(spaceIDs[1]);
1008                    }
1009                    catch (Exception ex) {
1010                        log.debug("scalarDatasetCommonIO(): H5Sclose(spaceIDs[1] {}) failure: ", spaceIDs[1], ex);
1011                    }
1012                }
1013
1014                close(did);
1015            }
1016        }
1017        else
1018            log.debug("scalarDatasetCommonIO(): failed to open dataset");
1019
1020        return theData;
1021    }
1022
1023    /**
1024     * Retrieves the object's metadata, such as attributes, from the file.
1025     *
1026     * Metadata, such as attributes, is stored in a List.
1027     *
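     * A minimal sketch (names are illustrative):
     *
     * <pre>
     * List&lt;Attribute&gt; attrs = dset.getMetadata();
     * System.out.println("number of attributes: " + attrs.size());
     * </pre>
     *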
1028     * @return the list of metadata objects.
1029     *
1030     * @throws HDF5Exception
1031     *             if the metadata can not be retrieved
1032     */
1033    @Override
1034    public List<Attribute> getMetadata() throws HDF5Exception {
1035        int gmIndexType = 0;
1036        int gmIndexOrder = 0;
1037
1038        try {
1039            gmIndexType = fileFormat.getIndexType(null);
1040        }
1041        catch (Exception ex) {
1042            log.debug("getMetadata(): getIndexType failed: ", ex);
1043        }
1044        try {
1045            gmIndexOrder = fileFormat.getIndexOrder(null);
1046        }
1047        catch (Exception ex) {
1048            log.debug("getMetadata(): getIndexOrder failed: ", ex);
1049        }
1050        return this.getMetadata(gmIndexType, gmIndexOrder);
1051    }
1052
1053    /**
1054     * Retrieves the object's metadata, such as attributes, from the file.
1055     *
1056     * Metadata, such as attributes, is stored in a List.
1057     *
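     * A hedged sketch; the constants shown are the standard HDF5 index type and
     * iteration order values, assumed here for illustration:
     *
     * <pre>
     * List&lt;Attribute&gt; attrs = dset.getMetadata(HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC);
     * </pre>
     *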
1058     * @param attrPropList
1059     *             the list of properties to get
1060     *
1061     * @return the list of metadata objects.
1062     *
1063     * @throws HDF5Exception
1064     *             if the metadata can not be retrieved
1065     */
1066    public List<Attribute> getMetadata(int... attrPropList) throws HDF5Exception {
1067        if (!isInited())
1068            init();
1069
1070        try {
1071            this.linkTargetObjName = H5File.getLinkTargetName(this);
1072        }
1073        catch (Exception ex) {
1074            log.debug("getMetadata(): getLinkTargetName failed: ", ex);
1075        }
1076
1077        if (objMetadata.getAttributeList() == null) {
1078            long did = HDF5Constants.H5I_INVALID_HID;
1079            long pcid = HDF5Constants.H5I_INVALID_HID;
1080            long paid = HDF5Constants.H5I_INVALID_HID;
1081
1082            did = open();
1083            if (did >= 0) {
1084                try {
1085                    // get the compression and chunk information
1086                    pcid = H5.H5Dget_create_plist(did);
1087                    paid = H5.H5Dget_access_plist(did);
1088                    long storageSize = H5.H5Dget_storage_size(did);
1089                    int nfilt = H5.H5Pget_nfilters(pcid);
1090                    int layoutType = H5.H5Pget_layout(pcid);
1091
1092                    storageLayout.setLength(0);
1093                    compression.setLength(0);
1094
1095                    if (layoutType == HDF5Constants.H5D_CHUNKED) {
1096                        chunkSize = new long[rank];
1097                        H5.H5Pget_chunk(pcid, rank, chunkSize);
1098                        int n = chunkSize.length;
1099                        storageLayout.append("CHUNKED: ").append(chunkSize[0]);
1100                        for (int i = 1; i < n; i++)
1101                            storageLayout.append(" X ").append(chunkSize[i]);
1102
1103                        if (nfilt > 0) {
1104                            long nelmts = 1;
1105                            long uncompSize;
1106                            long datumSize = getDatatype().getDatatypeSize();
1107
1108                            if (datumSize < 0) {
1109                                long tmptid = HDF5Constants.H5I_INVALID_HID;
1110                                try {
1111                                    tmptid = H5.H5Dget_type(did);
1112                                    datumSize = H5.H5Tget_size(tmptid);
1113                                }
1114                                finally {
1115                                    try {
1116                                        H5.H5Tclose(tmptid);
1117                                    }
1118                                    catch (Exception ex2) {
1119                                        log.debug("getMetadata(): H5Tclose(tmptid {}) failure: ", tmptid, ex2);
1120                                    }
1121                                }
1122                            }
1123
1124                            for (int i = 0; i < rank; i++)
1125                                nelmts *= dims[i];
1126                            uncompSize = nelmts * datumSize;
1127
1128                            /* compression ratio = uncompressed size / compressed size */
1129
1130                            if (storageSize != 0) {
1131                                double ratio = (double) uncompSize / (double) storageSize;
1132                                DecimalFormat df = new DecimalFormat();
1133                                df.setMinimumFractionDigits(3);
1134                                df.setMaximumFractionDigits(3);
1135                                compression.append(df.format(ratio)).append(":1");
1136                            }
1137                        }
1138                    }
1139                    else if (layoutType == HDF5Constants.H5D_COMPACT) {
1140                        storageLayout.append("COMPACT");
1141                    }
1142                    else if (layoutType == HDF5Constants.H5D_CONTIGUOUS) {
1143                        storageLayout.append("CONTIGUOUS");
1144                        if (H5.H5Pget_external_count(pcid) > 0)
1145                            storageLayout.append(" - EXTERNAL ");
1146                    }
1147                    else if (layoutType == HDF5Constants.H5D_VIRTUAL) {
1148                        storageLayout.append("VIRTUAL - ");
1149                        try {
1150                            long vmaps = H5.H5Pget_virtual_count(pcid);
1151                            try {
1152                                int virtView = H5.H5Pget_virtual_view(paid);
1153                                long virtGap = H5.H5Pget_virtual_printf_gap(paid);
1154                                if (virtView == HDF5Constants.H5D_VDS_FIRST_MISSING)
1155                                    storageLayout.append("First Missing");
1156                                else
1157                                    storageLayout.append("Last Available");
1158                                storageLayout.append("\nGAP : ").append(virtGap);
1159                            }
1160                            catch (Exception err) {
1161                                log.debug("getMetadata(): vds error: ", err);
1162                                storageLayout.append("ERROR");
1163                            }
1164                            storageLayout.append("\nMAPS : ").append(vmaps);
1165                            if (vmaps > 0) {
1166                                for (long next = 0; next < vmaps; next++) {
1167                                    try {
1168                                        H5.H5Pget_virtual_vspace(pcid, next);
1169                                        H5.H5Pget_virtual_srcspace(pcid, next);
1170                                        String fname = H5.H5Pget_virtual_filename(pcid, next);
1171                                        String dsetname = H5.H5Pget_virtual_dsetname(pcid, next);
1172                                        storageLayout.append("\n").append(fname).append(" : ").append(dsetname);
1173                                    }
1174                                    catch (Exception err) {
1175                                        log.debug("getMetadata(): vds space[{}] error: ", next, err);
1176                                        storageLayout.append("ERROR");
1177                                    }
1178                                }
1179                            }
1180                        }
1181                        catch (Exception err) {
1182                            log.debug("getMetadata(): vds count error: ", err);
1183                            storageLayout.append("ERROR");
1184                        }
1185                    }
1186                    else {
1187                        chunkSize = null;
1188                        storageLayout.append("NONE");
1189                    }
1190
1191                    int[] flags = { 0, 0 };
1192                    long[] cdNelmts = { 20 };
1193                    int[] cdValues = new int[(int) cdNelmts[0]];
1194                    String[] cdName = { "", "" };
1195                    log.trace("getMetadata(): {} filters in pipeline", nfilt);
1196                    int filter = -1;
1197                    int[] filterConfig = { 1 };
1198
1199                    filters.setLength(0);
1200
1201                    if (nfilt == 0) {
1202                        filters.append("NONE");
1203                    }
1204                    else {
1205                        for (int i = 0, k = 0; i < nfilt; i++) {
1206                            log.trace("getMetadata(): filter[{}]", i);
1207                            if (i > 0)
1208                                filters.append(", ");
1209                            if (k > 0)
1210                                compression.append(", ");
1211
1212                            try {
1213                                cdNelmts[0] = 20;
1214                                cdValues = new int[(int) cdNelmts[0]];
1216                                filter = H5.H5Pget_filter(pcid, i, flags, cdNelmts, cdValues, 120, cdName, filterConfig);
1217                                log.trace("getMetadata(): filter[{}] is {} has {} elements ", i, cdName[0], cdNelmts[0]);
1218                                for (int j = 0; j < cdNelmts[0]; j++)
1219                                    log.trace("getMetadata(): filter[{}] element {} = {}", i, j, cdValues[j]);
1220                            }
1221                            catch (Exception err) {
1222                                log.debug("getMetadata(): filter[{}] error: ", i, err);
1223                                filters.append("ERROR");
1224                                continue;
1225                            }
1226
1227                            if (filter == HDF5Constants.H5Z_FILTER_NONE) {
1228                                filters.append("NONE");
1229                            }
1230                            else if (filter == HDF5Constants.H5Z_FILTER_DEFLATE) {
1231                                filters.append("GZIP");
1232                                compression.append(COMPRESSION_GZIP_TXT).append(cdValues[0]);
1233                                k++;
1234                            }
1235                            else if (filter == HDF5Constants.H5Z_FILTER_FLETCHER32) {
1236                                filters.append("Error detection filter");
1237                            }
1238                            else if (filter == HDF5Constants.H5Z_FILTER_SHUFFLE) {
1239                                filters.append("SHUFFLE: Nbytes = ").append(cdValues[0]);
1240                            }
1241                            else if (filter == HDF5Constants.H5Z_FILTER_NBIT) {
1242                                filters.append("NBIT");
1243                            }
1244                            else if (filter == HDF5Constants.H5Z_FILTER_SCALEOFFSET) {
1245                                filters.append("SCALEOFFSET: MIN BITS = ").append(cdValues[0]);
1246                            }
1247                            else if (filter == HDF5Constants.H5Z_FILTER_SZIP) {
1248                                filters.append("SZIP");
1249                                compression.append("SZIP: Pixels per block = ").append(cdValues[1]);
1250                                k++;
1251                                int flag = -1;
1252                                try {
1253                                    flag = H5.H5Zget_filter_info(filter);
1254                                }
1255                                catch (Exception ex) {
1256                                    log.debug("getMetadata(): H5Zget_filter_info failure: ", ex);
1257                                    flag = -1;
1258                                }
1259                                if (flag == HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED)
1260                                    compression.append(": H5Z_FILTER_CONFIG_DECODE_ENABLED");
1261                                else if ((flag == HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED)
1262                                        || (flag >= (HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED
1263                                                + HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED)))
1264                                    compression.append(": H5Z_FILTER_CONFIG_ENCODE_ENABLED");
1265                            }
1266                            else {
1267                                filters.append("USERDEFINED ").append(cdName[0]).append("(").append(filter).append("): ");
1268                                for (int j = 0; j < cdNelmts[0]; j++) {
1269                                    if (j > 0)
1270                                        filters.append(", ");
1271                                    filters.append(cdValues[j]);
1272                                }
1273                                log.debug("getMetadata(): filter[{}] is user defined compression", i);
1274                            }
1275                        } //  (int i=0; i<nfilt; i++)
1276                    }
1277
1278                    if (compression.length() == 0)
1279                        compression.append("NONE");
1280                    log.trace("getMetadata(): filter compression={}", compression);
1281                    log.trace("getMetadata(): filter information={}", filters);
1282
1283                    storage.setLength(0);
1284                    storage.append("SIZE: ").append(storageSize);
1285
1286                    try {
1287                        int[] at = { 0 };
1288                        H5.H5Pget_alloc_time(pcid, at);
1289                        storage.append(", allocation time: ");
1290                        if (at[0] == HDF5Constants.H5D_ALLOC_TIME_EARLY)
1291                            storage.append("Early");
1292                        else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_INCR)
1293                            storage.append("Incremental");
1294                        else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_LATE)
1295                            storage.append("Late");
1296                        else
1297                            storage.append("Default");
1298                    }
1299                    catch (Exception ex) {
1300                        log.debug("getMetadata(): Storage allocation time:", ex);
1301                    }
1302                    log.trace("getMetadata(): storage={}", storage);
1303                }
1304                finally {
1305                    try {
1306                        H5.H5Pclose(paid);
1307                    }
1308                    catch (Exception ex) {
1309                        log.debug("getMetadata(): H5Pclose(paid {}) failure: ", paid, ex);
1310                    }
1311                    try {
1312                        H5.H5Pclose(pcid);
1313                    }
1314                    catch (Exception ex) {
1315                        log.debug("getMetadata(): H5Pclose(pcid {}) failure: ", pcid, ex);
1316                    }
1317                    close(did);
1318                }
1319            }
1320        }
1321
1322        List<Attribute> attrlist = null;
1323        try {
1324            attrlist = objMetadata.getMetadata(attrPropList);
1325        }
1326        catch (Exception ex) {
1327            log.debug("getMetadata(): getMetadata failed: ", ex);
1328        }
1329        return attrlist;
1330    }
1331
1332    /**
1333     * Writes a specific piece of metadata (such as an attribute) into the file.
1334     *
1335     * If the attribute already exists in the file, this method updates its
1336     * value. If the attribute does not exist in the file, it creates the
1337     * attribute and attaches it to the object. Note that writing a newly
1338     * constructed attribute whose name matches an existing attribute will fail.
1339     * To update the value of an existing attribute in the file, get the
1340     * instance of the attribute with getMetadata(), change its value, then
1341     * call writeMetadata() to write the new value.
1342     *
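     * A minimal usage sketch (the attribute picked here is illustrative; any
     * Attribute instance retrieved from this object can be rewritten):
     *
     * <pre>
     * List&lt;Attribute&gt; attrList = dset.getMetadata();
     * Attribute attr = attrList.get(0); // the attribute whose value will change
     * // ... modify the attribute's value in place ...
     * dset.writeMetadata(attr);         // write the updated value back to the file
     * </pre>
     *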
1343     * @param info
1344     *            the metadata to write.
1345     *
1346     * @throws Exception
1347     *             if the metadata can not be written
1348     */
1349    @Override
1350    public void writeMetadata(Object info) throws Exception {
1351        try {
1352            objMetadata.writeMetadata(info);
1353        }
1354        catch (Exception ex) {
1355            log.debug("writeMetadata(): Object not an Attribute");
1356        }
1357    }
1358
1359    /**
1360     * Deletes an existing piece of metadata from this object.
1361     *
1362     * @param info
1363     *            the metadata to delete.
1364     *
1365     * @throws HDF5Exception
1366     *             if the metadata can not be removed
1367     */
1368    @Override
1369    public void removeMetadata(Object info) throws HDF5Exception {
1370        try {
1371            objMetadata.removeMetadata(info);
1372        }
1373        catch (Exception ex) {
1374            log.debug("removeMetadata(): Object not an Attribute");
1375            return;
1376        }
1377
1378        Attribute attr = (Attribute) info;
1379        log.trace("removeMetadata(): {}", attr.getAttributeName());
1380        long did = open();
1381        if (did >= 0) {
1382            try {
1383                H5.H5Adelete(did, attr.getAttributeName());
1384            }
1385            finally {
1386                close(did);
1387            }
1388        }
1389        else {
1390            log.debug("removeMetadata(): failed to open scalar dataset");
1391        }
1392    }
1393
1394    /**
1395     * Updates an existing piece of metadata attached to this object.
1396     *
1397     * @param info
1398     *            the metadata to update.
1399     *
1400     * @throws HDF5Exception
1401     *             if the metadata can not be updated
1402     */
1403    @Override
1404    public void updateMetadata(Object info) throws HDF5Exception {
1405        try {
1406            objMetadata.updateMetadata(info);
1407        }
1408        catch (Exception ex) {
1409            log.debug("updateMetadata(): Object not an Attribute");
1410            return;
1411        }
1412    }
1413
1414    /*
1415     * (non-Javadoc)
1416     *
1417     * @see hdf.object.HObject#setName(java.lang.String)
1418     */
1419    @Override
1420    public void setName(String newName) throws Exception {
1421        if (newName == null)
1422            throw new IllegalArgumentException("The new name is NULL");
1423
1424        H5File.renameObject(this, newName);
1425        super.setName(newName);
1426    }
1427
1428    /**
1429     * Resets the selection of the dataspace.
1430     */
1431    protected void resetSelection() {
1432        super.resetSelection();
1433
1434        if (interlace == INTERLACE_PIXEL) {
1435            // 24-bit TRUE color image
1436            // [height][width][pixel components]
1437            selectedDims[2] = 3;
1438            selectedDims[0] = dims[0];
1439            selectedDims[1] = dims[1];
1440            selectedIndex[0] = 0; // index for height
1441            selectedIndex[1] = 1; // index for width
1442            selectedIndex[2] = 2; // index for depth
1443        }
1444        else if (interlace == INTERLACE_PLANE) {
1445            // 24-bit TRUE color image
1446            // [pixel components][height][width]
1447            selectedDims[0] = 3;
1448            selectedDims[1] = dims[1];
1449            selectedDims[2] = dims[2];
1450            selectedIndex[0] = 1; // index for height
1451            selectedIndex[1] = 2; // index for width
1452            selectedIndex[2] = 0; // index for depth
1453        }
1454
1455        if ((rank > 1) && (selectedIndex[0] > selectedIndex[1]))
1456            isDefaultImageOrder = false;
1457        else
1458            isDefaultImageOrder = true;
1459    }
1460
1461    /**
1462     * Creates a scalar dataset in a file with/without chunking and compression.
1463     *
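     * A minimal sketch of using this overload to create a 2D integer dataset without
     * chunking or compression (the group <code>pgroup</code>, the name and the sizes
     * are illustrative):
     *
     * <pre>
     * long[] dims = { 20, 10 };
     * int[] data = new int[20 * 10];
     * Datatype intType = new H5Datatype(Datatype.CLASS_INTEGER, 4, Datatype.NATIVE, Datatype.NATIVE);
     * Dataset dset = H5ScalarDS.create(&quot;2D int data&quot;, pgroup, intType, dims, null, null, 0, data);
     * </pre>
     *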
1464     * @param name
1465     *            the name of the dataset to create.
1466     * @param pgroup
1467     *            parent group where the new dataset is created.
1468     * @param type
1469     *            the datatype of the dataset.
1470     * @param dims
1471     *            the dimension size of the dataset.
1472     * @param maxdims
1473     *            the max dimension size of the dataset. maxdims is set to dims if maxdims = null.
1474     * @param chunks
1475     *            the chunk size of the dataset. No chunking if chunks = null.
1476     * @param gzip
1477     *            GZIP compression level (1 to 9). No compression if gzip&lt;=0.
1478     * @param data
1479     *            the array of data values.
1480     *
1481     * @return the new scalar dataset if successful; otherwise returns null.
1482     *
1483     * @throws Exception if there is a failure.
1484     */
1485    public static Dataset create(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims,
1486            long[] chunks, int gzip, Object data) throws Exception {
1487        return create(name, pgroup, type, dims, maxdims, chunks, gzip, null, data);
1488    }
1489
1490    /**
1491     * Creates a scalar dataset in a file with/without chunking and compression.
1492     *
1493     * The following example shows how to create a string dataset using this function.
1494     *
1495     * <pre>
1496     * H5File file = new H5File(&quot;test.h5&quot;, H5File.CREATE);
1497     * int max_str_len = 120;
1498     * Datatype strType = new H5Datatype(Datatype.CLASS_STRING, max_str_len, Datatype.NATIVE, Datatype.NATIVE);
1499     * int size = 10000;
1500     * long dims[] = { size };
1501     * long chunks[] = { 1000 };
1502     * int gzip = 9;
1503     * String strs[] = new String[size];
1504     *
1505     * for (int i = 0; i &lt; size; i++)
1506     *     strs[i] = String.valueOf(i);
1507     *
1508     * file.open();
1509     * file.createScalarDS(&quot;/1D scalar strings&quot;, null, strType, dims, null, chunks, gzip, strs);
1510     *
1511     * try {
1512     *     file.close();
1513     * }
1514     * catch (Exception ex) {
1515     * }
1516     * </pre>
1517     *
1518     * @param name
1519     *            the name of the dataset to create.
1520     * @param pgroup
1521     *            parent group where the new dataset is created.
1522     * @param type
1523     *            the datatype of the dataset.
1524     * @param dims
1525     *            the dimension size of the dataset.
1526     * @param maxdims
1527     *            the max dimension size of the dataset. maxdims is set to dims if maxdims = null.
1528     * @param chunks
1529     *            the chunk size of the dataset. No chunking if chunks = null.
1530     * @param gzip
1531     *            GZIP compression level (1 to 9). No compression if gzip&lt;=0.
1532     * @param fillValue
1533     *            the default data value.
1534     * @param data
1535     *            the array of data values.
1536     *
1537     * @return the new scalar dataset if successful; otherwise returns null.
1538     *
1539     * @throws Exception if there is a failure.
1540     */
1541    public static Dataset create(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims,
1542            long[] chunks, int gzip, Object fillValue, Object data) throws Exception {
1543        H5ScalarDS dataset = null;
1544        String fullPath = null;
1545        long did = HDF5Constants.H5I_INVALID_HID;
1546        long plist = HDF5Constants.H5I_INVALID_HID;
1547        long sid = HDF5Constants.H5I_INVALID_HID;
1548        long tid = HDF5Constants.H5I_INVALID_HID;
1549
1550        if ((pgroup == null) || (name == null) || (dims == null) || ((gzip > 0) && (chunks == null))) {
1551            log.debug("create(): one or more parameters are null");
1552            return null;
1553        }
1554
1555        H5File file = (H5File) pgroup.getFileFormat();
1556        if (file == null) {
1557            log.debug("create(): parent group FileFormat is null");
1558            return null;
1559        }
1560
1561        String path = HObject.SEPARATOR;
1562        if (!pgroup.isRoot()) {
1563            path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR;
1564            if (name.endsWith("/"))
1565                name = name.substring(0, name.length() - 1);
1566            int idx = name.lastIndexOf('/');
1567            if (idx >= 0)
1568                name = name.substring(idx + 1);
1569        }
1570
1571        fullPath = path + name;
1572
1573        // setup chunking and compression
1574        boolean isExtentable = false;
1575        if (maxdims != null) {
1576            for (int i = 0; i < maxdims.length; i++) {
1577                if (maxdims[i] == 0)
1578                    maxdims[i] = dims[i];
1579                else if (maxdims[i] < 0)
1580                    maxdims[i] = HDF5Constants.H5S_UNLIMITED;
1581
1582                if (maxdims[i] != dims[i])
1583                    isExtentable = true;
1584            }
1585        }
1586
1587        // HDF5 requires chunking in order to define extendible datasets.
1588        // Chunking makes it possible to extend datasets efficiently, without
1589        // having to reorganize storage excessively. Use a default chunk size of
1590        // 64 in each dimension (or the dimension size, if smaller), which generally performs well.
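        // Illustrative example (values are hypothetical): for dims = {100, 200} and
        // maxdims = {H5S_UNLIMITED, 200}, the loop below produces chunks = {64, 64}.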
1591        if ((chunks == null) && isExtentable) {
1592            chunks = new long[dims.length];
1593            for (int i = 0; i < dims.length; i++)
1594                chunks[i] = Math.min(dims[i], 64);
1595        }
1596
1597        // prepare the dataspace and datatype
1598        int rank = dims.length;
1599
1600        tid = type.createNative();
1601        if (tid >= 0) {
1602            try {
1603                sid = H5.H5Screate_simple(rank, dims, maxdims);
1604
1605                // figure out creation properties
1606                plist = HDF5Constants.H5P_DEFAULT;
1607
1608                byte[] valFill = null;
1609                try {
1610                    valFill = parseFillValue(type, fillValue);
1611                }
1612                catch (Exception ex) {
1613                    log.debug("create(): parse fill value: ", ex);
1614                }
1615                log.trace("create(): parseFillValue={}", valFill);
1616
1617                if (chunks != null || valFill != null) {
1618                    plist = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
1619
1620                    if (chunks != null) {
1621                        H5.H5Pset_layout(plist, HDF5Constants.H5D_CHUNKED);
1622                        H5.H5Pset_chunk(plist, rank, chunks);
1623
1624                        // compression requires chunking
1625                        if (gzip > 0) {
1626                            H5.H5Pset_deflate(plist, gzip);
1627                        }
1628                    }
1629
1630                    if (valFill != null)
1631                        H5.H5Pset_fill_value(plist, tid, valFill);
1632                }
1633
1634                long fid = file.getFID();
1635
1636                log.trace("create(): create dataset fid={}", fid);
1637                did = H5.H5Dcreate(fid, fullPath, tid, sid, HDF5Constants.H5P_DEFAULT, plist, HDF5Constants.H5P_DEFAULT);
1638                log.trace("create(): create dataset did={}", did);
1639                dataset = new H5ScalarDS(file, name, path);
1640            }
1641            finally {
1642                try {
1643                    H5.H5Pclose(plist);
1644                }
1645                catch (HDF5Exception ex) {
1646                    log.debug("create(): H5Pclose(plist {}) failure: ", plist, ex);
1647                }
1648                try {
1649                    H5.H5Sclose(sid);
1650                }
1651                catch (HDF5Exception ex) {
1652                    log.debug("create(): H5Sclose(sid {}) failure: ", sid, ex);
1653                }
1654                try {
1655                    H5.H5Tclose(tid);
1656                }
1657                catch (HDF5Exception ex) {
1658                    log.debug("create(): H5Tclose(tid {}) failure: ", tid, ex);
1659                }
1660                try {
1661                    H5.H5Dclose(did);
1662                }
1663                catch (HDF5Exception ex) {
1664                    log.debug("create(): H5Dclose(did {}) failure: ", did, ex);
1665                }
1666            }
1667        }
1668
1669        if (dataset != null) {
1670            pgroup.addToMemberList(dataset);
1671            if (data != null) {
1672                dataset.init();
1673                long[] selected = dataset.getSelectedDims();
1674                for (int i = 0; i < rank; i++)
1675                    selected[i] = dims[i];
1676                dataset.write(data);
1677            }
1678        }
1679
1680        return dataset;
1681    }
1682
1683    // check _FillValue, valid_min, valid_max, and valid_range
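    // Illustrative example: a dataset with attributes _FillValue = {-999} and valid_range = {0.0, 100.0}
    // gets -999 added to its filtered image values and, if imageDataRange is not already set,
    // imageDataRange set to {0.0, 100.0}.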
1684    private void checkCFconvention(long oid) throws Exception {
1685        Object avalue = getAttrValue(oid, "_FillValue");
1686
1687        if (avalue != null) {
1688            int n = Array.getLength(avalue);
1689            for (int i = 0; i < n; i++)
1690                addFilteredImageValue((Number) Array.get(avalue, i));
1691        }
1692
1693        if (imageDataRange == null || imageDataRange[1] <= imageDataRange[0]) {
1694            double x0 = 0;
1695            double x1 = 0;
1696            avalue = getAttrValue(oid, "valid_range");
1697            if (avalue != null) {
1698                try {
1699                    x0 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 0).toString());
1700                    x1 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 1).toString());
1701                    imageDataRange = new double[2];
1702                    imageDataRange[0] = x0;
1703                    imageDataRange[1] = x1;
1704                    return;
1705                }
1706                catch (Exception ex) {
1707                    log.debug("checkCFconvention(): valid_range: ", ex);
1708                }
1709            }
1710
1711            avalue = getAttrValue(oid, "valid_min");
1712            if (avalue != null) {
1713                try {
1714                    x0 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 0).toString());
1715                }
1716                catch (Exception ex) {
1717                    log.debug("checkCFconvention(): valid_min: ", ex);
1718                }
1719                avalue = getAttrValue(oid, "valid_max");
1720                if (avalue != null) {
1721                    try {
1722                        x1 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 0).toString());
1723                        imageDataRange = new double[2];
1724                        imageDataRange[0] = x0;
1725                        imageDataRange[1] = x1;
1726                    }
1727                    catch (Exception ex) {
1728                        log.debug("checkCFconvention(): valid_max:", ex);
1729                    }
1730                }
1731            }
1732        } // (imageDataRange==null || imageDataRange[1]<=imageDataRange[0])
1733    }
1734
1735    private Object getAttrValue(long oid, String aname) {
1736        log.trace("getAttrValue(): start: name={}", aname);
1737
1738        long aid = HDF5Constants.H5I_INVALID_HID;
1739        long atid = HDF5Constants.H5I_INVALID_HID;
1740        long asid = HDF5Constants.H5I_INVALID_HID;
1741        Object avalue = null;
1742
1743        try {
1744            // try to find attribute name
1745            if(H5.H5Aexists_by_name(oid, ".", aname, HDF5Constants.H5P_DEFAULT))
1746                aid = H5.H5Aopen_by_name(oid, ".", aname, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
1747        }
1748        catch (HDF5LibraryException ex5) {
1749            log.debug("getAttrValue(): Failed to find attribute {} : Expected", aname);
1750        }
1751        catch (Exception ex) {
1752            log.debug("getAttrValue(): try to find attribute {}:", aname, ex);
1753        }
1754        if (aid > 0) {
1755            try {
1756                atid = H5.H5Aget_type(aid);
1757                long tmptid = atid;
1758                atid = H5.H5Tget_native_type(tmptid);
1759                try {
1760                    H5.H5Tclose(tmptid);
1761                }
1762                catch (Exception ex) {
1763                    log.debug("getAttrValue(): H5Tclose(tmptid {}) failure: ", tmptid, ex);
1764                }
1765                H5Datatype dsDatatype = new H5Datatype(getFileFormat(), atid);
1766
1767                asid = H5.H5Aget_space(aid);
1768                long adims[] = null;
1769
1770                int arank = H5.H5Sget_simple_extent_ndims(asid);
1771                if (arank > 0) {
1772                    adims = new long[arank];
1773                    H5.H5Sget_simple_extent_dims(asid, adims, null);
1774                }
1775                log.trace("getAttrValue(): adims={}", adims);
1776
1777                // retrieve the attribute value
1778                long lsize = 1;
1779                if (adims != null) {
1780                    for (int j = 0; j < adims.length; j++) {
1781                        lsize *= adims[j];
1782                    }
1783                }
1784                log.trace("getAttrValue(): lsize={}", lsize);
1785
1786                if (lsize > Integer.MAX_VALUE) throw new Exception("Invalid int size");
1787
1788                try {
1789                    avalue = H5Datatype.allocateArray(dsDatatype, (int) lsize);
1790                }
1791                catch (OutOfMemoryError e) {
1792                    log.debug("getAttrValue(): out of memory: ", e);
1793                    avalue = null;
1794                }
1795
1796                if (avalue != null) {
1797                    log.trace("getAttrValue(): read attribute id {} of size={}", atid, lsize);
1798                    H5.H5Aread(aid, atid, avalue);
1799
1800                    if (dsDatatype.isUnsigned()) {
1801                        log.trace("getAttrValue(): id {} is unsigned", atid);
1802                        avalue = convertFromUnsignedC(avalue, null);
1803                    }
1804                }
1805            }
1806            catch (Exception ex) {
1807                log.debug("getAttrValue(): try to get value for attribute {}: ", aname, ex);
1808            }
1809            finally {
1810                try {
1811                    H5.H5Tclose(atid);
1812                }
1813                catch (HDF5Exception ex) {
1814                    log.debug("getAttrValue(): H5Tclose(atid {}) failure: ", atid, ex);
1815                }
1816                try {
1817                    H5.H5Sclose(asid);
1818                }
1819                catch (HDF5Exception ex) {
1820                    log.debug("getAttrValue(): H5Sclose(asid {}) failure: ", asid, ex);
1821                }
1822                try {
1823                    H5.H5Aclose(aid);
1824                }
1825                catch (HDF5Exception ex) {
1826                    log.debug("getAttrValue(): H5Aclose(aid {}) failure: ", aid, ex);
1827                }
1828            }
1829        } // (aid > 0)
1830
1831        return avalue;
1832    }
1833
1834    private boolean isStringAttributeOf(long objID, String name, String value) {
1835        boolean retValue = false;
1836        long aid = HDF5Constants.H5I_INVALID_HID;
1837        long atid = HDF5Constants.H5I_INVALID_HID;
1838
1839        try {
1840            if (H5.H5Aexists_by_name(objID, ".", name, HDF5Constants.H5P_DEFAULT)) {
1841                aid = H5.H5Aopen_by_name(objID, ".", name, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
1842                atid = H5.H5Aget_type(aid);
1843                int size = (int)H5.H5Tget_size(atid);
1844                byte[] attrValue = new byte[size];
1845                H5.H5Aread(aid, atid, attrValue);
1846                String strValue = new String(attrValue).trim();
1847                retValue = strValue.equalsIgnoreCase(value);
1848            }
1849        }
1850        catch (Exception ex) {
1851            log.debug("isStringAttributeOf(): try to find out interlace mode:", ex);
1852        }
1853        finally {
1854            try {
1855                H5.H5Tclose(atid);
1856            }
1857            catch (HDF5Exception ex) {
1858                log.debug("isStringAttributeOf(): H5Tclose(atid {}) failure: ", atid, ex);
1859            }
1860            try {
1861                H5.H5Aclose(aid);
1862            }
1863            catch (HDF5Exception ex) {
1864                log.debug("isStringAttributeOf(): H5Aclose(aid {}) failure: ", aid, ex);
1865            }
1866        }
1867
1868        return retValue;
1869    }
1870
1871    /*
1872     * (non-Javadoc)
1873     *
1874     * @see hdf.object.Dataset#copy(hdf.object.Group, java.lang.String, long[], java.lang.Object)
1875     */
1876    @Override
1877    public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff) throws Exception {
1878        // must give a location to copy
1879        if (pgroup == null) {
1880            log.debug("copy(): Parent group is null");
1881            return null;
1882        }
1883
1884        Dataset dataset = null;
1885        long srcdid = HDF5Constants.H5I_INVALID_HID;
1886        long dstdid = HDF5Constants.H5I_INVALID_HID;
1887        long plist = HDF5Constants.H5I_INVALID_HID;
1888        long tid = HDF5Constants.H5I_INVALID_HID;
1889        long sid = HDF5Constants.H5I_INVALID_HID;
1890        String dname = null;
1891        String path = null;
1892
1893        if (pgroup.isRoot())
1894            path = HObject.SEPARATOR;
1895        else
1896            path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR;
1897        dname = path + dstName;
1898
1899        srcdid = open();
1900        if (srcdid >= 0) {
1901            try {
1902                tid = H5.H5Dget_type(srcdid);
1903                sid = H5.H5Screate_simple(dims.length, dims, null);
1904                plist = H5.H5Dget_create_plist(srcdid);
1905
1906                long[] chunks = new long[dims.length];
1907                boolean setChunkFlag = false;
1908                try {
1909                    H5.H5Pget_chunk(plist, dims.length, chunks);
1910                    for (int i = 0; i < dims.length; i++) {
1911                        if (dims[i] < chunks[i]) {
1912                            setChunkFlag = true;
1913                            if (dims[i] == 1)
1914                                chunks[i] = 1;
1915                            else
1916                                chunks[i] = dims[i] / 2;
1917                        }
1918                    }
1919                }
1920                catch (Exception ex) {
1921                    log.debug("copy(): chunk: ", ex);
1922                }
1923
1924                if (setChunkFlag)
1925                    H5.H5Pset_chunk(plist, dims.length, chunks);
1926
1927                try {
1928                    dstdid = H5.H5Dcreate(pgroup.getFID(), dname, tid, sid, HDF5Constants.H5P_DEFAULT, plist,
1929                            HDF5Constants.H5P_DEFAULT);
1930                }
1931                catch (Exception e) {
1932                    log.debug("copy(): H5Dcreate: ", e);
1933                }
1934                finally {
1935                    try {
1936                        H5.H5Dclose(dstdid);
1937                    }
1938                    catch (Exception ex2) {
1939                        log.debug("copy(): H5Dclose(dstdid {}) failure: ", dstdid, ex2);
1940                    }
1941                }
1942
1943                dataset = new H5ScalarDS(pgroup.getFileFormat(), dstName, path);
1944                if (buff != null) {
1945                    dataset.init();
1946                    dataset.write(buff);
1947                }
1948
1949                dstdid = dataset.open();
1950                if (dstdid >= 0) {
1951                    try {
1952                        H5File.copyAttributes(srcdid, dstdid);
1953                    }
1954                    finally {
1955                        try {
1956                            H5.H5Dclose(dstdid);
1957                        }
1958                        catch (Exception ex) {
1959                            log.debug("copy(): H5Dclose(dstdid {}) failure: ", dstdid, ex);
1960                        }
1961                    }
1962                }
1963            }
1964            finally {
1965                try {
1966                    H5.H5Pclose(plist);
1967                }
1968                catch (Exception ex) {
1969                    log.debug("copy(): H5Pclose(plist {}) failure: ", plist, ex);
1970                }
1971                try {
1972                    H5.H5Sclose(sid);
1973                }
1974                catch (Exception ex) {
1975                    log.debug("copy(): H5Sclose(sid {}) failure: ", sid, ex);
1976                }
1977                try {
1978                    H5.H5Tclose(tid);
1979                }
1980                catch (Exception ex) {
1981                    log.debug("copy(): H5Tclose(tid {}) failure: ", tid, ex);
1982                }
1983                try {
1984                    H5.H5Dclose(srcdid);
1985                }
1986                catch (Exception ex) {
1987                    log.debug("copy(): H5Dclose(srcdid {}) failure: ", srcdid, ex);
1988                }
1989            }
1990        }
1991
1992        pgroup.addToMemberList(dataset);
1993
1994        if (dataset != null)
1995            ((ScalarDS) dataset).setIsImage(isImage);
1996
1997        return dataset;
1998    }
1999
2000    /*
2001     * (non-Javadoc)
2002     *
2003     * @see hdf.object.ScalarDS#getPalette()
2004     */
2005    @Override
2006    public byte[][] getPalette() {
2007        if (palette == null)
2008            palette = readPalette(0);
2009
2010        return palette;
2011    }
2012
2013    /*
2014     * (non-Javadoc)
2015     *
2016     * @see hdf.object.ScalarDS#getPaletteName(int)
2017     */
2018    @Override
2019    public String getPaletteName(int idx) {
2020        byte[] refs = getPaletteRefs();
2021        long did = HDF5Constants.H5I_INVALID_HID;
2022        long palID = HDF5Constants.H5I_INVALID_HID;
2023        String paletteName = null;
2024
2025        if (refs == null) {
2026            log.debug("getPaletteName(): refs is null");
2027            return null;
2028        }
2029
2030        byte[] refBuf = new byte[8];
2031
2032        try {
2033            System.arraycopy(refs, idx * 8, refBuf, 0, 8);
2034        }
2035        catch (Exception err) {
2036            log.debug("getPaletteName(): arraycopy failure: ", err);
2037            return null;
2038        }
2039
2040        did = open();
2041        if (did >= 0) {
2042            try {
2043                palID = H5.H5Rdereference(getFID(), HDF5Constants.H5P_DEFAULT, HDF5Constants.H5R_OBJECT, refBuf);
2044                paletteName = H5.H5Iget_name(palID);
2045            }
2046            catch (Exception ex) {
2047                log.debug("getPaletteName(): H5Rdereference/H5Iget_name failure: ", ex);
2048            }
2049            finally {
2050                close(palID);
2051                close(did);
2052            }
2053        }
2054
2055        return paletteName;
2056    }
2057
2058    /*
2059     * (non-Javadoc)
2060     *
2061     * @see hdf.object.ScalarDS#readPalette(int)
2062     */
2063    @Override
2064    public byte[][] readPalette(int idx) {
2065        byte[][] thePalette = null;
2066        byte[] refs = getPaletteRefs();
2067        long did = HDF5Constants.H5I_INVALID_HID;
2068        long palID = HDF5Constants.H5I_INVALID_HID;
2069        long tid = HDF5Constants.H5I_INVALID_HID;
2070
2071        if (refs == null) {
2072            log.debug("readPalette(): refs is null");
2073            return null;
2074        }
2075
2076        byte[] p = null;
2077        byte[] refBuf = new byte[8];
2078
2079        try {
2080            System.arraycopy(refs, idx * 8, refBuf, 0, 8);
2081        }
2082        catch (Exception err) {
2083            log.debug("readPalette(): arraycopy failure: ", err);
2084            return null;
2085        }
2086
2087        did = open();
2088        if (did >= 0) {
2089            try {
2090                palID = H5.H5Rdereference(getFID(), HDF5Constants.H5P_DEFAULT, HDF5Constants.H5R_OBJECT, refBuf);
2091                log.trace("readPalette(): H5Rdereference: {}", palID);
2092                tid = H5.H5Dget_type(palID);
2093
2094                // support only 3*256 byte palette data
2095                if (H5.H5Dget_storage_size(palID) <= 768) {
2096                    p = new byte[3 * 256];
2097                    H5.H5Dread(palID, tid, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, p);
2098                }
2099            }
2100            catch (HDF5Exception ex) {
2101                log.debug("readPalette(): failure: ", ex);
2102                p = null;
2103            }
2104            finally {
2105                try {
2106                    H5.H5Tclose(tid);
2107                }
2108                catch (HDF5Exception ex2) {
2109                    log.debug("readPalette(): H5Tclose(tid {}) failure: ", tid, ex2);
2110                }
2111                close(palID);
2112                close(did);
2113            }
2114        }
2115
2116        if (p != null) {
2117            thePalette = new byte[3][256];
2118            for (int i = 0; i < 256; i++) {
2119                thePalette[0][i] = p[i * 3];
2120                thePalette[1][i] = p[i * 3 + 1];
2121                thePalette[2][i] = p[i * 3 + 2];
2122            }
2123        }
2124
2125        return thePalette;
2126    }
2127
2128    private static byte[] parseFillValue(Datatype type, Object fillValue) throws Exception {
2129        byte[] data = null;
2130
2131        if (type == null || fillValue == null) {
2132            log.debug("parseFillValue(): datatype or fill value is null");
2133            return null;
2134        }
2135
2136        int datatypeClass = type.getDatatypeClass();
2137        int datatypeSize = (int)type.getDatatypeSize();
2138
2139        double valDbl = 0;
2140        String valStr = null;
2141
2142        if (fillValue instanceof String)
2143            valStr = (String) fillValue;
2144        else if (fillValue.getClass().isArray())
2145            valStr = Array.get(fillValue, 0).toString();
2146
2147        if (!type.isString()) {
2148            try {
2149                valDbl = Double.parseDouble(valStr);
2150            }
2151            catch (NumberFormatException ex) {
2152                log.debug("parseFillValue(): parse error: ", ex);
2153                return null;
2154            }
2155        }
2156
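        // Illustrative example: for a 4-byte CLASS_INTEGER datatype and fillValue = new int[] { -1 },
        // valStr is "-1", valDbl is -1.0, and the switch below returns HDFNativeData.intToByte(-1).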
2157        try {
2158            switch (datatypeClass) {
2159                case Datatype.CLASS_INTEGER:
2160                case Datatype.CLASS_ENUM:
2161                case Datatype.CLASS_CHAR:
2162                    log.trace("parseFillValue(): class CLASS_INT-ENUM-CHAR");
2163                    if (datatypeSize == 1)
2164                        data = new byte[] { (byte) valDbl };
2165                    else if (datatypeSize == 2)
2166                        data = HDFNativeData.shortToByte((short) valDbl);
2167                    else if (datatypeSize == 8)
2168                        data = HDFNativeData.longToByte((long) valDbl);
2169                    else
2170                        data = HDFNativeData.intToByte((int) valDbl);
2171                    break;
2172                case Datatype.CLASS_FLOAT:
2173                    log.trace("parseFillValue(): class CLASS_FLOAT");
2174                    if (datatypeSize > 8)
2175                        data = valStr.getBytes();
2176                    else if (datatypeSize == 8)
2177                        data = HDFNativeData.doubleToByte(valDbl);
2178                    else
2179                        data = HDFNativeData.floatToByte((float) valDbl);
2180                    break;
2181                case Datatype.CLASS_STRING:
2182                    log.trace("parseFillValue(): class CLASS_STRING");
2183                    if (valStr != null)
2184                        data = valStr.getBytes();
2185                    break;
2186                case Datatype.CLASS_REFERENCE:
2187                    log.trace("parseFillValue(): class CLASS_REFERENCE");
2188                    data = HDFNativeData.longToByte((long) valDbl);
2189                    break;
2190                default:
2191                    log.debug("parseFillValue(): datatypeClass unknown");
2192                    break;
2193            } // (datatypeClass)
2194        }
2195        catch (Exception ex) {
2196            log.debug("parseFillValue(): failure: ", ex);
2197            data = null;
2198        }
2199
2200        return data;
2201    }
2202
2203    /*
2204     * (non-Javadoc)
2205     *
2206     * @see hdf.object.ScalarDS#getPaletteRefs()
2207     */
2208    @Override
2209    public byte[] getPaletteRefs() {
2210        if (!isInited())
2211            init(); // init will be called to get refs
2212
2213        return paletteRefs;
2214    }
2215
2216    /**
2217     * Reads the references of palettes into a byte array. Each reference requires eight bytes of storage, so the
2218     * array length is 8*numberOfPalettes.
2219     */
2220    private byte[] getPaletteRefs(long did) {
2221        long aid = HDF5Constants.H5I_INVALID_HID;
2222        long sid = HDF5Constants.H5I_INVALID_HID;
2223        long atype = HDF5Constants.H5I_INVALID_HID;
2224        int size = 0;
2225        int rank = 0;
2226        byte[] refbuf = null;
2227
2228        try {
2229            if(H5.H5Aexists_by_name(did, ".", "PALETTE", HDF5Constants.H5P_DEFAULT)) {
2230                aid = H5.H5Aopen_by_name(did, ".", "PALETTE", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
2231                sid = H5.H5Aget_space(aid);
2232                rank = H5.H5Sget_simple_extent_ndims(sid);
2233                size = 1;
2234                if (rank > 0) {
2235                    long[] dims = new long[rank];
2236                    H5.H5Sget_simple_extent_dims(sid, dims, null);
2237                    log.trace("getPaletteRefs(): rank={}, dims={}", rank, dims);
2238                    for (int i = 0; i < rank; i++)
2239                        size *= (int) dims[i];
2240                }
2241
2242                if ((8L * size) > Integer.MAX_VALUE)
2243                    throw new HDF5Exception("Invalid int size");
2244
2245                refbuf = new byte[size * 8];
2246                atype = H5.H5Aget_type(aid);
2247
2248                H5.H5Aread(aid, atype, refbuf);
2249            }
2250        }
2251        catch (HDF5Exception ex) {
2252            log.debug("getPaletteRefs(): Palette attribute search failed: Expected", ex);
2253            refbuf = null;
2254        }
2255        finally {
2256            try {
2257                H5.H5Tclose(atype);
2258            }
2259            catch (HDF5Exception ex2) {
2260                log.debug("getPaletteRefs(): H5Tclose(atype {}) failure: ", atype, ex2);
2261            }
2262            try {
2263                H5.H5Sclose(sid);
2264            }
2265            catch (HDF5Exception ex2) {
2266                log.debug("getPaletteRefs(): H5Sclose(sid {}) failure: ", sid, ex2);
2267            }
2268            try {
2269                H5.H5Aclose(aid);
2270            }
2271            catch (HDF5Exception ex2) {
2272                log.debug("getPaletteRefs(): H5Aclose(aid {}) failure: ", aid, ex2);
2273            }
2274        }
2275
2276        return refbuf;
2277    }
2278
2279    /**
2280     * H5Dset_extent verifies that the dataset is at least of size newDims, extending it if necessary. The
2281     * dimensionality of newDims is the same as that of the dataspace of the dataset being changed.
2282     *
2283     * This method can be applied to the following datasets: 1) any dataset with unlimited dimensions; 2) a dataset
2284     * with fixed dimensions if the current dimension sizes are less than the maximum sizes set with maxdims (see
2285     * H5Screate_simple).
2286     *
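     * A minimal usage sketch (the sizes are illustrative; the dataset must have been created with maxdims large
     * enough, or unlimited, to allow the growth):
     *
     * <pre>
     * long[] curDims = dset.getDims();
     * long[] newDims = { curDims[0] * 2, curDims[1] };
     * dset.extend(newDims);
     * </pre>
     *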
2287     * @param newDims the dimension target size
2288     *
2289     * @throws HDF5Exception
2290     *             If there is an error at the HDF5 library level.
2291     */
2292    public void extend(long[] newDims) throws HDF5Exception {
2293        long did = HDF5Constants.H5I_INVALID_HID;
2294        long sid = HDF5Constants.H5I_INVALID_HID;
2295
2296        did = open();
2297        if (did >= 0) {
2298            try {
2299                H5.H5Dset_extent(did, newDims);
2300                H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_GLOBAL);
2301                sid = H5.H5Dget_space(did);
2302                long[] checkDims = new long[rank];
2303                H5.H5Sget_simple_extent_dims(sid, checkDims, null);
2304                log.trace("extend(): rank={}, checkDims={}", rank, checkDims);
2305                for (int i = 0; i < rank; i++) {
2306                    if (checkDims[i] != newDims[i]) {
2307                        log.debug("extend(): error extending dataset");
2308                        throw new HDF5Exception("error extending dataset " + getName());
2309                    }
2310                }
2311                dims = checkDims;
2312            }
2313            catch (Exception e) {
2314                log.debug("extend(): failure: ", e);
2315                throw new HDF5Exception(e.getMessage());
2316            }
2317            finally {
2318                if (sid > 0)
2319                    H5.H5Sclose(sid);
2320
2321                close(did);
2322            }
2323        }
2324    }
2325
2326    /*
2327     * (non-Javadoc)
2328     *
2329     * @see hdf.object.Dataset#isVirtual()
2330     */
2331    @Override
2332    public boolean isVirtual() {
2333        return isVirtual;
2334    }
2335
2336    /*
2337     * (non-Javadoc)
2338     *
2339     * @see hdf.object.Dataset#getVirtualFilename(int)
2340     */
2341    @Override
2342    public String getVirtualFilename(int index) {
2343        if(isVirtual)
2344            return virtualNameList.get(index);
2345        else
2346            return null;
2347    }
2348
2349    /*
2350     * (non-Javadoc)
2351     *
2352     * @see hdf.object.Dataset#getVirtualMaps()
2353     */
2354    @Override
2355    public int getVirtualMaps() {
2356        if(isVirtual)
2357            return virtualNameList.size();
2358        else
2359            return -1;
2360    }
2361
2362}