001/*****************************************************************************
002 * Copyright by The HDF Group.                                               *
003 * Copyright by the Board of Trustees of the University of Illinois.         *
004 * All rights reserved.                                                      *
005 *                                                                           *
006 * This file is part of the HDF Java Products distribution.                  *
007 * The full copyright notice, including terms governing use, modification,   *
008 * and redistribution, is contained in the files COPYING and Copyright.html. *
009 * COPYING can be found at the root of the source code distribution tree.    *
010 * Or, see https://support.hdfgroup.org/products/licenses.html               *
011 * If you do not have access to either file, you may request a copy from     *
012 * help@hdfgroup.org.                                                        *
013 ****************************************************************************/
014
015package hdf.object.h5;
016
017import java.lang.reflect.Array;
018import java.text.DecimalFormat;
019import java.util.List;
020import java.util.Vector;
021
022import hdf.hdf5lib.H5;
023import hdf.hdf5lib.HDF5Constants;
024import hdf.hdf5lib.HDFNativeData;
025import hdf.hdf5lib.exceptions.HDF5DataFiltersException;
026import hdf.hdf5lib.exceptions.HDF5Exception;
027import hdf.hdf5lib.exceptions.HDF5LibraryException;
028import hdf.hdf5lib.structs.H5O_info_t;
029import hdf.object.Attribute;
030import hdf.object.Dataset;
031import hdf.object.Datatype;
032import hdf.object.FileFormat;
033import hdf.object.Group;
034import hdf.object.HObject;
035import hdf.object.ScalarDS;
036
037/**
038 * H5ScalarDS describes a multi-dimension array of HDF5 scalar or atomic data types, such as byte, int, short, long,
039 * float, double and string, and operations performed on the scalar dataset.
040 * <p>
041 * The library predefines a modest number of datatypes. For details,
042 * read <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/HDF5_Users_Guide/Datatypes/HDF5_Datatypes.htm">HDF5 Datatypes</a>
043 *
044 * @version 1.1 9/4/2007
045 * @author Peter X. Cao
046 */
047public class H5ScalarDS extends ScalarDS {
048    private static final long serialVersionUID = 2887517608230611642L;
049
050    private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5ScalarDS.class);
051
052    /**
053     * The list of attributes of this data object. Members of the list are instance of Attribute.
054     */
055    private List<Attribute> attributeList;
056
057    private int nAttributes = -1;
058
059    private H5O_info_t objInfo;
060
061    /**
062     * The byte array containing references of palettes. Each reference requires eight bytes storage. Therefore, the
063     * array length is 8*numberOfPalettes.
064     */
065    private byte[] paletteRefs;
066
067    /** flag to indicate if the dataset is an external dataset */
068    private boolean isExternal = false;
069
070    /** flag to indicate is the dataset is a virtual dataset */
071    private boolean isVirtual = false;
072    private List<String> virtualNameList;
073
074    /**
075     * flag to indicate if the datatype in file is the same as dataype in memory
076     */
077    private boolean isNativeDatatype = false;
078
079    /*
080     * Enum to indicate the type of I/O to perform inside of the common I/O
081     * function.
082     */
083    protected enum IO_TYPE {
084        READ, WRITE
085    };
086
087    /**
088     * Constructs an instance of a H5 scalar dataset with given file, dataset name and path.
089     * <p>
090     * For example, in H5ScalarDS(h5file, "dset", "/arrays/"), "dset" is the name of the dataset, "/arrays" is the group
091     * path of the dataset.
092     *
093     * @param theFile
094     *            the file that contains the data object.
095     * @param theName
096     *            the name of the data object, e.g. "dset".
097     * @param thePath
098     *            the full path of the data object, e.g. "/arrays/".
099     */
100    public H5ScalarDS(FileFormat theFile, String theName, String thePath) {
101        this(theFile, theName, thePath, null);
102    }
103
104    /**
105     * @deprecated Not for public use in the future.<br>
106     *             Using {@link #H5ScalarDS(FileFormat, String, String)}
107     *
108     * @param theFile
109     *            the file that contains the data object.
110     * @param theName
111     *            the name of the data object, e.g. "dset".
112     * @param thePath
113     *            the full path of the data object, e.g. "/arrays/".
114     * @param oid
115     *            the oid of the data object.
116     */
117    @Deprecated
118    public H5ScalarDS(FileFormat theFile, String theName, String thePath, long[] oid) {
119        super(theFile, theName, thePath, oid);
120        unsignedConverted = false;
121        paletteRefs = null;
122        objInfo = new H5O_info_t(-1L, -1L, 0, 0, -1L, 0L, 0L, 0L, 0L, null, null, null);
123
124        if ((oid == null) && (theFile != null)) {
125            // retrieve the object ID
126            try {
127                byte[] refbuf = H5.H5Rcreate(theFile.getFID(), this.getFullName(), HDF5Constants.H5R_OBJECT, -1);
128                this.oid = new long[1];
129                this.oid[0] = HDFNativeData.byteToLong(refbuf, 0);
130            }
131            catch (Exception ex) {
132                log.debug("constructor ID {} for {} failed H5Rcreate", theFile.getFID(), this.getFullName());
133            }
134        }
135    }
136
137    /*
138     * (non-Javadoc)
139     *
140     * @see hdf.object.HObject#open()
141     */
142    @Override
143    public long open() {
144        long did = -1;
145
146        try {
147            did = H5.H5Dopen(getFID(), getPath() + getName(), HDF5Constants.H5P_DEFAULT);
148            log.trace("open(): did={}", did);
149        }
150        catch (HDF5Exception ex) {
151            log.debug("open(): Failed to open dataset {}", getPath() + getName(), ex);
152            did = -1;
153        }
154
155        return did;
156    }
157
158    /*
159     * (non-Javadoc)
160     *
161     * @see hdf.object.HObject#close(int)
162     */
163    @Override
164    public void close(long did) {
165        if (did >= 0) {
166            try {
167                H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_LOCAL);
168            }
169            catch (Exception ex) {
170                log.debug("close(): H5Fflush(did {}) failure: ", did, ex);
171            }
172            try {
173                H5.H5Dclose(did);
174            }
175            catch (HDF5Exception ex) {
176                log.debug("close(): H5Dclose(did {}) failure: ", did, ex);
177            }
178        }
179    }
180
181    /**
182     * Retrieves datatype and dataspace information from file and sets the dataset
183     * in memory.
184     * <p>
185     * The init() is designed to support lazy operation in a dataset object. When a
186     * data object is retrieved from file, the datatype, dataspace and raw data are
187     * not loaded into memory. When it is asked to read the raw data from file,
188     * init() is first called to get the datatype and dataspace information, then
189     * load the raw data from file.
190     * <p>
191     * init() is also used to reset the selection of a dataset (start, stride and
192     * count) to the default, which is the entire dataset for 1D or 2D datasets. In
193     * the following example, init() at step 1) retrieves datatype and dataspace
194     * information from file. getData() at step 3) reads only one data point. init()
195     * at step 4) resets the selection to the whole dataset. getData() at step 4)
196     * reads the values of whole dataset into memory.
197     *
198     * <pre>
199     * dset = (Dataset) file.get(NAME_DATASET);
200     *
201     * // 1) get datatype and dataspace information from file
202     * dset.init();
203     * rank = dset.getRank(); // rank = 2, a 2D dataset
204     * count = dset.getSelectedDims();
205     * start = dset.getStartDims();
206     * dims = dset.getDims();
207     *
208     * // 2) select only one data point
209     * for (int i = 0; i &lt; rank; i++) {
210     *     start[0] = 0;
211     *     count[i] = 1;
212     * }
213     *
214     * // 3) read one data point
215     * data = dset.getData();
216     *
217     * // 4) reset selection to the whole dataset
218     * dset.init();
219     *
220     * // 5) clean the memory data buffer
221     * dset.clearData();
222     *
223     * // 6) Read the whole dataset
224     * data = dset.getData();
225     * </pre>
226     */
227    @Override
228    public void init() {
229
230        if (inited) {
231            resetSelection();
232            log.trace("init(): Dataset already intialized");
233            return; // already called. Initialize only once
234        }
235
236        long did = -1;
237        long tid = -1;
238        long sid = -1;
239        long nativeTID = -1;
240
241        did = open();
242        if (did >= 0) {
243            // check if it is an external or virtual dataset
244            long pid = -1;
245            try {
246                pid = H5.H5Dget_create_plist(did);
247                try {
248                    int nfiles = H5.H5Pget_external_count(pid);
249                    isExternal = (nfiles > 0);
250                    int layoutType = H5.H5Pget_layout(pid);
251                    if (isVirtual = (layoutType == HDF5Constants.H5D_VIRTUAL)) {
252                        try {
253                            long vmaps = H5.H5Pget_virtual_count(pid);
254                            if (vmaps > 0) {
255                                virtualNameList = new Vector<>();
256                                for (long next = 0; next < vmaps; next++) {
257                                    try {
258                                        String fname = H5.H5Pget_virtual_filename(pid, next);
259                                        virtualNameList.add(fname);
260                                        log.trace("init(): virtualNameList[{}]={}", next, fname);
261                                    }
262                                    catch (Exception err) {
263                                        continue;
264                                    }
265                                }
266                            }
267                        }
268                        catch (Exception err) {
269                            log.debug("init(): vds count error: ", err);
270                        }
271                    }
272                    log.trace("init(): pid={} nfiles={} isExternal={} isVirtual={}", pid, nfiles, isExternal, isVirtual);
273                }
274                catch (Exception ex) {
275                    log.debug("init(): check if it is an external or virtual dataset: ", ex);
276                }
277            }
278            catch (Exception ex) {
279                log.debug("init(): H5Dget_create_plist: ", ex);
280            }
281            finally {
282                try {
283                    H5.H5Pclose(pid);
284                }
285                catch (Exception ex) {
286                    log.debug("init(): H5Pclose(pid {}) failure: ", pid, ex);
287                }
288            }
289
290            paletteRefs = getPaletteRefs(did);
291
292            try {
293                sid = H5.H5Dget_space(did);
294                rank = H5.H5Sget_simple_extent_ndims(sid);
295                tid = H5.H5Dget_type(did);
296
297                log.trace("init(): tid={} sid={} rank={}", tid, sid, rank);
298
299                try {
300                    datatype = new H5Datatype(getFileFormat(), tid);
301
302                    log.trace("init(): tid={} is tclass={} has isText={} : isNamed={} :  isVLEN={} : isEnum={} : isUnsigned={} : isRegRef={}",
303                            tid, datatype.getDatatypeClass(), ((H5Datatype) datatype).isText(), datatype.isNamed(), datatype.isVLEN(),
304                            datatype.isEnum(), datatype.isUnsigned(), ((H5Datatype) datatype).isRegRef());
305                }
306                catch (Exception ex) {
307                    log.debug("init(): failed to create datatype for dataset: ", ex);
308                    datatype = null;
309                }
310
311                // Check if the datatype in the file is the native datatype
312                try {
313                    nativeTID = H5.H5Tget_native_type(tid);
314                    isNativeDatatype = H5.H5Tequal(tid, nativeTID);
315                    log.trace("init(): isNativeDatatype={}", isNativeDatatype);
316                }
317                catch (Exception ex) {
318                    log.debug("init(): check if native type failure: ", ex);
319                }
320
321                try {
322                    pid = H5.H5Dget_create_plist(did);
323                    int[] fillStatus = { 0 };
324                    if (H5.H5Pfill_value_defined(pid, fillStatus) >= 0) {
325                        // Check if fill value is user-defined before retrieving it.
326                        if (fillStatus[0] == HDF5Constants.H5D_FILL_VALUE_USER_DEFINED) {
327                            try {
328                                fillValue = H5Datatype.allocateArray((H5Datatype) datatype, 1);
329                            }
330                            catch (OutOfMemoryError e) {
331                                log.debug("init(): out of memory: ", e);
332                                fillValue = null;
333                            }
334                            catch (Exception ex) {
335                                log.debug("init(): allocate fill value buffer failed: ", ex);
336                                fillValue = null;
337                            }
338
339                            log.trace("init(): fillValue={}", fillValue);
340                            try {
341                                H5.H5Pget_fill_value(pid, nativeTID, fillValue);
342                                log.trace("init(): H5Pget_fill_value={}", fillValue);
343                                if (fillValue != null) {
344                                    if (datatype.isUnsigned() && !isFillValueConverted) {
345                                        fillValue = ScalarDS.convertFromUnsignedC(fillValue, null);
346                                        isFillValueConverted = true;
347                                    }
348
349                                    int n = Array.getLength(fillValue);
350                                    for (int i = 0; i < n; i++)
351                                        addFilteredImageValue((Number) Array.get(fillValue, i));
352                                }
353                            }
354                            catch (Exception ex2) {
355                                log.debug("init(): fill value was defined: ", ex2);
356                                fillValue = null;
357                            }
358                        }
359                    }
360                }
361                catch (HDF5Exception ex) {
362                    log.debug("init(): check if fill value is defined failure: ", ex);
363                }
364                finally {
365                    try {
366                        H5.H5Pclose(pid);
367                    }
368                    catch (Exception ex) {
369                        log.debug("init(): H5Pclose(pid {}) failure: ", pid, ex);
370                    }
371                }
372
373                if (rank == 0) {
374                    // a scalar data point
375                    rank = 1;
376                    dims = new long[1];
377                    dims[0] = 1;
378                    log.trace("init(): rank is a scalar data point");
379                }
380                else {
381                    dims = new long[rank];
382                    maxDims = new long[rank];
383                    H5.H5Sget_simple_extent_dims(sid, dims, maxDims);
384                    log.trace("init(): rank={}, dims={}, maxDims={}", rank, dims, maxDims);
385                }
386
387                inited = true;
388            }
389            catch (HDF5Exception ex) {
390                log.debug("init(): ", ex);
391            }
392            finally {
393                try {
394                    H5.H5Tclose(nativeTID);
395                }
396                catch (Exception ex2) {
397                    log.debug("init(): H5Tclose(nativeTID {}) failure: ", nativeTID, ex2);
398                }
399                try {
400                    H5.H5Tclose(tid);
401                }
402                catch (HDF5Exception ex2) {
403                    log.debug("init(): H5Tclose(tid {}) failure: ", tid, ex2);
404                }
405                try {
406                    H5.H5Sclose(sid);
407                }
408                catch (HDF5Exception ex2) {
409                    log.debug("init(): H5Sclose(sid {}) failure: ", sid, ex2);
410                }
411            }
412
413            // check for the type of image and interlace mode
414            // it is a true color image at one of three cases:
415            // 1) IMAGE_SUBCLASS = IMAGE_TRUECOLOR,
416            // 2) INTERLACE_MODE = INTERLACE_PIXEL,
417            // 3) INTERLACE_MODE = INTERLACE_PLANE
418            if ((rank >= 3) && isImage) {
419                interlace = -1;
420                isTrueColor = isStringAttributeOf(did, "IMAGE_SUBCLASS", "IMAGE_TRUECOLOR");
421
422                if (isTrueColor) {
423                    interlace = INTERLACE_PIXEL;
424                    if (isStringAttributeOf(did, "INTERLACE_MODE", "INTERLACE_PLANE")) {
425                        interlace = INTERLACE_PLANE;
426                    }
427                }
428            }
429
430            close(did);
431
432            startDims = new long[rank];
433            selectedDims = new long[rank];
434
435            resetSelection();
436        }
437        else {
438            log.debug("init(): failed to open dataset");
439        }
440        log.trace("init(): rank={}, startDims={}, selectedDims={}", rank, startDims, selectedDims);
441    }
442
443    /*
444     * (non-Javadoc)
445     *
446     * @see hdf.object.DataFormat#hasAttribute()
447     */
448    @Override
449    public boolean hasAttribute() {
450        objInfo.num_attrs = nAttributes;
451
452        if (objInfo.num_attrs < 0) {
453            long did = open();
454            if (did >= 0) {
455                objInfo.num_attrs = 0;
456
457                try {
458                    objInfo = H5.H5Oget_info(did);
459                    nAttributes = (int) objInfo.num_attrs;
460                }
461                catch (Exception ex) {
462                    objInfo.num_attrs = 0;
463                    log.debug("hasAttribute(): get object info: ", ex);
464                }
465
466                if(nAttributes > 0) {
467                    // test if it is an image
468                    // check image
469                    Object avalue = getAttrValue(did, "CLASS");
470                    if (avalue != null) {
471                        try {
472                            isImageDisplay = isImage = "IMAGE".equalsIgnoreCase(new String((byte[]) avalue).trim());
473                            log.trace("hasAttribute(): isImageDisplay dataset: {} with value = {}", isImageDisplay, avalue);
474                        }
475                        catch (Exception err) {
476                            log.debug("hasAttribute(): check image: ", err);
477                        }
478                    }
479
480                    // retrieve the IMAGE_MINMAXRANGE
481                    avalue = getAttrValue(did, "IMAGE_MINMAXRANGE");
482                    if (avalue != null) {
483                        double x0 = 0;
484                        double x1 = 0;
485                        try {
486                            x0 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 0).toString());
487                            x1 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 1).toString());
488                        }
489                        catch (Exception ex2) {
490                            x0 = x1 = 0;
491                        }
492                        if (x1 > x0) {
493                            imageDataRange = new double[2];
494                            imageDataRange[0] = x0;
495                            imageDataRange[1] = x1;
496                        }
497                    }
498
499                    try {
500                        checkCFconvention(did);
501                    }
502                    catch (Exception ex) {
503                        log.debug("hasAttribute(): checkCFconvention(did {}):", did, ex);
504                    }
505                }
506                close(did);
507            }
508            else {
509                log.debug("hasAttribute(): could not open dataset");
510            }
511        }
512
513        log.trace("hasAttribute(): nAttributes={}", objInfo.num_attrs);
514        return (objInfo.num_attrs > 0);
515    }
516
517    /*
518     * (non-Javadoc)
519     *
520     * @see hdf.object.Dataset#getDatatype()
521     */
522    @Override
523    public Datatype getDatatype() {
524        if (!inited)
525            init();
526
527        if (datatype == null) {
528            long did = -1;
529            long tid = -1;
530
531            did = open();
532            if (did >= 0) {
533                try {
534                    tid = H5.H5Dget_type(did);
535                    log.trace("getDatatype(): isNativeDatatype={}", isNativeDatatype);
536                    if (!isNativeDatatype) {
537                        long tmptid = -1;
538                        try {
539                            tmptid = H5Datatype.toNative(tid);
540                            if (tmptid >= 0) {
541                                try {
542                                    H5.H5Tclose(tid);
543                                }
544                                catch (Exception ex2) {
545                                    log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex2);
546                                }
547                                tid = tmptid;
548                            }
549                        }
550                        catch (Exception ex) {
551                            log.debug("getDatatype(): toNative: ", ex);
552                        }
553                    }
554                    datatype = new H5Datatype(getFileFormat(), tid);
555                }
556                catch (Exception ex) {
557                    log.debug("getDatatype(): get datatype failure: ", ex);
558                }
559                finally {
560                    try {
561                        H5.H5Tclose(tid);
562                    }
563                    catch (HDF5Exception ex) {
564                        log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex);
565                    }
566                    try {
567                        H5.H5Dclose(did);
568                    }
569                    catch (HDF5Exception ex) {
570                        log.debug("getDatatype(): H5Dclose(did {}) failure: ", did, ex);
571                    }
572                }
573            }
574        }
575
576        if (isExternal) {
577            String pdir = this.getFileFormat().getAbsoluteFile().getParent();
578
579            if (pdir == null) {
580                pdir = ".";
581            }
582            System.setProperty("user.dir", pdir);
583            log.trace("getDatatype(): External dataset: user.dir={}", pdir);
584        }
585
586        return datatype;
587    }
588
589    /*
590     * (non-Javadoc)
591     *
592     * @see hdf.object.Dataset#clear()
593     */
594    @Override
595    public void clear() {
596        super.clear();
597
598        if (attributeList != null) {
599            ((Vector<Attribute>) attributeList).setSize(0);
600        }
601    }
602
603    /*
604     * (non-Javadoc)
605     *
606     * @see hdf.object.Dataset#readBytes()
607     */
608    @Override
609    public byte[] readBytes() throws HDF5Exception {
610        byte[] theData = null;
611
612        if (!isInited())
613            init();
614
615        long did = open();
616        if (did >= 0) {
617            long fspace = -1;
618            long mspace = -1;
619            long tid = -1;
620
621            try {
622                long[] lsize = { 1 };
623                for (int j = 0; j < selectedDims.length; j++) {
624                    lsize[0] *= selectedDims[j];
625                }
626
627                fspace = H5.H5Dget_space(did);
628                mspace = H5.H5Screate_simple(rank, selectedDims, null);
629
630                // set the rectangle selection
631                // HDF5 bug: for scalar dataset, H5Sselect_hyperslab gives core dump
632                if (rank * dims[0] > 1) {
633                    H5.H5Sselect_hyperslab(fspace, HDF5Constants.H5S_SELECT_SET, startDims, selectedStride,
634                            selectedDims, null); // set block to 1
635                }
636
637                tid = H5.H5Dget_type(did);
638                long size = H5.H5Tget_size(tid) * lsize[0];
639                log.trace("readBytes(): size = {}", size);
640
641                if (size < Integer.MIN_VALUE || size > Integer.MAX_VALUE) throw new Exception("Invalid int size");
642
643                theData = new byte[(int)size];
644
645                log.trace("readBytes(): H5Dread: did={} tid={} fspace={} mspace={}", did, tid, fspace, mspace);
646                H5.H5Dread(did, tid, mspace, fspace, HDF5Constants.H5P_DEFAULT, theData);
647            }
648            catch (Exception ex) {
649                log.debug("readBytes(): failed to read data: ", ex);
650            }
651            finally {
652                try {
653                    H5.H5Sclose(fspace);
654                }
655                catch (Exception ex2) {
656                    log.debug("readBytes(): H5Sclose(fspace {}) failure: ", fspace, ex2);
657                }
658                try {
659                    H5.H5Sclose(mspace);
660                }
661                catch (Exception ex2) {
662                    log.debug("readBytes(): H5Sclose(mspace {}) failure: ", mspace, ex2);
663                }
664                try {
665                    H5.H5Tclose(tid);
666                }
667                catch (HDF5Exception ex2) {
668                    log.debug("readBytes(): H5Tclose(tid {}) failure: ", tid, ex2);
669                }
670                close(did);
671            }
672        }
673
674        return theData;
675    }
676
677    /**
678     * Reads the data from file.
679     * <p>
680     * read() reads the data from file to a memory buffer and returns the memory
681     * buffer. The dataset object does not hold the memory buffer. To store the
682     * memory buffer in the dataset object, one must call getData().
683     * <p>
684     * By default, the whole dataset is read into memory. Users can also select
685     * a subset to read. Subsetting is done in an implicit way.
686     * <p>
687     * <b>How to Select a Subset</b>
688     * <p>
689     * A selection is specified by three arrays: start, stride and count.
690     * <ol>
691     * <li>start: offset of a selection
692     * <li>stride: determines how many elements to move in each dimension
693     * <li>count: number of elements to select in each dimension
694     * </ol>
695     * getStartDims(), getStride() and getSelectedDims() returns the start,
696     * stride and count arrays respectively. Applications can make a selection
697     * by changing the values of the arrays.
698     * <p>
699     * The following example shows how to make a subset. In the example, the
700     * dataset is a 4-dimensional array of [200][100][50][10], i.e. dims[0]=200;
701     * dims[1]=100; dims[2]=50; dims[3]=10; <br>
702     * We want to select every other data point in dims[1] and dims[2]
703     *
704     * <pre>
705     * int rank = dataset.getRank(); // number of dimensions of the dataset
706     * long[] dims = dataset.getDims(); // the dimension sizes of the dataset
707     * long[] selected = dataset.getSelectedDims(); // the selected size of the
708     *                                              // dataset
709     * long[] start = dataset.getStartDims(); // the offset of the selection
710     * long[] stride = dataset.getStride(); // the stride of the dataset
711     * int[] selectedIndex = dataset.getSelectedIndex(); // the selected
712     *                                                   // dimensions for
713     *                                                   // display
714     *
715     * // select dim1 and dim2 as 2D data for display, and slice through dim0
716     * selectedIndex[0] = 1;
717     * selectedIndex[1] = 2;
718     * selectedIndex[1] = 0;
719     *
720     * // reset the selection arrays
721     * for (int i = 0; i &lt; rank; i++) {
722     *     start[i] = 0;
723     *     selected[i] = 1;
724     *     stride[i] = 1;
725     * }
726     *
727     * // set stride to 2 on dim1 and dim2 so that every other data point is
728     * // selected.
729     * stride[1] = 2;
730     * stride[2] = 2;
731     *
732     * // set the selection size of dim1 and dim2
733     * selected[1] = dims[1] / stride[1];
734     * selected[2] = dims[1] / stride[2];
735     *
736     * // when dataset.getData() is called, the selection above will be used
737     * // since
738     * // the dimension arrays are passed by reference. Changes of these arrays
739     * // outside the dataset object directly change the values of these array
740     * // in the dataset object.
741     * </pre>
742     * <p>
743     * For ScalarDS, the memory data buffer is a one-dimensional array of byte,
744     * short, int, float, double or String type based on the datatype of the
745     * dataset.
746     * <p>
747     * For CompoundDS, the memory data object is an java.util.List object. Each
748     * element of the list is a data array that corresponds to a compound field.
749     * <p>
750     * For example, if compound dataset "comp" has the following nested
751     * structure, and member datatypes
752     *
753     * <pre>
754     * comp --&gt; m01 (int)
755     * comp --&gt; m02 (float)
756     * comp --&gt; nest1 --&gt; m11 (char)
757     * comp --&gt; nest1 --&gt; m12 (String)
758     * comp --&gt; nest1 --&gt; nest2 --&gt; m21 (long)
759     * comp --&gt; nest1 --&gt; nest2 --&gt; m22 (double)
760     * </pre>
761     *
762     * getData() returns a list of six arrays: {int[], float[], char[],
763     * String[], long[] and double[]}.
764     *
765     * @return the data read from file.
766     *
767     * @see #getData()
768     * @see hdf.object.DataFormat#read()
769     *
770     * @throws Exception
771     *             if object can not be read
772     */
773    @Override
774    public Object read() throws Exception {
775        Object readData = null;
776
777        if (!isInited())
778            init();
779
780        try {
781            readData = scalarDatasetCommonIO(IO_TYPE.READ, null);
782        }
783        catch (Exception ex) {
784            log.debug("read(): failed to read scalar dataset: ", ex);
785            throw new Exception("Failed to read scalar dataset: " + ex.getMessage(), ex);
786        }
787
788        return readData;
789    }
790
791    /**
792     * Writes the given data buffer into this dataset in a file.
793     *
794     * @param buf
795     *            The buffer that contains the data values.
796     *
797     * @throws Exception
798     *             If there is an error at the HDF5 library level.
799     */
800    @Override
801    public void write(Object buf) throws Exception {
802        if (this.getFileFormat().isReadOnly())
803            throw new Exception("cannot write to scalar dataset in file opened as read-only");
804
805        if (!isInited())
806            init();
807
808        try {
809            scalarDatasetCommonIO(IO_TYPE.WRITE, buf);
810        }
811        catch (Exception ex) {
812            log.debug("write(): failed to write to scalar dataset: ", ex);
813            throw new Exception("Failed to write to scalar dataset: " + ex.getMessage(), ex);
814        }
815    }
816
817    private Object scalarDatasetCommonIO(IO_TYPE ioType, Object writeBuf) throws Exception {
818        H5Datatype dsDatatype = (H5Datatype) getDatatype();
819        Object theData = null;
820
821        /*
822         * I/O type-specific pre-initialization.
823         */
824        if (ioType == IO_TYPE.WRITE) {
825            if (writeBuf == null) {
826                log.debug("scalarDatasetCommonIO(): writeBuf is null");
827                throw new Exception("write buffer is null");
828            }
829
830            /*
831             * Check for any unsupported datatypes and fail early before
832             * attempting to write to the dataset.
833             */
834            if (dsDatatype.isVLEN() && !dsDatatype.isText()) {
835                log.debug("scalarDatasetCommonIO(): Cannot write non-string variable-length data");
836                throw new HDF5Exception("Writing non-string variable-length data is not supported");
837            }
838
839            if (dsDatatype.isRegRef()) {
840                log.debug("scalarDatasetCommonIO(): Cannot write region reference data");
841                throw new HDF5Exception("Writing region reference data is not supported");
842            }
843        }
844
845        long did = open();
846        if (did >= 0) {
847            long[] spaceIDs = { -1, -1 }; // spaceIDs[0]=mspace, spaceIDs[1]=fspace
848
849            try {
850                /*
851                 * NOTE: this call sets up a hyperslab selection in the file according to the
852                 * current selection in the dataset object.
853                 */
854                long totalSelectedSpacePoints = H5Utils.getTotalSelectedSpacePoints(did, dims, startDims,
855                        selectedStride, selectedDims, spaceIDs);
856                log.trace("scalarDatasetCommonIO(): totalSelectedSpacePoints={}", totalSelectedSpacePoints);
857
858                if (ioType == IO_TYPE.READ) {
859                    log.trace("scalarDatasetCommonIO():read ioType isNamed={} isEnum={} isText={} isRefObj={}", dsDatatype.isNamed(), dsDatatype.isEnum(), dsDatatype.isText(), dsDatatype.isRefObj());
860                    if ((originalBuf == null) || dsDatatype.isEnum() || dsDatatype.isText() || dsDatatype.isRefObj()
861                            || ((originalBuf != null) && (totalSelectedSpacePoints != nPoints))) {
862                        try {
863                            theData = H5Datatype.allocateArray(dsDatatype, (int) totalSelectedSpacePoints);
864                        }
865                        catch (OutOfMemoryError err) {
866                            log.debug("scalarDatasetCommonIO(): Out of memory");
867                            throw new HDF5Exception("Out Of Memory");
868                        }
869                    }
870                    else {
871                        // reuse the buffer if the size is the same
872                        log.trace("scalarDatasetCommonIO():read ioType reuse the buffer if the size is the same");
873                        theData = originalBuf;
874                    }
875
876                    if (theData != null) {
877                        /*
878                         * Actually read the data now that everything has been setup.
879                         */
880                        long tid = -1;
881                        try {
882                            log.trace("scalarDatasetCommonIO():read ioType create native");
883                            tid = dsDatatype.createNative();
884
885                            if (dsDatatype.isVLEN() || (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isVLEN())) {
886                                log.trace("scalarDatasetCommonIO(): H5DreadVL did={} tid={} spaceIDs[0]={} spaceIDs[1]={}",
887                                        did, tid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
888                                        (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
889
890                                H5.H5DreadVL(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT,
891                                        (Object[]) theData);
892                            }
893                            else {
894                                log.trace("scalarDatasetCommonIO(): H5Dread did={} tid={} spaceIDs[0]={} spaceIDs[1]={}",
895                                        did, tid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
896                                        (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
897
898                                H5.H5Dread(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, theData);
899                            }
900                        }
901                        catch (HDF5DataFiltersException exfltr) {
902                            log.debug("scalarDatasetCommonIO(): read failure: ", exfltr);
903                            throw new Exception("Filter not available exception: " + exfltr.getMessage(), exfltr);
904                        }
905                        catch (Exception ex) {
906                            log.debug("scalarDatasetCommonIO(): read failure: ", ex);
907                            throw new Exception(ex.getMessage(), ex);
908                        }
909                        finally {
910                            dsDatatype.close(tid);
911                        }
912
913                        /*
914                         * Perform any necessary data conversions.
915                         */
916                        if (dsDatatype.isText() && convertByteToString && (theData instanceof byte[])) {
917                            log.trace("scalarDatasetCommonIO(): isText: converting byte array to string array");
918                            theData = byteToString((byte[]) theData, (int) dsDatatype.getDatatypeSize());
919                        }
920                        else if (dsDatatype.isRefObj()) {
921                            log.trace("scalarDatasetCommonIO(): isREF: converting byte array to long array");
922                            theData = HDFNativeData.byteToLong((byte[]) theData);
923                        }
924                    }
925                } // IO_TYPE.READ
926                else {
927                    /*
928                     * Perform any necessary data conversions before writing the data.
929                     *
930                     * Note that v-len strings do not get converted, regardless of
931                     * conversion request type.
932                     */
933                    Object tmpData = writeBuf;
934                    try {
935                        // Check if we need to convert integer data
936                        int tsize = (int) dsDatatype.getDatatypeSize();
937                        String cname = writeBuf.getClass().getName();
938                        char dname = cname.charAt(cname.lastIndexOf("[") + 1);
939                        boolean doIntConversion = (((tsize == 1) && (dname == 'S')) || ((tsize == 2) && (dname == 'I'))
940                                || ((tsize == 4) && (dname == 'J')) || (dsDatatype.isUnsigned() && unsignedConverted));
941
942                        if (doIntConversion) {
943                            log.trace("scalarDatasetCommonIO(): converting integer data to unsigned C-type integers");
944                            tmpData = convertToUnsignedC(writeBuf, null);
945                        }
946                        else if (dsDatatype.isText() && !dsDatatype.isVarStr() && convertByteToString) {
947                            log.trace("scalarDatasetCommonIO(): converting string array to byte array");
948                            tmpData = stringToByte((String[]) writeBuf, tsize);
949                        }
950                        else if (dsDatatype.isEnum() && (Array.get(writeBuf, 0) instanceof String)) {
951                            log.trace("scalarDatasetCommonIO(): converting enum names to values");
952                            tmpData = dsDatatype.convertEnumNameToValue((String[]) writeBuf);
953                        }
954                    }
955                    catch (Exception ex) {
956                        log.debug("scalarDatasetCommonIO(): data conversion failure: ", ex);
957                        throw new Exception("data conversion failure: " + ex.getMessage());
958                    }
959
960                    /*
961                     * Actually write the data now that everything has been setup.
962                     */
963                    long tid = -1;
964                    try {
965                        tid = dsDatatype.createNative();
966
967                        if (dsDatatype.isVLEN() || (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isVLEN())) {
968                            log.trace("scalarDatasetCommonIO(): H5DwriteVL did={} tid={} spaceIDs[0]={} spaceIDs[1]={}",
969                                    did, tid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
970                                    (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
971
972                            H5.H5DwriteVL(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, (Object[]) tmpData);
973                        }
974                        else {
975                            log.trace("scalarDatasetCommonIO(): H5Dwrite did={} tid={} spaceIDs[0]={} spaceIDs[1]={}",
976                                    did, tid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
977                                    (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
978
979                            H5.H5Dwrite(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, tmpData);
980                        }
981                    }
982                    catch (Exception ex) {
983                        log.debug("scalarDatasetCommonIO(): write failure: ", ex);
984                        throw new Exception(ex.getMessage());
985                    }
986                    finally {
987                        dsDatatype.close(tid);
988                    }
989                } // IO_TYPE.WRITE
990            }
991            finally {
992                if (HDF5Constants.H5S_ALL != spaceIDs[0]) {
993                    try {
994                        H5.H5Sclose(spaceIDs[0]);
995                    }
996                    catch (Exception ex) {
997                        log.debug("scalarDatasetCommonIO(): H5Sclose(spaceIDs[0] {}) failure: ", spaceIDs[0], ex);
998                    }
999                }
1000
1001                if (HDF5Constants.H5S_ALL != spaceIDs[1]) {
1002                    try {
1003                        H5.H5Sclose(spaceIDs[1]);
1004                    }
1005                    catch (Exception ex) {
1006                        log.debug("scalarDatasetCommonIO(): H5Sclose(spaceIDs[1] {}) failure: ", spaceIDs[1], ex);
1007                    }
1008                }
1009
1010                close(did);
1011            }
1012        }
1013        else
1014            log.debug("scalarDatasetCommonIO(): failed to open dataset");
1015
1016        return theData;
1017    }
1018
1019    /*
1020     * (non-Javadoc)
1021     *
1022     * @see hdf.object.DataFormat#getMetadata()
1023     */
1024    @Override
1025    public List<Attribute> getMetadata() throws HDF5Exception {
1026        return this.getMetadata(fileFormat.getIndexType(null), fileFormat.getIndexOrder(null));
1027    }
1028
1029    /*
1030     * (non-Javadoc)
1031     *
1032     * @see hdf.object.DataFormat#getMetadata(int...)
1033     */
1034    public List<Attribute> getMetadata(int... attrPropList) throws HDF5Exception {
1035        if (!isInited()) {
1036            init();
1037        }
1038
1039        try {
1040            this.linkTargetObjName = H5File.getLinkTargetName(this);
1041        }
1042        catch (Exception ex) {
1043            log.debug("getMetadata(): getLinkTargetName failed: ", ex);
1044        }
1045
1046        if (attributeList != null) {
1047            log.trace("getMetadata(): attributeList != null");
1048            return attributeList;
1049        }
1050
1051        long did = -1;
1052        long pcid = -1;
1053        long paid = -1;
1054        int indxType = fileFormat.getIndexType(null);
1055        int order = fileFormat.getIndexOrder(null);
1056
1057        // load attributes first
1058        if (attrPropList.length > 0) {
1059            indxType = attrPropList[0];
1060            if (attrPropList.length > 1) {
1061                order = attrPropList[1];
1062            }
1063        }
1064
1065        attributeList = H5File.getAttribute(this, indxType, order);
1066
1067        did = open();
1068        if (did >= 0) {
1069            try {
1070                // get the compression and chunk information
1071                pcid = H5.H5Dget_create_plist(did);
1072                paid = H5.H5Dget_access_plist(did);
1073                long storageSize = H5.H5Dget_storage_size(did);
1074                int nfilt = H5.H5Pget_nfilters(pcid);
1075                int layoutType = H5.H5Pget_layout(pcid);
1076
1077                storageLayout.setLength(0);
1078                compression.setLength(0);
1079
1080                if (layoutType == HDF5Constants.H5D_CHUNKED) {
1081                    chunkSize = new long[rank];
1082                    H5.H5Pget_chunk(pcid, rank, chunkSize);
1083                    int n = chunkSize.length;
1084                    storageLayout.append("CHUNKED: ").append(chunkSize[0]);
1085                    for (int i = 1; i < n; i++) {
1086                        storageLayout.append(" X ").append(chunkSize[i]);
1087                    }
1088
1089                    if (nfilt > 0) {
1090                        long nelmts = 1;
1091                        long uncompSize;
1092                        long datumSize = getDatatype().getDatatypeSize();
1093
1094                        if (datumSize < 0) {
1095                            long tmptid = -1;
1096                            try {
1097                                tmptid = H5.H5Dget_type(did);
1098                                datumSize = H5.H5Tget_size(tmptid);
1099                            }
1100                            finally {
1101                                try {
1102                                    H5.H5Tclose(tmptid);
1103                                }
1104                                catch (Exception ex2) {
1105                                    log.debug("getMetadata(): H5Tclose(tmptid {}) failure: ", tmptid, ex2);
1106                                }
1107                            }
1108                        }
1109
1110                        for (int i = 0; i < rank; i++) {
1111                            nelmts *= dims[i];
1112                        }
1113                        uncompSize = nelmts * datumSize;
1114
1115                        /* compression ratio = uncompressed size / compressed size */
1116
1117                        if (storageSize != 0) {
1118                            double ratio = (double) uncompSize / (double) storageSize;
1119                            DecimalFormat df = new DecimalFormat();
1120                            df.setMinimumFractionDigits(3);
1121                            df.setMaximumFractionDigits(3);
1122                            compression.append(df.format(ratio)).append(":1");
1123                        }
1124                    }
1125                }
1126                else if (layoutType == HDF5Constants.H5D_COMPACT) {
1127                    storageLayout.append("COMPACT");
1128                }
1129                else if (layoutType == HDF5Constants.H5D_CONTIGUOUS) {
1130                    storageLayout.append("CONTIGUOUS");
1131                    if (H5.H5Pget_external_count(pcid) > 0)
1132                        storageLayout.append(" - EXTERNAL ");
1133                }
1134                else if (layoutType == HDF5Constants.H5D_VIRTUAL) {
1135                    storageLayout.append("VIRTUAL - ");
1136                    try {
1137                        long vmaps = H5.H5Pget_virtual_count(pcid);
1138                        try {
1139                            int virtView = H5.H5Pget_virtual_view(paid);
1140                            long virtGap = H5.H5Pget_virtual_printf_gap(paid);
1141                            if (virtView == HDF5Constants.H5D_VDS_FIRST_MISSING)
1142                                storageLayout.append("First Missing");
1143                            else
1144                                storageLayout.append("Last Available");
1145                            storageLayout.append("\nGAP : " + virtGap);
1146                        }
1147                        catch (Exception err) {
1148                            log.debug("getMetadata(): vds error: ", err);
1149                            storageLayout.append("ERROR");
1150                        }
1151
1152                        storageLayout.append("\nMAPS : " + vmaps);
1153                        if (vmaps > 0) {
1154                            for (long next = 0; next < vmaps; next++) {
1155                                try {
1156                                    H5.H5Pget_virtual_vspace(pcid, next);
1157                                    H5.H5Pget_virtual_srcspace(pcid, next);
1158                                    String fname = H5.H5Pget_virtual_filename(pcid, next);
1159                                    String dsetname = H5.H5Pget_virtual_dsetname(pcid, next);
1160                                    storageLayout.append("\n").append(fname).append(" : ").append(dsetname);
1161                                }
1162                                catch (Exception err) {
1163                                    log.debug("getMetadata(): vds space[{}] error: ", next, err);
1164                                    storageLayout.append("ERROR");
1165                                }
1166                            }
1167                        }
1168                    }
1169                    catch (Exception err) {
1170                        log.debug("getMetadata(): vds count error: ", err);
1171                        storageLayout.append("ERROR");
1172                    }
1173                }
1174                else {
1175                    chunkSize = null;
1176                    storageLayout.append("NONE");
1177                }
1178
1179                int[] flags = { 0, 0 };
1180                long[] cdNelmts = { 20 };
1181                int[] cdValues = new int[(int) cdNelmts[0]];
1182                String[] cdName = { "", "" };
1183                log.trace("getMetadata(): {} filters in pipeline", nfilt);
1184                int filter = -1;
1185                int[] filterConfig = { 1 };
1186
1187                filters.setLength(0);
1188
1189                if (nfilt == 0) {
1190                    filters.append("NONE");
1191                }
1192                else {
1193                    for (int i = 0, k = 0; i < nfilt; i++) {
1194                        log.trace("getMetadata(): filter[{}]", i);
1195                        if (i > 0) {
1196                            filters.append(", ");
1197                        }
1198                        if (k > 0) {
1199                            compression.append(", ");
1200                        }
1201
1202                        try {
1203                            cdNelmts[0] = 20;
1204                            cdValues = new int[(int) cdNelmts[0]];
1205                            cdValues = new int[(int) cdNelmts[0]];
1206                            filter = H5.H5Pget_filter(pcid, i, flags, cdNelmts, cdValues, 120, cdName, filterConfig);
1207                            log.trace("getMetadata(): filter[{}] is {} has {} elements ", i, cdName[0], cdNelmts[0]);
1208                            for (int j = 0; j < cdNelmts[0]; j++) {
1209                                log.trace("getMetadata(): filter[{}] element {} = {}", i, j, cdValues[j]);
1210                            }
1211                        }
1212                        catch (Exception err) {
1213                            log.debug("getMetadata(): filter[{}] error: ", i, err);
1214                            filters.append("ERROR");
1215                            continue;
1216                        }
1217
1218                        if (filter == HDF5Constants.H5Z_FILTER_NONE) {
1219                            filters.append("NONE");
1220                        }
1221                        else if (filter == HDF5Constants.H5Z_FILTER_DEFLATE) {
1222                            filters.append("GZIP");
1223                            compression.append(COMPRESSION_GZIP_TXT + cdValues[0]);
1224                            k++;
1225                        }
1226                        else if (filter == HDF5Constants.H5Z_FILTER_FLETCHER32) {
1227                            filters.append("Error detection filter");
1228                        }
1229                        else if (filter == HDF5Constants.H5Z_FILTER_SHUFFLE) {
1230                            filters.append("SHUFFLE: Nbytes = ").append(cdValues[0]);
1231                        }
1232                        else if (filter == HDF5Constants.H5Z_FILTER_NBIT) {
1233                            filters.append("NBIT");
1234                        }
1235                        else if (filter == HDF5Constants.H5Z_FILTER_SCALEOFFSET) {
1236                            filters.append("SCALEOFFSET: MIN BITS = ").append(cdValues[0]);
1237                        }
1238                        else if (filter == HDF5Constants.H5Z_FILTER_SZIP) {
1239                            filters.append("SZIP");
1240                            compression.append("SZIP: Pixels per block = ").append(cdValues[1]);
1241                            k++;
1242                            int flag = -1;
1243                            try {
1244                                flag = H5.H5Zget_filter_info(filter);
1245                            }
1246                            catch (Exception ex) {
1247                                log.debug("getMetadata(): H5Zget_filter_info failure: ", ex);
1248                                flag = -1;
1249                            }
1250                            if (flag == HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) {
1251                                compression.append(": H5Z_FILTER_CONFIG_DECODE_ENABLED");
1252                            }
1253                            else if ((flag == HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED)
1254                                    || (flag >= (HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED + HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED))) {
1255                                compression.append(": H5Z_FILTER_CONFIG_ENCODE_ENABLED");
1256                            }
1257                        }
1258                        else {
1259                            filters.append("USERDEFINED ").append(cdName[0]).append("(").append(filter).append("): ");
1260                            for (int j = 0; j < cdNelmts[0]; j++) {
1261                                if (j > 0)
1262                                    filters.append(", ");
1263                                filters.append(cdValues[j]);
1264                            }
1265                            log.debug("getMetadata(): filter[{}] is user defined compression", i);
1266                        }
1267                    } // (int i=0; i<nfilt; i++)
1268                }
1269
1270                if (compression.length() == 0) {
1271                    compression.append("NONE");
1272                }
1273                log.trace("getMetadata(): filter compression={}", compression);
1274                log.trace("getMetadata(): filter information={}", filters);
1275
1276                storage.setLength(0);
1277                storage.append("SIZE: ").append(storageSize);
1278
1279                try {
1280                    int[] at = { 0 };
1281                    H5.H5Pget_alloc_time(pcid, at);
1282                    storage.append(", allocation time: ");
1283                    if (at[0] == HDF5Constants.H5D_ALLOC_TIME_EARLY) {
1284                        storage.append("Early");
1285                    }
1286                    else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_INCR) {
1287                        storage.append("Incremental");
1288                    }
1289                    else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_LATE) {
1290                        storage.append("Late");
1291                    }
1292                    else
1293                        storage.append("Default");
1294                }
1295                catch (Exception ex) {
1296                    log.debug("getMetadata(): Storage allocation time:", ex);
1297                }
1298                log.trace("getMetadata(): storage={}", storage);
1299            }
1300            finally {
1301                try {
1302                    H5.H5Pclose(paid);
1303                }
1304                catch (Exception ex) {
1305                    log.debug("getMetadata(): H5Pclose(paid {}) failure: ", paid, ex);
1306                }
1307                try {
1308                    H5.H5Pclose(pcid);
1309                }
1310                catch (Exception ex) {
1311                    log.debug("getMetadata(): H5Pclose(pcid {}) failure: ", pcid, ex);
1312                }
1313                close(did);
1314            }
1315        }
1316
1317        return attributeList;
1318    }
1319
1320    /*
1321     * (non-Javadoc)
1322     *
1323     * @see hdf.object.DataFormat#writeMetadata(java.lang.Object)
1324     */
1325    @Override
1326    public void writeMetadata(Object info) throws Exception {
1327        // only attribute metadata is supported.
1328        if (!(info instanceof Attribute)) {
1329            log.debug("writeMetadata(): Object not an Attribute");
1330            return;
1331        }
1332
1333        boolean attrExisted = false;
1334        Attribute attr = (Attribute) info;
1335        log.trace("writeMetadata(): {}", attr.getName());
1336
1337        if (attributeList == null) {
1338            this.getMetadata();
1339        }
1340
1341        if (attributeList != null)
1342            attrExisted = attributeList.contains(attr);
1343
1344        getFileFormat().writeAttribute(this, attr, attrExisted);
        // add the new attribute to the attribute list
        if (!attrExisted) {
            if (attributeList == null)
                attributeList = new Vector<>();
            attributeList.add(attr);
            nAttributes = attributeList.size();
        }
1350    }
1351
1352    /*
1353     * (non-Javadoc)
1354     *
1355     * @see hdf.object.DataFormat#removeMetadata(java.lang.Object)
1356     */
1357    @Override
1358    public void removeMetadata(Object info) throws HDF5Exception {
1359        // only attribute metadata is supported.
1360        if (!(info instanceof Attribute)) {
1361            log.debug("removeMetadata(): Object not an Attribute");
1362            return;
1363        }
1364
1365        Attribute attr = (Attribute) info;
1366        log.trace("removeMetadata(): {}", attr.getName());
1367        long did = open();
1368        if (did >= 0) {
1369            try {
1370                H5.H5Adelete(did, attr.getName());
1371                List<Attribute> attrList = getMetadata();
1372                attrList.remove(attr);
1373                nAttributes = attrList.size();
1374            }
1375            finally {
1376                close(did);
1377            }
1378        }
1379    }
1380
1381    /*
1382     * (non-Javadoc)
1383     *
1384     * @see hdf.object.DataFormat#updateMetadata(java.lang.Object)
1385     */
1386    @Override
1387    public void updateMetadata(Object info) throws HDF5Exception {
1388        // only attribute metadata is supported.
1389        if (!(info instanceof Attribute)) {
1390            log.debug("updateMetadata(): Object not an Attribute");
1391            return;
1392        }
1393
1394        nAttributes = -1;
1395    }
1396
1397    /*
1398     * (non-Javadoc)
1399     *
1400     * @see hdf.object.HObject#setName(java.lang.String)
1401     */
1402    @Override
1403    public void setName(String newName) throws Exception {
1404        if (newName == null)
1405            throw new IllegalArgumentException("The new name is NULL");
1406
1407        H5File.renameObject(this, newName);
1408        super.setName(newName);
1409    }
1410
    /**
     * Resets the dataspace selection of this dataset to its default state.
     */
1414    private void resetSelection() {
1415        for (int i = 0; i < rank; i++) {
1416            startDims[i] = 0;
1417            selectedDims[i] = 1;
1418            if (selectedStride != null) {
1419                selectedStride[i] = 1;
1420            }
1421        }
1422
1423        if (interlace == INTERLACE_PIXEL) {
1424            // 24-bit TRUE color image
1425            // [height][width][pixel components]
1426            selectedDims[2] = 3;
1427            selectedDims[0] = dims[0];
1428            selectedDims[1] = dims[1];
1429            selectedIndex[0] = 0; // index for height
1430            selectedIndex[1] = 1; // index for width
1431            selectedIndex[2] = 2; // index for depth
1432        }
1433        else if (interlace == INTERLACE_PLANE) {
1434            // 24-bit TRUE color image
1435            // [pixel components][height][width]
1436            selectedDims[0] = 3;
1437            selectedDims[1] = dims[1];
1438            selectedDims[2] = dims[2];
1439            selectedIndex[0] = 1; // index for height
1440            selectedIndex[1] = 2; // index for width
1441            selectedIndex[2] = 0; // index for depth
1442        }
1443        else if (rank == 1) {
1444            selectedIndex[0] = 0;
1445            selectedDims[0] = dims[0];
1446        }
1447        else if (rank == 2) {
1448            selectedIndex[0] = 0;
1449            selectedIndex[1] = 1;
1450            selectedDims[0] = dims[0];
1451            selectedDims[1] = dims[1];
1452        }
1453        else if (rank > 2) {
            // hdf-java 2.5 version: 3D dataset is arranged in the order of
            // [frame][height][width] by default
            // selectedIndex[1] = rank-1; // width, the fastest dimension
            // selectedIndex[0] = rank-2; // height
            // selectedIndex[2] = rank-3; // frames

            // (5/4/09) Modified the default dimension order. See bug#1379
            // We changed the default order to the following. In most situations,
            // users want to use the natural order of
            // selectedIndex[0] = 0
            // selectedIndex[1] = 1
            // selectedIndex[2] = 2
            // Most NPOESS data uses the order above.
1468
1469            if (isImage) {
1470                // 3D dataset is arranged in the order of [frame][height][width]
1471                selectedIndex[1] = rank - 1; // width, the fastest dimension
1472                selectedIndex[0] = rank - 2; // height
1473                selectedIndex[2] = rank - 3; // frames
1474            }
1475            else {
1476                selectedIndex[0] = 0; // width, the fastest dimension
1477                selectedIndex[1] = 1; // height
1478                selectedIndex[2] = 2; // frames
1479            }
1480
1481            selectedDims[selectedIndex[0]] = dims[selectedIndex[0]];
1482            selectedDims[selectedIndex[1]] = dims[selectedIndex[1]];
1483            selectedDims[selectedIndex[2]] = dims[selectedIndex[2]];
1484        }
1485
1486        isDataLoaded = false;
1487
1488        if ((rank > 1) && (selectedIndex[0] > selectedIndex[1]))
1489            isDefaultImageOrder = false;
1490        else
1491            isDefaultImageOrder = true;
1492    }
1493
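    /**
     * Creates a scalar dataset in a file with/without chunking and compression.
     * <p>
     * This convenience form is equivalent to calling
     * {@link #create(String, Group, Datatype, long[], long[], long[], int, Object, Object)} with a null fill value.
     *
     * @param name
     *            the name of the dataset to create.
     * @param pgroup
     *            parent group where the new dataset is created.
     * @param type
     *            the datatype of the dataset.
     * @param dims
     *            the dimension size of the dataset.
     * @param maxdims
     *            the max dimension size of the dataset. maxdims is set to dims if maxdims = null.
     * @param chunks
     *            the chunk size of the dataset. No chunking if chunks = null.
     * @param gzip
     *            GZIP compression level (1 to 9). No compression if gzip&lt;=0.
     * @param data
     *            the array of data values.
     *
     * @return the new scalar dataset if successful; otherwise returns null.
     *
     * @throws Exception if there is a failure.
     */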
1494    public static Dataset create(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims,
1495            long[] chunks, int gzip, Object data) throws Exception {
1496        return create(name, pgroup, type, dims, maxdims, chunks, gzip, null, data);
1497    }
1498
1499    /**
1500     * Creates a scalar dataset in a file with/without chunking and compression.
1501     * <p>
1502     * The following example shows how to create a string dataset using this function.
1503     *
1504     * <pre>
1505     * H5File file = new H5File(&quot;test.h5&quot;, H5File.CREATE);
1506     * int max_str_len = 120;
1507     * Datatype strType = new H5Datatype(Datatype.CLASS_STRING, max_str_len, Datatype.NATIVE, Datatype.NATIVE);
1508     * int size = 10000;
1509     * long dims[] = { size };
1510     * long chunks[] = { 1000 };
1511     * int gzip = 9;
1512     * String strs[] = new String[size];
1513     *
1514     * for (int i = 0; i &lt; size; i++)
1515     *     strs[i] = String.valueOf(i);
1516     *
1517     * file.open();
1518     * file.createScalarDS(&quot;/1D scalar strings&quot;, null, strType, dims, null, chunks, gzip, strs);
1519     *
1520     * try {
1521     *     file.close();
1522     * }
1523     * catch (Exception ex) {
1524     * }
1525     * </pre>
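     *
     * The following sketch shows one way to create a chunked, GZIP-compressed 2D float dataset with a default fill
     * value. The variable <code>pgroup</code> is an assumption here: it stands for the parent group (for example,
     * the root group of an open file) where the dataset should be created.
     *
     * <pre>
     * Datatype fltType = new H5Datatype(Datatype.CLASS_FLOAT, 4, Datatype.NATIVE, Datatype.NATIVE);
     * long dims2[] = { 100, 50 };
     * long chunks2[] = { 10, 50 };
     * float fill[] = { -999.0f };
     *
     * Dataset d = H5ScalarDS.create(&quot;/2D floats&quot;, pgroup, fltType, dims2, null, chunks2, 6, fill, null);
     * </pre>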
1526     *
1527     * @param name
1528     *            the name of the dataset to create.
1529     * @param pgroup
1530     *            parent group where the new dataset is created.
1531     * @param type
1532     *            the datatype of the dataset.
1533     * @param dims
1534     *            the dimension size of the dataset.
1535     * @param maxdims
1536     *            the max dimension size of the dataset. maxdims is set to dims if maxdims = null.
1537     * @param chunks
     *            the chunk size of the dataset. No chunking if chunks = null.
1539     * @param gzip
1540     *            GZIP compression level (1 to 9). No compression if gzip&lt;=0.
1541     * @param fillValue
1542     *            the default data value.
1543     * @param data
1544     *            the array of data values.
1545     *
1546     * @return the new scalar dataset if successful; otherwise returns null.
1547     *
1548     * @throws Exception if there is a failure.
1549     */
1550    public static Dataset create(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims,
1551            long[] chunks, int gzip, Object fillValue, Object data) throws Exception {
1552        H5ScalarDS dataset = null;
1553        String fullPath = null;
1554        long did = -1;
1555        long plist = -1;
1556        long sid = -1;
1557        long tid = -1;
1558
1559        if ((pgroup == null) || (name == null) || (dims == null) || ((gzip > 0) && (chunks == null))) {
1560            log.debug("create(): one or more parameters are null");
1561            return null;
1562        }
1563
1564        H5File file = (H5File) pgroup.getFileFormat();
1565        if (file == null) {
1566            log.debug("create(): parent group FileFormat is null");
1567            return null;
1568        }
1569
1570        String path = HObject.SEPARATOR;
1571        if (!pgroup.isRoot()) {
1572            path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR;
1573            if (name.endsWith("/")) {
1574                name = name.substring(0, name.length() - 1);
1575            }
1576            int idx = name.lastIndexOf('/');
1577            if (idx >= 0) {
1578                name = name.substring(idx + 1);
1579            }
1580        }
1581
1582        fullPath = path + name;
1583        log.trace("create(): fullPath={}", fullPath);
1584
1585        // setup chunking and compression
        boolean isExtendable = false;
1587        if (maxdims != null) {
1588            for (int i = 0; i < maxdims.length; i++) {
1589                if (maxdims[i] == 0) {
1590                    maxdims[i] = dims[i];
1591                }
1592                else if (maxdims[i] < 0) {
1593                    maxdims[i] = HDF5Constants.H5S_UNLIMITED;
1594                }
1595
                if (maxdims[i] != dims[i]) {
                    isExtendable = true;
                }
1599            }
1600        }
1601
        // HDF5 requires chunking in order to define extendable datasets.
        // Chunking makes it possible to extend datasets efficiently, without
        // having to reorganize storage excessively. If no chunk size is given,
        // use a default of min(dim, 64) in each dimension, which generally
        // performs well.
        if ((chunks == null) && isExtendable) {
1607            chunks = new long[dims.length];
1608            for (int i = 0; i < dims.length; i++)
1609                chunks[i] = Math.min(dims[i], 64);
1610        }
1611
1612        // prepare the dataspace and datatype
1613        int rank = dims.length;
1614        log.trace("create(): rank={}", rank);
1615
1616        if ((tid = type.createNative()) >= 0) {
1617            log.trace("create(): createNative={}", tid);
1618            try {
1619                sid = H5.H5Screate_simple(rank, dims, maxdims);
1620                log.trace("create(): H5Screate_simple={}", sid);
1621
1622                // figure out creation properties
1623                plist = HDF5Constants.H5P_DEFAULT;
1624
1625                byte[] valFill = null;
1626                try {
1627                    valFill = parseFillValue(type, fillValue);
1628                }
1629                catch (Exception ex) {
1630                    log.debug("create(): parse fill value: ", ex);
1631                }
1632                log.trace("create(): parseFillValue={}", valFill);
1633
1634                if (chunks != null || valFill != null) {
1635                    plist = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
1636
1637                    if (chunks != null) {
1638                        H5.H5Pset_layout(plist, HDF5Constants.H5D_CHUNKED);
1639                        H5.H5Pset_chunk(plist, rank, chunks);
1640
1641                        // compression requires chunking
1642                        if (gzip > 0) {
1643                            H5.H5Pset_deflate(plist, gzip);
1644                        }
1645                    }
1646
1647                    if (valFill != null) {
1648                        H5.H5Pset_fill_value(plist, tid, valFill);
1649                    }
1650                }
1651
1652                long fid = file.getFID();
1653
1654                log.trace("create(): create dataset fid={}", fid);
1655                did = H5.H5Dcreate(fid, fullPath, tid, sid, HDF5Constants.H5P_DEFAULT, plist, HDF5Constants.H5P_DEFAULT);
1656                log.trace("create(): create dataset did={}", did);
1657                dataset = new H5ScalarDS(file, name, path);
1658            }
1659            finally {
1660                try {
1661                    H5.H5Pclose(plist);
1662                }
1663                catch (HDF5Exception ex) {
1664                    log.debug("create(): H5Pclose(plist {}) failure: ", plist, ex);
1665                }
1666                try {
1667                    H5.H5Sclose(sid);
1668                }
1669                catch (HDF5Exception ex) {
1670                    log.debug("create(): H5Sclose(sid {}) failure: ", sid, ex);
1671                }
1672                try {
1673                    H5.H5Tclose(tid);
1674                }
1675                catch (HDF5Exception ex) {
1676                    log.debug("create(): H5Tclose(tid {}) failure: ", tid, ex);
1677                }
1678                try {
1679                    H5.H5Dclose(did);
1680                }
1681                catch (HDF5Exception ex) {
1682                    log.debug("create(): H5Dclose(did {}) failure: ", did, ex);
1683                }
1684            }
1685        }
1686
1687        if (dataset != null) {
1688            pgroup.addToMemberList(dataset);
1689            if (data != null) {
1690                dataset.init();
1691                long[] selected = dataset.getSelectedDims();
1692                for (int i = 0; i < rank; i++) {
1693                    selected[i] = dims[i];
1694                }
1695                dataset.write(data);
1696            }
1697        }
1698
1699        return dataset;
1700    }
1701
    /**
     * Checks the CF (Climate and Forecast) convention attributes _FillValue, valid_min, valid_max and valid_range,
     * and uses them to set the filtered image values and the image data range.
     */
1703    private void checkCFconvention(long oid) throws Exception {
1704        Object avalue = getAttrValue(oid, "_FillValue");
1705
1706        if (avalue != null) {
1707            int n = Array.getLength(avalue);
1708            for (int i = 0; i < n; i++)
1709                addFilteredImageValue((Number) Array.get(avalue, i));
1710        }
1711
1712        if (imageDataRange == null || imageDataRange[1] <= imageDataRange[0]) {
1713            double x0 = 0;
1714            double x1 = 0;
1715            avalue = getAttrValue(oid, "valid_range");
1716            if (avalue != null) {
1717                try {
1718                    x0 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 0).toString());
1719                    x1 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 1).toString());
1720                    imageDataRange = new double[2];
1721                    imageDataRange[0] = x0;
1722                    imageDataRange[1] = x1;
1723                    return;
1724                }
1725                catch (Exception ex) {
1726                    log.debug("checkCFconvention(): valid_range: ", ex);
1727                }
1728            }
1729
1730            avalue = getAttrValue(oid, "valid_min");
1731            if (avalue != null) {
1732                try {
1733                    x0 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 0).toString());
1734                }
1735                catch (Exception ex) {
1736                    log.debug("checkCFconvention(): valid_min: ", ex);
1737                }
1738                avalue = getAttrValue(oid, "valid_max");
1739                if (avalue != null) {
1740                    try {
1741                        x1 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 0).toString());
1742                        imageDataRange = new double[2];
1743                        imageDataRange[0] = x0;
1744                        imageDataRange[1] = x1;
1745                    }
1746                    catch (Exception ex) {
1747                        log.debug("checkCFconvention(): valid_max:", ex);
1748                    }
1749                }
1750            }
1751        } // (imageDataRange==null || imageDataRange[1]<=imageDataRange[0])
1752    }
1753
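    /**
     * Reads the value of the attribute with the given name attached to the object identified by oid.
     *
     * @param oid
     *            the identifier of the object the attribute is attached to.
     * @param aname
     *            the name of the attribute.
     *
     * @return the attribute value as an array, or null if the attribute does not exist or cannot be read.
     */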
1754    private Object getAttrValue(long oid, String aname) {
1755        log.trace("getAttrValue(): start: name={}", aname);
1756
1757        long aid = -1;
1758        long atid = -1;
1759        long asid = -1;
1760        Object avalue = null;
1761
1762        try {
1763            // try to find attribute name
1764            if(H5.H5Aexists_by_name(oid, ".", aname, HDF5Constants.H5P_DEFAULT))
1765                aid = H5.H5Aopen_by_name(oid, ".", aname, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
1766        }
1767        catch (HDF5LibraryException ex5) {
1768            log.debug("getAttrValue(): Failed to find attribute {} : Expected", aname);
1769        }
1770        catch (Exception ex) {
1771            log.debug("getAttrValue(): try to find attribute {}:", aname, ex);
1772        }
1773        if (aid > 0) {
1774            try {
1775                atid = H5.H5Aget_type(aid);
1776                long tmptid = atid;
1777                atid = H5.H5Tget_native_type(tmptid);
1778                try {
1779                    H5.H5Tclose(tmptid);
1780                }
1781                catch (Exception ex) {
1782                    log.debug("getAttrValue(): H5Tclose(tmptid {}) failure: ", tmptid, ex);
1783                }
1784                H5Datatype dsDatatype = new H5Datatype(getFileFormat(), atid);
1785
1786                asid = H5.H5Aget_space(aid);
1787                long adims[] = null;
1788
1789                int arank = H5.H5Sget_simple_extent_ndims(asid);
1790                if (arank > 0) {
1791                    adims = new long[arank];
1792                    H5.H5Sget_simple_extent_dims(asid, adims, null);
1793                }
1794                log.trace("getAttrValue(): adims={}", adims);
1795
1796                // retrieve the attribute value
1797                long lsize = 1;
1798                if (adims != null) {
1799                    for (int j = 0; j < adims.length; j++) {
1800                        lsize *= adims[j];
1801                    }
1802                }
1803                log.trace("getAttrValue(): lsize={}", lsize);
1804
1805                if (lsize < Integer.MIN_VALUE || lsize > Integer.MAX_VALUE) throw new Exception("Invalid int size");
1806
1807                try {
1808                    avalue = H5Datatype.allocateArray(dsDatatype, (int) lsize);
1809                }
1810                catch (OutOfMemoryError e) {
1811                    log.debug("getAttrValue(): out of memory: ", e);
1812                    avalue = null;
1813                }
1814
1815                if (avalue != null) {
1816                    log.trace("getAttrValue(): read attribute id {} of size={}", atid, lsize);
1817                    H5.H5Aread(aid, atid, avalue);
1818
1819                    if (dsDatatype.isUnsigned()) {
1820                        log.trace("getAttrValue(): id {} is unsigned", atid);
1821                        avalue = convertFromUnsignedC(avalue, null);
1822                    }
1823                }
1824            }
1825            catch (Exception ex) {
1826                log.debug("getAttrValue(): try to get value for attribute {}: ", aname, ex);
1827            }
1828            finally {
1829                try {
1830                    H5.H5Tclose(atid);
1831                }
1832                catch (HDF5Exception ex) {
1833                    log.debug("getAttrValue(): H5Tclose(atid {}) failure: ", atid, ex);
1834                }
1835                try {
1836                    H5.H5Sclose(asid);
1837                }
1838                catch (HDF5Exception ex) {
1839                    log.debug("getAttrValue(): H5Sclose(asid {}) failure: ", asid, ex);
1840                }
1841                try {
1842                    H5.H5Aclose(aid);
1843                }
1844                catch (HDF5Exception ex) {
1845                    log.debug("getAttrValue(): H5Aclose(aid {}) failure: ", aid, ex);
1846                }
1847            }
1848        } // (aid > 0)
1849
1850        return avalue;
1851    }
1852
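    /**
     * Checks whether the object identified by objID has a string attribute with the given name whose value equals
     * the given value, ignoring case.
     */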
1853    private boolean isStringAttributeOf(long objID, String name, String value) {
1854        boolean retValue = false;
1855        long aid = -1;
1856        long atid = -1;
1857
1858        try {
1859            if (H5.H5Aexists_by_name(objID, ".", name, HDF5Constants.H5P_DEFAULT)) {
1860                aid = H5.H5Aopen_by_name(objID, ".", name, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
1861                atid = H5.H5Aget_type(aid);
1862                int size = (int)H5.H5Tget_size(atid);
1863                byte[] attrValue = new byte[size];
1864                H5.H5Aread(aid, atid, attrValue);
1865                String strValue = new String(attrValue).trim();
1866                retValue = strValue.equalsIgnoreCase(value);
1867            }
1868        }
        catch (Exception ex) {
            log.debug("isStringAttributeOf(): failed to read attribute {}: ", name, ex);
        }
1872        finally {
1873            try {
1874                H5.H5Tclose(atid);
1875            }
1876            catch (HDF5Exception ex) {
1877                log.debug("isStringAttributeOf(): H5Tclose(atid {}) failure: ", atid, ex);
1878            }
1879            try {
1880                H5.H5Aclose(aid);
1881            }
1882            catch (HDF5Exception ex) {
1883                log.debug("isStringAttributeOf(): H5Aclose(aid {}) failure: ", aid, ex);
1884            }
1885        }
1886
1887        return retValue;
1888    }
1889
1890    /*
1891     * (non-Javadoc)
1892     *
1893     * @see hdf.object.Dataset#copy(hdf.object.Group, java.lang.String, long[], java.lang.Object)
1894     */
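    // A usage sketch (srcDset and destGroup are assumptions, not defined in this file):
    //   Dataset copied = srcDset.copy(destGroup, "copyOfDset", srcDset.getDims(), null);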
1895    @Override
1896    public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff) throws Exception {
1897        // must give a location to copy
1898        if (pgroup == null) {
1899            log.debug("copy(): Parent group is null");
1900            return null;
1901        }
1902
1903        Dataset dataset = null;
1904        long srcdid = -1;
1905        long dstdid = -1;
1906        long plist = -1;
1907        long tid = -1;
1908        long sid = -1;
1909        String dname = null;
1910        String path = null;
1911
1912        if (pgroup.isRoot()) {
1913            path = HObject.SEPARATOR;
1914        }
1915        else {
1916            path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR;
1917        }
1918        dname = path + dstName;
1919
1920        srcdid = open();
1921        if (srcdid >= 0) {
1922            try {
1923                tid = H5.H5Dget_type(srcdid);
1924                sid = H5.H5Screate_simple(dims.length, dims, null);
1925                plist = H5.H5Dget_create_plist(srcdid);
1926
1927                long[] chunks = new long[dims.length];
1928                boolean setChunkFlag = false;
1929                try {
1930                    H5.H5Pget_chunk(plist, dims.length, chunks);
1931                    for (int i = 0; i < dims.length; i++) {
1932                        if (dims[i] < chunks[i]) {
1933                            setChunkFlag = true;
1934                            if (dims[i] == 1)
1935                                chunks[i] = 1;
1936                            else
1937                                chunks[i] = dims[i] / 2;
1938                        }
1939                    }
1940                }
1941                catch (Exception ex) {
1942                    log.debug("copy(): chunk: ", ex);
1943                }
1944
1945                if (setChunkFlag)
1946                    H5.H5Pset_chunk(plist, dims.length, chunks);
1947
1948                try {
1949                    dstdid = H5.H5Dcreate(pgroup.getFID(), dname, tid, sid, HDF5Constants.H5P_DEFAULT, plist,
1950                            HDF5Constants.H5P_DEFAULT);
1951                }
1952                catch (Exception e) {
1953                    log.debug("copy(): H5Dcreate: ", e);
1954                }
1955                finally {
1956                    try {
1957                        H5.H5Dclose(dstdid);
1958                    }
1959                    catch (Exception ex2) {
1960                        log.debug("copy(): H5Dclose(dstdid {}) failure: ", dstdid, ex2);
1961                    }
1962                }
1963
1964                dataset = new H5ScalarDS(pgroup.getFileFormat(), dstName, path);
1965                if (buff != null) {
1966                    dataset.init();
1967                    dataset.write(buff);
1968                }
1969
1970                dstdid = dataset.open();
1971                if (dstdid >= 0) {
1972                    try {
1973                        H5File.copyAttributes(srcdid, dstdid);
1974                    }
1975                    finally {
1976                        try {
1977                            H5.H5Dclose(dstdid);
1978                        }
1979                        catch (Exception ex) {
1980                            log.debug("copy(): H5Dclose(dstdid {}) failure: ", dstdid, ex);
1981                        }
1982                    }
1983                }
1984            }
1985            finally {
1986                try {
1987                    H5.H5Pclose(plist);
1988                }
1989                catch (Exception ex) {
1990                    log.debug("copy(): H5Pclose(plist {}) failure: ", plist, ex);
1991                }
1992                try {
1993                    H5.H5Sclose(sid);
1994                }
1995                catch (Exception ex) {
1996                    log.debug("copy(): H5Sclose(sid {}) failure: ", sid, ex);
1997                }
1998                try {
1999                    H5.H5Tclose(tid);
2000                }
2001                catch (Exception ex) {
2002                    log.debug("copy(): H5Tclose(tid {}) failure: ", tid, ex);
2003                }
2004                try {
2005                    H5.H5Dclose(srcdid);
2006                }
2007                catch (Exception ex) {
2008                    log.debug("copy(): H5Dclose(srcdid {}) failure: ", srcdid, ex);
2009                }
2010            }
2011        }
2012
        if (dataset != null) {
            pgroup.addToMemberList(dataset);
            ((ScalarDS) dataset).setIsImage(isImage);
        }
2017
2018        return dataset;
2019    }
2020
2021    /*
2022     * (non-Javadoc)
2023     *
2024     * @see hdf.object.ScalarDS#getPalette()
2025     */
2026    @Override
2027    public byte[][] getPalette() {
2028        if (palette == null) {
2029            palette = readPalette(0);
2030        }
2031
2032        return palette;
2033    }
2034
2035    /*
2036     * (non-Javadoc)
2037     *
2038     * @see hdf.object.ScalarDS#getPaletteName(int)
2039     */
2040    @Override
2041    public String getPaletteName(int idx) {
2042        byte[] refs = getPaletteRefs();
2043        long did = -1;
2044        long palID = -1;
2045        String paletteName = null;
2046
2047        if (refs == null) {
2048            log.debug("getPaletteName(): refs is null");
2049            return null;
2050        }
2051
2052        byte[] refBuf = new byte[8];
2053
2054        try {
2055            System.arraycopy(refs, idx * 8, refBuf, 0, 8);
2056        }
2057        catch (Exception err) {
2058            log.debug("getPaletteName(): arraycopy failure: ", err);
2059            return null;
2060        }
2061
2062        did = open();
2063        if (did >= 0) {
2064            try {
2065                palID = H5.H5Rdereference(getFID(), HDF5Constants.H5P_DEFAULT, HDF5Constants.H5R_OBJECT, refBuf);
2066                paletteName = H5.H5Iget_name(palID);
2067            }
            catch (Exception ex) {
                log.debug("getPaletteName(): H5Rdereference failure: ", ex);
            }
2071            finally {
2072                close(palID);
2073                close(did);
2074            }
2075        }
2076
2077        return paletteName;
2078    }
2079
2080    /*
2081     * (non-Javadoc)
2082     *
2083     * @see hdf.object.ScalarDS#readPalette(int)
2084     */
2085    @Override
2086    public byte[][] readPalette(int idx) {
2087        byte[][] thePalette = null;
2088        byte[] refs = getPaletteRefs();
2089        long did = -1;
2090        long palID = -1;
2091        long tid = -1;
2092
2093        if (refs == null) {
2094            log.debug("readPalette(): refs is null");
2095            return null;
2096        }
2097
2098        byte[] p = null;
2099        byte[] refBuf = new byte[8];
2100
2101        try {
2102            System.arraycopy(refs, idx * 8, refBuf, 0, 8);
2103        }
2104        catch (Exception err) {
2105            log.debug("readPalette(): arraycopy failure: ", err);
2106            return null;
2107        }
2108
2109        did = open();
2110        if (did >= 0) {
2111            try {
2112                palID = H5.H5Rdereference(getFID(), HDF5Constants.H5P_DEFAULT, HDF5Constants.H5R_OBJECT, refBuf);
2113                log.trace("readPalette(): H5Rdereference: {}", palID);
2114                tid = H5.H5Dget_type(palID);
2115
2116                // support only 3*256 byte palette data
2117                if (H5.H5Dget_storage_size(palID) <= 768) {
2118                    p = new byte[3 * 256];
2119                    H5.H5Dread(palID, tid, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, p);
2120                }
2121            }
2122            catch (HDF5Exception ex) {
2123                log.debug("readPalette(): failure: ", ex);
2124                p = null;
2125            }
2126            finally {
2127                try {
2128                    H5.H5Tclose(tid);
2129                }
2130                catch (HDF5Exception ex2) {
2131                    log.debug("readPalette(): H5Tclose(tid {}) failure: ", tid, ex2);
2132                }
2133                close(palID);
2134                close(did);
2135            }
2136        }
2137
2138        if (p != null) {
2139            thePalette = new byte[3][256];
2140            for (int i = 0; i < 256; i++) {
2141                thePalette[0][i] = p[i * 3];
2142                thePalette[1][i] = p[i * 3 + 1];
2143                thePalette[2][i] = p[i * 3 + 2];
2144            }
2145        }
2146
2147        return thePalette;
2148    }
2149
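    /**
     * Converts a fill value into a byte array laid out for the given datatype, suitable for passing to
     * H5Pset_fill_value. String values are converted to their bytes; numeric values are parsed from the first
     * element of the given array (or from the string form) and packed according to the datatype class and size.
     *
     * @param type
     *            the datatype of the dataset the fill value is for.
     * @param fillValue
     *            the fill value, given as a String or as an array whose first element is used.
     *
     * @return the fill value as a byte array, or null if it cannot be converted.
     */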
2150    private static byte[] parseFillValue(Datatype type, Object fillValue) throws Exception {
2151        byte[] data = null;
2152
2153        if (type == null || fillValue == null) {
2154            log.debug("parseFillValue(): datatype or fill value is null");
2155            return null;
2156        }
2157
2158        int datatypeClass = type.getDatatypeClass();
2159        int datatypeSize = (int)type.getDatatypeSize();
2160
2161        double valDbl = 0;
2162        String valStr = null;
2163
        if (fillValue instanceof String) {
            valStr = (String) fillValue;
        }
        else if (fillValue.getClass().isArray()) {
            valStr = Array.get(fillValue, 0).toString();
        }
        else {
            // accept a single scalar object (e.g. a Number) by using its string form
            valStr = fillValue.toString();
        }
2170
2171        if (!type.isString()) {
2172            try {
2173                valDbl = Double.parseDouble(valStr);
2174            }
2175            catch (NumberFormatException ex) {
2176                log.debug("parseFillValue(): parse error: ", ex);
2177                return null;
2178            }
2179        }
2180
2181        try {
2182            switch (datatypeClass) {
2183                case Datatype.CLASS_INTEGER:
2184                case Datatype.CLASS_ENUM:
2185                case Datatype.CLASS_CHAR:
2186                    log.trace("parseFillValue(): class CLASS_INT-ENUM-CHAR");
2187                    if (datatypeSize == 1) {
2188                        data = new byte[] { (byte) valDbl };
2189                    }
2190                    else if (datatypeSize == 2) {
2191                        data = HDFNativeData.shortToByte((short) valDbl);
2192                    }
2193                    else if (datatypeSize == 8) {
2194                        data = HDFNativeData.longToByte((long) valDbl);
2195                    }
2196                    else {
2197                        data = HDFNativeData.intToByte((int) valDbl);
2198                    }
2199                    break;
2200                case Datatype.CLASS_FLOAT:
2201                    log.trace("parseFillValue(): class CLASS_FLOAT");
2202                    if (datatypeSize == 8) {
2203                        data = HDFNativeData.doubleToByte(valDbl);
2204                    }
2205                    else {
2206                        data = HDFNativeData.floatToByte((float) valDbl);
2207                    }
2208                    break;
2209                case Datatype.CLASS_STRING:
2210                    log.trace("parseFillValue(): class CLASS_STRING");
2211                    if (valStr != null)
2212                        data = valStr.getBytes();
2213                    break;
2214                case Datatype.CLASS_REFERENCE:
2215                    log.trace("parseFillValue(): class CLASS_REFERENCE");
2216                    data = HDFNativeData.longToByte((long) valDbl);
2217                    break;
2218                default:
2219                    log.debug("parseFillValue(): datatypeClass unknown");
2220                    break;
2221            } // (datatypeClass)
2222        }
2223        catch (Exception ex) {
2224            log.debug("parseFillValue(): failure: ", ex);
2225            data = null;
2226        }
2227
2228        return data;
2229    }
2230
2231    /*
2232     * (non-Javadoc)
2233     *
2234     * @see hdf.object.ScalarDS#getPaletteRefs()
2235     */
2236    @Override
2237    public byte[] getPaletteRefs() {
2238        if (!isInited())
2239            init(); // init will be called to get refs
2240
2241        return paletteRefs;
2242    }
2243
    /**
     * Reads the references of palettes into a byte array. Each reference requires eight bytes of storage, so the
     * array length is 8*numberOfPalettes.
     */
2248    private byte[] getPaletteRefs(long did) {
2249        long aid = -1;
2250        long sid = -1;
2251        long atype = -1;
2252        int size = 0;
2253        int rank = 0;
2254        byte[] refbuf = null;
2255
2256        try {
2257            if(H5.H5Aexists_by_name(did, ".", "PALETTE", HDF5Constants.H5P_DEFAULT)) {
2258                aid = H5.H5Aopen_by_name(did, ".", "PALETTE", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
2259                sid = H5.H5Aget_space(aid);
2260                rank = H5.H5Sget_simple_extent_ndims(sid);
2261                size = 1;
2262                if (rank > 0) {
2263                    long[] dims = new long[rank];
2264                    H5.H5Sget_simple_extent_dims(sid, dims, null);
2265                    log.trace("getPaletteRefs(): rank={}, dims={}", rank, dims);
2266                    for (int i = 0; i < rank; i++) {
2267                        size *= (int) dims[i];
2268                    }
2269                }
2270
2271                if ((size * 8) < Integer.MIN_VALUE || (size * 8) > Integer.MAX_VALUE) throw new HDF5Exception("Invalid int size");
2272
2273                refbuf = new byte[size * 8];
2274                atype = H5.H5Aget_type(aid);
2275
2276                H5.H5Aread(aid, atype, refbuf);
2277            }
2278        }
2279        catch (HDF5Exception ex) {
2280            log.debug("getPaletteRefs(): Palette attribute search failed: Expected", ex);
2281            refbuf = null;
2282        }
2283        finally {
2284            try {
2285                H5.H5Tclose(atype);
2286            }
2287            catch (HDF5Exception ex2) {
2288                log.debug("getPaletteRefs(): H5Tclose(atype {}) failure: ", atype, ex2);
2289            }
2290            try {
2291                H5.H5Sclose(sid);
2292            }
2293            catch (HDF5Exception ex2) {
2294                log.debug("getPaletteRefs(): H5Sclose(sid {}) failure: ", sid, ex2);
2295            }
2296            try {
2297                H5.H5Aclose(aid);
2298            }
2299            catch (HDF5Exception ex2) {
2300                log.debug("getPaletteRefs(): H5Aclose(aid {}) failure: ", aid, ex2);
2301            }
2302        }
2303
2304        return refbuf;
2305    }
2306
2307    /**
     * Extends the dataset to the sizes given in newDims by calling H5Dset_extent, and verifies that the new
     * dimension sizes took effect. The dimensionality of newDims is the same as that of the dataspace of the
     * dataset being changed.
     *
     * This function can be applied to the following datasets: 1) any dataset with unlimited dimensions, and 2) a
     * dataset with fixed dimensions whose current dimension sizes are less than the maximum sizes set with maxdims
     * (see H5Screate_simple).
2314     *
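     * The following sketch (assuming a 1D dataset <code>dset</code> that was created with an unlimited maximum
     * dimension) grows the dataset to 2000 elements:
     *
     * <pre>
     * long[] newSize = { 2000 };
     * dset.extend(newSize);
     * </pre>
     *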
2315     * @param newDims the dimension target size
2316     *
2317     * @throws HDF5Exception
2318     *             If there is an error at the HDF5 library level.
2319     */
2320    public void extend(long[] newDims) throws HDF5Exception {
2321        long did = -1;
2322        long sid = -1;
2323
2324        did = open();
2325        if (did >= 0) {
2326            try {
2327                H5.H5Dset_extent(did, newDims);
2328                H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_GLOBAL);
2329                sid = H5.H5Dget_space(did);
2330                long[] checkDims = new long[rank];
2331                H5.H5Sget_simple_extent_dims(sid, checkDims, null);
2332                log.trace("extend(): rank={}, checkDims={}", rank, checkDims);
2333                for (int i = 0; i < rank; i++) {
2334                    if (checkDims[i] != newDims[i]) {
2335                        log.debug("extend(): error extending dataset");
2336                        throw new HDF5Exception("error extending dataset " + getName());
2337                    }
2338                }
2339                dims = checkDims;
2340            }
2341            catch (Exception e) {
2342                log.debug("extend(): failure: ", e);
2343                throw new HDF5Exception(e.getMessage());
2344            }
2345            finally {
2346                if (sid > 0)
2347                    H5.H5Sclose(sid);
2348
2349                close(did);
2350            }
2351        }
2352    }
2353
2354    /*
2355     * (non-Javadoc)
2356     *
2357     * @see hdf.object.Dataset#isVirtual()
2358     */
2359    @Override
2360    public boolean isVirtual() {
2361        return isVirtual;
2362    }
2363
2364    /*
2365     * (non-Javadoc)
2366     *
2367     * @see hdf.object.Dataset#getVirtualFilename(int)
2368     */
2369    @Override
2370    public String getVirtualFilename(int index) {
2371        if(isVirtual)
2372            return virtualNameList.get(index);
2373        else
2374            return null;
2375    }
2376
2377    /*
2378     * (non-Javadoc)
2379     *
2380     * @see hdf.object.Dataset#getVirtualMaps()
2381     */
2382    @Override
2383    public int getVirtualMaps() {
2384        if(isVirtual)
2385            return virtualNameList.size();
2386        else
2387            return -1;
2388    }
2389}