001/*****************************************************************************
002 * Copyright by The HDF Group.                                               *
003 * Copyright by the Board of Trustees of the University of Illinois.         *
004 * All rights reserved.                                                      *
005 *                                                                           *
006 * This file is part of the HDF Java Products distribution.                  *
007 * The full copyright notice, including terms governing use, modification,   *
008 * and redistribution, is contained in the files COPYING and Copyright.html. *
009 * COPYING can be found at the root of the source code distribution tree.    *
010 * Or, see https://support.hdfgroup.org/products/licenses.html               *
011 * If you do not have access to either file, you may request a copy from     *
012 * help@hdfgroup.org.                                                        *
013 ****************************************************************************/
014
015package hdf.object.h5;
016
017import java.lang.reflect.Array;
018import java.math.BigDecimal;
019import java.text.DecimalFormat;
020import java.util.List;
021import java.util.Vector;
022
023import hdf.hdf5lib.H5;
024import hdf.hdf5lib.HDF5Constants;
025import hdf.hdf5lib.HDFNativeData;
026import hdf.hdf5lib.exceptions.HDF5DataFiltersException;
027import hdf.hdf5lib.exceptions.HDF5Exception;
028import hdf.hdf5lib.exceptions.HDF5LibraryException;
029import hdf.hdf5lib.structs.H5O_info_t;
030import hdf.object.Attribute;
031import hdf.object.Dataset;
032import hdf.object.Datatype;
033import hdf.object.FileFormat;
034import hdf.object.Group;
035import hdf.object.HObject;
036import hdf.object.ScalarDS;
037
/**
 * H5ScalarDS describes a multi-dimensional array of HDF5 scalar or atomic datatypes, such as byte, int, short, long,
 * float, double and string, and the operations performed on the scalar dataset.
 * <p>
 * The library predefines a modest number of datatypes. For details, see
 * <a href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/HDF5_Users_Guide/Datatypes/HDF5_Datatypes.htm">HDF5 Datatypes</a>.
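 * <p>
 * A minimal usage sketch is shown below; the file name and dataset path are
 * illustrative only and are not part of this API:
 *
 * <pre>
 * // open an existing HDF5 file read-only and retrieve a scalar dataset
 * H5File file = new H5File("hdf5_test.h5", FileFormat.READ);
 * file.open();
 * H5ScalarDS dset = (H5ScalarDS) file.get("/arrays/ints");
 * dset.init();                  // load datatype and dataspace information
 * Object data = dset.getData(); // read the current selection into memory
 * file.close();
 * </pre>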
 *
 * @version 1.1 9/4/2007
 * @author Peter X. Cao
 */
048public class H5ScalarDS extends ScalarDS {
049    private static final long serialVersionUID = 2887517608230611642L;
050
051    private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5ScalarDS.class);
052
    /**
     * The list of attributes of this data object. Members of the list are instances of Attribute.
     */
    private List<Attribute> attributeList;
057
058    private int nAttributes = -1;
059
060    private H5O_info_t objInfo;
061
    /**
     * The byte array containing references of palettes. Each reference requires eight bytes of storage. Therefore, the
     * array length is 8*numberOfPalettes.
     */
    private byte[] paletteRefs;
067
068    /** flag to indicate if the dataset is an external dataset */
069    private boolean isExternal = false;
070
    /** flag to indicate if the dataset is a virtual dataset */
    private boolean isVirtual = false;
073    private List<String> virtualNameList;
074
    /**
     * flag to indicate if the datatype in the file is the same as the datatype in memory
     */
    private boolean isNativeDatatype = false;
079
080    /*
081     * Enum to indicate the type of I/O to perform inside of the common I/O
082     * function.
083     */
084    protected enum IO_TYPE {
085        READ, WRITE
086    };
087
    /**
     * Constructs an instance of an H5 scalar dataset with a given file, dataset name and path.
     * <p>
     * For example, in H5ScalarDS(h5file, "dset", "/arrays/"), "dset" is the name of the dataset and "/arrays/" is the
     * group path of the dataset.
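     * <p>
     * A minimal sketch of direct construction is shown below; h5file is assumed
     * to be an already opened H5File instance:
     *
     * <pre>
     * H5ScalarDS dset = new H5ScalarDS(h5file, "dset", "/arrays/");
     * dset.init(); // retrieve datatype and dataspace information from file
     * </pre>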
     *
     * @param theFile
     *            the file that contains the data object.
     * @param theName
     *            the name of the data object, e.g. "dset".
     * @param thePath
     *            the full path of the data object, e.g. "/arrays/".
     */
101    public H5ScalarDS(FileFormat theFile, String theName, String thePath) {
102        this(theFile, theName, thePath, null);
103    }
104
    /**
     * @deprecated Not for public use in the future.<br>
     *             Use {@link #H5ScalarDS(FileFormat, String, String)} instead.
     *
     * @param theFile
     *            the file that contains the data object.
     * @param theName
     *            the name of the data object, e.g. "dset".
     * @param thePath
     *            the full path of the data object, e.g. "/arrays/".
     * @param oid
     *            the oid of the data object.
     */
118    @Deprecated
119    public H5ScalarDS(FileFormat theFile, String theName, String thePath, long[] oid) {
120        super(theFile, theName, thePath, oid);
121        unsignedConverted = false;
122        paletteRefs = null;
123        objInfo = new H5O_info_t(-1L, -1L, 0, 0, -1L, 0L, 0L, 0L, 0L, null, null, null);
124
125        if ((oid == null) && (theFile != null)) {
126            // retrieve the object ID
127            try {
128                byte[] refbuf = H5.H5Rcreate(theFile.getFID(), this.getFullName(), HDF5Constants.H5R_OBJECT, -1);
129                this.oid = new long[1];
130                this.oid[0] = HDFNativeData.byteToLong(refbuf, 0);
131            }
132            catch (Exception ex) {
133                log.debug("constructor ID {} for {} failed H5Rcreate", theFile.getFID(), this.getFullName());
134            }
135        }
136    }
137
138    /*
139     * (non-Javadoc)
140     *
141     * @see hdf.object.HObject#open()
142     */
143    @Override
144    public long open() {
145        long did = HDF5Constants.H5I_INVALID_HID;
146
147        try {
148            did = H5.H5Dopen(getFID(), getPath() + getName(), HDF5Constants.H5P_DEFAULT);
149            log.trace("open(): did={}", did);
150        }
151        catch (HDF5Exception ex) {
152            log.debug("open(): Failed to open dataset {}", getPath() + getName(), ex);
153            did = HDF5Constants.H5I_INVALID_HID;
154        }
155
156        return did;
157    }
158
159    /*
160     * (non-Javadoc)
161     *
162     * @see hdf.object.HObject#close(int)
163     */
164    @Override
165    public void close(long did) {
166        if (did >= 0) {
167            try {
168                H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_LOCAL);
169            }
170            catch (Exception ex) {
171                log.debug("close(): H5Fflush(did {}) failure: ", did, ex);
172            }
173            try {
174                H5.H5Dclose(did);
175            }
176            catch (HDF5Exception ex) {
177                log.debug("close(): H5Dclose(did {}) failure: ", did, ex);
178            }
179        }
180    }
181
    /**
     * Retrieves datatype and dataspace information from file and sets the dataset
     * in memory.
     * <p>
     * init() is designed to support lazy operation in a dataset object. When a
     * data object is retrieved from file, the datatype, dataspace and raw data are
     * not loaded into memory. When it is asked to read the raw data from file,
     * init() is first called to get the datatype and dataspace information; the
     * raw data is then loaded from the file.
     * <p>
     * init() is also used to reset the selection of a dataset (start, stride and
     * count) to the default, which is the entire dataset for 1D or 2D datasets. In
     * the following example, init() at step 1) retrieves datatype and dataspace
     * information from file. getData() at step 3) reads only one data point. init()
     * at step 4) resets the selection to the whole dataset. getData() at step 6)
     * reads the values of the whole dataset into memory.
     *
199     * <pre>
200     * dset = (Dataset) file.get(NAME_DATASET);
201     *
202     * // 1) get datatype and dataspace information from file
203     * dset.init();
204     * rank = dset.getRank(); // rank = 2, a 2D dataset
205     * count = dset.getSelectedDims();
206     * start = dset.getStartDims();
207     * dims = dset.getDims();
208     *
     * // 2) select only one data point
     * for (int i = 0; i &lt; rank; i++) {
     *     start[i] = 0;
     *     count[i] = 1;
     * }
214     *
215     * // 3) read one data point
216     * data = dset.getData();
217     *
218     * // 4) reset selection to the whole dataset
219     * dset.init();
220     *
221     * // 5) clean the memory data buffer
222     * dset.clearData();
223     *
224     * // 6) Read the whole dataset
225     * data = dset.getData();
226     * </pre>
227     */
228    @Override
229    public void init() {
230
231        if (inited) {
232            resetSelection();
            log.trace("init(): Dataset already initialized");
234            return; // already called. Initialize only once
235        }
236
237        long did = HDF5Constants.H5I_INVALID_HID;
238        long tid = HDF5Constants.H5I_INVALID_HID;
239        long sid = HDF5Constants.H5I_INVALID_HID;
240        long nativeTID = HDF5Constants.H5I_INVALID_HID;
241
242        did = open();
243        if (did >= 0) {
244            // check if it is an external or virtual dataset
245            long pid = HDF5Constants.H5I_INVALID_HID;
246            try {
247                pid = H5.H5Dget_create_plist(did);
248                try {
249                    int nfiles = H5.H5Pget_external_count(pid);
250                    isExternal = (nfiles > 0);
251                    int layoutType = H5.H5Pget_layout(pid);
252                    if (isVirtual = (layoutType == HDF5Constants.H5D_VIRTUAL)) {
253                        try {
254                            long vmaps = H5.H5Pget_virtual_count(pid);
255                            if (vmaps > 0) {
256                                virtualNameList = new Vector<>();
257                                for (long next = 0; next < vmaps; next++) {
258                                    try {
259                                        String fname = H5.H5Pget_virtual_filename(pid, next);
260                                        virtualNameList.add(fname);
261                                        log.trace("init(): virtualNameList[{}]={}", next, fname);
262                                    }
263                                    catch (Exception err) {
264                                        continue;
265                                    }
266                                }
267                            }
268                        }
269                        catch (Exception err) {
270                            log.debug("init(): vds count error: ", err);
271                        }
272                    }
273                    log.trace("init(): pid={} nfiles={} isExternal={} isVirtual={}", pid, nfiles, isExternal, isVirtual);
274                }
275                catch (Exception ex) {
276                    log.debug("init(): check if it is an external or virtual dataset: ", ex);
277                }
278            }
279            catch (Exception ex) {
280                log.debug("init(): H5Dget_create_plist: ", ex);
281            }
282            finally {
283                try {
284                    H5.H5Pclose(pid);
285                }
286                catch (Exception ex) {
287                    log.debug("init(): H5Pclose(pid {}) failure: ", pid, ex);
288                }
289            }
290
291            paletteRefs = getPaletteRefs(did);
292
293            try {
294                sid = H5.H5Dget_space(did);
295                rank = H5.H5Sget_simple_extent_ndims(sid);
296                tid = H5.H5Dget_type(did);
297
298                log.trace("init(): tid={} sid={} rank={}", tid, sid, rank);
299
300                try {
301                    datatype = new H5Datatype(getFileFormat(), tid);
302
303                    log.trace("init(): tid={} is tclass={} has isText={} : isNamed={} :  isVLEN={} : isEnum={} : isUnsigned={} : isRegRef={}",
304                            tid, datatype.getDatatypeClass(), ((H5Datatype) datatype).isText(), datatype.isNamed(), datatype.isVLEN(),
305                            datatype.isEnum(), datatype.isUnsigned(), ((H5Datatype) datatype).isRegRef());
306                }
307                catch (Exception ex) {
308                    log.debug("init(): failed to create datatype for dataset: ", ex);
309                    datatype = null;
310                }
311
312                // Check if the datatype in the file is the native datatype
313                try {
314                    nativeTID = H5.H5Tget_native_type(tid);
315                    isNativeDatatype = H5.H5Tequal(tid, nativeTID);
316                    log.trace("init(): isNativeDatatype={}", isNativeDatatype);
317                }
318                catch (Exception ex) {
319                    log.debug("init(): check if native type failure: ", ex);
320                }
321
322                try {
323                    pid = H5.H5Dget_create_plist(did);
324                    int[] fillStatus = { 0 };
325                    if (H5.H5Pfill_value_defined(pid, fillStatus) >= 0) {
326                        // Check if fill value is user-defined before retrieving it.
327                        if (fillStatus[0] == HDF5Constants.H5D_FILL_VALUE_USER_DEFINED) {
328                            try {
329                                fillValue = H5Datatype.allocateArray((H5Datatype) datatype, 1);
330                            }
331                            catch (OutOfMemoryError e) {
332                                log.debug("init(): out of memory: ", e);
333                                fillValue = null;
334                            }
335                            catch (Exception ex) {
336                                log.debug("init(): allocate fill value buffer failed: ", ex);
337                                fillValue = null;
338                            }
339
340                            log.trace("init(): fillValue={}", fillValue);
341                            try {
342                                H5.H5Pget_fill_value(pid, nativeTID, fillValue);
343                                log.trace("init(): H5Pget_fill_value={}", fillValue);
344                                if (fillValue != null) {
345                                    if (datatype.isUnsigned() && !isFillValueConverted) {
346                                        fillValue = ScalarDS.convertFromUnsignedC(fillValue, null);
347                                        isFillValueConverted = true;
348                                    }
349
350                                    int n = Array.getLength(fillValue);
351                                    for (int i = 0; i < n; i++)
352                                        addFilteredImageValue((Number) Array.get(fillValue, i));
353                                }
354                            }
355                            catch (Exception ex2) {
356                                log.debug("init(): fill value was defined: ", ex2);
357                                fillValue = null;
358                            }
359                        }
360                    }
361                }
362                catch (HDF5Exception ex) {
363                    log.debug("init(): check if fill value is defined failure: ", ex);
364                }
365                finally {
366                    try {
367                        H5.H5Pclose(pid);
368                    }
369                    catch (Exception ex) {
370                        log.debug("init(): H5Pclose(pid {}) failure: ", pid, ex);
371                    }
372                }
373
374                if (rank == 0) {
375                    // a scalar data point
376                    rank = 1;
377                    dims = new long[1];
378                    dims[0] = 1;
                    log.trace("init(): dataset is a scalar data point");
380                }
381                else {
382                    dims = new long[rank];
383                    maxDims = new long[rank];
384                    H5.H5Sget_simple_extent_dims(sid, dims, maxDims);
385                    log.trace("init(): rank={}, dims={}, maxDims={}", rank, dims, maxDims);
386                }
387
388                inited = true;
389            }
390            catch (HDF5Exception ex) {
391                log.debug("init(): ", ex);
392            }
393            finally {
394                try {
395                    H5.H5Tclose(nativeTID);
396                }
397                catch (Exception ex2) {
398                    log.debug("init(): H5Tclose(nativeTID {}) failure: ", nativeTID, ex2);
399                }
400                try {
401                    H5.H5Tclose(tid);
402                }
403                catch (HDF5Exception ex2) {
404                    log.debug("init(): H5Tclose(tid {}) failure: ", tid, ex2);
405                }
406                try {
407                    H5.H5Sclose(sid);
408                }
409                catch (HDF5Exception ex2) {
410                    log.debug("init(): H5Sclose(sid {}) failure: ", sid, ex2);
411                }
412            }
413
            // check for the type of image and the interlace mode
            // a dataset is a true color image when IMAGE_SUBCLASS = IMAGE_TRUECOLOR;
            // the interlace mode defaults to INTERLACE_PIXEL and is switched to
            // INTERLACE_PLANE when the INTERLACE_MODE attribute says so
419            if ((rank >= 3) && isImage) {
420                interlace = -1;
421                isTrueColor = isStringAttributeOf(did, "IMAGE_SUBCLASS", "IMAGE_TRUECOLOR");
422
423                if (isTrueColor) {
424                    interlace = INTERLACE_PIXEL;
425                    if (isStringAttributeOf(did, "INTERLACE_MODE", "INTERLACE_PLANE")) {
426                        interlace = INTERLACE_PLANE;
427                    }
428                }
429            }
430
431            close(did);
432
433            startDims = new long[rank];
434            selectedDims = new long[rank];
435
436            resetSelection();
437        }
438        else {
439            log.debug("init(): failed to open dataset");
440        }
441        log.trace("init(): rank={}, startDims={}, selectedDims={}", rank, startDims, selectedDims);
442    }
443
444    /*
445     * (non-Javadoc)
446     *
447     * @see hdf.object.DataFormat#hasAttribute()
448     */
449    @Override
450    public boolean hasAttribute() {
451        objInfo.num_attrs = nAttributes;
452
453        if (objInfo.num_attrs < 0) {
454            long did = open();
455            if (did >= 0) {
456                objInfo.num_attrs = 0;
457
458                try {
459                    objInfo = H5.H5Oget_info(did);
460                    nAttributes = (int) objInfo.num_attrs;
461                }
462                catch (Exception ex) {
463                    objInfo.num_attrs = 0;
464                    log.debug("hasAttribute(): get object info: ", ex);
465                }
466
                if (nAttributes > 0) {
468                    // test if it is an image
469                    // check image
470                    Object avalue = getAttrValue(did, "CLASS");
471                    if (avalue != null) {
472                        try {
473                            isImageDisplay = isImage = "IMAGE".equalsIgnoreCase(new String((byte[]) avalue).trim());
474                            log.trace("hasAttribute(): isImageDisplay dataset: {} with value = {}", isImageDisplay, avalue);
475                        }
476                        catch (Exception err) {
477                            log.debug("hasAttribute(): check image: ", err);
478                        }
479                    }
480
481                    // retrieve the IMAGE_MINMAXRANGE
482                    avalue = getAttrValue(did, "IMAGE_MINMAXRANGE");
483                    if (avalue != null) {
484                        double x0 = 0;
485                        double x1 = 0;
486                        try {
487                            x0 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 0).toString());
488                            x1 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 1).toString());
489                        }
490                        catch (Exception ex2) {
491                            x0 = x1 = 0;
492                        }
493                        if (x1 > x0) {
494                            imageDataRange = new double[2];
495                            imageDataRange[0] = x0;
496                            imageDataRange[1] = x1;
497                        }
498                    }
499
500                    try {
501                        checkCFconvention(did);
502                    }
503                    catch (Exception ex) {
504                        log.debug("hasAttribute(): checkCFconvention(did {}):", did, ex);
505                    }
506                }
507                close(did);
508            }
509            else {
510                log.debug("hasAttribute(): could not open dataset");
511            }
512        }
513
514        log.trace("hasAttribute(): nAttributes={}", objInfo.num_attrs);
515        return (objInfo.num_attrs > 0);
516    }
517
518    /*
519     * (non-Javadoc)
520     *
521     * @see hdf.object.Dataset#getDatatype()
522     */
523    @Override
524    public Datatype getDatatype() {
525        if (!inited)
526            init();
527
528        if (datatype == null) {
529            long did = HDF5Constants.H5I_INVALID_HID;
530            long tid = HDF5Constants.H5I_INVALID_HID;
531
532            did = open();
533            if (did >= 0) {
534                try {
535                    tid = H5.H5Dget_type(did);
536                    log.trace("getDatatype(): isNativeDatatype={}", isNativeDatatype);
537                    if (!isNativeDatatype) {
538                        long tmptid = -1;
539                        try {
540                            tmptid = H5Datatype.toNative(tid);
541                            if (tmptid >= 0) {
542                                try {
543                                    H5.H5Tclose(tid);
544                                }
545                                catch (Exception ex2) {
546                                    log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex2);
547                                }
548                                tid = tmptid;
549                            }
550                        }
551                        catch (Exception ex) {
552                            log.debug("getDatatype(): toNative: ", ex);
553                        }
554                    }
555                    datatype = new H5Datatype(getFileFormat(), tid);
556                }
557                catch (Exception ex) {
558                    log.debug("getDatatype(): get datatype failure: ", ex);
559                }
560                finally {
561                    try {
562                        H5.H5Tclose(tid);
563                    }
564                    catch (HDF5Exception ex) {
565                        log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex);
566                    }
567                    try {
568                        H5.H5Dclose(did);
569                    }
570                    catch (HDF5Exception ex) {
571                        log.debug("getDatatype(): H5Dclose(did {}) failure: ", did, ex);
572                    }
573                }
574            }
575        }
576
577        if (isExternal) {
578            String pdir = this.getFileFormat().getAbsoluteFile().getParent();
579
580            if (pdir == null) {
581                pdir = ".";
582            }
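            // Note: the working directory is pointed at the HDF5 file's parent directory
            // so that relative paths to the external storage files can be resolved.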
583            System.setProperty("user.dir", pdir);
584            log.trace("getDatatype(): External dataset: user.dir={}", pdir);
585        }
586
587        return datatype;
588    }
589
590    /*
591     * (non-Javadoc)
592     *
593     * @see hdf.object.Dataset#clear()
594     */
595    @Override
596    public void clear() {
597        super.clear();
598
599        if (attributeList != null) {
600            ((Vector<Attribute>) attributeList).setSize(0);
601        }
602    }
603
604    /*
605     * (non-Javadoc)
606     *
607     * @see hdf.object.Dataset#readBytes()
608     */
609    @Override
610    public byte[] readBytes() throws HDF5Exception {
611        byte[] theData = null;
612
613        if (!isInited())
614            init();
615
616        long did = open();
617        if (did >= 0) {
618            long fspace = HDF5Constants.H5I_INVALID_HID;
619            long mspace = HDF5Constants.H5I_INVALID_HID;
620            long tid = HDF5Constants.H5I_INVALID_HID;
621
622            try {
623                long[] lsize = { 1 };
624                for (int j = 0; j < selectedDims.length; j++) {
625                    lsize[0] *= selectedDims[j];
626                }
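                // lsize[0] now holds the total number of elements in the current selection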
627
628                fspace = H5.H5Dget_space(did);
629                mspace = H5.H5Screate_simple(rank, selectedDims, null);
630
631                // set the rectangle selection
632                // HDF5 bug: for scalar dataset, H5Sselect_hyperslab gives core dump
633                if (rank * dims[0] > 1) {
634                    H5.H5Sselect_hyperslab(fspace, HDF5Constants.H5S_SELECT_SET, startDims, selectedStride,
635                            selectedDims, null); // set block to 1
636                }
637
638                tid = H5.H5Dget_type(did);
639                long size = H5.H5Tget_size(tid) * lsize[0];
640                log.trace("readBytes(): size = {}", size);
641
                if (size < Integer.MIN_VALUE || size > Integer.MAX_VALUE)
                    throw new Exception("Invalid size: selection does not fit in a Java byte array");
643
644                theData = new byte[(int)size];
645
646                log.trace("readBytes(): H5Dread: did={} tid={} fspace={} mspace={}", did, tid, fspace, mspace);
647                H5.H5Dread(did, tid, mspace, fspace, HDF5Constants.H5P_DEFAULT, theData);
648            }
649            catch (Exception ex) {
650                log.debug("readBytes(): failed to read data: ", ex);
651            }
652            finally {
653                try {
654                    H5.H5Sclose(fspace);
655                }
656                catch (Exception ex2) {
657                    log.debug("readBytes(): H5Sclose(fspace {}) failure: ", fspace, ex2);
658                }
659                try {
660                    H5.H5Sclose(mspace);
661                }
662                catch (Exception ex2) {
663                    log.debug("readBytes(): H5Sclose(mspace {}) failure: ", mspace, ex2);
664                }
665                try {
666                    H5.H5Tclose(tid);
667                }
668                catch (HDF5Exception ex2) {
669                    log.debug("readBytes(): H5Tclose(tid {}) failure: ", tid, ex2);
670                }
671                close(did);
672            }
673        }
674
675        return theData;
676    }
677
678    /**
679     * Reads the data from file.
680     * <p>
681     * read() reads the data from file to a memory buffer and returns the memory
682     * buffer. The dataset object does not hold the memory buffer. To store the
683     * memory buffer in the dataset object, one must call getData().
684     * <p>
685     * By default, the whole dataset is read into memory. Users can also select
686     * a subset to read. Subsetting is done in an implicit way.
687     * <p>
688     * <b>How to Select a Subset</b>
689     * <p>
690     * A selection is specified by three arrays: start, stride and count.
691     * <ol>
692     * <li>start: offset of a selection
693     * <li>stride: determines how many elements to move in each dimension
694     * <li>count: number of elements to select in each dimension
695     * </ol>
     * getStartDims(), getStride() and getSelectedDims() return the start,
     * stride and count arrays respectively. Applications can make a selection
     * by changing the values of the arrays.
699     * <p>
700     * The following example shows how to make a subset. In the example, the
701     * dataset is a 4-dimensional array of [200][100][50][10], i.e. dims[0]=200;
702     * dims[1]=100; dims[2]=50; dims[3]=10; <br>
703     * We want to select every other data point in dims[1] and dims[2]
704     *
705     * <pre>
706     * int rank = dataset.getRank(); // number of dimensions of the dataset
707     * long[] dims = dataset.getDims(); // the dimension sizes of the dataset
708     * long[] selected = dataset.getSelectedDims(); // the selected size of the
709     *                                              // dataset
710     * long[] start = dataset.getStartDims(); // the offset of the selection
711     * long[] stride = dataset.getStride(); // the stride of the dataset
712     * int[] selectedIndex = dataset.getSelectedIndex(); // the selected
713     *                                                   // dimensions for
714     *                                                   // display
715     *
     * // select dim1 and dim2 as 2D data for display, and slice through dim0
     * selectedIndex[0] = 1;
     * selectedIndex[1] = 2;
     * selectedIndex[2] = 0;
720     *
721     * // reset the selection arrays
722     * for (int i = 0; i &lt; rank; i++) {
723     *     start[i] = 0;
724     *     selected[i] = 1;
725     *     stride[i] = 1;
726     * }
727     *
728     * // set stride to 2 on dim1 and dim2 so that every other data point is
729     * // selected.
730     * stride[1] = 2;
731     * stride[2] = 2;
732     *
     * // set the selection size of dim1 and dim2
     * selected[1] = dims[1] / stride[1];
     * selected[2] = dims[2] / stride[2];
     *
     * // when dataset.getData() is called, the selection above will be used since
     * // the dimension arrays are passed by reference. Changes of these arrays
     * // outside the dataset object directly change the values of these arrays
     * // in the dataset object.
742     * </pre>
743     * <p>
744     * For ScalarDS, the memory data buffer is a one-dimensional array of byte,
745     * short, int, float, double or String type based on the datatype of the
746     * dataset.
747     * <p>
     * For CompoundDS, the memory data object is a java.util.List object. Each
749     * element of the list is a data array that corresponds to a compound field.
750     * <p>
751     * For example, if compound dataset "comp" has the following nested
752     * structure, and member datatypes
753     *
754     * <pre>
755     * comp --&gt; m01 (int)
756     * comp --&gt; m02 (float)
757     * comp --&gt; nest1 --&gt; m11 (char)
758     * comp --&gt; nest1 --&gt; m12 (String)
759     * comp --&gt; nest1 --&gt; nest2 --&gt; m21 (long)
760     * comp --&gt; nest1 --&gt; nest2 --&gt; m22 (double)
761     * </pre>
762     *
763     * getData() returns a list of six arrays: {int[], float[], char[],
764     * String[], long[] and double[]}.
765     *
766     * @return the data read from file.
767     *
768     * @see #getData()
769     * @see hdf.object.DataFormat#read()
770     *
771     * @throws Exception
772     *             if object can not be read
773     */
774    @Override
775    public Object read() throws Exception {
776        Object readData = null;
777
778        if (!isInited())
779            init();
780
781        try {
782            readData = scalarDatasetCommonIO(IO_TYPE.READ, null);
783        }
784        catch (Exception ex) {
785            log.debug("read(): failed to read scalar dataset: ", ex);
786            throw new Exception("Failed to read scalar dataset: " + ex.getMessage(), ex);
787        }
788
789        return readData;
790    }
791
    /**
     * Writes the given data buffer into this dataset in a file.
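     * <p>
     * A minimal sketch of a read-modify-write cycle is shown below; the int
     * element type is an assumption for the example:
     *
     * <pre>
     * dset.init();
     * int[] values = (int[]) dset.getData(); // read the current selection
     * values[0] = 100;                       // modify the buffer in memory
     * dset.write(values);                    // write the buffer back to file
     * </pre>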
     *
     * @param buf
     *            The buffer that contains the data values.
     *
     * @throws Exception
     *             If there is an error at the HDF5 library level.
     */
801    @Override
802    public void write(Object buf) throws Exception {
803        if (this.getFileFormat().isReadOnly())
804            throw new Exception("cannot write to scalar dataset in file opened as read-only");
805
806        if (!isInited())
807            init();
808
809        try {
810            scalarDatasetCommonIO(IO_TYPE.WRITE, buf);
811        }
812        catch (Exception ex) {
813            log.debug("write(): failed to write to scalar dataset: ", ex);
814            throw new Exception("Failed to write to scalar dataset: " + ex.getMessage(), ex);
815        }
816    }
817
818    private Object scalarDatasetCommonIO(IO_TYPE ioType, Object writeBuf) throws Exception {
819        H5Datatype dsDatatype = (H5Datatype) getDatatype();
820        Object theData = null;
821
822        /*
823         * I/O type-specific pre-initialization.
824         */
825        if (ioType == IO_TYPE.WRITE) {
826            if (writeBuf == null) {
827                log.debug("scalarDatasetCommonIO(): writeBuf is null");
828                throw new Exception("write buffer is null");
829            }
830
831            /*
832             * Check for any unsupported datatypes and fail early before
833             * attempting to write to the dataset.
834             */
835            if (dsDatatype.isVLEN() && !dsDatatype.isText()) {
836                log.debug("scalarDatasetCommonIO(): Cannot write non-string variable-length data");
837                throw new HDF5Exception("Writing non-string variable-length data is not supported");
838            }
839
840            if (dsDatatype.isRegRef()) {
841                log.debug("scalarDatasetCommonIO(): Cannot write region reference data");
842                throw new HDF5Exception("Writing region reference data is not supported");
843            }
844        }
845
846        long did = open();
847        if (did >= 0) {
848            long[] spaceIDs = { HDF5Constants.H5I_INVALID_HID, HDF5Constants.H5I_INVALID_HID }; // spaceIDs[0]=mspace, spaceIDs[1]=fspace
849
850            try {
851                /*
852                 * NOTE: this call sets up a hyperslab selection in the file according to the
853                 * current selection in the dataset object.
854                 */
855                long totalSelectedSpacePoints = H5Utils.getTotalSelectedSpacePoints(did, dims, startDims,
856                        selectedStride, selectedDims, spaceIDs);
857                log.trace("scalarDatasetCommonIO(): totalSelectedSpacePoints={}", totalSelectedSpacePoints);
858
859                if (ioType == IO_TYPE.READ) {
860                    log.trace("scalarDatasetCommonIO():read ioType isNamed={} isEnum={} isText={} isRefObj={}", dsDatatype.isNamed(), dsDatatype.isEnum(), dsDatatype.isText(), dsDatatype.isRefObj());
861                    if ((originalBuf == null) || dsDatatype.isEnum() || dsDatatype.isText() || dsDatatype.isRefObj()
862                            || ((originalBuf != null) && (totalSelectedSpacePoints != nPoints))) {
863                        try {
864                            theData = H5Datatype.allocateArray(dsDatatype, (int)totalSelectedSpacePoints);
865                        }
866                        catch (OutOfMemoryError err) {
867                            log.debug("scalarDatasetCommonIO(): Out of memory");
868                            throw new HDF5Exception("Out Of Memory");
869                        }
870                    }
871                    else {
872                        // reuse the buffer if the size is the same
873                        log.trace("scalarDatasetCommonIO():read ioType reuse the buffer if the size is the same");
874                        theData = originalBuf;
875                    }
876
877                    if (theData != null) {
878                        /*
879                         * Actually read the data now that everything has been setup.
880                         */
881                        long tid = HDF5Constants.H5I_INVALID_HID;
882                        try {
883                            log.trace("scalarDatasetCommonIO():read ioType create native");
884                            tid = dsDatatype.createNative();
885
886                            if (dsDatatype.isVLEN() || (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isVLEN())) {
887                                log.trace("scalarDatasetCommonIO(): H5DreadVL did={} tid={} spaceIDs[0]={} spaceIDs[1]={}",
888                                        did, tid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
889                                        (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
890
891                                H5.H5DreadVL(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT,
892                                        (Object[]) theData);
893                            }
894                            else {
895                                log.trace("scalarDatasetCommonIO(): H5Dread did={} tid={} spaceIDs[0]={} spaceIDs[1]={}",
896                                        did, tid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
897                                        (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
898
899                                H5.H5Dread(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, theData);
900                            }
901                        }
902                        catch (HDF5DataFiltersException exfltr) {
903                            log.debug("scalarDatasetCommonIO(): read failure: ", exfltr);
904                            throw new Exception("Filter not available exception: " + exfltr.getMessage(), exfltr);
905                        }
906                        catch (Exception ex) {
907                            log.debug("scalarDatasetCommonIO(): read failure: ", ex);
908                            throw new Exception(ex.getMessage(), ex);
909                        }
910                        finally {
911                            dsDatatype.close(tid);
912                        }
913
914                        /*
915                         * Perform any necessary data conversions.
916                         */
917                        if (dsDatatype.isText() && convertByteToString && (theData instanceof byte[])) {
918                            log.trace("scalarDatasetCommonIO(): isText: converting byte array to string array");
919                            theData = byteToString((byte[]) theData, (int) dsDatatype.getDatatypeSize());
920                        }
921                        else if (dsDatatype.isFloat() && dsDatatype.getDatatypeSize() == 16) {
922                            log.trace("scalarDatasetCommonIO(): isFloat: converting byte array to BigDecimal array");
923                            theData = dsDatatype.byteToBigDecimal(0, (int)totalSelectedSpacePoints, (byte[]) theData);
924                        }
925                        else if (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isFloat() && dsDatatype.getDatatypeBase().getDatatypeSize() == 16) {
926                            log.trace("scalarDatasetCommonIO(): isArray and isFloat: converting byte array to BigDecimal array");
927                            long[] arrayDims = dsDatatype.getArrayDims();
928                            int asize = (int)totalSelectedSpacePoints;
929                            for (int j = 0; j < arrayDims.length; j++) {
930                                asize *= arrayDims[j];
931                            }
932                            theData = ((H5Datatype)dsDatatype.getDatatypeBase()).byteToBigDecimal(0, asize, (byte[]) theData);
933                        }
934                        else if (dsDatatype.isRefObj()) {
935                            log.trace("scalarDatasetCommonIO(): isREF: converting byte array to long array");
936                            theData = HDFNativeData.byteToLong((byte[]) theData);
937                        }
938                    }
939                } // IO_TYPE.READ
940                else {
941                    /*
942                     * Perform any necessary data conversions before writing the data.
943                     *
944                     * Note that v-len strings do not get converted, regardless of
945                     * conversion request type.
946                     */
947                    Object tmpData = writeBuf;
948                    try {
949                        // Check if we need to convert integer data
950                        int tsize = (int) dsDatatype.getDatatypeSize();
951                        String cname = writeBuf.getClass().getName();
952                        char dname = cname.charAt(cname.lastIndexOf("[") + 1);
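                        // dname is the JVM array element type code of the write buffer
                        // ('B' = byte[], 'S' = short[], 'I' = int[], 'J' = long[]).
                        // Unsigned HDF5 integers are held in memory in the next larger
                        // signed Java type, so such buffers are converted back to the
                        // C-style unsigned layout before writing.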
953                        boolean doIntConversion = (((tsize == 1) && (dname == 'S')) || ((tsize == 2) && (dname == 'I'))
954                                || ((tsize == 4) && (dname == 'J')) || (dsDatatype.isUnsigned() && unsignedConverted));
955
956                        if (doIntConversion) {
957                            log.trace("scalarDatasetCommonIO(): converting integer data to unsigned C-type integers");
958                            tmpData = convertToUnsignedC(writeBuf, null);
959                        }
960                        else if (dsDatatype.isText() && !dsDatatype.isVarStr() && convertByteToString) {
961                            log.trace("scalarDatasetCommonIO(): converting string array to byte array");
962                            tmpData = stringToByte((String[]) writeBuf, tsize);
963                        }
                        else if (dsDatatype.isEnum() && (Array.get(writeBuf, 0) instanceof String)) {
                            log.trace("scalarDatasetCommonIO(): converting enum names to values");
                            tmpData = dsDatatype.convertEnumNameToValue((String[]) writeBuf);
                        }
969                        else if (dsDatatype.isFloat() && dsDatatype.getDatatypeSize() == 16) {
970                            log.trace("scalarDatasetCommonIO(): isFloat: converting BigDecimal array to byte array");
971                            tmpData = dsDatatype.bigDecimalToByte(0, (int)totalSelectedSpacePoints, (BigDecimal[]) writeBuf);
972                        }
973                    }
974                    catch (Exception ex) {
975                        log.debug("scalarDatasetCommonIO(): data conversion failure: ", ex);
976                        throw new Exception("data conversion failure: " + ex.getMessage());
977                    }
978
979                    /*
980                     * Actually write the data now that everything has been setup.
981                     */
982                    long tid = HDF5Constants.H5I_INVALID_HID;
983                    try {
984                        tid = dsDatatype.createNative();
985
986                        if (dsDatatype.isVLEN() || (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isVLEN())) {
987                            log.trace("scalarDatasetCommonIO(): H5DwriteVL did={} tid={} spaceIDs[0]={} spaceIDs[1]={}",
988                                    did, tid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
989                                    (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
990
991                            H5.H5DwriteVL(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, (Object[]) tmpData);
992                        }
993                        else {
994                            log.trace("scalarDatasetCommonIO(): H5Dwrite did={} tid={} spaceIDs[0]={} spaceIDs[1]={}",
995                                    did, tid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
996                                    (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
997
998                            H5.H5Dwrite(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, tmpData);
999                        }
1000                    }
1001                    catch (Exception ex) {
1002                        log.debug("scalarDatasetCommonIO(): write failure: ", ex);
1003                        throw new Exception(ex.getMessage());
1004                    }
1005                    finally {
1006                        dsDatatype.close(tid);
1007                    }
1008                } // IO_TYPE.WRITE
1009            }
1010            finally {
1011                if (HDF5Constants.H5S_ALL != spaceIDs[0]) {
1012                    try {
1013                        H5.H5Sclose(spaceIDs[0]);
1014                    }
1015                    catch (Exception ex) {
1016                        log.debug("scalarDatasetCommonIO(): H5Sclose(spaceIDs[0] {}) failure: ", spaceIDs[0], ex);
1017                    }
1018                }
1019
1020                if (HDF5Constants.H5S_ALL != spaceIDs[1]) {
1021                    try {
1022                        H5.H5Sclose(spaceIDs[1]);
1023                    }
1024                    catch (Exception ex) {
1025                        log.debug("scalarDatasetCommonIO(): H5Sclose(spaceIDs[1] {}) failure: ", spaceIDs[1], ex);
1026                    }
1027                }
1028
1029                close(did);
1030            }
1031        }
1032        else
1033            log.debug("scalarDatasetCommonIO(): failed to open dataset");
1034
1035        return theData;
1036    }
1037
1038    /*
1039     * (non-Javadoc)
1040     *
1041     * @see hdf.object.DataFormat#getMetadata()
1042     */
1043    @Override
1044    public List<Attribute> getMetadata() throws HDF5Exception {
1045        return this.getMetadata(fileFormat.getIndexType(null), fileFormat.getIndexOrder(null));
1046    }
1047
1048    /*
1049     * (non-Javadoc)
1050     *
1051     * @see hdf.object.DataFormat#getMetadata(int...)
1052     */
1053    public List<Attribute> getMetadata(int... attrPropList) throws HDF5Exception {
1054        if (!isInited()) {
1055            init();
1056        }
1057
1058        try {
1059            this.linkTargetObjName = H5File.getLinkTargetName(this);
1060        }
1061        catch (Exception ex) {
1062            log.debug("getMetadata(): getLinkTargetName failed: ", ex);
1063        }
1064
1065        if (attributeList != null) {
1066            log.trace("getMetadata(): attributeList != null");
1067            return attributeList;
1068        }
1069
1070        long did = HDF5Constants.H5I_INVALID_HID;
1071        long pcid = HDF5Constants.H5I_INVALID_HID;
1072        long paid = HDF5Constants.H5I_INVALID_HID;
1073        int indxType = fileFormat.getIndexType(null);
1074        int order = fileFormat.getIndexOrder(null);
1075
1076        // load attributes first
1077        if (attrPropList.length > 0) {
1078            indxType = attrPropList[0];
1079            if (attrPropList.length > 1) {
1080                order = attrPropList[1];
1081            }
1082        }
1083
1084        attributeList = H5File.getAttribute(this, indxType, order);
1085
1086        did = open();
1087        if (did >= 0) {
1088            try {
1089                // get the compression and chunk information
1090                pcid = H5.H5Dget_create_plist(did);
1091                paid = H5.H5Dget_access_plist(did);
1092                long storageSize = H5.H5Dget_storage_size(did);
1093                int nfilt = H5.H5Pget_nfilters(pcid);
1094                int layoutType = H5.H5Pget_layout(pcid);
1095
1096                storageLayout.setLength(0);
1097                compression.setLength(0);
1098
1099                if (layoutType == HDF5Constants.H5D_CHUNKED) {
1100                    chunkSize = new long[rank];
1101                    H5.H5Pget_chunk(pcid, rank, chunkSize);
1102                    int n = chunkSize.length;
1103                    storageLayout.append("CHUNKED: ").append(chunkSize[0]);
1104                    for (int i = 1; i < n; i++) {
1105                        storageLayout.append(" X ").append(chunkSize[i]);
1106                    }
1107
1108                    if (nfilt > 0) {
1109                        long nelmts = 1;
1110                        long uncompSize;
1111                        long datumSize = getDatatype().getDatatypeSize();
1112
1113                        if (datumSize < 0) {
1114                            long tmptid = HDF5Constants.H5I_INVALID_HID;
1115                            try {
1116                                tmptid = H5.H5Dget_type(did);
1117                                datumSize = H5.H5Tget_size(tmptid);
1118                            }
1119                            finally {
1120                                try {
1121                                    H5.H5Tclose(tmptid);
1122                                }
1123                                catch (Exception ex2) {
1124                                    log.debug("getMetadata(): H5Tclose(tmptid {}) failure: ", tmptid, ex2);
1125                                }
1126                            }
1127                        }
1128
1129                        for (int i = 0; i < rank; i++) {
1130                            nelmts *= dims[i];
1131                        }
1132                        uncompSize = nelmts * datumSize;
1133
1134                        /* compression ratio = uncompressed size / compressed size */
1135
1136                        if (storageSize != 0) {
1137                            double ratio = (double) uncompSize / (double) storageSize;
1138                            DecimalFormat df = new DecimalFormat();
1139                            df.setMinimumFractionDigits(3);
1140                            df.setMaximumFractionDigits(3);
1141                            compression.append(df.format(ratio)).append(":1");
1142                        }
1143                    }
1144                }
1145                else if (layoutType == HDF5Constants.H5D_COMPACT) {
1146                    storageLayout.append("COMPACT");
1147                }
1148                else if (layoutType == HDF5Constants.H5D_CONTIGUOUS) {
1149                    storageLayout.append("CONTIGUOUS");
1150                    if (H5.H5Pget_external_count(pcid) > 0)
1151                        storageLayout.append(" - EXTERNAL ");
1152                }
1153                else if (layoutType == HDF5Constants.H5D_VIRTUAL) {
1154                    storageLayout.append("VIRTUAL - ");
1155                    try {
1156                        long vmaps = H5.H5Pget_virtual_count(pcid);
1157                        try {
1158                            int virtView = H5.H5Pget_virtual_view(paid);
1159                            long virtGap = H5.H5Pget_virtual_printf_gap(paid);
1160                            if (virtView == HDF5Constants.H5D_VDS_FIRST_MISSING)
1161                                storageLayout.append("First Missing");
1162                            else
1163                                storageLayout.append("Last Available");
1164                            storageLayout.append("\nGAP : " + virtGap);
1165                        }
1166                        catch (Exception err) {
1167                            log.debug("getMetadata(): vds error: ", err);
1168                            storageLayout.append("ERROR");
1169                        }
1170
1171                        storageLayout.append("\nMAPS : " + vmaps);
1172                        if (vmaps > 0) {
1173                            for (long next = 0; next < vmaps; next++) {
1174                                try {
1175                                    H5.H5Pget_virtual_vspace(pcid, next);
1176                                    H5.H5Pget_virtual_srcspace(pcid, next);
1177                                    String fname = H5.H5Pget_virtual_filename(pcid, next);
1178                                    String dsetname = H5.H5Pget_virtual_dsetname(pcid, next);
1179                                    storageLayout.append("\n").append(fname).append(" : ").append(dsetname);
1180                                }
1181                                catch (Exception err) {
1182                                    log.debug("getMetadata(): vds space[{}] error: ", next, err);
1183                                    storageLayout.append("ERROR");
1184                                }
1185                            }
1186                        }
1187                    }
1188                    catch (Exception err) {
1189                        log.debug("getMetadata(): vds count error: ", err);
1190                        storageLayout.append("ERROR");
1191                    }
1192                }
1193                else {
1194                    chunkSize = null;
1195                    storageLayout.append("NONE");
1196                }
1197
1198                int[] flags = { 0, 0 };
1199                long[] cdNelmts = { 20 };
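                // cdNelmts is an in/out parameter: it passes in the capacity of cdValues
                // and returns the number of client data values for each filter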
1200                int[] cdValues = new int[(int) cdNelmts[0]];
1201                String[] cdName = { "", "" };
1202                log.trace("getMetadata(): {} filters in pipeline", nfilt);
1203                int filter = -1;
1204                int[] filterConfig = { 1 };
1205
1206                filters.setLength(0);
1207
1208                if (nfilt == 0) {
1209                    filters.append("NONE");
1210                }
1211                else {
1212                    for (int i = 0, k = 0; i < nfilt; i++) {
1213                        log.trace("getMetadata(): filter[{}]", i);
1214                        if (i > 0) {
1215                            filters.append(", ");
1216                        }
1217                        if (k > 0) {
1218                            compression.append(", ");
1219                        }
1220
1221                        try {
1222                            cdNelmts[0] = 20;
1223                            cdValues = new int[(int) cdNelmts[0]];
1225                            filter = H5.H5Pget_filter(pcid, i, flags, cdNelmts, cdValues, 120, cdName, filterConfig);
1226                            log.trace("getMetadata(): filter[{}] is {} has {} elements ", i, cdName[0], cdNelmts[0]);
1227                            for (int j = 0; j < cdNelmts[0]; j++) {
1228                                log.trace("getMetadata(): filter[{}] element {} = {}", i, j, cdValues[j]);
1229                            }
1230                        }
1231                        catch (Exception err) {
1232                            log.debug("getMetadata(): filter[{}] error: ", i, err);
1233                            filters.append("ERROR");
1234                            continue;
1235                        }
1236
1237                        if (filter == HDF5Constants.H5Z_FILTER_NONE) {
1238                            filters.append("NONE");
1239                        }
1240                        else if (filter == HDF5Constants.H5Z_FILTER_DEFLATE) {
1241                            filters.append("GZIP");
1242                            compression.append(COMPRESSION_GZIP_TXT + cdValues[0]);
1243                            k++;
1244                        }
1245                        else if (filter == HDF5Constants.H5Z_FILTER_FLETCHER32) {
1246                            filters.append("Error detection filter");
1247                        }
1248                        else if (filter == HDF5Constants.H5Z_FILTER_SHUFFLE) {
1249                            filters.append("SHUFFLE: Nbytes = ").append(cdValues[0]);
1250                        }
1251                        else if (filter == HDF5Constants.H5Z_FILTER_NBIT) {
1252                            filters.append("NBIT");
1253                        }
1254                        else if (filter == HDF5Constants.H5Z_FILTER_SCALEOFFSET) {
1255                            filters.append("SCALEOFFSET: MIN BITS = ").append(cdValues[0]);
1256                        }
1257                        else if (filter == HDF5Constants.H5Z_FILTER_SZIP) {
1258                            filters.append("SZIP");
1259                            compression.append("SZIP: Pixels per block = ").append(cdValues[1]);
1260                            k++;
1261                            int flag = -1;
1262                            try {
1263                                flag = H5.H5Zget_filter_info(filter);
1264                            }
1265                            catch (Exception ex) {
1266                                log.debug("getMetadata(): H5Zget_filter_info failure: ", ex);
1267                                flag = -1;
1268                            }
1269                            if (flag == HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) {
1270                                compression.append(": H5Z_FILTER_CONFIG_DECODE_ENABLED");
1271                            }
1272                            else if ((flag == HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED)
1273                                    || (flag >= (HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED + HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED))) {
1274                                compression.append(": H5Z_FILTER_CONFIG_ENCODE_ENABLED");
1275                            }
1276                        }
1277                        else {
1278                            filters.append("USERDEFINED ").append(cdName[0]).append("(").append(filter).append("): ");
1279                            for (int j = 0; j < cdNelmts[0]; j++) {
1280                                if (j > 0)
1281                                    filters.append(", ");
1282                                filters.append(cdValues[j]);
1283                            }
1284                            log.debug("getMetadata(): filter[{}] is user defined compression", i);
1285                        }
1286                    } // (int i=0; i<nfilt; i++)
1287                }
1288
1289                if (compression.length() == 0) {
1290                    compression.append("NONE");
1291                }
1292                log.trace("getMetadata(): filter compression={}", compression);
1293                log.trace("getMetadata(): filter information={}", filters);
1294
1295                storage.setLength(0);
1296                storage.append("SIZE: ").append(storageSize);
1297
1298                try {
1299                    int[] at = { 0 };
1300                    H5.H5Pget_alloc_time(pcid, at);
1301                    storage.append(", allocation time: ");
1302                    if (at[0] == HDF5Constants.H5D_ALLOC_TIME_EARLY) {
1303                        storage.append("Early");
1304                    }
1305                    else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_INCR) {
1306                        storage.append("Incremental");
1307                    }
1308                    else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_LATE) {
1309                        storage.append("Late");
1310                    }
1311                    else
1312                        storage.append("Default");
1313                }
1314                catch (Exception ex) {
1315                    log.debug("getMetadata(): Storage allocation time:", ex);
1316                }
1317                log.trace("getMetadata(): storage={}", storage);
1318            }
1319            finally {
1320                try {
1321                    H5.H5Pclose(paid);
1322                }
1323                catch (Exception ex) {
1324                    log.debug("getMetadata(): H5Pclose(paid {}) failure: ", paid, ex);
1325                }
1326                try {
1327                    H5.H5Pclose(pcid);
1328                }
1329                catch (Exception ex) {
1330                    log.debug("getMetadata(): H5Pclose(pcid {}) failure: ", pcid, ex);
1331                }
1332                close(did);
1333            }
1334        }
1335
1336        return attributeList;
1337    }
1338
1339    /*
1340     * (non-Javadoc)
1341     *
1342     * @see hdf.object.DataFormat#writeMetadata(java.lang.Object)
1343     */
1344    @Override
1345    public void writeMetadata(Object info) throws Exception {
1346        // only attribute metadata is supported.
1347        if (!(info instanceof Attribute)) {
1348            log.debug("writeMetadata(): Object not an Attribute");
1349            return;
1350        }
1351
1352        boolean attrExisted = false;
1353        Attribute attr = (Attribute) info;
1354        log.trace("writeMetadata(): {}", attr.getName());
1355
1356        if (attributeList == null) {
1357            this.getMetadata();
1358        }
1359
1360        if (attributeList != null)
1361            attrExisted = attributeList.contains(attr);
1362
1363        getFileFormat().writeAttribute(this, attr, attrExisted);
1364        // add the new attribute into attribute list
1365        if (!attrExisted) {
1366            attributeList.add(attr);
1367            nAttributes = attributeList.size();
1368        }
1369    }
1370
1371    /*
1372     * (non-Javadoc)
1373     *
1374     * @see hdf.object.DataFormat#removeMetadata(java.lang.Object)
1375     */
1376    @Override
1377    public void removeMetadata(Object info) throws HDF5Exception {
1378        // only attribute metadata is supported.
1379        if (!(info instanceof Attribute)) {
1380            log.debug("removeMetadata(): Object not an Attribute");
1381            return;
1382        }
1383
1384        Attribute attr = (Attribute) info;
1385        log.trace("removeMetadata(): {}", attr.getName());
1386        long did = open();
1387        if (did >= 0) {
1388            try {
1389                H5.H5Adelete(did, attr.getName());
1390                List<Attribute> attrList = getMetadata();
1391                attrList.remove(attr);
1392                nAttributes = attrList.size();
1393            }
1394            finally {
1395                close(did);
1396            }
1397        }
1398    }
1399
1400    /*
1401     * (non-Javadoc)
1402     *
1403     * @see hdf.object.DataFormat#updateMetadata(java.lang.Object)
1404     */
1405    @Override
1406    public void updateMetadata(Object info) throws HDF5Exception {
1407        // only attribute metadata is supported.
1408        if (!(info instanceof Attribute)) {
1409            log.debug("updateMetadata(): Object not an Attribute");
1410            return;
1411        }
1412
1413        nAttributes = -1;
1414    }
1415
1416    /*
1417     * (non-Javadoc)
1418     *
1419     * @see hdf.object.HObject#setName(java.lang.String)
1420     */
1421    @Override
1422    public void setName(String newName) throws Exception {
1423        if (newName == null)
1424            throw new IllegalArgumentException("The new name is NULL");
1425
1426        H5File.renameObject(this, newName);
1427        super.setName(newName);
1428    }
1429
    /**
     * Resets the dataspace selection to its default: the selection starts at the origin and, based on the rank and
     * interlace mode, selects the full extent of the dimensions chosen for display. Also marks the data as not loaded.
     */
1433    private void resetSelection() {
1434        for (int i = 0; i < rank; i++) {
1435            startDims[i] = 0;
1436            selectedDims[i] = 1;
1437            if (selectedStride != null) {
1438                selectedStride[i] = 1;
1439            }
1440        }
1441
1442        if (interlace == INTERLACE_PIXEL) {
1443            // 24-bit TRUE color image
1444            // [height][width][pixel components]
1445            selectedDims[2] = 3;
1446            selectedDims[0] = dims[0];
1447            selectedDims[1] = dims[1];
1448            selectedIndex[0] = 0; // index for height
1449            selectedIndex[1] = 1; // index for width
1450            selectedIndex[2] = 2; // index for depth
1451        }
1452        else if (interlace == INTERLACE_PLANE) {
1453            // 24-bit TRUE color image
1454            // [pixel components][height][width]
1455            selectedDims[0] = 3;
1456            selectedDims[1] = dims[1];
1457            selectedDims[2] = dims[2];
1458            selectedIndex[0] = 1; // index for height
1459            selectedIndex[1] = 2; // index for width
1460            selectedIndex[2] = 0; // index for depth
1461        }
1462        else if (rank == 1) {
1463            selectedIndex[0] = 0;
1464            selectedDims[0] = dims[0];
1465        }
1466        else if (rank == 2) {
1467            selectedIndex[0] = 0;
1468            selectedIndex[1] = 1;
1469            selectedDims[0] = dims[0];
1470            selectedDims[1] = dims[1];
1471        }
1472        else if (rank > 2) {
            // hdf-java 2.5 version: 3D datasets were arranged in the order of
            // [frame][height][width] by default:
            // selectedIndex[1] = rank-1; // width, the fastest dimension
            // selectedIndex[0] = rank-2; // height
            // selectedIndex[2] = rank-3; // frames

            // (5/4/09) Modified the default dimension order. See bug#1379.
            // The default order is now the natural order, which is what users want in most situations:
            // selectedIndex[0] = 0
            // selectedIndex[1] = 1
            // selectedIndex[2] = 2
            // Most NPOESS data is in the order above.
1487
1488            if (isImage) {
1489                // 3D dataset is arranged in the order of [frame][height][width]
1490                selectedIndex[1] = rank - 1; // width, the fastest dimension
1491                selectedIndex[0] = rank - 2; // height
1492                selectedIndex[2] = rank - 3; // frames
1493            }
1494            else {
1495                selectedIndex[0] = 0; // width, the fastest dimension
1496                selectedIndex[1] = 1; // height
1497                selectedIndex[2] = 2; // frames
1498            }
1499
1500            selectedDims[selectedIndex[0]] = dims[selectedIndex[0]];
1501            selectedDims[selectedIndex[1]] = dims[selectedIndex[1]];
1502            selectedDims[selectedIndex[2]] = dims[selectedIndex[2]];
1503        }
1504
1505        isDataLoaded = false;
1506
1507        if ((rank > 1) && (selectedIndex[0] > selectedIndex[1]))
1508            isDefaultImageOrder = false;
1509        else
1510            isDefaultImageOrder = true;
1511    }
1512
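    /**
     * Creates a scalar dataset in a file with/without chunking and compression.
     * <p>
     * This convenience overload delegates to
     * {@link #create(String, Group, Datatype, long[], long[], long[], int, Object, Object)} with a null fill value.
     *
     * @param name
     *            the name of the dataset to create.
     * @param pgroup
     *            parent group where the new dataset is created.
     * @param type
     *            the datatype of the dataset.
     * @param dims
     *            the dimension size of the dataset.
     * @param maxdims
     *            the max dimension size of the dataset. maxdims is set to dims if maxdims = null.
     * @param chunks
     *            the chunk size of the dataset. No chunking if chunks = null.
     * @param gzip
     *            GZIP compression level (1 to 9). No compression if gzip&lt;=0.
     * @param data
     *            the array of data values.
     *
     * @return the new scalar dataset if successful; otherwise returns null.
     *
     * @throws Exception if there is a failure.
     */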
1513    public static Dataset create(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims,
1514            long[] chunks, int gzip, Object data) throws Exception {
1515        return create(name, pgroup, type, dims, maxdims, chunks, gzip, null, data);
1516    }
1517
1518    /**
1519     * Creates a scalar dataset in a file with/without chunking and compression.
1520     * <p>
1521     * The following example shows how to create a string dataset using this function.
1522     *
1523     * <pre>
1524     * H5File file = new H5File(&quot;test.h5&quot;, H5File.CREATE);
1525     * int max_str_len = 120;
1526     * Datatype strType = new H5Datatype(Datatype.CLASS_STRING, max_str_len, Datatype.NATIVE, Datatype.NATIVE);
1527     * int size = 10000;
1528     * long dims[] = { size };
1529     * long chunks[] = { 1000 };
1530     * int gzip = 9;
1531     * String strs[] = new String[size];
1532     *
1533     * for (int i = 0; i &lt; size; i++)
1534     *     strs[i] = String.valueOf(i);
1535     *
1536     * file.open();
1537     * file.createScalarDS(&quot;/1D scalar strings&quot;, null, strType, dims, null, chunks, gzip, strs);
1538     *
1539     * try {
1540     *     file.close();
1541     * }
1542     * catch (Exception ex) {
1543     * }
1544     * </pre>
1545     *
1546     * @param name
1547     *            the name of the dataset to create.
1548     * @param pgroup
1549     *            parent group where the new dataset is created.
1550     * @param type
1551     *            the datatype of the dataset.
1552     * @param dims
1553     *            the dimension size of the dataset.
1554     * @param maxdims
1555     *            the max dimension size of the dataset. maxdims is set to dims if maxdims = null.
1556     * @param chunks
     *            the chunk size of the dataset. No chunking if chunks = null.
1558     * @param gzip
1559     *            GZIP compression level (1 to 9). No compression if gzip&lt;=0.
1560     * @param fillValue
1561     *            the default data value.
1562     * @param data
1563     *            the array of data values.
1564     *
1565     * @return the new scalar dataset if successful; otherwise returns null.
1566     *
1567     * @throws Exception if there is a failure.
1568     */
1569    public static Dataset create(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims,
1570            long[] chunks, int gzip, Object fillValue, Object data) throws Exception {
1571        H5ScalarDS dataset = null;
1572        String fullPath = null;
1573        long did = HDF5Constants.H5I_INVALID_HID;
1574        long plist = HDF5Constants.H5I_INVALID_HID;
1575        long sid = HDF5Constants.H5I_INVALID_HID;
1576        long tid = HDF5Constants.H5I_INVALID_HID;
1577
1578        if ((pgroup == null) || (name == null) || (dims == null) || ((gzip > 0) && (chunks == null))) {
1579            log.debug("create(): one or more parameters are null");
1580            return null;
1581        }
1582
1583        H5File file = (H5File) pgroup.getFileFormat();
1584        if (file == null) {
1585            log.debug("create(): parent group FileFormat is null");
1586            return null;
1587        }
1588
1589        String path = HObject.SEPARATOR;
1590        if (!pgroup.isRoot()) {
1591            path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR;
1592            if (name.endsWith("/")) {
1593                name = name.substring(0, name.length() - 1);
1594            }
1595            int idx = name.lastIndexOf('/');
1596            if (idx >= 0) {
1597                name = name.substring(idx + 1);
1598            }
1599        }
1600
1601        fullPath = path + name;
1602        log.trace("create(): fullPath={}", fullPath);
1603
1604        // setup chunking and compression
1605        boolean isExtentable = false;
1606        if (maxdims != null) {
1607            for (int i = 0; i < maxdims.length; i++) {
1608                if (maxdims[i] == 0) {
1609                    maxdims[i] = dims[i];
1610                }
1611                else if (maxdims[i] < 0) {
1612                    maxdims[i] = HDF5Constants.H5S_UNLIMITED;
1613                }
1614
1615                if (maxdims[i] != dims[i]) {
1616                    isExtentable = true;
1617                }
1618            }
1619        }
1620
        // HDF5 requires chunking in order to define extendible datasets.
        // Chunking makes it possible to extend datasets efficiently without
        // having to reorganize storage excessively. If no chunk size is given,
        // use a default of min(dim, 64) in each dimension, which generally performs well.
1625        if ((chunks == null) && isExtentable) {
1626            chunks = new long[dims.length];
1627            for (int i = 0; i < dims.length; i++)
1628                chunks[i] = Math.min(dims[i], 64);
1629        }
1630
1631        // prepare the dataspace and datatype
1632        int rank = dims.length;
1633        log.trace("create(): rank={}", rank);
1634
1635        if ((tid = type.createNative()) >= 0) {
1636            log.trace("create(): createNative={}", tid);
1637            try {
1638                sid = H5.H5Screate_simple(rank, dims, maxdims);
1639                log.trace("create(): H5Screate_simple={}", sid);
1640
1641                // figure out creation properties
1642                plist = HDF5Constants.H5P_DEFAULT;
1643
1644                byte[] valFill = null;
1645                try {
1646                    valFill = parseFillValue(type, fillValue);
1647                }
1648                catch (Exception ex) {
1649                    log.debug("create(): parse fill value: ", ex);
1650                }
1651                log.trace("create(): parseFillValue={}", valFill);
1652
1653                if (chunks != null || valFill != null) {
1654                    plist = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
1655
1656                    if (chunks != null) {
1657                        H5.H5Pset_layout(plist, HDF5Constants.H5D_CHUNKED);
1658                        H5.H5Pset_chunk(plist, rank, chunks);
1659
1660                        // compression requires chunking
1661                        if (gzip > 0) {
1662                            H5.H5Pset_deflate(plist, gzip);
1663                        }
1664                    }
1665
1666                    if (valFill != null) {
1667                        H5.H5Pset_fill_value(plist, tid, valFill);
1668                    }
1669                }
1670
1671                long fid = file.getFID();
1672
1673                log.trace("create(): create dataset fid={}", fid);
1674                did = H5.H5Dcreate(fid, fullPath, tid, sid, HDF5Constants.H5P_DEFAULT, plist, HDF5Constants.H5P_DEFAULT);
1675                log.trace("create(): create dataset did={}", did);
1676                dataset = new H5ScalarDS(file, name, path);
1677            }
1678            finally {
1679                try {
1680                    H5.H5Pclose(plist);
1681                }
1682                catch (HDF5Exception ex) {
1683                    log.debug("create(): H5Pclose(plist {}) failure: ", plist, ex);
1684                }
1685                try {
1686                    H5.H5Sclose(sid);
1687                }
1688                catch (HDF5Exception ex) {
1689                    log.debug("create(): H5Sclose(sid {}) failure: ", sid, ex);
1690                }
1691                try {
1692                    H5.H5Tclose(tid);
1693                }
1694                catch (HDF5Exception ex) {
1695                    log.debug("create(): H5Tclose(tid {}) failure: ", tid, ex);
1696                }
1697                try {
1698                    H5.H5Dclose(did);
1699                }
1700                catch (HDF5Exception ex) {
1701                    log.debug("create(): H5Dclose(did {}) failure: ", did, ex);
1702                }
1703            }
1704        }
1705
1706        if (dataset != null) {
1707            pgroup.addToMemberList(dataset);
1708            if (data != null) {
1709                dataset.init();
1710                long[] selected = dataset.getSelectedDims();
1711                for (int i = 0; i < rank; i++) {
1712                    selected[i] = dims[i];
1713                }
1714                dataset.write(data);
1715            }
1716        }
1717
1718        return dataset;
1719    }
1720
1721    // check _FillValue, valid_min, valid_max, and valid_range
1722    private void checkCFconvention(long oid) throws Exception {
1723        Object avalue = getAttrValue(oid, "_FillValue");
1724
1725        if (avalue != null) {
1726            int n = Array.getLength(avalue);
1727            for (int i = 0; i < n; i++)
1728                addFilteredImageValue((Number) Array.get(avalue, i));
1729        }
1730
1731        if (imageDataRange == null || imageDataRange[1] <= imageDataRange[0]) {
1732            double x0 = 0;
1733            double x1 = 0;
1734            avalue = getAttrValue(oid, "valid_range");
1735            if (avalue != null) {
1736                try {
1737                    x0 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 0).toString());
1738                    x1 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 1).toString());
1739                    imageDataRange = new double[2];
1740                    imageDataRange[0] = x0;
1741                    imageDataRange[1] = x1;
1742                    return;
1743                }
1744                catch (Exception ex) {
1745                    log.debug("checkCFconvention(): valid_range: ", ex);
1746                }
1747            }
1748
1749            avalue = getAttrValue(oid, "valid_min");
1750            if (avalue != null) {
1751                try {
1752                    x0 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 0).toString());
1753                }
1754                catch (Exception ex) {
1755                    log.debug("checkCFconvention(): valid_min: ", ex);
1756                }
1757                avalue = getAttrValue(oid, "valid_max");
1758                if (avalue != null) {
1759                    try {
1760                        x1 = Double.parseDouble(java.lang.reflect.Array.get(avalue, 0).toString());
1761                        imageDataRange = new double[2];
1762                        imageDataRange[0] = x0;
1763                        imageDataRange[1] = x1;
1764                    }
1765                    catch (Exception ex) {
1766                        log.debug("checkCFconvention(): valid_max:", ex);
1767                    }
1768                }
1769            }
1770        } // (imageDataRange==null || imageDataRange[1]<=imageDataRange[0])
1771    }
1772
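    /**
     * Reads the value of the named attribute attached to the object identified by oid.
     * Returns null if the attribute does not exist or its value cannot be read.
     */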
1773    private Object getAttrValue(long oid, String aname) {
1774        log.trace("getAttrValue(): start: name={}", aname);
1775
1776        long aid = HDF5Constants.H5I_INVALID_HID;
1777        long atid = HDF5Constants.H5I_INVALID_HID;
1778        long asid = HDF5Constants.H5I_INVALID_HID;
1779        Object avalue = null;
1780
1781        try {
1782            // try to find attribute name
1783            if(H5.H5Aexists_by_name(oid, ".", aname, HDF5Constants.H5P_DEFAULT))
1784                aid = H5.H5Aopen_by_name(oid, ".", aname, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
1785        }
1786        catch (HDF5LibraryException ex5) {
1787            log.debug("getAttrValue(): Failed to find attribute {} : Expected", aname);
1788        }
1789        catch (Exception ex) {
1790            log.debug("getAttrValue(): try to find attribute {}:", aname, ex);
1791        }
1792        if (aid > 0) {
1793            try {
1794                atid = H5.H5Aget_type(aid);
1795                long tmptid = atid;
1796                atid = H5.H5Tget_native_type(tmptid);
1797                try {
1798                    H5.H5Tclose(tmptid);
1799                }
1800                catch (Exception ex) {
1801                    log.debug("getAttrValue(): H5Tclose(tmptid {}) failure: ", tmptid, ex);
1802                }
1803                H5Datatype dsDatatype = new H5Datatype(getFileFormat(), atid);
1804
1805                asid = H5.H5Aget_space(aid);
1806                long adims[] = null;
1807
1808                int arank = H5.H5Sget_simple_extent_ndims(asid);
1809                if (arank > 0) {
1810                    adims = new long[arank];
1811                    H5.H5Sget_simple_extent_dims(asid, adims, null);
1812                }
1813                log.trace("getAttrValue(): adims={}", adims);
1814
1815                // retrieve the attribute value
1816                long lsize = 1;
1817                if (adims != null) {
1818                    for (int j = 0; j < adims.length; j++) {
1819                        lsize *= adims[j];
1820                    }
1821                }
1822                log.trace("getAttrValue(): lsize={}", lsize);
1823
1824                if (lsize < Integer.MIN_VALUE || lsize > Integer.MAX_VALUE) throw new Exception("Invalid int size");
1825
1826                try {
1827                    avalue = H5Datatype.allocateArray(dsDatatype, (int) lsize);
1828                }
1829                catch (OutOfMemoryError e) {
1830                    log.debug("getAttrValue(): out of memory: ", e);
1831                    avalue = null;
1832                }
1833
1834                if (avalue != null) {
1835                    log.trace("getAttrValue(): read attribute id {} of size={}", atid, lsize);
1836                    H5.H5Aread(aid, atid, avalue);
1837
1838                    if (dsDatatype.isUnsigned()) {
1839                        log.trace("getAttrValue(): id {} is unsigned", atid);
1840                        avalue = convertFromUnsignedC(avalue, null);
1841                    }
1842                }
1843            }
1844            catch (Exception ex) {
1845                log.debug("getAttrValue(): try to get value for attribute {}: ", aname, ex);
1846            }
1847            finally {
1848                try {
1849                    H5.H5Tclose(atid);
1850                }
1851                catch (HDF5Exception ex) {
1852                    log.debug("getAttrValue(): H5Tclose(atid {}) failure: ", atid, ex);
1853                }
1854                try {
1855                    H5.H5Sclose(asid);
1856                }
1857                catch (HDF5Exception ex) {
1858                    log.debug("getAttrValue(): H5Sclose(asid {}) failure: ", asid, ex);
1859                }
1860                try {
1861                    H5.H5Aclose(aid);
1862                }
1863                catch (HDF5Exception ex) {
1864                    log.debug("getAttrValue(): H5Aclose(aid {}) failure: ", aid, ex);
1865                }
1866            }
1867        } // (aid > 0)
1868
1869        return avalue;
1870    }
1871
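    /**
     * Checks whether the object identified by objID has a string attribute with the given name whose
     * value matches the given value (case-insensitive).
     */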
1872    private boolean isStringAttributeOf(long objID, String name, String value) {
1873        boolean retValue = false;
1874        long aid = HDF5Constants.H5I_INVALID_HID;
1875        long atid = HDF5Constants.H5I_INVALID_HID;
1876
1877        try {
1878            if (H5.H5Aexists_by_name(objID, ".", name, HDF5Constants.H5P_DEFAULT)) {
1879                aid = H5.H5Aopen_by_name(objID, ".", name, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
1880                atid = H5.H5Aget_type(aid);
1881                int size = (int)H5.H5Tget_size(atid);
1882                byte[] attrValue = new byte[size];
1883                H5.H5Aread(aid, atid, attrValue);
1884                String strValue = new String(attrValue).trim();
1885                retValue = strValue.equalsIgnoreCase(value);
1886            }
1887        }
1888        catch (Exception ex) {
1889            log.debug("isStringAttributeOf(): try to find out interlace mode:", ex);
1890        }
1891        finally {
1892            try {
1893                H5.H5Tclose(atid);
1894            }
1895            catch (HDF5Exception ex) {
1896                log.debug("isStringAttributeOf(): H5Tclose(atid {}) failure: ", atid, ex);
1897            }
1898            try {
1899                H5.H5Aclose(aid);
1900            }
1901            catch (HDF5Exception ex) {
1902                log.debug("isStringAttributeOf(): H5Aclose(aid {}) failure: ", aid, ex);
1903            }
1904        }
1905
1906        return retValue;
1907    }
1908
1909    /*
1910     * (non-Javadoc)
1911     *
1912     * @see hdf.object.Dataset#copy(hdf.object.Group, java.lang.String, long[], java.lang.Object)
1913     */
1914    @Override
1915    public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff) throws Exception {
1916        // must give a location to copy
1917        if (pgroup == null) {
1918            log.debug("copy(): Parent group is null");
1919            return null;
1920        }
1921
1922        Dataset dataset = null;
1923        long srcdid = HDF5Constants.H5I_INVALID_HID;
1924        long dstdid = HDF5Constants.H5I_INVALID_HID;
1925        long plist = HDF5Constants.H5I_INVALID_HID;
1926        long tid = HDF5Constants.H5I_INVALID_HID;
1927        long sid = HDF5Constants.H5I_INVALID_HID;
1928        String dname = null;
1929        String path = null;
1930
1931        if (pgroup.isRoot()) {
1932            path = HObject.SEPARATOR;
1933        }
1934        else {
1935            path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR;
1936        }
1937        dname = path + dstName;
1938
1939        srcdid = open();
1940        if (srcdid >= 0) {
1941            try {
1942                tid = H5.H5Dget_type(srcdid);
1943                sid = H5.H5Screate_simple(dims.length, dims, null);
1944                plist = H5.H5Dget_create_plist(srcdid);
1945
1946                long[] chunks = new long[dims.length];
1947                boolean setChunkFlag = false;
1948                try {
1949                    H5.H5Pget_chunk(plist, dims.length, chunks);
1950                    for (int i = 0; i < dims.length; i++) {
1951                        if (dims[i] < chunks[i]) {
1952                            setChunkFlag = true;
1953                            if (dims[i] == 1)
1954                                chunks[i] = 1;
1955                            else
1956                                chunks[i] = dims[i] / 2;
1957                        }
1958                    }
1959                }
1960                catch (Exception ex) {
1961                    log.debug("copy(): chunk: ", ex);
1962                }
1963
1964                if (setChunkFlag)
1965                    H5.H5Pset_chunk(plist, dims.length, chunks);
1966
1967                try {
1968                    dstdid = H5.H5Dcreate(pgroup.getFID(), dname, tid, sid, HDF5Constants.H5P_DEFAULT, plist,
1969                            HDF5Constants.H5P_DEFAULT);
1970                }
1971                catch (Exception e) {
1972                    log.debug("copy(): H5Dcreate: ", e);
1973                }
1974                finally {
1975                    try {
1976                        H5.H5Dclose(dstdid);
1977                    }
1978                    catch (Exception ex2) {
1979                        log.debug("copy(): H5Dclose(dstdid {}) failure: ", dstdid, ex2);
1980                    }
1981                }
1982
1983                dataset = new H5ScalarDS(pgroup.getFileFormat(), dstName, path);
1984                if (buff != null) {
1985                    dataset.init();
1986                    dataset.write(buff);
1987                }
1988
1989                dstdid = dataset.open();
1990                if (dstdid >= 0) {
1991                    try {
1992                        H5File.copyAttributes(srcdid, dstdid);
1993                    }
1994                    finally {
1995                        try {
1996                            H5.H5Dclose(dstdid);
1997                        }
1998                        catch (Exception ex) {
1999                            log.debug("copy(): H5Dclose(dstdid {}) failure: ", dstdid, ex);
2000                        }
2001                    }
2002                }
2003            }
2004            finally {
2005                try {
2006                    H5.H5Pclose(plist);
2007                }
2008                catch (Exception ex) {
2009                    log.debug("copy(): H5Pclose(plist {}) failure: ", plist, ex);
2010                }
2011                try {
2012                    H5.H5Sclose(sid);
2013                }
2014                catch (Exception ex) {
2015                    log.debug("copy(): H5Sclose(sid {}) failure: ", sid, ex);
2016                }
2017                try {
2018                    H5.H5Tclose(tid);
2019                }
2020                catch (Exception ex) {
2021                    log.debug("copy(): H5Tclose(tid {}) failure: ", tid, ex);
2022                }
2023                try {
2024                    H5.H5Dclose(srcdid);
2025                }
2026                catch (Exception ex) {
2027                    log.debug("copy(): H5Dclose(srcdid {}) failure: ", srcdid, ex);
2028                }
2029            }
2030        }
2031
        if (dataset != null) {
            pgroup.addToMemberList(dataset);
            ((ScalarDS) dataset).setIsImage(isImage);
        }
2036
2037        return dataset;
2038    }
2039
2040    /*
2041     * (non-Javadoc)
2042     *
2043     * @see hdf.object.ScalarDS#getPalette()
2044     */
2045    @Override
2046    public byte[][] getPalette() {
2047        if (palette == null) {
2048            palette = readPalette(0);
2049        }
2050
2051        return palette;
2052    }
2053
2054    /*
2055     * (non-Javadoc)
2056     *
2057     * @see hdf.object.ScalarDS#getPaletteName(int)
2058     */
2059    @Override
2060    public String getPaletteName(int idx) {
2061        byte[] refs = getPaletteRefs();
2062        long did = HDF5Constants.H5I_INVALID_HID;
2063        long palID = HDF5Constants.H5I_INVALID_HID;
2064        String paletteName = null;
2065
2066        if (refs == null) {
2067            log.debug("getPaletteName(): refs is null");
2068            return null;
2069        }
2070
2071        byte[] refBuf = new byte[8];
2072
2073        try {
2074            System.arraycopy(refs, idx * 8, refBuf, 0, 8);
2075        }
2076        catch (Exception err) {
2077            log.debug("getPaletteName(): arraycopy failure: ", err);
2078            return null;
2079        }
2080
2081        did = open();
2082        if (did >= 0) {
2083            try {
2084                palID = H5.H5Rdereference(getFID(), HDF5Constants.H5P_DEFAULT, HDF5Constants.H5R_OBJECT, refBuf);
2085                paletteName = H5.H5Iget_name(palID);
2086            }
2087            catch (Exception ex) {
                log.debug("getPaletteName(): palette reference lookup failure: ", ex);
2089            }
2090            finally {
2091                close(palID);
2092                close(did);
2093            }
2094        }
2095
2096        return paletteName;
2097    }
2098
2099    /*
2100     * (non-Javadoc)
2101     *
2102     * @see hdf.object.ScalarDS#readPalette(int)
2103     */
2104    @Override
2105    public byte[][] readPalette(int idx) {
2106        byte[][] thePalette = null;
2107        byte[] refs = getPaletteRefs();
2108        long did = HDF5Constants.H5I_INVALID_HID;
2109        long palID = HDF5Constants.H5I_INVALID_HID;
2110        long tid = HDF5Constants.H5I_INVALID_HID;
2111
2112        if (refs == null) {
2113            log.debug("readPalette(): refs is null");
2114            return null;
2115        }
2116
2117        byte[] p = null;
2118        byte[] refBuf = new byte[8];
2119
2120        try {
2121            System.arraycopy(refs, idx * 8, refBuf, 0, 8);
2122        }
2123        catch (Exception err) {
2124            log.debug("readPalette(): arraycopy failure: ", err);
2125            return null;
2126        }
2127
2128        did = open();
2129        if (did >= 0) {
2130            try {
2131                palID = H5.H5Rdereference(getFID(), HDF5Constants.H5P_DEFAULT, HDF5Constants.H5R_OBJECT, refBuf);
2132                log.trace("readPalette(): H5Rdereference: {}", palID);
2133                tid = H5.H5Dget_type(palID);
2134
2135                // support only 3*256 byte palette data
2136                if (H5.H5Dget_storage_size(palID) <= 768) {
2137                    p = new byte[3 * 256];
2138                    H5.H5Dread(palID, tid, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, p);
2139                }
2140            }
2141            catch (HDF5Exception ex) {
2142                log.debug("readPalette(): failure: ", ex);
2143                p = null;
2144            }
2145            finally {
2146                try {
2147                    H5.H5Tclose(tid);
2148                }
2149                catch (HDF5Exception ex2) {
2150                    log.debug("readPalette(): H5Tclose(tid {}) failure: ", tid, ex2);
2151                }
2152                close(palID);
2153                close(did);
2154            }
2155        }
2156
2157        if (p != null) {
2158            thePalette = new byte[3][256];
2159            for (int i = 0; i < 256; i++) {
2160                thePalette[0][i] = p[i * 3];
2161                thePalette[1][i] = p[i * 3 + 1];
2162                thePalette[2][i] = p[i * 3 + 2];
2163            }
2164        }
2165
2166        return thePalette;
2167    }
2168
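    /**
     * Converts a fill value (a String, or the first element of an array) into the raw byte representation
     * used with H5Pset_fill_value for the given datatype. Returns null if the value cannot be parsed.
     */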
2169    private static byte[] parseFillValue(Datatype type, Object fillValue) throws Exception {
2170        byte[] data = null;
2171
2172        if (type == null || fillValue == null) {
2173            log.debug("parseFillValue(): datatype or fill value is null");
2174            return null;
2175        }
2176
2177        int datatypeClass = type.getDatatypeClass();
2178        int datatypeSize = (int)type.getDatatypeSize();
2179
2180        double valDbl = 0;
2181        String valStr = null;
2182
2183        if (fillValue instanceof String) {
2184            valStr = (String) fillValue;
2185        }
2186        else if (fillValue.getClass().isArray()) {
2187            valStr = Array.get(fillValue, 0).toString();
2188        }
2189
2190        if (!type.isString()) {
2191            try {
2192                valDbl = Double.parseDouble(valStr);
2193            }
2194            catch (NumberFormatException ex) {
2195                log.debug("parseFillValue(): parse error: ", ex);
2196                return null;
2197            }
2198        }
2199
2200        try {
2201            switch (datatypeClass) {
2202                case Datatype.CLASS_INTEGER:
2203                case Datatype.CLASS_ENUM:
2204                case Datatype.CLASS_CHAR:
2205                    log.trace("parseFillValue(): class CLASS_INT-ENUM-CHAR");
2206                    if (datatypeSize == 1) {
2207                        data = new byte[] { (byte) valDbl };
2208                    }
2209                    else if (datatypeSize == 2) {
2210                        data = HDFNativeData.shortToByte((short) valDbl);
2211                    }
2212                    else if (datatypeSize == 8) {
2213                        data = HDFNativeData.longToByte((long) valDbl);
2214                    }
2215                    else {
2216                        data = HDFNativeData.intToByte((int) valDbl);
2217                    }
2218                    break;
2219                case Datatype.CLASS_FLOAT:
2220                    log.trace("parseFillValue(): class CLASS_FLOAT");
2221                    if (datatypeSize > 8) {
2222                        data =  valStr.getBytes();
2223                    }
2224                    else if (datatypeSize == 8) {
2225                        data = HDFNativeData.doubleToByte(valDbl);
2226                    }
2227                    else {
2228                        data = HDFNativeData.floatToByte((float) valDbl);
2229                    }
2230                    break;
2231                case Datatype.CLASS_STRING:
2232                    log.trace("parseFillValue(): class CLASS_STRING");
2233                    if (valStr != null)
2234                        data = valStr.getBytes();
2235                    break;
2236                case Datatype.CLASS_REFERENCE:
2237                    log.trace("parseFillValue(): class CLASS_REFERENCE");
2238                    data = HDFNativeData.longToByte((long) valDbl);
2239                    break;
2240                default:
2241                    log.debug("parseFillValue(): datatypeClass unknown");
2242                    break;
2243            } // (datatypeClass)
2244        }
2245        catch (Exception ex) {
2246            log.debug("parseFillValue(): failure: ", ex);
2247            data = null;
2248        }
2249
2250        return data;
2251    }
2252
2253    /*
2254     * (non-Javadoc)
2255     *
2256     * @see hdf.object.ScalarDS#getPaletteRefs()
2257     */
2258    @Override
2259    public byte[] getPaletteRefs() {
2260        if (!isInited())
2261            init(); // init will be called to get refs
2262
2263        return paletteRefs;
2264    }
2265
    /**
     * Reads the references of palettes into a byte array. Each reference requires eight bytes of storage;
     * therefore, the array length is 8*numberOfPalettes.
     */
2270    private byte[] getPaletteRefs(long did) {
2271        long aid = HDF5Constants.H5I_INVALID_HID;
2272        long sid = HDF5Constants.H5I_INVALID_HID;
2273        long atype = HDF5Constants.H5I_INVALID_HID;
2274        int size = 0;
2275        int rank = 0;
2276        byte[] refbuf = null;
2277
2278        try {
2279            if(H5.H5Aexists_by_name(did, ".", "PALETTE", HDF5Constants.H5P_DEFAULT)) {
2280                aid = H5.H5Aopen_by_name(did, ".", "PALETTE", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
2281                sid = H5.H5Aget_space(aid);
2282                rank = H5.H5Sget_simple_extent_ndims(sid);
2283                size = 1;
2284                if (rank > 0) {
2285                    long[] dims = new long[rank];
2286                    H5.H5Sget_simple_extent_dims(sid, dims, null);
2287                    log.trace("getPaletteRefs(): rank={}, dims={}", rank, dims);
2288                    for (int i = 0; i < rank; i++) {
2289                        size *= (int) dims[i];
2290                    }
2291                }
2292
                // use long arithmetic so the overflow check is effective for large palettes
                if (((long) size * 8) > Integer.MAX_VALUE) throw new HDF5Exception("Invalid int size");
2294
2295                refbuf = new byte[size * 8];
2296                atype = H5.H5Aget_type(aid);
2297
2298                H5.H5Aread(aid, atype, refbuf);
2299            }
2300        }
2301        catch (HDF5Exception ex) {
2302            log.debug("getPaletteRefs(): Palette attribute search failed: Expected", ex);
2303            refbuf = null;
2304        }
2305        finally {
2306            try {
2307                H5.H5Tclose(atype);
2308            }
2309            catch (HDF5Exception ex2) {
2310                log.debug("getPaletteRefs(): H5Tclose(atype {}) failure: ", atype, ex2);
2311            }
2312            try {
2313                H5.H5Sclose(sid);
2314            }
2315            catch (HDF5Exception ex2) {
2316                log.debug("getPaletteRefs(): H5Sclose(sid {}) failure: ", sid, ex2);
2317            }
2318            try {
2319                H5.H5Aclose(aid);
2320            }
2321            catch (HDF5Exception ex2) {
2322                log.debug("getPaletteRefs(): H5Aclose(aid {}) failure: ", aid, ex2);
2323            }
2324        }
2325
2326        return refbuf;
2327    }
2328
2329    /**
     * H5Dset_extent verifies that the dataset has at least the extent given by newDims, extending it if necessary.
     * The dimensionality of newDims is the same as that of the dataspace of the dataset being changed.
     * <p>
     * This method can be applied to: 1) any dataset with unlimited dimensions, or 2) a dataset with fixed
     * dimensions whose current dimension sizes are less than the maximum sizes set with maxdims (see
     * H5Screate_simple).
2336     *
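     * <p>
     * A minimal usage sketch (assuming the dataset was created with unlimited or sufficiently large maxdims;
     * the file path and dataset path below are illustrative):
     *
     * <pre>
     * H5File file = new H5File(&quot;test.h5&quot;, H5File.WRITE);
     * file.open();
     * H5ScalarDS dset = (H5ScalarDS) file.get(&quot;/2D int data&quot;);
     * long[] newDims = { 200, 100 }; // grow the dataset to 200 x 100
     * dset.extend(newDims);
     * </pre>
     *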
2337     * @param newDims the dimension target size
2338     *
2339     * @throws HDF5Exception
2340     *             If there is an error at the HDF5 library level.
2341     */
2342    public void extend(long[] newDims) throws HDF5Exception {
2343        long did = HDF5Constants.H5I_INVALID_HID;
2344        long sid = HDF5Constants.H5I_INVALID_HID;
2345
2346        did = open();
2347        if (did >= 0) {
2348            try {
2349                H5.H5Dset_extent(did, newDims);
2350                H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_GLOBAL);
2351                sid = H5.H5Dget_space(did);
2352                long[] checkDims = new long[rank];
2353                H5.H5Sget_simple_extent_dims(sid, checkDims, null);
2354                log.trace("extend(): rank={}, checkDims={}", rank, checkDims);
2355                for (int i = 0; i < rank; i++) {
2356                    if (checkDims[i] != newDims[i]) {
2357                        log.debug("extend(): error extending dataset");
2358                        throw new HDF5Exception("error extending dataset " + getName());
2359                    }
2360                }
2361                dims = checkDims;
2362            }
2363            catch (Exception e) {
2364                log.debug("extend(): failure: ", e);
2365                throw new HDF5Exception(e.getMessage());
2366            }
2367            finally {
2368                if (sid > 0)
2369                    H5.H5Sclose(sid);
2370
2371                close(did);
2372            }
2373        }
2374    }
2375
2376    /*
2377     * (non-Javadoc)
2378     *
2379     * @see hdf.object.Dataset#isVirtual()
2380     */
2381    @Override
2382    public boolean isVirtual() {
2383        return isVirtual;
2384    }
2385
2386    /*
2387     * (non-Javadoc)
2388     *
2389     * @see hdf.object.Dataset#getVirtualFilename(int)
2390     */
2391    @Override
2392    public String getVirtualFilename(int index) {
2393        if(isVirtual)
2394            return virtualNameList.get(index);
2395        else
2396            return null;
2397    }
2398
2399    /*
2400     * (non-Javadoc)
2401     *
2402     * @see hdf.object.Dataset#getVirtualMaps()
2403     */
2404    @Override
2405    public int getVirtualMaps() {
2406        if(isVirtual)
2407            return virtualNameList.size();
2408        else
2409            return -1;
2410    }
2411}