001/*****************************************************************************
002 * Copyright by The HDF Group.                                               *
003 * Copyright by the Board of Trustees of the University of Illinois.         *
004 * All rights reserved.                                                      *
005 *                                                                           *
006 * This file is part of the HDF Java Products distribution.                  *
007 * The full copyright notice, including terms governing use, modification,   *
008 * and redistribution, is contained in the file COPYING.                     *
009 * COPYING can be found at the root of the source code distribution tree.    *
010 * If you do not have access to this file, you may request a copy from       *
011 * help@hdfgroup.org.                                                        *
012 ****************************************************************************/
013
014package hdf.object.h5;
015
016import java.lang.reflect.Array;
017import java.text.DecimalFormat;
018import java.util.List;
019import java.util.Vector;
020
021import hdf.hdf5lib.H5;
022import hdf.hdf5lib.HDF5Constants;
023import hdf.hdf5lib.HDFNativeData;
024import hdf.hdf5lib.exceptions.HDF5DataFiltersException;
025import hdf.hdf5lib.exceptions.HDF5Exception;
026import hdf.hdf5lib.exceptions.HDF5LibraryException;
027import hdf.hdf5lib.structs.H5O_info_t;
028import hdf.object.Attribute;
029import hdf.object.Dataset;
030import hdf.object.Datatype;
031import hdf.object.FileFormat;
032import hdf.object.Group;
033import hdf.object.HObject;
034import hdf.object.ScalarDS;
035
036/**
037 * H5ScalarDS describes a multi-dimension array of HDF5 scalar or atomic data types, such as byte, int, short, long,
038 * float, double and string, and operations performed on the scalar dataset.
039 * <p>
040 * The library predefines a modest number of datatypes. For details,
041 * read <a href="http://hdfgroup.org/HDF5/doc/Datatypes.html">The Datatype Interface (H5T).</a>
042 *
043 * @version 1.1 9/4/2007
044 * @author Peter X. Cao
045 */
046public class H5ScalarDS extends ScalarDS {
047    private static final long serialVersionUID = 2887517608230611642L;
048
049    private final static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5ScalarDS.class);
050
051    /**
052     * The list of attributes of this data object. Members of the list are instance of Attribute.
053     */
054    private List<Attribute> attributeList;
055
056    private int nAttributes = -1;
057
058    private H5O_info_t obj_info;
059
060    /**
061     * The byte array containing references of palettes. Each reference requires eight bytes storage. Therefore, the
062     * array length is 8*numberOfPalettes.
063     */
064    private byte[] paletteRefs;
065
066    /** flag to indicate if the dataset is a variable length */
067    private boolean isVLEN = false;
068
069    /** flag to indicate if the dataset is enum */
070    private boolean isEnum = false;
071
072    /** flag to indicate if the dataset is an external dataset */
073    private boolean isExternal = false;
074
075    private boolean isArrayOfCompound = false;
076
077    private boolean isArrayOfVLEN = false;
078    /**
079     * flag to indicate if the datatype in file is the same as dataype in memory
080     */
081    private boolean isNativeDatatype = false;
082
083    /** flag to indicate is the datatype is reg. ref. */
084    private boolean isRegRef = false;
085
086    /**
087     * Constructs an instance of a H5 scalar dataset with given file, dataset name and path.
088     * <p>
089     * For example, in H5ScalarDS(h5file, "dset", "/arrays/"), "dset" is the name of the dataset, "/arrays" is the group
090     * path of the dataset.
091     *
092     * @param theFile
093     *            the file that contains the data object.
094     * @param theName
095     *            the name of the data object, e.g. "dset".
096     * @param thePath
097     *            the full path of the data object, e.g. "/arrays/".
098     */
099    public H5ScalarDS(FileFormat theFile, String theName, String thePath) {
100        this(theFile, theName, thePath, null);
101    }
102
103    /**
104     * @deprecated Not for public use in the future.<br>
105     *             Using {@link #H5ScalarDS(FileFormat, String, String)}
106     *
107     * @param theFile
108     *            the file that contains the data object.
109     * @param theName
110     *            the name of the data object, e.g. "dset".
111     * @param thePath
112     *            the full path of the data object, e.g. "/arrays/".
113     * @param oid
114     *            the oid of the data object.
115     */
116    @Deprecated
117    public H5ScalarDS(FileFormat theFile, String theName, String thePath, long[] oid) {
118        super(theFile, theName, thePath, oid);
119        unsignedConverted = false;
120        paletteRefs = null;
121        obj_info = new H5O_info_t(-1L, -1L, 0, 0, -1L, 0L, 0L, 0L, 0L, null, null, null);
122
123        if ((oid == null) && (theFile != null)) {
124            // retrieve the object ID
125            try {
126                byte[] ref_buf = H5.H5Rcreate(theFile.getFID(), this.getFullName(), HDF5Constants.H5R_OBJECT, -1);
127                this.oid = new long[1];
128                this.oid[0] = HDFNativeData.byteToLong(ref_buf, 0);
129            }
130            catch (Exception ex) {
131                log.debug("constructor ID {} for {} failed H5Rcreate", theFile.getFID(), this.getFullName());
132            }
133        }
134    }
135
136    /*
137     * (non-Javadoc)
138     *
139     * @see hdf.object.HObject#open()
140     */
141    @Override
142    public int open() {
143        log.trace("open(): start");
144        int did = -1;
145
146        try {
147            did = H5.H5Dopen(getFID(), getPath() + getName(), HDF5Constants.H5P_DEFAULT);
148        }
149        catch (HDF5Exception ex) {
150            log.debug("open(): Failed to open dataset {}", getPath() + getName());
151            did = -1;
152        }
153
154        log.trace("open(): finish");
155        return did;
156    }
157
158    /*
159     * (non-Javadoc)
160     *
161     * @see hdf.object.HObject#close(int)
162     */
163    @Override
164    public void close(int did) {
165        if (did >= 0) {
166            try {
167                H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_LOCAL);
168            }
169            catch (Exception ex) {
170                log.debug("close(): H5Fflush(did {}) failure: ", did, ex);
171            }
172            try {
173                H5.H5Dclose(did);
174            }
175            catch (HDF5Exception ex) {
176                log.debug("close(): H5Dclose(did {}) failure: ", did, ex);
177            }
178        }
179    }
180
181    /*
182     * (non-Javadoc)
183     *
184     * @see hdf.object.Dataset#init()
185     */
186    @Override
187    public void init() {
188        log.trace("init(): start");
189
190        if (rank > 0) {
191            resetSelection();
192            log.trace("init(): Dataset already intialized");
193            log.trace("init(): finish");
194            return; // already called. Initialize only once
195        }
196
197        int did = -1;
198        int sid = -1;
199        int tid = -1;
200        int tclass = -1;
201
202        did = open();
203        if (did >= 0) {
204            // check if it is an external dataset
205            int pid = -1;
206            try {
207                log.trace("init() check if it is an external dataset");
208                pid = H5.H5Dget_create_plist(did);
209                int nfiles = H5.H5Pget_external_count(pid);
210                log.trace("init() external dataset nfiles={}",nfiles);
211                isExternal = (nfiles > 0);
212            }
213            catch (Exception ex) {
214                log.debug("init(): check if it is an external dataset: ", ex);
215            }
216            finally {
217                try {
218                    H5.H5Pclose(pid);
219                }
220                catch (Exception ex) {
221                    log.debug("init(): H5Pclose(pid {}) failure: ", pid, ex);
222                }
223            }
224
225            paletteRefs = getPaletteRefs(did);
226
227            try {
228                sid = H5.H5Dget_space(did);
229                rank = H5.H5Sget_simple_extent_ndims(sid);
230                tid = H5.H5Dget_type(did);
231                tclass = H5.H5Tget_class(tid);
232                log.debug("init(): H5Tget_class: {} is Array {}", tclass, HDF5Constants.H5T_ARRAY);
233
234                int tmptid = 0;
235                if (tclass == HDF5Constants.H5T_ARRAY) {
236                    // use the base datatype to define the array
237                    int basetid = -1;
238                    try {
239                        // use the base datatype to define the array
240                        basetid = H5.H5Tget_super(tid);
241                        int baseclass = H5.H5Tget_class(basetid);
242                        isArrayOfCompound = (baseclass == HDF5Constants.H5T_COMPOUND);
243                        isArrayOfVLEN = (baseclass == HDF5Constants.H5T_VLEN);
244                        isVLEN = isVLEN || ((baseclass == HDF5Constants.H5T_VLEN) || H5.H5Tis_variable_str(basetid));
245                        isVLEN = isVLEN || H5.H5Tdetect_class(basetid, HDF5Constants.H5T_VLEN);
246                    }
247                    catch (Exception ex) {
248                        log.debug("init():  use the base datatype to define the array: ", ex);
249                    }
250                    finally {
251                        try {
252                            H5.H5Pclose(basetid);
253                        }
254                        catch (Exception ex) {
255                            log.debug("init(): H5Pclose(basetid {}) failure: ", basetid, ex);
256                        }
257                    }
258                }
259
260                isText = (tclass == HDF5Constants.H5T_STRING);
261                isVLEN = isVLEN || ((tclass == HDF5Constants.H5T_VLEN) || H5.H5Tis_variable_str(tid));
262                isEnum = (tclass == HDF5Constants.H5T_ENUM);
263                isUnsigned = H5Datatype.isUnsigned(tid);
264                isRegRef = H5.H5Tequal(tid, HDF5Constants.H5T_STD_REF_DSETREG);
265                log.trace(
266                        "init(): tid={} is tclass={} has isText={} : isVLEN={} : isEnum={} : isUnsigned={} : isRegRef={}",
267                        tid, tclass, isText, isVLEN, isEnum, isUnsigned, isRegRef);
268
269                // check if datatype in file is native datatype
270                try {
271                    tmptid = H5.H5Tget_native_type(tid);
272                    isNativeDatatype = H5.H5Tequal(tid, tmptid);
273                    log.trace("init(): isNativeDatatype={}", isNativeDatatype);
274
275                    /* see if fill value is defined */
276                    pid = H5.H5Dget_create_plist(did);
277                    int[] fillStatus = { 0 };
278                    if (H5.H5Pfill_value_defined(pid, fillStatus) >= 0) {
279                        if (fillStatus[0] == HDF5Constants.H5D_FILL_VALUE_USER_DEFINED) {
280                            fillValue = H5Datatype.allocateArray(tmptid, 1);
281                            log.trace("init(): fillValue={}", fillValue);
282                            try {
283                                H5.H5Pget_fill_value(pid, tmptid, fillValue);
284                                log.trace("init(): H5Pget_fill_value={}", fillValue);
285                                if (fillValue != null) {
286                                    if (isFillValueConverted)
287                                        fillValue = ScalarDS.convertToUnsignedC(fillValue, null);
288
289                                    int n = Array.getLength(fillValue);
290                                    for (int i = 0; i < n; i++)
291                                        addFilteredImageValue((Number) Array.get(fillValue, i));
292                                }
293                            }
294                            catch (Exception ex2) {
295                                log.debug("init(): fill value was defined: ", ex2);
296                                fillValue = null;
297                            }
298                        }
299                    }
300                }
301                catch (HDF5Exception ex) {
302                    log.debug("init(): check if datatype in file is native datatype: ", ex);
303                }
304                finally {
305                    try {
306                        H5.H5Tclose(tmptid);
307                    }
308                    catch (HDF5Exception ex) {
309                        log.debug("init(): H5Tclose(tmptid {}) failure: ", tmptid, ex);
310                    }
311                    try {
312                        H5.H5Pclose(pid);
313                    }
314                    catch (Exception ex) {
315                        log.debug("init(): H5Pclose(pid {}) failure: ", pid, ex);
316                    }
317                }
318
319                if (rank == 0) {
320                    // a scalar data point
321                    rank = 1;
322                    dims = new long[1];
323                    dims[0] = 1;
324                    log.trace("init() rank is a scalar data point");
325                }
326                else {
327                    dims = new long[rank];
328                    maxDims = new long[rank];
329                    H5.H5Sget_simple_extent_dims(sid, dims, maxDims);
330                    log.trace("init() rank={}, dims={}, maxDims={}", rank, dims, maxDims);
331                }
332            }
333            catch (HDF5Exception ex) {
334                log.debug("init(): ", ex);
335            }
336            finally {
337                try {
338                    H5.H5Tclose(tid);
339                }
340                catch (HDF5Exception ex2) {
341                    log.debug("init(): H5Tclose(tid {}) failure: ", tid, ex2);
342                }
343                try {
344                    H5.H5Sclose(sid);
345                }
346                catch (HDF5Exception ex2) {
347                    log.debug("init(): H5Sclose(sid {}) failure: ", sid, ex2);
348                }
349            }
350
351            // check for the type of image and interlace mode
352            // it is a true color image at one of three cases:
353            // 1) IMAGE_SUBCLASS = IMAGE_TRUECOLOR,
354            // 2) INTERLACE_MODE = INTERLACE_PIXEL,
355            // 3) INTERLACE_MODE = INTERLACE_PLANE
356            if ((rank >= 3) && isImage) {
357                interlace = -1;
358                isTrueColor = isStringAttributeOf(did, "IMAGE_SUBCLASS", "IMAGE_TRUECOLOR");
359
360                if (isTrueColor) {
361                    interlace = INTERLACE_PIXEL;
362                    if (isStringAttributeOf(did, "INTERLACE_MODE", "INTERLACE_PLANE")) {
363                        interlace = INTERLACE_PLANE;
364                    }
365                }
366            }
367
368            close(did);
369        }
370        else {
371            log.debug("init(): failed to open dataset");
372        }
373
374        startDims = new long[rank];
375        selectedDims = new long[rank];
376        resetSelection();
377        log.trace("init(): rank={}, startDims={}, selectedDims={}", rank, startDims, selectedDims);
378        log.trace("init(): finish");
379    }
380
381    /*
382     * (non-Javadoc)
383     *
384     * @see hdf.object.DataFormat#hasAttribute()
385     */
386    public boolean hasAttribute() {
387        obj_info.num_attrs = nAttributes;
388
389        log.trace("hasAttribute start: nAttributes = {}", nAttributes);
390        if (obj_info.num_attrs < 0) {
391            int did = open();
392            if (did >= 0) {
393                int tid = -1;
394                obj_info.num_attrs = 0;
395
396                try {
397                    obj_info = H5.H5Oget_info(did);
398                    nAttributes = (int) obj_info.num_attrs;
399
400                    tid = H5.H5Dget_type(did);
401
402                    int tclass = H5.H5Tget_class(tid);
403                    isText = (tclass == HDF5Constants.H5T_STRING);
404                    isVLEN = ((tclass == HDF5Constants.H5T_VLEN) || H5.H5Tis_variable_str(tid));
405                    isEnum = (tclass == HDF5Constants.H5T_ENUM);
406                    log.trace("hasAttribute(): tclass type: isText={},isVLEN={},isEnum={}", isText, isVLEN, isEnum);
407                }
408                catch (Exception ex) {
409                    obj_info.num_attrs = 0;
410                    log.debug("hasAttribute(): get object info: ", ex);
411                }
412                finally {
413                    try {
414                        H5.H5Tclose(tid);
415                    } catch (HDF5Exception ex) {
416                        log.debug("hasAttribute(): H5Tclose(tid {}) failure: ", tid, ex);
417                    }
418                }
419
420                if(nAttributes > 0) {
421                    // test if it is an image
422                    // check image
423                    Object avalue = getAttrValue(did, "CLASS");
424                    if (avalue != null) {
425                        try {
426                            isImageDisplay = isImage = "IMAGE".equalsIgnoreCase(new String((byte[]) avalue).trim());
427                            log.trace("hasAttribute(): isImageDisplay dataset: {} with value = {}", isImageDisplay, avalue);
428                        }
429                        catch (Throwable err) {
430                            log.debug("hasAttribute(): check image: ", err);
431                        }
432                    }
433
434                    // retrieve the IMAGE_MINMAXRANGE
435                    avalue = getAttrValue(did, "IMAGE_MINMAXRANGE");
436                    if (avalue != null) {
437                        double x0 = 0, x1 = 0;
438                        try {
439                            x0 = Double.valueOf(java.lang.reflect.Array.get(avalue, 0).toString()).doubleValue();
440                            x1 = Double.valueOf(java.lang.reflect.Array.get(avalue, 1).toString()).doubleValue();
441                        }
442                        catch (Exception ex2) {
443                            x0 = x1 = 0;
444                        }
445                        if (x1 > x0) {
446                            imageDataRange = new double[2];
447                            imageDataRange[0] = x0;
448                            imageDataRange[1] = x1;
449                        }
450                    }
451
452                    try {
453                        checkCFconvention(did);
454                    }
455                    catch (Exception ex) {
456                        log.debug("hasAttribute(): checkCFconvention(did {}):", did, ex);
457                    }
458                }
459                close(did);
460            }
461            else {
462                log.debug("hasAttribute(): could not open dataset");
463            }
464        }
465
466        log.trace("hasAttribute(): nAttributes={}", obj_info.num_attrs);
467        return (obj_info.num_attrs > 0);
468    }
469
470    /*
471     * (non-Javadoc)
472     *
473     * @see hdf.object.Dataset#getDatatype()
474     */
475    @Override
476    public Datatype getDatatype() {
477        log.trace("getDatatype(): start");
478        if (datatype == null) {
479            log.trace("getDatatype(): datatype == null");
480            int did = -1;
481            int tid = -1;
482
483            did = open();
484            if (did >= 0) {
485                try {
486                    tid = H5.H5Dget_type(did);
487
488                    log.trace("getDatatype(): isNativeDatatype", isNativeDatatype);
489                    if (!isNativeDatatype) {
490                        int tmptid = -1;
491                        try {
492                            tmptid = tid;
493                            tid = H5.H5Tget_native_type(tmptid);
494                        }
495                        finally {
496                            try {
497                                H5.H5Tclose(tmptid);
498                            }
499                            catch (Exception ex2) {
500                                log.debug("getDatatype(): H5Tclose(tmptid {}) failure: ", tmptid, ex2);
501                            }
502                        }
503                    }
504                    datatype = new H5Datatype(tid);
505                }
506                catch (Exception ex) {
507                    log.debug("getDatatype(): ", ex);
508                }
509                finally {
510                    try {
511                        H5.H5Tclose(tid);
512                    }
513                    catch (HDF5Exception ex) {
514                        log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex);
515                    }
516                    try {
517                        H5.H5Dclose(did);
518                    }
519                    catch (HDF5Exception ex) {
520                        log.debug("getDatatype(): H5Dclose(did {}) failure: ", did, ex);
521                    }
522                }
523            }
524        }
525
526        log.trace("getDatatype(): finish");
527        return datatype;
528    }
529
530    /*
531     * (non-Javadoc)
532     *
533     * @see hdf.object.Dataset#clear()
534     */
535    @Override
536    public void clear() {
537        super.clear();
538
539        if (attributeList != null) {
540            ((Vector<Attribute>) attributeList).setSize(0);
541        }
542    }
543
544    /*
545     * (non-Javadoc)
546     *
547     * @see hdf.object.Dataset#readBytes()
548     */
549    @Override
550    public byte[] readBytes() throws HDF5Exception {
551        log.trace("readBytes(0: start");
552
553        byte[] theData = null;
554
555        if (rank <= 0) {
556            init();
557        }
558
559        int did = open();
560        if (did >= 0) {
561            int fspace = -1;
562            int mspace = -1;
563            int tid = -1;
564
565            try {
566                long[] lsize = { 1 };
567                for (int j = 0; j < selectedDims.length; j++) {
568                    lsize[0] *= selectedDims[j];
569                }
570
571                fspace = H5.H5Dget_space(did);
572                mspace = H5.H5Screate_simple(rank, selectedDims, null);
573
574                // set the rectangle selection
575                // HDF5 bug: for scalar dataset, H5Sselect_hyperslab gives core dump
576                if (rank * dims[0] > 1) {
577                    H5.H5Sselect_hyperslab(fspace, HDF5Constants.H5S_SELECT_SET, startDims, selectedStride,
578                            selectedDims, null); // set block to 1
579                }
580
581                tid = H5.H5Dget_type(did);
582                int size = H5.H5Tget_size(tid) * (int) lsize[0];
583                log.trace("readBytes(): size = {}", size);
584
585                if (size < Integer.MIN_VALUE || size > Integer.MAX_VALUE) throw new Exception("Invalid int size");
586
587                theData = new byte[(int)size];
588                H5.H5Dread(did, tid, mspace, fspace, HDF5Constants.H5P_DEFAULT, theData);
589            }
590            catch (Exception ex) {
591                log.debug("readBytes(): failed to read data: ", ex);
592            }
593            finally {
594                try {
595                    H5.H5Sclose(fspace);
596                }
597                catch (Exception ex2) {
598                    log.debug("readBytes(): H5Sclose(fspace {}) failure: ", fspace, ex2);
599                }
600                try {
601                    H5.H5Sclose(mspace);
602                }
603                catch (Exception ex2) {
604                    log.debug("readBytes(): H5Sclose(mspace {}) failure: ", mspace, ex2);
605                }
606                try {
607                    H5.H5Tclose(tid);
608                }
609                catch (HDF5Exception ex2) {
610                    log.debug("readBytes(): H5Tclose(tid {}) failure: ", tid, ex2);
611                }
612                close(did);
613            }
614        }
615
616        log.trace("readBytes(): finish");
617        return theData;
618    }
619
620    /*
621     * (non-Javadoc)
622     *
623     * @see hdf.object.Dataset#read()
624     */
625    @Override
626    public Object read() throws Exception {
627        log.trace("read(): start");
628
629        Object theData = null;
630        int did = -1;
631        int tid = -1;
632        int spaceIDs[] = { -1, -1 }; // spaceIDs[0]=mspace, spaceIDs[1]=fspace
633
634        if (rank <= 0) {
635            init(); // read data information into memory
636        }
637
638        if (isArrayOfCompound) {
639            log.debug("read(): Cannot show data of type ARRAY of COMPOUND");
640            log.trace("read(): finish");
641            throw new HDF5Exception("Cannot show data with datatype of ARRAY of COMPOUND.");
642        }
643        if (isArrayOfVLEN) {
644            log.debug("read(): Cannot show data of type ARRAY of VL");
645            log.trace("read(): finish");
646            throw new HDF5Exception("Cannot show data with datatype of ARRAY of VL.");
647        }
648
649        if (isExternal) {
650            String pdir = this.getFileFormat().getAbsoluteFile().getParent();
651
652            if (pdir == null) {
653                pdir = ".";
654            }
655            System.setProperty("user.dir", pdir);//H5.H5Dchdir_ext(pdir);
656        }
657
658        boolean isREF = false;
659        long[] lsize = { 1 };
660        log.trace("read(): open dataset");
661        did = open();
662        if (did >= 0) {
663            try {
664                lsize[0] = selectHyperslab(did, spaceIDs);
665                log.trace("read(): opened dataset size {} for {}", lsize[0], nPoints);
666
667                if (lsize[0] == 0) {
668                    log.debug("read(): No data to read. Dataset or selected subset is empty.");
669                    throw new HDF5Exception("No data to read.\nEither the dataset or the selected subset is empty.");
670                }
671
672                if (lsize[0] < Integer.MIN_VALUE || lsize[0] > Integer.MAX_VALUE) {
673                    log.debug("read(): lsize outside valid Java int range; unsafe cast");
674                    throw new HDF5Exception("Dataset too large to read.");
675                }
676
677                if (log.isDebugEnabled()) {
678                    // check is storage space is allocated
679                    try {
680                        long ssize = H5.H5Dget_storage_size(did);
681                        log.trace("read(): Storage space allocated = {}.", ssize);
682                    }
683                    catch (Exception ex) {
684                        log.debug("read(): check if storage space is allocated:", ex);
685                    }
686                }
687
688                tid = H5.H5Dget_type(did);
689                log.trace("read(): H5Tget_native_type:");
690                log.trace("read(): isNativeDatatype={}", isNativeDatatype);
691                if (!isNativeDatatype) {
692                    int tmptid = -1;
693                    try {
694                        tmptid = tid;
695                        tid = H5.H5Tget_native_type(tmptid);
696                    }
697                    finally {
698                        try {H5.H5Tclose(tmptid);}
699                        catch (Exception ex2) {log.debug("read(): H5Tclose(tmptid {}) failure: ", tmptid, ex2);}
700                    }
701                }
702
703                isREF = (H5.H5Tequal(tid, HDF5Constants.H5T_STD_REF_OBJ));
704
705                log.trace("read(): originalBuf={} isText={} isREF={} lsize[0]={} nPoints={}", originalBuf, isText, isREF, lsize[0], nPoints);
706                if ((originalBuf == null) || isEnum || isText || isREF || ((originalBuf != null) && (lsize[0] != nPoints))) {
707                    try {
708                        theData = H5Datatype.allocateArray(tid, (int) lsize[0]);
709                    }
710                    catch (OutOfMemoryError err) {
711                        log.debug("read(): Out of memory.");
712                        log.trace("read(): finish");
713                        throw new HDF5Exception("Out Of Memory.");
714                    }
715                }
716                else {
717                    theData = originalBuf; // reuse the buffer if the size is the same
718                }
719
720                if (theData != null) {
721                    if (isVLEN) {
722                        log.trace("read(): H5DreadVL");
723                        H5.H5DreadVL(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, (Object[]) theData);
724                    }
725                    else {
726                        log.trace("read(): H5Dread did={} spaceIDs[0]={} spaceIDs[1]={}", did, spaceIDs[0], spaceIDs[1]);
727                        H5.H5Dread(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, theData);
728                    }
729                } // if (theData != null)
730            }
731            catch (HDF5DataFiltersException exfltr) {
732                log.debug("read(): read failure:", exfltr);
733                log.trace("read(): finish");
734                throw new Exception("Filter not available exception: " + exfltr.getMessage(), exfltr);
735            }
736            catch (HDF5Exception h5ex) {
737                log.debug("read(): read failure", h5ex);
738                log.trace("read(): finish");
739                throw new HDF5Exception(h5ex.toString());
740            }
741            finally {
742                try {
743                    if (HDF5Constants.H5S_ALL != spaceIDs[0])
744                        H5.H5Sclose(spaceIDs[0]);
745                }
746                catch (Exception ex2) {
747                    log.debug("read(): H5Sclose(spaceIDs[0] {}) failure: ", spaceIDs[0], ex2);
748                }
749                try {
750                    if (HDF5Constants.H5S_ALL != spaceIDs[1])
751                        H5.H5Sclose(spaceIDs[1]);
752                }
753                catch (Exception ex2) {
754                    log.debug("read(): H5Sclose(spaceIDs[1] {}) failure: ", spaceIDs[1], ex2);
755                }
756                try {
757                    if (isText && convertByteToString && theData instanceof byte[]) {
758                        log.trace("read(): H5Dread isText convertByteToString");
759                        theData = byteToString((byte[]) theData, (int)H5.H5Tget_size(tid));
760                    }
761                    else if (isREF) {
762                        log.trace("read(): H5Dread isREF byteToLong");
763                        theData = HDFNativeData.byteToLong((byte[]) theData);
764                    }
765                    else if (isEnum && isEnumConverted()) {
766                        log.trace("H5ScalarDS read: H5Dread isEnum theData={}", theData);
767                        theData = H5Datatype.convertEnumValueToName(tid, theData, null);
768                    }
769                }
770                catch (Exception ex) {
771                    log.debug("read(): convert data: ", ex);
772                }
773                try {H5.H5Tclose(tid);}
774                catch (Exception ex2) {log.debug("read(): H5Tclose(tid {}) failure: ", tid, ex2);}
775
776                close(did);
777            }
778        }
779
780        log.trace("read(): finish");
781        return theData;
782    }
783
784
785    /**
786     * Writes the given data buffer into this dataset in a file.
787     *
788     * @param buf
789     *            The buffer that contains the data values.
790     *
791     * @throws HDF5Exception
792     *             If there is an error at the HDF5 library level.
793     */
794    @Override
795    public void write(Object buf) throws HDF5Exception {
796        log.trace("write(): start");
797        int did = -1;
798        int tid = -1;
799        int spaceIDs[] = { -1, -1 }; // spaceIDs[0]=mspace, spaceIDs[1]=fspace
800        Object tmpData = null;
801
802        if (buf == null) {
803            log.debug("write(): buf is null");
804            log.trace("write(): finish");
805            return;
806        }
807
808        if (isVLEN && !isText) {
809            log.trace("write(): VL data={}", buf);
810            log.debug("write(): Cannot write non-string variable-length data");
811            log.trace("write(): finish");
812            throw (new HDF5Exception("Writing non-string variable-length data is not supported"));
813        }
814        else if (isRegRef) {
815            log.debug("write(): Cannot write region reference data");
816            log.trace("write(): finish");
817            throw (new HDF5Exception("Writing region references data is not supported"));
818        }
819
820        long[] lsize = { 1 };
821        did = open();
822        log.trace("write(): dataset opened");
823        if (did >= 0) {
824            try {
825                lsize[0] = selectHyperslab(did, spaceIDs);
826                tid = H5.H5Dget_type(did);
827
828                log.trace("write(): isNativeDatatype={}", isNativeDatatype);
829                if (!isNativeDatatype) {
830                    int tmptid = -1;
831                    try {
832                        tmptid = tid;
833                        tid = H5.H5Tget_native_type(tmptid);
834                    }
835                    finally {
836                        try {H5.H5Tclose(tmptid);}
837                        catch (Exception ex2) {log.debug("write(): H5Tclose(tmptid {}) failure: ", tmptid, ex2);}
838                    }
839                }
840
841                isText = (H5.H5Tget_class(tid) == HDF5Constants.H5T_STRING);
842
843                // check if need to convert integer data
844                int tsize = H5.H5Tget_size(tid);
845                String cname = buf.getClass().getName();
846                char dname = cname.charAt(cname.lastIndexOf("[") + 1);
847                boolean doConversion = (((tsize == 1) && (dname == 'S')) || ((tsize == 2) && (dname == 'I'))
848                        || ((tsize == 4) && (dname == 'J')) || (isUnsigned && unsignedConverted));
849                log.trace("write(): tsize={} cname={} dname={} doConversion={}", tsize, cname, dname, doConversion);
850
851                tmpData = buf;
852                if (doConversion) {
853                    tmpData = convertToUnsignedC(buf, null);
854                }
855                // do not convert v-len strings, regardless of conversion request
856                // type
857                else if (isText && convertByteToString && !H5.H5Tis_variable_str(tid)) {
858                    tmpData = stringToByte((String[]) buf, H5.H5Tget_size(tid));
859                }
860                else if (isEnum && (Array.get(buf, 0) instanceof String)) {
861                    tmpData = H5Datatype.convertEnumNameToValue(tid, (String[]) buf, null);
862                }
863
864                H5.H5Dwrite(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, tmpData);
865            }
866            finally {
867                tmpData = null;
868                try {
869                    if (HDF5Constants.H5S_ALL != spaceIDs[0])
870                        H5.H5Sclose(spaceIDs[0]);
871                }
872                catch (Exception ex2) {
873                    log.debug("write(): H5Sclose(spaceIDs[0] {}) failure: ", spaceIDs[0], ex2);
874                }
875                try {
876                    if (HDF5Constants.H5S_ALL != spaceIDs[1])
877                        H5.H5Sclose(spaceIDs[1]);
878                }
879                catch (Exception ex2) {
880                    log.debug("write(): H5Sclose(spaceIDs[1] {}) failure: ", spaceIDs[1], ex2);
881                }
882                try {
883                    H5.H5Tclose(tid);
884                }
885                catch (Exception ex2) {
886                    log.debug("write(): H5Tclose(tid {}) failure: ", tid, ex2);
887                }
888            }
889            close(did);
890        }
891        log.trace("write(): finish");
892    }
893
894    /**
895     * Set up the selection of hyperslab
896     *
897     * @param did
898     *            IN dataset ID
899     * @param spaceIDs
900     *            IN/OUT memory and file space IDs -- spaceIDs[0]=mspace, spaceIDs[1]=fspace
901     *
902     * @return total number of data point selected
903     *
904     * @throws HDF5Exception
905     *             If there is an error at the HDF5 library level.
906     */
907    private long selectHyperslab(int did, int[] spaceIDs) throws HDF5Exception {
908        log.trace("selectHyperslab(): start");
909        long lsize = 1;
910
911        boolean isAllSelected = true;
912        for (int i = 0; i < rank; i++) {
913            lsize *= selectedDims[i];
914            if (selectedDims[i] < dims[i]) {
915                isAllSelected = false;
916            }
917        }
918        log.trace("selectHyperslab(): isAllSelected={}", isAllSelected);
919
920        if (isAllSelected) {
921            spaceIDs[0] = HDF5Constants.H5S_ALL;
922            spaceIDs[1] = HDF5Constants.H5S_ALL;
923        }
924        else {
925            spaceIDs[1] = H5.H5Dget_space(did);
926
927            // When 1D dataspace is used in chunked dataset, reading is very
928            // slow.
929            // It is a known problem on HDF5 library for chunked dataset.
930            // mspace = H5.H5Screate_simple(1, lsize, null);
931            spaceIDs[0] = H5.H5Screate_simple(rank, selectedDims, null);
932            H5.H5Sselect_hyperslab(spaceIDs[1], HDF5Constants.H5S_SELECT_SET, startDims, selectedStride, selectedDims,
933                    null);
934        }
935
936        if ((rank > 1) && (selectedIndex[0] > selectedIndex[1]))
937            isDefaultImageOrder = false;
938        else
939            isDefaultImageOrder = true;
940
941        log.trace("selectHyperslab(): isDefaultImageOrder={}", isDefaultImageOrder);
942        log.trace("selectHyperslab(): finish");
943        return lsize;
944    }
945
946    /*
947     * (non-Javadoc)
948     *
949     * @see hdf.object.DataFormat#getMetadata()
950     */
951    public List<Attribute> getMetadata() throws HDF5Exception {
952        return this.getMetadata(fileFormat.getIndexType(null), fileFormat.getIndexOrder(null));
953    }
954
955    /*
956     * (non-Javadoc)
957     *
958     * @see hdf.object.DataFormat#getMetadata(int...)
959     */
960    public List<Attribute> getMetadata(int... attrPropList) throws HDF5Exception {
961        log.trace("getMetadata(): start");
962
963        if (rank <= 0) {
964            init();
965        }
966
967        try {
968            this.linkTargetObjName = H5File.getLinkTargetName(this);
969        }
970        catch (Exception ex) {
971            log.debug("getMetadata(): getLinkTargetName failed: ", ex);
972        }
973
974        if (attributeList != null) {
975            log.trace("getMetadata(): attributeList != null");
976            log.trace("getMetadata(): finish");
977            return attributeList;
978        }
979
980        // load attributes first
981        int did = -1;
982        int pcid = -1;
983        int indxType = fileFormat.getIndexType(null);
984        int order = fileFormat.getIndexOrder(null);
985
986        // load attributes first
987        if (attrPropList.length > 0) {
988            indxType = attrPropList[0];
989            if (attrPropList.length > 1) {
990                order = attrPropList[1];
991            }
992        }
993        log.trace("getMetadata(): open dataset");
994        did = open();
995        if (did >= 0) {
996            log.trace("getMetadata(): dataset opened");
997            try {
998                compression = "";
999                attributeList = H5File.getAttribute(did, indxType, order);
1000                log.trace("getMetadata(): attributeList loaded");
1001
1002                // get the compression and chunk information
1003                pcid = H5.H5Dget_create_plist(did);
1004                long storage_size = H5.H5Dget_storage_size(did);
1005                int nfilt = H5.H5Pget_nfilters(pcid);
1006                int layout_type = H5.H5Pget_layout(pcid);
1007                if (layout_type == HDF5Constants.H5D_CHUNKED) {
1008                    chunkSize = new long[rank];
1009                    H5.H5Pget_chunk(pcid, rank, chunkSize);
1010                    int n = chunkSize.length;
1011                    storage_layout = "CHUNKED: " + String.valueOf(chunkSize[0]);
1012                    for (int i = 1; i < n; i++) {
1013                        storage_layout += " X " + chunkSize[i];
1014                    }
1015
1016                    if(nfilt > 0) {
1017                        long    nelmts = 1;
1018                        long    uncomp_size;
1019                        long    datum_size = getDatatype().getDatatypeSize();
1020                        if (datum_size < 0) {
1021                            int tmptid = -1;
1022                            try {
1023                                tmptid = H5.H5Dget_type(did);
1024                                datum_size = H5.H5Tget_size(tmptid);
1025                            }
1026                            finally {
1027                                try {H5.H5Tclose(tmptid);}
1028                                catch (Exception ex2) {log.debug("getMetadata(): H5Tclose(tmptid {}) failure: ", tmptid, ex2);}
1029                            }
1030                        }
1031
1032                        for(int i = 0; i < rank; i++) {
1033                            nelmts *= dims[i];
1034                        }
1035                        uncomp_size = nelmts * datum_size;
1036
1037                        /* compression ratio = uncompressed size /  compressed size */
1038
1039                        if(storage_size != 0) {
1040                            double ratio = (double) uncomp_size / (double) storage_size;
1041                            DecimalFormat df = new DecimalFormat();
1042                            df.setMinimumFractionDigits(3);
1043                            df.setMaximumFractionDigits(3);
1044                            compression +=  df.format(ratio) + ":1";
1045                        }
1046                    }
1047                }
1048                else if (layout_type == HDF5Constants.H5D_COMPACT) {
1049                    storage_layout = "COMPACT";
1050                }
1051                else if (layout_type == HDF5Constants.H5D_CONTIGUOUS) {
1052                    storage_layout = "CONTIGUOUS";
1053                    if (H5.H5Pget_external_count(pcid) > 0)
1054                        storage_layout += " - EXTERNAL ";
1055                }
1056                else {
1057                    chunkSize = null;
1058                    storage_layout = "NONE";
1059                }
1060
1061                int[] flags = { 0, 0 };
1062                long[] cd_nelmts = { 20 };
1063                int[] cd_values = new int[(int) cd_nelmts[0]];;
1064                String[] cd_name = { "", "" };
1065                log.trace("getMetadata(): {} filters in pipeline", nfilt);
1066                int filter = -1;
1067                int[] filter_config = { 1 };
1068                filters = "";
1069
1070                for (int i = 0, k = 0; i < nfilt; i++) {
1071                    log.trace("getMetadata(): filter[{}]", i);
1072                    if (i > 0) {
1073                        filters += ", ";
1074                    }
1075                    if (k > 0) {
1076                        compression += ", ";
1077                    }
1078
1079                    try {
1080                        cd_nelmts[0] = 20;
1081                        cd_values = new int[(int) cd_nelmts[0]];
1082                        cd_values = new int[(int) cd_nelmts[0]];
1083                        filter = H5.H5Pget_filter(pcid, i, flags, cd_nelmts, cd_values, 120, cd_name, filter_config);
1084                        log.trace("getMetadata(): filter[{}] is {} has {} elements ", i, cd_name[0], cd_nelmts[0]);
1085                        for (int j = 0; j < cd_nelmts[0]; j++) {
1086                            log.trace("getMetadata(): filter[{}] element {} = {}", i, j, cd_values[j]);
1087                        }
1088                    }
1089                    catch (Throwable err) {
1090                        log.debug("getMetadata(): filter[{}] error: ", i, err);
1091                        log.trace("getMetadata(): filter[{}] continue", i);
1092                        filters += "ERROR";
1093                        continue;
1094                    }
1095
1096                    if (filter == HDF5Constants.H5Z_FILTER_NONE) {
1097                        filters += "NONE";
1098                    }
1099                    else if (filter == HDF5Constants.H5Z_FILTER_DEFLATE) {
1100                        filters += "GZIP";
1101                        compression += compression_gzip_txt + cd_values[0];
1102                        k++;
1103                    }
1104                    else if (filter == HDF5Constants.H5Z_FILTER_FLETCHER32) {
1105                        filters += "Error detection filter";
1106                    }
1107                    else if (filter == HDF5Constants.H5Z_FILTER_SHUFFLE) {
1108                        filters += "SHUFFLE: Nbytes = " + cd_values[0];
1109                    }
1110                    else if (filter == HDF5Constants.H5Z_FILTER_NBIT) {
1111                        filters += "NBIT";
1112                    }
1113                    else if (filter == HDF5Constants.H5Z_FILTER_SCALEOFFSET) {
1114                        filters += "SCALEOFFSET: MIN BITS = " + cd_values[0];
1115                    }
1116                    else if (filter == HDF5Constants.H5Z_FILTER_SZIP) {
1117                        filters += "SZIP";
1118                        compression += "SZIP: Pixels per block = " + cd_values[1];
1119                        k++;
1120                        int flag = -1;
1121                        try {
1122                            flag = H5.H5Zget_filter_info(filter);
1123                        }
1124                        catch (Exception ex) {
1125                            flag = -1;
1126                        }
1127                        if (flag == HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) {
1128                            compression += ": H5Z_FILTER_CONFIG_DECODE_ENABLED";
1129                        }
1130                        else if ((flag == HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED)
1131                                || (flag >= (HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED + HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED))) {
1132                            compression += ": H5Z_FILTER_CONFIG_ENCODE_ENABLED";
1133                        }
1134                    }
1135                    else {
1136                        filters += "USERDEFINED " + cd_name[0] + "(" + filter + "): ";
1137                        for (int j = 0; j < cd_nelmts[0]; j++) {
1138                            if (j > 0)
1139                                filters += ", ";
1140                            filters += cd_values[j];
1141                        }
1142                        log.debug("getMetadata(): filter[{}] is user defined compression", i);
1143                    }
1144                } // for (int i=0; i<nfilt; i++)
1145
1146                if (compression.length() == 0) {
1147                    compression = "NONE";
1148                }
1149                log.trace("getMetadata(): filter compression={}", compression);
1150
1151                if (filters.length() == 0) {
1152                    filters = "NONE";
1153                }
1154                log.trace("getMetadata(): filter information={}", filters);
1155
1156                storage = "SIZE: " + storage_size;
1157                try {
1158                    int[] at = { 0 };
1159                    H5.H5Pget_alloc_time(pcid, at);
1160                    storage += ", allocation time: ";
1161                    if (at[0] == HDF5Constants.H5D_ALLOC_TIME_EARLY) {
1162                        storage += "Early";
1163                    }
1164                    else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_INCR) {
1165                        storage += "Incremental";
1166                    }
1167                    else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_LATE) {
1168                        storage += "Late";
1169                    }
1170                }
1171                catch (Exception ex) {
1172                    log.debug("getMetadata(): Storage allocation time:", ex);
1173                }
1174                if (storage.length() == 0) {
1175                    storage = "NONE";
1176                }
1177                log.trace("getMetadata(): storage={}", storage);
1178            }
1179            finally {
1180                try {
1181                    H5.H5Pclose(pcid);
1182                }
1183                catch (Exception ex) {
1184                    log.debug("getMetadata(): H5Pclose(pcid {}) failure: ", pcid, ex);
1185                }
1186                close(did);
1187            }
1188        }
1189
1190        log.trace("getMetadata(): finish");
1191        return attributeList;
1192    }
1193
1194    /*
1195     * (non-Javadoc)
1196     *
1197     * @see hdf.object.DataFormat#writeMetadata(java.lang.Object)
1198     */
1199    public void writeMetadata(Object info) throws Exception {
1200        log.trace("writeMetadata(): start");
1201        // only attribute metadata is supported.
1202        if (!(info instanceof Attribute)) {
1203            log.debug("writeMetadata(): Object not an Attribute");
1204            log.trace("writeMetadata(): finish");
1205            return;
1206        }
1207
1208        boolean attrExisted = false;
1209        Attribute attr = (Attribute) info;
1210        log.trace("writeMetadata(): {}", attr.getName());
1211
1212        if (attributeList == null) {
1213            this.getMetadata();
1214        }
1215
1216        if (attributeList != null)
1217            attrExisted = attributeList.contains(attr);
1218
1219        getFileFormat().writeAttribute(this, attr, attrExisted);
1220        // add the new attribute into attribute list
1221        if (!attrExisted) {
1222            attributeList.add(attr);
1223            nAttributes = attributeList.size();
1224        }
1225        log.trace("writeMetadata(): finish");
1226    }
1227
1228    /*
1229     * (non-Javadoc)
1230     *
1231     * @see hdf.object.DataFormat#removeMetadata(java.lang.Object)
1232     */
1233    public void removeMetadata(Object info) throws HDF5Exception {
1234        log.trace("removeMetadata(): start");
1235        // only attribute metadata is supported.
1236        if (!(info instanceof Attribute)) {
1237            log.debug("removeMetadata(): Object not an Attribute");
1238            log.trace("removeMetadata(): finish");
1239            return;
1240        }
1241
1242        Attribute attr = (Attribute) info;
1243        log.trace("removeMetadata(): {}", attr.getName());
1244        int did = open();
1245        if (did >= 0) {
1246            try {
1247                H5.H5Adelete(did, attr.getName());
1248                List<Attribute> attrList = getMetadata();
1249                attrList.remove(attr);
1250                nAttributes = attrList.size();
1251            }
1252            finally {
1253                close(did);
1254            }
1255        }
1256        log.trace("removeMetadata(): finish");
1257    }
1258
1259    /*
1260     * (non-Javadoc)
1261     *
1262     * @see hdf.object.DataFormat#updateMetadata(java.lang.Object)
1263     */
1264    public void updateMetadata(Object info) throws HDF5Exception {
1265        log.trace("updateMetadata(): start");
1266        // only attribute metadata is supported.
1267        if (!(info instanceof Attribute)) {
1268            log.debug("updateMetadata(): Object not an Attribute");
1269            log.trace("updateMetadata(): finish");
1270            return;
1271        }
1272
1273        Attribute attr = (Attribute) info;
1274        log.trace("updateMetadata(): {}", attr.getName());
1275        nAttributes = -1;
1276        log.trace("updateMetadata(): finish");
1277    }
1278
1279    /*
1280     * (non-Javadoc)
1281     *
1282     * @see hdf.object.HObject#setName(java.lang.String)
1283     */
1284    @Override
1285    public void setName(String newName) throws Exception {
1286        H5File.renameObject(this, newName);
1287        super.setName(newName);
1288    }
1289
1290    /**
1291     * Resets selection of dataspace
1292     */
1293    private void resetSelection() {
1294        log.trace("resetSelection(): start");
1295
1296        for (int i = 0; i < rank; i++) {
1297            startDims[i] = 0;
1298            selectedDims[i] = 1;
1299            if (selectedStride != null) {
1300                selectedStride[i] = 1;
1301            }
1302        }
1303
1304        if (interlace == INTERLACE_PIXEL) {
1305            // 24-bit TRUE color image
1306            // [height][width][pixel components]
1307            selectedDims[2] = 3;
1308            selectedDims[0] = dims[0];
1309            selectedDims[1] = dims[1];
1310            selectedIndex[0] = 0; // index for height
1311            selectedIndex[1] = 1; // index for width
1312            selectedIndex[2] = 2; // index for depth
1313        }
1314        else if (interlace == INTERLACE_PLANE) {
1315            // 24-bit TRUE color image
1316            // [pixel components][height][width]
1317            selectedDims[0] = 3;
1318            selectedDims[1] = dims[1];
1319            selectedDims[2] = dims[2];
1320            selectedIndex[0] = 1; // index for height
1321            selectedIndex[1] = 2; // index for width
1322            selectedIndex[2] = 0; // index for depth
1323        }
1324        else if (rank == 1) {
1325            selectedIndex[0] = 0;
1326            selectedDims[0] = dims[0];
1327        }
1328        else if (rank == 2) {
1329            selectedIndex[0] = 0;
1330            selectedIndex[1] = 1;
1331            selectedDims[0] = dims[0];
1332            selectedDims[1] = dims[1];
1333        }
1334        else if (rank > 2) {
1335            // // hdf-java 2.5 version: 3D dataset is arranged in the order of
1336            // [frame][height][width] by default
1337            // selectedIndex[1] = rank-1; // width, the fastest dimension
1338            // selectedIndex[0] = rank-2; // height
1339            // selectedIndex[2] = rank-3; // frames
1340
1341            //
1342            // (5/4/09) Modified the default dimension order. See bug#1379
1343            // We change the default order to the following. In most situation,
1344            // users want to use the natural order of
1345            // selectedIndex[0] = 0
1346            // selectedIndex[1] = 1
1347            // selectedIndex[2] = 2
1348            // Most of NPOESS data is the the order above.
1349
1350            if (isImage) {
1351                // 3D dataset is arranged in the order of [frame][height][width]
1352                selectedIndex[1] = rank - 1; // width, the fastest dimension
1353                selectedIndex[0] = rank - 2; // height
1354                selectedIndex[2] = rank - 3; // frames
1355            }
1356            else {
1357                selectedIndex[0] = 0; // width, the fastest dimension
1358                selectedIndex[1] = 1; // height
1359                selectedIndex[2] = 2; // frames
1360            }
1361
1362            selectedDims[selectedIndex[0]] = dims[selectedIndex[0]];
1363            selectedDims[selectedIndex[1]] = dims[selectedIndex[1]];
1364            selectedDims[selectedIndex[2]] = dims[selectedIndex[2]];
1365        }
1366
1367        // by default, only one-D is selected for text data
1368        if ((rank > 1) && isText) {
1369            selectedIndex[0] = rank - 1;
1370            selectedIndex[1] = 0;
1371            selectedDims[0] = 1;
1372            selectedDims[selectedIndex[0]] = dims[selectedIndex[0]];
1373        }
1374
1375        isDataLoaded = false;
1376        isDefaultImageOrder = true;
1377        log.trace("resetSelection(): finish");
1378    }
1379
1380    public static Dataset create(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims,
1381            long[] chunks, int gzip, Object data) throws Exception {
1382        return create(name, pgroup, type, dims, maxdims, chunks, gzip, null, data);
1383    }
1384
1385    /**
1386     * Creates a scalar dataset in a file with/without chunking and compression.
1387     * <p>
1388     * The following example shows how to create a string dataset using this function.
1389     *
1390     * <pre>
1391     * H5File file = new H5File(&quot;test.h5&quot;, H5File.CREATE);
1392     * int max_str_len = 120;
1393     * Datatype strType = new H5Datatype(Datatype.CLASS_STRING, max_str_len, -1, -1);
1394     * int size = 10000;
1395     * long dims[] = { size };
1396     * long chunks[] = { 1000 };
1397     * int gzip = 9;
1398     * String strs[] = new String[size];
1399     *
1400     * for (int i = 0; i &lt; size; i++)
1401     *     strs[i] = String.valueOf(i);
1402     *
1403     * file.open();
1404     * file.createScalarDS(&quot;/1D scalar strings&quot;, null, strType, dims, null, chunks, gzip, strs);
1405     *
1406     * try {
1407     *     file.close();
1408     * }
1409     * catch (Exception ex) {
1410     * }
1411     * </pre>
1412     *
1413     * @param name
1414     *            the name of the dataset to create.
1415     * @param pgroup
1416     *            parent group where the new dataset is created.
1417     * @param type
1418     *            the datatype of the dataset.
1419     * @param dims
1420     *            the dimension size of the dataset.
1421     * @param maxdims
1422     *            the max dimension size of the dataset. maxdims is set to dims if maxdims = null.
1423     * @param chunks
1424     *            the chunk size of the dataset. No chunking if chunk = null.
1425     * @param gzip
1426     *            GZIP compression level (1 to 9). No compression if gzip&lt;=0.
1427     * @param fillValue
1428     *            the default data value.
1429     * @param data
1430     *            the array of data values.
1431     *
1432     * @return the new scalar dataset if successful; otherwise returns null.
1433     *
1434     * @throws Exception if there is a failure.
1435     */
1436    public static Dataset create(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims,
1437            long[] chunks, int gzip, Object fillValue, Object data) throws Exception {
1438        log.trace("create(): start");
1439
1440        H5ScalarDS dataset = null;
1441        String fullPath = null;
1442        int did = -1;
1443        int sid = -1;
1444        int tid = -1;
1445        int plist = -1;
1446
1447        if ((pgroup == null) || (name == null) || (dims == null) || ((gzip > 0) && (chunks == null))) {
1448            log.debug("create(): one or more parameters are null");
1449            log.trace("create(): finish");
1450            return null;
1451        }
1452
1453        H5File file = (H5File) pgroup.getFileFormat();
1454        if (file == null) {
1455            log.debug("create(): Parent Group FileFormat is null");
1456            log.trace("create(): finish");
1457            return null;
1458        }
1459
1460        String path = HObject.separator;
1461        if (!pgroup.isRoot()) {
1462            path = pgroup.getPath() + pgroup.getName() + HObject.separator;
1463            if (name.endsWith("/")) {
1464                name = name.substring(0, name.length() - 1);
1465            }
1466            int idx = name.lastIndexOf("/");
1467            if (idx >= 0) {
1468                name = name.substring(idx + 1);
1469            }
1470        }
1471
1472        fullPath = path + name;
1473
1474        // setup chunking and compression
1475        boolean isExtentable = false;
1476        if (maxdims != null) {
1477            for (int i = 0; i < maxdims.length; i++) {
1478                if (maxdims[i] == 0) {
1479                    maxdims[i] = dims[i];
1480                }
1481                else if (maxdims[i] < 0) {
1482                    maxdims[i] = HDF5Constants.H5S_UNLIMITED;
1483                }
1484
1485                if (maxdims[i] != dims[i]) {
1486                    isExtentable = true;
1487                }
1488            }
1489        }
1490
1491        // HDF5 requires you to use chunking in order to define extendible
1492        // datasets. Chunking makes it possible to extend datasets efficiently,
1493        // without having to reorganize storage excessively. Using default size
1494        // of 64x...which has good performance
1495        if ((chunks == null) && isExtentable) {
1496            chunks = new long[dims.length];
1497            for (int i = 0; i < dims.length; i++)
1498                chunks[i] = Math.min(dims[i], 64);
1499        }
1500
1501        // prepare the dataspace and datatype
1502        int rank = dims.length;
1503
1504        if ((tid = type.toNative()) >= 0) {
1505            try {
1506                sid = H5.H5Screate_simple(rank, dims, maxdims);
1507
1508                // figure out creation properties
1509                plist = HDF5Constants.H5P_DEFAULT;
1510
1511                byte[] val_fill = null;
1512                try {
1513                    val_fill = parseFillValue(type, fillValue);
1514                }
1515                catch (Exception ex) {
1516                    log.debug("create(): parse fill value: ", ex);
1517                }
1518
1519                if (chunks != null || val_fill != null) {
1520                    plist = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
1521
1522                    if (chunks != null) {
1523                        H5.H5Pset_layout(plist, HDF5Constants.H5D_CHUNKED);
1524                        H5.H5Pset_chunk(plist, rank, chunks);
1525
1526                        // compression requires chunking
1527                        if (gzip > 0) {
1528                            H5.H5Pset_deflate(plist, gzip);
1529                        }
1530                    }
1531
1532                    if (val_fill != null) {
1533                        H5.H5Pset_fill_value(plist, tid, val_fill);
1534                    }
1535                }
1536
1537                int fid = file.getFID();
1538
1539                log.trace("create(): create dataset");
1540                did = H5.H5Dcreate(fid, fullPath, tid, sid, HDF5Constants.H5P_DEFAULT, plist, HDF5Constants.H5P_DEFAULT);
1541                dataset = new H5ScalarDS(file, name, path);
1542            }
1543            finally {
1544                try {
1545                    H5.H5Pclose(plist);
1546                }
1547                catch (HDF5Exception ex) {
1548                    log.debug("create(): H5Pclose(plist {}) failure: ", plist, ex);
1549                }
1550                try {
1551                    H5.H5Sclose(sid);
1552                }
1553                catch (HDF5Exception ex) {
1554                    log.debug("create(): H5Sclose(sid {}) failure: ", sid, ex);
1555                }
1556                try {
1557                    H5.H5Tclose(tid);
1558                }
1559                catch (HDF5Exception ex) {
1560                    log.debug("create(): H5Tclose(tid {}) failure: ", tid, ex);
1561                }
1562                try {
1563                    H5.H5Dclose(did);
1564                }
1565                catch (HDF5Exception ex) {
1566                    log.debug("create(): H5Dclose(did {}) failure: ", did, ex);
1567                }
1568            }
1569        }
1570
1571        if (dataset != null) {
1572            pgroup.addToMemberList(dataset);
1573            if (data != null) {
1574                dataset.init();
1575                long selected[] = dataset.getSelectedDims();
1576                for (int i = 0; i < rank; i++) {
1577                    selected[i] = dims[i];
1578                }
1579                dataset.write(data);
1580            }
1581        }
1582        log.trace("create(): finish");
1583        return dataset;
1584    }
1585
1586    // check _FillValue, valid_min, valid_max, and valid_range
1587    private void checkCFconvention(int oid) throws Exception {
1588        log.trace("checkCFconvention(): start");
1589
1590        Object avalue = getAttrValue(oid, "_FillValue");
1591
1592        if (avalue != null) {
1593            int n = Array.getLength(avalue);
1594            for (int i = 0; i < n; i++)
1595                addFilteredImageValue((Number) Array.get(avalue, i));
1596        }
1597
1598        if (imageDataRange == null || imageDataRange[1] <= imageDataRange[0]) {
1599            double x0 = 0, x1 = 0;
1600            avalue = getAttrValue(oid, "valid_range");
1601            if (avalue != null) {
1602                try {
1603                    x0 = Double.valueOf(java.lang.reflect.Array.get(avalue, 0).toString()).doubleValue();
1604                    x1 = Double.valueOf(java.lang.reflect.Array.get(avalue, 1).toString()).doubleValue();
1605                    imageDataRange = new double[2];
1606                    imageDataRange[0] = x0;
1607                    imageDataRange[1] = x1;
1608                    return;
1609                }
1610                catch (Exception ex) {
1611                    log.debug("checkCFconvention(): valid_range: ", ex);
1612                }
1613            }
1614
1615            avalue = getAttrValue(oid, "valid_min");
1616            if (avalue != null) {
1617                try {
1618                    x0 = Double.valueOf(java.lang.reflect.Array.get(avalue, 0).toString()).doubleValue();
1619                }
1620                catch (Exception ex) {
1621                    log.debug("checkCFconvention(): valid_min: ", ex);
1622                }
1623                avalue = getAttrValue(oid, "valid_max");
1624                if (avalue != null) {
1625                    try {
1626                        x1 = Double.valueOf(java.lang.reflect.Array.get(avalue, 0).toString()).doubleValue();
1627                        imageDataRange = new double[2];
1628                        imageDataRange[0] = x0;
1629                        imageDataRange[1] = x1;
1630                    }
1631                    catch (Exception ex) {
1632                        log.debug("checkCFconvention(): valid_max:", ex);
1633                    }
1634                }
1635            }
1636        } // if (imageDataRange==null || imageDataRange[1]<=imageDataRange[0])
1637        log.trace("checkCFconvention(): finish");
1638    }
1639
1640    private Object getAttrValue(int oid, String aname) {
1641        log.trace("getAttrValue(): start: name={}", aname);
1642
1643        int aid = -1;
1644        int atid = -1;
1645        int asid = -1;
1646        Object avalue = null;
1647
1648        try {
1649            // try to find attribute name
1650            aid = H5.H5Aopen_by_name(oid, ".", aname, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
1651        }
1652        catch (HDF5LibraryException ex5) {
1653            log.debug("getAttrValue(): Failed to find attribute {} : Expected", aname);
1654        }
1655        catch (Exception ex) {
1656            log.debug("getAttrValue(): try to find attribute {}:", aname, ex);
1657        }
1658        if (aid > 0) {
1659            try {
1660                atid = H5.H5Aget_type(aid);
1661                int tmptid = atid;
1662                atid = H5.H5Tget_native_type(tmptid);
1663                try {
1664                    H5.H5Tclose(tmptid);
1665                }
1666                catch (Exception ex) {
1667                    log.debug("getAttrValue(): H5Tclose(tmptid {}) failure: ", tmptid, ex);
1668                }
1669
1670                asid = H5.H5Aget_space(aid);
1671                long adims[] = null;
1672
1673                int arank = H5.H5Sget_simple_extent_ndims(asid);
1674                if (arank > 0) {
1675                    adims = new long[arank];
1676                    H5.H5Sget_simple_extent_dims(asid, adims, null);
1677                }
1678                log.trace("getAttrValue(): adims={}", adims);
1679
1680                // retrieve the attribute value
1681                long lsize = 1;
1682                if (adims != null) {
1683                    for (int j = 0; j < adims.length; j++) {
1684                        lsize *= adims[j];
1685                    }
1686                }
1687                log.trace("getAttrValue(): lsize={}", lsize);
1688
1689                if (lsize < Integer.MIN_VALUE || lsize > Integer.MAX_VALUE) throw new Exception("Invalid int size");
1690
1691                avalue = H5Datatype.allocateArray(atid, (int) lsize);
1692
1693                if (avalue != null) {
1694                    log.trace("getAttrValue(): read attribute id {} of size={}", atid, lsize);
1695                    H5.H5Aread(aid, atid, avalue);
1696
1697                    if (H5Datatype.isUnsigned(atid)) {
1698                        log.trace("getAttrValue(): id {} is unsigned", atid);
1699                        avalue = convertFromUnsignedC(avalue, null);
1700                    }
1701                }
1702            }
1703            catch (Exception ex) {
1704                log.debug("getAttrValue(): try to get value for attribute {}: ", aname, ex);
1705            }
1706            finally {
1707                try {
1708                    H5.H5Tclose(atid);
1709                }
1710                catch (HDF5Exception ex) {
1711                    log.debug("getAttrValue(): H5Tclose(atid {}) failure: ", atid, ex);
1712                }
1713                try {
1714                    H5.H5Sclose(asid);
1715                }
1716                catch (HDF5Exception ex) {
1717                    log.debug("getAttrValue(): H5Sclose(asid {}) failure: ", asid, ex);
1718                }
1719                try {
1720                    H5.H5Aclose(aid);
1721                }
1722                catch (HDF5Exception ex) {
1723                    log.debug("getAttrValue(): H5Aclose(aid {}) failure: ", aid, ex);
1724                }
1725            }
1726        } // if (aid > 0)
1727
1728        log.trace("getAttrValue(): finish");
1729        return avalue;
1730    }
1731
1732    private boolean isStringAttributeOf(int objID, String name, String value) {
1733        boolean retValue = false;
1734        int aid = -1;
1735        int atid = -1;
1736
1737        try {
1738            // try to find out interlace mode
1739            aid = H5.H5Aopen_by_name(objID, ".", name, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
1740            atid = H5.H5Aget_type(aid);
1741            int size = H5.H5Tget_size(atid);
1742            byte[] attrValue = new byte[size];
1743            H5.H5Aread(aid, atid, attrValue);
1744            String strValue = new String(attrValue).trim();
1745            retValue = strValue.equalsIgnoreCase(value);
1746        }
1747        catch (Exception ex) {
1748            log.debug("isStringAttributeOf(): try to find out interlace mode:", ex);
1749        }
1750        finally {
1751            try {
1752                H5.H5Tclose(atid);
1753            }
1754            catch (HDF5Exception ex) {
1755                log.debug("isStringAttributeOf(): H5Tclose(atid {}) failure: ", atid, ex);
1756            }
1757            try {
1758                H5.H5Aclose(aid);
1759            }
1760            catch (HDF5Exception ex) {
1761                log.debug("isStringAttributeOf(): H5Aclose(aid {}) failure: ", aid, ex);
1762            }
1763        }
1764
1765        return retValue;
1766    }
1767
1768    /*
1769     * (non-Javadoc)
1770     *
1771     * @see hdf.object.Dataset#copy(hdf.object.Group, java.lang.String, long[], java.lang.Object)
1772     */
1773    @Override
1774    public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff) throws Exception {
1775        log.trace("copy(): start");
1776        // must give a location to copy
1777        if (pgroup == null) {
1778            log.debug("copy(): Parent group is null");
1779            log.trace("copy(): finish");
1780            return null;
1781        }
1782
1783        Dataset dataset = null;
1784        int srcdid = -1;
1785        int dstdid = -1;
1786        int tid = -1;
1787        int sid = -1;
1788        int plist = -1;
1789        String dname = null, path = null;
1790
1791        if (pgroup.isRoot()) {
1792            path = HObject.separator;
1793        }
1794        else {
1795            path = pgroup.getPath() + pgroup.getName() + HObject.separator;
1796        }
1797        dname = path + dstName;
1798
1799        srcdid = open();
1800        if (srcdid >= 0) {
1801            try {
1802                tid = H5.H5Dget_type(srcdid);
1803                sid = H5.H5Screate_simple(dims.length, dims, null);
1804                plist = H5.H5Dget_create_plist(srcdid);
1805
1806                long[] chunks = new long[dims.length];
1807                boolean setChunkFlag = false;
1808                try {
1809                    H5.H5Pget_chunk(plist, dims.length, chunks);
1810                    for (int i = 0; i < dims.length; i++) {
1811                        if (dims[i] < chunks[i]) {
1812                            setChunkFlag = true;
1813                            if (dims[i] == 1)
1814                                chunks[i] = 1;
1815                            else
1816                                chunks[i] = dims[i] / 2;
1817                        }
1818                    }
1819                }
1820                catch (Exception ex) {
1821                    log.debug("copy(): chunk: ", ex);
1822                }
1823
1824                if (setChunkFlag)
1825                    H5.H5Pset_chunk(plist, dims.length, chunks);
1826
1827                try {
1828                    dstdid = H5.H5Dcreate(pgroup.getFID(), dname, tid, sid, HDF5Constants.H5P_DEFAULT, plist,
1829                            HDF5Constants.H5P_DEFAULT);
1830                }
1831                catch (Exception e) {
1832                    log.debug("copy(): H5Dcreate: ", e);
1833                }
1834                finally {
1835                    try {
1836                        H5.H5Dclose(dstdid);
1837                    }
1838                    catch (Exception ex2) {
1839                        log.debug("copy(): H5Dclose(dstdid {}) failure: ", dstdid, ex2);
1840                    }
1841                }
1842
1843                dataset = new H5ScalarDS(pgroup.getFileFormat(), dstName, path);
1844                if (buff != null) {
1845                    dataset.init();
1846                    dataset.write(buff);
1847                }
1848
1849                dstdid = dataset.open();
1850                if (dstdid >= 0) {
1851                    try {
1852                        H5File.copyAttributes(srcdid, dstdid);
1853                    }
1854                    finally {
1855                        try {
1856                            H5.H5Dclose(dstdid);
1857                        }
1858                        catch (Exception ex) {
1859                            log.debug("copy(): H5Dclose(dstdid {}) failure: ", dstdid, ex);
1860                        }
1861                    }
1862                }
1863            }
1864            finally {
1865                try {
1866                    H5.H5Pclose(plist);
1867                }
1868                catch (Exception ex) {
1869                    log.debug("copy(): H5Pclose(plist {}) failure: ", plist, ex);
1870                }
1871                try {
1872                    H5.H5Sclose(sid);
1873                }
1874                catch (Exception ex) {
1875                    log.debug("copy(): H5Sclose(sid {}) failure: ", sid, ex);
1876                }
1877                try {
1878                    H5.H5Tclose(tid);
1879                }
1880                catch (Exception ex) {
1881                    log.debug("copy(): H5Tclose(tid {}) failure: ", tid, ex);
1882                }
1883                try {
1884                    H5.H5Dclose(srcdid);
1885                }
1886                catch (Exception ex) {
1887                    log.debug("copy(): H5Dclose(srcdid {}) failure: ", srcdid, ex);
1888                }
1889            }
1890        }
1891
1892        pgroup.addToMemberList(dataset);
1893
1894        ((ScalarDS) dataset).setIsImage(isImage);
1895
1896        log.trace("copy(): finish");
1897        return dataset;
1898    }
1899
1900    /*
1901     * (non-Javadoc)
1902     *
1903     * @see hdf.object.ScalarDS#getPalette()
1904     */
1905    @Override
1906    public byte[][] getPalette() {
1907        if (palette == null) {
1908            palette = readPalette(0);
1909        }
1910
1911        return palette;
1912    }
1913
1914    /*
1915     * (non-Javadoc)
1916     *
1917     * @see hdf.object.ScalarDS#getPaletteName(int)
1918     */
1919    public String getPaletteName(int idx) {
1920        log.trace("getPaletteName(): start");
1921
1922        byte[] refs = getPaletteRefs();
1923        int did = -1;
1924        int pal_id = -1;
1925        String paletteName = null;
1926        long size = 100L;
1927
1928        if (refs == null) {
1929            log.debug("getPaletteName(): refs is null");
1930            log.trace("getPaletteName(): finish");
1931            return null;
1932        }
1933
1934        byte[] ref_buf = new byte[8];
1935
1936        try {
1937            System.arraycopy(refs, idx * 8, ref_buf, 0, 8);
1938        }
1939        catch (Throwable err) {
1940            log.debug("getPaletteName(): arraycopy failure: ", err);
1941            log.trace("getPaletteName(): finish");
1942            return null;
1943        }
1944
1945        did = open();
1946        if (did >= 0) {
1947            try {
1948                pal_id = H5.H5Rdereference(getFID(), HDF5Constants.H5R_OBJECT, ref_buf);
1949                paletteName = H5.H5Iget_name(pal_id);
1950            }
1951            catch (Exception ex) {
1952                ex.printStackTrace();
1953            }
1954            finally {
1955                close(pal_id);
1956                close(did);
1957            }
1958        }
1959
1960        log.trace("getPaletteName(): finish");
1961        return paletteName;
1962    }
1963
1964    /*
1965     * (non-Javadoc)
1966     *
1967     * @see hdf.object.ScalarDS#readPalette(int)
1968     */
1969    @Override
1970    public byte[][] readPalette(int idx) {
1971        log.trace("readPalette(): start");
1972
1973        byte[][] thePalette = null;
1974        byte[] refs = getPaletteRefs();
1975        int did = -1;
1976        int pal_id = -1;
1977        int tid = -1;
1978
1979        if (refs == null) {
1980            log.debug("readPalette(): refs is null");
1981            log.trace("readPalette(): finish");
1982            return null;
1983        }
1984
1985        byte[] p = null;
1986        byte[] ref_buf = new byte[8];
1987
1988        try {
1989            System.arraycopy(refs, idx * 8, ref_buf, 0, 8);
1990        }
1991        catch (Throwable err) {
1992            log.debug("readPalette(): arraycopy failure: ", err);
1993            log.trace("readPalette(): failure");
1994            return null;
1995        }
1996
1997        did = open();
1998        if (did >= 0) {
1999            try {
2000                pal_id = H5.H5Rdereference(getFID(), HDF5Constants.H5R_OBJECT, ref_buf);
2001                tid = H5.H5Dget_type(pal_id);
2002
2003                // support only 3*256 byte palette data
2004                if (H5.H5Dget_storage_size(pal_id) <= 768) {
2005                    p = new byte[3 * 256];
2006                    H5.H5Dread(pal_id, tid, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, p);
2007                }
2008            }
2009            catch (HDF5Exception ex) {
2010                log.debug("readPalette(): failure: ", ex);
2011                p = null;
2012            }
2013            finally {
2014                try {
2015                    H5.H5Tclose(tid);
2016                }
2017                catch (HDF5Exception ex2) {
2018                    log.debug("readPalette(): H5Tclose(tid {}) failure: ", tid, ex2);
2019                }
2020                close(pal_id);
2021                close(did);
2022            }
2023        }
2024
2025        if (p != null) {
2026            thePalette = new byte[3][256];
2027            for (int i = 0; i < 256; i++) {
2028                thePalette[0][i] = p[i * 3];
2029                thePalette[1][i] = p[i * 3 + 1];
2030                thePalette[2][i] = p[i * 3 + 2];
2031            }
2032        }
2033
2034        log.trace("readPalette(): finish");
2035        return thePalette;
2036    }
2037
2038    private static byte[] parseFillValue(Datatype type, Object fillValue) throws Exception {
2039        log.trace("parseFillValue(): start");
2040
2041        byte[] data = null;
2042
2043        if (type == null || fillValue == null) {
2044            log.debug("parseFillValue(): datatype or fill value is null");
2045            log.trace("parseFillValue(): finish");
2046            return null;
2047        }
2048
2049        int datatypeClass = type.getDatatypeClass();
2050        int datatypeSize = type.getDatatypeSize();
2051
2052        double val_dbl = 0;
2053        String val_str = null;
2054
2055        if (fillValue instanceof String) {
2056            val_str = (String) fillValue;
2057        }
2058        else if (fillValue.getClass().isArray()) {
2059            val_str = Array.get(fillValue, 0).toString();
2060        }
2061
2062        if (datatypeClass != Datatype.CLASS_STRING) {
2063            try {
2064                val_dbl = Double.parseDouble(val_str);
2065            }
2066            catch (NumberFormatException ex) {
2067                log.debug("parseFillValue(): parse error: ", ex);
2068                log.trace("parseFillValue(): finish");
2069                return null;
2070            }
2071        }
2072
2073        try {
2074            switch (datatypeClass) {
2075            case Datatype.CLASS_INTEGER:
2076            case Datatype.CLASS_ENUM:
2077            case Datatype.CLASS_CHAR:
2078                log.trace("parseFillValue(): class CLASS_INT-ENUM-CHAR");
2079                if (datatypeSize == 1) {
2080                    data = new byte[] { (byte) val_dbl };
2081                }
2082                else if (datatypeSize == 2) {
2083                    data = HDFNativeData.shortToByte((short) val_dbl);
2084                }
2085                else if (datatypeSize == 8) {
2086                    data = HDFNativeData.longToByte((long) val_dbl);
2087                }
2088                else {
2089                    data = HDFNativeData.intToByte((int) val_dbl);
2090                }
2091                break;
2092            case Datatype.CLASS_FLOAT:
2093                log.trace("parseFillValue(): class CLASS_FLOAT");
2094                if (datatypeSize == 8) {
2095                    data = HDFNativeData.doubleToByte(val_dbl);
2096                }
2097                else {
2098                    data = HDFNativeData.floatToByte((float) val_dbl);
2099                    ;
2100                }
2101                break;
2102            case Datatype.CLASS_STRING:
2103                log.trace("parseFillValue(): class CLASS_STRING");
2104                data = val_str.getBytes();
2105                break;
2106            case Datatype.CLASS_REFERENCE:
2107                log.trace("parseFillValue(): class CLASS_REFERENCE");
2108                data = HDFNativeData.longToByte((long) val_dbl);
2109                break;
2110            default:
2111                log.debug("parseFillValue(): datatypeClass unknown");
2112                break;
2113            } // switch (tclass)
2114        }
2115        catch (Exception ex) {
2116            log.debug("parseFillValue(): failure: ", ex);
2117            data = null;
2118        }
2119
2120        log.trace("parseFillValue(): finish");
2121        return data;
2122    }
2123
2124    /*
2125     * (non-Javadoc)
2126     *
2127     * @see hdf.object.ScalarDS#getPaletteRefs()
2128     */
2129    @Override
2130    public byte[] getPaletteRefs() {
2131        if (rank <= 0) {
2132            init(); // init will be called to get refs
2133        }
2134
2135        return paletteRefs;
2136    }
2137
2138    /**
2139     * reads references of palettes into a byte array Each reference requires eight bytes storage. Therefore, the array
2140     * length is 8*numberOfPalettes.
2141     */
2142    private byte[] getPaletteRefs(int did) {
2143        log.trace("getPaletteRefs(): start");
2144
2145        int aid = -1;
2146        int sid = -1;
2147        int size = 0;
2148        int rank = 0;
2149        int atype = -1;
2150        byte[] ref_buf = null;
2151
2152        try {
2153            aid = H5.H5Aopen_by_name(did, ".", "PALETTE", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
2154            sid = H5.H5Aget_space(aid);
2155            rank = H5.H5Sget_simple_extent_ndims(sid);
2156            size = 1;
2157            if (rank > 0) {
2158                long[] dims = new long[rank];
2159                H5.H5Sget_simple_extent_dims(sid, dims, null);
2160                log.trace("getPaletteRefs(): rank={}, dims={}", rank, dims);
2161                for (int i = 0; i < rank; i++) {
2162                    size *= (int) dims[i];
2163                }
2164            }
2165
2166            if ((size * 8) < Integer.MIN_VALUE || (size * 8) > Integer.MAX_VALUE) throw new HDF5Exception("Invalid int size");
2167
2168            ref_buf = new byte[size * 8];
2169            atype = H5.H5Aget_type(aid);
2170
2171            H5.H5Aread(aid, atype, ref_buf);
2172        }
2173        catch (HDF5Exception ex) {
2174            log.debug("getPaletteRefs(): Palette attribute search failed: Expected", ex);
2175            ref_buf = null;
2176        }
2177        finally {
2178            try {
2179                H5.H5Tclose(atype);
2180            }
2181            catch (HDF5Exception ex2) {
2182                log.debug("getPaletteRefs(): H5Tclose(atype {}) failure: ", atype, ex2);
2183            }
2184            try {
2185                H5.H5Sclose(sid);
2186            }
2187            catch (HDF5Exception ex2) {
2188                log.debug("getPaletteRefs(): H5Sclose(sid {}) failure: ", sid, ex2);
2189            }
2190            try {
2191                H5.H5Aclose(aid);
2192            }
2193            catch (HDF5Exception ex2) {
2194                log.debug("getPaletteRefs(): H5Aclose(aid {}) failure: ", aid, ex2);
2195            }
2196        }
2197
2198        log.trace("getPaletteRefs(): finish");
2199        return ref_buf;
2200    }
2201
2202    /**
2203     * H5Dset_extent verifies that the dataset is at least of size size, extending it if necessary. The dimensionality
2204     * of size is the same as that of the dataspace of the dataset being changed.
2205     *
2206     * This function can be applied to the following datasets: 1) Any dataset with unlimited dimensions 2) A dataset
2207     * with fixed dimensions if the current dimension sizes are less than the maximum sizes set with maxdims (see
2208     * H5Screate_simple)
2209     *
2210     * @param newDims the dimension target size
2211     *
2212     * @throws HDF5Exception
2213     *             If there is an error at the HDF5 library level.
2214     */
2215    public void extend(long[] newDims) throws HDF5Exception {
2216        int did = -1;
2217        int sid = -1;
2218
2219        did = open();
2220        if (did >= 0) {
2221            try {
2222                H5.H5Dset_extent(did, newDims);
2223                H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_GLOBAL);
2224                sid = H5.H5Dget_space(did);
2225                long[] checkDims = new long[rank];
2226                H5.H5Sget_simple_extent_dims(sid, checkDims, null);
2227                log.trace("extend(): rank={}, checkDims={}", rank, checkDims);
2228                for (int i = 0; i < rank; i++) {
2229                    if (checkDims[i] != newDims[i]) {
2230                        log.debug("extend(): error extending dataset");
2231                        throw new HDF5Exception("error extending dataset " + getName());
2232                    }
2233                }
2234                dims = checkDims;
2235            }
2236            catch (Exception e) {
2237                log.debug("extend(): failure: ", e);
2238                throw new HDF5Exception(e.getMessage());
2239            }
2240            finally {
2241                if (sid > 0)
2242                    H5.H5Sclose(sid);
2243
2244                close(did);
2245            }
2246        }
2247    }
2248
2249}