001/*****************************************************************************
002 * Copyright by The HDF Group.                                               *
003 * Copyright by the Board of Trustees of the University of Illinois.         *
004 * All rights reserved.                                                      *
005 *                                                                           *
006 * This file is part of the HDF Java Products distribution.                  *
007 * The full copyright notice, including terms governing use, modification,   *
008 * and redistribution, is contained in the files COPYING and Copyright.html. *
009 * COPYING can be found at the root of the source code distribution tree.    *
010 * Or, see https://support.hdfgroup.org/products/licenses.html               *
011 * If you do not have access to either file, you may request a copy from     *
012 * help@hdfgroup.org.                                                        *
013 ****************************************************************************/
014
015package hdf.object.h5;
016
017import java.lang.reflect.Array;
018import java.math.BigDecimal;
019import java.math.BigInteger;
020import java.text.DecimalFormat;
021import java.util.ArrayList;
022import java.util.Arrays;
023import java.util.Iterator;
024import java.util.List;
025import java.util.Vector;
026
027import hdf.hdf5lib.H5;
028import hdf.hdf5lib.HDF5Constants;
029import hdf.hdf5lib.HDFArray;
030import hdf.hdf5lib.HDFNativeData;
031import hdf.hdf5lib.exceptions.HDF5DataFiltersException;
032import hdf.hdf5lib.exceptions.HDF5Exception;
033import hdf.hdf5lib.exceptions.HDF5LibraryException;
034import hdf.hdf5lib.structs.H5O_info_t;
035import hdf.hdf5lib.structs.H5O_token_t;
036
037import hdf.object.Attribute;
038import hdf.object.CompoundDS;
039import hdf.object.Dataset;
040import hdf.object.Datatype;
041import hdf.object.FileFormat;
042import hdf.object.Group;
043import hdf.object.HObject;
044import hdf.object.MetaDataContainer;
045import hdf.object.Utils;
046
047import hdf.object.h5.H5Datatype;
048import hdf.object.h5.H5MetaDataContainer;
049import hdf.object.h5.H5ReferenceType;
050
051/**
052 * The H5CompoundDS class defines an HDF5 dataset of compound datatypes.
053 *
054 * An HDF5 dataset is an object composed of a collection of data elements, or raw data, and metadata
055 * that stores a description of the data elements, data layout, and all other information necessary
056 * to write, read, and interpret the stored data.
057 *
058 * A HDF5 compound datatype is similar to a struct in C or a common block in Fortran: it is a
059 * collection of one or more atomic types or small arrays of such types. Each member of a compound
060 * type has a name which is unique within that type, and a byte offset that determines the first
061 * byte (smallest byte address) of that member in a compound datum.
062 *
063 * For more information on HDF5 datasets and datatypes, read the <a href=
064 * "https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5
065 * User's Guide</a>.
066 *
067 * There are two basic types of compound datasets: simple compound data and nested compound data.
068 * Members of a simple compound dataset have atomic datatypes. Members of a nested compound dataset
069 * are compound or array of compound data.
070 *
071 * Since Java does not understand C structures, we cannot directly read/write compound data values
072 * as in the following C example.
073 *
074 * <pre>
075 * typedef struct s1_t {
076 *         int    a;
077 *         float  b;
078 *         double c;
079 *         } s1_t;
080 *     s1_t       s1[LENGTH];
081 *     ...
082 *     H5Dwrite(..., s1);
083 *     H5Dread(..., s1);
084 * </pre>
085 *
086 * Values of compound data fields are stored in java.util.Vector object. We read and write compound
087 * data by fields instead of compound structure. As for the example above, the java.util.Vector
088 * object has three elements: int[LENGTH], float[LENGTH] and double[LENGTH]. Since Java understands
089 * the primitive datatypes of int, float and double, we will be able to read/write the compound data
090 * by field.
091 *
092 * @version 1.1 9/4/2007
093 * @author Peter X. Cao
094 */
095public class H5CompoundDS extends CompoundDS implements MetaDataContainer
096{
097    private static final long serialVersionUID = -5968625125574032736L;
098
099    private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5CompoundDS.class);
100
101    /**
102     * The metadata object for this data object. Members of the metadata are instances of Attribute.
103     */
104    private H5MetaDataContainer objMetadata;
105
106    /** the object properties */
107    private H5O_info_t objInfo;
108
109    /** flag to indicate if the dataset is an external dataset */
110    private boolean isExternal = false;
111
112    /** flag to indicate if the dataset is a virtual dataset */
113    private boolean isVirtual = false;
114    /** the list of virtual names */
115    private List<String> virtualNameList;
116
117    /**
118     * Constructs an instance of a HDF5 compound dataset with given file, dataset name and path.
119     *
120     * The dataset object represents an existing dataset in the file. For example, new
121     * H5CompoundDS(file, "dset1", "/g0/") constructs a dataset object that corresponds to the
122     * dataset,"dset1", at group "/g0/".
123     *
124     * This object is usually constructed at FileFormat.open(), which loads the file structure and
125     * object information into memory. It is rarely used elsewhere.
126     *
127     * @param theFile
128     *            the file that contains the data object.
129     * @param theName
130     *            the name of the data object, e.g. "dset".
131     * @param thePath
132     *            the full path of the data object, e.g. "/arrays/".
133     */
134    public H5CompoundDS(FileFormat theFile, String theName, String thePath) {
135        this(theFile, theName, thePath, null);
136    }
137
138    /**
139     * @deprecated Not for public use in the future.<br>
140     *             Using {@link #H5CompoundDS(FileFormat, String, String)}
141     *
142     * @param theFile
143     *            the file that contains the data object.
144     * @param theName
145     *            the name of the data object, e.g. "dset".
146     * @param thePath
147     *            the full path of the data object, e.g. "/arrays/".
148     * @param oid
149     *            the oid of the data object.
150     */
151    @Deprecated
152    public H5CompoundDS(FileFormat theFile, String theName, String thePath, long[] oid) {
153        super(theFile, theName, thePath, oid);
154        objMetadata = new H5MetaDataContainer(theFile, theName, thePath, this);
155
156        if (theFile != null) {
157            if (oid == null) {
158                // retrieve the object ID
159                byte[] refBuf = null;
160                try {
161                    refBuf = H5.H5Rcreate_object(theFile.getFID(), this.getFullName(), HDF5Constants.H5P_DEFAULT);
162                    this.oid = HDFNativeData.byteToLong(refBuf);
163                    log.trace("constructor REF {} to OID {}", refBuf, this.oid);
164                }
165                catch (Exception ex) {
166                    log.debug("constructor ID {} for {} failed H5Rcreate_object", theFile.getFID(), this.getFullName());
167                }
168                finally {
169                    if (refBuf != null)
170                        H5.H5Rdestroy(refBuf);
171                }
172            }
173            log.trace("constructor OID {}", this.oid);
174            try {
175                objInfo = H5.H5Oget_info_by_name(theFile.getFID(), this.getFullName(), HDF5Constants.H5O_INFO_BASIC, HDF5Constants.H5P_DEFAULT);
176            }
177            catch (Exception ex) {
178                objInfo = new H5O_info_t(-1L, null, 0, 0, 0L, 0L, 0L, 0L, 0L);
179            }
180        }
181        else {
182            this.oid = null;
183            objInfo = new H5O_info_t(-1L, null, 0, 0, 0L, 0L, 0L, 0L, 0L);
184        }
185    }
186
187    /*
188     * (non-Javadoc)
189     *
190     * @see hdf.object.HObject#open()
191     */
192    @Override
193    public long open() {
194        long did = HDF5Constants.H5I_INVALID_HID;
195
196        if (getFID() < 0)
197            log.trace("open(): file id for:{} is invalid", getPath() + getName());
198        else {
199            try {
200                did = H5.H5Dopen(getFID(), getPath() + getName(), HDF5Constants.H5P_DEFAULT);
201                log.trace("open(): did={}", did);
202            }
203            catch (HDF5Exception ex) {
204                log.debug("open(): Failed to open dataset {}", getPath() + getName(), ex);
205                did = HDF5Constants.H5I_INVALID_HID;
206            }
207        }
208
209        return did;
210    }
211
212    /*
213     * (non-Javadoc)
214     *
215     * @see hdf.object.HObject#close(int)
216     */
217    @Override
218    public void close(long did) {
219        if (did >= 0) {
220            try {
221                H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_LOCAL);
222            }
223            catch (Exception ex) {
224                log.debug("close(): H5Fflush(did {}) failure: ", did, ex);
225            }
226            try {
227                H5.H5Dclose(did);
228            }
229            catch (HDF5Exception ex) {
230                log.debug("close(): H5Dclose(did {}) failure: ", did, ex);
231            }
232        }
233    }
234
235    /**
236     * Retrieves datatype and dataspace information from file and sets the dataset
237     * in memory.
238     *
239     * The init() is designed to support lazy operation in a dataset object. When a
240     * data object is retrieved from file, the datatype, dataspace and raw data are
241     * not loaded into memory. When it is asked to read the raw data from file,
242     * init() is first called to get the datatype and dataspace information, then
243     * load the raw data from file.
244     *
245     * init() is also used to reset the selection of a dataset (start, stride and
246     * count) to the default, which is the entire dataset for 1D or 2D datasets. In
247     * the following example, init() at step 1) retrieves datatype and dataspace
248     * information from file. getData() at step 3) reads only one data point. init()
249     * at step 4) resets the selection to the whole dataset. getData() at step 4)
250     * reads the values of whole dataset into memory.
251     *
252     * <pre>
253     * dset = (Dataset) file.get(NAME_DATASET);
254     *
255     * // 1) get datatype and dataspace information from file
256     * dset.init();
257     * rank = dset.getRank(); // rank = 2, a 2D dataset
258     * count = dset.getSelectedDims();
259     * start = dset.getStartDims();
260     * dims = dset.getDims();
261     *
262     * // 2) select only one data point
263     * for (int i = 0; i &lt; rank; i++) {
264     *     start[0] = 0;
265     *     count[i] = 1;
266     * }
267     *
268     * // 3) read one data point
269     * data = dset.getData();
270     *
271     * // 4) reset selection to the whole dataset
272     * dset.init();
273     *
274     * // 5) clean the memory data buffer
275     * dset.clearData();
276     *
277     * // 6) Read the whole dataset
278     * data = dset.getData();
279     * </pre>
280     */
281    @Override
282    public void init() {
283        if (inited) {
284            resetSelection();
285            log.trace("init(): Dataset already initialized");
286            return; // already called. Initialize only once
287        }
288
289        long did = HDF5Constants.H5I_INVALID_HID;
290        long tid = HDF5Constants.H5I_INVALID_HID;
291        long sid = HDF5Constants.H5I_INVALID_HID;
292        flatNameList = new Vector<>();
293        flatTypeList = new Vector<>();
294
295        did = open();
296        if (did >= 0) {
297            // check if it is an external or virtual dataset
298            long pid = HDF5Constants.H5I_INVALID_HID;
299            try {
300                pid = H5.H5Dget_create_plist(did);
301                try {
302                    int nfiles = H5.H5Pget_external_count(pid);
303                    isExternal = (nfiles > 0);
304                    int layoutType = H5.H5Pget_layout(pid);
305                    if (isVirtual = (layoutType == HDF5Constants.H5D_VIRTUAL)) {
306                        try {
307                            long vmaps = H5.H5Pget_virtual_count(pid);
308                            if (vmaps > 0) {
309                                virtualNameList = new Vector<>();
310                                for (long next = 0; next < vmaps; next++) {
311                                    try {
312                                        String fname = H5.H5Pget_virtual_filename(pid, next);
313                                        virtualNameList.add(fname);
314                                        log.trace("init(): virtualNameList[{}]={}", next, fname);
315                                    }
316                                    catch (Exception err) {
317                                        log.trace("init(): vds[{}] continue", next);
318                                    }
319                                }
320                            }
321                        }
322                        catch (Exception err) {
323                            log.debug("init(): vds count error: ", err);
324                        }
325                    }
326                    log.trace("init(): pid={} nfiles={} isExternal={} isVirtual={}", pid, nfiles, isExternal, isVirtual);
327                }
328                catch (Exception ex) {
329                    log.debug("init(): check if it is an external or virtual dataset: ", ex);
330                }
331            }
332            catch (Exception ex) {
333                log.debug("init(): H5Dget_create_plist() failure: ", ex);
334            }
335            finally {
336                try {
337                    H5.H5Pclose(pid);
338                }
339                catch (Exception ex) {
340                    log.debug("init(): H5Pclose(pid {}) failure: ", pid, ex);
341                }
342            }
343
344            try {
345                sid = H5.H5Dget_space(did);
346                rank = H5.H5Sget_simple_extent_ndims(sid);
347                tid = H5.H5Dget_type(did);
348                log.trace("init(): tid={} sid={} rank={}", tid, sid, rank);
349
350                if (rank == 0) {
351                    // a scalar data point
352                    isScalar = true;
353                    rank = 1;
354                    dims = new long[] { 1 };
355                    log.trace("init(): rank is a scalar data point");
356                }
357                else {
358                    isScalar = false;
359                    dims = new long[rank];
360                    maxDims = new long[rank];
361                    H5.H5Sget_simple_extent_dims(sid, dims, maxDims);
362                    log.trace("init(): rank={}, dims={}, maxDims={}", rank, dims, maxDims);
363                }
364
365                try {
366                    int nativeClass = H5.H5Tget_class(tid);
367                    if (nativeClass == HDF5Constants.H5T_REFERENCE) {
368                        long lsize = 1;
369                        if (rank > 0) {
370                            log.trace("init():rank={}, dims={}", rank, dims);
371                            for (int j = 0; j < dims.length; j++) {
372                                lsize *= dims[j];
373                            }
374                        }
375                        datatype = new H5ReferenceType(getFileFormat(), lsize, tid);
376                    }
377                    else
378                        datatype = new H5Datatype(getFileFormat(), tid);
379
380                    log.trace("init(): tid={} has isText={} : isVLEN={} : isEnum={} : isUnsigned={} : isStdRef={} : isRegRef={}", tid,
381                            datatype.isText(), datatype.isVLEN(), ((H5Datatype) datatype).isEnum(), datatype.isUnsigned(),
382                            ((H5Datatype) datatype).isStdRef(), ((H5Datatype) datatype).isRegRef());
383
384                    H5Datatype.extractCompoundInfo((H5Datatype) datatype, "", flatNameList, flatTypeList);
385                }
386                catch (Exception ex) {
387                    log.debug("init(): failed to create datatype for dataset: ", ex);
388                    datatype = null;
389                }
390
391                // initialize member information
392                numberOfMembers = flatNameList.size();
393                log.trace("init(): numberOfMembers={}", numberOfMembers);
394
395                memberNames = new String[numberOfMembers];
396                memberTypes = new Datatype[numberOfMembers];
397                memberOrders = new int[numberOfMembers];
398                isMemberSelected = new boolean[numberOfMembers];
399                memberDims = new Object[numberOfMembers];
400
401                for (int i = 0; i < numberOfMembers; i++) {
402                    isMemberSelected[i] = true;
403                    memberOrders[i] = 1;
404                    memberDims[i] = null;
405
406                    try {
407                        memberTypes[i] = flatTypeList.get(i);
408                        log.trace("init()[{}]: memberTypes[{}]={}", i, i, memberTypes[i].getDescription());
409
410                        if (memberTypes[i].isArray()) {
411                            long mdim[] = memberTypes[i].getArrayDims();
412                            int idim[] = new int[mdim.length];
413                            int arrayNpoints = 1;
414
415                            for (int j = 0; j < idim.length; j++) {
416                                idim[j] = (int) mdim[j];
417                                arrayNpoints *= idim[j];
418                            }
419
420                            memberDims[i] = idim;
421                            memberOrders[i] = arrayNpoints;
422                        }
423                    }
424                    catch (Exception ex) {
425                        log.debug("init()[{}]: memberTypes[{}] get failure: ", i, i, ex);
426                        memberTypes[i] = null;
427                    }
428
429                    try {
430                        memberNames[i] = flatNameList.get(i);
431                        log.trace("init()[{}]: memberNames[{}]={}", i, i, memberNames[i]);
432                    }
433                    catch (Exception ex) {
434                        log.debug("init()[{}]: memberNames[{}] get failure: ", i, i, ex);
435                        memberNames[i] = "null";
436                    }
437                } //  (int i=0; i<numberOfMembers; i++)
438
439                inited = true;
440            }
441            catch (HDF5Exception ex) {
442                numberOfMembers = 0;
443                memberNames = null;
444                memberTypes = null;
445                memberOrders = null;
446                log.debug("init(): ", ex);
447            }
448            finally {
449                if (datatype != null)
450                    datatype.close(tid);
451
452                try {
453                    H5.H5Sclose(sid);
454                }
455                catch (HDF5Exception ex2) {
456                    log.debug("init(): H5Sclose(sid {}) failure: ", sid, ex2);
457                }
458            }
459
460            close(did);
461
462            startDims = new long[rank];
463            selectedDims = new long[rank];
464
465            resetSelection();
466        }
467        else {
468            log.debug("init(): failed to open dataset");
469        }
470    }
471
472    /**
473     * Get the token for this object.
474     *
475     * @return true if it has any attributes, false otherwise.
476     */
477    public long[] getToken() {
478        H5O_token_t token = objInfo.token;
479        return HDFNativeData.byteToLong(token.data);
480    }
481
482    /**
483     * Check if the object has any attributes attached.
484     *
485     * @return true if it has any attributes, false otherwise.
486     */
487    @Override
488    public boolean hasAttribute() {
489        objInfo.num_attrs = objMetadata.getObjectAttributeSize();
490
491        if (objInfo.num_attrs < 0) {
492            long did = open();
493            if (did >= 0) {
494                objInfo.num_attrs = 0;
495
496                try {
497                    objInfo = H5.H5Oget_info(did);
498                }
499                catch (Exception ex) {
500                    objInfo.num_attrs = 0;
501                    log.debug("hasAttribute(): get object info failure: ", ex);
502                }
503                finally {
504                    close(did);
505                }
506                objMetadata.setObjectAttributeSize((int) objInfo.num_attrs);
507            }
508            else {
509                log.debug("hasAttribute(): could not open dataset");
510            }
511        }
512
513        log.trace("hasAttribute(): nAttributes={}", objInfo.num_attrs);
514        return (objInfo.num_attrs > 0);
515    }
516
517    /**
518     * Returns the datatype of the data object.
519     *
520     * @return the datatype of the data object.
521     */
522    @Override
523    public Datatype getDatatype() {
524        if (!inited)
525            init();
526
527        if (datatype == null) {
528            long did = HDF5Constants.H5I_INVALID_HID;
529            long tid = HDF5Constants.H5I_INVALID_HID;
530
531            did = open();
532            if (did >= 0) {
533                try {
534                    tid = H5.H5Dget_type(did);
535                    int nativeClass = H5.H5Tget_class(tid);
536                    if (nativeClass == HDF5Constants.H5T_REFERENCE) {
537                        long lsize = 1;
538                        if (rank > 0) {
539                            log.trace("getDatatype(): rank={}, dims={}", rank, dims);
540                            for (int j = 0; j < dims.length; j++) {
541                                lsize *= dims[j];
542                            }
543                        }
544                        datatype = new H5ReferenceType(getFileFormat(), lsize, tid);
545                    }
546                    else
547                        datatype = new H5Datatype(getFileFormat(), tid);
548                }
549                catch (Exception ex) {
550                    log.debug("getDatatype(): ", ex);
551                }
552                finally {
553                    try {
554                        H5.H5Tclose(tid);
555                    }
556                    catch (HDF5Exception ex) {
557                        log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex);
558                    }
559                    try {
560                        H5.H5Dclose(did);
561                    }
562                    catch (HDF5Exception ex) {
563                        log.debug("getDatatype(): H5Dclose(did {}) failure: ", did, ex);
564                    }
565                }
566            }
567        }
568
569        if (isExternal) {
570            String pdir = this.getFileFormat().getAbsoluteFile().getParent();
571
572            if (pdir == null) {
573                pdir = ".";
574            }
575            System.setProperty("user.dir", pdir);
576            log.trace("getDatatype(): External dataset: user.dir={}", pdir);
577        }
578
579        return datatype;
580    }
581
582    /**
583     * Removes all of the elements from metadata list.
584     * The list should be empty after this call returns.
585     */
586    @Override
587    public void clear() {
588        super.clear();
589        objMetadata.clear();
590    }
591
592    /*
593     * (non-Javadoc)
594     *
595     * @see hdf.object.Dataset#readBytes()
596     */
597    @Override
598    public byte[] readBytes() throws HDF5Exception {
599        byte[] theData = null;
600
601        if (!isInited())
602            init();
603
604        long did = open();
605        if (did >= 0) {
606            long fspace = HDF5Constants.H5I_INVALID_HID;
607            long mspace = HDF5Constants.H5I_INVALID_HID;
608            long tid = HDF5Constants.H5I_INVALID_HID;
609
610            try {
611                long[] lsize = { 1 };
612                for (int j = 0; j < selectedDims.length; j++)
613                    lsize[0] *= selectedDims[j];
614
615                fspace = H5.H5Dget_space(did);
616                mspace = H5.H5Screate_simple(rank, selectedDims, null);
617
618                // set the rectangle selection
619                // HDF5 bug: for scalar dataset, H5Sselect_hyperslab gives core dump
620                if (rank * dims[0] > 1)
621                    H5.H5Sselect_hyperslab(fspace, HDF5Constants.H5S_SELECT_SET, startDims, selectedStride, selectedDims, null); // set block to 1
622
623                tid = H5.H5Dget_type(did);
624                long size = H5.H5Tget_size(tid) * lsize[0];
625                log.trace("readBytes(): size = {}", size);
626
627                if (size < Integer.MIN_VALUE || size > Integer.MAX_VALUE)
628                    throw new Exception("Invalid int size");
629
630                theData = new byte[(int)size];
631
632                log.trace("readBytes(): H5Dread: did={} tid={} fspace={} mspace={}", did, tid, fspace, mspace);
633                H5.H5Dread(did, tid, mspace, fspace, HDF5Constants.H5P_DEFAULT, theData);
634            }
635            catch (Exception ex) {
636                log.debug("readBytes(): failed to read data: ", ex);
637            }
638            finally {
639                try {
640                    H5.H5Sclose(fspace);
641                }
642                catch (Exception ex2) {
643                    log.debug("readBytes(): H5Sclose(fspace {}) failure: ", fspace, ex2);
644                }
645                try {
646                    H5.H5Sclose(mspace);
647                }
648                catch (Exception ex2) {
649                    log.debug("readBytes(): H5Sclose(mspace {}) failure: ", mspace, ex2);
650                }
651                try {
652                    H5.H5Tclose(tid);
653                }
654                catch (HDF5Exception ex2) {
655                    log.debug("readBytes(): H5Tclose(tid {}) failure: ", tid, ex2);
656                }
657                close(did);
658            }
659        }
660
661        return theData;
662    }
663
664    /**
665     * Reads the data from file.
666     *
667     * read() reads the data from file to a memory buffer and returns the memory
668     * buffer. The dataset object does not hold the memory buffer. To store the
669     * memory buffer in the dataset object, one must call getData().
670     *
671     * By default, the whole dataset is read into memory. Users can also select
672     * a subset to read. Subsetting is done in an implicit way.
673     *
674     * <b>How to Select a Subset</b>
675     *
676     * A selection is specified by three arrays: start, stride and count.
677     * <ol>
678     * <li>start: offset of a selection
679     * <li>stride: determines how many elements to move in each dimension
680     * <li>count: number of elements to select in each dimension
681     * </ol>
682     * getStartDims(), getStride() and getSelectedDims() returns the start,
683     * stride and count arrays respectively. Applications can make a selection
684     * by changing the values of the arrays.
685     *
686     * The following example shows how to make a subset. In the example, the
687     * dataset is a 4-dimensional array of [200][100][50][10], i.e. dims[0]=200;
688     * dims[1]=100; dims[2]=50; dims[3]=10; <br>
689     * We want to select every other data point in dims[1] and dims[2]
690     *
691     * <pre>
692     * int rank = dataset.getRank(); // number of dimensions of the dataset
693     * long[] dims = dataset.getDims(); // the dimension sizes of the dataset
694     * long[] selected = dataset.getSelectedDims(); // the selected size of the
695     *                                              // dataset
696     * long[] start = dataset.getStartDims(); // the offset of the selection
697     * long[] stride = dataset.getStride(); // the stride of the dataset
698     * int[] selectedIndex = dataset.getSelectedIndex(); // the selected
699     *                                                   // dimensions for
700     *                                                   // display
701     *
702     * // select dim1 and dim2 as 2D data for display, and slice through dim0
703     * selectedIndex[0] = 1;
704     * selectedIndex[1] = 2;
705     * selectedIndex[1] = 0;
706     *
707     * // reset the selection arrays
708     * for (int i = 0; i &lt; rank; i++) {
709     *     start[i] = 0;
710     *     selected[i] = 1;
711     *     stride[i] = 1;
712     * }
713     *
714     * // set stride to 2 on dim1 and dim2 so that every other data point is
715     * // selected.
716     * stride[1] = 2;
717     * stride[2] = 2;
718     *
719     * // set the selection size of dim1 and dim2
720     * selected[1] = dims[1] / stride[1];
721     * selected[2] = dims[1] / stride[2];
722     *
723     * // when dataset.getData() is called, the selection above will be used
724     * // since
725     * // the dimension arrays are passed by reference. Changes of these arrays
726     * // outside the dataset object directly change the values of these array
727     * // in the dataset object.
728     * </pre>
729     *
730     * For CompoundDS, the memory data object is an java.util.List object. Each
731     * element of the list is a data array that corresponds to a compound field.
732     *
733     * For example, if compound dataset "comp" has the following nested
734     * structure, and member datatypes
735     *
736     * <pre>
737     * comp --&gt; m01 (int)
738     * comp --&gt; m02 (float)
739     * comp --&gt; nest1 --&gt; m11 (char)
740     * comp --&gt; nest1 --&gt; m12 (String)
741     * comp --&gt; nest1 --&gt; nest2 --&gt; m21 (long)
742     * comp --&gt; nest1 --&gt; nest2 --&gt; m22 (double)
743     * </pre>
744     *
745     * getData() returns a list of six arrays: {int[], float[], char[],
746     * String[], long[] and double[]}.
747     *
748     * @return the data read from file.
749     *
750     * @see #getData()
751     * @see hdf.object.DataFormat#read()
752     *
753     * @throws Exception
754     *             if object can not be read
755     */
756    @Override
757    public Object read() throws Exception {
758        Object readData = null;
759
760        if (!isInited())
761            init();
762
763        try {
764            readData = compoundDatasetCommonIO(H5File.IO_TYPE.READ, null);
765        }
766        catch (Exception ex) {
767            log.debug("read(): failed to read compound dataset: ", ex);
768            throw new Exception("failed to read compound dataset: " + ex.getMessage(), ex);
769        }
770
771        return readData;
772    }
773
774    /**
775     * Writes the given data buffer into this dataset in a file.
776     *
777     * The data buffer is a vector that contains the data values of compound fields. The data is written
778     * into file field by field.
779     *
780     * @param buf
781     *            The vector that contains the data values of compound fields.
782     *
783     * @throws Exception
784     *             If there is an error at the HDF5 library level.
785     */
786    @Override
787    public void write(Object buf) throws Exception {
788        if (this.getFileFormat().isReadOnly())
789            throw new Exception("cannot write to compound dataset in file opened as read-only");
790
791        if (!isInited())
792            init();
793
794        try {
795            compoundDatasetCommonIO(H5File.IO_TYPE.WRITE, buf);
796        }
797        catch (Exception ex) {
798            log.debug("write(Object): failed to write compound dataset: ", ex);
799            throw new Exception("failed to write compound dataset: " + ex.getMessage(), ex);
800        }
801    }
802
803    /*
804     * Routine to convert datatypes that are read in as byte arrays to
805     * regular types.
806     */
807    protected Object convertByteMember(final Datatype dtype, byte[] byteData) {
808        Object theObj = null;
809
810        if (dtype.isFloat() && dtype.getDatatypeSize() == 16)
811            theObj = ((H5Datatype)dtype).byteToBigDecimal(byteData, 0);
812        else
813            theObj = super.convertByteMember(dtype, byteData);
814
815        return theObj;
816    }
817
818    private Object compoundDatasetCommonIO(H5File.IO_TYPE ioType, Object writeBuf) throws Exception {
819        H5Datatype dsDatatype = (H5Datatype) getDatatype();
820        Object theData = null;
821
822        if (numberOfMembers <= 0) {
823            log.debug("compoundDatasetCommonIO(): Dataset contains no members");
824            throw new Exception("dataset contains no members");
825        }
826
827        /*
828         * I/O type-specific pre-initialization.
829         */
830        if (ioType == H5File.IO_TYPE.WRITE) {
831            if ((writeBuf == null) || !(writeBuf instanceof List)) {
832                log.debug("compoundDatasetCommonIO(): writeBuf is null or invalid");
833                throw new Exception("write buffer is null or invalid");
834            }
835
836            /*
837             * Check for any unsupported datatypes and fail early before
838             * attempting to write to the dataset.
839             */
840            if (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isCompound()) {
841                log.debug("compoundDatasetCommonIO(): cannot write dataset of type ARRAY of COMPOUND");
842                throw new HDF5Exception("Unsupported dataset of type ARRAY of COMPOUND");
843            }
844
845            if (dsDatatype.isVLEN() && dsDatatype.getDatatypeBase().isCompound()) {
846                log.debug("compoundDatasetCommonIO(): cannot write dataset of type VLEN of COMPOUND");
847                throw new HDF5Exception("Unsupported dataset of type VLEN of COMPOUND");
848            }
849        }
850
851        long did = open();
852        if (did >= 0) {
853            long[] spaceIDs = { HDF5Constants.H5I_INVALID_HID, HDF5Constants.H5I_INVALID_HID }; // spaceIDs[0]=mspace, spaceIDs[1]=fspace
854
855            try {
856                /*
857                 * NOTE: this call sets up a hyperslab selection in the file according to the
858                 * current selection in the dataset object.
859                 */
860                long totalSelectedSpacePoints = H5Utils.getTotalSelectedSpacePoints(did, dims, startDims,
861                        selectedStride, selectedDims, spaceIDs);
862
863                theData = compoundTypeIO(ioType, did, spaceIDs, (int) totalSelectedSpacePoints, dsDatatype, writeBuf, new int[]{0});
864            }
865            finally {
866                if (HDF5Constants.H5S_ALL != spaceIDs[0]) {
867                    try {
868                        H5.H5Sclose(spaceIDs[0]);
869                    }
870                    catch (Exception ex) {
871                        log.debug("compoundDatasetCommonIO(): H5Sclose(spaceIDs[0] {}) failure: ", spaceIDs[0], ex);
872                    }
873                }
874
875                if (HDF5Constants.H5S_ALL != spaceIDs[1]) {
876                    try {
877                        H5.H5Sclose(spaceIDs[1]);
878                    }
879                    catch (Exception ex) {
880                        log.debug("compoundDatasetCommonIO(): H5Sclose(spaceIDs[1] {}) failure: ", spaceIDs[1], ex);
881                    }
882                }
883
884                close(did);
885            }
886        }
887        else
888            log.debug("compoundDatasetCommonIO(): failed to open dataset");
889
890        return theData;
891    }
892
893    /*
894     * Private recursive routine to read/write an entire compound datatype field by
895     * field. This routine is called recursively for ARRAY of COMPOUND and VLEN of
896     * COMPOUND datatypes.
897     *
898     * NOTE: the globalMemberIndex hack is ugly, but we need to keep track of a
899     * running counter so that we can index properly into the flattened name list
900     * generated from H5Datatype.extractCompoundInfo() at dataset init time.
901     */
902    private Object compoundTypeIO(H5File.IO_TYPE ioType, long did, long[] spaceIDs, int nSelPoints,
903            final H5Datatype cmpdType, Object writeBuf, int[] globalMemberIndex) {
904        Object theData = null;
905
906        if (cmpdType.isArray()) {
907            log.trace("compoundTypeIO(): ARRAY type");
908
909            long[] arrayDims = cmpdType.getArrayDims();
910            int arrSize = nSelPoints;
911            for (int i = 0; i < arrayDims.length; i++)
912                arrSize *= arrayDims[i];
913            theData = compoundTypeIO(ioType, did, spaceIDs, arrSize, (H5Datatype) cmpdType.getDatatypeBase(), writeBuf, globalMemberIndex);
914        }
915        else if (cmpdType.isVLEN() && !cmpdType.isVarStr()) {
916            /*
917             * TODO: true variable-length support.
918             */
919            String[] errVal = new String[nSelPoints];
920            String errStr = "*UNSUPPORTED*";
921
922            for (int j = 0; j < nSelPoints; j++)
923                errVal[j] = errStr;
924
925            /*
926             * Setup a fake data list.
927             */
928            Datatype baseType = cmpdType.getDatatypeBase();
929            while (baseType != null && !baseType.isCompound()) {
930                baseType = baseType.getDatatypeBase();
931            }
932
933            List<Object> fakeVlenData = (List<Object>) H5Datatype.allocateArray((H5Datatype) baseType, nSelPoints);
934            fakeVlenData.add(errVal);
935
936            theData = fakeVlenData;
937        }
938        else if (cmpdType.isCompound()) {
939            List<Object> memberDataList = null;
940            List<Datatype> typeList = cmpdType.getCompoundMemberTypes();
941
942            log.trace("compoundTypeIO(): {} {} members:", (ioType == H5File.IO_TYPE.READ) ? "read" : "write", typeList.size());
943
944            if (ioType == H5File.IO_TYPE.READ)
945                memberDataList = (List<Object>) H5Datatype.allocateArray(cmpdType, nSelPoints);
946
947            try {
948                for (int i = 0, writeListIndex = 0; i < typeList.size(); i++) {
949                    H5Datatype memberType = null;
950                    String memberName = null;
951                    Object memberData = null;
952
953                    try {
954                        memberType = (H5Datatype) typeList.get(i);
955                    }
956                    catch (Exception ex) {
957                        log.debug("compoundTypeIO(): get member {} failure: ", i, ex);
958                        globalMemberIndex[0]++;
959                        continue;
960                    }
961
962                    /*
963                     * Since the type list used here is not a flattened structure, we need to skip
964                     * the member selection check for compound types, as otherwise having a single
965                     * member not selected would skip the reading/writing for the entire compound
966                     * type. The member selection check will be deferred to the recursive compound
967                     * read/write below.
968                     */
969                    if (!memberType.isCompound()) {
970                        if (!isMemberSelected[globalMemberIndex[0] % this.getMemberCount()]) {
971                            log.debug("compoundTypeIO(): member[{}] is not selected", i);
972                            globalMemberIndex[0]++;
973                            continue; // the field is not selected
974                        }
975                    }
976
977                    if (!memberType.isCompound()) {
978                        try {
979                            memberName = new String(flatNameList.get(globalMemberIndex[0]));
980                        }
981                        catch (Exception ex) {
982                            log.debug("compoundTypeIO(): get member {} name failure: ", i, ex);
983                            memberName = "null";
984                        }
985                    }
986
987                    log.trace("compoundTypeIO(): member[{}]({}) is type {}", i, memberName, memberType.getDescription());
988
989                    if (ioType == H5File.IO_TYPE.READ) {
990                        try {
991                            if (memberType.isCompound())
992                                memberData = compoundTypeIO(ioType, did, spaceIDs, nSelPoints, memberType, writeBuf, globalMemberIndex);
993                            else if (memberType.isArray() /* || (memberType.isVLEN() && !memberType.isVarStr()) */) {
994                                /*
995                                 * Recursively detect any nested array/vlen of compound types.
996                                 */
997                                boolean compoundFound = false;
998
999                                Datatype base = memberType.getDatatypeBase();
1000                                while (base != null) {
1001                                    if (base.isCompound())
1002                                        compoundFound = true;
1003
1004                                    base = base.getDatatypeBase();
1005                                }
1006
1007                                if (compoundFound) {
1008                                    /*
1009                                     * Skip the top-level array/vlen type.
1010                                     */
1011                                    globalMemberIndex[0]++;
1012
1013                                    memberData = compoundTypeIO(ioType, did, spaceIDs, nSelPoints, memberType, writeBuf, globalMemberIndex);
1014                                }
1015                                else {
1016                                    memberData = readSingleCompoundMember(did, spaceIDs, nSelPoints, memberType, memberName);
1017                                    globalMemberIndex[0]++;
1018                                }
1019                            }
1020                            else {
1021                                memberData = readSingleCompoundMember(did, spaceIDs, nSelPoints, memberType, memberName);
1022                                globalMemberIndex[0]++;
1023                            }
1024                        }
1025                        catch (Exception ex) {
1026                            log.debug("compoundTypeIO(): failed to read member {}: ", i, ex);
1027                            globalMemberIndex[0]++;
1028                            memberData = null;
1029                        }
1030
1031                        if (memberData == null) {
1032                            String[] errVal = new String[nSelPoints];
1033                            String errStr = "*ERROR*";
1034
1035                            for (int j = 0; j < nSelPoints; j++)
1036                                errVal[j] = errStr;
1037
1038                            memberData = errVal;
1039                        }
1040
1041                        memberDataList.add(memberData);
1042                    }
1043                    else {
1044                        try {
1045                            /*
1046                             * TODO: currently doesn't correctly handle non-selected compound members.
1047                             */
1048                            memberData = ((List<?>) writeBuf).get(writeListIndex++);
1049                        }
1050                        catch (Exception ex) {
1051                            log.debug("compoundTypeIO(): get member[{}] data failure: ", i, ex);
1052                            globalMemberIndex[0]++;
1053                            continue;
1054                        }
1055
1056                        if (memberData == null) {
1057                            log.debug("compoundTypeIO(): member[{}] data is null", i);
1058                            globalMemberIndex[0]++;
1059                            continue;
1060                        }
1061
1062                        try {
1063                            if (memberType.isCompound()) {
1064                                List<?> nestedList = (List<?>) ((List<?>) writeBuf).get(writeListIndex++);
1065                                compoundTypeIO(ioType, did, spaceIDs, nSelPoints, memberType, nestedList, globalMemberIndex);
1066                            }
1067                            else {
1068                                writeSingleCompoundMember(did, spaceIDs, nSelPoints, memberType, memberName, memberData);
1069                                globalMemberIndex[0]++;
1070                            }
1071                        }
1072                        catch (Exception ex) {
1073                            log.debug("compoundTypeIO(): failed to write member[{}]: ", i, ex);
1074                            globalMemberIndex[0]++;
1075                        }
1076                    }
1077                } //  (i = 0, writeListIndex = 0; i < atomicTypeList.size(); i++)
1078            }
1079            catch (Exception ex) {
1080                log.debug("compoundTypeIO(): failure: ", ex);
1081                memberDataList = null;
1082            }
1083
1084            theData = memberDataList;
1085        }
1086
1087        return theData;
1088    }
1089
1090    /*
1091     * Private routine to read a single field of a compound datatype by creating a
1092     * compound datatype and inserting the single field into that datatype.
1093     */
1094    private Object readSingleCompoundMember(long dsetID, long[] spaceIDs, int nSelPoints,
1095            final H5Datatype memberType, String memberName) throws Exception {
1096        H5Datatype dsDatatype = (H5Datatype) this.getDatatype();
1097        Object memberData = null;
1098
1099        try {
1100            memberData = H5Datatype.allocateArray(memberType, nSelPoints);
1101            log.trace("readSingleCompoundMember(): allocateArray {} points ", nSelPoints);
1102        }
1103        catch (OutOfMemoryError err) {
1104            memberData = null;
1105            throw new Exception("Out of memory");
1106        }
1107        catch (Exception ex) {
1108            log.debug("readSingleCompoundMember(): ", ex);
1109            memberData = null;
1110        }
1111
1112        if (memberData != null) {
1113            /*
1114             * Create a compound datatype containing just a single field (the one which we
1115             * want to read).
1116             */
1117            long compTid = -1;
1118            try {
1119                compTid = dsDatatype.createCompoundFieldType(memberName);
1120            }
1121            catch (HDF5Exception ex) {
1122                log.debug("readSingleCompoundMember(): unable to create compound field type for member of type {}: ",
1123                        memberType.getDescription(), ex);
1124                memberData = null;
1125            }
1126
1127            /*
1128             * Actually read the data for this member now that everything has been setup.
1129             */
1130            try {
1131                if (memberType.isVLEN() || (memberType.isArray() && memberType.getDatatypeBase().isVLEN())) {
1132                    log.trace("readSingleCompoundMember(): H5DreadVL did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}",
1133                            dsetID, compTid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
1134                            (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
1135
1136                    H5.H5DreadVL(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, (Object[]) memberData);
1137                }
1138                else {
1139                    log.trace("readSingleCompoundMember(): H5Dread did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}",
1140                            dsetID, compTid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
1141                            (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
1142
1143                    H5.H5Dread(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, memberData);
1144                }
1145            }
1146            catch (HDF5DataFiltersException exfltr) {
1147                log.debug("readSingleCompoundMember(): read failure: ", exfltr);
1148                throw new Exception("Filter not available exception: " + exfltr.getMessage(), exfltr);
1149            }
1150            catch (Exception ex) {
1151                log.debug("readSingleCompoundMember(): read failure: ", ex);
1152                throw new Exception("failed to read compound member: " + ex.getMessage(), ex);
1153            }
1154            finally {
1155                dsDatatype.close(compTid);
1156            }
1157
1158            /*
1159             * Perform any necessary data conversions.
1160             */
1161            if (memberType.isUnsigned()) {
1162                log.trace("readSingleCompoundMember(): converting from unsigned C-type integers");
1163                memberData = Dataset.convertFromUnsignedC(memberData, null);
1164            }
1165            else if (Utils.getJavaObjectRuntimeClass(memberData) == 'B') {
1166                log.trace("readSingleCompoundMember(): converting byte array member into Object");
1167
1168                /*
1169                 * For all other types that get read into memory as a byte[] (such as nested
1170                 * compounds and arrays of compounds), we must manually convert the byte[] into
1171                 * something usable.
1172                 */
1173                memberData = convertByteMember(memberType, (byte[]) memberData);
1174            }
1175        }
1176
1177        return memberData;
1178    }
1179
1180    /*
1181     * Private routine to write a single field of a compound datatype by creating a
1182     * compound datatype and inserting the single field into that datatype.
1183     */
1184    private void writeSingleCompoundMember(long dsetID, long[] spaceIDs, int nSelPoints,
1185            final H5Datatype memberType, String memberName, Object theData) throws Exception {
1186        H5Datatype dsDatatype = (H5Datatype) this.getDatatype();
1187
1188        /*
1189         * Check for any unsupported datatypes before attempting to write this compound
1190         * member.
1191         */
1192        if (memberType.isVLEN() && !memberType.isVarStr()) {
1193            log.debug("writeSingleCompoundMember(): writing of VL non-strings is not currently supported");
1194            throw new Exception("writing of VL non-strings is not currently supported");
1195        }
1196
1197        /*
1198         * Perform any necessary data conversions before writing the data.
1199         */
1200        Object tmpData = theData;
1201        try {
1202            if (memberType.isUnsigned()) {
1203                // Check if we need to convert unsigned integer data from Java-style
1204                // to C-style integers
1205                long tsize = memberType.getDatatypeSize();
1206                String cname = theData.getClass().getName();
1207                char dname = cname.charAt(cname.lastIndexOf('[') + 1);
1208                boolean doIntConversion = (((tsize == 1) && (dname == 'S'))
1209                        || ((tsize == 2) && (dname == 'I')) || ((tsize == 4) && (dname == 'J')));
1210
1211                if (doIntConversion) {
1212                    log.trace("writeSingleCompoundMember(): converting integer data to unsigned C-type integers");
1213                    tmpData = convertToUnsignedC(theData, null);
1214                }
1215            }
1216            else if (memberType.isString() && (Array.get(theData, 0) instanceof String)) {
1217                log.trace("writeSingleCompoundMember(): converting string array to byte array");
1218                tmpData = stringToByte((String[]) theData, (int) memberType.getDatatypeSize());
1219            }
1220            else if (memberType.isEnum() && (Array.get(theData, 0) instanceof String)) {
1221                log.trace("writeSingleCompoundMember(): converting enum names to values");
1222                tmpData = memberType.convertEnumNameToValue((String[]) theData);
1223            }
1224        }
1225        catch (Exception ex) {
1226            log.debug("writeSingleCompoundMember(): data conversion failure: ", ex);
1227            tmpData = null;
1228        }
1229
1230        if (tmpData == null) {
1231            log.debug("writeSingleCompoundMember(): data is null");
1232            return;
1233        }
1234
1235        /*
1236         * Create a compound datatype containing just a single field (the one which we
1237         * want to write).
1238         */
1239        long compTid = -1;
1240        try {
1241            compTid = dsDatatype.createCompoundFieldType(memberName);
1242        }
1243        catch (HDF5Exception ex) {
1244            log.debug("writeSingleCompoundMember(): unable to create compound field type for member of type {}: ",
1245                    memberType.getDescription(), ex);
1246        }
1247
1248        /*
1249         * Actually write the data now that everything has been setup.
1250         */
1251        try {
1252            if (memberType.isVarStr()) {
1253                log.trace("writeSingleCompoundMember(): H5Dwrite_string did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}",
1254                        dsetID, compTid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
1255                        (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
1256
1257                H5.H5Dwrite_string(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, (String[]) tmpData);
1258            }
1259            else {
1260                log.trace("writeSingleCompoundMember(): H5Dwrite did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}",
1261                        dsetID, compTid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
1262                        (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
1263
1264                // BUG!!! does not write nested compound data and no
1265                // exception was caught. Need to check if it is a java
1266                // error or C library error.
1267                H5.H5Dwrite(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, tmpData);
1268            }
1269        }
1270        catch (Exception ex) {
1271            log.debug("writeSingleCompoundMember(): write failure: ", ex);
1272            throw new Exception("failed to write compound member: " + ex.getMessage(), ex);
1273        }
1274        finally {
1275            dsDatatype.close(compTid);
1276        }
1277    }
1278
1279    /**
1280     * Converts the data values of this data object to appropriate Java integers if
1281     * they are unsigned integers.
1282     *
1283     * @see hdf.object.Dataset#convertToUnsignedC(Object)
1284     * @see hdf.object.Dataset#convertFromUnsignedC(Object, Object)
1285     *
1286     * @return the converted data buffer.
1287     */
1288    @Override
1289    public Object convertFromUnsignedC() {
1290        throw new UnsupportedOperationException("H5CompoundDS:convertFromUnsignedC Unsupported operation.");
1291    }
1292
1293    /**
1294     * Converts Java integer data values of this data object back to unsigned C-type
1295     * integer data if they are unsigned integers.
1296     *
1297     * @see hdf.object.Dataset#convertToUnsignedC(Object)
1298     * @see hdf.object.Dataset#convertToUnsignedC(Object, Object)
1299     *
1300     * @return the converted data buffer.
1301     */
1302    @Override
1303    public Object convertToUnsignedC() {
1304        throw new UnsupportedOperationException("H5CompoundDS:convertToUnsignedC Unsupported operation.");
1305    }
1306
1307    /**
1308     * Retrieves the object's metadata, such as attributes, from the file.
1309     *
1310     * Metadata, such as attributes, is stored in a List.
1311     *
1312     * @return the list of metadata objects.
1313     *
1314     * @throws HDF5Exception
     *             if the metadata cannot be retrieved
1316     */
1317    @Override
1318    public List<Attribute> getMetadata() throws HDF5Exception {
1319        int gmIndexType = 0;
1320        int gmIndexOrder = 0;
1321
1322        try {
1323            gmIndexType = fileFormat.getIndexType(null);
1324        }
1325        catch (Exception ex) {
1326            log.debug("getMetadata(): getIndexType failed: ", ex);
1327        }
1328        try {
1329            gmIndexOrder = fileFormat.getIndexOrder(null);
1330        }
1331        catch (Exception ex) {
1332            log.debug("getMetadata(): getIndexOrder failed: ", ex);
1333        }
1334        return this.getMetadata(gmIndexType, gmIndexOrder);
1335    }
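
    /*
     * A minimal usage sketch for the attribute retrieval above. The file name and
     * dataset path are hypothetical, and exception handling is omitted: open the
     * file, look up the compound dataset, fetch its attribute list and print the
     * attribute names.
     *
     * H5File file = new H5File("compound.h5", FileFormat.READ);
     * H5CompoundDS dset = (H5CompoundDS) file.get("/CompoundDS");
     * List<Attribute> attrs = dset.getMetadata();
     * for (Attribute attr : attrs)
     *     System.out.println(attr.getAttributeName());
     * file.close();
     */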
1336
1337    /**
1338     * Retrieves the object's metadata, such as attributes, from the file.
1339     *
1340     * Metadata, such as attributes, is stored in a List.
1341     *
1342     * @param attrPropList
1343     *             the list of properties to get
1344     *
1345     * @return the list of metadata objects.
1346     *
1347     * @throws HDF5Exception
     *             if the metadata cannot be retrieved
1349     */
1350    public List<Attribute> getMetadata(int... attrPropList) throws HDF5Exception {
1351        if (!isInited())
1352            init();
1353
1354        try {
1355            this.linkTargetObjName = H5File.getLinkTargetName(this);
1356        }
1357        catch (Exception ex) {
1358            log.debug("getMetadata(): getLinkTargetName failed: ", ex);
1359        }
1360
1361        if (objMetadata.getAttributeList() == null) {
1362            long did = HDF5Constants.H5I_INVALID_HID;
1363            long pcid = HDF5Constants.H5I_INVALID_HID;
1364            long paid = HDF5Constants.H5I_INVALID_HID;
1365
1366            did = open();
1367            if (did >= 0) {
1368                try {
1369                    // get the compression and chunk information
1370                    pcid = H5.H5Dget_create_plist(did);
1371                    paid = H5.H5Dget_access_plist(did);
1372                    long storageSize = H5.H5Dget_storage_size(did);
1373                    int nfilt = H5.H5Pget_nfilters(pcid);
1374                    int layoutType = H5.H5Pget_layout(pcid);
1375
1376                    storageLayout.setLength(0);
1377                    compression.setLength(0);
1378
1379                    if (layoutType == HDF5Constants.H5D_CHUNKED) {
1380                        chunkSize = new long[rank];
1381                        H5.H5Pget_chunk(pcid, rank, chunkSize);
1382                        int n = chunkSize.length;
1383                        storageLayout.append("CHUNKED: ").append(chunkSize[0]);
1384                        for (int i = 1; i < n; i++)
1385                            storageLayout.append(" X ").append(chunkSize[i]);
1386
1387                        if (nfilt > 0) {
1388                            long nelmts = 1;
1389                            long uncompSize;
1390                            long datumSize = getDatatype().getDatatypeSize();
1391
1392                            if (datumSize < 0) {
1393                                long tmptid = HDF5Constants.H5I_INVALID_HID;
1394                                try {
1395                                    tmptid = H5.H5Dget_type(did);
1396                                    datumSize = H5.H5Tget_size(tmptid);
1397                                }
1398                                finally {
1399                                    try {
1400                                        H5.H5Tclose(tmptid);
1401                                    }
1402                                    catch (Exception ex2) {
1403                                        log.debug("getMetadata(): H5Tclose(tmptid {}) failure: ", tmptid, ex2);
1404                                    }
1405                                }
1406                            }
1407
1408                            for (int i = 0; i < rank; i++)
1409                                nelmts *= dims[i];
1410                            uncompSize = nelmts * datumSize;
1411
1412                            /* compression ratio = uncompressed size / compressed size */
1413
1414                            if (storageSize != 0) {
1415                                double ratio = (double) uncompSize / (double) storageSize;
1416                                DecimalFormat df = new DecimalFormat();
1417                                df.setMinimumFractionDigits(3);
1418                                df.setMaximumFractionDigits(3);
1419                                compression.append(df.format(ratio)).append(":1");
1420                            }
1421                        }
1422                    }
1423                    else if (layoutType == HDF5Constants.H5D_COMPACT) {
1424                        storageLayout.append("COMPACT");
1425                    }
1426                    else if (layoutType == HDF5Constants.H5D_CONTIGUOUS) {
1427                        storageLayout.append("CONTIGUOUS");
1428                        if (H5.H5Pget_external_count(pcid) > 0)
1429                            storageLayout.append(" - EXTERNAL ");
1430                    }
1431                    else if (layoutType == HDF5Constants.H5D_VIRTUAL) {
1432                        storageLayout.append("VIRTUAL - ");
1433                        try {
1434                            long vmaps = H5.H5Pget_virtual_count(pcid);
1435                            try {
1436                                int virtView = H5.H5Pget_virtual_view(paid);
1437                                long virtGap = H5.H5Pget_virtual_printf_gap(paid);
1438                                if (virtView == HDF5Constants.H5D_VDS_FIRST_MISSING)
1439                                    storageLayout.append("First Missing");
1440                                else
1441                                    storageLayout.append("Last Available");
1442                                storageLayout.append("\nGAP : ").append(virtGap);
1443                            }
1444                            catch (Exception err) {
1445                                log.debug("getMetadata(): vds error: ", err);
1446                                storageLayout.append("ERROR");
1447                            }
1448                            storageLayout.append("\nMAPS : ").append(vmaps);
1449                            if (vmaps > 0) {
1450                                for (long next = 0; next < vmaps; next++) {
1451                                    try {
1452                                        H5.H5Pget_virtual_vspace(pcid, next);
1453                                        H5.H5Pget_virtual_srcspace(pcid, next);
1454                                        String fname = H5.H5Pget_virtual_filename(pcid, next);
1455                                        String dsetname = H5.H5Pget_virtual_dsetname(pcid, next);
1456                                        storageLayout.append("\n").append(fname).append(" : ").append(dsetname);
1457                                    }
1458                                    catch (Exception err) {
1459                                        log.debug("getMetadata(): vds space[{}] error: ", next, err);
1460                                        storageLayout.append("ERROR");
1461                                    }
1462                                }
1463                            }
1464                        }
1465                        catch (Exception err) {
1466                            log.debug("getMetadata(): vds count error: ", err);
1467                            storageLayout.append("ERROR");
1468                        }
1469                    }
1470                    else {
1471                        chunkSize = null;
1472                        storageLayout.append("NONE");
1473                    }
1474
1475                    int[] flags = { 0, 0 };
1476                    long[] cdNelmts = { 20 };
1477                    int[] cdValues = new int[(int) cdNelmts[0]];
1478                    String[] cdName = { "", "" };
1479                    log.trace("getMetadata(): {} filters in pipeline", nfilt);
1480                    int filter = -1;
1481                    int[] filterConfig = { 1 };
1482
1483                    filters.setLength(0);
1484
1485                    if (nfilt == 0) {
1486                        filters.append("NONE");
1487                    }
1488                    else {
1489                        for (int i = 0, k = 0; i < nfilt; i++) {
1490                            log.trace("getMetadata(): filter[{}]", i);
1491                            if (i > 0)
1492                                filters.append(", ");
1493                            if (k > 0)
1494                                compression.append(", ");
1495
1496                            try {
1497                                cdNelmts[0] = 20;
1498                                cdValues = new int[(int) cdNelmts[0]];
1500                                filter = H5.H5Pget_filter(pcid, i, flags, cdNelmts, cdValues, 120, cdName, filterConfig);
1501                                log.trace("getMetadata(): filter[{}] is {} has {} elements ", i, cdName[0], cdNelmts[0]);
1502                                for (int j = 0; j < cdNelmts[0]; j++)
1503                                    log.trace("getMetadata(): filter[{}] element {} = {}", i, j, cdValues[j]);
1504                            }
1505                            catch (Exception err) {
1506                                log.debug("getMetadata(): filter[{}] error: ", i, err);
1507                                filters.append("ERROR");
1508                                continue;
1509                            }
1510
1511                            if (filter == HDF5Constants.H5Z_FILTER_NONE) {
1512                                filters.append("NONE");
1513                            }
1514                            else if (filter == HDF5Constants.H5Z_FILTER_DEFLATE) {
1515                                filters.append("GZIP");
1516                                compression.append(COMPRESSION_GZIP_TXT).append(cdValues[0]);
1517                                k++;
1518                            }
1519                            else if (filter == HDF5Constants.H5Z_FILTER_FLETCHER32) {
1520                                filters.append("Error detection filter");
1521                            }
1522                            else if (filter == HDF5Constants.H5Z_FILTER_SHUFFLE) {
1523                                filters.append("SHUFFLE: Nbytes = ").append(cdValues[0]);
1524                            }
1525                            else if (filter == HDF5Constants.H5Z_FILTER_NBIT) {
1526                                filters.append("NBIT");
1527                            }
1528                            else if (filter == HDF5Constants.H5Z_FILTER_SCALEOFFSET) {
1529                                filters.append("SCALEOFFSET: MIN BITS = ").append(cdValues[0]);
1530                            }
1531                            else if (filter == HDF5Constants.H5Z_FILTER_SZIP) {
1532                                filters.append("SZIP");
1533                                compression.append("SZIP: Pixels per block = ").append(cdValues[1]);
1534                                k++;
1535                                int flag = -1;
1536                                try {
1537                                    flag = H5.H5Zget_filter_info(filter);
1538                                }
1539                                catch (Exception ex) {
1540                                    log.debug("getMetadata(): H5Zget_filter_info failure: ", ex);
1541                                    flag = -1;
1542                                }
1543                                if (flag == HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED)
1544                                    compression.append(": H5Z_FILTER_CONFIG_DECODE_ENABLED");
1545                                else if ((flag == HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED)
1546                                        || (flag >= (HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED
1547                                                + HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED)))
1548                                    compression.append(": H5Z_FILTER_CONFIG_ENCODE_ENABLED");
1549                            }
1550                            else {
1551                                filters.append("USERDEFINED ").append(cdName[0]).append("(").append(filter).append("): ");
1552                                for (int j = 0; j < cdNelmts[0]; j++) {
1553                                    if (j > 0)
1554                                        filters.append(", ");
1555                                    filters.append(cdValues[j]);
1556                                }
1557                                log.debug("getMetadata(): filter[{}] is user defined compression", i);
1558                            }
1559                        } //  (int i=0; i<nfilt; i++)
1560                    }
1561
1562                    if (compression.length() == 0)
1563                        compression.append("NONE");
1564                    log.trace("getMetadata(): filter compression={}", compression);
1565                    log.trace("getMetadata(): filter information={}", filters);
1566
1567                    storage.setLength(0);
1568                    storage.append("SIZE: ").append(storageSize);
1569
1570                    try {
1571                        int[] at = { 0 };
1572                        H5.H5Pget_alloc_time(pcid, at);
1573                        storage.append(", allocation time: ");
1574                        if (at[0] == HDF5Constants.H5D_ALLOC_TIME_EARLY)
1575                            storage.append("Early");
1576                        else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_INCR)
1577                            storage.append("Incremental");
1578                        else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_LATE)
1579                            storage.append("Late");
1580                        else
1581                            storage.append("Default");
1582                    }
1583                    catch (Exception ex) {
1584                        log.debug("getMetadata(): Storage allocation time:", ex);
1585                    }
1586                    log.trace("getMetadata(): storage={}", storage);
1587                }
1588                finally {
1589                    try {
1590                        H5.H5Pclose(paid);
1591                    }
1592                    catch (Exception ex) {
1593                        log.debug("getMetadata(): H5Pclose(paid {}) failure: ", paid, ex);
1594                    }
1595                    try {
1596                        H5.H5Pclose(pcid);
1597                    }
1598                    catch (Exception ex) {
1599                        log.debug("getMetadata(): H5Pclose(pcid {}) failure: ", pcid, ex);
1600                    }
1601                    close(did);
1602                }
1603            }
1604        }
1605
1606        List<Attribute> attrlist = null;
1607        try {
1608            attrlist = objMetadata.getMetadata(attrPropList);
1609        }
1610        catch (Exception ex) {
1611            log.debug("getMetadata(): getMetadata failed: ", ex);
1612        }
1613        return attrlist;
1614    }
1615
1616    /**
1617     * Writes a specific piece of metadata (such as an attribute) into the file.
1618     *
     * If an HDF(4&amp;5) attribute exists in the file, this method updates its
     * value. If the attribute does not exist in the file, it creates the
     * attribute and attaches it to the object. Writing a new attribute fails
     * if the object already has an attribute with the same name. To update the
     * value of an existing attribute in the file, first retrieve the attribute
     * with getMetadata(), change its value, and then call writeMetadata() to
     * write the new value back.
     *
     * @param info
     *            the metadata to write.
     *
     * @throws Exception
     *             if the metadata cannot be written
1632     */
1633    @Override
1634    public void writeMetadata(Object info) throws Exception {
1635        try {
1636            objMetadata.writeMetadata(info);
1637        }
1638        catch (Exception ex) {
            log.debug("writeMetadata(): Object not an Attribute: ", ex);
1640            return;
1641        }
1642    }
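
    /*
     * A sketch of the update flow described above: fetch the existing attribute with
     * getMetadata(), change its value in memory, then pass it back to writeMetadata().
     * The attribute name "units", the new value, and the setAttributeData() accessor
     * are assumptions for illustration only; exception handling is omitted.
     *
     * List<Attribute> attrs = dset.getMetadata();
     * for (Attribute attr : attrs) {
     *     if ("units".equals(attr.getAttributeName())) {
     *         attr.setAttributeData(new String[] { "meters" }); // replace the in-memory value
     *         dset.writeMetadata(attr);                         // persist the change to the file
     *     }
     * }
     */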
1643
1644    /**
1645     * Deletes an existing piece of metadata from this object.
1646     *
1647     * @param info
1648     *            the metadata to delete.
1649     *
1650     * @throws HDF5Exception
     *             if the metadata cannot be removed
1652     */
1653    @Override
1654    public void removeMetadata(Object info) throws HDF5Exception {
1655        try {
1656            objMetadata.removeMetadata(info);
1657        }
1658        catch (Exception ex) {
            log.debug("removeMetadata(): Object not an Attribute: ", ex);
1660            return;
1661        }
1662
1663        Attribute attr = (Attribute) info;
1664        log.trace("removeMetadata(): {}", attr.getAttributeName());
1665        long did = open();
1666        if (did >= 0) {
1667            try {
1668                H5.H5Adelete(did, attr.getAttributeName());
1669            }
1670            finally {
1671                close(did);
1672            }
1673        }
1674        else {
1675            log.debug("removeMetadata(): failed to open compound dataset");
1676        }
1677    }
1678
1679    /**
1680     * Updates an existing piece of metadata attached to this object.
1681     *
1682     * @param info
1683     *            the metadata to update.
1684     *
1685     * @throws HDF5Exception
     *             if the metadata cannot be updated
1687     */
1688    @Override
1689    public void updateMetadata(Object info) throws HDF5Exception {
1690        try {
1691            objMetadata.updateMetadata(info);
1692        }
1693        catch (Exception ex) {
            log.debug("updateMetadata(): Object not an Attribute: ", ex);
1695            return;
1696        }
1697    }
1698
1699    /*
1700     * (non-Javadoc)
1701     *
1702     * @see hdf.object.HObject#setName(java.lang.String)
1703     */
1704    @Override
1705    public void setName(String newName) throws Exception {
1706        if (newName == null)
1707            throw new IllegalArgumentException("The new name is NULL");
1708
1709        H5File.renameObject(this, newName);
1710        super.setName(newName);
1711    }
1712
1713    /**
1714     * @deprecated Not for public use in the future. <br>
     *             Use
1716     *             {@link #create(String, Group, long[], long[], long[], int, String[], Datatype[], int[], long[][], Object)}
1717     *
1718     * @param name
1719     *            the name of the dataset to create.
1720     * @param pgroup
1721     *            parent group where the new dataset is created.
1722     * @param dims
1723     *            the dimension size of the dataset.
1724     * @param memberNames
     *            the names of the compound datatype members
     * @param memberDatatypes
     *            the datatypes of the compound datatype members
1728     * @param memberSizes
1729     *            the dim sizes of the members
1730     * @param data
1731     *            list of data arrays written to the new dataset, null if no data is written to the new
1732     *            dataset.
1733     *
1734     * @return the new compound dataset if successful; otherwise returns null.
1735     *
1736     * @throws Exception
1737     *             if there is a failure.
1738     */
1739    @Deprecated
1740    public static Dataset create(String name, Group pgroup, long[] dims, String[] memberNames,
1741            Datatype[] memberDatatypes, int[] memberSizes, Object data) throws Exception {
1742        if ((pgroup == null) || (name == null) || (dims == null) || (memberNames == null)
1743                || (memberDatatypes == null) || (memberSizes == null)) {
1744            return null;
1745        }
1746
1747        int nMembers = memberNames.length;
1748        int memberRanks[] = new int[nMembers];
1749        long memberDims[][] = new long[nMembers][1];
1750        for (int i = 0; i < nMembers; i++) {
1751            memberRanks[i] = 1;
1752            memberDims[i][0] = memberSizes[i];
1753        }
1754
1755        return H5CompoundDS.create(name, pgroup, dims, memberNames, memberDatatypes, memberRanks, memberDims, data);
1756    }
1757
1758    /**
1759     * @deprecated Not for public use in the future. <br>
     *             Use
1761     *             {@link #create(String, Group, long[], long[], long[], int, String[], Datatype[], int[], long[][], Object)}
1762     *
1763     * @param name
1764     *            the name of the dataset to create.
1765     * @param pgroup
1766     *            parent group where the new dataset is created.
1767     * @param dims
1768     *            the dimension size of the dataset.
1769     * @param memberNames
     *            the names of the compound datatype members
     * @param memberDatatypes
     *            the datatypes of the compound datatype members
1773     * @param memberRanks
1774     *            the ranks of the members
1775     * @param memberDims
1776     *            the dim sizes of the members
1777     * @param data
1778     *            list of data arrays written to the new dataset, null if no data is written to the new
1779     *            dataset.
1780     *
1781     * @return the new compound dataset if successful; otherwise returns null.
1782     *
1783     * @throws Exception
1784     *             if the dataset can not be created.
1785     */
1786    @Deprecated
1787    public static Dataset create(String name, Group pgroup, long[] dims, String[] memberNames,
1788            Datatype[] memberDatatypes, int[] memberRanks, long[][] memberDims, Object data) throws Exception {
1789        return H5CompoundDS.create(name, pgroup, dims, null, null, -1, memberNames, memberDatatypes, memberRanks,
1790                memberDims, data);
1791    }
1792
1793    /**
1794     * Creates a simple compound dataset in a file with/without chunking and compression.
1795     *
     * This function provides an easy way to create a simple compound dataset in a file by hiding the
     * tedious details of compound dataset creation from users.
     *
     * This function calls H5.H5Dcreate() to create a simple compound dataset in a file. Nested compound
     * datasets are not supported. The required information to create a compound dataset includes the
     * name, the parent group and data space of the dataset, and the names, datatypes and data spaces of
     * the compound fields. Other information, such as chunks, compression and the data buffer, is optional.
     *
     * The following example shows how to use this function to create a compound dataset in a file.
1805     *
1806     * <pre>
1807     * H5File file = null;
1808     * String message = &quot;&quot;;
1809     * Group pgroup = null;
1810     * int[] DATA_INT = new int[DIM_SIZE];
1811     * float[] DATA_FLOAT = new float[DIM_SIZE];
1812     * String[] DATA_STR = new String[DIM_SIZE];
1813     * long[] DIMs = { 50, 10 };
1814     * long[] CHUNKs = { 25, 5 };
1815     *
1816     * try {
1817     *     file = (H5File) H5FILE.open(fname, H5File.CREATE);
1818     *     file.open();
1819     *     pgroup = (Group) file.get(&quot;/&quot;);
1820     * }
1821     * catch (Exception ex) {
1822     * }
1823     *
1824     * Vector data = new Vector();
1825     * data.add(0, DATA_INT);
1826     * data.add(1, DATA_FLOAT);
1827     * data.add(2, DATA_STR);
1828     *
     * // create the member datatypes
1830     * Datatype[] mdtypes = new H5Datatype[3];
1831     * String[] mnames = { &quot;int&quot;, &quot;float&quot;, &quot;string&quot; };
1832     * Dataset dset = null;
1833     * try {
1834     *     mdtypes[0] = new H5Datatype(Datatype.CLASS_INTEGER, 4, Datatype.NATIVE, Datatype.NATIVE);
1835     *     mdtypes[1] = new H5Datatype(Datatype.CLASS_FLOAT, 4, Datatype.NATIVE, Datatype.NATIVE);
1836     *     mdtypes[2] = new H5Datatype(Datatype.CLASS_STRING, STR_LEN, Datatype.NATIVE, Datatype.NATIVE);
1837     *     dset = file.createCompoundDS(&quot;/CompoundDS&quot;, pgroup, DIMs, null, CHUNKs, 9, mnames, mdtypes, null, data);
1838     * }
1839     * catch (Exception ex) {
1840     *     failed(message, ex, file);
1841     *     return 1;
1842     * }
1843     * </pre>
1844     *
1845     * @param name
1846     *            the name of the dataset to create.
1847     * @param pgroup
1848     *            parent group where the new dataset is created.
1849     * @param dims
1850     *            the dimension size of the dataset.
1851     * @param maxdims
1852     *            the max dimension size of the dataset. maxdims is set to dims if maxdims = null.
1853     * @param chunks
     *            the chunk size of the dataset. No chunking if chunks = null.
     * @param gzip
     *            GZIP compression level (1 to 9). No compression if gzip is 0 or negative.
1857     * @param memberNames
     *            the names of the compound datatype members
     * @param memberDatatypes
     *            the datatypes of the compound datatype members
1861     * @param memberRanks
1862     *            the ranks of the members
1863     * @param memberDims
1864     *            the dim sizes of the members
1865     * @param data
1866     *            list of data arrays written to the new dataset, null if no data is written to the new
1867     *            dataset.
1868     *
1869     * @return the new compound dataset if successful; otherwise returns null.
1870     *
1871     * @throws Exception
1872     *             if there is a failure.
1873     */
1874    public static Dataset create(String name, Group pgroup, long[] dims, long[] maxdims, long[] chunks, int gzip,
1875            String[] memberNames, Datatype[] memberDatatypes, int[] memberRanks, long[][] memberDims, Object data) throws Exception {
1876        H5CompoundDS dataset = null;
1877        String fullPath = null;
1878        long did = HDF5Constants.H5I_INVALID_HID;
1879        long plist = HDF5Constants.H5I_INVALID_HID;
1880        long sid = HDF5Constants.H5I_INVALID_HID;
1881        long tid = HDF5Constants.H5I_INVALID_HID;
1882
1883        if ((pgroup == null) || (name == null) || (dims == null) || ((gzip > 0) && (chunks == null))
1884                || (memberNames == null) || (memberDatatypes == null) || (memberRanks == null)
1885                || (memberDims == null)) {
1886            log.debug("create(): one or more parameters are null");
1887            return null;
1888        }
1889
1890        H5File file = (H5File) pgroup.getFileFormat();
1891        if (file == null) {
1892            log.debug("create(): parent group FileFormat is null");
1893            return null;
1894        }
1895
1896        String path = HObject.SEPARATOR;
1897        if (!pgroup.isRoot()) {
1898            path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR;
1899            if (name.endsWith("/"))
1900                name = name.substring(0, name.length() - 1);
1901            int idx = name.lastIndexOf('/');
1902            if (idx >= 0)
1903                name = name.substring(idx + 1);
1904        }
1905
1906        fullPath = path + name;
1907
1908        int typeSize = 0;
1909        int nMembers = memberNames.length;
1910        long[] mTypes = new long[nMembers];
1911        int memberSize = 1;
1912        for (int i = 0; i < nMembers; i++) {
1913            memberSize = 1;
1914            for (int j = 0; j < memberRanks[i]; j++)
1915                memberSize *= memberDims[i][j];
1916
1917            mTypes[i] = -1;
1918            // the member is an array
1919            if ((memberSize > 1) && (!memberDatatypes[i].isString())) {
1920                long tmptid = -1;
1921                if ((tmptid = memberDatatypes[i].createNative()) >= 0) {
1922                    try {
1923                        mTypes[i] = H5.H5Tarray_create(tmptid, memberRanks[i], memberDims[i]);
1924                    }
1925                    finally {
1926                        try {
1927                            H5.H5Tclose(tmptid);
1928                        }
1929                        catch (Exception ex) {
1930                            log.debug("create(): H5Tclose(tmptid {}) failure: ", tmptid, ex);
1931                        }
1932                    }
1933                }
1934            }
1935            else {
1936                mTypes[i] = memberDatatypes[i].createNative();
1937            }
1938            try {
1939                typeSize += H5.H5Tget_size(mTypes[i]);
1940            }
1941            catch (Exception ex) {
1942                log.debug("create(): array create H5Tget_size:", ex);
1943
                // release the member datatypes created so far, including index 0
                while (i >= 0) {
1945                    try {
1946                        H5.H5Tclose(mTypes[i]);
1947                    }
1948                    catch (HDF5Exception ex2) {
1949                        log.debug("create(): H5Tclose(mTypes[{}] {}) failure: ", i, mTypes[i], ex2);
1950                    }
1951                    i--;
1952                }
1953                throw ex;
1954            }
1955        } //  (int i = 0; i < nMembers; i++) {
1956
1957        // setup chunking and compression
1958        boolean isExtentable = false;
1959        if (maxdims != null) {
1960            for (int i = 0; i < maxdims.length; i++) {
1961                if (maxdims[i] == 0)
1962                    maxdims[i] = dims[i];
1963                else if (maxdims[i] < 0)
1964                    maxdims[i] = HDF5Constants.H5S_UNLIMITED;
1965
1966                if (maxdims[i] != dims[i])
1967                    isExtentable = true;
1968            }
1969        }
1970
        // HDF5 requires chunking in order to define extendible datasets.
        // Chunking makes it possible to extend datasets efficiently without
        // having to reorganize storage excessively. A default chunk size of
        // min(dimension size, 64) in each dimension generally performs well.
1975        if ((chunks == null) && isExtentable) {
1976            chunks = new long[dims.length];
1977            for (int i = 0; i < dims.length; i++)
1978                chunks[i] = Math.min(dims[i], 64);
1979        }
1980
1981        // prepare the dataspace and datatype
1982        int rank = dims.length;
1983
1984        try {
1985            sid = H5.H5Screate_simple(rank, dims, maxdims);
1986
1987            // figure out creation properties
1988            plist = HDF5Constants.H5P_DEFAULT;
1989
1990            tid = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, typeSize);
1991            int offset = 0;
1992            for (int i = 0; i < nMembers; i++) {
1993                H5.H5Tinsert(tid, memberNames[i], offset, mTypes[i]);
1994                offset += H5.H5Tget_size(mTypes[i]);
1995            }
1996
1997            if (chunks != null) {
1998                plist = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
1999
2000                H5.H5Pset_layout(plist, HDF5Constants.H5D_CHUNKED);
2001                H5.H5Pset_chunk(plist, rank, chunks);
2002
2003                // compression requires chunking
2004                if (gzip > 0) {
2005                    H5.H5Pset_deflate(plist, gzip);
2006                }
2007            }
2008
2009            long fid = file.getFID();
2010
2011            did = H5.H5Dcreate(fid, fullPath, tid, sid, HDF5Constants.H5P_DEFAULT, plist, HDF5Constants.H5P_DEFAULT);
2012            dataset = new H5CompoundDS(file, name, path);
2013        }
2014        finally {
2015            try {
2016                H5.H5Pclose(plist);
2017            }
2018            catch (HDF5Exception ex) {
2019                log.debug("create(): H5Pclose(plist {}) failure: ", plist, ex);
2020            }
2021            try {
2022                H5.H5Sclose(sid);
2023            }
2024            catch (HDF5Exception ex) {
2025                log.debug("create(): H5Sclose(sid {}) failure: ", sid, ex);
2026            }
2027            try {
2028                H5.H5Tclose(tid);
2029            }
2030            catch (HDF5Exception ex) {
2031                log.debug("create(): H5Tclose(tid {}) failure: ", tid, ex);
2032            }
2033            try {
2034                H5.H5Dclose(did);
2035            }
2036            catch (HDF5Exception ex) {
2037                log.debug("create(): H5Dclose(did {}) failure: ", did, ex);
2038            }
2039
2040            for (int i = 0; i < nMembers; i++) {
2041                try {
2042                    H5.H5Tclose(mTypes[i]);
2043                }
2044                catch (HDF5Exception ex) {
2045                    log.debug("create(): H5Tclose(mTypes[{}] {}) failure: ", i, mTypes[i], ex);
2046                }
2047            }
2048        }
2049
2050        if (dataset != null) {
2051            pgroup.addToMemberList(dataset);
2052            if (data != null) {
2053                dataset.init();
2054                long selected[] = dataset.getSelectedDims();
2055                for (int i = 0; i < rank; i++)
2056                    selected[i] = dims[i];
2057                dataset.write(data);
2058            }
2059        }
2060
2061        return dataset;
2062    }
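
    /*
     * A minimal sketch of calling the create() method above directly, building an
     * extendible (unlimited first dimension), chunked and GZIP-compressed compound
     * dataset with two scalar members. The parent group pgroup is assumed to come
     * from an open, writable H5File; exception handling is omitted.
     *
     * String[]   mnames = { "id", "value" };
     * Datatype[] mtypes = {
     *     new H5Datatype(Datatype.CLASS_INTEGER, 4, Datatype.NATIVE, Datatype.NATIVE),
     *     new H5Datatype(Datatype.CLASS_FLOAT, 8, Datatype.NATIVE, Datatype.NATIVE) };
     * int[]      mranks  = { 1, 1 };
     * long[][]   mdims   = { { 1 }, { 1 } };
     * long[]     dims    = { 100 };
     * long[]     maxdims = { HDF5Constants.H5S_UNLIMITED };
     * long[]     chunks  = { 50 };
     *
     * Dataset table = H5CompoundDS.create("Table", pgroup, dims, maxdims, chunks, 6,
     *                                     mnames, mtypes, mranks, mdims, null);
     */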
2063
2064    /*
2065     * (non-Javadoc)
2066     *
2067     * @see hdf.object.Dataset#isString(long)
2068     */
2069    @Override
2070    public boolean isString(long tid) {
2071        boolean b = false;
2072        try {
2073            b = (HDF5Constants.H5T_STRING == H5.H5Tget_class(tid));
2074        }
2075        catch (Exception ex) {
2076            b = false;
2077        }
2078
2079        return b;
2080    }
2081
2082    /*
2083     * (non-Javadoc)
2084     *
2085     * @see hdf.object.Dataset#getSize(long)
2086     */
2087    @Override
2088    public long getSize(long tid) {
2089        return H5Datatype.getDatatypeSize(tid);
2090    }
2091
2092    /*
2093     * (non-Javadoc)
2094     *
2095     * @see hdf.object.Dataset#isVirtual()
2096     */
2097    @Override
2098    public boolean isVirtual() {
2099        return isVirtual;
2100    }
2101
2102    /*
2103     * (non-Javadoc)
2104     *
2105     * @see hdf.object.Dataset#getVirtualFilename(int)
2106     */
2107    @Override
2108    public String getVirtualFilename(int index) {
        if (isVirtual)
2110            return virtualNameList.get(index);
2111        else
2112            return null;
2113    }
2114
2115    /*
2116     * (non-Javadoc)
2117     *
2118     * @see hdf.object.Dataset#getVirtualMaps()
2119     */
2120    @Override
2121    public int getVirtualMaps() {
        if (isVirtual)
2123            return virtualNameList.size();
2124        else
2125            return -1;
2126    }
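
    /*
     * For virtual datasets, the two accessors above can be combined to list every
     * source file backing the VDS mapping. A minimal sketch, assuming dset is an
     * initialized H5CompoundDS:
     *
     * if (dset.isVirtual()) {
     *     for (int i = 0; i < dset.getVirtualMaps(); i++)
     *         System.out.println("VDS source file: " + dset.getVirtualFilename(i));
     * }
     */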
2127
2128    /*
2129     * (non-Javadoc)
2130     *
2131     * @see hdf.object.Dataset#toString(String delimiter, int maxItems)
2132     */
2133    @Override
2134    public String toString(String delimiter, int maxItems) {
2135        Object theData = originalBuf;
2136        if (theData == null) {
2137            log.debug("toString: value is null");
2138            return null;
2139        }
2140
2141        if (theData instanceof List<?>) {
2142            log.trace("toString: value is list");
2143            return null;
2144        }
2145
2146        Class<? extends Object> valClass = theData.getClass();
2147
2148        if (!valClass.isArray()) {
2149            log.trace("toString: finish - not array");
2150            String strValue = theData.toString();
2151            if (maxItems > 0 && strValue.length() > maxItems)
2152                // truncate the extra characters
2153                strValue = strValue.substring(0, maxItems);
2154            return strValue;
2155        }
2156
2157        // value is an array
2158        StringBuilder sb = new StringBuilder();
2159        int n = Array.getLength(theData);
2160        if ((maxItems > 0) && (n > maxItems))
2161            n = maxItems;
2162
2163        log.trace("toString: isStdRef={} Array.getLength={}", ((H5Datatype) getDatatype()).isStdRef(), n);
2164        if (((H5Datatype) getDatatype()).isStdRef()) {
2165            String cname = valClass.getName();
2166            char dname = cname.charAt(cname.lastIndexOf('[') + 1);
2167            log.trace("toString: isStdRef with cname={} dname={}", cname, dname);
2168            String ref_str = ((H5ReferenceType) getDatatype()).getObjectReferenceName((byte[])theData);
2169            log.trace("toString: ref_str={}", ref_str);
2170            return ref_str;
2171        }
2172        else {
2173            return super.toString(delimiter, maxItems);
2174        }
2175    }
2176
2177}