/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the COPYING file, which can be found  *
 * at the root of the source code distribution tree,                         *
 * or in https://www.hdfgroup.org/licenses.                                  *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.h5;

import java.lang.reflect.Array;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Vector;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
import hdf.hdf5lib.HDFArray;
import hdf.hdf5lib.HDFNativeData;
import hdf.hdf5lib.exceptions.HDF5DataFiltersException;
import hdf.hdf5lib.exceptions.HDF5Exception;
import hdf.hdf5lib.exceptions.HDF5LibraryException;
import hdf.hdf5lib.structs.H5O_info_t;
import hdf.hdf5lib.structs.H5O_token_t;

import hdf.object.Attribute;
import hdf.object.CompoundDS;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.MetaDataContainer;
import hdf.object.Utils;

import hdf.object.h5.H5Datatype;
import hdf.object.h5.H5MetaDataContainer;
import hdf.object.h5.H5ReferenceType;
/**
 * The H5CompoundDS class defines an HDF5 dataset of compound datatypes.
 *
 * An HDF5 dataset is an object composed of a collection of data elements, or raw data, and metadata that stores a
 * description of the data elements, data layout, and all other information necessary to write, read, and interpret the
 * stored data.
 *
 * An HDF5 compound datatype is similar to a struct in C or a common block in Fortran: it is a collection of one or more
 * atomic types or small arrays of such types. Each member of a compound type has a name which is unique within that
 * type, and a byte offset that determines the first byte (smallest byte address) of that member in a compound datum.
 *
 * For more information on HDF5 datasets and datatypes, read the
 * <a href="https://hdfgroup.github.io/hdf5/_h5_d__u_g.html#sec_dataset">HDF5 Datasets</a> and
 * <a href="https://hdfgroup.github.io/hdf5/_h5_t__u_g.html#sec_datatype">HDF5 Datatypes</a>
 * chapters of the HDF5 User Guide.
 *
 * There are two basic types of compound datasets: simple compound data and nested compound data. Members of a simple
 * compound dataset have atomic datatypes. Members of a nested compound dataset are themselves compound data or arrays
 * of compound data.
 *
 * Since Java does not understand C structures, we cannot directly read/write compound data values as in the following C
 * example.
 *
 * <pre>
 * typedef struct s1_t {
 *         int    a;
 *         float  b;
 *         double c;
 *         } s1_t;
 *     s1_t       s1[LENGTH];
 *     ...
 *     H5Dwrite(..., s1);
 *     H5Dread(..., s1);
 * </pre>
 *
 * Values of compound data fields are stored in a java.util.Vector object. We read and write compound data by field
 * rather than as a whole structure. For the example above, the java.util.Vector object has three elements: int[LENGTH],
 * float[LENGTH] and double[LENGTH]. Since Java understands the primitive datatypes of int, float and double, we are
 * able to read/write the compound data by field.
 *
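 * For illustration, here is a minimal sketch of reading such a dataset by field. It assumes a file already opened
 * as an H5File and a compound dataset named "/comp" with the three fields above; the path and field names are
 * hypothetical.
 *
 * <pre>
 * H5CompoundDS dset = (H5CompoundDS) file.get("/comp");
 * dset.init(); // load datatype and dataspace information
 * List data = (List) dset.getData();
 * int[] a = (int[]) data.get(0); // field "a"
 * float[] b = (float[]) data.get(1); // field "b"
 * double[] c = (double[]) data.get(2); // field "c"
 * </pre>
 *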
 * @version 1.1 9/4/2007
 * @author Peter X. Cao
 */
public class H5CompoundDS extends CompoundDS implements MetaDataContainer
{
    private static final long serialVersionUID = -5968625125574032736L;

    private static final Logger log = LoggerFactory.getLogger(H5CompoundDS.class);

    /**
     * The metadata object for this data object. Members of the metadata are instances of Attribute.
     */
    private H5MetaDataContainer objMetadata;

    /** the object properties */
    private H5O_info_t objInfo;

    /** flag to indicate if the dataset is an external dataset */
    private boolean isExternal = false;

    /** flag to indicate if the dataset is a virtual dataset */
    private boolean isVirtual = false;
    /** the list of virtual names */
    private List<String> virtualNameList;

    /**
     * Constructs an instance of an HDF5 compound dataset with a given file, dataset name and path.
     *
     * The dataset object represents an existing dataset in the file. For example, new
     * H5CompoundDS(file, "dset1", "/g0/") constructs a dataset object that corresponds to the
     * dataset "dset1" at group "/g0/".
     *
     * This object is usually constructed at FileFormat.open(), which loads the file structure and
     * object information into memory. It is rarely used elsewhere.
     *
     * @param theFile
     *            the file that contains the data object.
     * @param theName
     *            the name of the data object, e.g. "dset".
     * @param thePath
     *            the full path of the data object, e.g. "/arrays/".
     */
    public H5CompoundDS(FileFormat theFile, String theName, String thePath) {
        this(theFile, theName, thePath, null);
    }

    /**
     * @deprecated Not for public use in the future.<br>
     *             Use {@link #H5CompoundDS(FileFormat, String, String)} instead.
     *
     * @param theFile
     *            the file that contains the data object.
     * @param theName
     *            the name of the data object, e.g. "dset".
     * @param thePath
     *            the full path of the data object, e.g. "/arrays/".
     * @param oid
     *            the oid of the data object.
     */
    @Deprecated
    public H5CompoundDS(FileFormat theFile, String theName, String thePath, long[] oid) {
        super(theFile, theName, thePath, oid);
        objMetadata = new H5MetaDataContainer(theFile, theName, thePath, this);

        if (theFile != null) {
            if (oid == null) {
                // retrieve the object ID
                byte[] refBuf = null;
                try {
                    refBuf = H5.H5Rcreate_object(theFile.getFID(), this.getFullName(), HDF5Constants.H5P_DEFAULT);
                    this.oid = HDFNativeData.byteToLong(refBuf);
                    log.trace("constructor REF {} to OID {}", refBuf, this.oid);
                }
                catch (Exception ex) {
                    log.debug("constructor ID {} for {} failed H5Rcreate_object", theFile.getFID(), this.getFullName());
                }
                finally {
                    if (refBuf != null)
                        H5.H5Rdestroy(refBuf);
                }
            }
            log.trace("constructor OID {}", this.oid);
            try {
                objInfo = H5.H5Oget_info_by_name(theFile.getFID(), this.getFullName(), HDF5Constants.H5O_INFO_BASIC, HDF5Constants.H5P_DEFAULT);
            }
            catch (Exception ex) {
                objInfo = new H5O_info_t(-1L, null, 0, 0, 0L, 0L, 0L, 0L, 0L);
            }
        }
        else {
            this.oid = null;
            objInfo = new H5O_info_t(-1L, null, 0, 0, 0L, 0L, 0L, 0L, 0L);
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#open()
     */
    @Override
    public long open() {
        long did = HDF5Constants.H5I_INVALID_HID;

        if (getFID() < 0)
            log.trace("open(): file id for:{} is invalid", getPath() + getName());
        else {
            try {
                did = H5.H5Dopen(getFID(), getPath() + getName(), HDF5Constants.H5P_DEFAULT);
                log.trace("open(): did={}", did);
            }
            catch (HDF5Exception ex) {
                log.debug("open(): Failed to open dataset {}", getPath() + getName(), ex);
                did = HDF5Constants.H5I_INVALID_HID;
            }
        }

        return did;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#close(int)
     */
    @Override
    public void close(long did) {
        if (did >= 0) {
            try {
                H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_LOCAL);
            }
            catch (Exception ex) {
                log.debug("close(): H5Fflush(did {}) failure: ", did, ex);
            }
            try {
                H5.H5Dclose(did);
            }
            catch (HDF5Exception ex) {
                log.debug("close(): H5Dclose(did {}) failure: ", did, ex);
            }
        }
    }

    /**
     * Retrieves datatype and dataspace information from file and sets the dataset
     * in memory.
     *
     * init() is designed to support lazy operation in a dataset object. When a
     * data object is retrieved from file, the datatype, dataspace and raw data are
     * not loaded into memory. When it is asked to read the raw data from file,
     * init() is first called to get the datatype and dataspace information, then
     * load the raw data from file.
     *
     * init() is also used to reset the selection of a dataset (start, stride and
     * count) to the default, which is the entire dataset for 1D or 2D datasets. In
     * the following example, init() at step 1) retrieves datatype and dataspace
     * information from file. getData() at step 3) reads only one data point. init()
     * at step 4) resets the selection to the whole dataset. getData() at step 6)
     * reads the values of the whole dataset into memory.
     *
     * <pre>
     * dset = (Dataset) file.get(NAME_DATASET);
     *
     * // 1) get datatype and dataspace information from file
     * dset.init();
     * rank = dset.getRank(); // rank = 2, a 2D dataset
     * count = dset.getSelectedDims();
     * start = dset.getStartDims();
     * dims = dset.getDims();
     *
     * // 2) select only one data point
     * for (int i = 0; i &lt; rank; i++) {
     *     start[i] = 0;
     *     count[i] = 1;
     * }
     *
     * // 3) read one data point
     * data = dset.getData();
     *
     * // 4) reset selection to the whole dataset
     * dset.init();
     *
     * // 5) clean the memory data buffer
     * dset.clearData();
     *
     * // 6) read the whole dataset
     * data = dset.getData();
     * </pre>
     */
    @Override
    public void init() {
        if (inited) {
            resetSelection();
            log.trace("init(): Dataset already initialized");
            return; // already called. Initialize only once
        }

        long did = HDF5Constants.H5I_INVALID_HID;
        long tid = HDF5Constants.H5I_INVALID_HID;
        long sid = HDF5Constants.H5I_INVALID_HID;
        flatNameList = new Vector<>();
        flatTypeList = new Vector<>();

        did = open();
        if (did >= 0) {
            // check if it is an external or virtual dataset
            long pid = HDF5Constants.H5I_INVALID_HID;
            try {
                pid = H5.H5Dget_create_plist(did);
                try {
                    int nfiles = H5.H5Pget_external_count(pid);
                    isExternal = (nfiles > 0);
                    int layoutType = H5.H5Pget_layout(pid);
                    isVirtual = (layoutType == HDF5Constants.H5D_VIRTUAL);
                    if (isVirtual) {
                        try {
                            long vmaps = H5.H5Pget_virtual_count(pid);
                            if (vmaps > 0) {
                                virtualNameList = new Vector<>();
                                for (long next = 0; next < vmaps; next++) {
                                    try {
                                        String fname = H5.H5Pget_virtual_filename(pid, next);
                                        virtualNameList.add(fname);
                                        log.trace("init(): virtualNameList[{}]={}", next, fname);
                                    }
                                    catch (Exception err) {
                                        log.trace("init(): vds[{}] continue", next);
                                    }
                                }
                            }
                        }
                        catch (Exception err) {
                            log.debug("init(): vds count error: ", err);
                        }
                    }
                    log.trace("init(): pid={} nfiles={} isExternal={} isVirtual={}", pid, nfiles, isExternal, isVirtual);
                }
                catch (Exception ex) {
                    log.debug("init(): check if it is an external or virtual dataset: ", ex);
                }
            }
            catch (Exception ex) {
                log.debug("init(): H5Dget_create_plist() failure: ", ex);
            }
            finally {
                try {
                    H5.H5Pclose(pid);
                }
                catch (Exception ex) {
                    log.debug("init(): H5Pclose(pid {}) failure: ", pid, ex);
                }
            }

            try {
                sid = H5.H5Dget_space(did);
                rank = H5.H5Sget_simple_extent_ndims(sid);
                space_type = H5.H5Sget_simple_extent_type(sid);
                isNULL = (space_type == HDF5Constants.H5S_NULL);
                tid = H5.H5Dget_type(did);
                log.trace("init(): tid={} sid={} rank={}", tid, sid, rank);

                if (rank == 0) {
                    // a scalar data point
                    isScalar = true;
                    rank = 1;
                    dims = new long[] { 1 };
                    log.trace("init(): rank is a scalar data point");
                }
                else {
                    isScalar = false;
                    dims = new long[rank];
                    maxDims = new long[rank];
                    H5.H5Sget_simple_extent_dims(sid, dims, maxDims);
                    log.trace("init(): rank={}, dims={}, maxDims={}", rank, dims, maxDims);
                }

                try {
                    int nativeClass = H5.H5Tget_class(tid);
                    if (nativeClass == HDF5Constants.H5T_REFERENCE) {
                        long lsize = 1;
                        if (rank > 0) {
                            log.trace("init(): rank={}, dims={}", rank, dims);
                            for (int j = 0; j < dims.length; j++) {
                                lsize *= dims[j];
                            }
                        }
                        datatype = new H5ReferenceType(getFileFormat(), lsize, tid);
                    }
                    else
                        datatype = new H5Datatype(getFileFormat(), tid);

                    log.trace("init(): tid={} has isText={} : isVLEN={} : isEnum={} : isUnsigned={} : isStdRef={} : isRegRef={}", tid,
                            datatype.isText(), datatype.isVLEN(), ((H5Datatype) datatype).isEnum(), datatype.isUnsigned(),
                            ((H5Datatype) datatype).isStdRef(), ((H5Datatype) datatype).isRegRef());

                    H5Datatype.extractCompoundInfo((H5Datatype) datatype, "", flatNameList, flatTypeList);
                }
                catch (Exception ex) {
                    log.debug("init(): failed to create datatype for dataset: ", ex);
                    datatype = null;
                }

                // initialize member information
                numberOfMembers = flatNameList.size();
                log.trace("init(): numberOfMembers={}", numberOfMembers);

                memberNames = new String[numberOfMembers];
                memberTypes = new Datatype[numberOfMembers];
                memberOrders = new int[numberOfMembers];
                isMemberSelected = new boolean[numberOfMembers];
                memberDims = new Object[numberOfMembers];

                for (int i = 0; i < numberOfMembers; i++) {
                    isMemberSelected[i] = true;
                    memberOrders[i] = 1;
                    memberDims[i] = null;

                    try {
                        memberTypes[i] = flatTypeList.get(i);
                        log.trace("init()[{}]: memberTypes[{}]={}", i, i, memberTypes[i].getDescription());

                        if (memberTypes[i].isArray()) {
                            long mdim[] = memberTypes[i].getArrayDims();
                            int idim[] = new int[mdim.length];
                            int arrayNpoints = 1;

                            for (int j = 0; j < idim.length; j++) {
                                idim[j] = (int) mdim[j];
                                arrayNpoints *= idim[j];
                            }

                            memberDims[i] = idim;
                            memberOrders[i] = arrayNpoints;
                        }
                    }
                    catch (Exception ex) {
                        log.debug("init()[{}]: memberTypes[{}] get failure: ", i, i, ex);
                        memberTypes[i] = null;
                    }

                    try {
                        memberNames[i] = flatNameList.get(i);
                        log.trace("init()[{}]: memberNames[{}]={}", i, i, memberNames[i]);
                    }
                    catch (Exception ex) {
                        log.debug("init()[{}]: memberNames[{}] get failure: ", i, i, ex);
                        memberNames[i] = "null";
                    }
                } //  (int i=0; i<numberOfMembers; i++)

                inited = true;
            }
            catch (HDF5Exception ex) {
                numberOfMembers = 0;
                memberNames = null;
                memberTypes = null;
                memberOrders = null;
                log.debug("init(): ", ex);
            }
            finally {
                if (datatype != null)
                    datatype.close(tid);

                try {
                    H5.H5Sclose(sid);
                }
                catch (HDF5Exception ex2) {
                    log.debug("init(): H5Sclose(sid {}) failure: ", sid, ex2);
                }
            }

            close(did);

            startDims = new long[rank];
            selectedDims = new long[rank];

            resetSelection();
        }
        else {
            log.debug("init(): failed to open dataset");
        }
    }

    /**
     * Get the token for this object.
     *
     * @return the token for this object as an array of longs.
     */
    public long[] getToken() {
        H5O_token_t token = objInfo.token;
        return HDFNativeData.byteToLong(token.data);
    }

    /**
     * Check if the object has any attributes attached.
     *
     * @return true if it has any attributes, false otherwise.
     */
    @Override
    public boolean hasAttribute() {
        objInfo.num_attrs = objMetadata.getObjectAttributeSize();

        if (objInfo.num_attrs < 0) {
            long did = open();
            if (did >= 0) {
                objInfo.num_attrs = 0;

                try {
                    objInfo = H5.H5Oget_info(did);
                }
                catch (Exception ex) {
                    objInfo.num_attrs = 0;
                    log.debug("hasAttribute(): get object info failure: ", ex);
                }
                finally {
                    close(did);
                }
                objMetadata.setObjectAttributeSize((int) objInfo.num_attrs);
            }
            else {
                log.debug("hasAttribute(): could not open dataset");
            }
        }

        log.trace("hasAttribute(): nAttributes={}", objInfo.num_attrs);
        return (objInfo.num_attrs > 0);
    }

    /**
     * Returns the datatype of the data object.
     *
     * @return the datatype of the data object.
     */
    @Override
    public Datatype getDatatype() {
        if (!inited)
            init();

        if (datatype == null) {
            long did = HDF5Constants.H5I_INVALID_HID;
            long tid = HDF5Constants.H5I_INVALID_HID;

            did = open();
            if (did >= 0) {
                try {
                    tid = H5.H5Dget_type(did);
                    int nativeClass = H5.H5Tget_class(tid);
                    if (nativeClass == HDF5Constants.H5T_REFERENCE) {
                        long lsize = 1;
                        if (rank > 0) {
                            log.trace("getDatatype(): rank={}, dims={}", rank, dims);
                            for (int j = 0; j < dims.length; j++) {
                                lsize *= dims[j];
                            }
                        }
                        datatype = new H5ReferenceType(getFileFormat(), lsize, tid);
                    }
                    else
                        datatype = new H5Datatype(getFileFormat(), tid);
                }
                catch (Exception ex) {
                    log.debug("getDatatype(): ", ex);
                }
                finally {
                    try {
                        H5.H5Tclose(tid);
                    }
                    catch (HDF5Exception ex) {
                        log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex);
                    }
                    try {
                        H5.H5Dclose(did);
                    }
                    catch (HDF5Exception ex) {
                        log.debug("getDatatype(): H5Dclose(did {}) failure: ", did, ex);
                    }
                }
            }
        }

        if (isExternal) {
            String pdir = this.getFileFormat().getAbsoluteFile().getParent();

            if (pdir == null) {
                pdir = ".";
            }
            System.setProperty("user.dir", pdir);
            log.trace("getDatatype(): External dataset: user.dir={}", pdir);
        }

        return datatype;
    }

    /**
     * Removes all of the elements from the metadata list.
     * The list should be empty after this call returns.
     */
    @Override
    public void clear() {
        super.clear();
        objMetadata.clear();
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#readBytes()
     */
    @Override
    public byte[] readBytes() throws HDF5Exception {
        byte[] theData = null;

        if (!isInited())
            init();

        long did = open();
        if (did >= 0) {
            long fspace = HDF5Constants.H5I_INVALID_HID;
            long mspace = HDF5Constants.H5I_INVALID_HID;
            long tid = HDF5Constants.H5I_INVALID_HID;

            try {
                long[] lsize = { 1 };
                for (int j = 0; j < selectedDims.length; j++)
                    lsize[0] *= selectedDims[j];

                fspace = H5.H5Dget_space(did);
                mspace = H5.H5Screate_simple(rank, selectedDims, null);

                // set the rectangle selection
                // HDF5 bug: for scalar dataset, H5Sselect_hyperslab gives core dump
                if (rank * dims[0] > 1)
                    H5.H5Sselect_hyperslab(fspace, HDF5Constants.H5S_SELECT_SET, startDims, selectedStride, selectedDims, null); // set block to 1

                tid = H5.H5Dget_type(did);
                long size = H5.H5Tget_size(tid) * lsize[0];
                log.trace("readBytes(): size = {}", size);
                if (size < 0 || size > Integer.MAX_VALUE)
                    throw new Exception("Invalid int size");

                theData = new byte[(int)size];

                log.trace("readBytes(): H5Dread: did={} tid={} fspace={} mspace={}", did, tid, fspace, mspace);
                H5.H5Dread(did, tid, mspace, fspace, HDF5Constants.H5P_DEFAULT, theData);
            }
            catch (Exception ex) {
                log.debug("readBytes(): failed to read data: ", ex);
            }
            finally {
                try {
                    H5.H5Sclose(fspace);
                }
                catch (Exception ex2) {
                    log.debug("readBytes(): H5Sclose(fspace {}) failure: ", fspace, ex2);
                }
                try {
                    H5.H5Sclose(mspace);
                }
                catch (Exception ex2) {
                    log.debug("readBytes(): H5Sclose(mspace {}) failure: ", mspace, ex2);
                }
                try {
                    H5.H5Tclose(tid);
                }
                catch (HDF5Exception ex2) {
                    log.debug("readBytes(): H5Tclose(tid {}) failure: ", tid, ex2);
                }
                close(did);
            }
        }

        return theData;
    }

    /**
     * Reads the data from file.
     *
     * read() reads the data from file to a memory buffer and returns the memory
     * buffer. The dataset object does not hold the memory buffer. To store the
     * memory buffer in the dataset object, one must call getData().
     *
     * By default, the whole dataset is read into memory. Users can also select
     * a subset to read. Subsetting is done in an implicit way.
     *
     * <b>How to Select a Subset</b>
     *
     * A selection is specified by three arrays: start, stride and count.
     * <ol>
     * <li>start: offset of a selection
     * <li>stride: determines how many elements to move in each dimension
     * <li>count: number of elements to select in each dimension
     * </ol>
     * getStartDims(), getStride() and getSelectedDims() return the start,
     * stride and count arrays respectively. Applications can make a selection
     * by changing the values of the arrays.
     *
     * The following example shows how to make a subset. In the example, the
     * dataset is a 4-dimensional array of [200][100][50][10], i.e. dims[0]=200;
     * dims[1]=100; dims[2]=50; dims[3]=10; <br>
     * We want to select every other data point in dims[1] and dims[2]
     *
     * <pre>
     * int rank = dataset.getRank(); // number of dimensions of the dataset
     * long[] dims = dataset.getDims(); // the dimension sizes of the dataset
     * long[] selected = dataset.getSelectedDims(); // the selected size of the
     *                                              // dataset
     * long[] start = dataset.getStartDims(); // the offset of the selection
     * long[] stride = dataset.getStride(); // the stride of the dataset
     * int[] selectedIndex = dataset.getSelectedIndex(); // the selected
     *                                                   // dimensions for
     *                                                   // display
     *
     * // select dim1 and dim2 as 2D data for display, and slice through dim0
     * selectedIndex[0] = 1;
     * selectedIndex[1] = 2;
     * selectedIndex[2] = 0;
     *
     * // reset the selection arrays
     * for (int i = 0; i &lt; rank; i++) {
     *     start[i] = 0;
     *     selected[i] = 1;
     *     stride[i] = 1;
     * }
     *
     * // set stride to 2 on dim1 and dim2 so that every other data point is
     * // selected.
     * stride[1] = 2;
     * stride[2] = 2;
     *
     * // set the selection size of dim1 and dim2
     * selected[1] = dims[1] / stride[1];
     * selected[2] = dims[2] / stride[2];
     *
     * // when dataset.getData() is called, the selection above will be used
     * // since the dimension arrays are passed by reference. Changes of these
     * // arrays outside the dataset object directly change the values of
     * // these arrays in the dataset object.
     * </pre>
     *
     * For CompoundDS, the memory data object is a java.util.List object. Each
     * element of the list is a data array that corresponds to a compound field.
     *
     * For example, if compound dataset "comp" has the following nested
     * structure, and member datatypes
     *
     * <pre>
     * comp --&gt; m01 (int)
     * comp --&gt; m02 (float)
     * comp --&gt; nest1 --&gt; m11 (char)
     * comp --&gt; nest1 --&gt; m12 (String)
     * comp --&gt; nest1 --&gt; nest2 --&gt; m21 (long)
     * comp --&gt; nest1 --&gt; nest2 --&gt; m22 (double)
     * </pre>
     *
     * getData() returns a list of six arrays: {int[], float[], char[],
     * String[], long[] and double[]}.
     *
     * @return the data read from file.
     *
     * @see #getData()
     * @see hdf.object.DataFormat#read()
     *
     * @throws Exception
     *             if object can not be read
     */
    @Override
    public Object read() throws Exception {
        Object readData = null;

        if (!isInited())
            init();

        try {
            readData = compoundDatasetCommonIO(H5File.IO_TYPE.READ, null);
        }
        catch (Exception ex) {
            log.debug("read(): failed to read compound dataset: ", ex);
            throw new Exception("failed to read compound dataset: " + ex.getMessage(), ex);
        }

        return readData;
    }

    /**
     * Writes the given data buffer into this dataset in a file.
     *
     * The data buffer is a vector that contains the data values of compound fields. The data is written
     * to the file field by field.
     *
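     * For illustration, a minimal sketch of building a write buffer for the example struct s1_t in the
     * class comment; the two-element field values are hypothetical, and "dset" is assumed to be an
     * initialized H5CompoundDS:
     *
     * <pre>
     * List&lt;Object&gt; buf = new Vector&lt;&gt;();
     * buf.add(new int[] { 1, 2 });          // field "a"
     * buf.add(new float[] { 1.0f, 2.0f });  // field "b"
     * buf.add(new double[] { 1.0, 2.0 });   // field "c"
     * dset.write(buf);
     * </pre>
     *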
     * @param buf
     *            The vector that contains the data values of compound fields.
     *
     * @throws Exception
     *             If there is an error at the HDF5 library level.
     */
    @Override
    public void write(Object buf) throws Exception {
        if (this.getFileFormat().isReadOnly())
            throw new Exception("cannot write to compound dataset in file opened as read-only");

        if (!isInited())
            init();

        try {
            compoundDatasetCommonIO(H5File.IO_TYPE.WRITE, buf);
        }
        catch (Exception ex) {
            log.debug("write(Object): failed to write compound dataset: ", ex);
            throw new Exception("failed to write compound dataset: " + ex.getMessage(), ex);
        }
    }

    /*
     * Routine to convert datatypes that are read in as byte arrays to
     * regular types.
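     *
     * For instance, a 16-byte (quad-precision) floating-point member is turned
     * into a BigDecimal below; all other cases are delegated to the superclass.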
     */
    @Override
    protected Object convertByteMember(final Datatype dtype, byte[] byteData) {
        Object theObj = null;
        log.debug("convertByteMember(): dtype={} byteData={}", dtype, byteData);

        if (dtype.isFloat() && dtype.getDatatypeSize() == 16)
            theObj = ((H5Datatype)dtype).byteToBigDecimal(byteData, 0);
        else
            theObj = super.convertByteMember(dtype, byteData);

        return theObj;
    }

    private Object compoundDatasetCommonIO(H5File.IO_TYPE ioType, Object writeBuf) throws Exception {
        H5Datatype dsDatatype = (H5Datatype) getDatatype();
        Object theData = null;

        if (numberOfMembers <= 0) {
            log.debug("compoundDatasetCommonIO(): Dataset contains no members");
            throw new Exception("dataset contains no members");
        }

        /*
         * I/O type-specific pre-initialization.
         */
        if (ioType == H5File.IO_TYPE.WRITE) {
            if ((writeBuf == null) || !(writeBuf instanceof List)) {
                log.debug("compoundDatasetCommonIO(): writeBuf is null or invalid");
                throw new Exception("write buffer is null or invalid");
            }

            /*
             * Check for any unsupported datatypes and fail early before
             * attempting to write to the dataset.
             */
            if (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isCompound()) {
                log.debug("compoundDatasetCommonIO(): cannot write dataset of type ARRAY of COMPOUND");
                throw new HDF5Exception("Unsupported dataset of type ARRAY of COMPOUND");
            }

            if (dsDatatype.isVLEN() && !dsDatatype.isVarStr() && dsDatatype.getDatatypeBase().isCompound()) {
                log.debug("compoundDatasetCommonIO(): cannot write dataset of type VLEN of COMPOUND");
                throw new HDF5Exception("Unsupported dataset of type VLEN of COMPOUND");
            }
        }

        long did = open();
        if (did >= 0) {
            long[] spaceIDs = { HDF5Constants.H5I_INVALID_HID, HDF5Constants.H5I_INVALID_HID }; // spaceIDs[0]=mspace, spaceIDs[1]=fspace

            try {
                /*
                 * NOTE: this call sets up a hyperslab selection in the file according to the
                 * current selection in the dataset object.
                 */
                long totalSelectedSpacePoints = H5Utils.getTotalSelectedSpacePoints(did, dims, startDims,
                        selectedStride, selectedDims, spaceIDs);

                theData = compoundTypeIO(ioType, did, spaceIDs, (int) totalSelectedSpacePoints, dsDatatype, writeBuf, new int[]{0});
            }
            finally {
                if (HDF5Constants.H5S_ALL != spaceIDs[0]) {
                    try {
                        H5.H5Sclose(spaceIDs[0]);
                    }
                    catch (Exception ex) {
                        log.debug("compoundDatasetCommonIO(): H5Sclose(spaceIDs[0] {}) failure: ", spaceIDs[0], ex);
                    }
                }

                if (HDF5Constants.H5S_ALL != spaceIDs[1]) {
                    try {
                        H5.H5Sclose(spaceIDs[1]);
                    }
                    catch (Exception ex) {
                        log.debug("compoundDatasetCommonIO(): H5Sclose(spaceIDs[1] {}) failure: ", spaceIDs[1], ex);
                    }
                }

                close(did);
            }
        }
        else
            log.debug("compoundDatasetCommonIO(): failed to open dataset");

        return theData;
    }

    /*
     * Private recursive routine to read/write an entire compound datatype field by
     * field. This routine is called recursively for ARRAY of COMPOUND and VLEN of
     * COMPOUND datatypes.
     *
     * NOTE: the globalMemberIndex hack is ugly, but we need to keep track of a
     * running counter so that we can index properly into the flattened name list
     * generated from H5Datatype.extractCompoundInfo() at dataset init time.
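     *
     * For the nested example in the class comment, the flattened name list holds
     * one entry per atomic member (m01, m02, then the nest1 and nest2 members,
     * each qualified by its parent compound as produced by extractCompoundInfo),
     * so globalMemberIndex walks that list in declaration order.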
     */
    private Object compoundTypeIO(H5File.IO_TYPE ioType, long did, long[] spaceIDs, int nSelPoints,
            final H5Datatype cmpdType, Object writeBuf, int[] globalMemberIndex) {
        Object theData = null;

        if (cmpdType.isArray()) {
            log.trace("compoundTypeIO(): ARRAY type");

            long[] arrayDims = cmpdType.getArrayDims();
            int arrSize = nSelPoints;
            for (int i = 0; i < arrayDims.length; i++)
                arrSize *= arrayDims[i];
            theData = compoundTypeIO(ioType, did, spaceIDs, arrSize, (H5Datatype) cmpdType.getDatatypeBase(), writeBuf, globalMemberIndex);
        }
        else if (cmpdType.isVLEN() && !cmpdType.isVarStr()) {
            /*
             * TODO: true variable-length support.
             */
            String[] errVal = new String[nSelPoints];
            String errStr = "*UNSUPPORTED*";

            for (int j = 0; j < nSelPoints; j++)
                errVal[j] = errStr;

            /*
             * Setup a fake data list.
             */
            Datatype baseType = cmpdType.getDatatypeBase();
            while (baseType != null && !baseType.isCompound()) {
                baseType = baseType.getDatatypeBase();
            }

            List<Object> fakeVlenData = (List<Object>) H5Datatype.allocateArray((H5Datatype) baseType, nSelPoints);
            fakeVlenData.add(errVal);

            theData = fakeVlenData;
        }
        else if (cmpdType.isCompound()) {
            List<Object> memberDataList = null;
            List<Datatype> typeList = cmpdType.getCompoundMemberTypes();

            log.trace("compoundTypeIO(): {} {} members:", (ioType == H5File.IO_TYPE.READ) ? "read" : "write", typeList.size());

            if (ioType == H5File.IO_TYPE.READ)
                memberDataList = (List<Object>) H5Datatype.allocateArray(cmpdType, nSelPoints);

            try {
                for (int i = 0, writeListIndex = 0; i < typeList.size(); i++) {
                    H5Datatype memberType = null;
                    String memberName = null;
                    Object memberData = null;

                    try {
                        memberType = (H5Datatype) typeList.get(i);
                    }
                    catch (Exception ex) {
                        log.debug("compoundTypeIO(): get member {} failure: ", i, ex);
                        globalMemberIndex[0]++;
                        continue;
                    }

                    /*
                     * Since the type list used here is not a flattened structure, we need to skip
                     * the member selection check for compound types, as otherwise having a single
                     * member not selected would skip the reading/writing for the entire compound
                     * type. The member selection check will be deferred to the recursive compound
                     * read/write below.
                     */
                    if (!memberType.isCompound()) {
                        if (!isMemberSelected[globalMemberIndex[0] % this.getMemberCount()]) {
                            log.debug("compoundTypeIO(): member[{}] is not selected", i);
                            globalMemberIndex[0]++;
                            continue; // the field is not selected
                        }
                    }

                    if (!memberType.isCompound()) {
                        try {
                            memberName = flatNameList.get(globalMemberIndex[0]);
                        }
                        catch (Exception ex) {
                            log.debug("compoundTypeIO(): get member {} name failure: ", i, ex);
                            memberName = "null";
                        }
                    }

                    log.trace("compoundTypeIO(): member[{}]({}) is type {}", i, memberName, memberType.getDescription());

                    if (ioType == H5File.IO_TYPE.READ) {
                        try {
                            if (memberType.isCompound())
                                memberData = compoundTypeIO(ioType, did, spaceIDs, nSelPoints, memberType, writeBuf, globalMemberIndex);
                            else if (memberType.isArray() /* || (memberType.isVLEN() && !memberType.isVarStr()) */) {
                                /*
                                 * Recursively detect any nested array/vlen of compound types.
                                 */
                                boolean compoundFound = false;

                                Datatype base = memberType.getDatatypeBase();
                                while (base != null) {
                                    if (base.isCompound())
                                        compoundFound = true;

                                    base = base.getDatatypeBase();
                                }

                                if (compoundFound) {
                                    /*
                                     * Skip the top-level array/vlen type.
                                     */
                                    globalMemberIndex[0]++;

                                    memberData = compoundTypeIO(ioType, did, spaceIDs, nSelPoints, memberType, writeBuf, globalMemberIndex);
                                }
                                else {
                                    memberData = readSingleCompoundMember(did, spaceIDs, nSelPoints, memberType, memberName);
                                    globalMemberIndex[0]++;
                                }
                            }
                            else {
                                memberData = readSingleCompoundMember(did, spaceIDs, nSelPoints, memberType, memberName);
                                globalMemberIndex[0]++;
                            }
                        }
                        catch (Exception ex) {
                            log.debug("compoundTypeIO(): failed to read member {}: ", i, ex);
                            globalMemberIndex[0]++;
                            memberData = null;
                        }

                        if (memberData == null) {
                            String[] errVal = new String[nSelPoints];
                            String errStr = "*ERROR*";

                            for (int j = 0; j < nSelPoints; j++)
                                errVal[j] = errStr;

                            memberData = errVal;
                        }

                        memberDataList.add(memberData);
                    }
                    else {
                        try {
                            /*
                             * TODO: currently doesn't correctly handle non-selected compound members.
                             */
                            memberData = ((List<?>) writeBuf).get(writeListIndex++);
                        }
                        catch (Exception ex) {
                            log.debug("compoundTypeIO(): get member[{}] data failure: ", i, ex);
                            globalMemberIndex[0]++;
                            continue;
                        }

                        if (memberData == null) {
                            log.debug("compoundTypeIO(): member[{}] data is null", i);
                            globalMemberIndex[0]++;
                            continue;
                        }

                        try {
                            if (memberType.isCompound()) {
                                List<?> nestedList = (List<?>) ((List<?>) writeBuf).get(writeListIndex++);
                                compoundTypeIO(ioType, did, spaceIDs, nSelPoints, memberType, nestedList, globalMemberIndex);
                            }
                            else {
                                writeSingleCompoundMember(did, spaceIDs, nSelPoints, memberType, memberName, memberData);
                                globalMemberIndex[0]++;
                            }
                        }
                        catch (Exception ex) {
                            log.debug("compoundTypeIO(): failed to write member[{}]: ", i, ex);
                            globalMemberIndex[0]++;
                        }
                    }
                } //  (i = 0, writeListIndex = 0; i < typeList.size(); i++)
            }
            catch (Exception ex) {
                log.debug("compoundTypeIO(): failure: ", ex);
                memberDataList = null;
            }

            theData = memberDataList;
        }

        return theData;
    }

    /*
     * Private routine to read a single field of a compound datatype by creating a
     * compound datatype and inserting the single field into that datatype.
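     *
     * It relies on dsDatatype.createCompoundFieldType(memberName), used below to
     * build a compound datatype holding only the named field, so the H5Dread
     * variants fetch just that member's values for each selected record.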
     */
    private Object readSingleCompoundMember(long dsetID, long[] spaceIDs, int nSelPoints,
            final H5Datatype memberType, String memberName) throws Exception {
        H5Datatype dsDatatype = (H5Datatype) this.getDatatype();
        Object memberData = null;

        try {
            memberData = H5Datatype.allocateArray(memberType, nSelPoints);
            log.trace("readSingleCompoundMember(): allocateArray {} points ", nSelPoints);
        }
        catch (OutOfMemoryError err) {
            memberData = null;
            throw new Exception("Out of memory");
        }
        catch (Exception ex) {
            log.debug("readSingleCompoundMember(): ", ex);
            memberData = null;
        }

        if (memberData != null) {
            /*
             * Create a compound datatype containing just a single field (the one which we
             * want to read).
             */
            long compTid = HDF5Constants.H5I_INVALID_HID;
            try {
                compTid = dsDatatype.createCompoundFieldType(memberName);
            }
            catch (HDF5Exception ex) {
                log.debug("readSingleCompoundMember(): unable to create compound field type for member of type {}: ",
                        memberType.getDescription(), ex);
                memberData = null;
            }

            /*
             * Actually read the data for this member now that everything has been setup.
             */
            try {
                if (memberType.isVarStr()) {
                    log.trace("readSingleCompoundMember(): H5Dread_VLStrings did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}",
                            dsetID, compTid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
                                    (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);

                    H5.H5Dread_VLStrings(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, (Object[]) memberData);
                }
                else if (memberType.isVLEN() || (memberType.isArray() && memberType.getDatatypeBase().isVLEN())) {
                    log.trace("readSingleCompoundMember(): H5DreadVL did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}",
                            dsetID, compTid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
                                    (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);

                    H5.H5DreadVL(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, (Object[]) memberData);
                }
                else {
                    log.trace("readSingleCompoundMember(): H5Dread did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}",
                            dsetID, compTid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
                                    (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);

                    H5.H5Dread(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, memberData);
                }
            }
            catch (HDF5DataFiltersException exfltr) {
                log.debug("readSingleCompoundMember(): read failure: ", exfltr);
                throw new Exception("Filter not available exception: " + exfltr.getMessage(), exfltr);
            }
            catch (Exception ex) {
                log.debug("readSingleCompoundMember(): read failure: ", ex);
                throw new Exception("failed to read compound member: " + ex.getMessage(), ex);
            }
            finally {
                dsDatatype.close(compTid);
            }

            /*
             * Perform any necessary data conversions.
             */
            if (memberType.isUnsigned()) {
                log.trace("readSingleCompoundMember(): converting from unsigned C-type integers");
                memberData = Dataset.convertFromUnsignedC(memberData, null);
            }
            else if (Utils.getJavaObjectRuntimeClass(memberData) == 'B') {
                log.trace("readSingleCompoundMember(): converting byte array member into Object");

                /*
                 * For all other types that get read into memory as a byte[] (such as nested
                 * compounds and arrays of compounds), we must manually convert the byte[] into
                 * something usable.
                 */
                memberData = convertByteMember(memberType, (byte[]) memberData);
            }
        }

        return memberData;
    }

    /*
     * Private routine to write a single field of a compound datatype by creating a
     * compound datatype and inserting the single field into that datatype.
     */
    private void writeSingleCompoundMember(long dsetID, long[] spaceIDs, int nSelPoints,
            final H5Datatype memberType, String memberName, Object theData) throws Exception {
        H5Datatype dsDatatype = (H5Datatype) this.getDatatype();

        /*
         * Check for any unsupported datatypes before attempting to write this compound
         * member.
         */
        if (memberType.isVLEN() && !memberType.isVarStr()) {
            log.debug("writeSingleCompoundMember(): writing of VL non-strings is not currently supported");
            throw new Exception("writing of VL non-strings is not currently supported");
        }

        /*
         * Perform any necessary data conversions before writing the data.
         */
        Object tmpData = theData;
        try {
            if (memberType.isUnsigned()) {
                // Check if we need to convert unsigned integer data from Java-style
                // to C-style integers
                long tsize = memberType.getDatatypeSize();
                String cname = theData.getClass().getName();
                char dname = cname.charAt(cname.lastIndexOf('[') + 1);
                boolean doIntConversion = (((tsize == 1) && (dname == 'S'))
                        || ((tsize == 2) && (dname == 'I')) || ((tsize == 4) && (dname == 'J')));

                if (doIntConversion) {
                    log.trace("writeSingleCompoundMember(): converting integer data to unsigned C-type integers");
                    tmpData = convertToUnsignedC(theData, null);
                }
            }
            else if (memberType.isString() && (Array.get(theData, 0) instanceof String)) {
                log.trace("writeSingleCompoundMember(): converting string array to byte array");
                tmpData = stringToByte((String[]) theData, (int) memberType.getDatatypeSize());
            }
            else if (memberType.isEnum() && (Array.get(theData, 0) instanceof String)) {
                log.trace("writeSingleCompoundMember(): converting enum names to values");
                tmpData = memberType.convertEnumNameToValue((String[]) theData);
            }
        }
        catch (Exception ex) {
            log.debug("writeSingleCompoundMember(): data conversion failure: ", ex);
            tmpData = null;
        }

        if (tmpData == null) {
            log.debug("writeSingleCompoundMember(): data is null");
            return;
        }

        /*
         * Create a compound datatype containing just a single field (the one which we
         * want to write).
         */
        long compTid = HDF5Constants.H5I_INVALID_HID;
        try {
            compTid = dsDatatype.createCompoundFieldType(memberName);
        }
        catch (HDF5Exception ex) {
            log.debug("writeSingleCompoundMember(): unable to create compound field type for member of type {}: ",
                    memberType.getDescription(), ex);
        }

        /*
         * Actually write the data now that everything has been setup.
         */
        try {
            if (memberType.isVarStr()) {
                log.trace("writeSingleCompoundMember(): H5Dwrite_string did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}",
                        dsetID, compTid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
                                (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);

                H5.H5Dwrite_string(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, (String[]) tmpData);
            }
            else {
                log.trace("writeSingleCompoundMember(): H5Dwrite did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}",
                        dsetID, compTid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
                                (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);

                // BUG!!! Nested compound data is not written and no exception is
                // thrown. Need to check whether this is a Java wrapper error or a
                // C library error.
                H5.H5Dwrite(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, tmpData);
            }
        }
        catch (Exception ex) {
            log.debug("writeSingleCompoundMember(): write failure: ", ex);
            throw new Exception("failed to write compound member: " + ex.getMessage(), ex);
        }
        finally {
            dsDatatype.close(compTid);
        }
    }
1292
    /**
     * Converts the data values of this data object to appropriate Java integers if
     * they are unsigned integers.
     *
     * This operation is not supported for compound datasets: the method always
     * throws an {@link UnsupportedOperationException}. Unsigned conversion is
     * handled per member instead.
     *
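     * As a sketch of the per-member alternative (the {@code memberBuf} variable is
     * illustrative), the static helper on {@link hdf.object.Dataset} can be used:
     * <pre>
     * Object converted = Dataset.convertFromUnsignedC(memberBuf, null);
     * </pre>
     *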
     * @see hdf.object.Dataset#convertToUnsignedC(Object)
     * @see hdf.object.Dataset#convertFromUnsignedC(Object, Object)
     *
     * @return the converted data buffer.
     *
     * @throws UnsupportedOperationException
     *             always, for compound datasets
     */
    @Override
    public Object convertFromUnsignedC() {
        throw new UnsupportedOperationException("H5CompoundDS:convertFromUnsignedC Unsupported operation.");
    }

    /**
     * Converts Java integer data values of this data object back to unsigned C-type
     * integer data if they are unsigned integers.
     *
     * This operation is not supported for compound datasets: the method always
     * throws an {@link UnsupportedOperationException}. Unsigned conversion is
     * handled per member instead.
     *
     * @see hdf.object.Dataset#convertToUnsignedC(Object)
     * @see hdf.object.Dataset#convertToUnsignedC(Object, Object)
     *
     * @return the converted data buffer.
     *
     * @throws UnsupportedOperationException
     *             always, for compound datasets
     */
    @Override
    public Object convertToUnsignedC() {
        throw new UnsupportedOperationException("H5CompoundDS:convertToUnsignedC Unsupported operation.");
    }

    /**
     * Retrieves the object's metadata, such as attributes, from the file.
     *
     * Metadata, such as attributes, is stored in a List.
     *
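     * A minimal usage sketch (the {@code dset} variable is illustrative):
     * <pre>
     * List&lt;Attribute&gt; attrs = dset.getMetadata();
     * for (Attribute attr : attrs)
     *     System.out.println(attr.getAttributeName());
     * </pre>
     *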
     * @return the list of metadata objects.
     *
     * @throws HDF5Exception
     *             if the metadata cannot be retrieved
     */
    @Override
    public List<Attribute> getMetadata() throws HDF5Exception {
        int gmIndexType = 0;
        int gmIndexOrder = 0;

        try {
            gmIndexType = fileFormat.getIndexType(null);
        }
        catch (Exception ex) {
            log.debug("getMetadata(): getIndexType failed: ", ex);
        }
        try {
            gmIndexOrder = fileFormat.getIndexOrder(null);
        }
        catch (Exception ex) {
            log.debug("getMetadata(): getIndexOrder failed: ", ex);
        }
        return this.getMetadata(gmIndexType, gmIndexOrder);
    }

    /**
     * Retrieves the object's metadata, such as attributes, from the file.
     *
     * Metadata, such as attributes, is stored in a List.
     *
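     * A hedged sketch of supplying an explicit index type and order (assuming the
     * standard HDF5 index constants are the expected values here):
     * <pre>
     * List&lt;Attribute&gt; attrs =
     *     dset.getMetadata(HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC);
     * </pre>
     *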
     * @param attrPropList
     *             the list of attribute retrieval properties: the attribute index
     *             type, optionally followed by the index traversal order
     *
     * @return the list of metadata objects.
     *
     * @throws HDF5Exception
     *             if the metadata cannot be retrieved
     */
    public List<Attribute> getMetadata(int... attrPropList) throws HDF5Exception {
        if (!isInited())
            init();

        try {
            this.linkTargetObjName = H5File.getLinkTargetName(this);
        }
        catch (Exception ex) {
            log.debug("getMetadata(): getLinkTargetName failed: ", ex);
        }

        if (objMetadata.getAttributeList() == null) {
            long did = HDF5Constants.H5I_INVALID_HID;
            long pcid = HDF5Constants.H5I_INVALID_HID;
            long paid = HDF5Constants.H5I_INVALID_HID;

            did = open();
            if (did >= 0) {
                try {
                    // get the compression and chunk information
                    pcid = H5.H5Dget_create_plist(did);
                    paid = H5.H5Dget_access_plist(did);
                    long storageSize = H5.H5Dget_storage_size(did);
                    int nfilt = H5.H5Pget_nfilters(pcid);
                    int layoutType = H5.H5Pget_layout(pcid);

                    storageLayout.setLength(0);
                    compression.setLength(0);

                    if (layoutType == HDF5Constants.H5D_CHUNKED) {
                        chunkSize = new long[rank];
                        H5.H5Pget_chunk(pcid, rank, chunkSize);
                        int n = chunkSize.length;
                        storageLayout.append("CHUNKED: ").append(chunkSize[0]);
                        for (int i = 1; i < n; i++)
                            storageLayout.append(" X ").append(chunkSize[i]);

                        if (nfilt > 0) {
                            long nelmts = 1;
                            long uncompSize;
                            long datumSize = getDatatype().getDatatypeSize();

                            if (datumSize < 0) {
                                long tmptid = HDF5Constants.H5I_INVALID_HID;
                                try {
                                    tmptid = H5.H5Dget_type(did);
                                    datumSize = H5.H5Tget_size(tmptid);
                                }
                                finally {
                                    try {
                                        H5.H5Tclose(tmptid);
                                    }
                                    catch (Exception ex2) {
                                        log.debug("getMetadata(): H5Tclose(tmptid {}) failure: ", tmptid, ex2);
                                    }
                                }
                            }

                            for (int i = 0; i < rank; i++)
                                nelmts *= dims[i];
                            uncompSize = nelmts * datumSize;

                            /* compression ratio = uncompressed size / compressed size */

                            if (storageSize != 0) {
                                double ratio = (double) uncompSize / (double) storageSize;
                                DecimalFormat df = new DecimalFormat();
                                df.setMinimumFractionDigits(3);
                                df.setMaximumFractionDigits(3);
                                compression.append(df.format(ratio)).append(":1");
                            }
                        }
                    }
                    else if (layoutType == HDF5Constants.H5D_COMPACT) {
                        storageLayout.append("COMPACT");
                    }
                    else if (layoutType == HDF5Constants.H5D_CONTIGUOUS) {
                        storageLayout.append("CONTIGUOUS");
                        if (H5.H5Pget_external_count(pcid) > 0)
                            storageLayout.append(" - EXTERNAL ");
                    }
                    else if (layoutType == HDF5Constants.H5D_VIRTUAL) {
                        storageLayout.append("VIRTUAL - ");
                        try {
                            long vmaps = H5.H5Pget_virtual_count(pcid);
                            try {
                                int virtView = H5.H5Pget_virtual_view(paid);
                                long virtGap = H5.H5Pget_virtual_printf_gap(paid);
                                if (virtView == HDF5Constants.H5D_VDS_FIRST_MISSING)
                                    storageLayout.append("First Missing");
                                else
                                    storageLayout.append("Last Available");
                                storageLayout.append("\nGAP : ").append(virtGap);
                            }
                            catch (Exception err) {
                                log.debug("getMetadata(): vds error: ", err);
                                storageLayout.append("ERROR");
                            }
                            storageLayout.append("\nMAPS : ").append(vmaps);
                            if (vmaps > 0) {
                                for (long next = 0; next < vmaps; next++) {
                                    try {
                                        H5.H5Pget_virtual_vspace(pcid, next);
                                        H5.H5Pget_virtual_srcspace(pcid, next);
                                        String fname = H5.H5Pget_virtual_filename(pcid, next);
                                        String dsetname = H5.H5Pget_virtual_dsetname(pcid, next);
                                        storageLayout.append("\n").append(fname).append(" : ").append(dsetname);
                                    }
                                    catch (Exception err) {
                                        log.debug("getMetadata(): vds space[{}] error: ", next, err);
                                        storageLayout.append("ERROR");
                                    }
                                }
                            }
                        }
                        catch (Exception err) {
                            log.debug("getMetadata(): vds count error: ", err);
                            storageLayout.append("ERROR");
                        }
                    }
                    else {
                        chunkSize = null;
                        storageLayout.append("NONE");
                    }

                    int[] flags = { 0, 0 };
                    long[] cdNelmts = { 20 };
                    int[] cdValues = new int[(int) cdNelmts[0]];
                    String[] cdName = { "", "" };
                    log.trace("getMetadata(): {} filters in pipeline", nfilt);
                    int filter = -1;
                    int[] filterConfig = { 1 };

                    filters.setLength(0);

                    if (nfilt == 0) {
                        filters.append("NONE");
                    }
                    else {
                        for (int i = 0, k = 0; i < nfilt; i++) {
                            log.trace("getMetadata(): filter[{}]", i);
                            if (i > 0)
                                filters.append(", ");
                            if (k > 0)
                                compression.append(", ");

                            try {
                                cdNelmts[0] = 20;
                                cdValues = new int[(int) cdNelmts[0]];
                                filter = H5.H5Pget_filter(pcid, i, flags, cdNelmts, cdValues, 120, cdName, filterConfig);
                                log.trace("getMetadata(): filter[{}] is {} has {} elements ", i, cdName[0], cdNelmts[0]);
                                for (int j = 0; j < cdNelmts[0]; j++)
                                    log.trace("getMetadata(): filter[{}] element {} = {}", i, j, cdValues[j]);
                            }
                            catch (Exception err) {
                                log.debug("getMetadata(): filter[{}] error: ", i, err);
                                filters.append("ERROR");
                                continue;
                            }

                            if (filter == HDF5Constants.H5Z_FILTER_NONE) {
                                filters.append("NONE");
                            }
                            else if (filter == HDF5Constants.H5Z_FILTER_DEFLATE) {
                                filters.append("GZIP");
                                compression.append(COMPRESSION_GZIP_TXT).append(cdValues[0]);
                                k++;
                            }
                            else if (filter == HDF5Constants.H5Z_FILTER_FLETCHER32) {
                                filters.append("Error detection filter");
                            }
                            else if (filter == HDF5Constants.H5Z_FILTER_SHUFFLE) {
                                filters.append("SHUFFLE: Nbytes = ").append(cdValues[0]);
                            }
                            else if (filter == HDF5Constants.H5Z_FILTER_NBIT) {
                                filters.append("NBIT");
                            }
                            else if (filter == HDF5Constants.H5Z_FILTER_SCALEOFFSET) {
                                filters.append("SCALEOFFSET: MIN BITS = ").append(cdValues[0]);
                            }
                            else if (filter == HDF5Constants.H5Z_FILTER_SZIP) {
                                filters.append("SZIP");
                                compression.append("SZIP: Pixels per block = ").append(cdValues[1]);
                                k++;
                                int flag = -1;
                                try {
                                    flag = H5.H5Zget_filter_info(filter);
                                }
                                catch (Exception ex) {
                                    log.debug("getMetadata(): H5Zget_filter_info failure: ", ex);
                                    flag = -1;
                                }
                                if (flag == HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED)
                                    compression.append(": H5Z_FILTER_CONFIG_DECODE_ENABLED");
                                else if ((flag == HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED)
                                        || (flag >= (HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED
                                                + HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED)))
                                    compression.append(": H5Z_FILTER_CONFIG_ENCODE_ENABLED");
                            }
                            else {
                                filters.append("USERDEFINED ").append(cdName[0]).append("(").append(filter).append("): ");
                                for (int j = 0; j < cdNelmts[0]; j++) {
                                    if (j > 0)
                                        filters.append(", ");
                                    filters.append(cdValues[j]);
                                }
                                log.debug("getMetadata(): filter[{}] is user defined compression", i);
                            }
                        } //  (int i=0; i<nfilt; i++)
                    }

                    if (compression.length() == 0)
                        compression.append("NONE");
                    log.trace("getMetadata(): filter compression={}", compression);
                    log.trace("getMetadata(): filter information={}", filters);

                    storage.setLength(0);
                    storage.append("SIZE: ").append(storageSize);

                    try {
                        int[] at = { 0 };
                        H5.H5Pget_alloc_time(pcid, at);
                        storage.append(", allocation time: ");
                        if (at[0] == HDF5Constants.H5D_ALLOC_TIME_EARLY)
                            storage.append("Early");
                        else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_INCR)
                            storage.append("Incremental");
                        else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_LATE)
                            storage.append("Late");
                        else
                            storage.append("Default");
                    }
                    catch (Exception ex) {
                        log.debug("getMetadata(): Storage allocation time:", ex);
                    }
                    log.trace("getMetadata(): storage={}", storage);
                }
                finally {
                    try {
                        H5.H5Pclose(paid);
                    }
                    catch (Exception ex) {
                        log.debug("getMetadata(): H5Pclose(paid {}) failure: ", paid, ex);
                    }
                    try {
                        H5.H5Pclose(pcid);
                    }
                    catch (Exception ex) {
                        log.debug("getMetadata(): H5Pclose(pcid {}) failure: ", pcid, ex);
                    }
                    close(did);
                }
            }
        }

        List<Attribute> attrlist = null;
        try {
            attrlist = objMetadata.getMetadata(attrPropList);
        }
        catch (Exception ex) {
            log.debug("getMetadata(): getMetadata failed: ", ex);
        }
        return attrlist;
    }

    /**
     * Writes a specific piece of metadata (such as an attribute) into the file.
     *
     * If an HDF(4&amp;5) attribute exists in the file, this method updates its
     * value. If the attribute does not exist in the file, it creates the
     * attribute in the file and attaches it to the object. It will fail to
     * write a new attribute to the object if an attribute with the same name
     * already exists. To update the value of an existing attribute in the file,
     * get the instance of the attribute with getMetadata(), change its values,
     * then use writeMetadata() to write the new values back.
     *
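     * As a sketch of that update pattern (the {@code dset} variable and the
     * attribute index are illustrative):
     * <pre>
     * List&lt;Attribute&gt; attrs = dset.getMetadata();
     * Attribute attr = attrs.get(0);
     * // modify the attribute's data values here, then write them back
     * dset.writeMetadata(attr);
     * </pre>
     *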
     * @param info
     *            the metadata to write.
     *
     * @throws Exception
     *             if the metadata cannot be written
     */
    @Override
    public void writeMetadata(Object info) throws Exception {
        try {
            objMetadata.writeMetadata(info);
        }
        catch (Exception ex) {
            log.debug("writeMetadata(): Object not an Attribute");
            return;
        }
    }

    /**
     * Deletes an existing piece of metadata from this object.
     *
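     * For example, a short sketch of removing the first attribute returned by
     * getMetadata() (the {@code dset} variable is illustrative):
     * <pre>
     * List&lt;Attribute&gt; attrs = dset.getMetadata();
     * dset.removeMetadata(attrs.get(0));
     * </pre>
     *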
     * @param info
     *            the metadata to delete.
     *
     * @throws HDF5Exception
     *             if the metadata cannot be removed
     */
    @Override
    public void removeMetadata(Object info) throws HDF5Exception {
        try {
            objMetadata.removeMetadata(info);
        }
        catch (Exception ex) {
            log.debug("removeMetadata(): Object not an Attribute");
            return;
        }

        Attribute attr = (Attribute) info;
        log.trace("removeMetadata(): {}", attr.getAttributeName());
        long did = open();
        if (did >= 0) {
            try {
                H5.H5Adelete(did, attr.getAttributeName());
            }
            finally {
                close(did);
            }
        }
        else {
            log.debug("removeMetadata(): failed to open compound dataset");
        }
    }

    /**
     * Updates an existing piece of metadata attached to this object.
     *
     * @param info
     *            the metadata to update.
     *
     * @throws HDF5Exception
     *             if the metadata cannot be updated
     */
    @Override
    public void updateMetadata(Object info) throws HDF5Exception {
        try {
            objMetadata.updateMetadata(info);
        }
        catch (Exception ex) {
            log.debug("updateMetadata(): Object not an Attribute");
            return;
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#setName(java.lang.String)
     */
    @Override
    public void setName(String newName) throws Exception {
        if (newName == null)
            throw new IllegalArgumentException("The new name is NULL");

        H5File.renameObject(this, newName);
        super.setName(newName);
    }

    /**
     * @deprecated Not for public use in the future. <br>
     *             Use
     *             {@link #create(String, Group, long[], long[], long[], int, String[], Datatype[], int[], long[][], Object)}
     *             instead.
     *
     * @param name
     *            the name of the dataset to create.
     * @param pgroup
     *            parent group where the new dataset is created.
     * @param dims
     *            the dimension sizes of the dataset.
     * @param memberNames
     *            the names of the compound datatype members
     * @param memberDatatypes
     *            the datatypes of the compound datatype members
     * @param memberSizes
     *            the dimension sizes of the members
     * @param data
     *            list of data arrays written to the new dataset, null if no data is written to the new
     *            dataset.
     *
     * @return the new compound dataset if successful; otherwise returns null.
     *
     * @throws Exception
     *             if there is a failure.
     */
    @Deprecated
    public static Dataset create(String name, Group pgroup, long[] dims, String[] memberNames,
            Datatype[] memberDatatypes, int[] memberSizes, Object data) throws Exception {
        if ((pgroup == null) || (name == null) || (dims == null) || (memberNames == null)
                || (memberDatatypes == null) || (memberSizes == null)) {
            return null;
        }

        int nMembers = memberNames.length;
        int[] memberRanks = new int[nMembers];
        long[][] memberDims = new long[nMembers][1];
        for (int i = 0; i < nMembers; i++) {
            memberRanks[i] = 1;
            memberDims[i][0] = memberSizes[i];
        }

        return H5CompoundDS.create(name, pgroup, dims, memberNames, memberDatatypes, memberRanks, memberDims, data);
    }

    /**
     * @deprecated Not for public use in the future. <br>
     *             Use
     *             {@link #create(String, Group, long[], long[], long[], int, String[], Datatype[], int[], long[][], Object)}
     *             instead.
     *
     * @param name
     *            the name of the dataset to create.
     * @param pgroup
     *            parent group where the new dataset is created.
     * @param dims
     *            the dimension sizes of the dataset.
     * @param memberNames
     *            the names of the compound datatype members
     * @param memberDatatypes
     *            the datatypes of the compound datatype members
     * @param memberRanks
     *            the ranks of the members
     * @param memberDims
     *            the dimension sizes of the members
     * @param data
     *            list of data arrays written to the new dataset, null if no data is written to the new
     *            dataset.
     *
     * @return the new compound dataset if successful; otherwise returns null.
     *
     * @throws Exception
     *             if the dataset cannot be created.
     */
    @Deprecated
    public static Dataset create(String name, Group pgroup, long[] dims, String[] memberNames,
            Datatype[] memberDatatypes, int[] memberRanks, long[][] memberDims, Object data) throws Exception {
        return H5CompoundDS.create(name, pgroup, dims, null, null, -1, memberNames, memberDatatypes, memberRanks,
                memberDims, data);
    }

    /**
     * Creates a simple compound dataset in a file with/without chunking and compression.
     *
     * This function provides an easy way to create a simple compound dataset in a file by hiding the tedious
     * details of creating a compound dataset from users.
     *
     * This function calls H5.H5Dcreate() to create a simple compound dataset in the file. Nested compound
     * datasets are not supported. The required information to create a compound dataset includes the
     * name, the parent group and data space of the dataset, and the names, datatypes and data spaces of the
     * compound fields. Other information such as chunks, compression and the data buffer is optional.
     *
     * The following example shows how to use this function to create a compound dataset in a file.
     *
     * <pre>
     * H5File file = null;
     * String message = &quot;&quot;;
     * Group pgroup = null;
     * int[] DATA_INT = new int[DIM_SIZE];
     * float[] DATA_FLOAT = new float[DIM_SIZE];
     * String[] DATA_STR = new String[DIM_SIZE];
     * long[] DIMs = { 50, 10 };
     * long[] CHUNKs = { 25, 5 };
     *
     * try {
     *     file = (H5File) H5FILE.open(fname, H5File.CREATE);
     *     file.open();
     *     pgroup = (Group) file.get(&quot;/&quot;);
     * }
     * catch (Exception ex) {
     * }
     *
     * Vector&lt;Object&gt; data = new Vector&lt;&gt;();
     * data.add(0, DATA_INT);
     * data.add(1, DATA_FLOAT);
     * data.add(2, DATA_STR);
     *
     * // create the compound member datatypes
     * Datatype[] mdtypes = new H5Datatype[3];
     * String[] mnames = { &quot;int&quot;, &quot;float&quot;, &quot;string&quot; };
     * Dataset dset = null;
     * try {
     *     mdtypes[0] = new H5Datatype(Datatype.CLASS_INTEGER, 4, Datatype.NATIVE, Datatype.NATIVE);
     *     mdtypes[1] = new H5Datatype(Datatype.CLASS_FLOAT, 4, Datatype.NATIVE, Datatype.NATIVE);
     *     mdtypes[2] = new H5Datatype(Datatype.CLASS_STRING, STR_LEN, Datatype.NATIVE, Datatype.NATIVE);
     *     dset = file.createCompoundDS(&quot;/CompoundDS&quot;, pgroup, DIMs, null, CHUNKs, 9, mnames, mdtypes, null, data);
     * }
     * catch (Exception ex) {
     *     failed(message, ex, file);
     *     return 1;
     * }
     * </pre>
     *
     * @param name
     *            the name of the dataset to create.
     * @param pgroup
     *            parent group where the new dataset is created.
     * @param dims
     *            the dimension sizes of the dataset.
     * @param maxdims
     *            the max dimension sizes of the dataset. maxdims is set to dims if maxdims = null.
     * @param chunks
     *            the chunk sizes of the dataset. No chunking if chunks = null.
     * @param gzip
     *            GZIP compression level (1 to 9); 0 or a negative value means no compression.
     * @param memberNames
     *            the names of the compound datatype members
     * @param memberDatatypes
     *            the datatypes of the compound datatype members
     * @param memberRanks
     *            the ranks of the members
     * @param memberDims
     *            the dimension sizes of the members
     * @param data
     *            list of data arrays written to the new dataset, null if no data is written to the new
     *            dataset.
     *
     * @return the new compound dataset if successful; otherwise returns null.
     *
     * @throws Exception
     *             if there is a failure.
     */
    public static Dataset create(String name, Group pgroup, long[] dims, long[] maxdims, long[] chunks, int gzip,
            String[] memberNames, Datatype[] memberDatatypes, int[] memberRanks, long[][] memberDims, Object data) throws Exception {
        H5CompoundDS dataset = null;
        String fullPath = null;
        long did = HDF5Constants.H5I_INVALID_HID;
        long plist = HDF5Constants.H5I_INVALID_HID;
        long sid = HDF5Constants.H5I_INVALID_HID;
        long tid = HDF5Constants.H5I_INVALID_HID;

        if ((pgroup == null) || (name == null) || (dims == null) || ((gzip > 0) && (chunks == null))
                || (memberNames == null) || (memberDatatypes == null) || (memberRanks == null)
                || (memberDims == null)) {
            log.debug("create(): one or more parameters are null");
            return null;
        }

        H5File file = (H5File) pgroup.getFileFormat();
        if (file == null) {
            log.debug("create(): parent group FileFormat is null");
            return null;
        }

        String path = HObject.SEPARATOR;
        if (!pgroup.isRoot()) {
            path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR;
            if (name.endsWith("/"))
                name = name.substring(0, name.length() - 1);
            int idx = name.lastIndexOf('/');
            if (idx >= 0)
                name = name.substring(idx + 1);
        }

        fullPath = path + name;

        int typeSize = 0;
        int nMembers = memberNames.length;
        long[] mTypes = new long[nMembers];
        int memberSize = 1;
        for (int i = 0; i < nMembers; i++) {
            memberSize = 1;
            for (int j = 0; j < memberRanks[i]; j++)
                memberSize *= memberDims[i][j];

            mTypes[i] = -1;
            // the member is an array
            if ((memberSize > 1) && (!memberDatatypes[i].isString())) {
                long tmptid = -1;
                if ((tmptid = memberDatatypes[i].createNative()) >= 0) {
                    try {
                        mTypes[i] = H5.H5Tarray_create(tmptid, memberRanks[i], memberDims[i]);
                    }
                    finally {
                        try {
                            H5.H5Tclose(tmptid);
                        }
                        catch (Exception ex) {
                            log.debug("create(): H5Tclose(tmptid {}) failure: ", tmptid, ex);
                        }
                    }
                }
            }
            else {
                mTypes[i] = memberDatatypes[i].createNative();
            }
            try {
                typeSize += H5.H5Tget_size(mTypes[i]);
            }
            catch (Exception ex) {
                log.debug("create(): array create H5Tget_size:", ex);

                // close every member datatype created so far, including mTypes[0],
                // before rethrowing
                while (i >= 0) {
                    try {
                        H5.H5Tclose(mTypes[i]);
                    }
                    catch (HDF5Exception ex2) {
                        log.debug("create(): H5Tclose(mTypes[{}] {}) failure: ", i, mTypes[i], ex2);
                    }
                    i--;
                }
                throw ex;
            }
        } //  (int i = 0; i < nMembers; i++)

        // setup chunking and compression
        boolean isExtendable = false;
        if (maxdims != null) {
            for (int i = 0; i < maxdims.length; i++) {
                if (maxdims[i] == 0)
                    maxdims[i] = dims[i];
                else if (maxdims[i] < 0)
                    maxdims[i] = HDF5Constants.H5S_UNLIMITED;

                if (maxdims[i] != dims[i])
                    isExtendable = true;
            }
        }

        // HDF5 requires chunking in order to define extendable datasets.
        // Chunking makes it possible to extend datasets efficiently, without
        // having to reorganize storage excessively. A default chunk dimension
        // of at most 64 is used here, which has good performance.
        if ((chunks == null) && isExtendable) {
            chunks = new long[dims.length];
            for (int i = 0; i < dims.length; i++)
                chunks[i] = Math.min(dims[i], 64);
        }

        // prepare the dataspace and datatype
        int rank = dims.length;

        try {
            sid = H5.H5Screate_simple(rank, dims, maxdims);

            // figure out creation properties
            plist = HDF5Constants.H5P_DEFAULT;

            tid = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, typeSize);
            int offset = 0;
            for (int i = 0; i < nMembers; i++) {
                H5.H5Tinsert(tid, memberNames[i], offset, mTypes[i]);
                offset += H5.H5Tget_size(mTypes[i]);
            }

            if (chunks != null) {
                plist = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);

                H5.H5Pset_layout(plist, HDF5Constants.H5D_CHUNKED);
                H5.H5Pset_chunk(plist, rank, chunks);

                // compression requires chunking
                if (gzip > 0) {
                    H5.H5Pset_deflate(plist, gzip);
                }
            }

            long fid = file.getFID();

            did = H5.H5Dcreate(fid, fullPath, tid, sid, HDF5Constants.H5P_DEFAULT, plist, HDF5Constants.H5P_DEFAULT);
            dataset = new H5CompoundDS(file, name, path);
        }
        finally {
            try {
                H5.H5Pclose(plist);
            }
            catch (HDF5Exception ex) {
                log.debug("create(): H5Pclose(plist {}) failure: ", plist, ex);
            }
            try {
                H5.H5Sclose(sid);
            }
            catch (HDF5Exception ex) {
                log.debug("create(): H5Sclose(sid {}) failure: ", sid, ex);
            }
            try {
                H5.H5Tclose(tid);
            }
            catch (HDF5Exception ex) {
                log.debug("create(): H5Tclose(tid {}) failure: ", tid, ex);
            }
            try {
                H5.H5Dclose(did);
            }
            catch (HDF5Exception ex) {
                log.debug("create(): H5Dclose(did {}) failure: ", did, ex);
            }

            for (int i = 0; i < nMembers; i++) {
                try {
                    H5.H5Tclose(mTypes[i]);
                }
                catch (HDF5Exception ex) {
                    log.debug("create(): H5Tclose(mTypes[{}] {}) failure: ", i, mTypes[i], ex);
                }
            }
        }

        if (dataset != null) {
            pgroup.addToMemberList(dataset);
            if (data != null) {
                dataset.init();
                long[] selected = dataset.getSelectedDims();
                for (int i = 0; i < rank; i++)
                    selected[i] = dims[i];
                dataset.write(data);
            }
        }

        return dataset;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#isString(long)
     */
    @Override
    public boolean isString(long tid) {
        boolean b = false;
        try {
            b = (HDF5Constants.H5T_STRING == H5.H5Tget_class(tid));
        }
        catch (Exception ex) {
            b = false;
        }

        return b;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#getSize(long)
     */
    @Override
    public long getSize(long tid) {
        return H5Datatype.getDatatypeSize(tid);
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#isVirtual()
     */
    @Override
    public boolean isVirtual() {
        return isVirtual;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#getVirtualFilename(int)
     */
    @Override
    public String getVirtualFilename(int index) {
        if (isVirtual)
            return virtualNameList.get(index);
        else
            return null;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#getVirtualMaps()
     */
    @Override
    public int getVirtualMaps() {
        if (isVirtual)
            return virtualNameList.size();
        else
            return -1;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#toString(String delimiter, int maxItems)
     */
    @Override
    public String toString(String delimiter, int maxItems) {
        Object theData = originalBuf;
        if (theData == null) {
            log.debug("toString: value is null");
            return null;
        }

        if (theData instanceof List<?>) {
            log.trace("toString: value is list");
            return null;
        }

        Class<? extends Object> valClass = theData.getClass();

        if (!valClass.isArray()) {
            log.trace("toString: finish - not array");
            String strValue = theData.toString();
            if (maxItems > 0 && strValue.length() > maxItems)
                // truncate the extra characters
                strValue = strValue.substring(0, maxItems);
            return strValue;
        }

        // value is an array
        int n = Array.getLength(theData);
        if ((maxItems > 0) && (n > maxItems))
            n = maxItems;

        log.trace("toString: isStdRef={} Array.getLength={}", ((H5Datatype) getDatatype()).isStdRef(), n);
        if (((H5Datatype) getDatatype()).isStdRef()) {
            String cname = valClass.getName();
            char dname = cname.charAt(cname.lastIndexOf('[') + 1);
            log.trace("toString: isStdRef with cname={} dname={}", cname, dname);
            String ref_str = ((H5ReferenceType) getDatatype()).getObjectReferenceName((byte[]) theData);
            log.trace("toString: ref_str={}", ref_str);
            return ref_str;
        }
        else {
            return super.toString(delimiter, maxItems);
        }
    }

}