/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the files COPYING and Copyright.html. *
 * COPYING can be found at the root of the source code distribution tree.    *
 * Or, see https://support.hdfgroup.org/products/licenses.html               *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.h5;

import java.lang.reflect.Array;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Vector;

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
import hdf.hdf5lib.HDFNativeData;
import hdf.hdf5lib.exceptions.HDF5DataFiltersException;
import hdf.hdf5lib.exceptions.HDF5Exception;
import hdf.hdf5lib.exceptions.HDF5LibraryException;
import hdf.hdf5lib.structs.H5O_info_t;

import hdf.object.Attribute;
import hdf.object.CompoundDS;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.MetaDataContainer;
import hdf.object.Utils;

/**
 * The H5CompoundDS class defines an HDF5 dataset of compound datatypes.
 *
 * An HDF5 dataset is an object composed of a collection of data elements, or raw data, and metadata
 * that stores a description of the data elements, data layout, and all other information necessary
 * to write, read, and interpret the stored data.
 *
 * An HDF5 compound datatype is similar to a struct in C or a common block in Fortran: it is a
 * collection of one or more atomic types or small arrays of such types. Each member of a compound
 * type has a name which is unique within that type, and a byte offset that determines the first
 * byte (smallest byte address) of that member in a compound datum.
 *
 * For more information on HDF5 datasets and datatypes, read the <a href=
 * "https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5
 * User's Guide</a>.
 *
 * There are two basic types of compound datasets: simple compound data and nested compound data.
 * Members of a simple compound dataset have atomic datatypes. Members of a nested compound dataset
 * are compound or arrays of compound data.
 *
 * Since Java does not understand C structures, we cannot directly read/write compound data values
 * as in the following C example.
 *
 * <pre>
 * typedef struct s1_t {
 *         int    a;
 *         float  b;
 *         double c;
 *         } s1_t;
 *     s1_t       s1[LENGTH];
 *     ...
 *     H5Dwrite(..., s1);
 *     H5Dread(..., s1);
 * </pre>
 *
 * Values of compound data fields are instead stored in a java.util.Vector object, and we read and
 * write compound data field by field rather than as a whole structure. For the example above, the
 * java.util.Vector object has three elements: int[LENGTH], float[LENGTH] and double[LENGTH]. Since
 * Java understands the primitive datatypes int, float and double, we are able to read/write the
 * compound data by field.
 *
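 * For example, the following minimal sketch reads the three fields of the compound dataset
 * described above (the dataset path "/comp" and the field order are illustrative assumptions):
 *
 * <pre>
 * H5CompoundDS dset = (H5CompoundDS) file.get("/comp");
 * dset.init();
 * List data = (List) dset.getData();
 * int[] a = (int[]) data.get(0);    // field "a"
 * float[] b = (float[]) data.get(1);  // field "b"
 * double[] c = (double[]) data.get(2); // field "c"
 * </pre>
 *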
 * @version 1.1 9/4/2007
 * @author Peter X. Cao
 */
public class H5CompoundDS extends CompoundDS implements MetaDataContainer
{
    private static final long serialVersionUID = -5968625125574032736L;

    private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5CompoundDS.class);

    /**
     * The metadata object for this data object. Members of the metadata are instances of Attribute.
     */
    private H5MetaDataContainer objMetadata;

    /** the object properties */
    private H5O_info_t objInfo;

    /** flag to indicate if the dataset is an external dataset */
    private boolean isExternal = false;

    /** flag to indicate if the dataset is a virtual dataset */
    private boolean isVirtual = false;

    /** the list of virtual names */
    private List<String> virtualNameList;

    /**
     * Constructs an instance of an HDF5 compound dataset with a given file, dataset name and path.
     *
     * The dataset object represents an existing dataset in the file. For example, new
     * H5CompoundDS(file, "dset1", "/g0/") constructs a dataset object that corresponds to the
     * dataset "dset1" at group "/g0/".
     *
     * This object is usually constructed at FileFormat.open(), which loads the file structure and
     * object information into memory. It is rarely used elsewhere.
     *
     * @param theFile
     *            the file that contains the data object.
     * @param theName
     *            the name of the data object, e.g. "dset".
     * @param thePath
     *            the full path of the data object, e.g. "/arrays/".
     */
    public H5CompoundDS(FileFormat theFile, String theName, String thePath) {
        this(theFile, theName, thePath, null);
    }

    /**
     * @deprecated Not for public use in the future.<br>
     *             Use {@link #H5CompoundDS(FileFormat, String, String)} instead.
     *
     * @param theFile
     *            the file that contains the data object.
     * @param theName
     *            the name of the data object, e.g. "dset".
     * @param thePath
     *            the full path of the data object, e.g. "/arrays/".
     * @param oid
     *            the oid of the data object.
     */
    @Deprecated
    public H5CompoundDS(FileFormat theFile, String theName, String thePath, long[] oid) {
        super(theFile, theName, thePath, oid);
        objInfo = new H5O_info_t(-1L, -1L, 0, 0, -1L, 0L, 0L, 0L, 0L, null, null, null);
        objMetadata = new H5MetaDataContainer(theFile, theName, thePath, this);

        if (theFile != null) {
            if (oid == null) {
                // retrieve the object ID
                try {
                    byte[] refBuf = H5.H5Rcreate(theFile.getFID(), this.getFullName(), HDF5Constants.H5R_OBJECT, -1);
                    this.oid = new long[1];
                    this.oid[0] = HDFNativeData.byteToLong(refBuf, 0);
                }
                catch (Exception ex) {
                    log.debug("constructor: H5Rcreate failed for file id {} and object {}", theFile.getFID(), this.getFullName());
                }
            }
        }
        else
            this.oid = null;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#open()
     */
    @Override
    public long open() {
        long did = HDF5Constants.H5I_INVALID_HID;

        if (getFID() < 0)
            log.trace("open(): file id for {} is invalid", getPath() + getName());
        else {
            try {
                did = H5.H5Dopen(getFID(), getPath() + getName(), HDF5Constants.H5P_DEFAULT);
                log.trace("open(): did={}", did);
            }
            catch (HDF5Exception ex) {
                log.debug("open(): Failed to open dataset {}", getPath() + getName(), ex);
                did = HDF5Constants.H5I_INVALID_HID;
            }
        }

        return did;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#close(int)
     */
    @Override
    public void close(long did) {
        if (did >= 0) {
            try {
                H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_LOCAL);
            }
            catch (Exception ex) {
                log.debug("close(): H5Fflush(did {}) failure: ", did, ex);
            }
            try {
                H5.H5Dclose(did);
            }
            catch (HDF5Exception ex) {
                log.debug("close(): H5Dclose(did {}) failure: ", did, ex);
            }
        }
    }

    /**
     * Retrieves datatype and dataspace information from file and sets the dataset
     * in memory.
     *
     * init() is designed to support lazy operation in a dataset object. When a
     * data object is retrieved from file, the datatype, dataspace and raw data are
     * not loaded into memory. When it is asked to read the raw data from file,
     * init() is first called to get the datatype and dataspace information, then
     * the raw data is loaded from file.
     *
     * init() is also used to reset the selection of a dataset (start, stride and
     * count) to the default, which is the entire dataset for 1D or 2D datasets. In
     * the following example, init() at step 1) retrieves datatype and dataspace
     * information from file. getData() at step 3) reads only one data point. init()
     * at step 4) resets the selection to the whole dataset. getData() at step 6)
     * reads the values of the whole dataset into memory.
     *
     * <pre>
     * dset = (Dataset) file.get(NAME_DATASET);
     *
     * // 1) get datatype and dataspace information from file
     * dset.init();
     * rank = dset.getRank(); // rank = 2, a 2D dataset
     * count = dset.getSelectedDims();
     * start = dset.getStartDims();
     * dims = dset.getDims();
     *
     * // 2) select only one data point
     * for (int i = 0; i &lt; rank; i++) {
     *     start[i] = 0;
     *     count[i] = 1;
     * }
     *
     * // 3) read one data point
     * data = dset.getData();
     *
     * // 4) reset selection to the whole dataset
     * dset.init();
     *
     * // 5) clean the memory data buffer
     * dset.clearData();
     *
     * // 6) Read the whole dataset
     * data = dset.getData();
     * </pre>
     */
    @Override
    public void init() {
        if (inited) {
            resetSelection();
            log.trace("init(): Dataset already initialized");
            return; // already called. Initialize only once
        }

        long did = HDF5Constants.H5I_INVALID_HID;
        long tid = HDF5Constants.H5I_INVALID_HID;
        long sid = HDF5Constants.H5I_INVALID_HID;
        flatNameList = new Vector<>();
        flatTypeList = new Vector<>();

        did = open();
        if (did >= 0) {
            // check if it is an external or virtual dataset
            long pid = HDF5Constants.H5I_INVALID_HID;
            try {
                pid = H5.H5Dget_create_plist(did);
                try {
                    int nfiles = H5.H5Pget_external_count(pid);
                    isExternal = (nfiles > 0);
                    int layoutType = H5.H5Pget_layout(pid);
                    isVirtual = (layoutType == HDF5Constants.H5D_VIRTUAL);
                    if (isVirtual) {
                        try {
                            long vmaps = H5.H5Pget_virtual_count(pid);
                            if (vmaps > 0) {
                                virtualNameList = new Vector<>();
                                for (long next = 0; next < vmaps; next++) {
                                    try {
                                        String fname = H5.H5Pget_virtual_filename(pid, next);
                                        virtualNameList.add(fname);
                                        log.trace("init(): virtualNameList[{}]={}", next, fname);
                                    }
                                    catch (Exception err) {
                                        log.trace("init(): vds[{}] continue", next);
                                    }
                                }
                            }
                        }
                        catch (Exception err) {
                            log.debug("init(): vds count error: ", err);
                        }
                    }
                    log.trace("init(): pid={} nfiles={} isExternal={} isVirtual={}", pid, nfiles, isExternal, isVirtual);
                }
                catch (Exception ex) {
                    log.debug("init(): check if it is an external or virtual dataset: ", ex);
                }
            }
            catch (Exception ex) {
                log.debug("init(): H5Dget_create_plist() failure: ", ex);
            }
            finally {
                try {
                    H5.H5Pclose(pid);
                }
                catch (Exception ex) {
                    log.debug("init(): H5Pclose(pid {}) failure: ", pid, ex);
                }
            }

            try {
                sid = H5.H5Dget_space(did);
                rank = H5.H5Sget_simple_extent_ndims(sid);
                tid = H5.H5Dget_type(did);
                log.trace("init(): tid={} sid={} rank={}", tid, sid, rank);

                if (rank == 0) {
                    // a scalar data point
                    isScalar = true;
                    rank = 1;
                    dims = new long[] { 1 };
                    log.trace("init(): rank is a scalar data point");
                }
                else {
                    isScalar = false;
                    dims = new long[rank];
                    maxDims = new long[rank];
                    H5.H5Sget_simple_extent_dims(sid, dims, maxDims);
                    log.trace("init(): rank={}, dims={}, maxDims={}", rank, dims, maxDims);
                }

                try {
                    datatype = new H5Datatype(getFileFormat(), tid);

                    log.trace("init(): tid={} has isText={} : isVLEN={} : isEnum={} : isUnsigned={} : isRegRef={}", tid,
                            datatype.isText(), datatype.isVLEN(), ((H5Datatype) datatype).isEnum(), datatype.isUnsigned(), ((H5Datatype) datatype).isRegRef());

                    H5Datatype.extractCompoundInfo((H5Datatype) datatype, "", flatNameList, flatTypeList);
                }
                catch (Exception ex) {
                    log.debug("init(): failed to create datatype for dataset: ", ex);
                    datatype = null;
                }

                // initialize member information
                numberOfMembers = flatNameList.size();
                log.trace("init(): numberOfMembers={}", numberOfMembers);

                memberNames = new String[numberOfMembers];
                memberTypes = new Datatype[numberOfMembers];
                memberOrders = new int[numberOfMembers];
                isMemberSelected = new boolean[numberOfMembers];
                memberDims = new Object[numberOfMembers];

                for (int i = 0; i < numberOfMembers; i++) {
                    isMemberSelected[i] = true;
                    memberOrders[i] = 1;
                    memberDims[i] = null;

                    try {
                        memberTypes[i] = flatTypeList.get(i);
                        log.trace("init()[{}]: memberTypes[{}]={}", i, i, memberTypes[i].getDescription());

                        if (memberTypes[i].isArray()) {
                            long mdim[] = memberTypes[i].getArrayDims();
                            int idim[] = new int[mdim.length];
                            int arrayNpoints = 1;

                            for (int j = 0; j < idim.length; j++) {
                                idim[j] = (int) mdim[j];
                                arrayNpoints *= idim[j];
                            }

                            memberDims[i] = idim;
                            memberOrders[i] = arrayNpoints;
                        }
                    }
                    catch (Exception ex) {
                        log.debug("init()[{}]: memberTypes[{}] get failure: ", i, i, ex);
                        memberTypes[i] = null;
                    }

                    try {
                        memberNames[i] = flatNameList.get(i);
                        log.trace("init()[{}]: memberNames[{}]={}", i, i, memberNames[i]);
                    }
                    catch (Exception ex) {
                        log.debug("init()[{}]: memberNames[{}] get failure: ", i, i, ex);
                        memberNames[i] = "null";
                    }
                } //  (int i=0; i<numberOfMembers; i++)

                inited = true;
            }
            catch (HDF5Exception ex) {
                numberOfMembers = 0;
                memberNames = null;
                memberTypes = null;
                memberOrders = null;
                log.debug("init(): ", ex);
            }
            finally {
                if (datatype != null)
                    datatype.close(tid);
                else {
                    // the H5Datatype wrapper was never created, so close the raw type id directly
                    try {
                        H5.H5Tclose(tid);
                    }
                    catch (HDF5Exception ex2) {
                        log.debug("init(): H5Tclose(tid {}) failure: ", tid, ex2);
                    }
                }

                try {
                    H5.H5Sclose(sid);
                }
                catch (HDF5Exception ex2) {
                    log.debug("init(): H5Sclose(sid {}) failure: ", sid, ex2);
                }
            }

            close(did);

            startDims = new long[rank];
            selectedDims = new long[rank];

            resetSelection();
        }
        else {
            log.debug("init(): failed to open dataset");
        }
    }

    /**
     * Checks if the object has any attributes attached.
     *
     * @return true if it has any attributes, false otherwise.
     */
    @Override
    public boolean hasAttribute() {
        objInfo.num_attrs = objMetadata.getObjectAttributeSize();

        if (objInfo.num_attrs < 0) {
            long did = open();
            if (did >= 0) {
                objInfo.num_attrs = 0;

                try {
                    objInfo = H5.H5Oget_info(did);
                }
                catch (Exception ex) {
                    objInfo.num_attrs = 0;
                    log.debug("hasAttribute(): get object info failure: ", ex);
                }
                finally {
                    close(did);
                }
                objMetadata.setObjectAttributeSize((int) objInfo.num_attrs);
            }
            else {
                log.debug("hasAttribute(): could not open dataset");
            }
        }

        log.trace("hasAttribute(): nAttributes={}", objInfo.num_attrs);
        return (objInfo.num_attrs > 0);
    }

    /**
     * Returns the datatype of the data object.
     *
     * @return the datatype of the data object.
     */
    @Override
    public Datatype getDatatype() {
        if (!inited)
            init();

        if (datatype == null) {
            long did = HDF5Constants.H5I_INVALID_HID;
            long tid = HDF5Constants.H5I_INVALID_HID;

            did = open();
            if (did >= 0) {
                try {
                    tid = H5.H5Dget_type(did);
                    datatype = new H5Datatype(getFileFormat(), tid);
                }
                catch (Exception ex) {
                    log.debug("getDatatype(): ", ex);
                }
                finally {
                    try {
                        H5.H5Tclose(tid);
                    }
                    catch (HDF5Exception ex) {
                        log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex);
                    }
                    try {
                        H5.H5Dclose(did);
                    }
                    catch (HDF5Exception ex) {
                        log.debug("getDatatype(): H5Dclose(did {}) failure: ", did, ex);
                    }
                }
            }
        }

        if (isExternal) {
            String pdir = this.getFileFormat().getAbsoluteFile().getParent();

            if (pdir == null) {
                pdir = ".";
            }
            System.setProperty("user.dir", pdir);
            log.trace("getDatatype(): External dataset: user.dir={}", pdir);
        }

        return datatype;
    }

    /**
     * Removes all of the elements from the metadata list.
     * The list should be empty after this call returns.
     */
    @Override
    public void clear() {
        super.clear();
        objMetadata.clear();
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#readBytes()
     */
    @Override
    public byte[] readBytes() throws HDF5Exception {
        byte[] theData = null;

        if (!isInited())
            init();

        long did = open();
        if (did >= 0) {
            long fspace = HDF5Constants.H5I_INVALID_HID;
            long mspace = HDF5Constants.H5I_INVALID_HID;
            long tid = HDF5Constants.H5I_INVALID_HID;

            try {
                long[] lsize = { 1 };
                for (int j = 0; j < selectedDims.length; j++)
                    lsize[0] *= selectedDims[j];

                fspace = H5.H5Dget_space(did);
                mspace = H5.H5Screate_simple(rank, selectedDims, null);

                // set the rectangle selection
                // HDF5 bug: for scalar dataset, H5Sselect_hyperslab gives core dump
                if (rank * dims[0] > 1)
                    H5.H5Sselect_hyperslab(fspace, HDF5Constants.H5S_SELECT_SET, startDims, selectedStride, selectedDims, null); // set block to 1

                tid = H5.H5Dget_type(did);
                long size = H5.H5Tget_size(tid) * lsize[0];
                log.trace("readBytes(): size = {}", size);

                if (size < 0 || size > Integer.MAX_VALUE)
                    throw new Exception("Invalid int size");

                theData = new byte[(int)size];

                log.trace("readBytes(): H5Dread: did={} tid={} fspace={} mspace={}", did, tid, fspace, mspace);
                H5.H5Dread(did, tid, mspace, fspace, HDF5Constants.H5P_DEFAULT, theData);
            }
            catch (Exception ex) {
                log.debug("readBytes(): failed to read data: ", ex);
            }
            finally {
                try {
                    H5.H5Sclose(fspace);
                }
                catch (Exception ex2) {
                    log.debug("readBytes(): H5Sclose(fspace {}) failure: ", fspace, ex2);
                }
                try {
                    H5.H5Sclose(mspace);
                }
                catch (Exception ex2) {
                    log.debug("readBytes(): H5Sclose(mspace {}) failure: ", mspace, ex2);
                }
                try {
                    H5.H5Tclose(tid);
                }
                catch (HDF5Exception ex2) {
                    log.debug("readBytes(): H5Tclose(tid {}) failure: ", tid, ex2);
                }
                close(did);
            }
        }

        return theData;
    }

    /**
     * Reads the data from file.
     *
     * read() reads the data from file to a memory buffer and returns the memory
     * buffer. The dataset object does not hold the memory buffer. To store the
     * memory buffer in the dataset object, one must call getData().
     *
     * By default, the whole dataset is read into memory. Users can also select
     * a subset to read. Subsetting is done in an implicit way.
     *
     * <b>How to Select a Subset</b>
     *
     * A selection is specified by three arrays: start, stride and count.
     * <ol>
     * <li>start: offset of a selection
     * <li>stride: determines how many elements to move in each dimension
     * <li>count: number of elements to select in each dimension
     * </ol>
     * getStartDims(), getStride() and getSelectedDims() return the start,
     * stride and count arrays respectively. Applications can make a selection
     * by changing the values of the arrays.
     *
     * The following example shows how to make a subset. In the example, the
     * dataset is a 4-dimensional array of [200][100][50][10], i.e. dims[0]=200;
     * dims[1]=100; dims[2]=50; dims[3]=10; <br>
     * We want to select every other data point in dims[1] and dims[2].
     *
     * <pre>
     * int rank = dataset.getRank(); // number of dimensions of the dataset
     * long[] dims = dataset.getDims(); // the dimension sizes of the dataset
     * long[] selected = dataset.getSelectedDims(); // the selected size of the
     *                                              // dataset
     * long[] start = dataset.getStartDims(); // the offset of the selection
     * long[] stride = dataset.getStride(); // the stride of the dataset
     * int[] selectedIndex = dataset.getSelectedIndex(); // the selected
     *                                                   // dimensions for
     *                                                   // display
     *
     * // select dim1 and dim2 as 2D data for display, and slice through dim0
     * selectedIndex[0] = 1;
     * selectedIndex[1] = 2;
     * selectedIndex[2] = 0;
     *
     * // reset the selection arrays
     * for (int i = 0; i &lt; rank; i++) {
     *     start[i] = 0;
     *     selected[i] = 1;
     *     stride[i] = 1;
     * }
     *
     * // set stride to 2 on dim1 and dim2 so that every other data point is
     * // selected.
     * stride[1] = 2;
     * stride[2] = 2;
     *
     * // set the selection size of dim1 and dim2
     * selected[1] = dims[1] / stride[1];
     * selected[2] = dims[2] / stride[2];
     *
     * // when dataset.getData() is called, the selection above will be used since
     * // the dimension arrays are passed by reference. Changes of these arrays
     * // outside the dataset object directly change the values of these arrays
     * // in the dataset object.
     * </pre>
     *
     * For CompoundDS, the memory data object is a java.util.List object. Each
     * element of the list is a data array that corresponds to a compound field.
     *
     * For example, if compound dataset "comp" has the following nested
     * structure, and member datatypes
     *
     * <pre>
     * comp --&gt; m01 (int)
     * comp --&gt; m02 (float)
     * comp --&gt; nest1 --&gt; m11 (char)
     * comp --&gt; nest1 --&gt; m12 (String)
     * comp --&gt; nest1 --&gt; nest2 --&gt; m21 (long)
     * comp --&gt; nest1 --&gt; nest2 --&gt; m22 (double)
     * </pre>
     *
     * getData() returns a list of six arrays: {int[], float[], char[],
     * String[], long[] and double[]}.
     *
     * @return the data read from file.
     *
     * @see #getData()
     * @see hdf.object.DataFormat#read()
     *
     * @throws Exception
     *             if object can not be read
     */
    @Override
    public Object read() throws Exception {
        Object readData = null;

        if (!isInited())
            init();

        try {
            readData = compoundDatasetCommonIO(H5File.IO_TYPE.READ, null);
        }
        catch (Exception ex) {
            log.debug("read(): failed to read compound dataset: ", ex);
            throw new Exception("failed to read compound dataset: " + ex.getMessage(), ex);
        }

        return readData;
    }

    /**
     * Writes the given data buffer into this dataset in a file.
     *
     * The data buffer is a vector that contains the data values of compound fields. The data is written
     * into the file field by field.
     *
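     * A minimal sketch of a field-by-field update (the field order follows the class
     * description above and is an illustrative assumption):
     *
     * <pre>
     * List data = (List) dset.getData(); // read the current field values
     * int[] a = (int[]) data.get(0);     // first compound field
     * a[0] = 10;                         // modify it
     * dset.write(data);                  // write all fields back to the file
     * </pre>
     *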
     * @param buf
     *            The vector that contains the data values of compound fields.
     *
     * @throws Exception
     *             If there is an error at the HDF5 library level.
     */
    @Override
    public void write(Object buf) throws Exception {
        if (this.getFileFormat().isReadOnly())
            throw new Exception("cannot write to compound dataset in file opened as read-only");

        if (!isInited())
            init();

        try {
            compoundDatasetCommonIO(H5File.IO_TYPE.WRITE, buf);
        }
        catch (Exception ex) {
            log.debug("write(Object): failed to write compound dataset: ", ex);
            throw new Exception("failed to write compound dataset: " + ex.getMessage(), ex);
        }
    }

    /*
     * Routine to convert datatypes that are read in as byte arrays to
     * regular types.
     */
    @Override
    protected Object convertByteMember(final Datatype dtype, byte[] byteData) {
        Object theObj = null;

        // 128-bit (16-byte) floating-point values are converted to BigDecimal;
        // all other types are handled by the superclass.
        if (dtype.isFloat() && dtype.getDatatypeSize() == 16)
            theObj = ((H5Datatype)dtype).byteToBigDecimal(byteData, 0);
        else
            theObj = super.convertByteMember(dtype, byteData);

        return theObj;
    }

    private Object compoundDatasetCommonIO(H5File.IO_TYPE ioType, Object writeBuf) throws Exception {
        H5Datatype dsDatatype = (H5Datatype) getDatatype();
        Object theData = null;

        if (numberOfMembers <= 0) {
            log.debug("compoundDatasetCommonIO(): Dataset contains no members");
            throw new Exception("dataset contains no members");
        }

        /*
         * I/O type-specific pre-initialization.
         */
        if (ioType == H5File.IO_TYPE.WRITE) {
            if ((writeBuf == null) || !(writeBuf instanceof List)) {
                log.debug("compoundDatasetCommonIO(): writeBuf is null or invalid");
                throw new Exception("write buffer is null or invalid");
            }

            /*
             * Check for any unsupported datatypes and fail early before
             * attempting to write to the dataset.
             */
            if (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isCompound()) {
                log.debug("compoundDatasetCommonIO(): cannot write dataset of type ARRAY of COMPOUND");
                throw new HDF5Exception("Unsupported dataset of type ARRAY of COMPOUND");
            }

            if (dsDatatype.isVLEN() && dsDatatype.getDatatypeBase().isCompound()) {
                log.debug("compoundDatasetCommonIO(): cannot write dataset of type VLEN of COMPOUND");
                throw new HDF5Exception("Unsupported dataset of type VLEN of COMPOUND");
            }
        }

        long did = open();
        if (did >= 0) {
            long[] spaceIDs = { HDF5Constants.H5I_INVALID_HID, HDF5Constants.H5I_INVALID_HID }; // spaceIDs[0]=mspace, spaceIDs[1]=fspace

            try {
                /*
                 * NOTE: this call sets up a hyperslab selection in the file according to the
                 * current selection in the dataset object.
                 */
                long totalSelectedSpacePoints = H5Utils.getTotalSelectedSpacePoints(did, dims, startDims,
                        selectedStride, selectedDims, spaceIDs);

                theData = compoundTypeIO(ioType, did, spaceIDs, (int) totalSelectedSpacePoints, dsDatatype, writeBuf, new int[]{0});
            }
            finally {
                if (HDF5Constants.H5S_ALL != spaceIDs[0]) {
                    try {
                        H5.H5Sclose(spaceIDs[0]);
                    }
                    catch (Exception ex) {
                        log.debug("compoundDatasetCommonIO(): H5Sclose(spaceIDs[0] {}) failure: ", spaceIDs[0], ex);
                    }
                }

                if (HDF5Constants.H5S_ALL != spaceIDs[1]) {
                    try {
                        H5.H5Sclose(spaceIDs[1]);
                    }
                    catch (Exception ex) {
                        log.debug("compoundDatasetCommonIO(): H5Sclose(spaceIDs[1] {}) failure: ", spaceIDs[1], ex);
                    }
                }

                close(did);
            }
        }
        else
            log.debug("compoundDatasetCommonIO(): failed to open dataset");

        return theData;
    }

    /*
     * Private recursive routine to read/write an entire compound datatype field by
     * field. This routine is called recursively for ARRAY of COMPOUND and VLEN of
     * COMPOUND datatypes.
     *
     * NOTE: the globalMemberIndex hack is ugly, but we need to keep track of a
     * running counter so that we can index properly into the flattened name list
     * generated from H5Datatype.extractCompoundInfo() at dataset init time.
     */
    private Object compoundTypeIO(H5File.IO_TYPE ioType, long did, long[] spaceIDs, int nSelPoints,
            final H5Datatype cmpdType, Object writeBuf, int[] globalMemberIndex) {
        Object theData = null;

        if (cmpdType.isArray()) {
            log.trace("compoundTypeIO(): ARRAY type");

            long[] arrayDims = cmpdType.getArrayDims();
            int arrSize = nSelPoints;
            for (int i = 0; i < arrayDims.length; i++)
                arrSize *= arrayDims[i];
            theData = compoundTypeIO(ioType, did, spaceIDs, arrSize, (H5Datatype) cmpdType.getDatatypeBase(), writeBuf, globalMemberIndex);
        }
        else if (cmpdType.isVLEN() && !cmpdType.isVarStr()) {
            /*
             * TODO: true variable-length support.
             */
            String[] errVal = new String[nSelPoints];
            String errStr = "*UNSUPPORTED*";

            for (int j = 0; j < nSelPoints; j++)
                errVal[j] = errStr;

            /*
             * Setup a fake data list.
             */
            Datatype baseType = cmpdType.getDatatypeBase();
            while (baseType != null && !baseType.isCompound()) {
                baseType = baseType.getDatatypeBase();
            }

            List<Object> fakeVlenData = (List<Object>) H5Datatype.allocateArray((H5Datatype) baseType, nSelPoints);
            fakeVlenData.add(errVal);

            theData = fakeVlenData;
        }
        else if (cmpdType.isCompound()) {
            List<Object> memberDataList = null;
            List<Datatype> typeList = cmpdType.getCompoundMemberTypes();

            log.trace("compoundTypeIO(): {} {} members:", (ioType == H5File.IO_TYPE.READ) ? "read" : "write", typeList.size());

            if (ioType == H5File.IO_TYPE.READ)
                memberDataList = (List<Object>) H5Datatype.allocateArray(cmpdType, nSelPoints);

            try {
                for (int i = 0, writeListIndex = 0; i < typeList.size(); i++) {
                    H5Datatype memberType = null;
                    String memberName = null;
                    Object memberData = null;

                    try {
                        memberType = (H5Datatype) typeList.get(i);
                    }
                    catch (Exception ex) {
                        log.debug("compoundTypeIO(): get member {} failure: ", i, ex);
                        globalMemberIndex[0]++;
                        continue;
                    }

                    /*
                     * Since the type list used here is not a flattened structure, we need to skip
                     * the member selection check for compound types, as otherwise having a single
                     * member not selected would skip the reading/writing for the entire compound
                     * type. The member selection check will be deferred to the recursive compound
                     * read/write below.
                     */
                    if (!memberType.isCompound()) {
                        if (!isMemberSelected[globalMemberIndex[0] % this.getMemberCount()]) {
                            log.debug("compoundTypeIO(): member[{}] is not selected", i);
                            globalMemberIndex[0]++;
                            continue; // the field is not selected
                        }
                    }

                    if (!memberType.isCompound()) {
                        try {
                            memberName = new String(flatNameList.get(globalMemberIndex[0]));
                        }
                        catch (Exception ex) {
                            log.debug("compoundTypeIO(): get member {} name failure: ", i, ex);
                            memberName = "null";
                        }
                    }

                    log.trace("compoundTypeIO(): member[{}]({}) is type {}", i, memberName, memberType.getDescription());

                    if (ioType == H5File.IO_TYPE.READ) {
                        try {
                            if (memberType.isCompound())
                                memberData = compoundTypeIO(ioType, did, spaceIDs, nSelPoints, memberType, writeBuf, globalMemberIndex);
                            else if (memberType.isArray() /* || (memberType.isVLEN() && !memberType.isVarStr()) */) {
                                /*
                                 * Recursively detect any nested array/vlen of compound types.
                                 */
                                boolean compoundFound = false;

                                Datatype base = memberType.getDatatypeBase();
                                while (base != null) {
                                    if (base.isCompound())
                                        compoundFound = true;

                                    base = base.getDatatypeBase();
                                }

                                if (compoundFound) {
                                    /*
                                     * Skip the top-level array/vlen type.
                                     */
                                    globalMemberIndex[0]++;

                                    memberData = compoundTypeIO(ioType, did, spaceIDs, nSelPoints, memberType, writeBuf, globalMemberIndex);
                                }
                                else {
                                    memberData = readSingleCompoundMember(did, spaceIDs, nSelPoints, memberType, memberName);
                                    globalMemberIndex[0]++;
                                }
                            }
                            else {
                                memberData = readSingleCompoundMember(did, spaceIDs, nSelPoints, memberType, memberName);
                                globalMemberIndex[0]++;
                            }
                        }
                        catch (Exception ex) {
                            log.debug("compoundTypeIO(): failed to read member {}: ", i, ex);
                            globalMemberIndex[0]++;
                            memberData = null;
                        }

                        if (memberData == null) {
                            String[] errVal = new String[nSelPoints];
                            String errStr = "*ERROR*";

                            for (int j = 0; j < nSelPoints; j++)
                                errVal[j] = errStr;

                            memberData = errVal;
                        }

                        memberDataList.add(memberData);
                    }
                    else {
                        try {
                            /*
                             * TODO: currently doesn't correctly handle non-selected compound members.
                             */
                            memberData = ((List<?>) writeBuf).get(writeListIndex++);
                        }
                        catch (Exception ex) {
                            log.debug("compoundTypeIO(): get member[{}] data failure: ", i, ex);
                            globalMemberIndex[0]++;
                            continue;
                        }

                        if (memberData == null) {
                            log.debug("compoundTypeIO(): member[{}] data is null", i);
                            globalMemberIndex[0]++;
                            continue;
                        }

                        try {
                            if (memberType.isCompound()) {
                                List<?> nestedList = (List<?>) ((List<?>) writeBuf).get(writeListIndex++);
                                compoundTypeIO(ioType, did, spaceIDs, nSelPoints, memberType, nestedList, globalMemberIndex);
                            }
                            else {
                                writeSingleCompoundMember(did, spaceIDs, nSelPoints, memberType, memberName, memberData);
                                globalMemberIndex[0]++;
                            }
                        }
                        catch (Exception ex) {
                            log.debug("compoundTypeIO(): failed to write member[{}]: ", i, ex);
                            globalMemberIndex[0]++;
                        }
                    }
                } //  (i = 0, writeListIndex = 0; i < typeList.size(); i++)
            }
            catch (Exception ex) {
                log.debug("compoundTypeIO(): failure: ", ex);
                memberDataList = null;
            }

            theData = memberDataList;
        }

        return theData;
    }

    /*
     * Private routine to read a single field of a compound datatype by creating a
     * compound datatype and inserting the single field into that datatype.
     */
    private Object readSingleCompoundMember(long dsetID, long[] spaceIDs, int nSelPoints,
            final H5Datatype memberType, String memberName) throws Exception {
        H5Datatype dsDatatype = (H5Datatype) this.getDatatype();
        Object memberData = null;

        try {
            memberData = H5Datatype.allocateArray(memberType, nSelPoints);
            log.trace("readSingleCompoundMember(): allocateArray {} points ", nSelPoints);
        }
        catch (OutOfMemoryError err) {
            memberData = null;
            throw new Exception("Out of memory");
        }
        catch (Exception ex) {
            log.debug("readSingleCompoundMember(): ", ex);
            memberData = null;
        }

        if (memberData != null) {
            /*
             * Create a compound datatype containing just a single field (the one which we
             * want to read).
             */
            long compTid = HDF5Constants.H5I_INVALID_HID;
            try {
                compTid = dsDatatype.createCompoundFieldType(memberName);
            }
            catch (HDF5Exception ex) {
                log.debug("readSingleCompoundMember(): unable to create compound field type for member of type {}: ",
                        memberType.getDescription(), ex);
                memberData = null;
            }

            /*
             * Actually read the data for this member now that everything has been setup.
             */
            try {
                if (memberType.isVLEN() || (memberType.isArray() && memberType.getDatatypeBase().isVLEN())) {
                    log.trace("readSingleCompoundMember(): H5DreadVL did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}",
                            dsetID, compTid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
                            (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);

                    H5.H5DreadVL(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, (Object[]) memberData);
                }
                else {
                    log.trace("readSingleCompoundMember(): H5Dread did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}",
                            dsetID, compTid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
                            (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);

                    H5.H5Dread(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, memberData);
                }
            }
            catch (HDF5DataFiltersException exfltr) {
                log.debug("readSingleCompoundMember(): read failure: ", exfltr);
                throw new Exception("Filter not available exception: " + exfltr.getMessage(), exfltr);
            }
            catch (Exception ex) {
                log.debug("readSingleCompoundMember(): read failure: ", ex);
                throw new Exception("failed to read compound member: " + ex.getMessage(), ex);
            }
            finally {
                dsDatatype.close(compTid);
            }

            /*
             * Perform any necessary data conversions.
             */
            if (memberType.isUnsigned()) {
                log.trace("readSingleCompoundMember(): converting from unsigned C-type integers");
                memberData = Dataset.convertFromUnsignedC(memberData, null);
            }
            else if (Utils.getJavaObjectRuntimeClass(memberData) == 'B') {
                log.trace("readSingleCompoundMember(): converting byte array member into Object");

                /*
                 * For all other types that get read into memory as a byte[] (such as nested
                 * compounds and arrays of compounds), we must manually convert the byte[] into
                 * something usable.
                 */
                memberData = convertByteMember(memberType, (byte[]) memberData);
            }
        }

        return memberData;
    }

    /*
     * Private routine to write a single field of a compound datatype by creating a
     * compound datatype and inserting the single field into that datatype.
     */
    private void writeSingleCompoundMember(long dsetID, long[] spaceIDs, int nSelPoints,
            final H5Datatype memberType, String memberName, Object theData) throws Exception {
        H5Datatype dsDatatype = (H5Datatype) this.getDatatype();

        /*
         * Check for any unsupported datatypes before attempting to write this compound
         * member.
         */
        if (memberType.isVLEN() && !memberType.isVarStr()) {
            log.debug("writeSingleCompoundMember(): writing of VL non-strings is not currently supported");
            throw new Exception("writing of VL non-strings is not currently supported");
        }

        /*
         * Perform any necessary data conversions before writing the data.
         */
        Object tmpData = theData;
        try {
            if (memberType.isUnsigned()) {
                // Check if we need to convert unsigned integer data from Java-style
                // to C-style integers
                long tsize = memberType.getDatatypeSize();
                String cname = theData.getClass().getName();
                char dname = cname.charAt(cname.lastIndexOf('[') + 1);
                boolean doIntConversion = (((tsize == 1) && (dname == 'S'))
                        || ((tsize == 2) && (dname == 'I')) || ((tsize == 4) && (dname == 'J')));

                if (doIntConversion) {
                    log.trace("writeSingleCompoundMember(): converting integer data to unsigned C-type integers");
                    tmpData = convertToUnsignedC(theData, null);
                }
            }
            else if (memberType.isString() && (Array.get(theData, 0) instanceof String)) {
                log.trace("writeSingleCompoundMember(): converting string array to byte array");
                tmpData = stringToByte((String[]) theData, (int) memberType.getDatatypeSize());
            }
            else if (memberType.isEnum() && (Array.get(theData, 0) instanceof String)) {
                log.trace("writeSingleCompoundMember(): converting enum names to values");
                tmpData = memberType.convertEnumNameToValue((String[]) theData);
            }
        }
        catch (Exception ex) {
            log.debug("writeSingleCompoundMember(): data conversion failure: ", ex);
            tmpData = null;
        }

        if (tmpData == null) {
            log.debug("writeSingleCompoundMember(): data is null");
            return;
        }

        /*
         * Create a compound datatype containing just a single field (the one which we
         * want to write).
         */
        long compTid = HDF5Constants.H5I_INVALID_HID;
        try {
            compTid = dsDatatype.createCompoundFieldType(memberName);
        }
        catch (HDF5Exception ex) {
            log.debug("writeSingleCompoundMember(): unable to create compound field type for member of type {}: ",
                    memberType.getDescription(), ex);
        }

        /*
         * Actually write the data now that everything has been setup.
         */
        try {
            if (memberType.isVarStr()) {
                log.trace("writeSingleCompoundMember(): H5Dwrite_string did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}",
                        dsetID, compTid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
                        (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);

                H5.H5Dwrite_string(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, (String[]) tmpData);
            }
            else {
                log.trace("writeSingleCompoundMember(): H5Dwrite did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}",
                        dsetID, compTid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
                        (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);

                // BUG: nested compound data is not written and no exception is
                // thrown. Need to check whether this is a Java wrapper error or
                // a C library error.
                H5.H5Dwrite(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, tmpData);
            }
        }
        catch (Exception ex) {
            log.debug("writeSingleCompoundMember(): write failure: ", ex);
            throw new Exception("failed to write compound member: " + ex.getMessage(), ex);
        }
        finally {
            dsDatatype.close(compTid);
        }
    }

    /**
     * Converts the data values of this data object to appropriate Java integers if
     * they are unsigned integers.
     *
     * @see hdf.object.Dataset#convertToUnsignedC(Object)
     * @see hdf.object.Dataset#convertFromUnsignedC(Object, Object)
     *
     * @return the converted data buffer.
     */
    @Override
    public Object convertFromUnsignedC() {
        throw new UnsupportedOperationException("H5CompoundDS:convertFromUnsignedC Unsupported operation.");
    }

    /**
     * Converts Java integer data values of this data object back to unsigned C-type
     * integer data if they are unsigned integers.
     *
     * @see hdf.object.Dataset#convertToUnsignedC(Object)
     * @see hdf.object.Dataset#convertToUnsignedC(Object, Object)
     *
     * @return the converted data buffer.
     */
    @Override
    public Object convertToUnsignedC() {
        throw new UnsupportedOperationException("H5CompoundDS:convertToUnsignedC Unsupported operation.");
    }

    /**
     * Retrieves the object's metadata, such as attributes, from the file.
     *
     * Metadata, such as attributes, is stored in a List.
     *
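     * A minimal usage sketch:
     *
     * <pre>
     * List&lt;Attribute&gt; attrs = dset.getMetadata();
     * for (Attribute attr : attrs)
     *     System.out.println(attr); // relies on Attribute's toString()
     * </pre>
     *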
     * @return the list of metadata objects.
     *
     * @throws HDF5Exception
     *             if the metadata can not be retrieved
     */
    @Override
    public List<Attribute> getMetadata() throws HDF5Exception {
        int gmIndexType = 0;
        int gmIndexOrder = 0;

        try {
            gmIndexType = fileFormat.getIndexType(null);
        }
        catch (Exception ex) {
            log.debug("getMetadata(): getIndexType failed: ", ex);
        }
        try {
            gmIndexOrder = fileFormat.getIndexOrder(null);
        }
        catch (Exception ex) {
            log.debug("getMetadata(): getIndexOrder failed: ", ex);
        }
        return this.getMetadata(gmIndexType, gmIndexOrder);
    }
1285
1286    /**
1287     * Retrieves the object's metadata, such as attributes, from the file.
1288     *
1289     * Metadata, such as attributes, is stored in a List.
1290     *
     * @param attrPropList
     *             the list of properties to get: optionally the attribute index type
     *             followed by the index order
     *
     * @return the list of metadata objects.
     *
     * @throws HDF5Exception
     *             if the metadata cannot be retrieved
     */
    public List<Attribute> getMetadata(int... attrPropList) throws HDF5Exception {
        if (!isInited())
            init();

        try {
            this.linkTargetObjName = H5File.getLinkTargetName(this);
        }
        catch (Exception ex) {
            log.debug("getMetadata(): getLinkTargetName failed: ", ex);
        }

        if (objMetadata.getAttributeList() == null) {
            long did = HDF5Constants.H5I_INVALID_HID;
            long pcid = HDF5Constants.H5I_INVALID_HID;
            long paid = HDF5Constants.H5I_INVALID_HID;

            did = open();
            if (did >= 0) {
                try {
                    // get the compression and chunk information
                    pcid = H5.H5Dget_create_plist(did);
                    paid = H5.H5Dget_access_plist(did);
                    long storageSize = H5.H5Dget_storage_size(did);
                    int nfilt = H5.H5Pget_nfilters(pcid);
                    int layoutType = H5.H5Pget_layout(pcid);

                    storageLayout.setLength(0);
                    compression.setLength(0);

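                    // Describe the storage layout; for chunked layouts an estimated
                    // compression ratio is also reported below.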
                    if (layoutType == HDF5Constants.H5D_CHUNKED) {
                        chunkSize = new long[rank];
                        H5.H5Pget_chunk(pcid, rank, chunkSize);
                        int n = chunkSize.length;
                        storageLayout.append("CHUNKED: ").append(chunkSize[0]);
                        for (int i = 1; i < n; i++)
                            storageLayout.append(" X ").append(chunkSize[i]);

                        if (nfilt > 0) {
                            long nelmts = 1;
                            long uncompSize;
                            long datumSize = getDatatype().getDatatypeSize();

                            if (datumSize < 0) {
                                long tmptid = HDF5Constants.H5I_INVALID_HID;
                                try {
                                    tmptid = H5.H5Dget_type(did);
                                    datumSize = H5.H5Tget_size(tmptid);
                                }
                                finally {
                                    try {
                                        H5.H5Tclose(tmptid);
                                    }
                                    catch (Exception ex2) {
                                        log.debug("getMetadata(): H5Tclose(tmptid {}) failure: ", tmptid, ex2);
                                    }
                                }
                            }

                            for (int i = 0; i < rank; i++)
                                nelmts *= dims[i];
                            uncompSize = nelmts * datumSize;

                            /* compression ratio = uncompressed size / compressed size */

                            if (storageSize != 0) {
                                double ratio = (double) uncompSize / (double) storageSize;
                                DecimalFormat df = new DecimalFormat();
                                df.setMinimumFractionDigits(3);
                                df.setMaximumFractionDigits(3);
                                compression.append(df.format(ratio)).append(":1");
                            }
                        }
                    }
                    else if (layoutType == HDF5Constants.H5D_COMPACT) {
                        storageLayout.append("COMPACT");
                    }
                    else if (layoutType == HDF5Constants.H5D_CONTIGUOUS) {
                        storageLayout.append("CONTIGUOUS");
                        if (H5.H5Pget_external_count(pcid) > 0)
                            storageLayout.append(" - EXTERNAL ");
                    }
                    else if (layoutType == HDF5Constants.H5D_VIRTUAL) {
                        storageLayout.append("VIRTUAL - ");
                        try {
                            long vmaps = H5.H5Pget_virtual_count(pcid);
                            try {
                                int virtView = H5.H5Pget_virtual_view(paid);
                                long virtGap = H5.H5Pget_virtual_printf_gap(paid);
                                if (virtView == HDF5Constants.H5D_VDS_FIRST_MISSING)
                                    storageLayout.append("First Missing");
                                else
                                    storageLayout.append("Last Available");
                                storageLayout.append("\nGAP : ").append(virtGap);
                            }
                            catch (Exception err) {
                                log.debug("getMetadata(): vds error: ", err);
                                storageLayout.append("ERROR");
                            }
                            storageLayout.append("\nMAPS : ").append(vmaps);
                            if (vmaps > 0) {
                                for (long next = 0; next < vmaps; next++) {
                                    try {
                                        H5.H5Pget_virtual_vspace(pcid, next);
                                        H5.H5Pget_virtual_srcspace(pcid, next);
                                        String fname = H5.H5Pget_virtual_filename(pcid, next);
                                        String dsetname = H5.H5Pget_virtual_dsetname(pcid, next);
                                        storageLayout.append("\n").append(fname).append(" : ").append(dsetname);
                                    }
                                    catch (Exception err) {
                                        log.debug("getMetadata(): vds space[{}] error: ", next, err);
                                        storageLayout.append("ERROR");
                                    }
                                }
                            }
                        }
                        catch (Exception err) {
                            log.debug("getMetadata(): vds count error: ", err);
                            storageLayout.append("ERROR");
                        }
                    }
                    else {
                        chunkSize = null;
                        storageLayout.append("NONE");
                    }

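                    // Query the filter pipeline. cdNelmts is an in/out parameter: on input it
                    // holds the capacity of cdValues; on output, the number of values returned.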
                    int[] flags = { 0, 0 };
                    long[] cdNelmts = { 20 };
                    int[] cdValues = new int[(int) cdNelmts[0]];
                    String[] cdName = { "", "" };
                    log.trace("getMetadata(): {} filters in pipeline", nfilt);
                    int filter = -1;
                    int[] filterConfig = { 1 };

                    filters.setLength(0);

                    if (nfilt == 0) {
                        filters.append("NONE");
                    }
                    else {
                        for (int i = 0, k = 0; i < nfilt; i++) {
                            log.trace("getMetadata(): filter[{}]", i);
                            if (i > 0)
                                filters.append(", ");
                            if (k > 0)
                                compression.append(", ");

                            try {
                                cdNelmts[0] = 20;
                                cdValues = new int[(int) cdNelmts[0]];
                                filter = H5.H5Pget_filter(pcid, i, flags, cdNelmts, cdValues, 120, cdName, filterConfig);
                                log.trace("getMetadata(): filter[{}] is {} and has {} elements ", i, cdName[0], cdNelmts[0]);
                                for (int j = 0; j < cdNelmts[0]; j++)
                                    log.trace("getMetadata(): filter[{}] element {} = {}", i, j, cdValues[j]);
                            }
                            catch (Exception err) {
                                log.debug("getMetadata(): filter[{}] error: ", i, err);
                                filters.append("ERROR");
                                continue;
                            }

                            if (filter == HDF5Constants.H5Z_FILTER_NONE) {
                                filters.append("NONE");
                            }
                            else if (filter == HDF5Constants.H5Z_FILTER_DEFLATE) {
                                filters.append("GZIP");
                                compression.append(COMPRESSION_GZIP_TXT).append(cdValues[0]);
                                k++;
                            }
                            else if (filter == HDF5Constants.H5Z_FILTER_FLETCHER32) {
                                filters.append("Error detection filter");
                            }
                            else if (filter == HDF5Constants.H5Z_FILTER_SHUFFLE) {
                                filters.append("SHUFFLE: Nbytes = ").append(cdValues[0]);
                            }
                            else if (filter == HDF5Constants.H5Z_FILTER_NBIT) {
                                filters.append("NBIT");
                            }
                            else if (filter == HDF5Constants.H5Z_FILTER_SCALEOFFSET) {
                                filters.append("SCALEOFFSET: MIN BITS = ").append(cdValues[0]);
                            }
                            else if (filter == HDF5Constants.H5Z_FILTER_SZIP) {
                                filters.append("SZIP");
                                compression.append("SZIP: Pixels per block = ").append(cdValues[1]);
                                k++;
                                int flag = -1;
                                try {
                                    flag = H5.H5Zget_filter_info(filter);
                                }
                                catch (Exception ex) {
                                    log.debug("getMetadata(): H5Zget_filter_info failure: ", ex);
                                    flag = -1;
                                }
                                if (flag == HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED)
                                    compression.append(": H5Z_FILTER_CONFIG_DECODE_ENABLED");
                                else if ((flag == HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED)
                                        || (flag >= (HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED
                                                + HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED)))
                                    compression.append(": H5Z_FILTER_CONFIG_ENCODE_ENABLED");
                            }
                            else {
                                filters.append("USERDEFINED ").append(cdName[0]).append("(").append(filter).append("): ");
                                for (int j = 0; j < cdNelmts[0]; j++) {
                                    if (j > 0)
                                        filters.append(", ");
                                    filters.append(cdValues[j]);
                                }
                                log.debug("getMetadata(): filter[{}] is user defined compression", i);
                            }
                        } //  (int i=0; i<nfilt; i++)
                    }

                    if (compression.length() == 0)
                        compression.append("NONE");
                    log.trace("getMetadata(): filter compression={}", compression);
                    log.trace("getMetadata(): filter information={}", filters);

                    storage.setLength(0);
                    storage.append("SIZE: ").append(storageSize);

                    try {
                        int[] at = { 0 };
                        H5.H5Pget_alloc_time(pcid, at);
                        storage.append(", allocation time: ");
                        if (at[0] == HDF5Constants.H5D_ALLOC_TIME_EARLY)
                            storage.append("Early");
                        else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_INCR)
                            storage.append("Incremental");
                        else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_LATE)
                            storage.append("Late");
                        else
                            storage.append("Default");
                    }
                    catch (Exception ex) {
                        log.debug("getMetadata(): Storage allocation time: ", ex);
                    }
                    log.trace("getMetadata(): storage={}", storage);
                }
                finally {
                    try {
                        H5.H5Pclose(paid);
                    }
                    catch (Exception ex) {
                        log.debug("getMetadata(): H5Pclose(paid {}) failure: ", paid, ex);
                    }
                    try {
                        H5.H5Pclose(pcid);
                    }
                    catch (Exception ex) {
                        log.debug("getMetadata(): H5Pclose(pcid {}) failure: ", pcid, ex);
                    }
                    close(did);
                }
            }
        }

        List<Attribute> attrlist = null;
        try {
            attrlist = objMetadata.getMetadata(attrPropList);
        }
        catch (Exception ex) {
            log.debug("getMetadata(): getMetadata failed: ", ex);
        }
        return attrlist;
    }

    /**
     * Writes a specific piece of metadata (such as an attribute) into the file.
     *
     * If an HDF(4&amp;5) attribute exists in the file, this method updates its
     * value. If the attribute does not exist, this method creates the attribute
     * in the file and attaches it to the object. Writing a new attribute fails
     * if an attribute with the same name already exists. To update the value of
     * an existing attribute in the file, get the attribute instance via
     * getMetadata(), change its values, and then use writeMetadata() to write
     * the new values back, as sketched below.
     *
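     * A minimal sketch of this update workflow (<code>dset</code> and the chosen
     * attribute index are illustrative placeholders):
     *
     * <pre>
     * List&lt;Attribute&gt; attrs = dset.getMetadata();
     * Attribute attr = attrs.get(0);  // the attribute to update
     * // ... modify the attribute's value in place ...
     * dset.writeMetadata(attr);       // write the updated attribute back
     * </pre>
     *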
     * @param info
     *            the metadata to write.
     *
     * @throws Exception
     *             if the metadata cannot be written
     */
    @Override
    public void writeMetadata(Object info) throws Exception {
        try {
            objMetadata.writeMetadata(info);
        }
        catch (Exception ex) {
            log.debug("writeMetadata(): Object not an Attribute", ex);
        }
    }

    /**
     * Deletes an existing piece of metadata from this object.
     *
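     * For example, a sketch of removing an attribute previously fetched with
     * getMetadata() (<code>dset</code> and the index 0 are illustrative placeholders):
     *
     * <pre>
     * List&lt;Attribute&gt; attrs = dset.getMetadata();
     * dset.removeMetadata(attrs.get(0)); // deletes the attribute from the file
     * </pre>
     *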
     * @param info
     *            the metadata to delete.
     *
     * @throws HDF5Exception
     *             if the metadata cannot be removed
     */
    @Override
    public void removeMetadata(Object info) throws HDF5Exception {
        try {
            objMetadata.removeMetadata(info);
        }
        catch (Exception ex) {
            log.debug("removeMetadata(): Object not an Attribute", ex);
            return;
        }

        Attribute attr = (Attribute) info;
        log.trace("removeMetadata(): {}", attr.getAttributeName());
        long did = open();
        if (did >= 0) {
            try {
                H5.H5Adelete(did, attr.getAttributeName());
            }
            finally {
                close(did);
            }
        }
        else {
            log.debug("removeMetadata(): failed to open compound dataset");
        }
    }

    /**
     * Updates an existing piece of metadata attached to this object.
     *
     * @param info
     *            the metadata to update.
     *
     * @throws HDF5Exception
     *             if the metadata cannot be updated
     */
    @Override
    public void updateMetadata(Object info) throws HDF5Exception {
        try {
            objMetadata.updateMetadata(info);
        }
        catch (Exception ex) {
            log.debug("updateMetadata(): Object not an Attribute", ex);
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#setName(java.lang.String)
     */
    @Override
    public void setName(String newName) throws Exception {
        if (newName == null)
            throw new IllegalArgumentException("The new name is NULL");

        H5File.renameObject(this, newName);
        super.setName(newName);
    }

    /**
     * @deprecated Not for public use in the future. <br>
     *             Use
     *             {@link #create(String, Group, long[], long[], long[], int, String[], Datatype[], int[], long[][], Object)}
     *
     * @param name
     *            the name of the dataset to create.
     * @param pgroup
     *            parent group where the new dataset is created.
     * @param dims
     *            the dimension sizes of the dataset.
     * @param memberNames
     *            the names of the compound datatype members
     * @param memberDatatypes
     *            the datatypes of the compound datatype members
     * @param memberSizes
     *            the dimension sizes of the members
     * @param data
     *            list of data arrays written to the new dataset, null if no data is written to the new
     *            dataset.
     *
     * @return the new compound dataset if successful; otherwise returns null.
     *
     * @throws Exception
     *             if there is a failure.
     */
    @Deprecated
    public static Dataset create(String name, Group pgroup, long[] dims, String[] memberNames,
            Datatype[] memberDatatypes, int[] memberSizes, Object data) throws Exception {
        if ((pgroup == null) || (name == null) || (dims == null) || (memberNames == null)
                || (memberDatatypes == null) || (memberSizes == null)) {
            return null;
        }

        int nMembers = memberNames.length;
        int[] memberRanks = new int[nMembers];
        long[][] memberDims = new long[nMembers][1];
        for (int i = 0; i < nMembers; i++) {
            memberRanks[i] = 1;
            memberDims[i][0] = memberSizes[i];
        }

        return H5CompoundDS.create(name, pgroup, dims, memberNames, memberDatatypes, memberRanks, memberDims, data);
    }

    /**
     * @deprecated Not for public use in the future. <br>
     *             Use
     *             {@link #create(String, Group, long[], long[], long[], int, String[], Datatype[], int[], long[][], Object)}
     *
     * @param name
     *            the name of the dataset to create.
     * @param pgroup
     *            parent group where the new dataset is created.
     * @param dims
     *            the dimension sizes of the dataset.
     * @param memberNames
     *            the names of the compound datatype members
     * @param memberDatatypes
     *            the datatypes of the compound datatype members
     * @param memberRanks
     *            the ranks of the members
     * @param memberDims
     *            the dimension sizes of the members
     * @param data
     *            list of data arrays written to the new dataset, null if no data is written to the new
     *            dataset.
     *
     * @return the new compound dataset if successful; otherwise returns null.
     *
     * @throws Exception
     *             if the dataset cannot be created.
     */
    @Deprecated
    public static Dataset create(String name, Group pgroup, long[] dims, String[] memberNames,
            Datatype[] memberDatatypes, int[] memberRanks, long[][] memberDims, Object data) throws Exception {
        return H5CompoundDS.create(name, pgroup, dims, null, null, -1, memberNames, memberDatatypes, memberRanks,
                memberDims, data);
    }

    /**
     * Creates a simple compound dataset in a file with/without chunking and compression.
     *
     * This function provides an easy way to create a simple compound dataset in a file by hiding the
     * tedious details of creating a compound dataset from users.
     *
     * This function calls H5.H5Dcreate() to create a simple compound dataset in a file. Nested compound
     * datasets are not supported. The required information to create a compound dataset includes the
     * name, the parent group and data space of the dataset, and the names, datatypes and data spaces of
     * the compound fields. Other information, such as chunks, compression and the data buffer, is optional.
     *
     * The following example shows how to use this function to create a compound dataset in a file.
     *
     * <pre>
     * H5File file = null;
     * String message = &quot;&quot;;
     * Group pgroup = null;
     * int[] DATA_INT = new int[DIM_SIZE];
     * float[] DATA_FLOAT = new float[DIM_SIZE];
     * String[] DATA_STR = new String[DIM_SIZE];
     * long[] DIMs = { 50, 10 };
     * long[] CHUNKs = { 25, 5 };
     *
     * try {
     *     file = (H5File) H5FILE.open(fname, H5File.CREATE);
     *     file.open();
     *     pgroup = (Group) file.get(&quot;/&quot;);
     * }
     * catch (Exception ex) {
     * }
     *
     * Vector data = new Vector();
     * data.add(0, DATA_INT);
     * data.add(1, DATA_FLOAT);
     * data.add(2, DATA_STR);
     *
     * // create member datatypes
     * Datatype[] mdtypes = new H5Datatype[3];
     * String[] mnames = { &quot;int&quot;, &quot;float&quot;, &quot;string&quot; };
     * Dataset dset = null;
     * try {
     *     mdtypes[0] = new H5Datatype(Datatype.CLASS_INTEGER, 4, Datatype.NATIVE, Datatype.NATIVE);
     *     mdtypes[1] = new H5Datatype(Datatype.CLASS_FLOAT, 4, Datatype.NATIVE, Datatype.NATIVE);
     *     mdtypes[2] = new H5Datatype(Datatype.CLASS_STRING, STR_LEN, Datatype.NATIVE, Datatype.NATIVE);
     *     dset = file.createCompoundDS(&quot;/CompoundDS&quot;, pgroup, DIMs, null, CHUNKs, 9, mnames, mdtypes, null, data);
     * }
     * catch (Exception ex) {
     *     failed(message, ex, file);
     *     return 1;
     * }
     * </pre>
     *
     * @param name
     *            the name of the dataset to create.
     * @param pgroup
     *            parent group where the new dataset is created.
     * @param dims
     *            the dimension sizes of the dataset.
     * @param maxdims
     *            the max dimension sizes of the dataset. maxdims is set to dims if maxdims = null.
     * @param chunks
     *            the chunk sizes of the dataset. No chunking if chunks = null.
     * @param gzip
     *            GZIP compression level (1 to 9). No compression if gzip is 0 or negative.
     * @param memberNames
     *            the names of the compound datatype members
     * @param memberDatatypes
     *            the datatypes of the compound datatype members
     * @param memberRanks
     *            the ranks of the members
     * @param memberDims
     *            the dimension sizes of the members
     * @param data
     *            list of data arrays written to the new dataset, null if no data is written to the new
     *            dataset.
     *
     * @return the new compound dataset if successful; otherwise returns null.
     *
     * @throws Exception
     *             if there is a failure.
     */
    public static Dataset create(String name, Group pgroup, long[] dims, long[] maxdims, long[] chunks, int gzip,
            String[] memberNames, Datatype[] memberDatatypes, int[] memberRanks, long[][] memberDims, Object data) throws Exception {
        H5CompoundDS dataset = null;
        String fullPath = null;
        long did = HDF5Constants.H5I_INVALID_HID;
        long plist = HDF5Constants.H5I_INVALID_HID;
        long sid = HDF5Constants.H5I_INVALID_HID;
        long tid = HDF5Constants.H5I_INVALID_HID;

        if ((pgroup == null) || (name == null) || (dims == null) || ((gzip > 0) && (chunks == null))
                || (memberNames == null) || (memberDatatypes == null) || (memberRanks == null)
                || (memberDims == null)) {
            log.debug("create(): one or more parameters are null");
            return null;
        }

        H5File file = (H5File) pgroup.getFileFormat();
        if (file == null) {
            log.debug("create(): parent group FileFormat is null");
            return null;
        }

        String path = HObject.SEPARATOR;
        if (!pgroup.isRoot()) {
            path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR;
            if (name.endsWith("/"))
                name = name.substring(0, name.length() - 1);
            int idx = name.lastIndexOf('/');
            if (idx >= 0)
                name = name.substring(idx + 1);
        }

        fullPath = path + name;

        int typeSize = 0;
        int nMembers = memberNames.length;
        long[] mTypes = new long[nMembers];
        int memberSize = 1;
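        // Build a native HDF5 datatype for each member; members with more than one
        // element (other than strings) are wrapped in an array datatype.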
        for (int i = 0; i < nMembers; i++) {
            memberSize = 1;
            for (int j = 0; j < memberRanks[i]; j++)
                memberSize *= memberDims[i][j];

            mTypes[i] = -1;
            // the member is an array
            if ((memberSize > 1) && (!memberDatatypes[i].isString())) {
                long tmptid = -1;
                if ((tmptid = memberDatatypes[i].createNative()) >= 0) {
                    try {
                        mTypes[i] = H5.H5Tarray_create(tmptid, memberRanks[i], memberDims[i]);
                    }
                    finally {
                        try {
                            H5.H5Tclose(tmptid);
                        }
                        catch (Exception ex) {
                            log.debug("create(): H5Tclose(tmptid {}) failure: ", tmptid, ex);
                        }
                    }
                }
            }
            else {
                mTypes[i] = memberDatatypes[i].createNative();
            }
            try {
                typeSize += H5.H5Tget_size(mTypes[i]);
            }
            catch (Exception ex) {
                log.debug("create(): array create H5Tget_size: ", ex);

                while (i >= 0) {
                    try {
                        H5.H5Tclose(mTypes[i]);
                    }
                    catch (HDF5Exception ex2) {
                        log.debug("create(): H5Tclose(mTypes[{}] {}) failure: ", i, mTypes[i], ex2);
                    }
                    i--;
                }
                throw ex;
            }
        } //  (int i = 0; i < nMembers; i++)

        // setup chunking and compression
        boolean isExtendable = false;
        if (maxdims != null) {
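            // By convention here, a maxdims entry of 0 means "same as dims" and a
            // negative entry means unlimited; any difference from dims makes the
            // dataset extendable.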
            for (int i = 0; i < maxdims.length; i++) {
                if (maxdims[i] == 0)
                    maxdims[i] = dims[i];
                else if (maxdims[i] < 0)
                    maxdims[i] = HDF5Constants.H5S_UNLIMITED;

                if (maxdims[i] != dims[i])
                    isExtendable = true;
            }
        }

        // HDF5 requires chunking in order to define extendable datasets. Chunking
        // makes it possible to extend datasets efficiently without reorganizing
        // storage excessively, so fall back to a default chunk size of at most 64
        // in each dimension, which generally performs well.
        if ((chunks == null) && isExtendable) {
            chunks = new long[dims.length];
            for (int i = 0; i < dims.length; i++)
                chunks[i] = Math.min(dims[i], 64);
        }

        // prepare the dataspace and datatype
        int rank = dims.length;

        try {
            sid = H5.H5Screate_simple(rank, dims, maxdims);

            // figure out creation properties
            plist = HDF5Constants.H5P_DEFAULT;

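            // Build the compound datatype by inserting each member at its byte offset.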
            tid = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, typeSize);
            int offset = 0;
            for (int i = 0; i < nMembers; i++) {
                H5.H5Tinsert(tid, memberNames[i], offset, mTypes[i]);
                offset += H5.H5Tget_size(mTypes[i]);
            }

            if (chunks != null) {
                plist = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);

                H5.H5Pset_layout(plist, HDF5Constants.H5D_CHUNKED);
                H5.H5Pset_chunk(plist, rank, chunks);

                // compression requires chunking
                if (gzip > 0) {
                    H5.H5Pset_deflate(plist, gzip);
                }
            }

            long fid = file.getFID();

            did = H5.H5Dcreate(fid, fullPath, tid, sid, HDF5Constants.H5P_DEFAULT, plist, HDF5Constants.H5P_DEFAULT);
            dataset = new H5CompoundDS(file, name, path);
        }
        finally {
            try {
                H5.H5Pclose(plist);
            }
            catch (HDF5Exception ex) {
                log.debug("create(): H5Pclose(plist {}) failure: ", plist, ex);
            }
            try {
                H5.H5Sclose(sid);
            }
            catch (HDF5Exception ex) {
                log.debug("create(): H5Sclose(sid {}) failure: ", sid, ex);
            }
            try {
                H5.H5Tclose(tid);
            }
            catch (HDF5Exception ex) {
                log.debug("create(): H5Tclose(tid {}) failure: ", tid, ex);
            }
            try {
                H5.H5Dclose(did);
            }
            catch (HDF5Exception ex) {
                log.debug("create(): H5Dclose(did {}) failure: ", did, ex);
            }

            for (int i = 0; i < nMembers; i++) {
                try {
                    H5.H5Tclose(mTypes[i]);
                }
                catch (HDF5Exception ex) {
                    log.debug("create(): H5Tclose(mTypes[{}] {}) failure: ", i, mTypes[i], ex);
                }
            }
        }

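        // Register the new dataset with its parent group; if data was supplied,
        // select the full extent and write it out.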
        if (dataset != null) {
            pgroup.addToMemberList(dataset);
            if (data != null) {
                dataset.init();
                long[] selected = dataset.getSelectedDims();
                for (int i = 0; i < rank; i++)
                    selected[i] = dims[i];
                dataset.write(data);
            }
        }

        return dataset;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#isString(long)
     */
    @Override
    public boolean isString(long tid) {
        boolean b = false;
        try {
            b = (HDF5Constants.H5T_STRING == H5.H5Tget_class(tid));
        }
        catch (Exception ex) {
            b = false;
        }

        return b;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#getSize(long)
     */
    @Override
    public long getSize(long tid) {
        long tsize = -1;

        try {
            tsize = H5.H5Tget_size(tid);
        }
        catch (Exception ex) {
            tsize = -1;
        }

        return tsize;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#isVirtual()
     */
    @Override
    public boolean isVirtual() {
        return isVirtual;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#getVirtualFilename(int)
     */
    @Override
    public String getVirtualFilename(int index) {
        if (isVirtual)
            return virtualNameList.get(index);
        else
            return null;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#getVirtualMaps()
     */
    @Override
    public int getVirtualMaps() {
        if (isVirtual)
            return virtualNameList.size();
        else
            return -1;
    }

}