001/*****************************************************************************
002 * Copyright by The HDF Group.                                               *
003 * Copyright by the Board of Trustees of the University of Illinois.         *
004 * All rights reserved.                                                      *
005 *                                                                           *
006 * This file is part of the HDF Java Products distribution.                  *
007 * The full copyright notice, including terms governing use, modification,   *
008 * and redistribution, is contained in the files COPYING and Copyright.html. *
009 * COPYING can be found at the root of the source code distribution tree.    *
010 * Or, see https://support.hdfgroup.org/products/licenses.html               *
011 * If you do not have access to either file, you may request a copy from     *
012 * help@hdfgroup.org.                                                        *
013 ****************************************************************************/
014
015package hdf.object.h4;
016
017import java.util.List;
018import java.util.Vector;
019
020import hdf.hdflib.HDFConstants;
021import hdf.hdflib.HDFException;
022import hdf.hdflib.HDFLibrary;
023import hdf.object.Attribute;
024import hdf.object.CompoundDS;
025import hdf.object.Dataset;
026import hdf.object.Datatype;
027import hdf.object.FileFormat;
028import hdf.object.Group;
029
030/**
031 * H4Vdata describes a multi-dimension array of HDF4 vdata, inheriting CompoundDS.
032 * <p>
033 * A vdata is like a table that consists of a collection of records whose values
034 * are stored in fixed-length fields. All records have the same structure and
035 * all values in each field have the same data type. Vdatas are uniquely
036 * identified by a name, a class, and a series of individual field names.
037 * <p>
038 * <b>How to Select a Subset</b>
039 * <p>
040 * Dataset defines APIs for reading, writing and subsetting a dataset. No function is
041 * defined to select a subset of a data array. The selection is done in an implicit way.
042 * Function calls to dimension information such as getSelectedDims() return an array
043 * of dimension values, which is a reference to the array in the dataset object.
044 * Changes of the array outside the dataset object directly change the values of
045 * the array in the dataset object. It is like pointers in C.
046 * <p>
047 *
048 * The following is an example of how to make a subset. In the example, the dataset
049 * is a 4-dimension with size of [200][100][50][10], i.e.
050 * dims[0]=200; dims[1]=100; dims[2]=50; dims[3]=10; <br>
051 * We want to select every other data point in dims[1] and dims[2]
052 * <pre>
053     int rank = dataset.getRank();   // number of dimensions of the dataset
054     long[] dims = dataset.getDims(); // the dimension sizes of the dataset
     long[] selected = dataset.getSelectedDims(); // the selected size of the dataset
056     long[] start = dataset.getStartDims(); // the offset of the selection
057     long[] stride = dataset.getStride(); // the stride of the dataset
058     int[]  selectedIndex = dataset.getSelectedIndex(); // the selected dimensions for display
059
060     // select dim1 and dim2 as 2D data for display, and slice through dim0
061     selectedIndex[0] = 1;
062     selectedIndex[1] = 2;
     selectedIndex[2] = 0;
064
065     // reset the selection arrays
066     for (int i=0; i&lt;rank; i++) {
067         start[i] = 0;
068         selected[i] = 1;
069         stride[i] = 1;
070    }
071
072    // set stride to 2 on dim1 and dim2 so that every other data point is selected.
073    stride[1] = 2;
074    stride[2] = 2;
075
076    // set the selection size of dim1 and dim2
077    selected[1] = dims[1]/stride[1];
    selected[2] = dims[2]/stride[2];
079
080    // when dataset.read() is called, the selection above will be used since
    // the dimension arrays are passed by reference. Changes of these arrays
082    // outside the dataset object directly change the values of these array
083    // in the dataset object.
084
085 * </pre>
086 *
087 * @version 1.1 9/4/2007
088 * @author Peter X. Cao
089 */
090public class H4Vdata extends CompoundDS
091{
    private static final long serialVersionUID = -5978700886955419959L;

    private static final org.slf4j.Logger       log = org.slf4j.LoggerFactory.getLogger(H4Vdata.class);

    /**
     * The list of attributes of this data object. Members of the list are
     * instance of Attribute.
     */
    @SuppressWarnings("rawtypes")
    private List                                attributeList;

    /**
     * Number of records of this Vdata table.
     */
    private int                                 numberOfRecords;

    /**
     * The data types of the members of the compound dataset.
     */
    private long[]                              memberTIDs;

    /**
     * Number of attributes attached to this Vdata; -1 means the count has not
     * been queried from the file yet (see hasAttribute()).
     */
    private int                                 nAttributes = -1;
114
115
    /**
     * Creates an H4Vdata object with specific name and path.
     *
     * @param theFile the HDF file.
     * @param name the name of this H4Vdata.
     * @param path the full path of this H4Vdata.
     */
    public H4Vdata(FileFormat theFile, String name, String path)
    {
        this(theFile, name, path, null);
    }
120
121    /**
122     * Creates an H4Vdata object with specific name and path.
123     *
124     * @param theFile the HDF file.
125     * @param name the name of this H4Vdata.
126     * @param path the full path of this H4Vdata.
127     * @param oid the unique identifier of this data object.
128     */
129    @SuppressWarnings("deprecation")
130    public H4Vdata(
131            FileFormat theFile,
132            String name,
133            String path,
134            long[] oid)
135    {
136        super (theFile, name, path, oid);
137        numberOfRecords = 0;
138        numberOfMembers = 0;
139        memberOrders = null;
140    }
141
142    /*
143     * (non-Javadoc)
144     * @see hdf.object.DataFormat#hasAttribute()
145     */
146    @Override
147    public boolean hasAttribute ()
148    {
149        if (nAttributes < 0) {
150            long id = open();
151
152            if (id >= 0) {
153                try {
154                    nAttributes = HDFLibrary.VSnattrs(id);
155                }
156                catch (Exception ex) {
157                    log.debug("hasAttribute() failure: ", ex);
158                    nAttributes = 0;
159                }
160
161                log.trace("hasAttribute(): nAttributes={}", nAttributes);
162
163                close(id);
164            }
165        }
166
167        return (nAttributes>0);
168    }
169
170    // implementing Dataset
171    @Override
172    public Datatype getDatatype()
173    {
174        if (!inited)
175            init();
176
177        if (datatype == null) {
178            try {
179                datatype = new H4Datatype(-1);
180            }
181            catch (Exception ex) {
182                log.debug("getDatatype(): failed to create datatype: ", ex);
183                datatype = null;
184            }
185        }
186
187        return datatype;
188    }
189
    /**
     * Vdata objects carry no fill value; always returns {@code null}.
     */
    @Override
    public Object getFillValue() {
        return null;
    }
194
195    // Implementing Dataset
196    @Override
197    public byte[] readBytes() throws HDFException
198    {
199        byte[] theData = null;
200
201        if (!isInited())
202            init();
203
204        if (numberOfMembers <= 0) {
205            log.debug("readBytes(): VData contains no members");
206            return null; // this Vdata does not have any filed
207        }
208
209        long id = open();
210        if (id < 0) {
211            log.debug("readBytes(): Invalid VData ID");
212            return null;
213        }
214
215        String allNames = memberNames[0];
216        for (int i=0; i<numberOfMembers; i++) {
217            allNames += ","+memberNames[i];
218        }
219
220        try {
221            // moves the access pointer to the start position
222            HDFLibrary.VSseek(id, (int)startDims[0]);
223            // Specify the fields to be accessed
224            HDFLibrary.VSsetfields(id, allNames);
225            int[] recordSize = {0};
226            HDFLibrary.VSQueryvsize(id, recordSize);
227            int size =recordSize[0] * (int)selectedDims[0];
228            theData = new byte[size];
229            HDFLibrary.VSread(
230                    id,
231                    theData,
232                    (int)selectedDims[0],
233                    HDFConstants.FULL_INTERLACE);
234        }
235        catch (Exception ex) {
236            log.debug("readBytes(): failure: ", ex);
237        }
238        finally {
239            close(id);
240        }
241
242        return theData;
243    }
244
245    // Implementing DataFormat
246    @SuppressWarnings({ "rawtypes", "unchecked", "deprecation" })
247    @Override
248    public Object read() throws HDFException
249    {
250        List list = null;
251
252        if (!isInited())
253            init();
254
255        if (numberOfMembers <= 0) {
256            log.debug("read(): VData contains no members");
257            return null; // this Vdata does not have any filed
258        }
259
260        long id = open();
261        if (id < 0) {
262            log.debug("read(): Invalid VData ID");
263            return null;
264        }
265
266        list = new Vector();
267
268        // assume external data files are located in the same directory as the main file.
269        HDFLibrary.HXsetdir(getFileFormat().getParent());
270
271        Object member_data = null;
272        for (int i=0; i<numberOfMembers; i++) {
273            if (!isMemberSelected[i]) {
274                continue;
275            }
276
277            try {
278                // moves the access pointer to the start position
279                HDFLibrary.VSseek(id, (int)startDims[0]);
280                // Specify the fields to be accessed
281                HDFLibrary.VSsetfields(id, memberNames[i]);
282            }
283            catch (HDFException ex) {
284                log.debug("read(): failure: ", ex);
285                isMemberSelected[i] = false;
286                continue;
287            }
288
289            int n = memberOrders[i]*(int)selectedDims[0];
290
291            member_data = H4Datatype.allocateArray(memberTIDs[i], n);
292
293            log.trace("read(): index={} isMemberSelected[i]={} memberOrders[i]={} array size={}", i, isMemberSelected[i], memberOrders[i], n);
294            if (member_data == null) {
295                String[] nullValues = new String[n];
296                for (int j=0; j<n; j++) {
297                    nullValues[j] = "*ERROR*";
298                }
299                list.add(nullValues);
300                continue;
301            }
302
303            try {
304                HDFLibrary.VSread(
305                        id,
306                        member_data,
307                        (int)selectedDims[0],
308                        HDFConstants.FULL_INTERLACE);
309                if ((memberTIDs[i] == HDFConstants.DFNT_CHAR) ||
310                        (memberTIDs[i] ==  HDFConstants.DFNT_UCHAR8)) {
311                    // convert characters to string
312                    log.trace("read(): convert characters to string");
313                    member_data = Dataset.byteToString((byte[])member_data, memberOrders[i]);
314                    try {
315                        memberTypes[i] = new H4Datatype(Datatype.CLASS_STRING, memberOrders[i], Datatype.NATIVE, Datatype.NATIVE);
316                    }
317                    catch (Exception ex) {
318                        log.debug("read(): failed to create datatype for member[{}]: ", i, ex);
319                        memberTypes[i] = null;
320                    }
321                    memberOrders[i] = 1; //one String
322                }
323                else if (H4Datatype.isUnsigned(memberTIDs[i])) {
324                    // convert unsigned integer to appropriate Java integer
325                    log.trace("read(): convert unsigned integer to appropriate Java integer");
326                    member_data = Dataset.convertFromUnsignedC(member_data);
327                }
328            }
329            catch (HDFException ex) {
330                String[] nullValues = new String[n];
331                for (int j=0; j<n; j++) {
332                    nullValues[j] = "*ERROR*";
333                }
334                list.add(nullValues);
335                continue;
336            }
337
338            list.add(member_data);
339        } //  (int i=0; i<numberOfMembers; i++)
340
341        close(id);
342
343        return list;
344    }
345
    // Implementing DataFormat
    /**
     * Writing to an existing Vdata is disabled; this method is a no-op that
     * only emits a trace message. The commented-out implementation below is
     * retained for reference.
     *
     * @param buf the data to write (ignored).
     *
     * @throws HDFException declared for interface compatibility; never thrown.
     */
    @Override
    public void write(Object buf) throws HDFException
    {
        //For writing to a vdata, VSsetfields can only be called once, to set
        //up the fields in a vdata. Once the vdata fields are set, they may
        //not be changed. Thus, to update some fields of a record after the
        //first write, the user must read all the fields to a buffer, update
        //the buffer, then write the entire record back to the vdata.
        log.trace("write(): disabled");
        /*
        if (buf == null || numberOfMembers <= 0 || !(buf instanceof List))
            return; // no data to write

        List list = (List)buf;
        Object member_data = null;
        String member_name = null;

        int vid = open();
        if (vid < 0) return;

        int idx = 0;
        for (int i=0; i<numberOfMembers; i++) {
            if (!isMemberSelected[i])
                continue;

            HDFLibrary.VSsetfields(vid, memberNames[i]);

            try {
                // Specify the fields to be accessed

                // moves the access pointer to the start position
                HDFLibrary.VSseek(vid, (int)startDims[0]);
            }
            catch (HDFException ex) {
                continue;
            }

            member_data = list.get(idx++);
            if (member_data == null)
                continue;

            if (memberTIDs[i] == HDFConstants.DFNT_CHAR ||
                memberTIDs[i] ==  HDFConstants.DFNT_UCHAR8) {
                member_data = Dataset.stringToByte((String[])member_data, memberOrders[i]);
            }
            else if (H4Datatype.isUnsigned(memberTIDs[i])) {
                // convert unsigned integer to appropriate Java integer
                member_data = Dataset.convertToUnsignedC(member_data);
            }


            int interlace = HDFConstants.NO_INTERLACE;
            try {
                int write_num = HDFLibrary.VSwrite(
                    vid, member_data, (int)selectedDims[0], interlace);
            }
            catch (HDFException ex) {
                log.debug("write():", ex);
            }
        } //  (int i=0; i<numberOfMembers; i++)

        close(vid);
         */
    }
411
    /**
     * Unsupported for Vdata; unsigned conversion is done per-field inside
     * {@link #read()} instead.
     *
     * @throws UnsupportedOperationException always.
     */
    @Override
    public Object convertFromUnsignedC() {
        throw new UnsupportedOperationException("H4Vdata:convertFromUnsignedC Unsupported operation.");
    }
416
    /**
     * Unsupported for Vdata.
     *
     * @throws UnsupportedOperationException always.
     */
    @Override
    public Object convertToUnsignedC() {
        throw new UnsupportedOperationException("H4Vdata:convertToUnsignedC Unsupported operation.");
    }
421
422    // Implementing DataFormat
423    @Override
424    @SuppressWarnings({"rawtypes", "unchecked"})
425    public List getMetadata() throws HDFException
426    {
427        if (attributeList != null) {
428            log.trace("getMetdata(): attributeList != null");
429            return attributeList;
430        }
431
432        long id = open();
433
434        if (id < 0) {
435            log.debug("getMetadata(): Invalid VData ID");
436            return attributeList;
437        }
438
439        int n=0;
440        try {
441            n = HDFLibrary.VSnattrs(id);
442
443            if (n <= 0) {
444                log.debug("getMetadata(): VData number of attributes <= 0");
445                return attributeList;
446            }
447
448            attributeList = new Vector(n, 5);
449            boolean b = false;
450            String[] attrName = new String[1];
451            int[] attrInfo = new int[5];
452
453            // _HDF_VDATA (or -1) to specify the vdata attribute
454            int nleft = n;
455            for (int j=-1; j<numberOfMembers; j++) {
456                for (int i=0; i<nleft; i++) {
457                    attrName[0] = "";
458
459                    try {
460                        b = HDFLibrary.VSattrinfo(id, j, i, attrName, attrInfo);
461                        // mask off the litend bit
462                        attrInfo[0] = attrInfo[0] & (~HDFConstants.DFNT_LITEND);
463                    }
464                    catch (HDFException ex) {
465                        log.debug("getMetadata(): attribute[{}] VSattrinfo failure: ", i, ex);
466                        b = false;
467                        ex.printStackTrace();
468                    }
469
470                    if (!b || attrName[0].length()<=0) {
471                        continue;
472                    }
473
474                    long[] attrDims = {attrInfo[1]};
475                    Attribute attr = new Attribute(this, attrName[0], new H4Datatype(attrInfo[0]), attrDims);
476                    if (j>=0)
477                        attr.setProperty("field", memberNames[j]);
478                    attributeList.add(attr);
479
480                    Object buf = null;
481                    try {
482                        buf = H4Datatype.allocateArray(attrInfo[0], attrInfo[1]);
483                    }
484                    catch (OutOfMemoryError e) {
485                        log.debug("getMetadata(): out of memory: ", e);
486                        buf = null;
487                    }
488
489                    try {
490                        HDFLibrary.VSgetattr(id, j, i, buf);
491                    }
492                    catch (HDFException ex) {
493                        log.debug("getMetadata(): attribute[{}] VSgetattr failure: ", i, ex);
494                        buf = null;
495                    }
496
497                    if (buf != null) {
498                        if ((attrInfo[0] == HDFConstants.DFNT_CHAR) ||
499                                (attrInfo[0] ==  HDFConstants.DFNT_UCHAR8)) {
500                            buf = Dataset.byteToString((byte[])buf, attrInfo[1]);
501                        }
502
503                        attr.setData(buf);
504                        nleft--;
505                    }
506                } //  (int i=0; i<n; i++)
507            } //  (int j=-1; j<numberOfMembers; j++)
508        }
509        catch (Exception ex) {
510            log.debug("getMetadata(): failure: ", ex);
511        }
512        finally {
513            close(id);
514        }
515
516        // todo: We shall also load attributes of fields
517
518        return attributeList;
519    }
520
521    // To do: Implementing DataFormat
522    @Override
523    @SuppressWarnings({"rawtypes", "unchecked"})
524    public void writeMetadata(Object info) throws Exception
525    {
526        // only attribute metadata is supported.
527        if (!(info instanceof Attribute)) {
528            log.debug("writeMetadata(): Object not an Attribute");
529            return;
530        }
531
532        try {
533            getFileFormat().writeAttribute(this, (Attribute)info, true);
534
535            if (attributeList == null) {
536                attributeList = new Vector();
537            }
538
539            attributeList.add(info);
540            nAttributes = attributeList.size();
541        }
542        catch (Exception ex) {
543            log.trace("writeMetadata(): failure: ", ex);
544        }
545    }
546
    // To do: Implementing DataFormat
    /**
     * Removing metadata from a Vdata is not supported; this is a no-op.
     *
     * @param info the metadata to remove (ignored).
     *
     * @throws HDFException declared for interface compatibility; never thrown.
     */
    @Override
    public void removeMetadata(Object info) throws HDFException
    {
        log.trace("removeMetadata(): disabled");
    }
553
    // implementing DataFormat
    /**
     * Updating metadata of a Vdata is not supported; this is a no-op.
     *
     * @param info the metadata to update (ignored).
     *
     * @throws Exception declared for interface compatibility; never thrown.
     */
    @Override
    public void updateMetadata(Object info) throws Exception {
        log.trace("updateMetadata(): disabled");
    }
559
560    // Implementing DataFormat
561    @Override
562    public long open()
563    {
564        // try to open with write permission
565        long vsid = -1;
566        try {
567            vsid = HDFLibrary.VSattach(getFID(), (int)oid[1], "w");
568        }
569        catch (HDFException ex) {
570            log.debug("open(): VSattach failure: ", ex);
571            vsid = -1;
572        }
573
574        // try to open with read-only permission
575        if (vsid < 0) {
576            try {
577                vsid = HDFLibrary.VSattach(getFID(), (int)oid[1], "r");
578            }
579            catch (HDFException ex) {
580                log.debug("open(): VSattach failure: ", ex);
581                vsid = -1;
582            }
583        }
584
585        return vsid;
586    }
587
    // Implementing DataFormat
    /**
     * Detaches the given Vdata access id. Failures are logged and otherwise
     * ignored.
     *
     * @param vsid the Vdata id returned by {@link #open()}.
     */
    @Override
    public void close(long vsid)
    {
        try {
            HDFLibrary.VSdetach(vsid);
        }
        catch (Exception ex) {
            log.debug("close(): VSdetach failure: ", ex);
        }
    }
599
600    /**
601     * Initializes the H4Vdata such as dimension sizes of this dataset.
602     */
603    @Override
604    public void init()
605    {
606        if (inited) {
607            log.trace("init(): Already initialized");
608            return; // already called. Initialize only once
609        }
610
611        long id = open();
612        if (id < 0) {
613            log.debug("init(): Invalid VData ID");
614            return;
615        }
616
617        try {
618            numberOfMembers = HDFLibrary.VFnfields(id);
619            numberOfRecords = HDFLibrary.VSelts(id);
620        }
621        catch (HDFException ex) {
622            numberOfMembers = 0;
623            numberOfRecords = 0;
624        }
625
626        //        Still need to get information if there is no record, see bug 1738
627        //        if ((numberOfMembers <=0) || (numberOfRecords <= 0)) {
628        //            // no table field is defined or no records
629        //            close(id);
630        //            return;
631        //        }
632
633        // a Vdata table is an one dimension array of records.
634        // each record has the same fields
635        rank = 1;
636        dims = new long[1];
637        dims[0] = numberOfRecords;
638        selectedDims = new long[1];
639        selectedDims[0] = numberOfRecords;
640        selectedIndex[0] = 0;
641        startDims = new long[1];
642        startDims[0] = 0;
643
644        memberNames = new String[numberOfMembers];
645        memberTIDs = new long[numberOfMembers];
646        memberTypes = new Datatype[numberOfMembers];
647        memberOrders = new int[numberOfMembers];
648        isMemberSelected = new boolean[numberOfMembers];
649
650        try {
651            datatype = new H4Datatype(Datatype.CLASS_COMPOUND, -1, Datatype.NATIVE, Datatype.NATIVE);
652        }
653        catch (Exception ex) {
654            log.debug("init(): failed to create compound datatype for VData");
655            datatype = null;
656        }
657
658        for (int i = 0; i < numberOfMembers; i++) {
659            isMemberSelected[i] = true;
660            try {
661                memberNames[i] = HDFLibrary.VFfieldname(id, i);
662                memberTIDs[i] = HDFLibrary.VFfieldtype(id, i);
663                try {
664                    memberTypes[i] = new H4Datatype(memberTIDs[i]);
665                }
666                catch (Exception ex) {
667                    log.debug("init(): failed to create datatype for member[{}]: ", i, ex);
668                    memberTypes[i] = null;
669                }
670                // mask off the litend bit
671                memberTIDs[i] = memberTIDs[i] & (~HDFConstants.DFNT_LITEND);
672                memberOrders[i] = HDFLibrary.VFfieldorder(id, i);
673                log.trace("init():{}> isMemberSelected[i]={} memberNames[i]={} memberTIDs[i]={} memberOrders[i]={}", i, isMemberSelected[i], memberNames[i], memberTIDs[i], memberOrders[i]);
674
675                /*
676                 * NOTE: An ugly workaround to get HDF4 "compound" datatypes to work correctly.
677                 */
678                if (datatype != null) {
679                    datatype.getCompoundMemberNames().add(memberNames[i]);
680                    datatype.getCompoundMemberTypes().add(memberTypes[i]);
681                }
682            }
683            catch (HDFException ex) {
684                log.debug("init(): member[{}]: ", i, ex);
685                continue;
686            }
687        } //  (int i=0; i<numberOfMembers; i++)
688
689        inited = true;
690
691        close(id);
692    }
693
694    /**
695     * Returns the number of records.
696     *
697     * @return the number of records
698     */
699    public int getRecordCount()
700    {
701        return numberOfRecords;
702    }
703
704    /**
705     * Returns the number of fields.
706     *
707     * @return the number of fields
708     */
709    public int getFieldCount()
710    {
711        return numberOfMembers;
712    }
713
714    /**
715     * Returns the orders of fields
716     *
717     * @return the orders of fields
718     */
719    public int[] getFieldOrders()
720    {
721        return memberOrders;
722    }
723
    //Implementing DataFormat
    /**
     * Retrieving metadata with an attribute property list is not supported
     * for Vdata.
     *
     * @param attrPropList the attribute property list (ignored).
     *
     * @throws UnsupportedOperationException always.
     */
    @SuppressWarnings("rawtypes")
    public List getMetadata(int... attrPropList) throws Exception {
        throw new UnsupportedOperationException("getMetadata(int... attrPropList) is not supported");
    }
729
    /**
     * Copying a Vdata to a new dataset is not implemented.
     *
     * @throws UnsupportedOperationException always.
     */
    @Override
    public Dataset copy(Group pgroup, String name, long[] dims, Object data)
            throws Exception {
        throw new UnsupportedOperationException(
                "Writing a vdata to a new dataset is not implemented.");
    }
736}