/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the file COPYING.                     *
 * COPYING can be found at the root of the source code distribution tree.    *
 * If you do not have access to this file, you may request a copy from       *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.h4;

import java.util.List;
import java.util.Vector;

import hdf.hdflib.HDFConstants;
import hdf.hdflib.HDFException;
import hdf.hdflib.HDFLibrary;
import hdf.object.Attribute;
import hdf.object.CompoundDS;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;

/**
 * H4Vdata describes an HDF4 vdata, a table of records, and inherits CompoundDS.
 * <p>
 * A vdata is like a table that consists of a collection of records whose values
 * are stored in fixed-length fields. All records have the same structure and
 * all values in each field have the same data type. Vdatas are uniquely
 * identified by a name, a class, and a series of individual field names.
 * <p>
 * <b>How to Select a Subset</b>
 * <p>
 * Dataset defines APIs for reading, writing and subsetting a dataset. No function is
 * defined to select a subset of a data array. The selection is done in an implicit way.
 * Function calls to dimension information such as getSelectedDims() return an array
 * of dimension values, which is a reference to the array in the dataset object.
 * Changes to the array outside the dataset object directly change the values of
 * the array in the dataset object. It is like pointers in C.
 * <p>
 *
 * The following is an example of how to make a subset. In the example, the dataset
 * is a 4-dimensional array of size [200][100][50][10], i.e.
 * dims[0]=200; dims[1]=100; dims[2]=50; dims[3]=10; <br>
 * We want to select every other data point in dims[1] and dims[2].
 * <pre>
     int rank = dataset.getRank();   // number of dimensions of the dataset
     long[] dims = dataset.getDims(); // the dimension sizes of the dataset
     long[] selected = dataset.getSelectedDims(); // the selected size of the dataset
     long[] start = dataset.getStartDims(); // the offset of the selection
     long[] stride = dataset.getStride(); // the stride of the dataset
     int[]  selectedIndex = dataset.getSelectedIndex(); // the selected dimensions for display

     // select dim1 and dim2 as 2D data for display, and slice through dim0
     selectedIndex[0] = 1;
     selectedIndex[1] = 2;
     selectedIndex[2] = 0;

     // reset the selection arrays
     for (int i=0; i&lt;rank; i++) {
         start[i] = 0;
         selected[i] = 1;
         stride[i] = 1;
    }

    // set stride to 2 on dim1 and dim2 so that every other data point is selected.
    stride[1] = 2;
    stride[2] = 2;

    // set the selection size of dim1 and dim2
    selected[1] = dims[1]/stride[1];
    selected[2] = dims[2]/stride[2];

    // when dataset.read() is called, the selection above will be used since
    // the dimension arrays are passed by reference. Changes to these arrays
    // outside the dataset object directly change the values of these arrays
    // in the dataset object.

 * </pre>
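 *
 * A minimal sketch of reading the selected records through the FileFormat API.
 * The file name "vdata.hdf" and the object path "/MyVdata" below are illustrative
 * placeholders, not part of this class:
 * <pre>
    FileFormat file = FileFormat.getInstance("vdata.hdf"); // hypothetical file name
    file.open();
    H4Vdata vdata = (H4Vdata) file.get("/MyVdata");        // hypothetical vdata path
    vdata.init();                                          // discover fields and records

    // read() returns a List with one data array per selected field
    List data = (List) vdata.read();
    for (int i = 0; i &lt; data.size(); i++) {
        Object fieldData = data.get(i); // e.g. int[], float[] or String[]
    }
    file.close();
 * </pre>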
 *
 * @version 1.1 9/4/2007
 * @author Peter X. Cao
 */
public class H4Vdata extends CompoundDS
{
    private static final long serialVersionUID = -5978700886955419959L;

    private final static org.slf4j.Logger       log = org.slf4j.LoggerFactory.getLogger(H4Vdata.class);

    /**
     * The list of attributes of this data object. Members of the list are
     * instances of Attribute.
     */
    @SuppressWarnings("rawtypes")
    private List                                attributeList;

    /**
     * Number of records of this Vdata table.
     */
    private int                                 numberOfRecords;

    /**
     * The data types of the members of the compound dataset.
     */
    private int[] memberTIDs;

    private int                                 nAttributes = -1;


    /**
     * Creates an H4Vdata object with specific name and path.
     *
     * @param theFile the HDF file.
     * @param name the name of this H4Vdata.
     * @param path the full path of this H4Vdata.
     */
    public H4Vdata(FileFormat theFile, String name, String path)
    {
        this(theFile, name, path, null);
    }
    /**
     * Creates an H4Vdata object with specific name and path.
     *
     * @param theFile the HDF file.
     * @param name the name of this H4Vdata.
     * @param path the full path of this H4Vdata.
     * @param oid the unique identifier of this data object.
     */
    public H4Vdata(
        FileFormat theFile,
        String name,
        String path,
        long[] oid)
    {
        super(theFile, name, path, oid);
        numberOfRecords = 0;
        numberOfMembers = 0;
        memberOrders = null;
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.DataFormat#hasAttribute()
     */
    public boolean hasAttribute()
    {
        if (nAttributes < 0) {
            int id = open();

            if (id >= 0) {
                try {
                    nAttributes = HDFLibrary.VSnattrs(id);
                }
                catch (Exception ex) {
                    log.debug("hasAttribute() failure: ", ex);
                    nAttributes = 0;
                }

                log.trace("hasAttribute(): nAttributes={}", nAttributes);

                close(id);
            }
        }

        return (nAttributes > 0);
    }

    // implementing Dataset
    @Override
    public Datatype getDatatype()
    {
        if (datatype == null) {
            datatype = new H4Datatype(-1);
        }

        return datatype;
    }

    // Implementing Dataset
    @Override
    public byte[] readBytes() throws HDFException
    {
        log.trace("readBytes(): start");

        byte[] theData = null;

        if (rank <= 0) {
            init();
        }
        if (numberOfMembers <= 0) {
            log.debug("readBytes(): VData contains no members");
            log.trace("readBytes(): finish");
            return null; // this Vdata does not have any field
        }

        int id = open();
        if (id < 0) {
            log.debug("readBytes(): Invalid VData ID");
            log.trace("readBytes(): finish");
            return null;
        }

        // build a comma-separated list of all field names
        String allNames = memberNames[0];
        for (int i=1; i<numberOfMembers; i++) {
            allNames += ","+memberNames[i];
        }

        try {
            // moves the access pointer to the start position
            HDFLibrary.VSseek(id, (int)startDims[0]);
            // Specify the fields to be accessed
            HDFLibrary.VSsetfields(id, allNames);
            int[] recordSize = {0};
            HDFLibrary.VSQueryvsize(id, recordSize);
            int size = recordSize[0] * (int)selectedDims[0];
            theData = new byte[size];
            HDFLibrary.VSread(
                id,
                theData,
                (int)selectedDims[0],
                HDFConstants.FULL_INTERLACE);
        }
        catch (Exception ex) {
            log.debug("readBytes(): failure: ", ex);
        }
        finally {
            close(id);
        }

        log.trace("readBytes(): finish");
        return theData;
    }

    // Implementing DataFormat
    @SuppressWarnings({"rawtypes", "unchecked"})
    @Override
    public Object read() throws HDFException
    {
        log.trace("read(): start");

        List list = null;

        if (rank <= 0) {
            init();
        }
        if (numberOfMembers <= 0) {
            log.debug("read(): VData contains no members");
            log.trace("read(): finish");
            return null; // this Vdata does not have any field
        }

        int id = open();
        if (id < 0) {
            log.debug("read(): Invalid VData ID");
            log.trace("read(): finish");
            return null;
        }

        list = new Vector();

        // assume external data files are located in the same directory as the main file.
        HDFLibrary.HXsetdir(getFileFormat().getParent());

        Object member_data = null;
        for (int i=0; i<numberOfMembers; i++) {
            if (!isMemberSelected[i]) {
                continue;
            }

            try {
                // moves the access pointer to the start position
                HDFLibrary.VSseek(id, (int)startDims[0]);
                // Specify the fields to be accessed
                HDFLibrary.VSsetfields(id, memberNames[i]);
            }
            catch (HDFException ex) {
                log.debug("read(): failure: ", ex);
                isMemberSelected[i] = false;
                continue;
            }

            int n = memberOrders[i]*(int)selectedDims[0];
            member_data = H4Datatype.allocateArray(memberTIDs[i], n);

            log.trace("read(): index={} isMemberSelected[i]={} memberOrders[i]={} array size={}", i, isMemberSelected[i], memberOrders[i], n);
            if (member_data == null) {
                String[] nullValues = new String[n];
                for (int j=0; j<n; j++) {
                    nullValues[j] = "*error*";
                }
                list.add(nullValues);
                continue;
            }

            try {
                HDFLibrary.VSread(
                    id,
                    member_data,
                    (int)selectedDims[0],
                    HDFConstants.FULL_INTERLACE);
                if ((memberTIDs[i] == HDFConstants.DFNT_CHAR) ||
                    (memberTIDs[i] == HDFConstants.DFNT_UCHAR8)) {
                    // convert characters to string
                    log.trace("read(): convert characters to string");
                    member_data = Dataset.byteToString((byte[])member_data, memberOrders[i]);
                    memberTypes[i] = new H4Datatype(Datatype.CLASS_STRING, memberOrders[i], -1, -1);
                    memberOrders[i] = 1; //one String
                }
                else if (H4Datatype.isUnsigned(memberTIDs[i])) {
                    // convert unsigned integer to appropriate Java integer
                    log.trace("read(): convert unsigned integer to appropriate Java integer");
                    member_data = Dataset.convertFromUnsignedC(member_data);
                }
            }
            catch (HDFException ex) {
                log.debug("read(): VSread failure: ", ex);
                String[] nullValues = new String[n];
                for (int j=0; j<n; j++) {
                    nullValues[j] = "*error*";
                }
                list.add(nullValues);
                continue;
            }

            list.add(member_data);
        } // for (int i=0; i<numberOfMembers; i++)

        close(id);

        log.trace("read(): finish");
        return list;
    }

    // Implementing DataFormat
    @Override
    public void write(Object buf) throws HDFException
    {
        // For writing to a vdata, VSsetfields can only be called once, to set
        // up the fields of the vdata. Once the vdata fields are set, they may
        // not be changed. Thus, to update some fields of a record after the
        // first write, the user must read all the fields into a buffer, update
        // the buffer, then write the entire record back to the vdata.
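        //
        // A minimal sketch of that read-update-write workflow at the object level,
        // assuming writing were enabled (hypothetical; write() is currently disabled,
        // and the variable names below are illustrative only):
        //
        //     List data = (List) vdata.read();     // read all selected fields into memory
        //     int[] firstField = (int[]) data.get(0);
        //     firstField[0] = 42;                  // update the in-memory buffer
        //     vdata.write(data);                   // write the entire record set back
        //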
        log.trace("write(): disabled");
/*
        if (buf == null || numberOfMembers <= 0 || !(buf instanceof List))
            return; // no data to write

        List list = (List)buf;
        Object member_data = null;
        String member_name = null;

        int vid = open();
        if (vid < 0) return;

        int idx = 0;
        for (int i=0; i<numberOfMembers; i++) {
            if (!isMemberSelected[i])
                continue;

            HDFLibrary.VSsetfields(vid, memberNames[i]);

            try {
                // Specify the fields to be accessed

                // moves the access pointer to the start position
                HDFLibrary.VSseek(vid, (int)startDims[0]);
            }
            catch (HDFException ex) {
                continue;
            }

            member_data = list.get(idx++);
            if (member_data == null)
                continue;

            if (memberTIDs[i] == HDFConstants.DFNT_CHAR ||
                memberTIDs[i] ==  HDFConstants.DFNT_UCHAR8) {
                member_data = Dataset.stringToByte((String[])member_data, memberOrders[i]);
            }
            else if (H4Datatype.isUnsigned(memberTIDs[i])) {
                // convert unsigned integer to appropriate Java integer
                member_data = Dataset.convertToUnsignedC(member_data);
            }


            int interlace = HDFConstants.NO_INTERLACE;
            try {
                int write_num = HDFLibrary.VSwrite(
                    vid, member_data, (int)selectedDims[0], interlace);
            }
            catch (HDFException ex) {
                log.debug("write():", ex);
            }
        } // for (int i=0; i<numberOfMembers; i++)

        close(vid);
*/
    }

    // Implementing DataFormat
    @SuppressWarnings({"rawtypes", "unchecked"})
    public List getMetadata() throws HDFException
    {
        log.trace("getMetadata(): start");

        if (attributeList != null) {
            log.trace("getMetadata(): attributeList != null");
            log.trace("getMetadata(): finish");
            return attributeList;
        }

        int id = open();

        if (id < 0) {
            log.debug("getMetadata(): Invalid VData ID");
            log.trace("getMetadata(): finish");
            return attributeList;
        }

        int n=0;
        try {
            n = HDFLibrary.VSnattrs(id);

            if (n <= 0) {
                log.debug("getMetadata(): VData number of attributes <= 0");
                log.trace("getMetadata(): finish");
                return attributeList;
            }

            attributeList = new Vector(n, 5);
            boolean b = false;
            String[] attrName = new String[1];
            int[] attrInfo = new int[5];

            // j = -1 (_HDF_VDATA) selects attributes attached to the vdata itself;
            // j >= 0 selects attributes attached to field j
            int nleft = n;
            for (int j=-1; j<numberOfMembers; j++) {
                for (int i=0; i<nleft; i++) {
                    attrName[0] = "";

                    try {
                        b = HDFLibrary.VSattrinfo(id, j, i, attrName, attrInfo);
                        // mask off the litend bit
                        attrInfo[0] = attrInfo[0] & (~HDFConstants.DFNT_LITEND);
                    }
                    catch (HDFException ex) {
                        log.debug("getMetadata(): attribute[{}] VSattrinfo failure: ", i, ex);
                        b = false;
                        ex.printStackTrace();
                    }

                    if (!b || attrName[0].length()<=0) {
                        continue;
                    }

                    long[] attrDims = {attrInfo[1]};
                    Attribute attr = new Attribute(attrName[0], new H4Datatype(attrInfo[0]), attrDims);
                    if (j>=0)
                        attr.setProperty("field", memberNames[j]);
                    attributeList.add(attr);

                    Object buf = H4Datatype.allocateArray(attrInfo[0], attrInfo[1]);
                    try {
                        HDFLibrary.VSgetattr(id, j, i, buf);
                    }
                    catch (HDFException ex) {
                        log.debug("getMetadata(): attribute[{}] VSgetattr failure: ", i, ex);
                        buf = null;
                    }

                    if (buf != null) {
                        if ((attrInfo[0] == HDFConstants.DFNT_CHAR) ||
                            (attrInfo[0] == HDFConstants.DFNT_UCHAR8)) {
                            buf = Dataset.byteToString((byte[])buf, attrInfo[1]);
                        }

                        attr.setValue(buf);
                        nleft--;
                    }
                } // for (int i=0; i<nleft; i++)
            } // for (int j=-1; j<numberOfMembers; j++)
        }
        catch (Exception ex) {
            log.debug("getMetadata(): failure: ", ex);
        }
        finally {
            close(id);
        }

        // TODO: we should also load attributes of fields

        log.trace("getMetadata(): finish");
        return attributeList;
    }

    // To do: Implementing DataFormat
    @SuppressWarnings({"rawtypes", "unchecked"})
    public void writeMetadata(Object info) throws Exception
    {
        log.trace("writeMetadata(): start");

        // only attribute metadata is supported.
        if (!(info instanceof Attribute)) {
            log.debug("writeMetadata(): Object not an Attribute");
            log.trace("writeMetadata(): finish");
            return;
        }

        try {
            getFileFormat().writeAttribute(this, (Attribute)info, true);

            if (attributeList == null) {
                attributeList = new Vector();
            }

            attributeList.add(info);
            nAttributes = attributeList.size();
        }
        catch (Exception ex) {
            log.debug("writeMetadata(): failure: ", ex);
        }

        log.trace("writeMetadata(): finish");
    }

    // To do: Implementing DataFormat
    public void removeMetadata(Object info) throws HDFException
    {
        log.trace("removeMetadata(): disabled");
    }

    // implementing DataFormat
    public void updateMetadata(Object info) throws Exception {
        log.trace("updateMetadata(): disabled");
    }

    // Implementing DataFormat
    @Override
    public int open()
    {
        log.trace("open(): start");

        // try to open with write permission
        int vsid = -1;
        try {
            vsid = HDFLibrary.VSattach(getFID(), (int)oid[1], "w");
        }
        catch (HDFException ex) {
            log.debug("open(): VSattach failure: ", ex);
            vsid = -1;
        }

        // try to open with read-only permission
        if (vsid < 0) {
            try {
                vsid = HDFLibrary.VSattach(getFID(), (int)oid[1], "r");
            }
            catch (HDFException ex) {
                log.debug("open(): VSattach failure: ", ex);
                vsid = -1;
            }
        }

        log.trace("open(): finish");
        return vsid;
    }

    // Implementing DataFormat
    @Override
    public void close(int vsid)
    {
        try {
            HDFLibrary.VSdetach(vsid);
        }
        catch (Exception ex) {
            log.debug("close(): VSdetach failure: ", ex);
        }
    }

    /**
     * Initializes the H4Vdata, such as the dimension sizes of this dataset.
     */
    @Override
    public void init()
    {
        log.trace("init(): start");
        if (rank>0) {
            log.trace("init(): Already initialized");
            log.trace("init(): finish");
            return; // already called. Initialize only once
        }

        int id = open();
        if (id < 0) {
            log.debug("init(): Invalid VData ID");
            log.trace("init(): finish");
            return;
        }

        try {
            numberOfMembers = HDFLibrary.VFnfields(id);
            numberOfRecords = HDFLibrary.VSelts(id);
        }
        catch (HDFException ex) {
            log.debug("init(): failure: ", ex);
            numberOfMembers = 0;
            numberOfRecords = 0;
        }

//        Still need to get information if there is no record, see bug 1738
//        if ((numberOfMembers <=0) || (numberOfRecords <= 0)) {
//            // no table field is defined or no records
//            close(id);
//            return;
//        }

        // a Vdata table is a one-dimensional array of records;
        // each record has the same fields
        rank = 1;
        dims = new long[1];
        dims[0] = numberOfRecords;
        selectedDims = new long[1];
        selectedDims[0] = numberOfRecords;
        selectedIndex[0] = 0;
        startDims = new long[1];
        startDims[0] = 0;

        memberNames = new String[numberOfMembers];
        memberTIDs = new int[numberOfMembers];
        memberTypes = new Datatype[numberOfMembers];
        memberOrders = new int[numberOfMembers];
        isMemberSelected = new boolean[numberOfMembers];

        for (int i=0; i<numberOfMembers; i++) {
            isMemberSelected[i] = true;
            try {
                memberNames[i] = HDFLibrary.VFfieldname(id, i);
                memberTIDs[i] = HDFLibrary.VFfieldtype(id, i);
                memberTypes[i] = new H4Datatype(memberTIDs[i]);
                // mask off the litend bit
                memberTIDs[i] = memberTIDs[i] & (~HDFConstants.DFNT_LITEND);
                memberOrders[i] = HDFLibrary.VFfieldorder(id, i);
                log.trace("init():{}> isMemberSelected[i]={} memberNames[i]={} memberTIDs[i]={} memberOrders[i]={}", i, isMemberSelected[i], memberNames[i], memberTIDs[i], memberOrders[i]);
            }
            catch (HDFException ex) {
                log.debug("init(): member[{}]: ", i, ex);
                log.trace("init(): continue");
                continue;
            }
        } // for (int i=0; i<numberOfMembers; i++)

        close(id);
        log.trace("init(): finish");
    }

    /**
     * Returns the number of records.
     *
     * @return the number of records
     */
    public int getRecordCount()
    {
        return numberOfRecords;
    }

    /**
     * Returns the number of fields.
     *
     * @return the number of fields
     */
    public int getFieldCount()
    {
        return numberOfMembers;
    }

    /**
     * Returns the orders of the fields.
     *
     * @return the orders of the fields
     */
    public int[] getFieldOrders()
    {
        return memberOrders;
    }

    // Implementing DataFormat
    @SuppressWarnings("rawtypes")
    public List getMetadata(int... attrPropList) throws Exception {
        throw new UnsupportedOperationException("getMetadata(int... attrPropList) is not supported");
    }

    public Dataset copy(Group pgroup, String name, long[] dims, Object data)
            throws Exception {
        throw new UnsupportedOperationException(
                "Writing a vdata to a new dataset is not implemented.");
    }
}