001/***************************************************************************** 002 * Copyright by The HDF Group. * 003 * Copyright by the Board of Trustees of the University of Illinois. * 004 * All rights reserved. * 005 * * 006 * This file is part of the HDF Java Products distribution. * 007 * The full copyright notice, including terms governing use, modification, * 008 * and redistribution, is contained in the COPYING file, which can be found * 009 * at the root of the source code distribution tree, * 010 * or in https://www.hdfgroup.org/licenses. * 011 * If you do not have access to either file, you may request a copy from * 012 * help@hdfgroup.org. * 013 ****************************************************************************/ 014 015package hdf.object.h5; 016 017import java.lang.reflect.Array; 018import java.math.BigDecimal; 019import java.math.BigInteger; 020import java.text.DecimalFormat; 021import java.util.ArrayList; 022import java.util.Arrays; 023import java.util.Collection; 024import java.util.HashMap; 025import java.util.Iterator; 026import java.util.List; 027import java.util.Map; 028import java.util.Vector; 029 030import hdf.object.Attribute; 031import hdf.object.CompoundDS; 032import hdf.object.Dataset; 033import hdf.object.Datatype; 034import hdf.object.FileFormat; 035import hdf.object.Group; 036import hdf.object.HObject; 037import hdf.object.MetaDataContainer; 038import hdf.object.Utils; 039import hdf.object.h5.H5Datatype; 040import hdf.object.h5.H5ReferenceType; 041 042import hdf.hdf5lib.H5; 043import hdf.hdf5lib.HDF5Constants; 044import hdf.hdf5lib.HDFNativeData; 045import hdf.hdf5lib.exceptions.HDF5DataFiltersException; 046import hdf.hdf5lib.exceptions.HDF5Exception; 047 048import org.slf4j.Logger; 049import org.slf4j.LoggerFactory; 050 051/** 052 * The H5CompoundAttr class defines an HDF5 attribute of compound datatypes. 
053 * 054 * An attribute is a (name, value) pair of metadata attached to a primary data object such as a dataset, group 055 * or named datatype. 056 * 057 * Like a dataset, an attribute has a name, datatype and dataspace. 058 * 059 * A HDF5 compound datatype is similar to a struct in C or a common block in Fortran: it is a collection of 060 * one or more atomic types or small arrays of such types. Each member of a compound type has a name which is 061 * unique within that type, and a byte offset that determines the first byte (smallest byte address) of that 062 * member in a compound datum. 063 * 064 * For more information on HDF5 attributes and datatypes, read the 065 * <a href="https://hdfgroup.github.io/hdf5/_h5_a__u_g.html#sec_attribute">HDF5 Attributes in HDF5 User 066 * Guide</a> 067 * 068 * There are two basic types of compound attributes: simple compound data and nested compound data. Members of 069 * a simple compound attribute have atomic datatypes. Members of a nested compound attribute are compound or 070 * array of compound data. 071 * 072 * Since Java does not understand C structures, we cannot directly read/write compound data values as in the 073 * following C example. 074 * 075 * <pre> 076 * typedef struct s1_t { 077 * int a; 078 * float b; 079 * double c; 080 * } s1_t; 081 * s1_t s1[LENGTH]; 082 * ... 083 * H5Dwrite(..., s1); 084 * H5Dread(..., s1); 085 * </pre> 086 * 087 * Values of compound data fields are stored in java.util.Vector object. We read and write compound data by 088 * fields instead of compound structure. As for the example above, the java.util.Vector object has three 089 * elements: int[LENGTH], float[LENGTH] and double[LENGTH]. Since Java understands the primitive datatypes of 090 * int, float and double, we will be able to read/write the compound data by field. 
 *
 * @version 1.0 6/15/2021
 * @author Allen Byrne
 */
public class H5CompoundAttr extends CompoundDS implements H5Attribute {
    private static final long serialVersionUID = 2072473407027648309L;

    private static final Logger log = LoggerFactory.getLogger(H5CompoundAttr.class);

    /** The HObject to which this H5CompoundAttr is attached, Attribute interface */
    protected HObject parentObject;

    /** additional information and properties for the attribute, Attribute interface */
    private transient Map<String, Object> properties;

    /**
     * Create an attribute with specified name, data type and dimension sizes.
     *
     * @param parentObj
     *            the HObject to which this H5CompoundAttr is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for scalar attribute
     *
     * @see hdf.object.Datatype
     */
    public H5CompoundAttr(HObject parentObj, String attrName, Datatype attrType, long[] attrDims)
    {
        this(parentObj, attrName, attrType, attrDims, null);
    }

    /**
     * Create an attribute with specific name and value.
     *
     * @param parentObj
     *            the HObject to which this H5CompoundAttr is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for scalar attribute
     * @param attrValue
     *            the value of the attribute, null if no value
     *
     * @see hdf.object.Datatype
     */
    @SuppressWarnings({"rawtypes", "unchecked", "deprecation"})
    public H5CompoundAttr(HObject parentObj, String attrName, Datatype attrType, long[] attrDims,
                          Object attrValue)
    {
        super((parentObj == null) ? null : parentObj.getFileFormat(), attrName,
              (parentObj == null) ? null : parentObj.getFullName(), null);

        log.trace("H5CompoundAttr: start {}", parentObj);
        this.parentObject = parentObj;

        datatype = attrType;

        // If a value is supplied up front, mark the data as already loaded so
        // getData() will not trigger a read from the file.
        if (attrValue != null) {
            data         = attrValue;
            originalBuf  = attrValue;
            isDataLoaded = true;
        }
        properties = new HashMap();

        // A null dimension array denotes a scalar attribute: modeled as rank 1
        // with a single element.
        if (attrDims == null) {
            rank     = 1;
            dims     = new long[] {1};
            isScalar = true;
        }
        else {
            dims     = attrDims;
            rank     = dims.length;
            isScalar = false;
        }

        selectedDims   = new long[rank];
        startDims      = new long[rank];
        selectedStride = new long[rank];

        // Member information is filled in lazily by init().
        numberOfMembers  = 0;
        memberNames      = null;
        isMemberSelected = null;
        memberTypes      = null;

        log.trace("attrName={}, attrType={}, attrValue={}, rank={}", attrName, attrType.getDescription(),
                  data, rank);

        resetSelection();
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#open()
     */
    @Override
    public long open()
    {
        if (parentObject == null) {
            log.debug("open(): attribute's parent object is null");
            return HDF5Constants.H5I_INVALID_HID;
        }

        long aid    = HDF5Constants.H5I_INVALID_HID;
        long pObjID = HDF5Constants.H5I_INVALID_HID;

        try {
            // An attribute can only be opened through its parent object's identifier.
            pObjID = parentObject.open();
            if (pObjID >= 0) {
                if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5))) {
                    log.trace("open(): FILE_TYPE_HDF5");
                    // Guard with H5Aexists so a missing attribute does not raise an HDF5 error stack.
                    if (H5.H5Aexists(pObjID, getName()))
                        aid = H5.H5Aopen(pObjID, getName(), HDF5Constants.H5P_DEFAULT);
                }
            }

            log.trace("open(): aid={}", aid);
        }
        catch (Exception ex) {
            log.debug("open(): Failed to open attribute {}: ", getName(), ex);
            aid = HDF5Constants.H5I_INVALID_HID;
        }
        finally {
            // Always release the parent identifier; the caller owns only aid.
            parentObject.close(pObjID);
        }

        return aid;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#close(int)
     */
    @Override
    public void close(long aid)
    {
        if (aid >= 0) {
            if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5))) {
                log.trace("close(): FILE_TYPE_HDF5");
                try {
                    H5.H5Aclose(aid);
                }
                catch (HDF5Exception ex) {
                    log.debug("close(): H5Aclose({}) failure: ", aid, ex);
                }
            }
        }
    }

    /**
     * Retrieves datatype and dataspace information from file and sets the attribute
     * in memory.
     *
     * The init() is designed to support lazy operation in an attribute object. When a
     * data object is retrieved from file, the datatype, dataspace and raw data are
     * not loaded into memory. When it is asked to read the raw data from file,
     * init() is first called to get the datatype and dataspace information, then
     * load the raw data from file.
     *
     * init() is also used to reset the selection of an attribute (start, stride and
     * count) to the default, which is the entire attribute for 1D or 2D datasets. In
     * the following example, init() at step 1) retrieves datatype and dataspace
     * information from file. getData() at step 3) reads only one data point. init()
     * at step 4) resets the selection to the whole attribute. getData() at step 6)
     * reads the values of whole attribute into memory.
     *
     * <pre>
     * dset = (Dataset) file.get(NAME_DATASET);
     *
     * // 1) get datatype and dataspace information from file
     * attr.init();
     * rank = attr.getAttributeRank(); // rank = 2, a 2D attribute
     * count = attr.getSelectedDims();
     * start = attr.getStartDims();
     * dims = attr.getAttributeDims();
     *
     * // 2) select only one data point
     * for (int i = 0; i &lt; rank; i++) {
     *     start[i] = 0;
     *     count[i] = 1;
     * }
     *
     * // 3) read one data point
     * data = attr.getAttributeData();
     *
     * // 4) reset selection to the whole attribute
     * attr.init();
     *
     * // 5) clean the memory data buffer
     * attr.clearData();
     *
     * // 6) Read the whole attribute
     * data = attr.getAttributeData();
     * </pre>
     */
    @Override
    public void init()
    {
        // Already initialized: only restore the default (whole-attribute) selection.
        if (inited) {
            resetSelection();
            log.trace("init(): H5CompoundAttr already inited");
            return;
        }

        long aid   = HDF5Constants.H5I_INVALID_HID;
        long tid   = HDF5Constants.H5I_INVALID_HID;
        long sid   = HDF5Constants.H5I_INVALID_HID;
        int tclass = HDF5Constants.H5I_INVALID_HID;
        flatNameList = new Vector<>();
        flatTypeList = new Vector<>();
        long[] memberTIDs = null;

        log.trace("init(): FILE_TYPE_HDF5");
        aid = open();
        if (aid >= 0) {
            try {
                // Dataspace: rank and space type (NULL spaces have no data at all).
                sid        = H5.H5Aget_space(aid);
                rank       = H5.H5Sget_simple_extent_ndims(sid);
                space_type = H5.H5Sget_simple_extent_type(sid);
                if (space_type == HDF5Constants.H5S_NULL)
                    isNULL = true;
                else
                    isNULL = false;
                tid    = H5.H5Aget_type(aid);
                tclass = H5.H5Tget_class(tid);
                log.trace("init(): tid={} sid={} rank={} space_type={}", tid, sid, rank, space_type);

                long tmptid = 0;

                // Handle ARRAY and VLEN types by getting the base type
                if (tclass == HDF5Constants.H5T_ARRAY || tclass == HDF5Constants.H5T_VLEN) {
                    try {
                        tmptid = tid;
                        tid    = H5.H5Tget_super(tmptid);
                        log.trace("init(): H5T_ARRAY or H5T_VLEN class old={}, new={}", tmptid, tid);
                    }
                    catch (Exception ex) {
                        log.debug("init(): H5T_ARRAY or H5T_VLEN H5Tget_super({}) failure: ", tmptid, ex);
                        tid = -1;
                    }
                    finally {
                        // The original (outer) type id is no longer needed once the base is extracted.
                        try {
                            H5.H5Tclose(tmptid);
                        }
                        catch (HDF5Exception ex) {
                            log.debug("init(): H5Tclose({}) failure: ", tmptid, ex);
                        }
                    }
                }

                if (tclass == HDF5Constants.H5T_COMPOUND) {
                    // initialize member information: flatten the (possibly nested)
                    // compound structure into parallel name/type lists.
                    H5Datatype.extractCompoundInfo((H5Datatype)datatype, "", flatNameList, flatTypeList);
                    numberOfMembers = flatNameList.size();
                    log.trace("init(): numberOfMembers={}", numberOfMembers);

                    memberNames      = new String[numberOfMembers];
                    memberTIDs       = new long[numberOfMembers];
                    memberTypes      = new Datatype[numberOfMembers];
                    memberOrders     = new int[numberOfMembers];
                    isMemberSelected = new boolean[numberOfMembers];
                    memberDims       = new Object[numberOfMembers];

                    for (int i = 0; i < numberOfMembers; i++) {
                        isMemberSelected[i] = true; // all members selected by default
                        memberTIDs[i]       = flatTypeList.get(i).createNative();

                        try {
                            memberTypes[i] = flatTypeList.get(i);
                        }
                        catch (Exception ex) {
                            log.debug("init(): failed to create datatype for member[{}]: ", i, ex);
                            memberTypes[i] = null;
                        }

                        memberNames[i]  = flatNameList.get(i);
                        memberOrders[i] = 1;
                        memberDims[i]   = null;
                        log.trace("init()[{}]: memberNames[{}]={}, memberTIDs[{}]={}, memberTypes[{}]={}", i,
                                  i, memberNames[i], i, memberTIDs[i], i, memberTypes[i]);

                        // tclass is reused here for the member's class, not the attribute's.
                        try {
                            tclass = H5.H5Tget_class(memberTIDs[i]);
                        }
                        catch (HDF5Exception ex) {
                            log.debug("init(): H5Tget_class({}) failure: ", memberTIDs[i], ex);
                        }

                        if (tclass == HDF5Constants.H5T_ARRAY) {
                            // Record the member's array dimensions and element count (order).
                            int n       = H5.H5Tget_array_ndims(memberTIDs[i]);
                            long mdim[] = new long[n];
                            H5.H5Tget_array_dims(memberTIDs[i], mdim);
                            int idim[] = new int[n];
                            for (int j = 0; j < n; j++)
                                idim[j] = (int)mdim[j];
                            memberDims[i] = idim;
                            tmptid        = H5.H5Tget_super(memberTIDs[i]);
                            // order = total size / base-type size = number of array elements
                            memberOrders[i] = (int)(H5.H5Tget_size(memberTIDs[i]) / H5.H5Tget_size(tmptid));
                            try {
                                H5.H5Tclose(tmptid);
                            }
                            catch (HDF5Exception ex) {
                                log.debug("init(): memberTIDs[{}] H5Tclose(tmptid {}) failure: ", i, tmptid,
                                          ex);
                            }
                        }
                    } // (int i=0; i<numberOfMembers; i++)
                }

                if (rank == 0) {
                    // a scalar data point
                    isScalar = true;
                    rank     = 1;
                    dims     = new long[] {1};
                    log.trace("init(): rank is a scalar data point");
                }
                else {
                    isScalar = false;
                    dims     = new long[rank];
                    maxDims  = new long[rank];
                    H5.H5Sget_simple_extent_dims(sid, dims, maxDims);
                    log.trace("init(): rank={}, dims={}, maxDims={}", rank, dims, maxDims);
                }

                inited = true;
            }
            catch (HDF5Exception ex) {
                // On failure, clear any partially-populated member information.
                numberOfMembers = 0;
                memberNames     = null;
                memberTypes     = null;
                memberOrders    = null;
                log.debug("init(): ", ex);
            }
            finally {
                // Release every native identifier acquired above, each guarded
                // individually so one failure does not leak the rest.
                try {
                    H5.H5Tclose(tid);
                }
                catch (HDF5Exception ex2) {
                    log.debug("init(): H5Tclose(tid {}) failure: ", tid, ex2);
                }
                try {
                    H5.H5Sclose(sid);
                }
                catch (HDF5Exception ex2) {
                    log.debug("init(): H5Sclose(sid {}) failure: ", sid, ex2);
                }

                if (memberTIDs != null) {
                    for (int i = 0; i < memberTIDs.length; i++) {
                        try {
                            H5.H5Tclose(memberTIDs[i]);
                        }
                        catch (Exception ex) {
                            log.debug("init(): H5Tclose(memberTIDs[{}] {}) failure: ", i, memberTIDs[i], ex);
                        }
                    }
                }
            }

            close(aid);

            startDims    = new long[rank];
            selectedDims = new long[rank];

            resetSelection();
        }
        else {
            log.debug("init(): failed to open attribute");
        }
    }

    /**
     * Returns the datatype of the data object.
     *
     * @return the datatype of the data object.
472 */ 473 @Override 474 public Datatype getDatatype() 475 { 476 if (!inited) 477 init(); 478 479 if (datatype == null) { 480 long aid = HDF5Constants.H5I_INVALID_HID; 481 long tid = HDF5Constants.H5I_INVALID_HID; 482 483 aid = open(); 484 if (aid >= 0) { 485 try { 486 tid = H5.H5Aget_type(aid); 487 int nativeClass = H5.H5Tget_class(tid); 488 if (nativeClass == HDF5Constants.H5T_REFERENCE) { 489 long lsize = 1; 490 long sid = H5.H5Aget_space(aid); 491 int rank = H5.H5Sget_simple_extent_ndims(sid); 492 if (rank > 0) { 493 long dims[] = new long[rank]; 494 H5.H5Sget_simple_extent_dims(sid, dims, null); 495 log.trace("getDatatype(): rank={}, dims={}", rank, dims); 496 for (int j = 0; j < dims.length; j++) { 497 lsize *= dims[j]; 498 } 499 } 500 datatype = new H5ReferenceType(getFileFormat(), lsize, tid); 501 } 502 else 503 datatype = new H5Datatype(getFileFormat(), tid); 504 } 505 catch (Exception ex) { 506 log.debug("getDatatype(): ", ex); 507 } 508 finally { 509 try { 510 H5.H5Tclose(tid); 511 } 512 catch (HDF5Exception ex) { 513 log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex); 514 } 515 try { 516 H5.H5Aclose(aid); 517 } 518 catch (HDF5Exception ex) { 519 log.debug("getDatatype(): H5Aclose(aid {}) failure: ", aid, ex); 520 } 521 } 522 } 523 } 524 525 return datatype; 526 } 527 528 /** 529 * Returns the data buffer of the attribute in memory. 530 * 531 * If data is already loaded into memory, returns the data; otherwise, calls 532 * read() to read data from file into a memory buffer and returns the memory 533 * buffer. 534 * 535 * The whole attribute is read into memory. Users can also select 536 * a subset from the whole data. Subsetting is done in an implicit way. 537 * 538 * <b>How to Select a Subset</b> 539 * 540 * A selection is specified by three arrays: start, stride and count. 
     * <ol>
     * <li>start: offset of a selection
     * <li>stride: determines how many elements to move in each dimension
     * <li>count: number of elements to select in each dimension
     * </ol>
     * getStartDims(), getStride() and getSelectedDims() returns the start,
     * stride and count arrays respectively. Applications can make a selection
     * by changing the values of the arrays.
     *
     * The following example shows how to make a subset. In the example, the
     * attribute is a 4-dimensional array of [200][100][50][10], i.e. dims[0]=200;
     * dims[1]=100; dims[2]=50; dims[3]=10; <br>
     * We want to select every other data point in dims[1] and dims[2]
     *
     * <pre>
     * int rank = attribute.getRank(); // number of dimensions of the attribute
     * long[] dims = attribute.getDims(); // the dimension sizes of the attribute
     * long[] selected = attribute.getSelectedDims(); // the selected size of the attribute
     * long[] start = attribute.getStartDims(); // the offset of the selection
     * long[] stride = attribute.getStride(); // the stride of the attribute
     * int[] selectedIndex = attribute.getSelectedIndex(); // the selected dimensions for display
     *
     * // select dim1 and dim2 as 2D data for display,and slice through dim0
     * selectedIndex[0] = 1;
     * selectedIndex[1] = 2;
     * selectedIndex[2] = 0;
     *
     * // reset the selection arrays
     * for (int i = 0; i &lt; rank; i++) {
     *     start[i] = 0;
     *     selected[i] = 1;
     *     stride[i] = 1;
     * }
     *
     * // set stride to 2 on dim1 and dim2 so that every other data point is
     * // selected.
     * stride[1] = 2;
     * stride[2] = 2;
     *
     * // set the selection size of dim1 and dim2
     * selected[1] = dims[1] / stride[1];
     * selected[2] = dims[2] / stride[2];
     *
     * // when H5CompoundAttr.getData() is called, the selection above will be used since
     * // the dimension arrays are passed by reference. Changes of these arrays
     * // outside the attribute object directly change the values of these array
     * // in the attribute object.
     * </pre>
     *
     * For H5CompoundAttr, the memory data object is a java.util.List object. Each
     * element of the list is a data array that corresponds to a compound field.
     *
     * For example, if compound attribute "comp" has the following nested
     * structure, and member datatypes
     *
     * <pre>
     * comp --&gt; m01 (int)
     * comp --&gt; m02 (float)
     * comp --&gt; nest1 --&gt; m11 (char)
     * comp --&gt; nest1 --&gt; m12 (String)
     * comp --&gt; nest1 --&gt; nest2 --&gt; m21 (long)
     * comp --&gt; nest1 --&gt; nest2 --&gt; m22 (double)
     * </pre>
     *
     * getData() returns a list of six arrays: {int[], float[], char[],
     * String[], long[] and double[]}.
     *
     * @return the memory buffer of the attribute.
     *
     * @throws Exception if object can not be read
     * @throws OutOfMemoryError if memory is exhausted
     */
    @Override
    public Object getData() throws Exception, OutOfMemoryError
    {
        log.trace("getData(): isDataLoaded={}", isDataLoaded);
        if (!isDataLoaded)
            data = read(); // load the data, attributes read all data

        // Count the number of selected points. For rank > 2 only the first 3
        // selected dimensions participate (row/column/frame selection).
        nPoints = 1;
        log.trace("getData(): selectedDims length={}", selectedDims.length);
        int point_len = selectedDims.length;
        // Partial data for 3 or more dimensions
        if (rank > 2)
            point_len = 3;
        for (int j = 0; j < point_len; j++) {
            log.trace("getData(): selectedDims[{}]={}", j, selectedDims[j]);
            nPoints *= selectedDims[j];
        }
        log.trace("getData: read {}", nPoints);

        // apply the selection for 3 or more dimensions
        // selection only expects to use 3 selectedDims
        // where selectedIndex[0] is the row dimension
        // where selectedIndex[1] is the col dimension
        // where selectedIndex[2] is the frame dimension
        if (rank > 2)
            data = AttributeSelection();

        return data;
    }

    /*
     * (non-Javadoc)
     *
     *
@see hdf.object.Attribute#readBytes() 647 */ 648 @Override 649 public byte[] readBytes() throws HDF5Exception 650 { 651 byte[] theData = null; 652 653 if (!isInited()) 654 init(); 655 656 long aid = open(); 657 if (aid >= 0) { 658 long tid = HDF5Constants.H5I_INVALID_HID; 659 660 try { 661 long[] lsize = {1}; 662 for (int j = 0; j < selectedDims.length; j++) 663 lsize[0] *= selectedDims[j]; 664 665 tid = H5.H5Aget_type(aid); 666 long size = H5.H5Tget_size(tid) * lsize[0]; 667 log.trace("readBytes(): size={}", size); 668 669 if (size < Integer.MIN_VALUE || size > Integer.MAX_VALUE) 670 throw new Exception("Invalid int size"); 671 672 theData = new byte[(int)size]; 673 674 log.trace("readBytes(): read attribute id {} of size={}", tid, lsize); 675 H5.H5Aread(aid, tid, theData); 676 } 677 catch (Exception ex) { 678 log.debug("readBytes(): failed to read data: ", ex); 679 } 680 finally { 681 try { 682 H5.H5Tclose(tid); 683 } 684 catch (HDF5Exception ex2) { 685 log.debug("readBytes(): H5Tclose(tid {}) failure: ", tid, ex2); 686 } 687 close(aid); 688 } 689 } 690 691 return theData; 692 } 693 694 /** 695 * Reads the data from file. 696 * 697 * read() reads the data from file to a memory buffer and returns the memory 698 * buffer. The attribute object does not hold the memory buffer. To store the 699 * memory buffer in the attribute object, one must call getData(). 700 * 701 * By default, the whole attribute is read into memory. 702 * 703 * For CompoundAttr, the memory data object is an java.util.List object. Each 704 * element of the list is a data array that corresponds to a compound field. 
705 * 706 * For example, if compound dataset "comp" has the following nested 707 * structure, and member datatypes 708 * 709 * <pre> 710 * comp --> m01 (int) 711 * comp --> m02 (float) 712 * comp --> nest1 --> m11 (char) 713 * comp --> nest1 --> m12 (String) 714 * comp --> nest1 --> nest2 --> m21 (long) 715 * comp --> nest1 --> nest2 --> m22 (double) 716 * </pre> 717 * 718 * getData() returns a list of six arrays: {int[], float[], char[], 719 * String[], long[] and double[]}. 720 * 721 * @return the data read from file. 722 * 723 * @see #getData() 724 * @see hdf.object.DataFormat#read() 725 * 726 * @throws Exception 727 * if object can not be read 728 */ 729 @Override 730 public Object read() throws Exception 731 { 732 Object readData = null; 733 734 if (!isInited()) 735 init(); 736 737 try { 738 readData = compoundAttributeCommonIO(H5File.IO_TYPE.READ, null); 739 } 740 catch (Exception ex) { 741 log.debug("read(): failed to read compound attribute: ", ex); 742 throw new Exception("failed to read compound attribute: " + ex.getMessage(), ex); 743 } 744 745 return readData; 746 } 747 748 /** 749 * Writes the given data buffer into this attribute in a file. 750 * 751 * The data buffer is a vector that contains the data values of compound fields. The data is written 752 * into file as one data blob. 753 * 754 * @param buf 755 * The vector that contains the data values of compound fields. 756 * 757 * @throws Exception 758 * If there is an error at the HDF5 library level. 
     */
    @Override
    public void write(Object buf) throws Exception
    {
        if (this.getFileFormat().isReadOnly())
            throw new Exception("cannot write to compound attribute in file opened as read-only");

        // NOTE(review): buf.equals(...) throws NPE when buf is null — callers are
        // expected to pass a non-null List of member arrays; verify at call sites.
        if (!buf.equals(data))
            setData(buf);

        init();

        if (parentObject == null) {
            log.debug("write(Object): parent object is null; nowhere to write attribute to");
            return;
        }

        // Create/update the attribute on the parent object before writing the values.
        ((MetaDataContainer)getParentObject()).writeMetadata(this);

        try {
            compoundAttributeCommonIO(H5File.IO_TYPE.WRITE, buf);
        }
        catch (Exception ex) {
            log.debug("write(Object): failed to write compound attribute: ", ex);
            throw new Exception("failed to write compound attribute: " + ex.getMessage(), ex);
        }
        resetSelection();
    }

    /*
     * Routine to convert datatypes that are read in as byte arrays to
     * regular types.
     *
     * 128-bit (16-byte) floating point values have no Java primitive equivalent,
     * so they are converted to BigDecimal here; every other type is handled by
     * the superclass.
     */
    @Override
    protected Object convertByteMember(final Datatype dtype, byte[] byteData)
    {
        Object theObj = null;

        if (dtype.isFloat() && dtype.getDatatypeSize() == 16)
            theObj = ((H5Datatype)dtype).byteToBigDecimal(byteData, 0);
        else
            theObj = super.convertByteMember(dtype, byteData);

        return theObj;
    }

    /*
     * Common read/write entry point: validates the request, opens the attribute
     * identifier and delegates the actual transfer to AttributeCommonIO().
     *
     * ioType   - whether this is a READ or WRITE operation
     * writeBuf - the data to write (a List of member arrays); ignored for reads
     *
     * Returns the data read, or null for writes. Throws if the attribute has no
     * members, the write buffer is invalid, or the datatype is unsupported.
     */
    private Object compoundAttributeCommonIO(H5File.IO_TYPE ioType, Object writeBuf) throws Exception
    {
        H5Datatype dsDatatype = (H5Datatype)getDatatype();
        Object theData        = null;

        if (numberOfMembers <= 0) {
            log.debug("compoundAttributeCommonIO(): attribute contains no members");
            throw new Exception("dataset contains no members");
        }

        /*
         * I/O type-specific pre-initialization.
         */
        if (ioType == H5File.IO_TYPE.WRITE) {
            if ((writeBuf == null) || !(writeBuf instanceof List)) {
                log.debug("compoundAttributeCommonIO(): writeBuf is null or invalid");
                throw new Exception("write buffer is null or invalid");
            }

            /*
             * Check for any unsupported datatypes and fail early before
             * attempting to write to the attribute.
             */
            if (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isCompound()) {
                log.debug("compoundAttributeCommonIO(): cannot write attribute of type ARRAY of COMPOUND");
                throw new HDF5Exception("Unsupported attribute of type ARRAY of COMPOUND");
            }

            if (dsDatatype.isVLEN() && !dsDatatype.isVarStr() && dsDatatype.getDatatypeBase().isCompound()) {
                log.debug("compoundAttributeCommonIO(): cannot write attribute of type VLEN of COMPOUND");
                throw new HDF5Exception("Unsupported attribute of type VLEN of COMPOUND");
            }
        }

        long aid = open();
        if (aid >= 0) {
            log.trace("compoundAttributeCommonIO(): isDataLoaded={}", isDataLoaded);
            try {
                theData = AttributeCommonIO(aid, ioType, writeBuf);
            }
            finally {
                // Always release the attribute identifier, even on failure.
                close(aid);
            }
        }
        else
            log.debug("compoundAttributeCommonIO(): failed to open attribute");

        return theData;
    }

    /*
     * Private recursive routine to read/write an entire compound datatype field by
     * field. This routine is called recursively for ARRAY of COMPOUND and VLEN of
     * COMPOUND datatypes.
     *
     * NOTE: the globalMemberIndex hack is ugly, but we need to keep track of a
     * running counter so that we can index properly into the flattened name list
     * generated from H5Datatype.extractCompoundInfo() at attribute init time.
     */
    private Object compoundTypeIO(H5Datatype parentType, int nSelPoints, final H5Datatype cmpdType,
                                  Object dataBuf, int[] globalMemberIndex)
    {
        Object theData = null;

        if (cmpdType.isArray()) {
            log.trace("compoundTypeIO(): ARRAY type");

            // Flatten the array: each selected point contributes one element per
            // array cell, then recurse on the array's base type.
            long[] arrayDims = cmpdType.getArrayDims();
            int arrSize      = nSelPoints;
            for (int i = 0; i < arrayDims.length; i++) {
                arrSize *= arrayDims[i];
            }
            theData = compoundTypeIO(cmpdType, arrSize, (H5Datatype)cmpdType.getDatatypeBase(), dataBuf,
                                     globalMemberIndex);
        }
        else if (cmpdType.isVLEN() && !cmpdType.isVarStr()) {
            /*
             * TODO: true variable-length support.
             */
            String[] errVal = new String[nSelPoints];
            String errStr   = "*UNSUPPORTED*";

            for (int j = 0; j < nSelPoints; j++)
                errVal[j] = errStr;

            /*
             * Setup a fake data list.
             */
            Datatype baseType = cmpdType.getDatatypeBase();
            while (baseType != null && !baseType.isCompound()) {
                baseType = baseType.getDatatypeBase();
            }

            List<Object> fakeVlenData =
                (List<Object>)H5Datatype.allocateArray((H5Datatype)baseType, nSelPoints);
            fakeVlenData.add(errVal);

            theData = fakeVlenData;
        }
        else if (cmpdType.isCompound()) {
            // parentLength is the byte stride between consecutive records in dataBuf.
            long parentLength           = parentType.getDatatypeSize();
            List<Object> memberDataList = null;
            List<Datatype> typeList     = cmpdType.getCompoundMemberTypes();
            List<Long> offsetList       = cmpdType.getCompoundMemberOffsets();

            log.trace("compoundTypeIO(): read {} members: parentLength={}", typeList.size(), parentLength);

            memberDataList = (List<Object>)H5Datatype.allocateArray(cmpdType, nSelPoints);

            try {
                for (int i = 0; i < typeList.size(); i++) {
                    long memberOffset     = 0; // offset into dataBuf
                    H5Datatype memberType = null;
                    String memberName     = null;
                    Object memberData     = null;

                    try {
                        memberType   = (H5Datatype)typeList.get(i);
                        memberOffset = offsetList.get(i);
                    }
                    catch (Exception ex) {
                        log.debug("compoundTypeIO(): get member {} failure: ", i, ex);
                        globalMemberIndex[0]++;
                        continue;
                    }

                    /*
                     * Since the type list used here is not a flattened structure, we need to skip
                     * the member selection check for compound types, as otherwise having a single
                     * member not selected would skip the reading/writing for the entire compound
                     * type. The member selection check will be deferred to the recursive compound
                     * read/write below.
                     */
                    if (!memberType.isCompound()) {
                        if (!isMemberSelected[globalMemberIndex[0] % this.getMemberCount()]) {
                            log.debug("compoundTypeIO(): member[{}] is not selected", i);
                            globalMemberIndex[0]++;
                            continue; // the field is not selected
                        }
                    }

                    if (!memberType.isCompound()) {
                        try {
                            memberName = new String(flatNameList.get(globalMemberIndex[0]));
                        }
                        catch (Exception ex) {
                            log.debug("compoundTypeIO(): get member {} name failure: ", i, ex);
                            memberName = "null";
                        }
                    }

                    log.trace("compoundTypeIO(): member[{}]({}) is type {} offset {}", i, memberName,
                              memberType.getDescription(), memberOffset);

                    try {
                        // Gather this member's bytes out of the interleaved record
                        // buffer into a contiguous per-member buffer.
                        int mt_typesize = (int)memberType.getDatatypeSize();
                        log.trace("compoundTypeIO(): member[{}] mt_typesize={}", i, mt_typesize);
                        byte[] memberbuf = new byte[nSelPoints * mt_typesize];
                        for (int dimindx = 0; dimindx < nSelPoints; dimindx++)
                            try {
                                System.arraycopy(dataBuf, (int)memberOffset + dimindx * (int)parentLength,
                                                 memberbuf, dimindx * mt_typesize, mt_typesize);
                            }
                            catch (Exception err) {
                                log.trace("compoundTypeIO(): arraycopy failure: ", err);
                            }

                        if (memberType.isCompound()) {
                            memberData = compoundTypeIO(cmpdType, nSelPoints, memberType, memberbuf,
                                                        globalMemberIndex);
                        }
                        else if (memberType
                                     .isArray() /* || (memberType.isVLEN() && !memberType.isVarStr()) */) {
                            /*
                             * Recursively detect any nested array/vlen of compound types.
                             */
                            boolean compoundFound = false;

                            Datatype base = memberType.getDatatypeBase();
                            while (base != null) {
                                if (base.isCompound())
                                    compoundFound = true;

                                base = base.getDatatypeBase();
                            }

                            if (compoundFound) {
                                /*
                                 * Skip the top-level array/vlen type.
                                 */
                                globalMemberIndex[0]++;

                                memberData = compoundTypeIO(cmpdType, nSelPoints, memberType, memberbuf,
                                                            globalMemberIndex);
                            }
                            else {
                                memberData = convertByteMember(memberType, memberbuf);
                                globalMemberIndex[0]++;
                            }
                        }
                        else {
                            memberData = convertByteMember(memberType, memberbuf);
                            globalMemberIndex[0]++;
                        }
                    }
                    catch (Exception ex) {
                        log.debug("compoundTypeIO(): failed to read member {}: ", i, ex);
                        globalMemberIndex[0]++;
                        memberData = null;
                    }

                    // A failed member is replaced with "*ERROR*" placeholders so the
                    // returned list keeps one entry per member.
                    if (memberData == null) {
                        String[] errVal = new String[nSelPoints];
                        String errStr   = "*ERROR*";

                        for (int j = 0; j < nSelPoints; j++)
                            errVal[j] = errStr;

                        memberData = errVal;
                    }

                    memberDataList.add(memberData);
                } // (i = 0; i < atomicTypeList.size(); i++)
            }
            catch (Exception ex) {
                log.debug("compoundTypeIO(): failure: ", ex);
                memberDataList = null;
            }

            theData = memberDataList;
        }

        return theData;
    }

    /*
     * Write-side counterpart of compoundTypeIO(): recursively serializes one
     * compound record (a List of member values) into a contiguous byte buffer.
     */
    private Object compoundTypeWriteIO(H5Datatype parentType, final H5Datatype cmpdType, Object dataBuf,
                                       int[] globalMemberIndex)
    {
        Object theData = null;
        if (cmpdType.isArray()) {
            Object memberData = null; // NOTE(review): unused in this branch
            log.trace("compoundTypeWriteIO(): ARRAY type");

            theData = compoundTypeWriteIO(cmpdType, (H5Datatype)cmpdType.getDatatypeBase(), dataBuf,
                                          globalMemberIndex);
        }
        else if (cmpdType.isVLEN() && !cmpdType.isVarStr()) {
            /*
             * TODO: true variable-length support.
             */
            String errVal = new String("*UNSUPPORTED*");

            /*
             * Setup a fake data bytes.
1059 */ 1060 Datatype baseType = cmpdType.getDatatypeBase(); 1061 while (baseType != null && !baseType.isCompound()) { 1062 baseType = baseType.getDatatypeBase(); 1063 } 1064 1065 List<Object> fakeVlenData = (List<Object>)H5Datatype.allocateArray((H5Datatype)baseType, 1); 1066 fakeVlenData.add(errVal); 1067 1068 theData = convertMemberByte(baseType, fakeVlenData); 1069 } 1070 else if (cmpdType.isCompound()) { 1071 long parentLength = parentType.getDatatypeSize(); 1072 List<Object> memberDataList = null; 1073 List<Datatype> typeList = cmpdType.getCompoundMemberTypes(); 1074 List<Long> offsetList = cmpdType.getCompoundMemberOffsets(); 1075 1076 log.trace("compoundTypeWriteIO(): write {} members", typeList.size()); 1077 1078 theData = new byte[(int)cmpdType.getDatatypeSize()]; 1079 try { 1080 for (int i = 0, writeListIndex = 0; i < typeList.size(); i++) { 1081 long memberOffset = 0; // offset into dataBuf 1082 H5Datatype memberType = null; 1083 String memberName = null; 1084 Object memberData = null; 1085 1086 try { 1087 memberType = (H5Datatype)typeList.get(i); 1088 memberOffset = offsetList.get(i); 1089 } 1090 catch (Exception ex) { 1091 log.debug("compoundTypeWriteIO(): get member {} failure: ", i, ex); 1092 globalMemberIndex[0]++; 1093 continue; 1094 } 1095 long memberLength = memberType.getDatatypeSize(); 1096 1097 /* 1098 * Since the type list used here is not a flattened structure, we need to skip the member 1099 * selection check for compound types, as otherwise having a single member not selected 1100 * would skip the reading/writing for the entire compound type. The member selection check 1101 * will be deferred to the recursive compound read/write below. 
1102 */ 1103 if (!memberType.isCompound()) { 1104 if (!isMemberSelected[globalMemberIndex[0] % this.getMemberCount()]) { 1105 log.debug("compoundTypeWriteIO(): member[{}] is not selected", i); 1106 globalMemberIndex[0]++; 1107 continue; // the field is not selected 1108 } 1109 } 1110 1111 if (!memberType.isCompound()) { 1112 try { 1113 memberName = new String(flatNameList.get(globalMemberIndex[0])); 1114 } 1115 catch (Exception ex) { 1116 log.debug("compoundTypeWriteIO(): get member {} name failure: ", i, ex); 1117 memberName = "null"; 1118 } 1119 } 1120 1121 log.trace("compoundTypeWriteIO(): member[{}]({}) is type {} offset {}", i, memberName, 1122 memberType.getDescription(), memberOffset); 1123 1124 try { 1125 /* 1126 * TODO: currently doesn't correctly handle non-selected compound members. 1127 */ 1128 memberData = ((List<?>)dataBuf).get(i); 1129 } 1130 catch (Exception ex) { 1131 log.debug("compoundTypeWriteIO(): get member[{}] data failure: ", i, ex); 1132 globalMemberIndex[0]++; 1133 continue; 1134 } 1135 1136 if (memberData == null) { 1137 log.debug("compoundTypeWriteIO(): member[{}] data is null", i); 1138 globalMemberIndex[0]++; 1139 continue; 1140 } 1141 1142 try { 1143 if (memberType.isCompound()) { 1144 List<?> nestedList = (List<?>)((List<?>)dataBuf).get(i); 1145 memberData = 1146 compoundTypeWriteIO(cmpdType, memberType, nestedList, globalMemberIndex); 1147 } 1148 else { 1149 memberData = writeSingleCompoundMember(memberType, memberData); 1150 globalMemberIndex[0]++; 1151 } 1152 } 1153 catch (Exception ex) { 1154 log.debug("compoundTypeWriteIO(): failed to write member[{}]: ", i, ex); 1155 globalMemberIndex[0]++; 1156 } 1157 1158 byte[] indexedBytes = convertMemberByte(memberType, memberData); 1159 try { 1160 System.arraycopy(indexedBytes, 0, theData, writeListIndex, (int)memberLength); 1161 } 1162 catch (Exception err) { 1163 log.trace("compoundTypeWriteIO(): arraycopy failure: ", err); 1164 } 1165 writeListIndex += memberLength; 1166 } // (i = 0, 
writeListIndex = 0; i < atomicTypeList.size(); i++) 1167 } 1168 catch (Exception ex) { 1169 log.debug("compoundTypeWriteIO(): failure: ", ex); 1170 theData = null; 1171 } 1172 } 1173 1174 return theData; 1175 } 1176 1177 /* 1178 * Routine to convert datatypes that are in object arrays to 1179 * bytes. 1180 */ 1181 private byte[] convertMemberByte(final Datatype dtype, Object theObj) 1182 { 1183 byte[] byteData = null; 1184 1185 if (dtype.getDatatypeSize() == 1) { 1186 /* 1187 * Normal byte[] type, such as an integer datatype of size 1. 1188 */ 1189 byteData = (byte[])theObj; 1190 } 1191 else if (dtype.isString() && !dtype.isVarStr() && convertByteToString && 1192 !(theObj instanceof byte[])) { 1193 log.trace("convertMemberByte(): converting string array to byte array"); 1194 1195 byteData = stringToByte((String[])theObj, (int)dtype.getDatatypeSize()); 1196 } 1197 else if (dtype.isInteger()) { 1198 log.trace("convertMemberByte(): converting integer array to byte array"); 1199 1200 switch ((int)dtype.getDatatypeSize()) { 1201 case 1: 1202 /* 1203 * Normal byte[] type, such as an integer datatype of size 1. 
1204 */ 1205 byteData = (byte[])theObj; 1206 break; 1207 case 2: 1208 byteData = HDFNativeData.shortToByte(0, 1, (short[])theObj); 1209 break; 1210 case 4: 1211 byteData = HDFNativeData.intToByte(0, 1, (int[])theObj); 1212 break; 1213 case 8: 1214 byteData = HDFNativeData.longToByte(0, 1, (long[])theObj); 1215 break; 1216 default: 1217 log.debug("convertMemberByte(): invalid datatype size"); 1218 byteData = null; 1219 break; 1220 } 1221 } 1222 else if (dtype.isFloat()) { 1223 log.trace("convertMemberByte(): converting float array to byte array"); 1224 1225 if (dtype.getDatatypeSize() == 16) 1226 byteData = ((H5Datatype)dtype).bigDecimalToByte((BigDecimal[])theObj, 0); 1227 else if (dtype.getDatatypeSize() == 8) 1228 byteData = HDFNativeData.doubleToByte(0, 1, (double[])theObj); 1229 else 1230 byteData = HDFNativeData.floatToByte(0, 1, (float[])theObj); 1231 } 1232 else if (((H5Datatype)dtype).isRegRef() || ((H5Datatype)dtype).isRefObj()) { 1233 log.trace("convertMemberByte(): reference type - converting long array to byte array"); 1234 1235 byteData = HDFNativeData.longToByte(0, 1, (long[])theObj); 1236 } 1237 else if (dtype.isArray()) { 1238 Datatype baseType = dtype.getDatatypeBase(); 1239 1240 /* 1241 * Retrieve the real base datatype in the case of ARRAY of ARRAY datatypes. 1242 */ 1243 while (baseType.isArray()) 1244 baseType = baseType.getDatatypeBase(); 1245 1246 /* 1247 * Optimize for the common cases of Arrays. 
1248 */ 1249 switch (baseType.getDatatypeClass()) { 1250 case Datatype.CLASS_INTEGER: 1251 case Datatype.CLASS_FLOAT: 1252 case Datatype.CLASS_CHAR: 1253 case Datatype.CLASS_STRING: 1254 case Datatype.CLASS_BITFIELD: 1255 case Datatype.CLASS_OPAQUE: 1256 case Datatype.CLASS_COMPOUND: 1257 case Datatype.CLASS_REFERENCE: 1258 case Datatype.CLASS_ENUM: 1259 case Datatype.CLASS_VLEN: 1260 case Datatype.CLASS_TIME: 1261 byteData = convertMemberByte(baseType, theObj); 1262 break; 1263 1264 case Datatype.CLASS_ARRAY: { 1265 Datatype arrayType = dtype.getDatatypeBase(); 1266 1267 long[] arrayDims = dtype.getArrayDims(); 1268 int arrSize = 1; 1269 for (int i = 0; i < arrayDims.length; i++) { 1270 arrSize *= arrayDims[i]; 1271 } 1272 1273 byteData = new byte[arrSize * (int)arrayType.getDatatypeSize()]; 1274 1275 for (int i = 0; i < arrSize; i++) { 1276 byte[] indexedBytes = convertMemberByte(arrayType, ((Object[])theObj)[i]); 1277 try { 1278 System.arraycopy(indexedBytes, 0, byteData, (int)(i * arrayType.getDatatypeSize()), 1279 (int)arrayType.getDatatypeSize()); 1280 } 1281 catch (Exception err) { 1282 log.trace("convertMemberByte(): arraycopy failure: ", err); 1283 } 1284 } 1285 1286 break; 1287 } 1288 1289 case Datatype.CLASS_NO_CLASS: 1290 default: 1291 log.debug("convertMemberByte(): invalid datatype class"); 1292 byteData = null; 1293 } 1294 } 1295 else if (dtype.isCompound()) { 1296 /* 1297 * TODO: still valid after reading change? 1298 */ 1299 byteData = convertCompoundMemberBytes(dtype, (List<Object>)theObj); 1300 } 1301 else { 1302 log.debug("convertMemberByte(): no change as byte[]"); 1303 byteData = (byte[])theObj; 1304 } 1305 1306 return byteData; 1307 } 1308 1309 /** 1310 * Given an array of objects representing a compound Datatype, converts each of 1311 * its members into bytes and returns the results. 
1312 * 1313 * @param dtype 1314 * The compound datatype to convert 1315 * @param theObj 1316 * The object array representing the data of the compound Datatype 1317 * @return The converted bytes of the objects 1318 */ 1319 private byte[] convertCompoundMemberBytes(final Datatype dtype, List<Object> theObj) 1320 { 1321 List<Datatype> allSelectedTypes = Arrays.asList(this.getSelectedMemberTypes()); 1322 List<Datatype> localTypes = new ArrayList<>(dtype.getCompoundMemberTypes()); 1323 Iterator<Datatype> localIt = localTypes.iterator(); 1324 while (localIt.hasNext()) { 1325 Datatype curType = localIt.next(); 1326 1327 if (curType.isCompound()) 1328 continue; 1329 1330 if (!allSelectedTypes.contains(curType)) 1331 localIt.remove(); 1332 } 1333 1334 byte[] byteData = new byte[(int)dtype.getDatatypeSize()]; 1335 for (int i = 0, index = 0; i < localTypes.size(); i++) { 1336 Datatype curType = localTypes.get(i); 1337 byte[] indexedBytes = null; 1338 if (curType.isCompound()) 1339 indexedBytes = convertCompoundMemberBytes(curType, (List<Object>)theObj.get(i)); 1340 else 1341 indexedBytes = convertMemberByte(curType, theObj.get(i)); 1342 1343 try { 1344 System.arraycopy(indexedBytes, 0, byteData, index + (int)curType.getDatatypeSize(), 1345 (int)curType.getDatatypeSize()); 1346 } 1347 catch (Exception err) { 1348 log.trace("convertCompoundMemberBytes(): arraycopy failure: ", err); 1349 } 1350 index += curType.getDatatypeSize(); 1351 } 1352 1353 return byteData; 1354 } 1355 1356 /* 1357 * Private routine to convert a single field of a compound datatype. 1358 */ 1359 private Object writeSingleCompoundMember(final H5Datatype memberType, Object theData) throws Exception 1360 { 1361 /* 1362 * Perform any necessary data conversions before writing the data. 
1363 */ 1364 Object tmpData = theData; 1365 try { 1366 if (memberType.isUnsigned()) { 1367 // Check if we need to convert unsigned integer data from Java-style 1368 // to C-style integers 1369 long tsize = memberType.getDatatypeSize(); 1370 String cname = theData.getClass().getName(); 1371 char dname = cname.charAt(cname.lastIndexOf('[') + 1); 1372 boolean doIntConversion = 1373 (((tsize == 1) && (dname == 'S')) || ((tsize == 2) && (dname == 'I')) || 1374 ((tsize == 4) && (dname == 'J'))); 1375 1376 if (doIntConversion) { 1377 log.trace( 1378 "writeSingleCompoundMember(): converting integer data to unsigned C-type integers"); 1379 tmpData = convertToUnsignedC(theData, null); 1380 } 1381 } 1382 else if (memberType.isString() && (Array.get(theData, 0) instanceof String)) { 1383 log.trace("writeSingleCompoundMember(): converting string array to byte array"); 1384 tmpData = stringToByte((String[])theData, (int)memberType.getDatatypeSize()); 1385 } 1386 else if (memberType.isEnum() && (Array.get(theData, 0) instanceof String)) { 1387 log.trace("writeSingleCompoundMember(): converting enum names to values"); 1388 tmpData = memberType.convertEnumNameToValue((String[])theData); 1389 } 1390 } 1391 catch (Exception ex) { 1392 log.debug("writeSingleCompoundMember(): data conversion failure: ", ex); 1393 tmpData = null; 1394 } 1395 1396 if (tmpData == null) { 1397 log.debug("writeSingleCompoundMember(): data is null"); 1398 } 1399 1400 return tmpData; 1401 } 1402 1403 /** 1404 * Converts the data values of this data object to appropriate Java integers if 1405 * they are unsigned integers. 1406 * 1407 * @see hdf.object.Dataset#convertToUnsignedC(Object) 1408 * @see hdf.object.Dataset#convertFromUnsignedC(Object, Object) 1409 * 1410 * @return the converted data buffer. 
1411 */ 1412 @Override 1413 public Object convertFromUnsignedC() 1414 { 1415 throw new UnsupportedOperationException("H5CompoundDS:convertFromUnsignedC Unsupported operation."); 1416 } 1417 1418 /** 1419 * Converts Java integer data values of this data object back to unsigned C-type 1420 * integer data if they are unsigned integers. 1421 * 1422 * @see hdf.object.Dataset#convertToUnsignedC(Object) 1423 * @see hdf.object.Dataset#convertToUnsignedC(Object, Object) 1424 * 1425 * @return the converted data buffer. 1426 */ 1427 @Override 1428 public Object convertToUnsignedC() 1429 { 1430 throw new UnsupportedOperationException("H5CompoundDS:convertToUnsignedC Unsupported operation."); 1431 } 1432 1433 /* Implement interface Attribute */ 1434 1435 /** 1436 * Returns the HObject to which this Attribute is currently "attached". 1437 * 1438 * @return the HObject to which this Attribute is currently "attached". 1439 */ 1440 @Override 1441 public HObject getParentObject() 1442 { 1443 return parentObject; 1444 } 1445 1446 /** 1447 * Sets the HObject to which this Attribute is "attached". 1448 * 1449 * @param pObj 1450 * the new HObject to which this Attribute is "attached". 1451 */ 1452 @Override 1453 public void setParentObject(HObject pObj) 1454 { 1455 parentObject = pObj; 1456 } 1457 1458 /** 1459 * set a property for the attribute. 1460 * 1461 * @param key the attribute Map key 1462 * @param value the attribute Map value 1463 */ 1464 @Override 1465 public void setProperty(String key, Object value) 1466 { 1467 properties.put(key, value); 1468 } 1469 1470 /** 1471 * get a property for a given key. 1472 * 1473 * @param key the attribute Map key 1474 * 1475 * @return the property 1476 */ 1477 @Override 1478 public Object getProperty(String key) 1479 { 1480 return properties.get(key); 1481 } 1482 1483 /** 1484 * get all property keys. 
1485 * 1486 * @return the Collection of property keys 1487 */ 1488 @Override 1489 public Collection<String> getPropertyKeys() 1490 { 1491 return properties.keySet(); 1492 } 1493 1494 /** 1495 * Returns the name of the object. For example, "Raster Image #2". 1496 * 1497 * @return The name of the object. 1498 */ 1499 @Override 1500 public final String getAttributeName() 1501 { 1502 return getName(); 1503 } 1504 1505 /** 1506 * Retrieves the attribute data from the file. 1507 * 1508 * @return the attribute data. 1509 * 1510 * @throws Exception 1511 * if the data can not be retrieved 1512 */ 1513 @Override 1514 public final Object getAttributeData() throws Exception, OutOfMemoryError 1515 { 1516 return getData(); 1517 } 1518 1519 /** 1520 * Returns the datatype of the attribute. 1521 * 1522 * @return the datatype of the attribute. 1523 */ 1524 @Override 1525 public final Datatype getAttributeDatatype() 1526 { 1527 return getDatatype(); 1528 } 1529 1530 /** 1531 * Returns the space type for the attribute. It returns a 1532 * negative number if it failed to retrieve the type information from 1533 * the file. 1534 * 1535 * @return the space type for the attribute. 1536 */ 1537 @Override 1538 public final int getAttributeSpaceType() 1539 { 1540 return getSpaceType(); 1541 } 1542 1543 /** 1544 * Returns the rank (number of dimensions) of the attribute. It returns a 1545 * negative number if it failed to retrieve the dimension information from 1546 * the file. 1547 * 1548 * @return the number of dimensions of the attribute. 1549 */ 1550 @Override 1551 public final int getAttributeRank() 1552 { 1553 return getRank(); 1554 } 1555 1556 /** 1557 * Returns the selected size of the rows and columns of the attribute. It returns a 1558 * negative number if it failed to retrieve the size information from 1559 * the file. 1560 * 1561 * @return the selected size of the rows and colums of the attribute. 
1562 */ 1563 @Override 1564 public final int getAttributePlane() 1565 { 1566 return (int)getWidth() * (int)getHeight(); 1567 } 1568 1569 /** 1570 * Returns the array that contains the dimension sizes of the data value of 1571 * the attribute. It returns null if it failed to retrieve the dimension 1572 * information from the file. 1573 * 1574 * @return the dimension sizes of the attribute. 1575 */ 1576 @Override 1577 public final long[] getAttributeDims() 1578 { 1579 return getDims(); 1580 } 1581 1582 /** 1583 * @return true if the dataspace is a NULL; otherwise, returns false. 1584 */ 1585 @Override 1586 public boolean isAttributeNULL() 1587 { 1588 return isNULL(); 1589 } 1590 1591 /** 1592 * @return true if the data is a single scalar point; otherwise, returns false. 1593 */ 1594 @Override 1595 public boolean isAttributeScalar() 1596 { 1597 return isScalar(); 1598 } 1599 1600 /** 1601 * Not for public use in the future. 1602 * 1603 * setData() is not safe to use because it changes memory buffer 1604 * of the dataset object. Dataset operations such as write/read 1605 * will fail if the buffer type or size is changed. 1606 * 1607 * @param d the object data -must be an array of Objects 1608 */ 1609 @Override 1610 public void setAttributeData(Object d) 1611 { 1612 setData(d); 1613 } 1614 1615 /** 1616 * Writes the memory buffer of this dataset to file. 1617 * 1618 * @throws Exception if buffer can not be written 1619 */ 1620 @Override 1621 public void writeAttribute() throws Exception 1622 { 1623 write(); 1624 } 1625 1626 /** 1627 * Writes the given data buffer into this attribute in a file. 1628 * 1629 * The data buffer is a vector that contains the data values of compound fields. The data is written 1630 * into file as one data blob. 1631 * 1632 * @param buf 1633 * The vector that contains the data values of compound fields. 1634 * 1635 * @throws Exception 1636 * If there is an error at the library level. 
1637 */ 1638 @Override 1639 public void writeAttribute(Object buf) throws Exception 1640 { 1641 write(buf); 1642 } 1643 1644 /** 1645 * Returns a string representation of the data value. For 1646 * example, "0, 255". 1647 * 1648 * For a compound datatype, it will be a 1D array of strings with field 1649 * members separated by the delimiter. For example, 1650 * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int, 1651 * float} of three data points. 1652 * 1653 * @param delimiter 1654 * The delimiter used to separate individual data points. It 1655 * can be a comma, semicolon, tab or space. For example, 1656 * toString(",") will separate data by commas. 1657 * 1658 * @return the string representation of the data values. 1659 */ 1660 @Override 1661 public String toAttributeString(String delimiter) 1662 { 1663 return toString(delimiter, -1); 1664 } 1665 1666 /** 1667 * Returns a string representation of the data value. For 1668 * example, "0, 255". 1669 * 1670 * For a compound datatype, it will be a 1D array of strings with field 1671 * members separated by the delimiter. For example, 1672 * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int, 1673 * float} of three data points. 1674 * 1675 * @param delimiter 1676 * The delimiter used to separate individual data points. It 1677 * can be a comma, semicolon, tab or space. For example, 1678 * toString(",") will separate data by commas. 1679 * @param maxItems 1680 * The maximum number of Array values to return 1681 * 1682 * @return the string representation of the data values. 
     */
    @Override
    public String toAttributeString(String delimiter, int maxItems)
    {
        Object theData = originalBuf;
        if (theData == null) {
            log.debug("toAttributeString: value is null");
            return null;
        }

        // attribute value is an array
        StringBuilder sb = new StringBuilder();
        // originalBuf is an ArrayList with one entry per compound field (as built
        // by compoundTypeIO in AttributeCommonIO).
        int numberTypes = ((ArrayList<Object[]>)theData).size();
        log.trace("toAttributeString: numberTypes={}", numberTypes);
        List<Datatype> cmpdTypes = getDatatype().getCompoundMemberTypes();

        // NOTE(review): when maxItems <= 0 (e.g. toAttributeString(delimiter) passes -1)
        // the loop guard fails immediately and an empty string is returned — confirm
        // this is the intended behavior.
        int loopcnt = 0;
        while (loopcnt < maxItems) {
            if (loopcnt > 0)
                sb.append(delimiter);
            sb.append("{");
            for (int dv = 0; dv < numberTypes; dv++) {
                if (dv > 0)
                    sb.append(delimiter);

                Object theobj = ((ArrayList<Object[]>)theData).get(dv);
                Class<? extends Object> valClass = theobj.getClass();
                log.trace("toAttributeString:valClass={}", valClass);
                int n = 0;
                Datatype dtype = cmpdTypes.get(dv);
                // value is an array
                if (valClass.isArray()) {
                    n = Array.getLength(theobj);
                    // references are packed as raw bytes: length / ref size = element count
                    if (dtype.isRef())
                        n /= (int)dtype.getDatatypeSize();
                }
                else
                    n = ((ArrayList<Object[]>)theobj).size();
                // if ((maxItems > 0) && (n + loopcnt > maxItems))
                // n = maxItems - loopcnt;
                log.trace("toAttributeString:[{}] theobj={} size={}", dv, theobj, n);
                String sobj = toString(theobj, dtype, delimiter, n);
                sb.append(sobj);
                loopcnt += n;
                if (loopcnt >= maxItems)
                    break;
            } // end for (int dv = 0; dv < numberTypes; dv++)
            sb.append("}");
            break;
        } // end while (loopcnt < maxItems) -- single pass; 'break' always exits
        // after one iteration

        return sb.toString();
    }

    /**
     * Renders one compound field's data as a delimited string, dispatching on the
     * field's datatype: standard references delegate to H5ReferenceType, vlen and
     * reference types are resolved to object/region names via the HDF5 library,
     * everything else falls back to the superclass formatting.
     */
    @Override
    protected String toString(Object theData, Datatype theType, String delimiter, int count)
    {
        log.trace("toString: is_enum={} is_unsigned={} count={}", theType.isEnum(), theType.isUnsigned(),
                  count);
        StringBuilder sb = new StringBuilder();
        Class<? extends Object> valClass = theData.getClass();
        log.trace("toString:valClass={}", valClass);

        H5Datatype dtype = (H5Datatype)theType;
        log.trace("toString: count={} isStdRef={}", count, dtype.isStdRef());
        if (dtype.isStdRef()) {
            // Standard references carry their own string formatting.
            return ((H5ReferenceType)dtype).toString(delimiter, count);
        }
        else if (dtype.isVLEN() && !dtype.isVarStr()) {
            log.trace("toString: vlen");
            String strValue;

            for (int k = 0; k < count; k++) {
                Object value = Array.get(theData, k);
                if (value == null)
                    strValue = "null";
                else {
                    if (dtype.getDatatypeBase().isRef()) {
                        // Each vlen element is a list of raw reference byte blobs.
                        ArrayList<byte[]> ref_value = (ArrayList<byte[]>)value;
                        log.trace("toString: vlen value={}", ref_value);
                        strValue = "{";
                        for (int m = 0; m < ref_value.size(); m++) {
                            byte[] curBytes = ref_value.get(m);
                            if (m > 0)
                                strValue += ", ";
                            if (H5ReferenceType.zeroArrayCheck(curBytes))
                                strValue += "NULL";
                            else {
                                if (((H5Datatype)dtype.getDatatypeBase()).isStdRef()) {
                                    strValue += H5.H5Rget_obj_name(curBytes, HDF5Constants.H5P_DEFAULT);
                                }
                                else if (dtype.getDatatypeBase().getDatatypeSize() ==
                                         HDF5Constants.H5R_DSET_REG_REF_BUF_SIZE) {
                                    try {
                                        strValue += H5Datatype.descRegionDataset(
                                            parentObject.getFileFormat().getFID(), curBytes);
                                    }
                                    catch (Exception ex) {
                                        ex.printStackTrace();
                                    }
                                }
                                else if (dtype.getDatatypeBase().getDatatypeSize() ==
                                         HDF5Constants.H5R_OBJ_REF_BUF_SIZE) {
                                    try {
                                        strValue += H5Datatype.descReferenceObject(
                                            parentObject.getFileFormat().getFID(), curBytes);
                                    }
                                    catch (Exception ex) {
                                        ex.printStackTrace();
                                    }
                                }
                            }
                        }
                        strValue += "}";
                    }
                    else
                        strValue = value.toString();
                }
                if (k > 0)
                    sb.append(", ");
                sb.append(strValue);
            }
        }
        else if (dtype.isRef()) {
            log.trace("toString: ref");
            int dtypesize = (int)dtype.getDatatypeSize();
            String strValue = "NULL";
            byte[] rElements = null;

            for (int k = 0; k < count; k++) {
                // need to iterate if type is ArrayList
                if (theData instanceof ArrayList)
                    rElements = (byte[])((ArrayList)theData).get(k);
                else
                    rElements = (byte[])theData;

                if (H5ReferenceType.zeroArrayCheck(rElements))
                    strValue = "NULL";
                else {
                    if (dtype.isStdRef()) {
                        // NOTE(review): '+=' appends to the previous value ("NULL"
                        // initially / last element's text afterwards), unlike the '='
                        // assignments in the branches below — possibly intended to be
                        // '='; confirm before changing.
                        strValue += H5.H5Rget_obj_name(rElements, HDF5Constants.H5P_DEFAULT);
                    }
                    else if (dtypesize == HDF5Constants.H5R_DSET_REG_REF_BUF_SIZE) {
                        try {
                            strValue = H5Datatype.descRegionDataset(parentObject.getFileFormat().getFID(),
                                                                    rElements);
                        }
                        catch (Exception ex) {
                            ex.printStackTrace();
                        }
                    }
                    else if (dtypesize == HDF5Constants.H5R_OBJ_REF_BUF_SIZE) {
                        try {
                            strValue = H5Datatype.descReferenceObject(parentObject.getFileFormat().getFID(),
                                                                      rElements);
                        }
                        catch (Exception ex) {
                            ex.printStackTrace();
                        }
                    }
                }
                if (k > 0)
                    sb.append(", ");
                sb.append(strValue);
            }
        }
        else {
            return super.toString(theData, theType, delimiter, count);
        }

        return sb.toString();
    }

    /* Implement interface H5Attribute */

    /**
     * The general read and write attribute operations for hdf5 object data.
     *
     * @param attr_id
     *            the attribute to access
     * @param ioType
     *            the type of IO operation
     * @param objBuf
     *            the data buffer to use for write operation
     *
     * @return the attribute data
     *
     * @throws Exception
     *             if the data can not be retrieved
     */
    @Override
    public Object AttributeCommonIO(long attr_id, H5File.IO_TYPE ioType, Object objBuf) throws Exception
    {
        H5Datatype dsDatatype = (H5Datatype)getDatatype();
        Object theData = null;

        long dt_size = dsDatatype.getDatatypeSize();
        log.trace("AttributeCommonIO(): create native");
        long tid = dsDatatype.createNative();

        if (ioType == H5File.IO_TYPE.READ) {
            log.trace("AttributeCommonIO():read ioType isNamed={} isEnum={} isText={} isRefObj={}",
                      dsDatatype.isNamed(), dsDatatype.isEnum(), dsDatatype.isText(), dsDatatype.isRefObj());

            // Total number of data points = product of all dimension sizes.
            long lsize = 1;
            for (int j = 0; j < dims.length; j++)
                lsize *= dims[j];
            log.trace("AttributeCommonIO():read ioType dt_size={} lsize={}", dt_size, lsize);

            try {
                // Read the whole attribute as one raw byte blob, then unpack it
                // field-by-field into per-member arrays.
                Object attr_data = new byte[(int)(dt_size * lsize)];

                try {
                    H5.H5Aread(attr_id, tid, attr_data);
                }
                catch (Exception ex) {
                    // Best effort: a failed H5Aread leaves attr_data zero-filled and
                    // unpacking proceeds on it.
                    log.debug("AttributeCommonIO(): H5Aread failure: ", ex);
                }
                theData = compoundTypeIO(dsDatatype, (int)lsize, dsDatatype, attr_data, new int[] {0});
            }
            catch (Exception ex) {
                log.debug("AttributeCommonIO():read ioType read failure: ", ex);
                throw new Exception(ex.getMessage(), ex);
            }
            finally {
                // Always release the native datatype id created above.
                dsDatatype.close(tid);
            }
            // NOTE(review): compoundTypeIO can return null on failure, which would
            // NPE in this trace loop — confirm whether that path is reachable here.
            for (int i = 0; i < ((ArrayList<Object[]>)theData).size(); i++) {
                Object theobj = ((ArrayList<Object[]>)theData).get(i);
                log.trace("AttributeCommonIO():read ioType data: {}", theobj);
            }
            originalBuf  = theData;
            isDataLoaded = true;
        } // H5File.IO_TYPE.READ
        else {
            // Pack the per-field lists in objBuf into one raw byte blob and write it.
            theData = compoundTypeWriteIO(dsDatatype, dsDatatype, objBuf, new int[] {0});
            try {
                H5.H5Awrite(attr_id, tid, theData);
            }
            catch (Exception ex) {
                log.debug("AttributeCommonIO(): H5Awrite failure: ", ex);
            }
            finally {
                dsDatatype.close(tid);
            }
        } // H5File.IO_TYPE.WRITE

        return theData;
    }

    /**
     * Read a subset of an attribute for hdf5 object data.
     *
     * @return the selected attribute data
     *
     * @throws Exception
     *             if the data can not be retrieved
     */
    @Override
    public Object AttributeSelection() throws Exception
    {
        // No sub-selection is performed for compound attributes; the previously
        // loaded buffer is returned as-is.
        return originalBuf;
    }
}