001/***************************************************************************** 002 * Copyright by The HDF Group. * 003 * Copyright by the Board of Trustees of the University of Illinois. * 004 * All rights reserved. * 005 * * 006 * This file is part of the HDF Java Products distribution. * 007 * The full copyright notice, including terms governing use, modification, * 008 * and redistribution, is contained in the COPYING file, which can be found * 009 * at the root of the source code distribution tree, * 010 * or in https://www.hdfgroup.org/licenses. * 011 * If you do not have access to either file, you may request a copy from * 012 * help@hdfgroup.org. * 013 ****************************************************************************/ 014 015package hdf.object.h5; 016 017import java.lang.reflect.Array; 018import java.math.BigDecimal; 019import java.math.BigInteger; 020import java.text.DecimalFormat; 021 022import java.util.ArrayList; 023import java.util.Arrays; 024import java.util.Collection; 025import java.util.HashMap; 026import java.util.Iterator; 027import java.util.List; 028import java.util.Map; 029import java.util.Vector; 030 031import org.slf4j.Logger; 032import org.slf4j.LoggerFactory; 033 034import hdf.hdf5lib.H5; 035import hdf.hdf5lib.HDF5Constants; 036import hdf.hdf5lib.HDFNativeData; 037import hdf.hdf5lib.exceptions.HDF5DataFiltersException; 038import hdf.hdf5lib.exceptions.HDF5Exception; 039 040import hdf.object.Attribute; 041import hdf.object.CompoundDS; 042import hdf.object.Dataset; 043import hdf.object.Datatype; 044import hdf.object.FileFormat; 045import hdf.object.Group; 046import hdf.object.HObject; 047import hdf.object.MetaDataContainer; 048import hdf.object.Utils; 049 050import hdf.object.h5.H5Datatype; 051import hdf.object.h5.H5ReferenceType; 052 053/** 054 * The H5CompoundAttr class defines an HDF5 attribute of compound datatypes. 
055 * 056 * An attribute is a (name, value) pair of metadata attached to a primary data object such as a dataset, group or named 057 * datatype. 058 * 059 * Like a dataset, an attribute has a name, datatype and dataspace. 060 * 061 * A HDF5 compound datatype is similar to a struct in C or a common block in Fortran: it is a collection of one or more 062 * atomic types or small arrays of such types. Each member of a compound type has a name which is unique within that 063 * type, and a byte offset that determines the first byte (smallest byte address) of that member in a compound datum. 064 * 065 * For more information on HDF5 attributes and datatypes, read the 066 * <a href="https://hdfgroup.github.io/hdf5/_h5_a__u_g.html#sec_attribute">HDF5 Attributes in HDF5 User Guide</a> 067 * 068 * There are two basic types of compound attributes: simple compound data and nested compound data. Members of a simple 069 * compound attribute have atomic datatypes. Members of a nested compound attribute are compound or array of compound 070 * data. 071 * 072 * Since Java does not understand C structures, we cannot directly read/write compound data values as in the following C 073 * example. 074 * 075 * <pre> 076 * typedef struct s1_t { 077 * int a; 078 * float b; 079 * double c; 080 * } s1_t; 081 * s1_t s1[LENGTH]; 082 * ... 083 * H5Dwrite(..., s1); 084 * H5Dread(..., s1); 085 * </pre> 086 * 087 * Values of compound data fields are stored in java.util.Vector object. We read and write compound data by fields 088 * instead of compound structure. As for the example above, the java.util.Vector object has three elements: int[LENGTH], 089 * float[LENGTH] and double[LENGTH]. Since Java understands the primitive datatypes of int, float and double, we will be 090 * able to read/write the compound data by field. 
 *
 * @version 1.0 6/15/2021
 * @author Allen Byrne
 */
public class H5CompoundAttr extends CompoundDS implements H5Attribute
{
    private static final long serialVersionUID = 2072473407027648309L;

    private static final Logger log = LoggerFactory.getLogger(H5CompoundAttr.class);

    /** The HObject to which this H5CompoundAttr is attached, Attribute interface */
    protected HObject parentObject;

    /** additional information and properties for the attribute, Attribute interface */
    private transient Map<String, Object> properties;

    /**
     * Create an attribute with specified name, data type and dimension sizes.
     *
     * @param parentObj
     *            the HObject to which this H5CompoundAttr is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for scalar attribute
     *
     * @see hdf.object.Datatype
     */
    public H5CompoundAttr(HObject parentObj, String attrName, Datatype attrType, long[] attrDims) {
        this(parentObj, attrName, attrType, attrDims, null);
    }

    /**
     * Create an attribute with specific name and value.
     *
     * @param parentObj
     *            the HObject to which this H5CompoundAttr is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for scalar attribute
     * @param attrValue
     *            the value of the attribute, null if no value
     *
     * @see hdf.object.Datatype
     */
    @SuppressWarnings({ "rawtypes", "unchecked", "deprecation" })
    public H5CompoundAttr(HObject parentObj, String attrName, Datatype attrType, long[] attrDims, Object attrValue) {
        // Parent may be null for a detached attribute; guard the super() arguments accordingly.
        super((parentObj == null) ? null : parentObj.getFileFormat(), attrName,
                (parentObj == null) ? null : parentObj.getFullName(), null);

        log.trace("H5CompoundAttr: start {}", parentObj);
        this.parentObject = parentObj;

        datatype = attrType;

        // If a value is supplied, the attribute is considered loaded up-front
        // and read() will not be triggered by getData().
        if (attrValue != null) {
            data = attrValue;
            originalBuf = attrValue;
            isDataLoaded = true;
        }
        properties = new HashMap(); // raw type retained; suppressed via @SuppressWarnings above

        // Null dims means a scalar attribute: model it as rank-1 with a single element.
        if (attrDims == null) {
            rank = 1;
            dims = new long[] { 1 };
            isScalar = true;
        }
        else {
            dims = attrDims;
            rank = dims.length;
            isScalar = false;
        }

        selectedDims = new long[rank];
        startDims = new long[rank];
        selectedStride = new long[rank];

        // Compound member metadata is populated lazily by init(), not here.
        numberOfMembers = 0;
        memberNames = null;
        isMemberSelected = null;
        memberTypes = null;

        // NOTE(review): attrType.getDescription() will NPE if attrType is null — callers
        // appear to always pass a datatype; confirm before relying on null here.
        log.trace("attrName={}, attrType={}, attrValue={}, rank={}",
                attrName, attrType.getDescription(), data, rank);

        resetSelection();
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#open()
     */
    @Override
    public long open() {
        if (parentObject == null) {
            log.debug("open(): attribute's parent object is null");
            return HDF5Constants.H5I_INVALID_HID;
        }

        long aid = HDF5Constants.H5I_INVALID_HID;
        long pObjID = HDF5Constants.H5I_INVALID_HID;

        try {
            // The attribute is opened through its parent object's handle.
            pObjID = parentObject.open();
            if (pObjID >= 0) {
                if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5))) {
                    log.trace("open(): FILE_TYPE_HDF5");
                    // Check existence first to avoid an exception from H5Aopen.
                    if (H5.H5Aexists(pObjID, getName()))
                        aid = H5.H5Aopen(pObjID, getName(), HDF5Constants.H5P_DEFAULT);
                }
            }

            log.trace("open(): aid={}", aid);
        }
        catch (Exception ex) {
            log.debug("open(): Failed to open attribute {}: ", getName(), ex);
            aid = HDF5Constants.H5I_INVALID_HID;
        }
        finally {
            // Always release the parent handle; the caller owns only aid.
            parentObject.close(pObjID);
        }

        return aid;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#close(int)
     */
    @Override
    public void close(long aid) {
        if (aid >= 0) {
            if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5))) {
                log.trace("close(): FILE_TYPE_HDF5");
                try {
                    H5.H5Aclose(aid);
                }
                catch (HDF5Exception ex) {
                    log.debug("close(): H5Aclose({}) failure: ", aid, ex);
                }
            }
        }
    }


    /**
     * Retrieves datatype and dataspace information from file and sets the attribute
     * in memory.
     *
     * The init() is designed to support lazy operation in a attribute object. When a
     * data object is retrieved from file, the datatype, dataspace and raw data are
     * not loaded into memory. When it is asked to read the raw data from file,
     * init() is first called to get the datatype and dataspace information, then
     * load the raw data from file.
     *
     * init() is also used to reset the selection of a attribute (start, stride and
     * count) to the default, which is the entire attribute for 1D or 2D datasets. In
     * the following example, init() at step 1) retrieves datatype and dataspace
     * information from file. getData() at step 3) reads only one data point. init()
     * at step 4) resets the selection to the whole attribute. getData() at step 4)
     * reads the values of whole attribute into memory.
     *
     * <pre>
     * dset = (Dataset) file.get(NAME_DATASET);
     *
     * // 1) get datatype and dataspace information from file
     * attr.init();
     * rank = attr.getAttributeRank(); // rank = 2, a 2D attribute
     * count = attr.getSelectedDims();
     * start = attr.getStartDims();
     * dims = attr.getAttributeDims();
     *
     * // 2) select only one data point
     * for (int i = 0; i &lt; rank; i++) {
     *     start[0] = 0;
     *     count[i] = 1;
     * }
     *
     * // 3) read one data point
     * data = attr.getAttributeData();
     *
     * // 4) reset selection to the whole attribute
     * attr.init();
     *
     * // 5) clean the memory data buffer
     * attr.clearData();
     *
     * // 6) Read the whole attribute
     * data = attr.getAttributeData();
     * </pre>
     */
    @Override
    public void init() {
        // Fast path: already initialized, only reset the selection.
        if (inited) {
            resetSelection();
            log.trace("init(): H5CompoundAttr already inited");
            return;
        }

        long aid = HDF5Constants.H5I_INVALID_HID;
        long tid = HDF5Constants.H5I_INVALID_HID;
        long sid = HDF5Constants.H5I_INVALID_HID;
        int tclass = HDF5Constants.H5I_INVALID_HID;
        flatNameList = new Vector<>();
        flatTypeList = new Vector<>();
        long[] memberTIDs = null;

        log.trace("init(): FILE_TYPE_HDF5");
        aid = open();
        if (aid >= 0) {
            try {
                // Dataspace: rank and space type (NULL spaces carry no data).
                sid = H5.H5Aget_space(aid);
                rank = H5.H5Sget_simple_extent_ndims(sid);
                space_type = H5.H5Sget_simple_extent_type(sid);
                if (space_type == HDF5Constants.H5S_NULL)
                    isNULL = true;
                else
                    isNULL = false;
                tid = H5.H5Aget_type(aid);
                tclass = H5.H5Tget_class(tid);
                log.trace("init(): tid={} sid={} rank={} space_type={}", tid, sid, rank, space_type);

                long tmptid = 0;

                // Handle ARRAY and VLEN types by getting the base type
                if (tclass == HDF5Constants.H5T_ARRAY || tclass == HDF5Constants.H5T_VLEN) {
                    try {
                        // Replace tid with its base type; the old handle is closed in finally.
                        tmptid = tid;
                        tid = H5.H5Tget_super(tmptid);
                        log.trace("init(): H5T_ARRAY or H5T_VLEN class old={}, new={}", tmptid, tid);
                    }
                    catch (Exception ex) {
                        log.debug("init(): H5T_ARRAY or H5T_VLEN H5Tget_super({}) failure: ", tmptid, ex);
                        tid = -1;
                    }
                    finally {
                        try {
                            H5.H5Tclose(tmptid);
                        }
                        catch (HDF5Exception ex) {
                            log.debug("init(): H5Tclose({}) failure: ", tmptid, ex);
                        }
                    }
                }

                if (tclass == HDF5Constants.H5T_COMPOUND) {
                    // initialize member information: flatten the (possibly nested)
                    // compound structure into parallel name/type lists.
                    H5Datatype.extractCompoundInfo((H5Datatype)datatype, "", flatNameList, flatTypeList);
                    numberOfMembers = flatNameList.size();
                    log.trace("init(): numberOfMembers={}", numberOfMembers);

                    memberNames = new String[numberOfMembers];
                    memberTIDs = new long[numberOfMembers];
                    memberTypes = new Datatype[numberOfMembers];
                    memberOrders = new int[numberOfMembers];
                    isMemberSelected = new boolean[numberOfMembers];
                    memberDims = new Object[numberOfMembers];

                    for (int i = 0; i < numberOfMembers; i++) {
                        isMemberSelected[i] = true; // all members selected by default
                        memberTIDs[i] = flatTypeList.get(i).createNative();

                        try {
                            memberTypes[i] = flatTypeList.get(i);
                        }
                        catch (Exception ex) {
                            log.debug("init(): failed to create datatype for member[{}]: ", i, ex);
                            memberTypes[i] = null;
                        }

                        memberNames[i] = flatNameList.get(i);
                        memberOrders[i] = 1;
                        memberDims[i] = null;
                        log.trace("init()[{}]: memberNames[{}]={}, memberTIDs[{}]={}, memberTypes[{}]={}", i, i,
                                memberNames[i], i, memberTIDs[i], i, memberTypes[i]);

                        // NOTE: tclass is intentionally reused here for the member's class;
                        // the attribute-level class was already consumed above.
                        try {
                            tclass = H5.H5Tget_class(memberTIDs[i]);
                        }
                        catch (HDF5Exception ex) {
                            log.debug("init(): H5Tget_class({}) failure: ", memberTIDs[i], ex);
                        }

                        if (tclass == HDF5Constants.H5T_ARRAY) {
                            // ARRAY member: record its dimensions and total element count (order).
                            int n = H5.H5Tget_array_ndims(memberTIDs[i]);
                            long mdim[] = new long[n];
                            H5.H5Tget_array_dims(memberTIDs[i], mdim);
                            int idim[] = new int[n];
                            for (int j = 0; j < n; j++)
                                idim[j] = (int) mdim[j];
                            memberDims[i] = idim;
                            tmptid = H5.H5Tget_super(memberTIDs[i]);
                            memberOrders[i] = (int) (H5.H5Tget_size(memberTIDs[i]) / H5.H5Tget_size(tmptid));
                            try {
                                H5.H5Tclose(tmptid);
                            }
                            catch (HDF5Exception ex) {
                                log.debug("init(): memberTIDs[{}] H5Tclose(tmptid {}) failure: ", i, tmptid, ex);
                            }
                        }
                    } // (int i=0; i<numberOfMembers; i++)
                }

                if (rank == 0) {
                    // a scalar data point
                    isScalar = true;
                    rank = 1;
                    dims = new long[] { 1 };
                    log.trace("init(): rank is a scalar data point");
                }
                else {
                    isScalar = false;
                    dims = new long[rank];
                    maxDims = new long[rank];
                    H5.H5Sget_simple_extent_dims(sid, dims, maxDims);
                    log.trace("init(): rank={}, dims={}, maxDims={}", rank, dims, maxDims);
                }

                inited = true;
            }
            catch (HDF5Exception ex) {
                // On failure, clear all member metadata so the object reads as "no members".
                numberOfMembers = 0;
                memberNames = null;
                memberTypes = null;
                memberOrders = null;
                log.debug("init(): ", ex);
            }
            finally {
                // Release every native handle acquired above, best-effort.
                try {
                    H5.H5Tclose(tid);
                }
                catch (HDF5Exception ex2) {
                    log.debug("init(): H5Tclose(tid {}) failure: ", tid, ex2);
                }
                try {
                    H5.H5Sclose(sid);
                }
                catch (HDF5Exception ex2) {
                    log.debug("init(): H5Sclose(sid {}) failure: ", sid, ex2);
                }

                if (memberTIDs != null) {
                    for (int i = 0; i < memberTIDs.length; i++) {
                        try {
                            H5.H5Tclose(memberTIDs[i]);
                        }
                        catch (Exception ex) {
                            log.debug("init(): H5Tclose(memberTIDs[{}] {}) failure: ", i, memberTIDs[i], ex);
                        }
                    }
                }
            }

            close(aid);

            startDims = new long[rank];
            selectedDims = new long[rank];

            resetSelection();
        }
        else {
            log.debug("init(): failed to open attribute");
        }
    }

    /**
     * Returns the datatype of the data object.
     *
     * @return the datatype of the data object.
467 */ 468 @Override 469 public Datatype getDatatype() { 470 if (!inited) 471 init(); 472 473 if (datatype == null) { 474 long aid = HDF5Constants.H5I_INVALID_HID; 475 long tid = HDF5Constants.H5I_INVALID_HID; 476 477 aid = open(); 478 if (aid >= 0) { 479 try { 480 tid = H5.H5Aget_type(aid); 481 int nativeClass = H5.H5Tget_class(tid); 482 if (nativeClass == HDF5Constants.H5T_REFERENCE) { 483 long lsize = 1; 484 long sid = H5.H5Aget_space(aid); 485 int rank = H5.H5Sget_simple_extent_ndims(sid); 486 if (rank > 0) { 487 long dims[] = new long[rank]; 488 H5.H5Sget_simple_extent_dims(sid, dims, null); 489 log.trace("getDatatype(): rank={}, dims={}", rank, dims); 490 for (int j = 0; j < dims.length; j++) { 491 lsize *= dims[j]; 492 } 493 } 494 datatype = new H5ReferenceType(getFileFormat(), lsize, tid); 495 } 496 else 497 datatype = new H5Datatype(getFileFormat(), tid); 498 } 499 catch (Exception ex) { 500 log.debug("getDatatype(): ", ex); 501 } 502 finally { 503 try { 504 H5.H5Tclose(tid); 505 } 506 catch (HDF5Exception ex) { 507 log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex); 508 } 509 try { 510 H5.H5Aclose(aid); 511 } 512 catch (HDF5Exception ex) { 513 log.debug("getDatatype(): H5Aclose(aid {}) failure: ", aid, ex); 514 } 515 } 516 } 517 } 518 519 return datatype; 520 } 521 522 /** 523 * Returns the data buffer of the attribute in memory. 524 * 525 * If data is already loaded into memory, returns the data; otherwise, calls 526 * read() to read data from file into a memory buffer and returns the memory 527 * buffer. 528 * 529 * The whole attribute is read into memory. Users can also select 530 * a subset from the whole data. Subsetting is done in an implicit way. 531 * 532 * <b>How to Select a Subset</b> 533 * 534 * A selection is specified by three arrays: start, stride and count. 
535 * <ol> 536 * <li>start: offset of a selection 537 * <li>stride: determines how many elements to move in each dimension 538 * <li>count: number of elements to select in each dimension 539 * </ol> 540 * getStartDims(), getStride() and getSelectedDims() returns the start, 541 * stride and count arrays respectively. Applications can make a selection 542 * by changing the values of the arrays. 543 * 544 * The following example shows how to make a subset. In the example, the 545 * attribute is a 4-dimensional array of [200][100][50][10], i.e. dims[0]=200; 546 * dims[1]=100; dims[2]=50; dims[3]=10; <br> 547 * We want to select every other data point in dims[1] and dims[2] 548 * 549 * <pre> 550 * int rank = attribute.getRank(); // number of dimensions of the attribute 551 * long[] dims = attribute.getDims(); // the dimension sizes of the attribute 552 * long[] selected = attribute.getSelectedDims(); // the selected size of the attribute 553 * long[] start = attribute.getStartDims(); // the offset of the selection 554 * long[] stride = attribute.getStride(); // the stride of the attribute 555 * int[] selectedIndex = attribute.getSelectedIndex(); // the selected dimensions for display 556 * 557 * // select dim1 and dim2 as 2D data for display,and slice through dim0 558 * selectedIndex[0] = 1; 559 * selectedIndex[1] = 2; 560 * selectedIndex[2] = 0; 561 * 562 * // reset the selection arrays 563 * for (int i = 0; i < rank; i++) { 564 * start[i] = 0; 565 * selected[i] = 1; 566 * stride[i] = 1; 567 * } 568 * 569 * // set stride to 2 on dim1 and dim2 so that every other data point is 570 * // selected. 571 * stride[1] = 2; 572 * stride[2] = 2; 573 * 574 * // set the selection size of dim1 and dim2 575 * selected[1] = dims[1] / stride[1]; 576 * selected[2] = dims[1] / stride[2]; 577 * 578 * // when H5CompoundAttr.getData() is called, the selection above will be used since 579 * // the dimension arrays are passed by reference. 
Changes of these arrays 580 * // outside the attribute object directly change the values of these array 581 * // in the attribute object. 582 * </pre> 583 * 584 * For H5CompoundAttr, the memory data object is an java.util.List object. Each 585 * element of the list is a data array that corresponds to a compound field. 586 * 587 * For example, if compound attribute "comp" has the following nested 588 * structure, and member datatypes 589 * 590 * <pre> 591 * comp --> m01 (int) 592 * comp --> m02 (float) 593 * comp --> nest1 --> m11 (char) 594 * comp --> nest1 --> m12 (String) 595 * comp --> nest1 --> nest2 --> m21 (long) 596 * comp --> nest1 --> nest2 --> m22 (double) 597 * </pre> 598 * 599 * getData() returns a list of six arrays: {int[], float[], char[], 600 * String[], long[] and double[]}. 601 * 602 * @return the memory buffer of the attribute. 603 * 604 * @throws Exception if object can not be read 605 * @throws OutOfMemoryError if memory is exhausted 606 */ 607 @Override 608 public Object getData() throws Exception, OutOfMemoryError { 609 log.trace("getData(): isDataLoaded={}", isDataLoaded); 610 if (!isDataLoaded) 611 data = read(); // load the data, attributes read all data 612 613 nPoints = 1; 614 log.trace("getData(): selectedDims length={}", selectedDims.length); 615 int point_len = selectedDims.length; 616 //Partial data for 3 or more dimensions 617 if (rank > 2) 618 point_len = 3; 619 for (int j = 0; j < point_len; j++) { 620 log.trace("getData(): selectedDims[{}]={}", j, selectedDims[j]); 621 nPoints *= selectedDims[j]; 622 } 623 log.trace("getData: read {}", nPoints); 624 625 // apply the selection for 3 or more dimensions 626 // selection only expects to use 3 selectedDims 627 // where selectedIndex[0] is the row dimension 628 // where selectedIndex[1] is the col dimension 629 // where selectedIndex[2] is the frame dimension 630 if (rank > 2) 631 data = AttributeSelection(); 632 633 return data; 634 } 635 636 /* 637 * (non-Javadoc) 638 * 639 * @see 
hdf.object.Attribute#readBytes() 640 */ 641 @Override 642 public byte[] readBytes() throws HDF5Exception { 643 byte[] theData = null; 644 645 if (!isInited()) 646 init(); 647 648 long aid = open(); 649 if (aid >= 0) { 650 long tid = HDF5Constants.H5I_INVALID_HID; 651 652 try { 653 long[] lsize = { 1 }; 654 for (int j = 0; j < selectedDims.length; j++) 655 lsize[0] *= selectedDims[j]; 656 657 tid = H5.H5Aget_type(aid); 658 long size = H5.H5Tget_size(tid) * lsize[0]; 659 log.trace("readBytes(): size={}", size); 660 661 if (size < Integer.MIN_VALUE || size > Integer.MAX_VALUE) 662 throw new Exception("Invalid int size"); 663 664 theData = new byte[(int)size]; 665 666 log.trace("readBytes(): read attribute id {} of size={}", tid, lsize); 667 H5.H5Aread(aid, tid, theData); 668 } 669 catch (Exception ex) { 670 log.debug("readBytes(): failed to read data: ", ex); 671 } 672 finally { 673 try { 674 H5.H5Tclose(tid); 675 } 676 catch (HDF5Exception ex2) { 677 log.debug("readBytes(): H5Tclose(tid {}) failure: ", tid, ex2); 678 } 679 close(aid); 680 } 681 } 682 683 return theData; 684 } 685 686 /** 687 * Reads the data from file. 688 * 689 * read() reads the data from file to a memory buffer and returns the memory 690 * buffer. The attribute object does not hold the memory buffer. To store the 691 * memory buffer in the attribute object, one must call getData(). 692 * 693 * By default, the whole attribute is read into memory. 694 * 695 * For CompoundAttr, the memory data object is an java.util.List object. Each 696 * element of the list is a data array that corresponds to a compound field. 
697 * 698 * For example, if compound dataset "comp" has the following nested 699 * structure, and member datatypes 700 * 701 * <pre> 702 * comp --> m01 (int) 703 * comp --> m02 (float) 704 * comp --> nest1 --> m11 (char) 705 * comp --> nest1 --> m12 (String) 706 * comp --> nest1 --> nest2 --> m21 (long) 707 * comp --> nest1 --> nest2 --> m22 (double) 708 * </pre> 709 * 710 * getData() returns a list of six arrays: {int[], float[], char[], 711 * String[], long[] and double[]}. 712 * 713 * @return the data read from file. 714 * 715 * @see #getData() 716 * @see hdf.object.DataFormat#read() 717 * 718 * @throws Exception 719 * if object can not be read 720 */ 721 @Override 722 public Object read() throws Exception { 723 Object readData = null; 724 725 if (!isInited()) 726 init(); 727 728 try { 729 readData = compoundAttributeCommonIO(H5File.IO_TYPE.READ, null); 730 } 731 catch (Exception ex) { 732 log.debug("read(): failed to read compound attribute: ", ex); 733 throw new Exception("failed to read compound attribute: " + ex.getMessage(), ex); 734 } 735 736 return readData; 737 } 738 739 /** 740 * Writes the given data buffer into this attribute in a file. 741 * 742 * The data buffer is a vector that contains the data values of compound fields. The data is written 743 * into file as one data blob. 744 * 745 * @param buf 746 * The vector that contains the data values of compound fields. 747 * 748 * @throws Exception 749 * If there is an error at the HDF5 library level. 
750 */ 751 @Override 752 public void write(Object buf) throws Exception { 753 if (this.getFileFormat().isReadOnly()) 754 throw new Exception("cannot write to compound attribute in file opened as read-only"); 755 756 if (!buf.equals(data)) 757 setData(buf); 758 759 init(); 760 761 if (parentObject == null) { 762 log.debug("write(Object): parent object is null; nowhere to write attribute to"); 763 return; 764 } 765 766 ((MetaDataContainer) getParentObject()).writeMetadata(this); 767 768 try { 769 compoundAttributeCommonIO(H5File.IO_TYPE.WRITE, buf); 770 } 771 catch (Exception ex) { 772 log.debug("write(Object): failed to write compound attribute: ", ex); 773 throw new Exception("failed to write compound attribute: " + ex.getMessage(), ex); 774 } 775 resetSelection(); 776 } 777 778 /* 779 * Routine to convert datatypes that are read in as byte arrays to 780 * regular types. 781 */ 782 @Override 783 protected Object convertByteMember(final Datatype dtype, byte[] byteData) { 784 Object theObj = null; 785 786 if (dtype.isFloat() && dtype.getDatatypeSize() == 16) 787 theObj = ((H5Datatype)dtype).byteToBigDecimal(byteData, 0); 788 else 789 theObj = super.convertByteMember(dtype, byteData); 790 791 return theObj; 792 } 793 794 private Object compoundAttributeCommonIO(H5File.IO_TYPE ioType, Object writeBuf) throws Exception { 795 H5Datatype dsDatatype = (H5Datatype)getDatatype(); 796 Object theData = null; 797 798 if (numberOfMembers <= 0) { 799 log.debug("compoundAttributeCommonIO(): attribute contains no members"); 800 throw new Exception("dataset contains no members"); 801 } 802 803 /* 804 * I/O type-specific pre-initialization. 
805 */ 806 if (ioType == H5File.IO_TYPE.WRITE) { 807 if ((writeBuf == null) || !(writeBuf instanceof List)) { 808 log.debug("compoundAttributeCommonIO(): writeBuf is null or invalid"); 809 throw new Exception("write buffer is null or invalid"); 810 } 811 812 /* 813 * Check for any unsupported datatypes and fail early before 814 * attempting to write to the attribute. 815 */ 816 if (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isCompound()) { 817 log.debug("compoundAttributeCommonIO(): cannot write attribute of type ARRAY of COMPOUND"); 818 throw new HDF5Exception("Unsupported attribute of type ARRAY of COMPOUND"); 819 } 820 821 if (dsDatatype.isVLEN() && !dsDatatype.isVarStr() && dsDatatype.getDatatypeBase().isCompound()) { 822 log.debug("compoundAttributeCommonIO(): cannot write attribute of type VLEN of COMPOUND"); 823 throw new HDF5Exception("Unsupported attribute of type VLEN of COMPOUND"); 824 } 825 } 826 827 long aid = open(); 828 if (aid >= 0) { 829 log.trace("compoundAttributeCommonIO(): isDataLoaded={}", isDataLoaded); 830 try { 831 theData = AttributeCommonIO(aid, ioType, writeBuf); 832 } 833 finally { 834 close(aid); 835 } 836 } 837 else 838 log.debug("compoundAttributeCommonIO(): failed to open attribute"); 839 840 return theData; 841 } 842 843 /* 844 * Private recursive routine to read/write an entire compound datatype field by 845 * field. This routine is called recursively for ARRAY of COMPOUND and VLEN of 846 * COMPOUND datatypes. 847 * 848 * NOTE: the globalMemberIndex hack is ugly, but we need to keep track of a 849 * running counter so that we can index properly into the flattened name list 850 * generated from H5Datatype.extractCompoundInfo() at attribute init time. 
 */
    private Object compoundTypeIO(H5Datatype parentType, int nSelPoints, final H5Datatype cmpdType,
            Object dataBuf, int[] globalMemberIndex) {
        Object theData = null;

        if (cmpdType.isArray()) {
            log.trace("compoundTypeIO(): ARRAY type");

            // ARRAY of COMPOUND: multiply the point count by the array element
            // count and recurse into the base type.
            long[] arrayDims = cmpdType.getArrayDims();
            int arrSize = nSelPoints;
            for (int i = 0; i < arrayDims.length; i++) {
                arrSize *= arrayDims[i];
            }
            theData = compoundTypeIO(cmpdType, arrSize, (H5Datatype) cmpdType.getDatatypeBase(), dataBuf, globalMemberIndex);
        }
        else if (cmpdType.isVLEN() && !cmpdType.isVarStr()) {
            /*
             * TODO: true variable-length support.
             * Until then, fill the result with "*UNSUPPORTED*" placeholders.
             */
            String[] errVal = new String[nSelPoints];
            String errStr = "*UNSUPPORTED*";

            for (int j = 0; j < nSelPoints; j++)
                errVal[j] = errStr;

            /*
             * Setup a fake data list. Walk down to the first compound base type
             * so allocateArray builds a list of the right shape.
             */
            Datatype baseType = cmpdType.getDatatypeBase();
            while (baseType != null && !baseType.isCompound()) {
                baseType = baseType.getDatatypeBase();
            }

            List<Object> fakeVlenData = (List<Object>) H5Datatype.allocateArray((H5Datatype) baseType, nSelPoints);
            fakeVlenData.add(errVal);

            theData = fakeVlenData;
        }
        else if (cmpdType.isCompound()) {
            // parentLength is the byte stride between consecutive compound records
            // in dataBuf (the size of the enclosing type, not of this member).
            long parentLength = parentType.getDatatypeSize();
            List<Object> memberDataList = null;
            List<Datatype> typeList = cmpdType.getCompoundMemberTypes();
            List<Long> offsetList = cmpdType.getCompoundMemberOffsets();

            log.trace("compoundTypeIO(): read {} members: parentLength={}", typeList.size(), parentLength);

            memberDataList = (List<Object>) H5Datatype.allocateArray(cmpdType, nSelPoints);

            try {
                for (int i = 0; i < typeList.size(); i++) {
                    long memberOffset = 0; //offset into dataBuf
                    H5Datatype memberType = null;
                    String memberName = null;
                    Object memberData = null;

                    try {
                        memberType = (H5Datatype) typeList.get(i);
                        memberOffset = offsetList.get(i);
                    }
                    catch (Exception ex) {
                        log.debug("compoundTypeIO(): get member {} failure: ", i, ex);
                        globalMemberIndex[0]++;
                        continue;
                    }

                    /*
                     * Since the type list used here is not a flattened structure, we need to skip
                     * the member selection check for compound types, as otherwise having a single
                     * member not selected would skip the reading/writing for the entire compound
                     * type. The member selection check will be deferred to the recursive compound
                     * read/write below.
                     */
                    if (!memberType.isCompound()) {
                        if (!isMemberSelected[globalMemberIndex[0] % this.getMemberCount()]) {
                            log.debug("compoundTypeIO(): member[{}] is not selected", i);
                            globalMemberIndex[0]++;
                            continue; // the field is not selected
                        }
                    }

                    if (!memberType.isCompound()) {
                        // The flattened name list is indexed by the running global counter,
                        // not by the local member index i.
                        try {
                            memberName = new String(flatNameList.get(globalMemberIndex[0]));
                        }
                        catch (Exception ex) {
                            log.debug("compoundTypeIO(): get member {} name failure: ", i, ex);
                            memberName = "null";
                        }
                    }

                    log.trace("compoundTypeIO(): member[{}]({}) is type {} offset {}", i, memberName,
                            memberType.getDescription(), memberOffset);

                    try {
                        // Gather this member's bytes from every selected record into a
                        // contiguous buffer: record dimindx starts at
                        // memberOffset + dimindx * parentLength within dataBuf.
                        int mt_typesize = (int)memberType.getDatatypeSize();
                        log.trace("compoundTypeIO(): member[{}] mt_typesize={}", i, mt_typesize);
                        byte[] memberbuf = new byte[nSelPoints * mt_typesize];
                        for (int dimindx = 0; dimindx < nSelPoints; dimindx++)
                            try {
                                System.arraycopy(dataBuf, (int)memberOffset + dimindx * (int)parentLength, memberbuf, dimindx * mt_typesize, mt_typesize);
                            }
                            catch (Exception err) {
                                log.trace("compoundTypeIO(): arraycopy failure: ", err);
                            }

                        if (memberType.isCompound()) {
                            // Nested compound: recurse; globalMemberIndex advances inside.
                            memberData = compoundTypeIO(cmpdType, nSelPoints, memberType, memberbuf,
                                    globalMemberIndex);
                        }
                        else if (memberType.isArray() /* || (memberType.isVLEN() && !memberType.isVarStr()) */) {
                            /*
                             * Recursively detect any nested array/vlen of compound types.
                             */
                            boolean compoundFound = false;

                            Datatype base = memberType.getDatatypeBase();
                            while (base != null) {
                                if (base.isCompound())
                                    compoundFound = true;

                                base = base.getDatatypeBase();
                            }

                            if (compoundFound) {
                                /*
                                 * Skip the top-level array/vlen type.
                                 */
                                globalMemberIndex[0]++;

                                memberData = compoundTypeIO(cmpdType, nSelPoints, memberType, memberbuf,
                                        globalMemberIndex);
                            }
                            else {
                                memberData = convertByteMember(memberType, memberbuf);
                                globalMemberIndex[0]++;
                            }
                        }
                        else {
                            // Atomic member: convert the raw bytes to a Java array.
                            memberData = convertByteMember(memberType, memberbuf);
                            globalMemberIndex[0]++;
                        }
                    }
                    catch (Exception ex) {
                        log.debug("compoundTypeIO(): failed to read member {}: ", i, ex);
                        globalMemberIndex[0]++;
                        memberData = null;
                    }

                    // A failed member still occupies a slot in the result list so the
                    // list stays aligned with the member order: fill with "*ERROR*".
                    if (memberData == null) {
                        String[] errVal = new String[nSelPoints];
                        String errStr = "*ERROR*";

                        for (int j = 0; j < nSelPoints; j++)
                            errVal[j] = errStr;

                        memberData = errVal;
                    }

                    memberDataList.add(memberData);
                } // (i = 0; i < atomicTypeList.size(); i++)
            }
            catch (Exception ex) {
                log.debug("compoundTypeIO(): failure: ", ex);
                memberDataList = null;
            }

            theData = memberDataList;
        }

        return theData;
    }

    private Object compoundTypeWriteIO(H5Datatype parentType, final H5Datatype cmpdType,
            Object dataBuf, int[] globalMemberIndex) {
        Object theData = null;
        if (cmpdType.isArray()) {
            Object memberData = null;
            log.trace("compoundTypeWriteIO(): ARRAY type");

            theData = compoundTypeWriteIO(cmpdType, (H5Datatype) cmpdType.getDatatypeBase(), dataBuf, globalMemberIndex);
        }
        else if (cmpdType.isVLEN() && !cmpdType.isVarStr()) {
            /*
             * TODO: true variable-length support.
             */
            String errVal = new String("*UNSUPPORTED*");

            /*
             * Setup a fake data bytes.
1040 */ 1041 Datatype baseType = cmpdType.getDatatypeBase(); 1042 while (baseType != null && !baseType.isCompound()) { 1043 baseType = baseType.getDatatypeBase(); 1044 } 1045 1046 List<Object> fakeVlenData = (List<Object>) H5Datatype.allocateArray((H5Datatype) baseType, 1); 1047 fakeVlenData.add(errVal); 1048 1049 theData = convertMemberByte(baseType, fakeVlenData); 1050 } 1051 else if (cmpdType.isCompound()) { 1052 long parentLength = parentType.getDatatypeSize(); 1053 List<Object> memberDataList = null; 1054 List<Datatype> typeList = cmpdType.getCompoundMemberTypes(); 1055 List<Long> offsetList = cmpdType.getCompoundMemberOffsets(); 1056 1057 log.trace("compoundTypeWriteIO(): write {} members", typeList.size()); 1058 1059 theData = new byte[(int)cmpdType.getDatatypeSize()]; 1060 try { 1061 for (int i = 0, writeListIndex = 0; i < typeList.size(); i++) { 1062 long memberOffset = 0; //offset into dataBuf 1063 H5Datatype memberType = null; 1064 String memberName = null; 1065 Object memberData = null; 1066 1067 try { 1068 memberType = (H5Datatype) typeList.get(i); 1069 memberOffset = offsetList.get(i); 1070 } 1071 catch (Exception ex) { 1072 log.debug("compoundTypeWriteIO(): get member {} failure: ", i, ex); 1073 globalMemberIndex[0]++; 1074 continue; 1075 } 1076 long memberLength = memberType.getDatatypeSize(); 1077 1078 /* 1079 * Since the type list used here is not a flattened structure, we need to skip the member selection 1080 * check for compound types, as otherwise having a single member not selected would skip the 1081 * reading/writing for the entire compound type. The member selection check will be deferred to the 1082 * recursive compound read/write below. 
1083 */ 1084 if (!memberType.isCompound()) { 1085 if (!isMemberSelected[globalMemberIndex[0] % this.getMemberCount()]) { 1086 log.debug("compoundTypeWriteIO(): member[{}] is not selected", i); 1087 globalMemberIndex[0]++; 1088 continue; // the field is not selected 1089 } 1090 } 1091 1092 if (!memberType.isCompound()) { 1093 try { 1094 memberName = new String(flatNameList.get(globalMemberIndex[0])); 1095 } 1096 catch (Exception ex) { 1097 log.debug("compoundTypeWriteIO(): get member {} name failure: ", i, ex); 1098 memberName = "null"; 1099 } 1100 } 1101 1102 log.trace("compoundTypeWriteIO(): member[{}]({}) is type {} offset {}", i, memberName, 1103 memberType.getDescription(), memberOffset); 1104 1105 try { 1106 /* 1107 * TODO: currently doesn't correctly handle non-selected compound members. 1108 */ 1109 memberData = ((List<?>) dataBuf).get(i); 1110 } 1111 catch (Exception ex) { 1112 log.debug("compoundTypeWriteIO(): get member[{}] data failure: ", i, ex); 1113 globalMemberIndex[0]++; 1114 continue; 1115 } 1116 1117 if (memberData == null) { 1118 log.debug("compoundTypeWriteIO(): member[{}] data is null", i); 1119 globalMemberIndex[0]++; 1120 continue; 1121 } 1122 1123 try { 1124 if (memberType.isCompound()) { 1125 List<?> nestedList = (List<?>) ((List<?>) dataBuf).get(i); 1126 memberData = compoundTypeWriteIO(cmpdType, memberType, nestedList, globalMemberIndex); 1127 } 1128 else { 1129 memberData = writeSingleCompoundMember(memberType, memberData); 1130 globalMemberIndex[0]++; 1131 } 1132 } 1133 catch (Exception ex) { 1134 log.debug("compoundTypeWriteIO(): failed to write member[{}]: ", i, ex); 1135 globalMemberIndex[0]++; 1136 } 1137 1138 byte[] indexedBytes = convertMemberByte(memberType, memberData); 1139 try { 1140 System.arraycopy(indexedBytes, 0, theData, writeListIndex, (int)memberLength); 1141 } 1142 catch (Exception err) { 1143 log.trace("compoundTypeWriteIO(): arraycopy failure: ", err); 1144 } 1145 writeListIndex += memberLength; 1146 } // (i = 0, 
writeListIndex = 0; i < atomicTypeList.size(); i++) 1147 } 1148 catch (Exception ex) { 1149 log.debug("compoundTypeWriteIO(): failure: ", ex); 1150 theData = null; 1151 } 1152 } 1153 1154 return theData; 1155 } 1156 1157 /* 1158 * Routine to convert datatypes that are in object arrays to 1159 * bytes. 1160 */ 1161 private byte[] convertMemberByte(final Datatype dtype, Object theObj) { 1162 byte[] byteData = null; 1163 1164 if (dtype.getDatatypeSize() == 1) { 1165 /* 1166 * Normal byte[] type, such as an integer datatype of size 1. 1167 */ 1168 byteData = (byte[])theObj; 1169 } 1170 else if (dtype.isString() && !dtype.isVarStr() && convertByteToString && !(theObj instanceof byte[])) { 1171 log.trace("convertMemberByte(): converting string array to byte array"); 1172 1173 byteData = stringToByte((String[])theObj, (int) dtype.getDatatypeSize()); 1174 } 1175 else if (dtype.isInteger()) { 1176 log.trace("convertMemberByte(): converting integer array to byte array"); 1177 1178 switch ((int)dtype.getDatatypeSize()) { 1179 case 1: 1180 /* 1181 * Normal byte[] type, such as an integer datatype of size 1. 
1182 */ 1183 byteData = (byte[])theObj; 1184 break; 1185 case 2: 1186 byteData = HDFNativeData.shortToByte(0, 1, (short[])theObj); 1187 break; 1188 case 4: 1189 byteData = HDFNativeData.intToByte(0, 1, (int[])theObj); 1190 break; 1191 case 8: 1192 byteData = HDFNativeData.longToByte(0, 1, (long[])theObj); 1193 break; 1194 default: 1195 log.debug("convertMemberByte(): invalid datatype size"); 1196 byteData = null; 1197 break; 1198 } 1199 } 1200 else if (dtype.isFloat()) { 1201 log.trace("convertMemberByte(): converting float array to byte array"); 1202 1203 if (dtype.getDatatypeSize() == 16) 1204 byteData = ((H5Datatype)dtype).bigDecimalToByte((BigDecimal[])theObj, 0); 1205 else if (dtype.getDatatypeSize() == 8) 1206 byteData = HDFNativeData.doubleToByte(0, 1, (double[])theObj); 1207 else 1208 byteData = HDFNativeData.floatToByte(0, 1, (float[])theObj); 1209 } 1210 else if (((H5Datatype)dtype).isRegRef() || ((H5Datatype)dtype).isRefObj()) { 1211 log.trace("convertMemberByte(): reference type - converting long array to byte array"); 1212 1213 byteData = HDFNativeData.longToByte(0, 1, (long[])theObj); 1214 } 1215 else if (dtype.isArray()) { 1216 Datatype baseType = dtype.getDatatypeBase(); 1217 1218 /* 1219 * Retrieve the real base datatype in the case of ARRAY of ARRAY datatypes. 1220 */ 1221 while (baseType.isArray()) 1222 baseType = baseType.getDatatypeBase(); 1223 1224 /* 1225 * Optimize for the common cases of Arrays. 
1226 */ 1227 switch (baseType.getDatatypeClass()) { 1228 case Datatype.CLASS_INTEGER: 1229 case Datatype.CLASS_FLOAT: 1230 case Datatype.CLASS_CHAR: 1231 case Datatype.CLASS_STRING: 1232 case Datatype.CLASS_BITFIELD: 1233 case Datatype.CLASS_OPAQUE: 1234 case Datatype.CLASS_COMPOUND: 1235 case Datatype.CLASS_REFERENCE: 1236 case Datatype.CLASS_ENUM: 1237 case Datatype.CLASS_VLEN: 1238 case Datatype.CLASS_TIME: 1239 byteData = convertMemberByte(baseType, theObj); 1240 break; 1241 1242 case Datatype.CLASS_ARRAY: 1243 { 1244 Datatype arrayType = dtype.getDatatypeBase(); 1245 1246 long[] arrayDims = dtype.getArrayDims(); 1247 int arrSize = 1; 1248 for (int i = 0; i < arrayDims.length; i++) { 1249 arrSize *= arrayDims[i]; 1250 } 1251 1252 byteData = new byte[arrSize * (int)arrayType.getDatatypeSize()]; 1253 1254 for (int i = 0; i < arrSize; i++) { 1255 byte[] indexedBytes = convertMemberByte(arrayType, ((Object[]) theObj)[i]); 1256 try { 1257 System.arraycopy(indexedBytes, 0, byteData, (int)(i * arrayType.getDatatypeSize()), (int)arrayType.getDatatypeSize()); 1258 } 1259 catch (Exception err) { 1260 log.trace("convertMemberByte(): arraycopy failure: ", err); 1261 } 1262 } 1263 1264 break; 1265 } 1266 1267 case Datatype.CLASS_NO_CLASS: 1268 default: 1269 log.debug("convertMemberByte(): invalid datatype class"); 1270 byteData = null; 1271 } 1272 } 1273 else if (dtype.isCompound()) { 1274 /* 1275 * TODO: still valid after reading change? 1276 */ 1277 byteData = convertCompoundMemberBytes(dtype, (List<Object>)theObj); 1278 } 1279 else { 1280 log.debug("convertMemberByte(): no change as byte[]"); 1281 byteData = (byte[])theObj; 1282 } 1283 1284 return byteData; 1285 } 1286 1287 /** 1288 * Given an array of objects representing a compound Datatype, converts each of 1289 * its members into bytes and returns the results. 
1290 * 1291 * @param dtype 1292 * The compound datatype to convert 1293 * @param theObj 1294 * The object array representing the data of the compound Datatype 1295 * @return The converted bytes of the objects 1296 */ 1297 private byte[] convertCompoundMemberBytes(final Datatype dtype, List<Object> theObj) { 1298 List<Datatype> allSelectedTypes = Arrays.asList(this.getSelectedMemberTypes()); 1299 List<Datatype> localTypes = new ArrayList<>(dtype.getCompoundMemberTypes()); 1300 Iterator<Datatype> localIt = localTypes.iterator(); 1301 while (localIt.hasNext()) { 1302 Datatype curType = localIt.next(); 1303 1304 if (curType.isCompound()) 1305 continue; 1306 1307 if (!allSelectedTypes.contains(curType)) 1308 localIt.remove(); 1309 } 1310 1311 byte[] byteData = new byte[(int)dtype.getDatatypeSize()]; 1312 for (int i = 0, index = 0; i < localTypes.size(); i++) { 1313 Datatype curType = localTypes.get(i); 1314 byte[] indexedBytes = null; 1315 if (curType.isCompound()) 1316 indexedBytes = convertCompoundMemberBytes(curType, (List<Object>)theObj.get(i)); 1317 else 1318 indexedBytes = convertMemberByte(curType, theObj.get(i)); 1319 1320 try { 1321 System.arraycopy(indexedBytes, 0, byteData, index + (int)curType.getDatatypeSize(), (int)curType.getDatatypeSize()); 1322 } 1323 catch (Exception err) { 1324 log.trace("convertCompoundMemberBytes(): arraycopy failure: ", err); 1325 } 1326 index += curType.getDatatypeSize(); 1327 } 1328 1329 return byteData; 1330 } 1331 1332 /* 1333 * Private routine to convert a single field of a compound datatype. 1334 */ 1335 private Object writeSingleCompoundMember(final H5Datatype memberType, Object theData) throws Exception { 1336 /* 1337 * Perform any necessary data conversions before writing the data. 
1338 */ 1339 Object tmpData = theData; 1340 try { 1341 if (memberType.isUnsigned()) { 1342 // Check if we need to convert unsigned integer data from Java-style 1343 // to C-style integers 1344 long tsize = memberType.getDatatypeSize(); 1345 String cname = theData.getClass().getName(); 1346 char dname = cname.charAt(cname.lastIndexOf('[') + 1); 1347 boolean doIntConversion = (((tsize == 1) && (dname == 'S')) 1348 || ((tsize == 2) && (dname == 'I')) || ((tsize == 4) && (dname == 'J'))); 1349 1350 if (doIntConversion) { 1351 log.trace("writeSingleCompoundMember(): converting integer data to unsigned C-type integers"); 1352 tmpData = convertToUnsignedC(theData, null); 1353 } 1354 } 1355 else if (memberType.isString() && (Array.get(theData, 0) instanceof String)) { 1356 log.trace("writeSingleCompoundMember(): converting string array to byte array"); 1357 tmpData = stringToByte((String[]) theData, (int) memberType.getDatatypeSize()); 1358 } 1359 else if (memberType.isEnum() && (Array.get(theData, 0) instanceof String)) { 1360 log.trace("writeSingleCompoundMember(): converting enum names to values"); 1361 tmpData = memberType.convertEnumNameToValue((String[]) theData); 1362 } 1363 } 1364 catch (Exception ex) { 1365 log.debug("writeSingleCompoundMember(): data conversion failure: ", ex); 1366 tmpData = null; 1367 } 1368 1369 if (tmpData == null) { 1370 log.debug("writeSingleCompoundMember(): data is null"); 1371 } 1372 1373 return tmpData; 1374 } 1375 1376 /** 1377 * Converts the data values of this data object to appropriate Java integers if 1378 * they are unsigned integers. 1379 * 1380 * @see hdf.object.Dataset#convertToUnsignedC(Object) 1381 * @see hdf.object.Dataset#convertFromUnsignedC(Object, Object) 1382 * 1383 * @return the converted data buffer. 
1384 */ 1385 @Override 1386 public Object convertFromUnsignedC() { 1387 throw new UnsupportedOperationException("H5CompoundDS:convertFromUnsignedC Unsupported operation."); 1388 } 1389 1390 /** 1391 * Converts Java integer data values of this data object back to unsigned C-type 1392 * integer data if they are unsigned integers. 1393 * 1394 * @see hdf.object.Dataset#convertToUnsignedC(Object) 1395 * @see hdf.object.Dataset#convertToUnsignedC(Object, Object) 1396 * 1397 * @return the converted data buffer. 1398 */ 1399 @Override 1400 public Object convertToUnsignedC() { 1401 throw new UnsupportedOperationException("H5CompoundDS:convertToUnsignedC Unsupported operation."); 1402 } 1403 1404 /* Implement interface Attribute */ 1405 1406 /** 1407 * Returns the HObject to which this Attribute is currently "attached". 1408 * 1409 * @return the HObject to which this Attribute is currently "attached". 1410 */ 1411 @Override 1412 public HObject getParentObject() { 1413 return parentObject; 1414 } 1415 1416 /** 1417 * Sets the HObject to which this Attribute is "attached". 1418 * 1419 * @param pObj 1420 * the new HObject to which this Attribute is "attached". 1421 */ 1422 @Override 1423 public void setParentObject(HObject pObj) { 1424 parentObject = pObj; 1425 } 1426 1427 /** 1428 * set a property for the attribute. 1429 * 1430 * @param key the attribute Map key 1431 * @param value the attribute Map value 1432 */ 1433 @Override 1434 public void setProperty(String key, Object value) { 1435 properties.put(key, value); 1436 } 1437 1438 /** 1439 * get a property for a given key. 1440 * 1441 * @param key the attribute Map key 1442 * 1443 * @return the property 1444 */ 1445 @Override 1446 public Object getProperty(String key) { 1447 return properties.get(key); 1448 } 1449 1450 /** 1451 * get all property keys. 
1452 * 1453 * @return the Collection of property keys 1454 */ 1455 @Override 1456 public Collection<String> getPropertyKeys() { 1457 return properties.keySet(); 1458 } 1459 1460 /** 1461 * Returns the name of the object. For example, "Raster Image #2". 1462 * 1463 * @return The name of the object. 1464 */ 1465 @Override 1466 public final String getAttributeName() { 1467 return getName(); 1468 } 1469 1470 /** 1471 * Retrieves the attribute data from the file. 1472 * 1473 * @return the attribute data. 1474 * 1475 * @throws Exception 1476 * if the data can not be retrieved 1477 */ 1478 @Override 1479 public final Object getAttributeData() throws Exception, OutOfMemoryError { 1480 return getData(); 1481 } 1482 1483 /** 1484 * Returns the datatype of the attribute. 1485 * 1486 * @return the datatype of the attribute. 1487 */ 1488 @Override 1489 public final Datatype getAttributeDatatype() { 1490 return getDatatype(); 1491 } 1492 1493 /** 1494 * Returns the space type for the attribute. It returns a 1495 * negative number if it failed to retrieve the type information from 1496 * the file. 1497 * 1498 * @return the space type for the attribute. 1499 */ 1500 @Override 1501 public final int getAttributeSpaceType() { 1502 return getSpaceType(); 1503 } 1504 1505 /** 1506 * Returns the rank (number of dimensions) of the attribute. It returns a 1507 * negative number if it failed to retrieve the dimension information from 1508 * the file. 1509 * 1510 * @return the number of dimensions of the attribute. 1511 */ 1512 @Override 1513 public final int getAttributeRank() { 1514 return getRank(); 1515 } 1516 1517 /** 1518 * Returns the selected size of the rows and columns of the attribute. It returns a 1519 * negative number if it failed to retrieve the size information from 1520 * the file. 1521 * 1522 * @return the selected size of the rows and colums of the attribute. 
1523 */ 1524 @Override 1525 public final int getAttributePlane() { 1526 return (int)getWidth() * (int)getHeight(); 1527 } 1528 1529 /** 1530 * Returns the array that contains the dimension sizes of the data value of 1531 * the attribute. It returns null if it failed to retrieve the dimension 1532 * information from the file. 1533 * 1534 * @return the dimension sizes of the attribute. 1535 */ 1536 @Override 1537 public final long[] getAttributeDims() { 1538 return getDims(); 1539 } 1540 1541 /** 1542 * @return true if the dataspace is a NULL; otherwise, returns false. 1543 */ 1544 @Override 1545 public boolean isAttributeNULL() { 1546 return isNULL(); 1547 } 1548 1549 /** 1550 * @return true if the data is a single scalar point; otherwise, returns false. 1551 */ 1552 @Override 1553 public boolean isAttributeScalar() { 1554 return isScalar(); 1555 } 1556 1557 /** 1558 * Not for public use in the future. 1559 * 1560 * setData() is not safe to use because it changes memory buffer 1561 * of the dataset object. Dataset operations such as write/read 1562 * will fail if the buffer type or size is changed. 1563 * 1564 * @param d the object data -must be an array of Objects 1565 */ 1566 @Override 1567 public void setAttributeData(Object d) { 1568 setData(d); 1569 } 1570 1571 /** 1572 * Writes the memory buffer of this dataset to file. 1573 * 1574 * @throws Exception if buffer can not be written 1575 */ 1576 @Override 1577 public void writeAttribute() throws Exception { 1578 write(); 1579 } 1580 1581 /** 1582 * Writes the given data buffer into this attribute in a file. 1583 * 1584 * The data buffer is a vector that contains the data values of compound fields. The data is written 1585 * into file as one data blob. 1586 * 1587 * @param buf 1588 * The vector that contains the data values of compound fields. 1589 * 1590 * @throws Exception 1591 * If there is an error at the library level. 
1592 */ 1593 @Override 1594 public void writeAttribute(Object buf) throws Exception { 1595 write(buf); 1596 } 1597 1598 /** 1599 * Returns a string representation of the data value. For 1600 * example, "0, 255". 1601 * 1602 * For a compound datatype, it will be a 1D array of strings with field 1603 * members separated by the delimiter. For example, 1604 * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int, 1605 * float} of three data points. 1606 * 1607 * @param delimiter 1608 * The delimiter used to separate individual data points. It 1609 * can be a comma, semicolon, tab or space. For example, 1610 * toString(",") will separate data by commas. 1611 * 1612 * @return the string representation of the data values. 1613 */ 1614 @Override 1615 public String toAttributeString(String delimiter) { 1616 return toString(delimiter, -1); 1617 } 1618 1619 /** 1620 * Returns a string representation of the data value. For 1621 * example, "0, 255". 1622 * 1623 * For a compound datatype, it will be a 1D array of strings with field 1624 * members separated by the delimiter. For example, 1625 * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int, 1626 * float} of three data points. 1627 * 1628 * @param delimiter 1629 * The delimiter used to separate individual data points. It 1630 * can be a comma, semicolon, tab or space. For example, 1631 * toString(",") will separate data by commas. 1632 * @param maxItems 1633 * The maximum number of Array values to return 1634 * 1635 * @return the string representation of the data values. 
1636 */ 1637 @Override 1638 public String toAttributeString(String delimiter, int maxItems) { 1639 Object theData = originalBuf; 1640 if (theData == null) { 1641 log.debug("toAttributeString: value is null"); 1642 return null; 1643 } 1644 1645 // attribute value is an array 1646 StringBuilder sb = new StringBuilder(); 1647 int numberTypes = ((ArrayList<Object[]>)theData).size(); 1648 log.trace("toAttributeString: numberTypes={}", numberTypes); 1649 List<Datatype> cmpdTypes = getDatatype().getCompoundMemberTypes(); 1650 1651 int loopcnt = 0; 1652 while (loopcnt < maxItems) { 1653 if (loopcnt > 0) 1654 sb.append(delimiter); 1655 sb.append("{"); 1656 for (int dv = 0; dv < numberTypes; dv++) { 1657 if (dv > 0) 1658 sb.append(delimiter); 1659 1660 Object theobj = ((ArrayList<Object[]>)theData).get(dv); 1661 Class<? extends Object> valClass = theobj.getClass(); 1662 log.trace("toAttributeString:valClass={}", valClass); 1663 int n = 0; 1664 Datatype dtype = cmpdTypes.get(dv); 1665 // value is an array 1666 if (valClass.isArray()) { 1667 n = Array.getLength(theobj); 1668 if (dtype.isRef()) 1669 n /= (int)dtype.getDatatypeSize(); 1670 } 1671 else 1672 n = ((ArrayList<Object[]>)theobj).size(); 1673 //if ((maxItems > 0) && (n + loopcnt > maxItems)) 1674 // n = maxItems - loopcnt; 1675 log.trace("toAttributeString:[{}] theobj={} size={}", dv, theobj, n); 1676 String sobj = toString(theobj, dtype, delimiter, n); 1677 sb.append(sobj); 1678 loopcnt += n; 1679 if (loopcnt >= maxItems) 1680 break; 1681 } // end for (int dv = 0; dv < numberTypes; dv++) 1682 sb.append("}"); 1683 break; 1684 } // end for (int i = 1; i < n; i++) 1685 1686 return sb.toString(); 1687 } 1688 1689 @Override 1690 protected String toString(Object theData, Datatype theType, String delimiter, int count) { 1691 log.trace("toString: is_enum={} is_unsigned={} count={}", theType.isEnum(), 1692 theType.isUnsigned(), count); 1693 StringBuilder sb = new StringBuilder(); 1694 Class<? 
extends Object> valClass = theData.getClass(); 1695 log.trace("toString:valClass={}", valClass); 1696 1697 H5Datatype dtype = (H5Datatype)theType; 1698 log.trace("toString: count={} isStdRef={}", count, dtype.isStdRef()); 1699 if (dtype.isStdRef()) { 1700 return ((H5ReferenceType)dtype).toString(delimiter, count); 1701 } 1702 else if (dtype.isVLEN() && !dtype.isVarStr()) { 1703 log.trace("toString: vlen"); 1704 String strValue; 1705 1706 for (int k = 0; k < count; k++) { 1707 Object value = Array.get(theData, k); 1708 if (value == null) 1709 strValue = "null"; 1710 else { 1711 if (dtype.getDatatypeBase().isRef()) { 1712 ArrayList<byte[]> ref_value = (ArrayList<byte[]>)value; 1713 log.trace("toString: vlen value={}", ref_value); 1714 strValue = "{"; 1715 for (int m = 0; m < ref_value.size(); m++) { 1716 byte[] curBytes = ref_value.get(m); 1717 if (m > 0) 1718 strValue += ", "; 1719 if (H5ReferenceType.zeroArrayCheck(curBytes)) 1720 strValue += "NULL"; 1721 else { 1722 if (((H5Datatype)dtype.getDatatypeBase()).isStdRef()) { 1723 strValue += H5.H5Rget_obj_name(curBytes, HDF5Constants.H5P_DEFAULT); 1724 } 1725 else if (dtype.getDatatypeBase().getDatatypeSize() == HDF5Constants.H5R_DSET_REG_REF_BUF_SIZE) { 1726 try { 1727 strValue += H5Datatype.descRegionDataset(parentObject.getFileFormat().getFID(), curBytes); 1728 } 1729 catch (Exception ex) { 1730 ex.printStackTrace(); 1731 } 1732 } 1733 else if (dtype.getDatatypeBase().getDatatypeSize() == HDF5Constants.H5R_OBJ_REF_BUF_SIZE) { 1734 try { 1735 strValue += H5Datatype.descReferenceObject(parentObject.getFileFormat().getFID(), curBytes); 1736 } 1737 catch (Exception ex) { 1738 ex.printStackTrace(); 1739 } 1740 } 1741 } 1742 } 1743 strValue += "}"; 1744 } 1745 else 1746 strValue = value.toString(); 1747 } 1748 if (k > 0) 1749 sb.append(", "); 1750 sb.append(strValue); 1751 } 1752 } 1753 else if (dtype.isRef()) { 1754 log.trace("toString: ref"); 1755 int dtypesize = (int)dtype.getDatatypeSize(); 1756 String strValue = 
"NULL"; 1757 byte[] rElements = null; 1758 1759 for (int k = 0; k < count; k++) { 1760 // need to iterate if type is ArrayList 1761 if (theData instanceof ArrayList) 1762 rElements = (byte[]) ((ArrayList) theData).get(k); 1763 else 1764 rElements = (byte[])theData; 1765 1766 if (H5ReferenceType.zeroArrayCheck(rElements)) 1767 strValue = "NULL"; 1768 else { 1769 if (dtype.isStdRef()) { 1770 strValue += H5.H5Rget_obj_name(rElements, HDF5Constants.H5P_DEFAULT); 1771 } 1772 else if (dtypesize == HDF5Constants.H5R_DSET_REG_REF_BUF_SIZE) { 1773 try { 1774 strValue = H5Datatype.descRegionDataset(parentObject.getFileFormat().getFID(), rElements); 1775 } 1776 catch (Exception ex) { 1777 ex.printStackTrace(); 1778 } 1779 } 1780 else if (dtypesize == HDF5Constants.H5R_OBJ_REF_BUF_SIZE) { 1781 try { 1782 strValue = H5Datatype.descReferenceObject(parentObject.getFileFormat().getFID(), rElements); 1783 } 1784 catch (Exception ex) { 1785 ex.printStackTrace(); 1786 } 1787 } 1788 } 1789 if (k > 0) 1790 sb.append(", "); 1791 sb.append(strValue); 1792 } 1793 } 1794 else { 1795 return super.toString(theData, theType, delimiter, count); 1796 } 1797 1798 return sb.toString(); 1799 } 1800 1801 /* Implement interface H5Attribute */ 1802 1803 /** 1804 * The general read and write attribute operations for hdf5 object data. 
1805 * 1806 * @param attr_id 1807 * the attribute to access 1808 * @param ioType 1809 * the type of IO operation 1810 * @param objBuf 1811 * the data buffer to use for write operation 1812 * 1813 * @return the attribute data 1814 * 1815 * @throws Exception 1816 * if the data can not be retrieved 1817 */ 1818 @Override 1819 public Object AttributeCommonIO(long attr_id, H5File.IO_TYPE ioType, Object objBuf) throws Exception { 1820 H5Datatype dsDatatype = (H5Datatype) getDatatype(); 1821 Object theData = null; 1822 1823 long dt_size = dsDatatype.getDatatypeSize(); 1824 log.trace("AttributeCommonIO(): create native"); 1825 long tid = dsDatatype.createNative(); 1826 1827 if (ioType == H5File.IO_TYPE.READ) { 1828 log.trace("AttributeCommonIO():read ioType isNamed={} isEnum={} isText={} isRefObj={}", dsDatatype.isNamed(), dsDatatype.isEnum(), dsDatatype.isText(), dsDatatype.isRefObj()); 1829 1830 long lsize = 1; 1831 for (int j = 0; j < dims.length; j++) 1832 lsize *= dims[j]; 1833 log.trace("AttributeCommonIO():read ioType dt_size={} lsize={}", dt_size, lsize); 1834 1835 try { 1836 // Read data. 
1837 Object attr_data = new byte[(int)(dt_size * lsize)]; 1838 1839 try { 1840 H5.H5Aread(attr_id, tid, attr_data); 1841 } 1842 catch (Exception ex) { 1843 log.debug("AttributeCommonIO(): H5Aread failure: ", ex); 1844 } 1845 theData = compoundTypeIO(dsDatatype, (int)lsize, dsDatatype, attr_data, new int[]{0}); 1846 } 1847 catch (Exception ex) { 1848 log.debug("AttributeCommonIO():read ioType read failure: ", ex); 1849 throw new Exception(ex.getMessage(), ex); 1850 } 1851 finally { 1852 dsDatatype.close(tid); 1853 } 1854 for (int i = 0; i < ((ArrayList<Object[]>)theData).size(); i++) { 1855 Object theobj = ((ArrayList<Object[]>)theData).get(i); 1856 log.trace("AttributeCommonIO():read ioType data: {}", theobj); 1857 } 1858 originalBuf = theData; 1859 isDataLoaded = true; 1860 } // H5File.IO_TYPE.READ 1861 else { 1862 theData = compoundTypeWriteIO(dsDatatype, dsDatatype, objBuf, new int[]{0}); 1863 try { 1864 H5.H5Awrite(attr_id, tid, theData); 1865 } 1866 catch (Exception ex) { 1867 log.debug("AttributeCommonIO(): H5Awrite failure: ", ex); 1868 } 1869 finally { 1870 dsDatatype.close(tid); 1871 } 1872 } // H5File.IO_TYPE.WRITE 1873 1874 return theData; 1875 } 1876 1877 /** 1878 * Read a subset of an attribute for hdf5 object data. 1879 * 1880 * @return the selected attribute data 1881 * 1882 * @throws Exception 1883 * if the data can not be retrieved 1884 */ 1885 @Override 1886 public Object AttributeSelection() throws Exception { 1887 return originalBuf; 1888 } 1889}