/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the files COPYING and Copyright.html. *
 * COPYING can be found at the root of the source code distribution tree.    *
 * Or, see https://support.hdfgroup.org/products/licenses.html               *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.h5;

import java.lang.reflect.Array;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Vector;

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
import hdf.hdf5lib.HDFNativeData;
import hdf.hdf5lib.exceptions.HDF5DataFiltersException;
import hdf.hdf5lib.exceptions.HDF5Exception;

import hdf.object.Attribute;
import hdf.object.CompoundDS;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.MetaDataContainer;
import hdf.object.Utils;

import hdf.object.h5.H5Datatype;
import hdf.object.h5.H5ReferenceType;

/**
 * The H5CompoundAttr class defines an HDF5 attribute of a compound datatype.
 *
 * An attribute is a (name, value) pair of metadata attached to a primary data object such as a
 * dataset, group or named datatype.
 *
 * Like a dataset, an attribute has a name, datatype and dataspace.
 *
 * An HDF5 compound datatype is similar to a struct in C or a common block in Fortran: it is a
 * collection of one or more atomic types or small arrays of such types. Each member of a compound
 * type has a name which is unique within that type, and a byte offset that determines the first
 * byte (smallest byte address) of that member in a compound datum.
 *
 * For more information on HDF5 attributes and datatypes, read the <a href=
 * "https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5
 * User's Guide</a>.
 *
 * There are two basic types of compound attributes: simple compound data and nested compound data.
 * Members of a simple compound attribute have atomic datatypes. Members of a nested compound attribute
 * are compound or arrays of compound data.
 *
 * Since Java does not understand C structures, we cannot directly read/write compound data values
 * as in the following C example.
 *
 * <pre>
 * typedef struct s1_t {
 *     int    a;
 *     float  b;
 *     double c;
 * } s1_t;
 * s1_t s1[LENGTH];
 * ...
 * H5Dwrite(..., s1);
 * H5Dread(..., s1);
 * </pre>
 *
 * Values of compound data fields are stored in a java.util.Vector object. We read and write compound
 * data field by field instead of as a whole structure.
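 *
 * For instance, a minimal sketch of reading such an attribute field by field in Java
 * (how the attribute instance is obtained, the member order and the element casts are
 * assumptions made for illustration only):
 *
 * <pre>
 * H5CompoundAttr attr = ...; // compound attribute attached to a dataset, group or named datatype
 * List fields = (List) attr.getAttributeData();
 * int[]    a = (int[])    fields.get(0);
 * float[]  b = (float[])  fields.get(1);
 * double[] c = (double[]) fields.get(2);
 * </pre>
 *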
 * For the C struct example above, the java.util.Vector
 * object has three elements: int[LENGTH], float[LENGTH] and double[LENGTH]. Since Java understands
 * the primitive datatypes int, float and double, we are able to read/write the compound data
 * by field.
 *
 * @version 1.0 6/15/2021
 * @author Allen Byrne
 */
public class H5CompoundAttr extends CompoundDS implements H5Attribute
{
    private static final long serialVersionUID = 2072473407027648309L;

    private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5CompoundAttr.class);

    /** The HObject to which this H5CompoundAttr is attached, Attribute interface */
    protected HObject parentObject;

    /** additional information and properties for the attribute, Attribute interface */
    private transient Map<String, Object> properties;

    /**
     * Create an attribute with specified name, data type and dimension sizes.
     *
     * @param parentObj
     *            the HObject to which this H5CompoundAttr is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for a scalar attribute
     *
     * @see hdf.object.Datatype
     */
    public H5CompoundAttr(HObject parentObj, String attrName, Datatype attrType, long[] attrDims) {
        this(parentObj, attrName, attrType, attrDims, null);
    }

    /**
     * Create an attribute with the specified name, datatype, dimension sizes and value.
     *
     * @param parentObj
     *            the HObject to which this H5CompoundAttr is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for a scalar attribute
     * @param attrValue
     *            the value of the attribute, null if no value
     *
     * @see hdf.object.Datatype
     */
    @SuppressWarnings({ "rawtypes", "unchecked", "deprecation" })
    public H5CompoundAttr(HObject parentObj, String attrName, Datatype attrType, long[] attrDims, Object attrValue) {
        super((parentObj == null) ? null : parentObj.getFileFormat(), attrName,
              (parentObj == null) ?
              null : parentObj.getFullName(), null);

        log.trace("CompoundAttr: start {}", parentObj);
        this.parentObject = parentObj;

        datatype = attrType;

        if (attrValue != null) {
            data = attrValue;
            originalBuf = attrValue;
            isDataLoaded = true;
        }
        properties = new HashMap();

        if (attrDims == null) {
            rank = 1;
            dims = new long[] { 1 };
            isScalar = true;
        }
        else {
            dims = attrDims;
            rank = dims.length;
            isScalar = false;
        }

        selectedDims = new long[rank];
        startDims = new long[rank];
        selectedStride = new long[rank];

        numberOfMembers = 0;
        memberNames = null;
        isMemberSelected = null;
        memberTypes = null;

        log.trace("attrName={}, attrType={}, attrValue={}, rank={}",
                attrName, attrType.getDescription(), data, rank);

        resetSelection();
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#open()
     */
    @Override
    public long open() {
        if (parentObject == null) {
            log.debug("open(): attribute's parent object is null");
            return HDF5Constants.H5I_INVALID_HID;
        }

        long aid = HDF5Constants.H5I_INVALID_HID;
        long pObjID = HDF5Constants.H5I_INVALID_HID;

        try {
            pObjID = parentObject.open();
            if (pObjID >= 0) {
                if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5))) {
                    log.trace("open(): FILE_TYPE_HDF5");
                    if (H5.H5Aexists(pObjID, getName()))
                        aid = H5.H5Aopen(pObjID, getName(), HDF5Constants.H5P_DEFAULT);
                }
            }

            log.trace("open(): aid={}", aid);
        }
        catch (Exception ex) {
            log.debug("open(): Failed to open attribute {}: ", getName(), ex);
            aid = HDF5Constants.H5I_INVALID_HID;
        }
        finally {
            parentObject.close(pObjID);
        }

        return aid;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#close(int)
     */
    @Override
    public void close(long aid) {
        if (aid >= 0) {
            if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5))) {
                log.trace("close(): FILE_TYPE_HDF5");
                try {
                    H5.H5Aclose(aid);
                }
                catch (HDF5Exception ex) {
                    log.debug("close(): H5Aclose({}) failure: ", aid, ex);
                }
            }
        }
    }

    /**
     * Retrieves datatype and dataspace information from file and sets the attribute
     * in memory.
     *
     * init() is designed to support lazy operation in an attribute object. When a
     * data object is retrieved from file, the datatype, dataspace and raw data are
     * not loaded into memory. When it is asked to read the raw data from file,
     * init() is first called to get the datatype and dataspace information, then
     * load the raw data from file.
     *
     * init() is also used to reset the selection of an attribute (start, stride and
     * count) to the default, which is the entire attribute for 1D or 2D data. In
     * the following example, init() at step 1) retrieves datatype and dataspace
     * information from file. getData() at step 3) reads only one data point. init()
     * at step 4) resets the selection to the whole attribute. getData() at step 6)
     * reads the values of the whole attribute into memory.
258 * 259 * <pre> 260 * dset = (Dataset) file.get(NAME_DATASET); 261 * 262 * // 1) get datatype and dataspace information from file 263 * attr.init(); 264 * rank = attr.getAttributeRank(); // rank = 2, a 2D attribute 265 * count = attr.getSelectedDims(); 266 * start = attr.getStartDims(); 267 * dims = attr.getAttributeDims(); 268 * 269 * // 2) select only one data point 270 * for (int i = 0; i < rank; i++) { 271 * start[0] = 0; 272 * count[i] = 1; 273 * } 274 * 275 * // 3) read one data point 276 * data = attr.getAttributeData(); 277 * 278 * // 4) reset selection to the whole attribute 279 * attr.init(); 280 * 281 * // 5) clean the memory data buffer 282 * attr.clearData(); 283 * 284 * // 6) Read the whole attribute 285 * data = attr.getAttributeData(); 286 * </pre> 287 */ 288 @Override 289 public void init() { 290 if (inited) { 291 resetSelection(); 292 log.trace("init(): H5CompoundAttr already inited"); 293 return; 294 } 295 296 long aid = HDF5Constants.H5I_INVALID_HID; 297 long tid = HDF5Constants.H5I_INVALID_HID; 298 long sid = HDF5Constants.H5I_INVALID_HID; 299 int tclass = HDF5Constants.H5I_INVALID_HID; 300 flatNameList = new Vector<>(); 301 flatTypeList = new Vector<>(); 302 long[] memberTIDs = null; 303 304 log.trace("init(): FILE_TYPE_HDF5"); 305 aid = open(); 306 if (aid >= 0) { 307 try { 308 sid = H5.H5Aget_space(aid); 309 rank = H5.H5Sget_simple_extent_ndims(sid); 310 space_type = H5.H5Sget_simple_extent_type(sid); 311 tid = H5.H5Aget_type(aid); 312 tclass = H5.H5Tget_class(tid); 313 log.trace("init(): tid={} sid={} rank={} space_type={}", tid, sid, rank, space_type); 314 315 long tmptid = 0; 316 317 // Handle ARRAY and VLEN types by getting the base type 318 if (tclass == HDF5Constants.H5T_ARRAY || tclass == HDF5Constants.H5T_VLEN) { 319 try { 320 tmptid = tid; 321 tid = H5.H5Tget_super(tmptid); 322 log.trace("init(): H5T_ARRAY or H5T_VLEN class old={}, new={}", tmptid, tid); 323 } 324 catch (Exception ex) { 325 log.debug("init(): H5T_ARRAY or H5T_VLEN H5Tget_super({}) failure: ", tmptid, ex); 326 tid = -1; 327 } 328 finally { 329 try { 330 H5.H5Tclose(tmptid); 331 } 332 catch (HDF5Exception ex) { 333 log.debug("init(): H5Tclose({}) failure: ", tmptid, ex); 334 } 335 } 336 } 337 338 if (tclass == HDF5Constants.H5T_COMPOUND) { 339 // initialize member information 340 H5Datatype.extractCompoundInfo((H5Datatype)datatype, "", flatNameList, flatTypeList); 341 numberOfMembers = flatNameList.size(); 342 log.trace("init(): numberOfMembers={}", numberOfMembers); 343 344 memberNames = new String[numberOfMembers]; 345 memberTIDs = new long[numberOfMembers]; 346 memberTypes = new Datatype[numberOfMembers]; 347 memberOrders = new int[numberOfMembers]; 348 isMemberSelected = new boolean[numberOfMembers]; 349 memberDims = new Object[numberOfMembers]; 350 351 for (int i = 0; i < numberOfMembers; i++) { 352 isMemberSelected[i] = true; 353 memberTIDs[i] = flatTypeList.get(i).createNative(); 354 355 try { 356 memberTypes[i] = flatTypeList.get(i); 357 } 358 catch (Exception ex) { 359 log.debug("init(): failed to create datatype for member[{}]: ", i, ex); 360 memberTypes[i] = null; 361 } 362 363 memberNames[i] = flatNameList.get(i); 364 memberOrders[i] = 1; 365 memberDims[i] = null; 366 log.trace("init()[{}]: memberNames[{}]={}, memberTIDs[{}]={}, memberTypes[{}]={}", i, i, 367 memberNames[i], i, memberTIDs[i], i, memberTypes[i]); 368 369 try { 370 tclass = H5.H5Tget_class(memberTIDs[i]); 371 } 372 catch (HDF5Exception ex) { 373 log.debug("init(): H5Tget_class({}) failure: ", memberTIDs[i], ex); 
374 } 375 376 if (tclass == HDF5Constants.H5T_ARRAY) { 377 int n = H5.H5Tget_array_ndims(memberTIDs[i]); 378 long mdim[] = new long[n]; 379 H5.H5Tget_array_dims(memberTIDs[i], mdim); 380 int idim[] = new int[n]; 381 for (int j = 0; j < n; j++) 382 idim[j] = (int) mdim[j]; 383 memberDims[i] = idim; 384 tmptid = H5.H5Tget_super(memberTIDs[i]); 385 memberOrders[i] = (int) (H5.H5Tget_size(memberTIDs[i]) / H5.H5Tget_size(tmptid)); 386 try { 387 H5.H5Tclose(tmptid); 388 } 389 catch (HDF5Exception ex) { 390 log.debug("init(): memberTIDs[{}] H5Tclose(tmptid {}) failure: ", i, tmptid, ex); 391 } 392 } 393 } // (int i=0; i<numberOfMembers; i++) 394 } 395 396 if (rank == 0) { 397 // a scalar data point 398 isScalar = true; 399 rank = 1; 400 dims = new long[] { 1 }; 401 log.trace("init(): rank is a scalar data point"); 402 } 403 else { 404 isScalar = false; 405 dims = new long[rank]; 406 maxDims = new long[rank]; 407 H5.H5Sget_simple_extent_dims(sid, dims, maxDims); 408 log.trace("init(): rank={}, dims={}, maxDims={}", rank, dims, maxDims); 409 } 410 411 inited = true; 412 } 413 catch (HDF5Exception ex) { 414 numberOfMembers = 0; 415 memberNames = null; 416 memberTypes = null; 417 memberOrders = null; 418 log.debug("init(): ", ex); 419 } 420 finally { 421 try { 422 H5.H5Tclose(tid); 423 } 424 catch (HDF5Exception ex2) { 425 log.debug("init(): H5Tclose(tid {}) failure: ", tid, ex2); 426 } 427 try { 428 H5.H5Sclose(sid); 429 } 430 catch (HDF5Exception ex2) { 431 log.debug("init(): H5Sclose(sid {}) failure: ", sid, ex2); 432 } 433 434 if (memberTIDs != null) { 435 for (int i = 0; i < memberTIDs.length; i++) { 436 try { 437 H5.H5Tclose(memberTIDs[i]); 438 } 439 catch (Exception ex) { 440 log.debug("init(): H5Tclose(memberTIDs[{}] {}) failure: ", i, memberTIDs[i], ex); 441 } 442 } 443 } 444 } 445 446 close(aid); 447 448 startDims = new long[rank]; 449 selectedDims = new long[rank]; 450 451 resetSelection(); 452 } 453 else { 454 log.debug("init(): failed to open attribute"); 455 } 456 } 457 458 /** 459 * Returns the datatype of the data object. 460 * 461 * @return the datatype of the data object. 462 */ 463 @Override 464 public Datatype getDatatype() { 465 if (!inited) 466 init(); 467 468 if (datatype == null) { 469 long aid = HDF5Constants.H5I_INVALID_HID; 470 long tid = HDF5Constants.H5I_INVALID_HID; 471 472 aid = open(); 473 if (aid >= 0) { 474 try { 475 tid = H5.H5Aget_type(aid); 476 int nativeClass = H5.H5Tget_class(tid); 477 if (nativeClass == HDF5Constants.H5T_REFERENCE) { 478 long lsize = 1; 479 long sid = H5.H5Aget_space(aid); 480 int rank = H5.H5Sget_simple_extent_ndims(sid); 481 if (rank > 0) { 482 long dims[] = new long[rank]; 483 H5.H5Sget_simple_extent_dims(sid, dims, null); 484 log.trace("getDatatype(): rank={}, dims={}", rank, dims); 485 for (int j = 0; j < dims.length; j++) { 486 lsize *= dims[j]; 487 } 488 } 489 datatype = new H5ReferenceType(getFileFormat(), lsize, tid); 490 } 491 else 492 datatype = new H5Datatype(getFileFormat(), tid); 493 } 494 catch (Exception ex) { 495 log.debug("getDatatype(): ", ex); 496 } 497 finally { 498 try { 499 H5.H5Tclose(tid); 500 } 501 catch (HDF5Exception ex) { 502 log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex); 503 } 504 try { 505 H5.H5Aclose(aid); 506 } 507 catch (HDF5Exception ex) { 508 log.debug("getDatatype(): H5Aclose(aid {}) failure: ", aid, ex); 509 } 510 } 511 } 512 } 513 514 return datatype; 515 } 516 517 /** 518 * Returns the data buffer of the attribute in memory. 
     *
     * If data is already loaded into memory, returns the data; otherwise, calls
     * read() to read data from file into a memory buffer and returns the memory
     * buffer.
     *
     * The whole attribute is read into memory. Users can also select
     * a subset from the whole data. Subsetting is done in an implicit way.
     *
     * <b>How to Select a Subset</b>
     *
     * A selection is specified by three arrays: start, stride and count.
     * <ol>
     * <li>start: offset of a selection
     * <li>stride: determines how many elements to move in each dimension
     * <li>count: number of elements to select in each dimension
     * </ol>
     * getStartDims(), getStride() and getSelectedDims() return the start,
     * stride and count arrays respectively. Applications can make a selection
     * by changing the values of the arrays.
     *
     * The following example shows how to make a subset. In the example, the
     * attribute is a 4-dimensional array of [200][100][50][10], i.e. dims[0]=200;
     * dims[1]=100; dims[2]=50; dims[3]=10; <br>
     * We want to select every other data point in dims[1] and dims[2].
     *
     * <pre>
     * int rank = attribute.getRank(); // number of dimensions of the attribute
     * long[] dims = attribute.getDims(); // the dimension sizes of the attribute
     * long[] selected = attribute.getSelectedDims(); // the selected size of the attribute
     * long[] start = attribute.getStartDims(); // the offset of the selection
     * long[] stride = attribute.getStride(); // the stride of the attribute
     * int[] selectedIndex = attribute.getSelectedIndex(); // the selected dimensions for display
     *
     * // select dim1 and dim2 as 2D data for display, and slice through dim0
     * selectedIndex[0] = 1;
     * selectedIndex[1] = 2;
     * selectedIndex[2] = 0;
     *
     * // reset the selection arrays
     * for (int i = 0; i < rank; i++) {
     *     start[i] = 0;
     *     selected[i] = 1;
     *     stride[i] = 1;
     * }
     *
     * // set stride to 2 on dim1 and dim2 so that every other data point is
     * // selected.
     * stride[1] = 2;
     * stride[2] = 2;
     *
     * // set the selection size of dim1 and dim2
     * selected[1] = dims[1] / stride[1];
     * selected[2] = dims[2] / stride[2];
     *
     * // when H5CompoundAttr.getData() is called, the selection above will be used since
     * // the dimension arrays are passed by reference. Changes of these arrays
     * // outside the attribute object directly change the values of these arrays
     * // in the attribute object.
     * </pre>
     *
     * For H5CompoundAttr, the memory data object is a java.util.List object. Each
     * element of the list is a data array that corresponds to a compound field.
     *
     * For example, if compound attribute "comp" has the following nested
     * structure, and member datatypes
     *
     * <pre>
     * comp --> m01 (int)
     * comp --> m02 (float)
     * comp --> nest1 --> m11 (char)
     * comp --> nest1 --> m12 (String)
     * comp --> nest1 --> nest2 --> m21 (long)
     * comp --> nest1 --> nest2 --> m22 (double)
     * </pre>
     *
     * getData() returns a list of six arrays: {int[], float[], char[],
     * String[], long[] and double[]}.
     *
     * @return the memory buffer of the attribute.
598 * 599 * @throws Exception if object can not be read 600 * @throws OutOfMemoryError if memory is exhausted 601 */ 602 @Override 603 public Object getData() throws Exception, OutOfMemoryError { 604 log.trace("getData(): isDataLoaded={}", isDataLoaded); 605 if (!isDataLoaded) 606 data = read(); // load the data, attributes read all data 607 608 nPoints = 1; 609 log.trace("getData(): selectedDims length={}", selectedDims.length); 610 int point_len = selectedDims.length; 611 //Partial data for 3 or more dimensions 612 if (rank > 2) 613 point_len = 3; 614 for (int j = 0; j < point_len; j++) { 615 log.trace("getData(): selectedDims[{}]={}", j, selectedDims[j]); 616 nPoints *= selectedDims[j]; 617 } 618 log.trace("getData: read {}", nPoints); 619 620 // apply the selection for 3 or more dimensions 621 // selection only expects to use 3 selectedDims 622 // where selectedIndex[0] is the row dimension 623 // where selectedIndex[1] is the col dimension 624 // where selectedIndex[2] is the frame dimension 625 if (rank > 2) 626 data = AttributeSelection(); 627 628 return data; 629 } 630 631 /* 632 * (non-Javadoc) 633 * 634 * @see hdf.object.Attribute#readBytes() 635 */ 636 @Override 637 public byte[] readBytes() throws HDF5Exception { 638 byte[] theData = null; 639 640 if (!isInited()) 641 init(); 642 643 long aid = open(); 644 if (aid >= 0) { 645 long tid = HDF5Constants.H5I_INVALID_HID; 646 647 try { 648 long[] lsize = { 1 }; 649 for (int j = 0; j < selectedDims.length; j++) 650 lsize[0] *= selectedDims[j]; 651 652 tid = H5.H5Aget_type(aid); 653 long size = H5.H5Tget_size(tid) * lsize[0]; 654 log.trace("readBytes(): size={}", size); 655 656 if (size < Integer.MIN_VALUE || size > Integer.MAX_VALUE) 657 throw new Exception("Invalid int size"); 658 659 theData = new byte[(int)size]; 660 661 log.trace("readBytes(): read attribute id {} of size={}", tid, lsize); 662 H5.H5Aread(aid, tid, theData); 663 } 664 catch (Exception ex) { 665 log.debug("readBytes(): failed to read data: ", ex); 666 } 667 finally { 668 try { 669 H5.H5Tclose(tid); 670 } 671 catch (HDF5Exception ex2) { 672 log.debug("readBytes(): H5Tclose(tid {}) failure: ", tid, ex2); 673 } 674 close(aid); 675 } 676 } 677 678 return theData; 679 } 680 681 /** 682 * Reads the data from file. 683 * 684 * read() reads the data from file to a memory buffer and returns the memory 685 * buffer. The attribute object does not hold the memory buffer. To store the 686 * memory buffer in the attribute object, one must call getData(). 687 * 688 * By default, the whole attribute is read into memory. 689 * 690 * For CompoundAttr, the memory data object is an java.util.List object. Each 691 * element of the list is a data array that corresponds to a compound field. 692 * 693 * For example, if compound dataset "comp" has the following nested 694 * structure, and member datatypes 695 * 696 * <pre> 697 * comp --> m01 (int) 698 * comp --> m02 (float) 699 * comp --> nest1 --> m11 (char) 700 * comp --> nest1 --> m12 (String) 701 * comp --> nest1 --> nest2 --> m21 (long) 702 * comp --> nest1 --> nest2 --> m22 (double) 703 * </pre> 704 * 705 * getData() returns a list of six arrays: {int[], float[], char[], 706 * String[], long[] and double[]}. 707 * 708 * @return the data read from file. 
709 * 710 * @see #getData() 711 * @see hdf.object.DataFormat#read() 712 * 713 * @throws Exception 714 * if object can not be read 715 */ 716 @Override 717 public Object read() throws Exception { 718 Object readData = null; 719 720 if (!isInited()) 721 init(); 722 723 try { 724 readData = compoundAttributeCommonIO(H5File.IO_TYPE.READ, null); 725 } 726 catch (Exception ex) { 727 log.debug("read(): failed to read compound attribute: ", ex); 728 throw new Exception("failed to read compound attribute: " + ex.getMessage(), ex); 729 } 730 731 return readData; 732 } 733 734 /** 735 * Writes the given data buffer into this attribute in a file. 736 * 737 * The data buffer is a vector that contains the data values of compound fields. The data is written 738 * into file as one data blob. 739 * 740 * @param buf 741 * The vector that contains the data values of compound fields. 742 * 743 * @throws Exception 744 * If there is an error at the HDF5 library level. 745 */ 746 @Override 747 public void write(Object buf) throws Exception { 748 if (this.getFileFormat().isReadOnly()) 749 throw new Exception("cannot write to compound attribute in file opened as read-only"); 750 751 if (!buf.equals(data)) 752 setData(buf); 753 754 init(); 755 756 if (parentObject == null) { 757 log.debug("write(Object): parent object is null; nowhere to write attribute to"); 758 return; 759 } 760 761 ((MetaDataContainer) getParentObject()).writeMetadata(this); 762 763 try { 764 compoundAttributeCommonIO(H5File.IO_TYPE.WRITE, buf); 765 } 766 catch (Exception ex) { 767 log.debug("write(Object): failed to write compound attribute: ", ex); 768 throw new Exception("failed to write compound attribute: " + ex.getMessage(), ex); 769 } 770 resetSelection(); 771 } 772 773 /* 774 * Routine to convert datatypes that are read in as byte arrays to 775 * regular types. 776 */ 777 protected Object convertByteMember(final Datatype dtype, byte[] byteData) { 778 Object theObj = null; 779 780 if (dtype.isFloat() && dtype.getDatatypeSize() == 16) 781 theObj = ((H5Datatype)dtype).byteToBigDecimal(byteData, 0); 782 else 783 theObj = super.convertByteMember(dtype, byteData); 784 785 return theObj; 786 } 787 788 private Object compoundAttributeCommonIO(H5File.IO_TYPE ioType, Object writeBuf) throws Exception { 789 H5Datatype dsDatatype = (H5Datatype)getDatatype(); 790 Object theData = null; 791 792 if (numberOfMembers <= 0) { 793 log.debug("compoundAttributeCommonIO(): attribute contains no members"); 794 throw new Exception("dataset contains no members"); 795 } 796 797 /* 798 * I/O type-specific pre-initialization. 799 */ 800 if (ioType == H5File.IO_TYPE.WRITE) { 801 if ((writeBuf == null) || !(writeBuf instanceof List)) { 802 log.debug("compoundAttributeCommonIO(): writeBuf is null or invalid"); 803 throw new Exception("write buffer is null or invalid"); 804 } 805 806 /* 807 * Check for any unsupported datatypes and fail early before 808 * attempting to write to the attribute. 
809 */ 810 if (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isCompound()) { 811 log.debug("compoundAttributeCommonIO(): cannot write attribute of type ARRAY of COMPOUND"); 812 throw new HDF5Exception("Unsupported attribute of type ARRAY of COMPOUND"); 813 } 814 815 if (dsDatatype.isVLEN() && dsDatatype.getDatatypeBase().isCompound()) { 816 log.debug("compoundAttributeCommonIO(): cannot write attribute of type VLEN of COMPOUND"); 817 throw new HDF5Exception("Unsupported attribute of type VLEN of COMPOUND"); 818 } 819 } 820 821 long aid = open(); 822 if (aid >= 0) { 823 log.trace("compoundAttributeCommonIO(): isDataLoaded={}", isDataLoaded); 824 try { 825 theData = AttributeCommonIO(aid, ioType, writeBuf); 826 } 827 finally { 828 close(aid); 829 } 830 } 831 else 832 log.debug("compoundAttributeCommonIO(): failed to open attribute"); 833 834 return theData; 835 } 836 837 /* 838 * Private recursive routine to read/write an entire compound datatype field by 839 * field. This routine is called recursively for ARRAY of COMPOUND and VLEN of 840 * COMPOUND datatypes. 841 * 842 * NOTE: the globalMemberIndex hack is ugly, but we need to keep track of a 843 * running counter so that we can index properly into the flattened name list 844 * generated from H5Datatype.extractCompoundInfo() at attribute init time. 845 */ 846 private Object compoundTypeIO(H5Datatype parentType, int nSelPoints, final H5Datatype cmpdType, 847 Object dataBuf, int[] globalMemberIndex) { 848 Object theData = null; 849 850 if (cmpdType.isArray()) { 851 log.trace("compoundTypeIO(): ARRAY type"); 852 853 long[] arrayDims = cmpdType.getArrayDims(); 854 int arrSize = nSelPoints; 855 for (int i = 0; i < arrayDims.length; i++) { 856 arrSize *= arrayDims[i]; 857 } 858 theData = compoundTypeIO(cmpdType, arrSize, (H5Datatype) cmpdType.getDatatypeBase(), dataBuf, globalMemberIndex); 859 } 860 else if (cmpdType.isVLEN() && !cmpdType.isVarStr()) { 861 /* 862 * TODO: true variable-length support. 863 */ 864 String[] errVal = new String[nSelPoints]; 865 String errStr = "*UNSUPPORTED*"; 866 867 for (int j = 0; j < nSelPoints; j++) 868 errVal[j] = errStr; 869 870 /* 871 * Setup a fake data list. 
872 */ 873 Datatype baseType = cmpdType.getDatatypeBase(); 874 while (baseType != null && !baseType.isCompound()) { 875 baseType = baseType.getDatatypeBase(); 876 } 877 878 List<Object> fakeVlenData = (List<Object>) H5Datatype.allocateArray((H5Datatype) baseType, nSelPoints); 879 fakeVlenData.add(errVal); 880 881 theData = fakeVlenData; 882 } 883 else if (cmpdType.isCompound()) { 884 long parentLength = parentType.getDatatypeSize(); 885 List<Object> memberDataList = null; 886 List<Datatype> typeList = cmpdType.getCompoundMemberTypes(); 887 List<Long> offsetList = cmpdType.getCompoundMemberOffsets(); 888 889 log.trace("compoundTypeIO(): read {} members: parentLength={}", typeList.size(), parentLength); 890 891 memberDataList = (List<Object>) H5Datatype.allocateArray(cmpdType, nSelPoints); 892 893 try { 894 for (int i = 0; i < typeList.size(); i++) { 895 long memberOffset = 0; //offset into dataBuf 896 H5Datatype memberType = null; 897 String memberName = null; 898 Object memberData = null; 899 900 try { 901 memberType = (H5Datatype) typeList.get(i); 902 memberOffset = offsetList.get(i); 903 } 904 catch (Exception ex) { 905 log.debug("compoundTypeIO(): get member {} failure: ", i, ex); 906 globalMemberIndex[0]++; 907 continue; 908 } 909 910 /* 911 * Since the type list used here is not a flattened structure, we need to skip 912 * the member selection check for compound types, as otherwise having a single 913 * member not selected would skip the reading/writing for the entire compound 914 * type. The member selection check will be deferred to the recursive compound 915 * read/write below. 916 */ 917 if (!memberType.isCompound()) { 918 if (!isMemberSelected[globalMemberIndex[0] % this.getMemberCount()]) { 919 log.debug("compoundTypeIO(): member[{}] is not selected", i); 920 globalMemberIndex[0]++; 921 continue; // the field is not selected 922 } 923 } 924 925 if (!memberType.isCompound()) { 926 try { 927 memberName = new String(flatNameList.get(globalMemberIndex[0])); 928 } 929 catch (Exception ex) { 930 log.debug("compoundTypeIO(): get member {} name failure: ", i, ex); 931 memberName = "null"; 932 } 933 } 934 935 log.trace("compoundTypeIO(): member[{}]({}) is type {} offset {}", i, memberName, 936 memberType.getDescription(), memberOffset); 937 938 try { 939 int mt_typesize = (int)memberType.getDatatypeSize(); 940 log.trace("compoundTypeIO(): member[{}] mt_typesize={}", i, mt_typesize); 941 byte[] memberbuf = new byte[nSelPoints * mt_typesize]; 942 for (int dimindx = 0; dimindx < nSelPoints; dimindx++) 943 System.arraycopy(dataBuf, (int)memberOffset + dimindx * (int)parentLength, memberbuf, dimindx * mt_typesize, mt_typesize); 944 945 if (memberType.isCompound()) { 946 memberData = compoundTypeIO(cmpdType, nSelPoints, memberType, memberbuf, 947 globalMemberIndex); 948 } 949 else if (memberType.isArray() /* || (memberType.isVLEN() && !memberType.isVarStr()) */) { 950 /* 951 * Recursively detect any nested array/vlen of compound types. 952 */ 953 boolean compoundFound = false; 954 955 Datatype base = memberType.getDatatypeBase(); 956 while (base != null) { 957 if (base.isCompound()) 958 compoundFound = true; 959 960 base = base.getDatatypeBase(); 961 } 962 963 if (compoundFound) { 964 /* 965 * Skip the top-level array/vlen type. 
966 */ 967 globalMemberIndex[0]++; 968 969 memberData = compoundTypeIO(cmpdType, nSelPoints, memberType, memberbuf, 970 globalMemberIndex); 971 } 972 else { 973 memberData = convertByteMember(memberType, (byte[])memberbuf); 974 globalMemberIndex[0]++; 975 } 976 } 977 else { 978 memberData = convertByteMember(memberType, (byte[])memberbuf); 979 globalMemberIndex[0]++; 980 } 981 } 982 catch (Exception ex) { 983 log.debug("compoundTypeIO(): failed to read member {}: ", i, ex); 984 globalMemberIndex[0]++; 985 memberData = null; 986 } 987 988 if (memberData == null) { 989 String[] errVal = new String[nSelPoints]; 990 String errStr = "*ERROR*"; 991 992 for (int j = 0; j < nSelPoints; j++) 993 errVal[j] = errStr; 994 995 memberData = errVal; 996 } 997 998 memberDataList.add(memberData); 999 } // (i = 0; i < atomicTypeList.size(); i++) 1000 } 1001 catch (Exception ex) { 1002 log.debug("compoundTypeIO(): failure: ", ex); 1003 memberDataList = null; 1004 } 1005 1006 theData = memberDataList; 1007 } 1008 1009 return theData; 1010 } 1011 1012 private Object compoundTypeWriteIO(H5Datatype parentType, final H5Datatype cmpdType, 1013 Object dataBuf, int[] globalMemberIndex) { 1014 Object theData = null; 1015 if (cmpdType.isArray()) { 1016 Object memberData = null; 1017 log.trace("compoundTypeWriteIO(): ARRAY type"); 1018 1019 theData = compoundTypeWriteIO(cmpdType, (H5Datatype) cmpdType.getDatatypeBase(), dataBuf, globalMemberIndex); 1020 } 1021 else if (cmpdType.isVLEN() && !cmpdType.isVarStr()) { 1022 /* 1023 * TODO: true variable-length support. 1024 */ 1025 String errVal = new String("*UNSUPPORTED*"); 1026 1027 /* 1028 * Setup a fake data bytes. 1029 */ 1030 Datatype baseType = cmpdType.getDatatypeBase(); 1031 while (baseType != null && !baseType.isCompound()) { 1032 baseType = baseType.getDatatypeBase(); 1033 } 1034 1035 List<Object> fakeVlenData = (List<Object>) H5Datatype.allocateArray((H5Datatype) baseType, 1); 1036 fakeVlenData.add(errVal); 1037 1038 theData = convertMemberByte((H5Datatype)baseType, fakeVlenData); 1039 } 1040 else if (cmpdType.isCompound()) { 1041 long parentLength = parentType.getDatatypeSize(); 1042 List<Object> memberDataList = null; 1043 List<Datatype> typeList = cmpdType.getCompoundMemberTypes(); 1044 List<Long> offsetList = cmpdType.getCompoundMemberOffsets(); 1045 1046 log.trace("compoundTypeWriteIO(): write {} members", typeList.size()); 1047 1048 theData = (Object)new byte[(int)cmpdType.getDatatypeSize()]; 1049 try { 1050 for (int i = 0, writeListIndex = 0; i < typeList.size(); i++) { 1051 long memberOffset = 0; //offset into dataBuf 1052 H5Datatype memberType = null; 1053 String memberName = null; 1054 Object memberData = null; 1055 1056 try { 1057 memberType = (H5Datatype) typeList.get(i); 1058 memberOffset = offsetList.get(i); 1059 } 1060 catch (Exception ex) { 1061 log.debug("compoundTypeWriteIO(): get member {} failure: ", i, ex); 1062 globalMemberIndex[0]++; 1063 continue; 1064 } 1065 long memberLength = memberType.getDatatypeSize(); 1066 1067 /* 1068 * Since the type list used here is not a flattened structure, we need to skip the member selection 1069 * check for compound types, as otherwise having a single member not selected would skip the 1070 * reading/writing for the entire compound type. The member selection check will be deferred to the 1071 * recursive compound read/write below. 
1072 */ 1073 if (!memberType.isCompound()) { 1074 if (!isMemberSelected[globalMemberIndex[0] % this.getMemberCount()]) { 1075 log.debug("compoundTypeWriteIO(): member[{}] is not selected", i); 1076 globalMemberIndex[0]++; 1077 continue; // the field is not selected 1078 } 1079 } 1080 1081 if (!memberType.isCompound()) { 1082 try { 1083 memberName = new String(flatNameList.get(globalMemberIndex[0])); 1084 } 1085 catch (Exception ex) { 1086 log.debug("compoundTypeWriteIO(): get member {} name failure: ", i, ex); 1087 memberName = "null"; 1088 } 1089 } 1090 1091 log.trace("compoundTypeWriteIO(): member[{}]({}) is type {} offset {}", i, memberName, 1092 memberType.getDescription(), memberOffset); 1093 1094 try { 1095 /* 1096 * TODO: currently doesn't correctly handle non-selected compound members. 1097 */ 1098 memberData = ((List<?>) dataBuf).get(i); 1099 } 1100 catch (Exception ex) { 1101 log.debug("compoundTypeWriteIO(): get member[{}] data failure: ", i, ex); 1102 globalMemberIndex[0]++; 1103 continue; 1104 } 1105 1106 if (memberData == null) { 1107 log.debug("compoundTypeWriteIO(): member[{}] data is null", i); 1108 globalMemberIndex[0]++; 1109 continue; 1110 } 1111 1112 try { 1113 if (memberType.isCompound()) { 1114 List<?> nestedList = (List<?>) ((List<?>) dataBuf).get(i); 1115 memberData = compoundTypeWriteIO(cmpdType, memberType, nestedList, globalMemberIndex); 1116 } 1117 else { 1118 memberData = writeSingleCompoundMember(memberType, memberData); 1119 globalMemberIndex[0]++; 1120 } 1121 } 1122 catch (Exception ex) { 1123 log.debug("compoundTypeWriteIO(): failed to write member[{}]: ", i, ex); 1124 globalMemberIndex[0]++; 1125 } 1126 1127 byte[] indexedBytes = convertMemberByte((H5Datatype)memberType, memberData); 1128 System.arraycopy(indexedBytes, 0, theData, writeListIndex, (int)memberLength); 1129 writeListIndex += memberLength; 1130 } // (i = 0, writeListIndex = 0; i < atomicTypeList.size(); i++) 1131 } 1132 catch (Exception ex) { 1133 log.debug("compoundTypeWriteIO(): failure: ", ex); 1134 theData = null; 1135 } 1136 } 1137 1138 return theData; 1139 } 1140 1141 /* 1142 * Routine to convert datatypes that are in object arrays to 1143 * bytes. 1144 */ 1145 private byte[] convertMemberByte(final Datatype dtype, Object theObj) { 1146 byte[] byteData = null; 1147 1148 if (dtype.getDatatypeSize() == 1) { 1149 /* 1150 * Normal byte[] type, such as an integer datatype of size 1. 1151 */ 1152 byteData = (byte[])theObj; 1153 } 1154 else if (dtype.isString() && !dtype.isVarStr() && convertByteToString) { 1155 log.trace("convertMemberByte(): converting string array to byte array"); 1156 1157 byteData = stringToByte((String[])theObj, (int) dtype.getDatatypeSize()); 1158 } 1159 else if (dtype.isInteger()) { 1160 log.trace("convertMemberByte(): converting integer array to byte array"); 1161 1162 switch ((int)dtype.getDatatypeSize()) { 1163 case 1: 1164 /* 1165 * Normal byte[] type, such as an integer datatype of size 1. 
1166 */ 1167 byteData = (byte[])theObj; 1168 break; 1169 case 2: 1170 byteData = HDFNativeData.shortToByte(0, 1, (short[])theObj); 1171 break; 1172 case 4: 1173 byteData = HDFNativeData.intToByte(0, 1, (int[])theObj); 1174 break; 1175 case 8: 1176 byteData = HDFNativeData.longToByte(0, 1, (long[])theObj); 1177 break; 1178 default: 1179 log.debug("convertMemberByte(): invalid datatype size"); 1180 byteData = null; 1181 break; 1182 } 1183 } 1184 else if (dtype.isFloat()) { 1185 log.trace("convertMemberByte(): converting float array to byte array"); 1186 1187 if (dtype.getDatatypeSize() == 16) 1188 byteData = ((H5Datatype)dtype).bigDecimalToByte((BigDecimal[])theObj, 0); 1189 else if (dtype.getDatatypeSize() == 8) 1190 byteData = HDFNativeData.doubleToByte(0, 1, (double[])theObj); 1191 else 1192 byteData = HDFNativeData.floatToByte(0, 1, (float[])theObj); 1193 } 1194 else if (dtype.isArray()) { 1195 Datatype baseType = dtype.getDatatypeBase(); 1196 1197 /* 1198 * Retrieve the real base datatype in the case of ARRAY of ARRAY datatypes. 1199 */ 1200 while (baseType.isArray()) 1201 baseType = baseType.getDatatypeBase(); 1202 1203 /* 1204 * Optimize for the common cases of Arrays. 1205 */ 1206 switch (baseType.getDatatypeClass()) { 1207 case Datatype.CLASS_INTEGER: 1208 case Datatype.CLASS_FLOAT: 1209 case Datatype.CLASS_CHAR: 1210 case Datatype.CLASS_STRING: 1211 case Datatype.CLASS_BITFIELD: 1212 case Datatype.CLASS_OPAQUE: 1213 case Datatype.CLASS_COMPOUND: 1214 case Datatype.CLASS_REFERENCE: 1215 case Datatype.CLASS_ENUM: 1216 case Datatype.CLASS_VLEN: 1217 case Datatype.CLASS_TIME: 1218 byteData = convertMemberByte(baseType, theObj); 1219 break; 1220 1221 case Datatype.CLASS_ARRAY: 1222 { 1223 Datatype arrayType = dtype.getDatatypeBase(); 1224 1225 long[] arrayDims = dtype.getArrayDims(); 1226 int arrSize = 1; 1227 for (int i = 0; i < arrayDims.length; i++) { 1228 arrSize *= arrayDims[i]; 1229 } 1230 1231 byteData = new byte[arrSize * (int)arrayType.getDatatypeSize()]; 1232 1233 for (int i = 0; i < arrSize; i++) { 1234 byte[] indexedBytes = convertMemberByte(arrayType, ((Object[]) theObj)[i]); 1235 System.arraycopy(indexedBytes, 0, byteData, (int)(i * arrayType.getDatatypeSize()), (int)arrayType.getDatatypeSize()); 1236 } 1237 1238 break; 1239 } 1240 1241 case Datatype.CLASS_NO_CLASS: 1242 default: 1243 log.debug("convertMemberByte(): invalid datatype class"); 1244 byteData = null; 1245 } 1246 } 1247 else if (dtype.isCompound()) { 1248 /* 1249 * TODO: still valid after reading change? 1250 */ 1251 byteData = (byte[])convertCompoundMemberBytes(dtype, (List<Object>)theObj); 1252 } 1253 else { 1254 byteData = (byte[])theObj; 1255 } 1256 1257 return byteData; 1258 } 1259 1260 /** 1261 * Given an array of objects representing a compound Datatype, converts each of 1262 * its members into bytes and returns the results. 
1263 * 1264 * @param dtype 1265 * The compound datatype to convert 1266 * @param theObj 1267 * The object array representing the data of the compound Datatype 1268 * @return The converted bytes of the objects 1269 */ 1270 private byte[] convertCompoundMemberBytes(final Datatype dtype, List<Object> theObj) { 1271 List<Datatype> allSelectedTypes = Arrays.asList(this.getSelectedMemberTypes()); 1272 List<Datatype> localTypes = new ArrayList<>(dtype.getCompoundMemberTypes()); 1273 Iterator<Datatype> localIt = localTypes.iterator(); 1274 while (localIt.hasNext()) { 1275 Datatype curType = localIt.next(); 1276 1277 if (curType.isCompound()) 1278 continue; 1279 1280 if (!allSelectedTypes.contains(curType)) 1281 localIt.remove(); 1282 } 1283 1284 byte[] byteData = new byte[(int)dtype.getDatatypeSize()]; 1285 for (int i = 0, index = 0; i < localTypes.size(); i++) { 1286 Datatype curType = localTypes.get(i); 1287 byte[] indexedBytes = null; 1288 if (curType.isCompound()) 1289 indexedBytes = convertCompoundMemberBytes(curType, (List<Object>)theObj.get(i)); 1290 else 1291 indexedBytes = convertMemberByte(curType, theObj.get(i)); 1292 1293 System.arraycopy(indexedBytes, 0, byteData, index + (int)curType.getDatatypeSize(), (int)curType.getDatatypeSize()); 1294 index += curType.getDatatypeSize(); 1295 } 1296 1297 return byteData; 1298 } 1299 1300 /* 1301 * Private routine to convert a single field of a compound datatype. 1302 */ 1303 private Object writeSingleCompoundMember(final H5Datatype memberType, Object theData) throws Exception { 1304 /* 1305 * Check for any unsupported datatypes before attempting to write this compound 1306 * member. 1307 */ 1308 if (memberType.isVLEN() && !memberType.isVarStr()) { 1309 log.debug("writeSingleCompoundMember(): writing of VL non-strings is not currently supported"); 1310 throw new Exception("writing of VL non-strings is not currently supported"); 1311 } 1312 1313 /* 1314 * Perform any necessary data conversions before writing the data. 
1315 */ 1316 Object tmpData = theData; 1317 try { 1318 if (memberType.isUnsigned()) { 1319 // Check if we need to convert unsigned integer data from Java-style 1320 // to C-style integers 1321 long tsize = memberType.getDatatypeSize(); 1322 String cname = theData.getClass().getName(); 1323 char dname = cname.charAt(cname.lastIndexOf('[') + 1); 1324 boolean doIntConversion = (((tsize == 1) && (dname == 'S')) 1325 || ((tsize == 2) && (dname == 'I')) || ((tsize == 4) && (dname == 'J'))); 1326 1327 if (doIntConversion) { 1328 log.trace("writeSingleCompoundMember(): converting integer data to unsigned C-type integers"); 1329 tmpData = convertToUnsignedC(theData, null); 1330 } 1331 } 1332 else if (memberType.isString() && (Array.get(theData, 0) instanceof String)) { 1333 log.trace("writeSingleCompoundMember(): converting string array to byte array"); 1334 tmpData = stringToByte((String[]) theData, (int) memberType.getDatatypeSize()); 1335 } 1336 else if (memberType.isEnum() && (Array.get(theData, 0) instanceof String)) { 1337 log.trace("writeSingleCompoundMember(): converting enum names to values"); 1338 tmpData = memberType.convertEnumNameToValue((String[]) theData); 1339 } 1340 } 1341 catch (Exception ex) { 1342 log.debug("writeSingleCompoundMember(): data conversion failure: ", ex); 1343 tmpData = null; 1344 } 1345 1346 if (tmpData == null) { 1347 log.debug("writeSingleCompoundMember(): data is null"); 1348 } 1349 1350 return tmpData; 1351 } 1352 1353 /** 1354 * Converts the data values of this data object to appropriate Java integers if 1355 * they are unsigned integers. 1356 * 1357 * @see hdf.object.Dataset#convertToUnsignedC(Object) 1358 * @see hdf.object.Dataset#convertFromUnsignedC(Object, Object) 1359 * 1360 * @return the converted data buffer. 1361 */ 1362 @Override 1363 public Object convertFromUnsignedC() { 1364 throw new UnsupportedOperationException("H5CompoundDS:convertFromUnsignedC Unsupported operation."); 1365 } 1366 1367 /** 1368 * Converts Java integer data values of this data object back to unsigned C-type 1369 * integer data if they are unsigned integers. 1370 * 1371 * @see hdf.object.Dataset#convertToUnsignedC(Object) 1372 * @see hdf.object.Dataset#convertToUnsignedC(Object, Object) 1373 * 1374 * @return the converted data buffer. 1375 */ 1376 @Override 1377 public Object convertToUnsignedC() { 1378 throw new UnsupportedOperationException("H5CompoundDS:convertToUnsignedC Unsupported operation."); 1379 } 1380 1381 /* Implement interface Attribute */ 1382 1383 /** 1384 * Returns the HObject to which this Attribute is currently "attached". 1385 * 1386 * @return the HObject to which this Attribute is currently "attached". 1387 */ 1388 public HObject getParentObject() { 1389 return parentObject; 1390 } 1391 1392 /** 1393 * Sets the HObject to which this Attribute is "attached". 1394 * 1395 * @param pObj 1396 * the new HObject to which this Attribute is "attached". 1397 */ 1398 public void setParentObject(HObject pObj) { 1399 parentObject = pObj; 1400 } 1401 1402 /** 1403 * set a property for the attribute. 1404 * 1405 * @param key the attribute Map key 1406 * @param value the attribute Map value 1407 */ 1408 public void setProperty(String key, Object value) { 1409 properties.put(key, value); 1410 } 1411 1412 /** 1413 * get a property for a given key. 1414 * 1415 * @param key the attribute Map key 1416 * 1417 * @return the property 1418 */ 1419 public Object getProperty(String key) { 1420 return properties.get(key); 1421 } 1422 1423 /** 1424 * get all property keys. 
1425 * 1426 * @return the Collection of property keys 1427 */ 1428 public Collection<String> getPropertyKeys() { 1429 return properties.keySet(); 1430 } 1431 1432 /** 1433 * Returns the name of the object. For example, "Raster Image #2". 1434 * 1435 * @return The name of the object. 1436 */ 1437 public final String getAttributeName() { 1438 return getName(); 1439 } 1440 1441 /** 1442 * Retrieves the attribute data from the file. 1443 * 1444 * @return the attribute data. 1445 * 1446 * @throws Exception 1447 * if the data can not be retrieved 1448 */ 1449 public final Object getAttributeData() throws Exception, OutOfMemoryError { 1450 return getData(); 1451 } 1452 1453 /** 1454 * Returns the datatype of the attribute. 1455 * 1456 * @return the datatype of the attribute. 1457 */ 1458 public final Datatype getAttributeDatatype() { 1459 return getDatatype(); 1460 } 1461 1462 /** 1463 * Returns the space type for the attribute. It returns a 1464 * negative number if it failed to retrieve the type information from 1465 * the file. 1466 * 1467 * @return the space type for the attribute. 1468 */ 1469 public final int getAttributeSpaceType() { 1470 return getSpaceType(); 1471 } 1472 1473 /** 1474 * Returns the rank (number of dimensions) of the attribute. It returns a 1475 * negative number if it failed to retrieve the dimension information from 1476 * the file. 1477 * 1478 * @return the number of dimensions of the attribute. 1479 */ 1480 public final int getAttributeRank() { 1481 return getRank(); 1482 } 1483 1484 /** 1485 * Returns the selected size of the rows and columns of the attribute. It returns a 1486 * negative number if it failed to retrieve the size information from 1487 * the file. 1488 * 1489 * @return the selected size of the rows and colums of the attribute. 1490 */ 1491 public final int getAttributePlane() { 1492 return (int)getWidth() * (int)getHeight(); 1493 } 1494 1495 /** 1496 * Returns the array that contains the dimension sizes of the data value of 1497 * the attribute. It returns null if it failed to retrieve the dimension 1498 * information from the file. 1499 * 1500 * @return the dimension sizes of the attribute. 1501 */ 1502 public final long[] getAttributeDims() { 1503 return getDims(); 1504 } 1505 1506 /** 1507 * @return true if the data is a single scalar point; otherwise, returns 1508 * false. 1509 */ 1510 public boolean isAttributeScalar() { 1511 return isScalar(); 1512 } 1513 1514 /** 1515 * Not for public use in the future. 1516 * 1517 * setData() is not safe to use because it changes memory buffer 1518 * of the dataset object. Dataset operations such as write/read 1519 * will fail if the buffer type or size is changed. 1520 * 1521 * @param d the object data -must be an array of Objects 1522 */ 1523 public void setAttributeData(Object d) { 1524 setData(d); 1525 } 1526 1527 /** 1528 * Writes the memory buffer of this dataset to file. 1529 * 1530 * @throws Exception if buffer can not be written 1531 */ 1532 public void writeAttribute() throws Exception { 1533 write(); 1534 } 1535 1536 /** 1537 * Writes the given data buffer into this attribute in a file. 1538 * 1539 * The data buffer is a vector that contains the data values of compound fields. The data is written 1540 * into file as one data blob. 1541 * 1542 * @param buf 1543 * The vector that contains the data values of compound fields. 1544 * 1545 * @throws Exception 1546 * If there is an error at the library level. 
1547 */ 1548 public void writeAttribute(Object buf) throws Exception { 1549 write(buf); 1550 } 1551 1552 /** 1553 * Returns a string representation of the data value. For 1554 * example, "0, 255". 1555 * 1556 * For a compound datatype, it will be a 1D array of strings with field 1557 * members separated by the delimiter. For example, 1558 * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int, 1559 * float} of three data points. 1560 * 1561 * @param delimiter 1562 * The delimiter used to separate individual data points. It 1563 * can be a comma, semicolon, tab or space. For example, 1564 * toString(",") will separate data by commas. 1565 * 1566 * @return the string representation of the data values. 1567 */ 1568 public String toAttributeString(String delimiter) { 1569 return toString(delimiter, -1); 1570 } 1571 1572 /** 1573 * Returns a string representation of the data value. For 1574 * example, "0, 255". 1575 * 1576 * For a compound datatype, it will be a 1D array of strings with field 1577 * members separated by the delimiter. For example, 1578 * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int, 1579 * float} of three data points. 1580 * 1581 * @param delimiter 1582 * The delimiter used to separate individual data points. It 1583 * can be a comma, semicolon, tab or space. For example, 1584 * toString(",") will separate data by commas. 1585 * @param maxItems 1586 * The maximum number of Array values to return 1587 * 1588 * @return the string representation of the data values. 1589 */ 1590 public String toAttributeString(String delimiter, int maxItems) { 1591 Object theData = originalBuf; 1592 if (theData == null) { 1593 log.debug("toString: value is null"); 1594 return null; 1595 } 1596 1597 if (!(theData instanceof List<?>)) { 1598 log.trace("toString: value is not list"); 1599 return null; 1600 } 1601 1602 // attribute value is an array 1603 StringBuilder sb = new StringBuilder(); 1604 int numberTypes = ((ArrayList<Object[]>)theData).size(); 1605 List<Datatype> cmpdTypes = getDatatype().getCompoundMemberTypes(); 1606 int n = Array.getLength(((ArrayList<Object[]>)theData).get(0)); 1607 if ((maxItems > 0) && (n > maxItems)) 1608 n = maxItems; 1609 1610 for (int i = 0; i < n; i++) { 1611 if (i > 0) 1612 sb.append(delimiter); 1613 sb.append("{"); 1614 for (int dv = 0; dv < numberTypes; dv++) { 1615 if (dv > 0) 1616 sb.append(delimiter); 1617 1618 Object theobj = ((ArrayList<Object[]>)theData).get(dv); 1619 1620 Class<? 
extends Object> valClass = theobj.getClass();

                if (!valClass.isArray()) {
                    log.trace("toString: member - not array");
                    String strValue = theobj.toString();
                    if (maxItems > 0 && strValue.length() > maxItems) {
                        // truncate the extra characters
                        strValue = strValue.substring(0, maxItems);
                    }
                    sb.append(strValue);
                    continue;
                }

                log.trace("toString[{}]: is_enum={} is_unsigned={} isStdRef={}", i, cmpdTypes.get(dv).isEnum(),
                        cmpdTypes.get(dv).isUnsigned(), ((H5Datatype) getDatatype()).isStdRef());

                if (cmpdTypes.get(dv).isEnum()) {
                    String cname = valClass.getName();
                    char dname = cname.charAt(cname.lastIndexOf('[') + 1);
                    log.trace("toString: is_enum with cname={} dname={}", cname, dname);

                    Map<String, String> map = cmpdTypes.get(dv).getEnumMembers();
                    String theValue = null;
                    switch (dname) {
                        case 'B':
                            byte[] barray = (byte[]) theobj;
                            short sValue = barray[i];
                            theValue = String.valueOf(sValue);
                            if (map.containsKey(theValue)) {
                                sb.append(map.get(theValue));
                            }
                            else
                                sb.append(sValue);
                            break;
                        case 'S':
                            short[] sarray = (short[]) theobj;
                            int iValue = sarray[i];
                            theValue = String.valueOf(iValue);
                            if (map.containsKey(theValue)) {
                                sb.append(map.get(theValue));
                            }
                            else
                                sb.append(iValue);
                            break;
                        case 'I':
                            int[] iarray = (int[]) theobj;
                            long lValue = iarray[i];
                            theValue = String.valueOf(lValue);
                            if (map.containsKey(theValue)) {
                                sb.append(map.get(theValue));
                            }
                            else
                                sb.append(lValue);
                            break;
                        case 'J':
                            long[] larray = (long[]) theobj;
                            Long l = larray[i];
                            theValue = Long.toString(l);
                            if (map.containsKey(theValue)) {
                                sb.append(map.get(theValue));
                            }
                            else
                                sb.append(theValue);
                            break;
                        default:
                            sb.append(Array.get(theobj, i));
                            break;
                    }
                }
                else if (cmpdTypes.get(dv).isUnsigned()) {
                    String cname = valClass.getName();
                    char dname = cname.charAt(cname.lastIndexOf('[') + 1);
                    log.trace("toString: is_unsigned with cname={} dname={}", cname, dname);

                    switch (dname) {
                        case 'B':
                            byte[] barray = (byte[]) theobj;
                            short sValue = barray[i];
                            if (sValue < 0) {
                                sValue += 256;
                            }
                            sb.append(sValue);
                            break;
                        case 'S':
                            short[] sarray = (short[]) theobj;
                            int iValue = sarray[i];
                            if (iValue < 0) {
                                iValue += 65536;
                            }
                            sb.append(iValue);
                            break;
                        case 'I':
                            int[] iarray = (int[]) theobj;
                            long lValue = iarray[i];
                            if (lValue < 0) {
                                lValue += 4294967296L;
                            }
                            sb.append(lValue);
                            break;
                        case 'J':
                            long[] larray = (long[]) theobj;
                            Long l = larray[i];
                            String theValue = Long.toString(l);
                            if (l < 0) {
                                l = (l << 1) >>> 1;
                                BigInteger big1 = new BigInteger("9223372036854775808"); // 2^63
                                BigInteger big2 = new BigInteger(l.toString());
                                BigInteger big = big1.add(big2);
                                theValue = big.toString();
                            }
                            sb.append(theValue);
                            break;
                        default:
                            String strValue = Array.get(theobj, i).toString();
                            if (maxItems > 0 && strValue.length() > maxItems) {
                                // truncate the extra characters
                                strValue = strValue.substring(0, maxItems);
                            }
                            sb.append(strValue);
                            break;
                    }
                }
                else if (((H5Datatype) getDatatype()).isStdRef()) {
                    String cname = valClass.getName();
                    char dname =
cname.charAt(cname.lastIndexOf('[') + 1); 1745 log.trace("toString: isStdRef with cname={} dname={}", cname, dname); 1746 String ref_str = ((H5ReferenceType) getDatatype()).getObjectReferenceName((byte[])theData); 1747 log.trace("toString: ref_str={}", ref_str); 1748 sb.append(ref_str); 1749 } 1750 else { 1751 log.trace("toString: not enum or unsigned"); 1752 Object value = Array.get(theobj, i); 1753 String strValue; 1754 1755 if (value == null) { 1756 strValue = "null"; 1757 } 1758 else { 1759 strValue = value.toString(); 1760 } 1761 1762 if (maxItems > 0 && strValue.length() > maxItems) { 1763 // truncate the extra characters 1764 strValue = strValue.substring(0, maxItems); 1765 } 1766 sb.append(strValue); 1767 } 1768 } // end for (int dv = 0; dv < numberTypes; dv++) 1769 sb.append("}"); 1770 } // end for (int i = 1; i < n; i++) 1771 1772 return sb.toString(); 1773 } 1774 1775 /* Implement interface H5Attribute */ 1776 1777 /** 1778 * The general read and write attribute operations for hdf5 object data. 1779 * 1780 * @param attr_id 1781 * the attribute to access 1782 * @param ioType 1783 * the type of IO operation 1784 * @param objBuf 1785 * the data buffer to use for write operation 1786 * 1787 * @return the attribute data 1788 * 1789 * @throws Exception 1790 * if the data can not be retrieved 1791 */ 1792 public Object AttributeCommonIO(long attr_id, H5File.IO_TYPE ioType, Object objBuf) throws Exception { 1793 H5Datatype dsDatatype = (H5Datatype) getDatatype(); 1794 Object theData = null; 1795 1796 long dt_size = dsDatatype.getDatatypeSize(); 1797 log.trace("AttributeCommonIO(): create native"); 1798 long tid = dsDatatype.createNative(); 1799 if (ioType == H5File.IO_TYPE.READ) { 1800 log.trace("AttributeCommonIO():read ioType isNamed={} isEnum={} isText={} isRefObj={}", dsDatatype.isNamed(), dsDatatype.isEnum(), dsDatatype.isText(), dsDatatype.isRefObj()); 1801 1802 long lsize = 1; 1803 for (int j = 0; j < dims.length; j++) { 1804 lsize *= dims[j]; 1805 } 1806 log.trace("AttributeCommonIO():read ioType dt_size={} lsize={}", dt_size, lsize); 1807 1808 try { 1809 // Read data. 1810 Object attr_data = new byte[(int)(dt_size * lsize)]; 1811 1812 try { 1813 H5.H5Aread(attr_id, tid, attr_data); 1814 } 1815 catch (Exception ex) { 1816 log.debug("AttributeCommonIO(): H5Aread failure: ", ex); 1817 } 1818 theData = compoundTypeIO(dsDatatype, (int)lsize, dsDatatype, attr_data, new int[]{0}); 1819 } 1820 catch (Exception ex) { 1821 log.debug("AttributeCommonIO():read ioType read failure: ", ex); 1822 throw new Exception(ex.getMessage(), ex); 1823 } 1824 finally { 1825 dsDatatype.close(tid); 1826 } 1827 for (int i = 0; i < ((ArrayList<Object[]>)theData).size(); i++) { 1828 Object theobj = ((ArrayList<Object[]>)theData).get(i); 1829 log.trace("AttributeCommonIO():read ioType data: {}", theobj); 1830 } 1831 originalBuf = theData; 1832 isDataLoaded = true; 1833 } // H5File.IO_TYPE.READ 1834 else { 1835 theData = compoundTypeWriteIO(dsDatatype, dsDatatype, objBuf, new int[]{0}); 1836 try { 1837 H5.H5Awrite(attr_id, tid, theData); 1838 } 1839 catch (Exception ex) { 1840 log.debug("AttributeCommonIO(): H5Awrite failure: ", ex); 1841 } 1842 finally { 1843 dsDatatype.close(tid); 1844 } 1845 } // H5File.IO_TYPE.WRITE 1846 1847 return theData; 1848 } 1849 1850 /** 1851 * Read a subset of an attribute for hdf5 object data. 
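 *
 * Note that in the current implementation the selection is not actually applied:
 * the commented-out subsetting logic below is disabled and the method simply
 * returns the original buffer read for the whole attribute.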
1852 * 1853 * @return the selected attribute data 1854 * 1855 * @throws Exception 1856 * if the data can not be retrieved 1857 */ 1858 public Object AttributeSelection() throws Exception { 1859 return originalBuf; 1860// H5Datatype dsDatatype = (H5Datatype) getDatatype(); 1861// Object theData = H5Datatype.allocateArray(dsDatatype, (int)nPoints); 1862// if (dsDatatype.isText() && convertByteToString && (theData instanceof byte[])) { 1863// log.trace("AttributeSelection(): isText: converting byte array to string array"); 1864// theData = byteToString((byte[]) theData, (int) dsDatatype.getDatatypeSize()); 1865// } 1866// else if (dsDatatype.isFloat() && dsDatatype.getDatatypeSize() == 16) { 1867// log.trace("AttributeSelection(): isFloat: converting byte array to BigDecimal array"); 1868// theData = dsDatatype.byteToBigDecimal(0, (int)nPoints, (byte[]) theData); 1869// } 1870// else if (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isFloat() && dsDatatype.getDatatypeBase().getDatatypeSize() == 16) { 1871// log.trace("AttributeSelection(): isArray and isFloat: converting byte array to BigDecimal array"); 1872// long[] arrayDims = dsDatatype.getArrayDims(); 1873// int asize = (int)nPoints; 1874// for (int j = 0; j < arrayDims.length; j++) { 1875// asize *= arrayDims[j]; 1876// } 1877// theData = ((H5Datatype)dsDatatype.getDatatypeBase()).byteToBigDecimal(0, asize, (byte[]) theData); 1878// } 1879// Object theOrig = originalBuf; 1880 1881 /* 1882 * Copy the selection from originalBuf to theData Only three dims are involved and selected data is 2 dimensions 1883 * getHeight() is the row dimension getWidth() is the col dimension 1884 * getDepth() is the frame dimension 1885 */ 1886// long[] start = getStartDims(); 1887// long curFrame = start[selectedIndex[2]]; 1888// for (int col = 0; col < (int)getWidth(); col++) { 1889// for (int row = 0; row < (int)getHeight(); row++) { 1890 1891// int k = (int)startDims[selectedIndex[2]] * (int)getDepth(); 1892// int index = row * (int)getWidth() + col; 1893// log.trace("compoundAttributeSelection(): point{} row:col:k={}:{}:{}", curFrame, row, col, k); 1894// int fromIndex = ((int)curFrame * (int)getWidth() * (int)getHeight() + 1895// col * (int)getHeight() + 1896// row);// * (int) dsDatatype.getDatatypeSize(); 1897// int toIndex = (col * (int)getHeight() + 1898// row);// * (int) dsDatatype.getDatatypeSize(); 1899// int objSize = 1; 1900// if (dsDatatype.isArray()) { 1901// long[] arrayDims = dsDatatype.getArrayDims(); 1902// objSize = (int)arrayDims.length; 1903// } 1904// else if (dsDatatype.isRef()) { 1905// objSize = (int)HDF5Constants.H5R_REF_BUF_SIZE; 1906// fromIndex = fromIndex * HDF5Constants.H5R_REF_BUF_SIZE; 1907// toIndex = toIndex * HDF5Constants.H5R_REF_BUF_SIZE; 1908// } 1909// for (int i = 0; i < ((ArrayList<Object[]>)theOrig).size(); i++) { 1910// Object theOrigobj = ((ArrayList<Object[]>)theOrig).get(i); 1911// Object theDataobj = ((ArrayList<Object[]>)theData).get(i); 1912// log.trace("compoundAttributeSelection(): theOrig={} theData={}", theOrigobj, theDataobj); 1913// System.arraycopy(theOrig, fromIndex, theData, toIndex, objSize); 1914// } 1915// } 1916// } 1917 1918// log.trace("compoundAttributeSelection(): theData={}", theData); 1919// return theData; 1920 } 1921}