001/***************************************************************************** 002 * Copyright by The HDF Group. * 003 * Copyright by the Board of Trustees of the University of Illinois. * 004 * All rights reserved. * 005 * * 006 * This file is part of the HDF Java Products distribution. * 007 * The full copyright notice, including terms governing use, modification, * 008 * and redistribution, is contained in the files COPYING and Copyright.html. * 009 * COPYING can be found at the root of the source code distribution tree. * 010 * Or, see https://support.hdfgroup.org/products/licenses.html * 011 * If you do not have access to either file, you may request a copy from * 012 * help@hdfgroup.org. * 013 ****************************************************************************/ 014 015package hdf.object.h5; 016 017import java.lang.reflect.Array; 018import java.math.BigDecimal; 019import java.math.BigInteger; 020import java.text.DecimalFormat; 021import java.util.ArrayList; 022import java.util.Arrays; 023import java.util.Collection; 024import java.util.HashMap; 025import java.util.Iterator; 026import java.util.List; 027import java.util.Map; 028import java.util.Vector; 029 030import hdf.hdf5lib.H5; 031import hdf.hdf5lib.HDF5Constants; 032import hdf.hdf5lib.HDFNativeData; 033import hdf.hdf5lib.exceptions.HDF5DataFiltersException; 034import hdf.hdf5lib.exceptions.HDF5Exception; 035 036import hdf.object.Attribute; 037import hdf.object.CompoundDS; 038import hdf.object.Dataset; 039import hdf.object.Datatype; 040import hdf.object.FileFormat; 041import hdf.object.Group; 042import hdf.object.HObject; 043import hdf.object.MetaDataContainer; 044import hdf.object.Utils; 045 046import hdf.object.h5.H5Datatype; 047 048/** 049 * The H5CompoundAttr class defines an HDF5 attribute of compound datatypes. 050 * 051 * An attribute is a (name, value) pair of metadata attached to a primary data object such as a 052 * dataset, group or named datatype. 
 *
 * Like a dataset, an attribute has a name, datatype and dataspace.
 *
 * An HDF5 compound datatype is similar to a struct in C or a common block in Fortran: it is a
 * collection of one or more atomic types or small arrays of such types. Each member of a compound
 * type has a name which is unique within that type, and a byte offset that determines the first
 * byte (smallest byte address) of that member in a compound datum.
 *
 * For more information on HDF5 attributes and datatypes, read the <a href=
 * "https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5
 * User's Guide</a>.
 *
 * There are two basic types of compound attributes: simple compound data and nested compound data.
 * Members of a simple compound attribute have atomic datatypes. Members of a nested compound attribute
 * are compound or array of compound data.
 *
 * Since Java does not understand C structures, we cannot directly read/write compound data values
 * as in the following C example.
 *
 * <pre>
 * typedef struct s1_t {
 *     int    a;
 *     float  b;
 *     double c;
 * } s1_t;
 * s1_t s1[LENGTH];
 * ...
 * H5Dwrite(..., s1);
 * H5Dread(..., s1);
 * </pre>
 *
 * Values of compound data fields are stored in a java.util.Vector object. We read and write compound
 * data by fields instead of compound structure. As for the example above, the java.util.Vector
 * object has three elements: int[LENGTH], float[LENGTH] and double[LENGTH]. Since Java understands
 * the primitive datatypes of int, float and double, we will be able to read/write the compound data
 * by field.
089 * 090 * @version 1.0 6/15/2021 091 * @author Allen Byrne 092 */ 093public class H5CompoundAttr extends CompoundDS implements H5Attribute 094{ 095 private static final long serialVersionUID = 2072473407027648309L; 096 097 private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5CompoundAttr.class); 098 099 /** The HObject to which this NC2Attribute is attached, Attribute interface */ 100 protected HObject parentObject; 101 102 /** additional information and properties for the attribute, Attribute interface */ 103 private transient Map<String, Object> properties; 104 105 /** 106 * Create an attribute with specified name, data type and dimension sizes. 107 * 108 * @param parentObj 109 * the HObject to which this H5CompoundAttr is attached. 110 * @param attrName 111 * the name of the attribute. 112 * @param attrType 113 * the datatype of the attribute. 114 * @param attrDims 115 * the dimension sizes of the attribute, null for scalar attribute 116 * 117 * @see hdf.object.Datatype 118 */ 119 public H5CompoundAttr(HObject parentObj, String attrName, Datatype attrType, long[] attrDims) { 120 this(parentObj, attrName, attrType, attrDims, null); 121 } 122 123 /** 124 * Create an attribute with specific name and value. 125 * 126 * @param parentObj 127 * the HObject to which this H5CompoundAttr is attached. 128 * @param attrName 129 * the name of the attribute. 130 * @param attrType 131 * the datatype of the attribute. 132 * @param attrDims 133 * the dimension sizes of the attribute, null for scalar attribute 134 * @param attrValue 135 * the value of the attribute, null if no value 136 * 137 * @see hdf.object.Datatype 138 */ 139 @SuppressWarnings({ "rawtypes", "unchecked", "deprecation" }) 140 public H5CompoundAttr(HObject parentObj, String attrName, Datatype attrType, long[] attrDims, Object attrValue) { 141 super((parentObj == null) ? null : parentObj.getFileFormat(), attrName, 142 (parentObj == null) ? 
null : parentObj.getFullName(), null); 143 144 log.trace("CompoundAttr: start {}", parentObj); 145 this.parentObject = parentObj; 146 147 datatype = attrType; 148 149 if (attrValue != null) { 150 data = attrValue; 151 originalBuf = attrValue; 152 isDataLoaded = true; 153 } 154 properties = new HashMap(); 155 156 if (attrDims == null) { 157 rank = 1; 158 dims = new long[] { 1 }; 159 isScalar = true; 160 } 161 else { 162 dims = attrDims; 163 rank = dims.length; 164 isScalar = false; 165 } 166 167 selectedDims = new long[rank]; 168 startDims = new long[rank]; 169 selectedStride = new long[rank]; 170 171 numberOfMembers = 0; 172 memberNames = null; 173 isMemberSelected = null; 174 memberTypes = null; 175 176 log.trace("attrName={}, attrType={}, attrValue={}, rank={}", 177 attrName, attrType.getDescription(), data, rank); 178 179 resetSelection(); 180 } 181 182 /* 183 * (non-Javadoc) 184 * 185 * @see hdf.object.HObject#open() 186 */ 187 @Override 188 public long open() { 189 if (parentObject == null) { 190 log.debug("open(): attribute's parent object is null"); 191 return HDF5Constants.H5I_INVALID_HID; 192 } 193 194 long aid = HDF5Constants.H5I_INVALID_HID; 195 long pObjID = HDF5Constants.H5I_INVALID_HID; 196 197 try { 198 pObjID = parentObject.open(); 199 if (pObjID >= 0) { 200 if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5))) { 201 log.trace("open(): FILE_TYPE_HDF5"); 202 if (H5.H5Aexists(pObjID, getName())) 203 aid = H5.H5Aopen(pObjID, getName(), HDF5Constants.H5P_DEFAULT); 204 } 205 } 206 207 log.trace("open(): aid={}", aid); 208 } 209 catch (Exception ex) { 210 log.debug("open(): Failed to open attribute {}: ", getName(), ex); 211 aid = HDF5Constants.H5I_INVALID_HID; 212 } 213 finally { 214 parentObject.close(pObjID); 215 } 216 217 return aid; 218 } 219 220 /* 221 * (non-Javadoc) 222 * 223 * @see hdf.object.HObject#close(int) 224 */ 225 @Override 226 public void close(long aid) { 227 if (aid >= 0) { 228 if 
(this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5))) { 229 log.trace("close(): FILE_TYPE_HDF5"); 230 try { 231 H5.H5Aclose(aid); 232 } 233 catch (HDF5Exception ex) { 234 log.debug("close(): H5Aclose({}) failure: ", aid, ex); 235 } 236 } 237 } 238 } 239 240 241 /** 242 * Retrieves datatype and dataspace information from file and sets the attribute 243 * in memory. 244 * 245 * The init() is designed to support lazy operation in a attribute object. When a 246 * data object is retrieved from file, the datatype, dataspace and raw data are 247 * not loaded into memory. When it is asked to read the raw data from file, 248 * init() is first called to get the datatype and dataspace information, then 249 * load the raw data from file. 250 * 251 * init() is also used to reset the selection of a attribute (start, stride and 252 * count) to the default, which is the entire attribute for 1D or 2D datasets. In 253 * the following example, init() at step 1) retrieves datatype and dataspace 254 * information from file. getData() at step 3) reads only one data point. init() 255 * at step 4) resets the selection to the whole attribute. getData() at step 4) 256 * reads the values of whole attribute into memory. 
257 * 258 * <pre> 259 * dset = (Dataset) file.get(NAME_DATASET); 260 * 261 * // 1) get datatype and dataspace information from file 262 * attr.init(); 263 * rank = attr.getAttributeRank(); // rank = 2, a 2D attribute 264 * count = attr.getSelectedDims(); 265 * start = attr.getStartDims(); 266 * dims = attr.getAttributeDims(); 267 * 268 * // 2) select only one data point 269 * for (int i = 0; i < rank; i++) { 270 * start[0] = 0; 271 * count[i] = 1; 272 * } 273 * 274 * // 3) read one data point 275 * data = attr.getAttributeData(); 276 * 277 * // 4) reset selection to the whole attribute 278 * attr.init(); 279 * 280 * // 5) clean the memory data buffer 281 * attr.clearData(); 282 * 283 * // 6) Read the whole attribute 284 * data = attr.getAttributeData(); 285 * </pre> 286 */ 287 @Override 288 public void init() { 289 if (inited) { 290 resetSelection(); 291 log.trace("init(): H5CompoundAttr already inited"); 292 return; 293 } 294 295 long aid = HDF5Constants.H5I_INVALID_HID; 296 long tid = HDF5Constants.H5I_INVALID_HID; 297 long sid = HDF5Constants.H5I_INVALID_HID; 298 int tclass = HDF5Constants.H5I_INVALID_HID; 299 flatNameList = new Vector<>(); 300 flatTypeList = new Vector<>(); 301 long[] memberTIDs = null; 302 303 log.trace("init(): FILE_TYPE_HDF5"); 304 aid = open(); 305 if (aid >= 0) { 306 try { 307 sid = H5.H5Aget_space(aid); 308 rank = H5.H5Sget_simple_extent_ndims(sid); 309 space_type = H5.H5Sget_simple_extent_type(sid); 310 tid = H5.H5Aget_type(aid); 311 tclass = H5.H5Tget_class(tid); 312 log.trace("init(): tid={} sid={} rank={} space_type={}", tid, sid, rank, space_type); 313 314 long tmptid = 0; 315 316 // Handle ARRAY and VLEN types by getting the base type 317 if (tclass == HDF5Constants.H5T_ARRAY || tclass == HDF5Constants.H5T_VLEN) { 318 try { 319 tmptid = tid; 320 tid = H5.H5Tget_super(tmptid); 321 log.trace("init(): H5T_ARRAY or H5T_VLEN class old={}, new={}", tmptid, tid); 322 } 323 catch (Exception ex) { 324 log.debug("init(): H5T_ARRAY or H5T_VLEN 
H5Tget_super({}) failure: ", tmptid, ex); 325 tid = -1; 326 } 327 finally { 328 try { 329 H5.H5Tclose(tmptid); 330 } 331 catch (HDF5Exception ex) { 332 log.debug("init(): H5Tclose({}) failure: ", tmptid, ex); 333 } 334 } 335 } 336 337 if (tclass == HDF5Constants.H5T_COMPOUND) { 338 // initialize member information 339 H5Datatype.extractCompoundInfo((H5Datatype)datatype, "", flatNameList, flatTypeList); 340 numberOfMembers = flatNameList.size(); 341 log.trace("init(): numberOfMembers={}", numberOfMembers); 342 343 memberNames = new String[numberOfMembers]; 344 memberTIDs = new long[numberOfMembers]; 345 memberTypes = new Datatype[numberOfMembers]; 346 memberOrders = new int[numberOfMembers]; 347 isMemberSelected = new boolean[numberOfMembers]; 348 memberDims = new Object[numberOfMembers]; 349 350 for (int i = 0; i < numberOfMembers; i++) { 351 isMemberSelected[i] = true; 352 memberTIDs[i] = flatTypeList.get(i).createNative(); 353 354 try { 355 memberTypes[i] = flatTypeList.get(i); 356 } 357 catch (Exception ex) { 358 log.debug("init(): failed to create datatype for member[{}]: ", i, ex); 359 memberTypes[i] = null; 360 } 361 362 memberNames[i] = flatNameList.get(i); 363 memberOrders[i] = 1; 364 memberDims[i] = null; 365 log.trace("init()[{}]: memberNames[{}]={}, memberTIDs[{}]={}, memberTypes[{}]={}", i, i, 366 memberNames[i], i, memberTIDs[i], i, memberTypes[i]); 367 368 try { 369 tclass = H5.H5Tget_class(memberTIDs[i]); 370 } 371 catch (HDF5Exception ex) { 372 log.debug("init(): H5Tget_class({}) failure: ", memberTIDs[i], ex); 373 } 374 375 if (tclass == HDF5Constants.H5T_ARRAY) { 376 int n = H5.H5Tget_array_ndims(memberTIDs[i]); 377 long mdim[] = new long[n]; 378 H5.H5Tget_array_dims(memberTIDs[i], mdim); 379 int idim[] = new int[n]; 380 for (int j = 0; j < n; j++) 381 idim[j] = (int) mdim[j]; 382 memberDims[i] = idim; 383 tmptid = H5.H5Tget_super(memberTIDs[i]); 384 memberOrders[i] = (int) (H5.H5Tget_size(memberTIDs[i]) / H5.H5Tget_size(tmptid)); 385 try { 386 
H5.H5Tclose(tmptid); 387 } 388 catch (HDF5Exception ex) { 389 log.debug("init(): memberTIDs[{}] H5Tclose(tmptid {}) failure: ", i, tmptid, ex); 390 } 391 } 392 } // (int i=0; i<numberOfMembers; i++) 393 } 394 395 if (rank == 0) { 396 // a scalar data point 397 isScalar = true; 398 rank = 1; 399 dims = new long[] { 1 }; 400 log.trace("init(): rank is a scalar data point"); 401 } 402 else { 403 isScalar = false; 404 dims = new long[rank]; 405 maxDims = new long[rank]; 406 H5.H5Sget_simple_extent_dims(sid, dims, maxDims); 407 log.trace("init(): rank={}, dims={}, maxDims={}", rank, dims, maxDims); 408 } 409 410 inited = true; 411 } 412 catch (HDF5Exception ex) { 413 numberOfMembers = 0; 414 memberNames = null; 415 memberTypes = null; 416 memberOrders = null; 417 log.debug("init(): ", ex); 418 } 419 finally { 420 try { 421 H5.H5Tclose(tid); 422 } 423 catch (HDF5Exception ex2) { 424 log.debug("init(): H5Tclose(tid {}) failure: ", tid, ex2); 425 } 426 try { 427 H5.H5Sclose(sid); 428 } 429 catch (HDF5Exception ex2) { 430 log.debug("init(): H5Sclose(sid {}) failure: ", sid, ex2); 431 } 432 433 if (memberTIDs != null) { 434 for (int i = 0; i < memberTIDs.length; i++) { 435 try { 436 H5.H5Tclose(memberTIDs[i]); 437 } 438 catch (Exception ex) { 439 log.debug("init(): H5Tclose(memberTIDs[{}] {}) failure: ", i, memberTIDs[i], ex); 440 } 441 } 442 } 443 } 444 445 close(aid); 446 447 startDims = new long[rank]; 448 selectedDims = new long[rank]; 449 450 resetSelection(); 451 } 452 else { 453 log.debug("init(): failed to open attribute"); 454 } 455 } 456 457 /** 458 * Returns the datatype of the data object. 459 * 460 * @return the datatype of the data object. 
     */
    @Override
    public Datatype getDatatype() {
        if (!inited)
            init();

        // init() normally fills in datatype; this fallback re-reads it
        // directly from the attribute's native type handle.
        if (datatype == null) {
            long aid = HDF5Constants.H5I_INVALID_HID;
            long tid = HDF5Constants.H5I_INVALID_HID;

            aid = open();
            if (aid >= 0) {
                try {
                    tid = H5.H5Aget_type(aid);
                    datatype = new H5Datatype(getFileFormat(), tid);
                }
                catch (Exception ex) {
                    log.debug("getDatatype(): ", ex);
                }
                finally {
                    try {
                        H5.H5Tclose(tid);
                    }
                    catch (HDF5Exception ex) {
                        log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex);
                    }
                    try {
                        H5.H5Aclose(aid);
                    }
                    catch (HDF5Exception ex) {
                        log.debug("getDatatype(): H5Aclose(aid {}) failure: ", aid, ex);
                    }
                }
            }
        }

        return datatype;
    }

    /**
     * Returns the data buffer of the attribute in memory.
     *
     * If data is already loaded into memory, returns the data; otherwise, calls
     * read() to read data from file into a memory buffer and returns the memory
     * buffer.
     *
     * The whole attribute is read into memory. Users can also select
     * a subset from the whole data. Subsetting is done in an implicit way.
     *
     * <b>How to Select a Subset</b>
     *
     * A selection is specified by three arrays: start, stride and count.
     * <ol>
     * <li>start: offset of a selection
     * <li>stride: determines how many elements to move in each dimension
     * <li>count: number of elements to select in each dimension
     * </ol>
     * getStartDims(), getStride() and getSelectedDims() returns the start,
     * stride and count arrays respectively. Applications can make a selection
     * by changing the values of the arrays.
     *
     * The following example shows how to make a subset. In the example, the
     * attribute is a 4-dimensional array of [200][100][50][10], i.e. dims[0]=200;
     * dims[1]=100; dims[2]=50; dims[3]=10; <br>
     * We want to select every other data point in dims[1] and dims[2]
     *
     * <pre>
     * int rank = attribute.getRank(); // number of dimensions of the attribute
     * long[] dims = attribute.getDims(); // the dimension sizes of the attribute
     * long[] selected = attribute.getSelectedDims(); // the selected size of the attribute
     * long[] start = attribute.getStartDims(); // the offset of the selection
     * long[] stride = attribute.getStride(); // the stride of the attribute
     * int[] selectedIndex = attribute.getSelectedIndex(); // the selected dimensions for display
     *
     * // select dim1 and dim2 as 2D data for display,and slice through dim0
     * selectedIndex[0] = 1;
     * selectedIndex[1] = 2;
     * selectedIndex[2] = 0;
     *
     * // reset the selection arrays
     * for (int i = 0; i < rank; i++) {
     *     start[i] = 0;
     *     selected[i] = 1;
     *     stride[i] = 1;
     * }
     *
     * // set stride to 2 on dim1 and dim2 so that every other data point is
     * // selected.
     * stride[1] = 2;
     * stride[2] = 2;
     *
     * // set the selection size of dim1 and dim2
     * selected[1] = dims[1] / stride[1];
     * selected[2] = dims[2] / stride[2];
     *
     * // when H5CompoundAttr.getData() is called, the selection above will be used since
     * // the dimension arrays are passed by reference. Changes of these arrays
     * // outside the attribute object directly change the values of these array
     * // in the attribute object.
     * </pre>
     *
     * For H5CompoundAttr, the memory data object is a java.util.List object. Each
     * element of the list is a data array that corresponds to a compound field.
     *
     * For example, if compound attribute "comp" has the following nested
     * structure, and member datatypes
     *
     * <pre>
     * comp --> m01 (int)
     * comp --> m02 (float)
     * comp --> nest1 --> m11 (char)
     * comp --> nest1 --> m12 (String)
     * comp --> nest1 --> nest2 --> m21 (long)
     * comp --> nest1 --> nest2 --> m22 (double)
     * </pre>
     *
     * getData() returns a list of six arrays: {int[], float[], char[],
     * String[], long[] and double[]}.
     *
     * @return the memory buffer of the attribute.
     *
     * @throws Exception if object can not be read
     * @throws OutOfMemoryError if memory is exhausted
     */
    @Override
    public Object getData() throws Exception, OutOfMemoryError {
        log.trace("getData(): isDataLoaded={}", isDataLoaded);
        if (!isDataLoaded)
            data = read(); // load the data, attributes read all data

        nPoints = 1;
        log.trace("getData(): selectedDims length={}", selectedDims.length);
        int point_len = selectedDims.length;
        //Partial data for 3 or more dimensions
        if (rank > 2)
            point_len = 3;
        for (int j = 0; j < point_len; j++) {
            log.trace("getData(): selectedDims[{}]={}", j, selectedDims[j]);
            nPoints *= selectedDims[j];
        }
        log.trace("getData: read {}", nPoints);

        // apply the selection for 3 or more dimensions
        // selection only expects to use 3 selectedDims
        //   where selectedIndex[0] is the row dimension
        //   where selectedIndex[1] is the col dimension
        //   where selectedIndex[2] is the frame dimension
        // NOTE(review): AttributeSelection() is presumably provided by the
        // H5Attribute interface — confirm against that declaration.
        if (rank > 2)
            data = AttributeSelection();

        return data;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Attribute#readBytes()
     */
    @Override
    public byte[] readBytes() throws HDF5Exception {
        byte[] theData = null;

        if (!isInited())
            init();

        long aid = open();
        if (aid >= 0) {
            long tid = HDF5Constants.H5I_INVALID_HID;

            try {
                // total number of selected points
                long[] lsize = { 1 };
                for (int j = 0; j < selectedDims.length; j++)
                    lsize[0] *= selectedDims[j];

                tid = H5.H5Aget_type(aid);
                long size = H5.H5Tget_size(tid) * lsize[0];
                log.trace("readBytes(): size={}", size);

                // guard against a byte count that cannot be held in a Java array
                if (size < Integer.MIN_VALUE || size > Integer.MAX_VALUE)
                    throw new Exception("Invalid int size");

                theData = new byte[(int)size];

                log.trace("readBytes(): read attribute id {} of size={}", tid, lsize);
                H5.H5Aread(aid, tid, theData);
            }
            catch (Exception ex) {
                log.debug("readBytes(): failed to read data: ", ex);
            }
            finally {
                try {
                    H5.H5Tclose(tid);
                }
                catch (HDF5Exception ex2) {
                    log.debug("readBytes(): H5Tclose(tid {}) failure: ", tid, ex2);
                }
                close(aid);
            }
        }

        return theData;
    }

    /**
     * Reads the data from file.
     *
     * read() reads the data from file to a memory buffer and returns the memory
     * buffer. The attribute object does not hold the memory buffer. To store the
     * memory buffer in the attribute object, one must call getData().
     *
     * By default, the whole attribute is read into memory.
     *
     * For CompoundAttr, the memory data object is a java.util.List object. Each
     * element of the list is a data array that corresponds to a compound field.
     *
     * For example, if compound dataset "comp" has the following nested
     * structure, and member datatypes
     *
     * <pre>
     * comp --> m01 (int)
     * comp --> m02 (float)
     * comp --> nest1 --> m11 (char)
     * comp --> nest1 --> m12 (String)
     * comp --> nest1 --> nest2 --> m21 (long)
     * comp --> nest1 --> nest2 --> m22 (double)
     * </pre>
     *
     * getData() returns a list of six arrays: {int[], float[], char[],
     * String[], long[] and double[]}.
     *
     * @return the data read from file.
     *
     * @see #getData()
     * @see hdf.object.DataFormat#read()
     *
     * @throws Exception
     *             if object can not be read
     */
    @Override
    public Object read() throws Exception {
        Object readData = null;

        if (!isInited())
            init();

        try {
            readData = compoundAttributeCommonIO(H5File.IO_TYPE.READ, null);
        }
        catch (Exception ex) {
            log.debug("read(): failed to read compound attribute: ", ex);
            throw new Exception("failed to read compound attribute: " + ex.getMessage(), ex);
        }

        return readData;
    }

    /**
     * Writes the given data buffer into this attribute in a file.
     *
     * The data buffer is a vector that contains the data values of compound fields. The data is written
     * into file as one data blob.
     *
     * @param buf
     *            The vector that contains the data values of compound fields.
     *
     * @throws Exception
     *             If there is an error at the HDF5 library level.
     */
    @Override
    public void write(Object buf) throws Exception {
        if (this.getFileFormat().isReadOnly())
            throw new Exception("cannot write to compound attribute in file opened as read-only");

        // only replace the cached buffer when the caller passes a new one
        if (!buf.equals(data))
            setData(buf);

        init();

        if (parentObject == null) {
            log.debug("write(Object): parent object is null; nowhere to write attribute to");
            return;
        }

        ((MetaDataContainer) getParentObject()).writeMetadata(this);

        try {
            compoundAttributeCommonIO(H5File.IO_TYPE.WRITE, buf);
        }
        catch (Exception ex) {
            log.debug("write(Object): failed to write compound attribute: ", ex);
            throw new Exception("failed to write compound attribute: " + ex.getMessage(), ex);
        }
        resetSelection();
    }

    /*
     * Routine to convert datatypes that are read in as byte arrays to
     * regular types.
759 */ 760 protected Object convertByteMember(final Datatype dtype, byte[] byteData) { 761 Object theObj = null; 762 763 if (dtype.isFloat() && dtype.getDatatypeSize() == 16) 764 theObj = ((H5Datatype)dtype).byteToBigDecimal(byteData, 0); 765 else 766 theObj = super.convertByteMember(dtype, byteData); 767 768 return theObj; 769 } 770 771 private Object compoundAttributeCommonIO(H5File.IO_TYPE ioType, Object writeBuf) throws Exception { 772 H5Datatype dsDatatype = (H5Datatype)getDatatype(); 773 Object theData = null; 774 775 if (numberOfMembers <= 0) { 776 log.debug("compoundAttributeCommonIO(): attribute contains no members"); 777 throw new Exception("dataset contains no members"); 778 } 779 780 /* 781 * I/O type-specific pre-initialization. 782 */ 783 if (ioType == H5File.IO_TYPE.WRITE) { 784 if ((writeBuf == null) || !(writeBuf instanceof List)) { 785 log.debug("compoundAttributeCommonIO(): writeBuf is null or invalid"); 786 throw new Exception("write buffer is null or invalid"); 787 } 788 789 /* 790 * Check for any unsupported datatypes and fail early before 791 * attempting to write to the attribute. 
792 */ 793 if (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isCompound()) { 794 log.debug("compoundAttributeCommonIO(): cannot write attribute of type ARRAY of COMPOUND"); 795 throw new HDF5Exception("Unsupported attribute of type ARRAY of COMPOUND"); 796 } 797 798 if (dsDatatype.isVLEN() && dsDatatype.getDatatypeBase().isCompound()) { 799 log.debug("compoundAttributeCommonIO(): cannot write attribute of type VLEN of COMPOUND"); 800 throw new HDF5Exception("Unsupported attribute of type VLEN of COMPOUND"); 801 } 802 } 803 804 long aid = open(); 805 if (aid >= 0) { 806 log.trace("compoundAttributeCommonIO(): isDataLoaded={}", isDataLoaded); 807 try { 808 theData = AttributeCommonIO(aid, ioType, writeBuf); 809 } 810 finally { 811 close(aid); 812 } 813 } 814 else 815 log.debug("compoundAttributeCommonIO(): failed to open attribute"); 816 817 return theData; 818 } 819 820 /* 821 * Private recursive routine to read/write an entire compound datatype field by 822 * field. This routine is called recursively for ARRAY of COMPOUND and VLEN of 823 * COMPOUND datatypes. 824 * 825 * NOTE: the globalMemberIndex hack is ugly, but we need to keep track of a 826 * running counter so that we can index properly into the flattened name list 827 * generated from H5Datatype.extractCompoundInfo() at attribute init time. 828 */ 829 private Object compoundTypeIO(H5Datatype parentType, int nSelPoints, final H5Datatype cmpdType, 830 Object dataBuf, int[] globalMemberIndex) { 831 Object theData = null; 832 833 if (cmpdType.isArray()) { 834 log.trace("compoundTypeIO(): ARRAY type"); 835 836 long[] arrayDims = cmpdType.getArrayDims(); 837 int arrSize = nSelPoints; 838 for (int i = 0; i < arrayDims.length; i++) { 839 arrSize *= arrayDims[i]; 840 } 841 theData = compoundTypeIO(cmpdType, arrSize, (H5Datatype) cmpdType.getDatatypeBase(), dataBuf, globalMemberIndex); 842 } 843 else if (cmpdType.isVLEN() && !cmpdType.isVarStr()) { 844 /* 845 * TODO: true variable-length support. 
846 */ 847 String[] errVal = new String[nSelPoints]; 848 String errStr = "*UNSUPPORTED*"; 849 850 for (int j = 0; j < nSelPoints; j++) 851 errVal[j] = errStr; 852 853 /* 854 * Setup a fake data list. 855 */ 856 Datatype baseType = cmpdType.getDatatypeBase(); 857 while (baseType != null && !baseType.isCompound()) { 858 baseType = baseType.getDatatypeBase(); 859 } 860 861 List<Object> fakeVlenData = (List<Object>) H5Datatype.allocateArray((H5Datatype) baseType, nSelPoints); 862 fakeVlenData.add(errVal); 863 864 theData = fakeVlenData; 865 } 866 else if (cmpdType.isCompound()) { 867 long parentLength = parentType.getDatatypeSize(); 868 List<Object> memberDataList = null; 869 List<Datatype> typeList = cmpdType.getCompoundMemberTypes(); 870 List<Long> offsetList = cmpdType.getCompoundMemberOffsets(); 871 872 log.trace("compoundTypeIO(): read {} members: parentLength={}", typeList.size(), parentLength); 873 874 memberDataList = (List<Object>) H5Datatype.allocateArray(cmpdType, nSelPoints); 875 876 try { 877 for (int i = 0; i < typeList.size(); i++) { 878 long memberOffset = 0; //offset into dataBuf 879 H5Datatype memberType = null; 880 String memberName = null; 881 Object memberData = null; 882 883 try { 884 memberType = (H5Datatype) typeList.get(i); 885 memberOffset = offsetList.get(i); 886 } 887 catch (Exception ex) { 888 log.debug("compoundTypeIO(): get member {} failure: ", i, ex); 889 globalMemberIndex[0]++; 890 continue; 891 } 892 893 /* 894 * Since the type list used here is not a flattened structure, we need to skip 895 * the member selection check for compound types, as otherwise having a single 896 * member not selected would skip the reading/writing for the entire compound 897 * type. The member selection check will be deferred to the recursive compound 898 * read/write below. 
899 */ 900 if (!memberType.isCompound()) { 901 if (!isMemberSelected[globalMemberIndex[0] % this.getMemberCount()]) { 902 log.debug("compoundTypeIO(): member[{}] is not selected", i); 903 globalMemberIndex[0]++; 904 continue; // the field is not selected 905 } 906 } 907 908 if (!memberType.isCompound()) { 909 try { 910 memberName = new String(flatNameList.get(globalMemberIndex[0])); 911 } 912 catch (Exception ex) { 913 log.debug("compoundTypeIO(): get member {} name failure: ", i, ex); 914 memberName = "null"; 915 } 916 } 917 918 log.trace("compoundTypeIO(): member[{}]({}) is type {} offset {}", i, memberName, 919 memberType.getDescription(), memberOffset); 920 921 try { 922 int mt_typesize = (int)memberType.getDatatypeSize(); 923 log.trace("compoundTypeIO(): member[{}] mt_typesize={}", i, mt_typesize); 924 byte[] memberbuf = new byte[nSelPoints * mt_typesize]; 925 for (int dimindx = 0; dimindx < nSelPoints; dimindx++) 926 System.arraycopy(dataBuf, (int)memberOffset + dimindx * (int)parentLength, memberbuf, dimindx * mt_typesize, mt_typesize); 927 928 if (memberType.isCompound()) { 929 memberData = compoundTypeIO(cmpdType, nSelPoints, memberType, memberbuf, 930 globalMemberIndex); 931 } 932 else if (memberType.isArray() /* || (memberType.isVLEN() && !memberType.isVarStr()) */) { 933 /* 934 * Recursively detect any nested array/vlen of compound types. 935 */ 936 boolean compoundFound = false; 937 938 Datatype base = memberType.getDatatypeBase(); 939 while (base != null) { 940 if (base.isCompound()) 941 compoundFound = true; 942 943 base = base.getDatatypeBase(); 944 } 945 946 if (compoundFound) { 947 /* 948 * Skip the top-level array/vlen type. 
949 */ 950 globalMemberIndex[0]++; 951 952 memberData = compoundTypeIO(cmpdType, nSelPoints, memberType, memberbuf, 953 globalMemberIndex); 954 } 955 else { 956 memberData = convertByteMember(memberType, (byte[])memberbuf); 957 globalMemberIndex[0]++; 958 } 959 } 960 else { 961 memberData = convertByteMember(memberType, (byte[])memberbuf); 962 globalMemberIndex[0]++; 963 } 964 } 965 catch (Exception ex) { 966 log.debug("compoundTypeIO(): failed to read member {}: ", i, ex); 967 globalMemberIndex[0]++; 968 memberData = null; 969 } 970 971 if (memberData == null) { 972 String[] errVal = new String[nSelPoints]; 973 String errStr = "*ERROR*"; 974 975 for (int j = 0; j < nSelPoints; j++) 976 errVal[j] = errStr; 977 978 memberData = errVal; 979 } 980 981 memberDataList.add(memberData); 982 } // (i = 0; i < atomicTypeList.size(); i++) 983 } 984 catch (Exception ex) { 985 log.debug("compoundTypeIO(): failure: ", ex); 986 memberDataList = null; 987 } 988 989 theData = memberDataList; 990 } 991 992 return theData; 993 } 994 995 private Object compoundTypeWriteIO(H5Datatype parentType, final H5Datatype cmpdType, 996 Object dataBuf, int[] globalMemberIndex) { 997 Object theData = null; 998 if (cmpdType.isArray()) { 999 Object memberData = null; 1000 log.trace("compoundTypeWriteIO(): ARRAY type"); 1001 1002 theData = compoundTypeWriteIO(cmpdType, (H5Datatype) cmpdType.getDatatypeBase(), dataBuf, globalMemberIndex); 1003 } 1004 else if (cmpdType.isVLEN() && !cmpdType.isVarStr()) { 1005 /* 1006 * TODO: true variable-length support. 1007 */ 1008 String errVal = new String("*UNSUPPORTED*"); 1009 1010 /* 1011 * Setup a fake data bytes. 
1012 */ 1013 Datatype baseType = cmpdType.getDatatypeBase(); 1014 while (baseType != null && !baseType.isCompound()) { 1015 baseType = baseType.getDatatypeBase(); 1016 } 1017 1018 List<Object> fakeVlenData = (List<Object>) H5Datatype.allocateArray((H5Datatype) baseType, 1); 1019 fakeVlenData.add(errVal); 1020 1021 theData = convertMemberByte((H5Datatype)baseType, fakeVlenData); 1022 } 1023 else if (cmpdType.isCompound()) { 1024 long parentLength = parentType.getDatatypeSize(); 1025 List<Object> memberDataList = null; 1026 List<Datatype> typeList = cmpdType.getCompoundMemberTypes(); 1027 List<Long> offsetList = cmpdType.getCompoundMemberOffsets(); 1028 1029 log.trace("compoundTypeWriteIO(): write {} members", typeList.size()); 1030 1031 theData = (Object)new byte[(int)cmpdType.getDatatypeSize()]; 1032 try { 1033 for (int i = 0, writeListIndex = 0; i < typeList.size(); i++) { 1034 long memberOffset = 0; //offset into dataBuf 1035 H5Datatype memberType = null; 1036 String memberName = null; 1037 Object memberData = null; 1038 1039 try { 1040 memberType = (H5Datatype) typeList.get(i); 1041 memberOffset = offsetList.get(i); 1042 } 1043 catch (Exception ex) { 1044 log.debug("compoundTypeWriteIO(): get member {} failure: ", i, ex); 1045 globalMemberIndex[0]++; 1046 continue; 1047 } 1048 long memberLength = memberType.getDatatypeSize(); 1049 1050 /* 1051 * Since the type list used here is not a flattened structure, we need to skip the member selection 1052 * check for compound types, as otherwise having a single member not selected would skip the 1053 * reading/writing for the entire compound type. The member selection check will be deferred to the 1054 * recursive compound read/write below. 
1055 */ 1056 if (!memberType.isCompound()) { 1057 if (!isMemberSelected[globalMemberIndex[0] % this.getMemberCount()]) { 1058 log.debug("compoundTypeWriteIO(): member[{}] is not selected", i); 1059 globalMemberIndex[0]++; 1060 continue; // the field is not selected 1061 } 1062 } 1063 1064 if (!memberType.isCompound()) { 1065 try { 1066 memberName = new String(flatNameList.get(globalMemberIndex[0])); 1067 } 1068 catch (Exception ex) { 1069 log.debug("compoundTypeWriteIO(): get member {} name failure: ", i, ex); 1070 memberName = "null"; 1071 } 1072 } 1073 1074 log.trace("compoundTypeWriteIO(): member[{}]({}) is type {} offset {}", i, memberName, 1075 memberType.getDescription(), memberOffset); 1076 1077 try { 1078 /* 1079 * TODO: currently doesn't correctly handle non-selected compound members. 1080 */ 1081 memberData = ((List<?>) dataBuf).get(i); 1082 } 1083 catch (Exception ex) { 1084 log.debug("compoundTypeWriteIO(): get member[{}] data failure: ", i, ex); 1085 globalMemberIndex[0]++; 1086 continue; 1087 } 1088 1089 if (memberData == null) { 1090 log.debug("compoundTypeWriteIO(): member[{}] data is null", i); 1091 globalMemberIndex[0]++; 1092 continue; 1093 } 1094 1095 try { 1096 if (memberType.isCompound()) { 1097 List<?> nestedList = (List<?>) ((List<?>) dataBuf).get(i); 1098 memberData = compoundTypeWriteIO(cmpdType, memberType, nestedList, globalMemberIndex); 1099 } 1100 else { 1101 memberData = writeSingleCompoundMember(memberType, memberData); 1102 globalMemberIndex[0]++; 1103 } 1104 } 1105 catch (Exception ex) { 1106 log.debug("compoundTypeWriteIO(): failed to write member[{}]: ", i, ex); 1107 globalMemberIndex[0]++; 1108 } 1109 1110 byte[] indexedBytes = convertMemberByte((H5Datatype)memberType, memberData); 1111 System.arraycopy(indexedBytes, 0, theData, writeListIndex, (int)memberLength); 1112 writeListIndex += memberLength; 1113 } // (i = 0, writeListIndex = 0; i < atomicTypeList.size(); i++) 1114 } 1115 catch (Exception ex) { 1116 
log.debug("compoundTypeWriteIO(): failure: ", ex); 1117 theData = null; 1118 } 1119 } 1120 1121 return theData; 1122 } 1123 1124 /* 1125 * Routine to convert datatypes that are in object arrays to 1126 * bytes. 1127 */ 1128 private byte[] convertMemberByte(final Datatype dtype, Object theObj) { 1129 byte[] byteData = null; 1130 1131 if (dtype.getDatatypeSize() == 1) { 1132 /* 1133 * Normal byte[] type, such as an integer datatype of size 1. 1134 */ 1135 byteData = (byte[])theObj; 1136 } 1137 else if (dtype.isString() && !dtype.isVarStr() && convertByteToString) { 1138 log.trace("convertMemberByte(): converting string array to byte array"); 1139 1140 byteData = stringToByte((String[])theObj, (int) dtype.getDatatypeSize()); 1141 } 1142 else if (dtype.isInteger()) { 1143 log.trace("convertMemberByte(): converting integer array to byte array"); 1144 1145 switch ((int)dtype.getDatatypeSize()) { 1146 case 1: 1147 /* 1148 * Normal byte[] type, such as an integer datatype of size 1. 1149 */ 1150 byteData = (byte[])theObj; 1151 break; 1152 case 2: 1153 byteData = HDFNativeData.shortToByte(0, 1, (short[])theObj); 1154 break; 1155 case 4: 1156 byteData = HDFNativeData.intToByte(0, 1, (int[])theObj); 1157 break; 1158 case 8: 1159 byteData = HDFNativeData.longToByte(0, 1, (long[])theObj); 1160 break; 1161 default: 1162 log.debug("convertMemberByte(): invalid datatype size"); 1163 byteData = null; 1164 break; 1165 } 1166 } 1167 else if (dtype.isFloat()) { 1168 log.trace("convertMemberByte(): converting float array to byte array"); 1169 1170 if (dtype.getDatatypeSize() == 16) 1171 byteData = ((H5Datatype)dtype).bigDecimalToByte((BigDecimal[])theObj, 0); 1172 else if (dtype.getDatatypeSize() == 8) 1173 byteData = HDFNativeData.doubleToByte(0, 1, (double[])theObj); 1174 else 1175 byteData = HDFNativeData.floatToByte(0, 1, (float[])theObj); 1176 } 1177 else if (dtype.isRef()) { 1178 log.trace("convertMemberByte(): reference type - converting long array to byte array"); 1179 1180 
byteData = HDFNativeData.longToByte(0, 1, (long[])theObj); 1181 } 1182 else if (dtype.isArray()) { 1183 Datatype baseType = dtype.getDatatypeBase(); 1184 1185 /* 1186 * Retrieve the real base datatype in the case of ARRAY of ARRAY datatypes. 1187 */ 1188 while (baseType.isArray()) 1189 baseType = baseType.getDatatypeBase(); 1190 1191 /* 1192 * Optimize for the common cases of Arrays. 1193 */ 1194 switch (baseType.getDatatypeClass()) { 1195 case Datatype.CLASS_INTEGER: 1196 case Datatype.CLASS_FLOAT: 1197 case Datatype.CLASS_CHAR: 1198 case Datatype.CLASS_STRING: 1199 case Datatype.CLASS_BITFIELD: 1200 case Datatype.CLASS_OPAQUE: 1201 case Datatype.CLASS_COMPOUND: 1202 case Datatype.CLASS_REFERENCE: 1203 case Datatype.CLASS_ENUM: 1204 case Datatype.CLASS_VLEN: 1205 case Datatype.CLASS_TIME: 1206 byteData = convertMemberByte(baseType, theObj); 1207 break; 1208 1209 case Datatype.CLASS_ARRAY: 1210 { 1211 Datatype arrayType = dtype.getDatatypeBase(); 1212 1213 long[] arrayDims = dtype.getArrayDims(); 1214 int arrSize = 1; 1215 for (int i = 0; i < arrayDims.length; i++) { 1216 arrSize *= arrayDims[i]; 1217 } 1218 1219 byteData = new byte[arrSize * (int)arrayType.getDatatypeSize()]; 1220 1221 for (int i = 0; i < arrSize; i++) { 1222 byte[] indexedBytes = convertMemberByte(arrayType, ((Object[]) theObj)[i]); 1223 System.arraycopy(indexedBytes, 0, byteData, (int)(i * arrayType.getDatatypeSize()), (int)arrayType.getDatatypeSize()); 1224 } 1225 1226 break; 1227 } 1228 1229 case Datatype.CLASS_NO_CLASS: 1230 default: 1231 log.debug("convertMemberByte(): invalid datatype class"); 1232 byteData = null; 1233 } 1234 } 1235 else if (dtype.isCompound()) { 1236 /* 1237 * TODO: still valid after reading change? 
1238 */ 1239 byteData = (byte[])convertCompoundMemberBytes(dtype, (List<Object>)theObj); 1240 } 1241 else { 1242 byteData = (byte[])theObj; 1243 } 1244 1245 return byteData; 1246 } 1247 1248 /** 1249 * Given an array of objects representing a compound Datatype, converts each of 1250 * its members into bytes and returns the results. 1251 * 1252 * @param dtype 1253 * The compound datatype to convert 1254 * @param theObj 1255 * The object array representing the data of the compound Datatype 1256 * @return The converted bytes of the objects 1257 */ 1258 private byte[] convertCompoundMemberBytes(final Datatype dtype, List<Object> theObj) { 1259 List<Datatype> allSelectedTypes = Arrays.asList(this.getSelectedMemberTypes()); 1260 List<Datatype> localTypes = new ArrayList<>(dtype.getCompoundMemberTypes()); 1261 Iterator<Datatype> localIt = localTypes.iterator(); 1262 while (localIt.hasNext()) { 1263 Datatype curType = localIt.next(); 1264 1265 if (curType.isCompound()) 1266 continue; 1267 1268 if (!allSelectedTypes.contains(curType)) 1269 localIt.remove(); 1270 } 1271 1272 byte[] byteData = new byte[(int)dtype.getDatatypeSize()]; 1273 for (int i = 0, index = 0; i < localTypes.size(); i++) { 1274 Datatype curType = localTypes.get(i); 1275 byte[] indexedBytes = null; 1276 if (curType.isCompound()) 1277 indexedBytes = convertCompoundMemberBytes(curType, (List<Object>)theObj.get(i)); 1278 else 1279 indexedBytes = convertMemberByte(curType, theObj.get(i)); 1280 1281 System.arraycopy(indexedBytes, 0, byteData, index + (int)curType.getDatatypeSize(), (int)curType.getDatatypeSize()); 1282 index += curType.getDatatypeSize(); 1283 } 1284 1285 return byteData; 1286 } 1287 1288 /* 1289 * Private routine to convert a single field of a compound datatype. 1290 */ 1291 private Object writeSingleCompoundMember(final H5Datatype memberType, Object theData) throws Exception { 1292 /* 1293 * Check for any unsupported datatypes before attempting to write this compound 1294 * member. 
1295 */ 1296 if (memberType.isVLEN() && !memberType.isVarStr()) { 1297 log.debug("writeSingleCompoundMember(): writing of VL non-strings is not currently supported"); 1298 throw new Exception("writing of VL non-strings is not currently supported"); 1299 } 1300 1301 /* 1302 * Perform any necessary data conversions before writing the data. 1303 */ 1304 Object tmpData = theData; 1305 try { 1306 if (memberType.isUnsigned()) { 1307 // Check if we need to convert unsigned integer data from Java-style 1308 // to C-style integers 1309 long tsize = memberType.getDatatypeSize(); 1310 String cname = theData.getClass().getName(); 1311 char dname = cname.charAt(cname.lastIndexOf('[') + 1); 1312 boolean doIntConversion = (((tsize == 1) && (dname == 'S')) 1313 || ((tsize == 2) && (dname == 'I')) || ((tsize == 4) && (dname == 'J'))); 1314 1315 if (doIntConversion) { 1316 log.trace("writeSingleCompoundMember(): converting integer data to unsigned C-type integers"); 1317 tmpData = convertToUnsignedC(theData, null); 1318 } 1319 } 1320 else if (memberType.isString() && (Array.get(theData, 0) instanceof String)) { 1321 log.trace("writeSingleCompoundMember(): converting string array to byte array"); 1322 tmpData = stringToByte((String[]) theData, (int) memberType.getDatatypeSize()); 1323 } 1324 else if (memberType.isEnum() && (Array.get(theData, 0) instanceof String)) { 1325 log.trace("writeSingleCompoundMember(): converting enum names to values"); 1326 tmpData = memberType.convertEnumNameToValue((String[]) theData); 1327 } 1328 } 1329 catch (Exception ex) { 1330 log.debug("writeSingleCompoundMember(): data conversion failure: ", ex); 1331 tmpData = null; 1332 } 1333 1334 if (tmpData == null) { 1335 log.debug("writeSingleCompoundMember(): data is null"); 1336 } 1337 1338 return tmpData; 1339 } 1340 1341 /** 1342 * Converts the data values of this data object to appropriate Java integers if 1343 * they are unsigned integers. 
1344 * 1345 * @see hdf.object.Dataset#convertToUnsignedC(Object) 1346 * @see hdf.object.Dataset#convertFromUnsignedC(Object, Object) 1347 * 1348 * @return the converted data buffer. 1349 */ 1350 @Override 1351 public Object convertFromUnsignedC() { 1352 throw new UnsupportedOperationException("H5CompoundDS:convertFromUnsignedC Unsupported operation."); 1353 } 1354 1355 /** 1356 * Converts Java integer data values of this data object back to unsigned C-type 1357 * integer data if they are unsigned integers. 1358 * 1359 * @see hdf.object.Dataset#convertToUnsignedC(Object) 1360 * @see hdf.object.Dataset#convertToUnsignedC(Object, Object) 1361 * 1362 * @return the converted data buffer. 1363 */ 1364 @Override 1365 public Object convertToUnsignedC() { 1366 throw new UnsupportedOperationException("H5CompoundDS:convertToUnsignedC Unsupported operation."); 1367 } 1368 1369 /* Implement interface Attribute */ 1370 1371 /** 1372 * Returns the HObject to which this Attribute is currently "attached". 1373 * 1374 * @return the HObject to which this Attribute is currently "attached". 1375 */ 1376 public HObject getParentObject() { 1377 return parentObject; 1378 } 1379 1380 /** 1381 * Sets the HObject to which this Attribute is "attached". 1382 * 1383 * @param pObj 1384 * the new HObject to which this Attribute is "attached". 1385 */ 1386 public void setParentObject(HObject pObj) { 1387 parentObject = pObj; 1388 } 1389 1390 /** 1391 * set a property for the attribute. 1392 * 1393 * @param key the attribute Map key 1394 * @param value the attribute Map value 1395 */ 1396 public void setProperty(String key, Object value) { 1397 properties.put(key, value); 1398 } 1399 1400 /** 1401 * get a property for a given key. 1402 * 1403 * @param key the attribute Map key 1404 * 1405 * @return the property 1406 */ 1407 public Object getProperty(String key) { 1408 return properties.get(key); 1409 } 1410 1411 /** 1412 * get all property keys. 
     *
     * @return the Collection of property keys
     */
    public Collection<String> getPropertyKeys() {
        return properties.keySet();
    }

    /**
     * Returns the name of the object. For example, "Raster Image #2".
     *
     * @return The name of the object.
     */
    public final String getAttributeName() {
        return getName();
    }

    /**
     * Retrieves the attribute data from the file.
     *
     * @return the attribute data.
     *
     * @throws Exception
     *             if the data can not be retrieved
     */
    public final Object getAttributeData() throws Exception, OutOfMemoryError {
        return getData();
    }

    /**
     * Returns the datatype of the attribute.
     *
     * @return the datatype of the attribute.
     */
    public final Datatype getAttributeDatatype() {
        return getDatatype();
    }

    /**
     * Returns the space type for the attribute. It returns a
     * negative number if it failed to retrieve the type information from
     * the file.
     *
     * @return the space type for the attribute.
     */
    public final int getAttributeSpaceType() {
        return getSpaceType();
    }

    /**
     * Returns the rank (number of dimensions) of the attribute. It returns a
     * negative number if it failed to retrieve the dimension information from
     * the file.
     *
     * @return the number of dimensions of the attribute.
     */
    public final int getAttributeRank() {
        return getRank();
    }

    /**
     * Returns the selected size of the rows and columns of the attribute. It returns a
     * negative number if it failed to retrieve the size information from
     * the file.
     *
     * @return the selected size of the rows and columns of the attribute.
     */
    public final int getAttributePlane() {
        // Plane size = selected width x selected height (one 2D frame of data).
        return (int)getWidth() * (int)getHeight();
    }

    /**
     * Returns the array that contains the dimension sizes of the data value of
     * the attribute. It returns null if it failed to retrieve the dimension
     * information from the file.
     *
     * @return the dimension sizes of the attribute.
     */
    public final long[] getAttributeDims() {
        return getDims();
    }

    /**
     * @return true if the data is a single scalar point; otherwise, returns
     *         false.
     */
    public boolean isAttributeScalar() {
        return isScalar();
    }

    /**
     * Not for public use in the future.
     *
     * setData() is not safe to use because it changes memory buffer
     * of the dataset object. Dataset operations such as write/read
     * will fail if the buffer type or size is changed.
     *
     * @param d the object data -must be an array of Objects
     */
    public void setAttributeData(Object d) {
        setData(d);
    }

    /**
     * Writes the memory buffer of this dataset to file.
     *
     * @throws Exception if buffer can not be written
     */
    public void writeAttribute() throws Exception {
        write();
    }

    /**
     * Writes the given data buffer into this attribute in a file.
     *
     * The data buffer is a vector that contains the data values of compound fields. The data is written
     * into file as one data blob.
     *
     * @param buf
     *            The vector that contains the data values of compound fields.
     *
     * @throws Exception
     *             If there is an error at the library level.
     */
    public void writeAttribute(Object buf) throws Exception {
        write(buf);
    }

    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter.
     * For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} of three data points.
     *
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     *
     * @return the string representation of the data values.
     */
    public String toAttributeString(String delimiter) {
        // Delegate to the two-argument form; -1 means no limit on items.
        return toString(delimiter, -1);
    }

    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter. For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} of three data points.
     *
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     * @param maxItems
     *            The maximum number of Array values to return
     *
     * @return the string representation of the data values.
1577 */ 1578 public String toAttributeString(String delimiter, int maxItems) { 1579 Object theData = originalBuf; 1580 if (theData == null) { 1581 log.debug("toString: value is null"); 1582 return null; 1583 } 1584 1585 if (!(theData instanceof List<?>)) { 1586 log.trace("toString: value is not list"); 1587 return null; 1588 } 1589 1590 // attribute value is an array 1591 StringBuilder sb = new StringBuilder(); 1592 int numberTypes = ((ArrayList<Object[]>)theData).size(); 1593 List<Datatype> cmpdTypes = getDatatype().getCompoundMemberTypes(); 1594 int n = Array.getLength(((ArrayList<Object[]>)theData).get(0)); 1595 if ((maxItems > 0) && (n > maxItems)) 1596 n = maxItems; 1597 1598 for (int i = 0; i < n; i++) { 1599 if (i > 0) 1600 sb.append(delimiter); 1601 sb.append("{"); 1602 for (int dv = 0; dv < numberTypes; dv++) { 1603 if (dv > 0) 1604 sb.append(delimiter); 1605 1606 Object theobj = ((ArrayList<Object[]>)theData).get(dv); 1607 1608 Class<? extends Object> valClass = theobj.getClass(); 1609 1610 if (!valClass.isArray()) { 1611 log.trace("toString: member - not array"); 1612 String strValue = theobj.toString(); 1613 if (maxItems > 0 && strValue.length() > maxItems) { 1614 // truncate the extra characters 1615 strValue = strValue.substring(0, maxItems); 1616 } 1617 sb.append(strValue); 1618 continue; 1619 } 1620 1621 log.trace("toString[{}]: is_enum={} is_unsigned={}", i, cmpdTypes.get(dv).isEnum(), 1622 cmpdTypes.get(dv).isUnsigned()); 1623 1624 if (cmpdTypes.get(dv).isEnum()) { 1625 String cname = valClass.getName(); 1626 char dname = cname.charAt(cname.lastIndexOf('[') + 1); 1627 log.trace("toString: is_enum with cname={} dname={}", cname, dname); 1628 1629 Map<String, String> map = cmpdTypes.get(dv).getEnumMembers(); 1630 String theValue = null; 1631 switch (dname) { 1632 case 'B': 1633 byte[] barray = (byte[]) theobj; 1634 short sValue = barray[i]; 1635 theValue = String.valueOf(sValue); 1636 if (map.containsKey(theValue)) { 1637 
sb.append(map.get(theValue)); 1638 } 1639 else 1640 sb.append(sValue); 1641 break; 1642 case 'S': 1643 short[] sarray = (short[]) theobj; 1644 int iValue = sarray[0]; 1645 theValue = String.valueOf(iValue); 1646 if (map.containsKey(theValue)) { 1647 sb.append(map.get(theValue)); 1648 } 1649 else 1650 sb.append(iValue); 1651 break; 1652 case 'I': 1653 int[] iarray = (int[]) theobj; 1654 long lValue = iarray[i]; 1655 theValue = String.valueOf(lValue); 1656 if (map.containsKey(theValue)) { 1657 sb.append(map.get(theValue)); 1658 } 1659 else 1660 sb.append(lValue); 1661 break; 1662 case 'J': 1663 long[] larray = (long[]) theobj; 1664 Long l = larray[i]; 1665 theValue = Long.toString(l); 1666 if (map.containsKey(theValue)) { 1667 sb.append(map.get(theValue)); 1668 } 1669 else 1670 sb.append(theValue); 1671 break; 1672 default: 1673 sb.append(Array.get(theobj, i)); 1674 break; 1675 } 1676 } 1677 else if (cmpdTypes.get(dv).isUnsigned()) { 1678 String cname = valClass.getName(); 1679 char dname = cname.charAt(cname.lastIndexOf('[') + 1); 1680 log.trace("toString: is_unsigned with cname={} dname={}", cname, dname); 1681 1682 switch (dname) { 1683 case 'B': 1684 byte[] barray = (byte[]) theobj; 1685 short sValue = barray[i]; 1686 if (sValue < 0) { 1687 sValue += 256; 1688 } 1689 sb.append(sValue); 1690 break; 1691 case 'S': 1692 short[] sarray = (short[]) theobj; 1693 int iValue = sarray[i]; 1694 if (iValue < 0) { 1695 iValue += 65536; 1696 } 1697 sb.append(iValue); 1698 break; 1699 case 'I': 1700 int[] iarray = (int[]) theobj; 1701 long lValue = iarray[i]; 1702 if (lValue < 0) { 1703 lValue += 4294967296L; 1704 } 1705 sb.append(lValue); 1706 break; 1707 case 'J': 1708 long[] larray = (long[]) theobj; 1709 Long l = larray[i]; 1710 String theValue = Long.toString(l); 1711 if (l < 0) { 1712 l = (l << 1) >>> 1; 1713 BigInteger big1 = new BigInteger("9223372036854775808"); // 2^65 1714 BigInteger big2 = new BigInteger(l.toString()); 1715 BigInteger big = big1.add(big2); 1716 
theValue = big.toString(); 1717 } 1718 sb.append(theValue); 1719 break; 1720 default: 1721 String strValue = Array.get(theobj, i).toString(); 1722 if (maxItems > 0 && strValue.length() > maxItems) { 1723 // truncate the extra characters 1724 strValue = strValue.substring(0, maxItems); 1725 } 1726 sb.append(strValue); 1727 break; 1728 } 1729 } 1730 else { 1731 log.trace("toString: not enum or unsigned"); 1732 Object value = Array.get(theobj, i); 1733 String strValue; 1734 1735 if (value == null) { 1736 strValue = "null"; 1737 } 1738 else { 1739 strValue = value.toString(); 1740 } 1741 1742 if (maxItems > 0 && strValue.length() > maxItems) { 1743 // truncate the extra characters 1744 strValue = strValue.substring(0, maxItems); 1745 } 1746 sb.append(strValue); 1747 } 1748 } // end for (int dv = 0; dv < numberTypes; dv++) 1749 sb.append("}"); 1750 } // end for (int i = 1; i < n; i++) 1751 1752 return sb.toString(); 1753 } 1754 1755 /* Implement interface H5Attribute */ 1756 1757 /** 1758 * The general read and write attribute operations for hdf5 object data. 
     *
     * @param attr_id
     *            the attribute to access
     * @param ioType
     *            the type of IO operation
     * @param objBuf
     *            the data buffer to use for write operation
     *
     * @return the attribute data
     *
     * @throws Exception
     *             if the data can not be retrieved
     */
    public Object AttributeCommonIO(long attr_id, H5File.IO_TYPE ioType, Object objBuf) throws Exception {
        H5Datatype dsDatatype = (H5Datatype) getDatatype();
        Object theData = null;

        long dt_size = dsDatatype.getDatatypeSize();
        log.trace("AttributeCommonIO(): create native");
        // Native datatype id; must be released via dsDatatype.close(tid) on every path.
        long tid = dsDatatype.createNative();
        if (ioType == H5File.IO_TYPE.READ) {
            log.trace("AttributeCommonIO():read ioType isNamed={} isEnum={} isText={} isRefObj={}", dsDatatype.isNamed(), dsDatatype.isEnum(), dsDatatype.isText(), dsDatatype.isRefObj());

            // Total number of data points is the product of all dimension sizes.
            long lsize = 1;
            for (int j = 0; j < dims.length; j++) {
                lsize *= dims[j];
            }
            log.trace("AttributeCommonIO():read ioType dt_size={} lsize={}", dt_size, lsize);

            try {
                // Read data.
                Object attr_data = new byte[(int)(dt_size * lsize)];

                try {
                    H5.H5Aread(attr_id, tid, attr_data);
                }
                catch (Exception ex) {
                    // NOTE(review): a read failure is only logged; the (zero-filled)
                    // buffer is still unpacked below — confirm this is intended.
                    log.debug("AttributeCommonIO(): H5Aread failure: ", ex);
                }
                // Unpack the raw byte blob into a list of per-member Java arrays.
                theData = compoundTypeIO(dsDatatype, (int)lsize, dsDatatype, attr_data, new int[]{0});
            }
            catch (Exception ex) {
                log.debug("AttributeCommonIO():read ioType read failure: ", ex);
                throw new Exception(ex.getMessage(), ex);
            }
            finally {
                dsDatatype.close(tid);
            }
            for (int i = 0; i < ((ArrayList<Object[]>)theData).size(); i++) {
                Object theobj = ((ArrayList<Object[]>)theData).get(i);
                log.trace("AttributeCommonIO():read ioType data: {}", theobj);
            }
            // Cache the freshly read value so getData()/toAttributeString() can use it.
            originalBuf = theData;
            isDataLoaded = true;
        } // H5File.IO_TYPE.READ
        else {
            // WRITE: pack the caller's per-member lists into one flat byte blob.
            theData = compoundTypeWriteIO(dsDatatype, dsDatatype, objBuf, new int[]{0});
            try {
                H5.H5Awrite(attr_id, tid, theData);
            }
            catch (Exception ex) {
                // NOTE(review): write failures are swallowed after logging; the caller
                // is not informed — confirm this best-effort behavior is intended.
                log.debug("AttributeCommonIO(): H5Awrite failure: ", ex);
            }
            finally {
                dsDatatype.close(tid);
            }
        } // H5File.IO_TYPE.WRITE

        return theData;
    }

    /**
     * Read a subset of an attribute for hdf5 object data.
1832 * 1833 * @return the selected attribute data 1834 * 1835 * @throws Exception 1836 * if the data can not be retrieved 1837 */ 1838 public Object AttributeSelection() throws Exception { 1839 return originalBuf; 1840// H5Datatype dsDatatype = (H5Datatype) getDatatype(); 1841// Object theData = H5Datatype.allocateArray(dsDatatype, (int)nPoints); 1842// if (dsDatatype.isText() && convertByteToString && (theData instanceof byte[])) { 1843// log.trace("AttributeSelection(): isText: converting byte array to string array"); 1844// theData = byteToString((byte[]) theData, (int) dsDatatype.getDatatypeSize()); 1845// } 1846// else if (dsDatatype.isFloat() && dsDatatype.getDatatypeSize() == 16) { 1847// log.trace("AttributeSelection(): isFloat: converting byte array to BigDecimal array"); 1848// theData = dsDatatype.byteToBigDecimal(0, (int)nPoints, (byte[]) theData); 1849// } 1850// else if (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isFloat() && dsDatatype.getDatatypeBase().getDatatypeSize() == 16) { 1851// log.trace("AttributeSelection(): isArray and isFloat: converting byte array to BigDecimal array"); 1852// long[] arrayDims = dsDatatype.getArrayDims(); 1853// int asize = (int)nPoints; 1854// for (int j = 0; j < arrayDims.length; j++) { 1855// asize *= arrayDims[j]; 1856// } 1857// theData = ((H5Datatype)dsDatatype.getDatatypeBase()).byteToBigDecimal(0, asize, (byte[]) theData); 1858// } 1859// else if (dsDatatype.isRefObj()) { 1860// log.trace("AttributeSelection(): isREF: converting byte array to long array"); 1861// theData = HDFNativeData.byteToLong((byte[]) theData); 1862// } 1863// Object theOrig = originalBuf; 1864 1865 /* 1866 * Copy the selection from originalBuf to theData Only three dims are involved and selected data is 2 dimensions 1867 * getHeight() is the row dimension getWidth() is the col dimension 1868 * getDepth() is the frame dimension 1869 */ 1870// long[] start = getStartDims(); 1871// long curFrame = start[selectedIndex[2]]; 1872// for 
(int col = 0; col < (int)getWidth(); col++) { 1873// for (int row = 0; row < (int)getHeight(); row++) { 1874 1875// int k = (int)startDims[selectedIndex[2]] * (int)getDepth(); 1876// int index = row * (int)getWidth() + col; 1877// log.trace("compoundAttributeSelection(): point{} row:col:k={}:{}:{}", curFrame, row, col, k); 1878// int fromIndex = ((int)curFrame * (int)getWidth() * (int)getHeight() + 1879// col * (int)getHeight() + 1880// row);// * (int) dsDatatype.getDatatypeSize(); 1881// int toIndex = (col * (int)getHeight() + 1882// row);// * (int) dsDatatype.getDatatypeSize(); 1883// int objSize = 1; 1884// if (dsDatatype.isArray()) { 1885// long[] arrayDims = dsDatatype.getArrayDims(); 1886// objSize = (int)arrayDims.length; 1887// } 1888// for (int i = 0; i < ((ArrayList<Object[]>)theOrig).size(); i++) { 1889// Object theOrigobj = ((ArrayList<Object[]>)theOrig).get(i); 1890// Object theDataobj = ((ArrayList<Object[]>)theData).get(i); 1891// log.trace("compoundAttributeSelection(): theOrig={} theData={}", theOrigobj, theDataobj); 1892// System.arraycopy(theOrig, fromIndex, theData, toIndex, objSize); 1893// } 1894// } 1895// } 1896 1897// log.trace("compoundAttributeSelection(): theData={}", theData); 1898// return theData; 1899 } 1900}