/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the files COPYING and Copyright.html. *
 * COPYING can be found at the root of the source code distribution tree.    *
 * Or, see https://support.hdfgroup.org/products/licenses.html               *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.h5;

import java.lang.reflect.Array;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.text.DecimalFormat;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Vector;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
import hdf.hdf5lib.HDFNativeData;
import hdf.hdf5lib.exceptions.HDF5DataFiltersException;
import hdf.hdf5lib.exceptions.HDF5Exception;
import hdf.hdf5lib.exceptions.HDF5LibraryException;

import hdf.object.Attribute;
import hdf.object.DataFormat;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.MetaDataContainer;
import hdf.object.ScalarDS;

import hdf.object.h5.H5Attribute;
import hdf.object.h5.H5Datatype;
import hdf.object.h5.H5ReferenceType;

/**
 * An attribute is a (name, value) pair of metadata attached to a primary data object such as a
 * dataset, group or named datatype.
 *
 * Like a dataset, an attribute has a name, datatype and dataspace.
 *
 * For more details on attributes, see the <a href=
 * "https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5
 * User's Guide</a>.
 *
 * The following code is an example of an attribute with a 1D integer array of two elements.
 *
 * <pre>
 * // Example of creating a new attribute
 * // The name of the new attribute
 * String name = "Data range";
 * // Creating an unsigned 1-byte integer datatype
 * Datatype type = new Datatype(Datatype.CLASS_INTEGER, // class
 *                              1,                      // size in bytes
 *                              Datatype.ORDER_LE,      // byte order
 *                              Datatype.SIGN_NONE);    // unsigned
 * // 1-D array of size two
 * long[] dims = {2};
 * // The value of the attribute
 * int[] value = {0, 255};
 * // Create a new attribute; parentObj is the dataset, group or named datatype
 * // to which the attribute will be attached
 * Attribute dataRange = new H5ScalarAttr(parentObj, name, type, dims);
 * // Set the attribute value
 * dataRange.setValue(value);
 * // See FileFormat.writeAttribute() for how to attach an attribute to an object.
 * @see hdf.object.FileFormat#writeAttribute(HObject, Attribute, boolean)
 * </pre>
 *
 * For an atomic datatype, the value of an Attribute will be a 1D array of integers, floats or
 * strings.
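 *
 * As a sketch of typical usage (reusing "dataRange" and "parentObj" from the example above; the
 * boolean argument of FileFormat.writeAttribute() indicates whether the attribute already exists
 * on the object), the attribute can be attached to its parent object and its value read back:
 *
 * <pre>
 * // attach the attribute to parentObj and write its value to the file
 * parentObj.getFileFormat().writeAttribute(parentObj, dataRange, false);
 * // read the value back into memory; for this integer attribute the buffer is an int[]
 * int[] readBack = (int[]) dataRange.getAttributeData();
 * </pre>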
 *
 * @see hdf.object.Datatype
 *
 * @version 1.0 6/15/2021
 * @author Allen Byrne
 */
public class H5ScalarAttr extends ScalarDS implements H5Attribute
{
    private static final long serialVersionUID = 2072473407027648309L;

    private static final Logger log = LoggerFactory.getLogger(H5ScalarAttr.class);

    /** The HObject to which this H5ScalarAttr is attached, Attribute interface */
    protected HObject parentObject;

    /** additional information and properties for the attribute, Attribute interface */
    private transient Map<String, Object> properties;

    /**
     * flag to indicate if the datatype in file is the same as datatype in memory
     */
    protected boolean isNativeDatatype = false;

    /**
     * Create an attribute with specified name, data type and dimension sizes.
     *
     * For a scalar attribute, the dimension size can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general class
     * and is independent of the file format, e.g., the implementation of an attribute
     * applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = "CLASS";
     * String[] classValue = { "IMAGE" };
     * Datatype attrType = null;
     * try {
     *     attrType = new H5Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * Attribute attr = new H5ScalarAttr(parentObj, attrName, attrType, attrDims);
     * attr.setValue(classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this H5ScalarAttr is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for scalar attribute
     *
     * @see hdf.object.Datatype
     */
    public H5ScalarAttr(HObject parentObj, String attrName, Datatype attrType, long[] attrDims) {
        this(parentObj, attrName, attrType, attrDims, null);
    }

    /**
     * Create an attribute with a specified name and value.
     *
     * For a scalar attribute, the dimension size can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general class
     * and is independent of the file format, e.g., the implementation of an attribute
     * applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = "CLASS";
     * String[] classValue = { "IMAGE" };
     * Datatype attrType = null;
     * try {
     *     attrType = new H5Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * Attribute attr = new H5ScalarAttr(parentObj, attrName, attrType, attrDims, classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this H5ScalarAttr is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for scalar attribute
     * @param attrValue
     *            the value of the attribute, null if no value
     *
     * @see hdf.object.Datatype
     */
    @SuppressWarnings({ "rawtypes", "unchecked", "deprecation" })
    public H5ScalarAttr(HObject parentObj, String attrName, Datatype attrType, long[] attrDims, Object attrValue) {
        super((parentObj == null) ? null : parentObj.getFileFormat(), attrName,
                (parentObj == null) ? null : parentObj.getFullName(), null);

        log.trace("H5ScalarAttr: start {}", parentObj);
        this.parentObject = parentObj;

        datatype = attrType;

        if (attrValue != null) {
            data = attrValue;
            originalBuf = attrValue;
            isDataLoaded = true;
        }
        properties = new HashMap();

        if (attrDims == null) {
            rank = 1;
            dims = new long[] { 1 };
            isScalar = true;
        }
        else {
            dims = attrDims;
            rank = dims.length;
            isScalar = false;
        }

        selectedDims = new long[rank];
        startDims = new long[rank];
        selectedStride = new long[rank];

        log.trace("attrName={}, attrType={}, attrValue={}, rank={}, isUnsigned={}",
                attrName, attrType.getDescription(), data, rank, getDatatype().isUnsigned());

        resetSelection();
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#open()
     */
    @Override
    public long open() {
        if (parentObject == null) {
            log.debug("open(): attribute's parent object is null");
            return HDF5Constants.H5I_INVALID_HID;
        }

        long aid = HDF5Constants.H5I_INVALID_HID;
        long pObjID = HDF5Constants.H5I_INVALID_HID;

        try {
            pObjID = parentObject.open();
            if (pObjID >= 0) {
                if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5))) {
                    log.trace("open(): FILE_TYPE_HDF5");
                    if (H5.H5Aexists(pObjID, getName()))
                        aid = H5.H5Aopen(pObjID, getName(), HDF5Constants.H5P_DEFAULT);
                }
            }

            log.trace("open(): aid={}", aid);
        }
        catch (Exception ex) {
            log.debug("open(): Failed to open attribute {}: ", getName(), ex);
            aid = HDF5Constants.H5I_INVALID_HID;
        }
        finally {
            parentObject.close(pObjID);
        }

        return aid;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#close(int)
     */
    @Override
    public void close(long aid) {
        if (aid >= 0) {
            if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5))) {
                log.trace("close(): FILE_TYPE_HDF5");
                try {
                    H5.H5Aclose(aid);
                }
                catch (HDF5Exception ex) {
                    log.debug("close(): H5Aclose({}) failure: ", aid, ex);
                }
            }
        }
    }

    /**
     * Retrieves datatype and dataspace information from file and sets the attribute
     * in memory.
     *
     * The init() is designed to support lazy operation in an attribute object. When a
     * data object is retrieved from file, the datatype, dataspace and raw data are
     * not loaded into memory. When it is asked to read the raw data from file,
     * init() is first called to get the datatype and dataspace information, and then
     * the raw data is loaded from file.
     */
    @Override
    public void init() {
        if (inited) {
            // already called.
Initialize only once 301 resetSelection(); 302 log.trace("init(): H5ScalarAttr already initialized"); 303 return; 304 } 305 306 long aid = HDF5Constants.H5I_INVALID_HID; 307 long tid = HDF5Constants.H5I_INVALID_HID; 308 long sid = HDF5Constants.H5I_INVALID_HID; 309 long nativeTID = HDF5Constants.H5I_INVALID_HID; 310 311 aid = open(); 312 if (aid >= 0) { 313 try { 314 sid = H5.H5Aget_space(aid); 315 rank = H5.H5Sget_simple_extent_ndims(sid); 316 space_type = H5.H5Sget_simple_extent_type(sid); 317 tid = H5.H5Aget_type(aid); 318 log.trace("init(): tid={} sid={} rank={} space_type={}", tid, sid, rank, space_type); 319 320 if (rank == 0) { 321 // a scalar data point 322 isScalar = true; 323 rank = 1; 324 dims = new long[] { 1 }; 325 log.trace("init(): rank is a scalar data point"); 326 } 327 else { 328 isScalar = false; 329 dims = new long[rank]; 330 maxDims = new long[rank]; 331 H5.H5Sget_simple_extent_dims(sid, dims, maxDims); 332 log.trace("init(): rank={}, dims={}, maxDims={}", rank, dims, maxDims); 333 } 334 335 if (datatype == null) { 336 try { 337 int nativeClass = H5.H5Tget_class(tid); 338 if (nativeClass == HDF5Constants.H5T_REFERENCE) { 339 long lsize = 1; 340 if (rank > 0) { 341 log.trace("init(): rank={}, dims={}", rank, dims); 342 for (int j = 0; j < dims.length; j++) { 343 lsize *= dims[j]; 344 } 345 } 346 datatype = new H5ReferenceType(getFileFormat(), lsize, tid); 347 } 348 else 349 datatype = new H5Datatype(getFileFormat(), tid); 350 351 log.trace("init(): tid={} is tclass={} has isText={} : isNamed={} : isVLEN={} : isEnum={} : isUnsigned={} : isRegRef={}", 352 tid, datatype.getDatatypeClass(), ((H5Datatype) datatype).isText(), datatype.isNamed(), datatype.isVLEN(), 353 datatype.isEnum(), datatype.isUnsigned(), ((H5Datatype) datatype).isRegRef()); 354 } 355 catch (Exception ex) { 356 log.debug("init(): failed to create datatype for attribute: ", ex); 357 datatype = null; 358 } 359 } 360 361 // Check if the datatype in the file is the native datatype 362 try { 363 nativeTID = H5.H5Tget_native_type(tid); 364 isNativeDatatype = H5.H5Tequal(tid, nativeTID); 365 log.trace("init(): isNativeDatatype={}", isNativeDatatype); 366 } 367 catch (Exception ex) { 368 log.debug("init(): check if native type failure: ", ex); 369 } 370 371 inited = true; 372 } 373 catch (HDF5Exception ex) { 374 log.debug("init(): ", ex); 375 } 376 finally { 377 try { 378 H5.H5Tclose(nativeTID); 379 } 380 catch (Exception ex2) { 381 log.debug("init(): H5Tclose(nativeTID {}) failure: ", nativeTID, ex2); 382 } 383 try { 384 H5.H5Tclose(tid); 385 } 386 catch (HDF5Exception ex2) { 387 log.debug("init(): H5Tclose(tid {}) failure: ", tid, ex2); 388 } 389 try { 390 H5.H5Sclose(sid); 391 } 392 catch (HDF5Exception ex2) { 393 log.debug("init(): H5Sclose(sid {}) failure: ", sid, ex2); 394 } 395 396 } 397 398 close(aid); 399 400 startDims = new long[rank]; 401 selectedDims = new long[rank]; 402 403 resetSelection(); 404 } 405 else { 406 log.debug("init(): failed to open attribute"); 407 } 408 } 409 410 /** 411 * Returns the datatype of the data object. 412 * 413 * @return the datatype of the data object. 
     */
    @Override
    public Datatype getDatatype() {
        if (!inited)
            init();

        if (datatype == null) {
            long aid = HDF5Constants.H5I_INVALID_HID;
            long tid = HDF5Constants.H5I_INVALID_HID;

            aid = open();
            if (aid >= 0) {
                try {
                    tid = H5.H5Aget_type(aid);
                    log.trace("getDatatype(): isNativeDatatype={}", isNativeDatatype);
                    if (!isNativeDatatype) {
                        long tmptid = -1;
                        try {
                            tmptid = H5Datatype.toNative(tid);
                            if (tmptid >= 0) {
                                try {
                                    H5.H5Tclose(tid);
                                }
                                catch (Exception ex2) {
                                    log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex2);
                                }
                                tid = tmptid;
                            }
                        }
                        catch (Exception ex) {
                            log.debug("getDatatype(): toNative: ", ex);
                        }
                    }
                    int nativeClass = H5.H5Tget_class(tid);
                    if (nativeClass == HDF5Constants.H5T_REFERENCE) {
                        long lsize = 1;
                        long sid = H5.H5Aget_space(aid);
                        int rank = H5.H5Sget_simple_extent_ndims(sid);
                        if (rank > 0) {
                            long dims[] = new long[rank];
                            H5.H5Sget_simple_extent_dims(sid, dims, null);
                            log.trace("getDatatype(): rank={}, dims={}", rank, dims);
                            for (int j = 0; j < dims.length; j++) {
                                lsize *= dims[j];
                            }
                        }
                        datatype = new H5ReferenceType(getFileFormat(), lsize, tid);
                    }
                    else
                        datatype = new H5Datatype(getFileFormat(), tid);
                }
                catch (Exception ex) {
                    log.debug("getDatatype(): ", ex);
                }
                finally {
                    try {
                        H5.H5Tclose(tid);
                    }
                    catch (HDF5Exception ex) {
                        log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex);
                    }
                    try {
                        H5.H5Aclose(aid);
                    }
                    catch (HDF5Exception ex) {
                        log.debug("getDatatype(): H5Aclose(aid {}) failure: ", aid, ex);
                    }
                }
            }
        }

        return datatype;
    }

    /**
     * Returns the data buffer of the attribute in memory.
     *
     * If data is already loaded into memory, returns the data; otherwise, calls
     * read() to read data from file into a memory buffer and returns the memory
     * buffer.
     *
     * The whole attribute is read into memory. Users can also select
     * a subset from the whole data. Subsetting is done in an implicit way.
     *
     * <b>How to Select a Subset</b>
     *
     * A selection is specified by three arrays: start, stride and count.
     * <ol>
     * <li>start: offset of a selection
     * <li>stride: determines how many elements to move in each dimension
     * <li>count: number of elements to select in each dimension
     * </ol>
     * getStartDims(), getStride() and getSelectedDims() return the start,
     * stride and count arrays, respectively. Applications can make a selection
     * by changing the values of the arrays.
     *
     * The following example shows how to make a subset. In the example, the
     * attribute is a 4-dimensional array of [200][100][50][10], i.e.
     * dims[0]=200; dims[1]=100; dims[2]=50; dims[3]=10; <br>
     * We want to select every other data point in dims[1] and dims[2]
     *
     * <pre>
     * int rank = attribute.getRank(); // number of dimensions of the attribute
     * long[] dims = attribute.getDims(); // the dimension sizes of the attribute
     * long[] selected = attribute.getSelectedDims(); // the selected size of the attribute
     * long[] start = attribute.getStartDims(); // the offset of the selection
     * long[] stride = attribute.getStride(); // the stride of the attribute
     * int[] selectedIndex = attribute.getSelectedIndex(); // the selected dimensions for display
     *
     * // select dim1 and dim2 as 2D data for display, and slice through dim0
     * selectedIndex[0] = 1;
     * selectedIndex[1] = 2;
     * selectedIndex[2] = 0;
     *
     * // reset the selection arrays
     * for (int i = 0; i < rank; i++) {
     *     start[i] = 0;
     *     selected[i] = 1;
     *     stride[i] = 1;
     * }
     *
     * // set stride to 2 on dim1 and dim2 so that every other data point is
     * // selected.
     * stride[1] = 2;
     * stride[2] = 2;
     *
     * // set the selection size of dim1 and dim2
     * selected[1] = dims[1] / stride[1];
     * selected[2] = dims[2] / stride[2];
     *
     * // when H5ScalarAttr.getData() is called, the selection above will be used since
     * // the dimension arrays are passed by reference. Changes of these arrays
     * // outside the attribute object directly change the values of these arrays
     * // in the attribute object.
     * </pre>
     *
     * For H5ScalarAttr, the memory data buffer is a one-dimensional array of byte,
     * short, int, float, double or String type based on the datatype of the
     * attribute.
     *
     * @return the memory buffer of the attribute.
555 * 556 * @throws Exception if object can not be read 557 * @throws OutOfMemoryError if memory is exhausted 558 */ 559 @Override 560 public Object getData() throws Exception, OutOfMemoryError { 561 log.trace("getData(): isDataLoaded={}", isDataLoaded); 562 if (!isDataLoaded) 563 data = read(); // load the data, attributes read all data 564 565 nPoints = 1; 566 log.trace("getData(): selectedDims length={}", selectedDims.length); 567 int point_len = selectedDims.length; 568 //Partial data for 3 or more dimensions 569 if (rank > 2) 570 point_len = 3; 571 for (int j = 0; j < point_len; j++) { 572 log.trace("getData(): selectedDims[{}]={}", j, selectedDims[j]); 573 nPoints *= selectedDims[j]; 574 } 575 log.trace("getData: read {}", nPoints); 576 577 // apply the selection for 3 or more dimensions 578 // selection only expects to use 3 selectedDims 579 // where selectedIndex[0] is the row dimension 580 // where selectedIndex[1] is the col dimension 581 // where selectedIndex[2] is the frame dimension 582 if (rank > 2) 583 data = AttributeSelection(); 584 585 return data; 586 } 587 588 /* 589 * (non-Javadoc) 590 * @see hdf.object.Dataset#copy(hdf.object.Group, java.lang.String, long[], java.lang.Object) 591 */ 592 @Override 593 public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff) throws Exception { 594 // not supported 595 throw new UnsupportedOperationException("copy operation unsupported for H5."); 596 } 597 598 /* 599 * (non-Javadoc) 600 * 601 * @see hdf.object.Attribute#readBytes() 602 */ 603 @Override 604 public byte[] readBytes() throws HDF5Exception { 605 byte[] theData = null; 606 607 if (!isInited()) 608 init(); 609 610 long aid = open(); 611 if (aid >= 0) { 612 long tid = HDF5Constants.H5I_INVALID_HID; 613 614 try { 615 long[] lsize = { 1 }; 616 for (int j = 0; j < selectedDims.length; j++) 617 lsize[0] *= selectedDims[j]; 618 619 tid = H5.H5Aget_type(aid); 620 long size = H5.H5Tget_size(tid) * lsize[0]; 621 log.trace("readBytes(): size = {}", size); 622 623 if (size < Integer.MIN_VALUE || size > Integer.MAX_VALUE) 624 throw new Exception("Invalid int size"); 625 626 theData = new byte[(int)size]; 627 628 log.trace("readBytes(): read attribute id {} of size={}", tid, lsize); 629 H5.H5Aread(aid, tid, theData); 630 } 631 catch (Exception ex) { 632 log.debug("readBytes(): failed to read data: ", ex); 633 } 634 finally { 635 try { 636 H5.H5Tclose(tid); 637 } 638 catch (HDF5Exception ex2) { 639 log.debug("readBytes(): H5Tclose(tid {}) failure: ", tid, ex2); 640 } 641 close(aid); 642 } 643 } 644 645 return theData; 646 } 647 648 /** 649 * Reads the data from file. 650 * 651 * read() reads the data from file to a memory buffer and returns the memory 652 * buffer. The attribute object does not hold the memory buffer. To store the 653 * memory buffer in the attribute object, one must call getData(). 654 * 655 * By default, the whole attribute is read into memory. 656 * 657 * For ScalarAttr, the memory data buffer is a one-dimensional array of byte, 658 * short, int, float, double or String type based on the datatype of the 659 * attribute. 660 * 661 * @return the data read from file. 
662 * 663 * @see #getData() 664 * @see hdf.object.DataFormat#read() 665 * 666 * @throws Exception 667 * if object can not be read 668 */ 669 @Override 670 public Object read() throws Exception { 671 Object readData = null; 672 673 if (!isInited()) 674 init(); 675 676 try { 677 readData = scalarAttributeCommonIO(H5File.IO_TYPE.READ, null); 678 } 679 catch (Exception ex) { 680 log.debug("read(): failed to read scalar attribute: ", ex); 681 throw new Exception("failed to read scalar attribute: " + ex.getMessage(), ex); 682 } 683 684 return readData; 685 } 686 687 /** 688 * Writes the given data buffer into this attribute in a file. 689 * 690 * @param buf 691 * The buffer that contains the data values. 692 * 693 * @throws Exception 694 * If there is an error at the HDF5 library level. 695 */ 696 @Override 697 public void write(Object buf) throws Exception { 698 if (this.getFileFormat().isReadOnly()) 699 throw new Exception("cannot write to scalar attribute in file opened as read-only"); 700 701 if (!buf.equals(data)) 702 setData(buf); 703 704 if (parentObject == null) { 705 log.debug("write(Object): parent object is null; nowhere to write attribute to"); 706 return; 707 } 708 709 ((MetaDataContainer) getParentObject()).writeMetadata(this); 710 711 try { 712 scalarAttributeCommonIO(H5File.IO_TYPE.WRITE, buf); 713 } 714 catch (Exception ex) { 715 log.debug("write(Object): failed to write to scalar attribute: ", ex); 716 throw new Exception("failed to write to scalar attribute: " + ex.getMessage(), ex); 717 } 718 resetSelection(); 719 } 720 721 private Object scalarAttributeCommonIO(H5File.IO_TYPE ioType, Object writeBuf) throws Exception { 722 H5Datatype dsDatatype = (H5Datatype)getDatatype(); 723 Object theData = null; 724 725 /* 726 * I/O type-specific pre-initialization. 727 */ 728 if (ioType == H5File.IO_TYPE.WRITE) { 729 if (writeBuf == null) { 730 log.debug("scalarAttributeCommonIO(): writeBuf is null"); 731 throw new Exception("write buffer is null"); 732 } 733 } 734 735 long aid = open(); 736 if (aid >= 0) { 737 log.trace("scalarAttributeCommonIO(): isDataLoaded={}", isDataLoaded); 738 try { 739 theData = AttributeCommonIO(aid, ioType, writeBuf); 740 } 741 finally { 742 close(aid); 743 } 744 } 745 else 746 log.debug("scalarAttributeCommonIO(): failed to open attribute"); 747 748 return theData; 749 } 750 751 /* Implement interface Attribute */ 752 753 /** 754 * Returns the HObject to which this Attribute is currently "attached". 755 * 756 * @return the HObject to which this Attribute is currently "attached". 757 */ 758 @Override 759 public HObject getParentObject() { 760 return parentObject; 761 } 762 763 /** 764 * Sets the HObject to which this Attribute is "attached". 765 * 766 * @param pObj 767 * the new HObject to which this Attribute is "attached". 768 */ 769 @Override 770 public void setParentObject(HObject pObj) { 771 parentObject = pObj; 772 } 773 774 /** 775 * set a property for the attribute. 776 * 777 * @param key the attribute Map key 778 * @param value the attribute Map value 779 */ 780 @Override 781 public void setProperty(String key, Object value) { 782 properties.put(key, value); 783 } 784 785 /** 786 * get a property for a given key. 787 * 788 * @param key the attribute Map key 789 * 790 * @return the property 791 */ 792 @Override 793 public Object getProperty(String key) { 794 return properties.get(key); 795 } 796 797 /** 798 * get all property keys. 
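     *
     * For example (a sketch, where {@code attr} is any H5ScalarAttr instance and "units" is an
     * arbitrary key, not a predefined property name):
     *
     * <pre>
     * attr.setProperty("units", "meters");
     * for (String key : attr.getPropertyKeys())
     *     System.out.println(key + " = " + attr.getProperty(key));
     * </pre>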
     *
     * @return the Collection of property keys
     */
    @Override
    public Collection<String> getPropertyKeys() {
        return properties.keySet();
    }

    /**
     * Returns the name of the object. For example, "Raster Image #2".
     *
     * @return The name of the object.
     */
    @Override
    public final String getAttributeName() {
        return getName();
    }

    /**
     * Retrieves the attribute data from the file.
     *
     * @return the attribute data.
     *
     * @throws Exception
     *             if the data can not be retrieved
     */
    @Override
    public final Object getAttributeData() throws Exception, OutOfMemoryError {
        return getData();
    }

    /**
     * Returns the datatype of the attribute.
     *
     * @return the datatype of the attribute.
     */
    @Override
    public final Datatype getAttributeDatatype() {
        return getDatatype();
    }

    /**
     * Returns the space type for the attribute. It returns a
     * negative number if it failed to retrieve the type information from
     * the file.
     *
     * @return the space type for the attribute.
     */
    @Override
    public final int getAttributeSpaceType() {
        return getSpaceType();
    }

    /**
     * Returns the rank (number of dimensions) of the attribute. It returns a
     * negative number if it failed to retrieve the dimension information from
     * the file.
     *
     * @return the number of dimensions of the attribute.
     */
    @Override
    public final int getAttributeRank() {
        return getRank();
    }

    /**
     * Returns the selected size of the rows and columns of the attribute. It returns a
     * negative number if it failed to retrieve the size information from
     * the file.
     *
     * @return the selected size of the rows and columns of the attribute.
     */
    @Override
    public final int getAttributePlane() {
        return (int)getWidth() * (int)getHeight();
    }

    /**
     * Returns the array that contains the dimension sizes of the data value of
     * the attribute. It returns null if it failed to retrieve the dimension
     * information from the file.
     *
     * @return the dimension sizes of the attribute.
     */
    @Override
    public final long[] getAttributeDims() {
        return getDims();
    }

    /**
     * @return true if the data is a single scalar point; otherwise, returns
     *         false.
     */
    @Override
    public boolean isAttributeScalar() {
        return isScalar();
    }

    /**
     * Not for public use in the future.
     *
     * setData() is not safe to use because it changes the memory buffer
     * of the dataset object. Dataset operations such as write/read
     * will fail if the buffer type or size is changed.
     *
     * @param d the object data, which must be an array of Objects
     */
    @Override
    public void setAttributeData(Object d) {
        setData(d);
    }

    /**
     * Writes the memory buffer of this attribute to the file.
     *
     * @throws Exception if buffer can not be written
     */
    @Override
    public void writeAttribute() throws Exception {
        write();
    }

    /**
     * Writes the given data buffer into this attribute in a file.
     *
     * The data buffer contains the data values of the attribute. The data is written
     * into the file as one data blob.
     *
     * @param buf
     *            The buffer that contains the data values of the attribute.
     *
     * @throws Exception
     *             If there is an error at the library level.
932 */ 933 @Override 934 public void writeAttribute(Object buf) throws Exception { 935 write(buf); 936 } 937 938 /** 939 * Returns a string representation of the data value. For 940 * example, "0, 255". 941 * 942 * For a compound datatype, it will be a 1D array of strings with field 943 * members separated by the delimiter. For example, 944 * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int, 945 * float} of three data points. 946 * 947 * @param delimiter 948 * The delimiter used to separate individual data points. It 949 * can be a comma, semicolon, tab or space. For example, 950 * toString(",") will separate data by commas. 951 * 952 * @return the string representation of the data values. 953 */ 954 @Override 955 public String toAttributeString(String delimiter) { 956 return toString(delimiter, -1); 957 } 958 959 /** 960 * Returns a string representation of the data value. For 961 * example, "0, 255". 962 * 963 * For a compound datatype, it will be a 1D array of strings with field 964 * members separated by the delimiter. For example, 965 * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int, 966 * float} of three data points. 967 * 968 * @param delimiter 969 * The delimiter used to separate individual data points. It 970 * can be a comma, semicolon, tab or space. For example, 971 * toString(",") will separate data by commas. 972 * @param maxItems 973 * The maximum number of Array values to return 974 * 975 * @return the string representation of the data values. 976 */ 977 @Override 978 public String toAttributeString(String delimiter, int maxItems) { 979 Object theData = originalBuf; 980 if (theData == null) { 981 log.debug("toAttributeString: value is null"); 982 return null; 983 } 984 985 Class<? extends Object> valClass = theData.getClass(); 986 if (!valClass.isArray() && !getDatatype().isRef()) { 987 log.trace("toAttributeString: finish - not array"); 988 String strValue = theData.toString(); 989 if (maxItems > 0 && strValue.length() > maxItems) 990 // truncate the extra characters 991 strValue = strValue.substring(0, maxItems); 992 return strValue; 993 } 994 995 int n = 0; 996 Datatype dtype = getDatatype(); 997 // value is an array 998 if (valClass.isArray()) { 999 n = Array.getLength(theData); 1000 if (dtype.isRef()) 1001 n /= (int)dtype.getDatatypeSize(); 1002 } 1003 else 1004 n = ((ArrayList<Object[]>)theData).size(); 1005 if ((maxItems > 0) && (n > maxItems)) 1006 n = maxItems; 1007 1008 return toString(theData, dtype, delimiter, n); 1009 } 1010 1011 @Override 1012 protected String toString(Object theData, Datatype theType, String delimiter, int count) { 1013 log.trace("toString: is_enum={} is_unsigned={} count={}", theType.isEnum(), 1014 theType.isUnsigned(), count); 1015 StringBuilder sb = new StringBuilder(); 1016 Class<? 
extends Object> valClass = theData.getClass(); 1017 log.trace("toString:valClass={}", valClass); 1018 1019 H5Datatype dtype = (H5Datatype)theType; 1020 log.trace("toString: count={} isStdRef={}", count, dtype.isStdRef()); 1021 if (dtype.isStdRef()) { 1022 return ((H5ReferenceType)dtype).toString(delimiter, count); 1023 } 1024 else if (dtype.isVLEN() && !dtype.isVarStr()) { 1025 log.trace("toString: vlen"); 1026 String strValue; 1027 1028 for (int k = 0; k < count; k++) { 1029 Object value = Array.get(theData, k); 1030 if (value == null) 1031 strValue = "null"; 1032 else { 1033 if (dtype.getDatatypeBase().isRef()) { 1034 ArrayList<byte[]> ref_value = (ArrayList<byte[]>)value; 1035 log.trace("toString: vlen value={}", ref_value); 1036 strValue = "{"; 1037 for (int m = 0; m < ref_value.size(); m++) { 1038 byte[] curBytes = ref_value.get(m); 1039 if (m > 0) 1040 strValue += ", "; 1041 if (H5ReferenceType.zeroArrayCheck(curBytes)) 1042 strValue += "NULL"; 1043 else { 1044 if (((H5Datatype)dtype.getDatatypeBase()).isStdRef()) { 1045 strValue += H5.H5Rget_obj_name(curBytes, HDF5Constants.H5P_DEFAULT); 1046 } 1047 else if (dtype.getDatatypeBase().getDatatypeSize() == HDF5Constants.H5R_DSET_REG_REF_BUF_SIZE) { 1048 try { 1049 strValue += H5Datatype.descRegionDataset(parentObject.getFileFormat().getFID(), curBytes); 1050 } 1051 catch (Exception ex) { 1052 ex.printStackTrace(); 1053 } 1054 } 1055 else if (dtype.getDatatypeBase().getDatatypeSize() == HDF5Constants.H5R_OBJ_REF_BUF_SIZE) { 1056 try { 1057 strValue += H5Datatype.descReferenceObject(parentObject.getFileFormat().getFID(), curBytes); 1058 } 1059 catch (Exception ex) { 1060 ex.printStackTrace(); 1061 } 1062 } 1063 } 1064 } 1065 strValue += "}"; 1066 } 1067 else 1068 strValue = value.toString(); 1069 } 1070 if (k > 0) 1071 sb.append(", "); 1072 sb.append(strValue); 1073 } 1074 } 1075 else if (dtype.isRef()) { 1076 log.trace("toString: ref"); 1077 String strValue = "NULL"; 1078 byte[] rElements = null; 1079 1080 for (int k = 0; k < count; k++) { 1081 // need to iterate if type is ArrayList 1082 if (theData instanceof ArrayList) 1083 rElements = (byte[]) ((ArrayList) theData).get(k); 1084 else 1085 rElements = (byte[]) Array.get(theData, k); 1086 1087 if (H5ReferenceType.zeroArrayCheck(rElements)) 1088 strValue = "NULL"; 1089 else { 1090 if (dtype.isStdRef()) { 1091 strValue = H5.H5Rget_obj_name(rElements, HDF5Constants.H5P_DEFAULT); 1092 } 1093 else if (dtype.getDatatypeSize() == HDF5Constants.H5R_DSET_REG_REF_BUF_SIZE) { 1094 try { 1095 strValue = H5Datatype.descRegionDataset(parentObject.getFileFormat().getFID(), rElements); 1096 } 1097 catch (Exception ex) { 1098 ex.printStackTrace(); 1099 } 1100 } 1101 else if (dtype.getDatatypeSize() == HDF5Constants.H5R_OBJ_REF_BUF_SIZE) { 1102 try { 1103 strValue = H5Datatype.descReferenceObject(parentObject.getFileFormat().getFID(), rElements); 1104 } 1105 catch (Exception ex) { 1106 ex.printStackTrace(); 1107 } 1108 } 1109 } 1110 if (k > 0) 1111 sb.append(", "); 1112 sb.append(strValue); 1113 } 1114 } 1115 else { 1116 return super.toString(theData, theType, delimiter, count); 1117 } 1118 1119 return sb.toString(); 1120 } 1121 1122 /* Implement interface H5Attribute */ 1123 1124 /** 1125 * The general read and write attribute operations for hdf5 object data. 
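     *
     * A sketch of how this method is typically driven (mirroring what scalarAttributeCommonIO()
     * does internally; "attr" stands for any H5ScalarAttr instance):
     *
     * <pre>
     * long aid = attr.open(); // open the attribute on its parent object
     * try {
     *     // read the whole attribute into a memory buffer
     *     Object values = attr.AttributeCommonIO(aid, H5File.IO_TYPE.READ, null);
     *     // write a (possibly modified) buffer back to the file
     *     attr.AttributeCommonIO(aid, H5File.IO_TYPE.WRITE, values);
     * }
     * finally {
     *     attr.close(aid); // always release the attribute identifier
     * }
     * </pre>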
1126 * 1127 * @param attr_id 1128 * the attribute to access 1129 * @param ioType 1130 * the type of IO operation 1131 * @param objBuf 1132 * the data buffer to use for write operation 1133 * 1134 * @return the attribute data 1135 * 1136 * @throws Exception 1137 * if the data can not be retrieved 1138 */ 1139 @Override 1140 public Object AttributeCommonIO(long attr_id, H5File.IO_TYPE ioType, Object objBuf) throws Exception { 1141 H5Datatype dsDatatype = (H5Datatype) getDatatype(); 1142 Object theData = null; 1143 1144 long dt_size = dsDatatype.getDatatypeSize(); 1145 log.trace("AttributeCommonIO(): create native"); 1146 long tid = dsDatatype.createNative(); 1147 1148 if (ioType == H5File.IO_TYPE.READ) { 1149 log.trace("AttributeCommonIO():read ioType isNamed={} isEnum={} isText={} isRefObj={}", dsDatatype.isNamed(), dsDatatype.isEnum(), dsDatatype.isText(), dsDatatype.isRefObj()); 1150 log.trace("AttributeCommonIO():read ioType isVLEN={}", dsDatatype.isVLEN()); 1151 1152 long lsize = 1; 1153 for (int j = 0; j < dims.length; j++) 1154 lsize *= dims[j]; 1155 log.trace("AttributeCommonIO():read ioType dt_size={} lsize={}", dt_size, lsize); 1156 1157 try { 1158 if (dsDatatype.isVarStr()) { 1159 String[] strs = new String[(int) lsize]; 1160 for (int j = 0; j < lsize; j++) 1161 strs[j] = ""; 1162 try { 1163 log.trace("AttributeCommonIO():read ioType H5Aread_VLStrings"); 1164 H5.H5Aread_VLStrings(attr_id, tid, strs); 1165 } 1166 catch (Exception ex) { 1167 log.debug("AttributeCommonIO():read ioType H5Aread_VLStrings failure: ", ex); 1168 ex.printStackTrace(); 1169 } 1170 theData = strs; 1171 } 1172 else if (dsDatatype.isCompound()) { 1173 String[] strs = new String[(int) lsize]; 1174 for (int j = 0; j < lsize; j++) 1175 strs[j] = ""; 1176 try { 1177 log.trace("AttributeCommonIO():read ioType H5AreadComplex"); 1178 H5.H5AreadComplex(attr_id, tid, strs); 1179 } 1180 catch (Exception ex) { 1181 ex.printStackTrace(); 1182 } 1183 theData = strs; 1184 } 1185 else if (dsDatatype.isVLEN()) { 1186 log.trace("AttributeCommonIO():read ioType:VLEN-REF H5Aread isArray()={}", dsDatatype.isArray()); 1187 theData = new ArrayList[(int)lsize]; 1188 for (int j = 0; j < lsize; j++) 1189 ((ArrayList[])theData)[j] = new ArrayList<byte[]>(); 1190 1191 try { 1192 H5.H5AreadVL(attr_id, tid, (Object[])theData); 1193 } 1194 catch (Exception ex) { 1195 log.debug("AttributeCommonIO():read ioType:VLEN-REF H5Aread failure: ", ex); 1196 ex.printStackTrace(); 1197 } 1198 } 1199 else { 1200 Object attr_data = null; 1201 try { 1202 attr_data = H5Datatype.allocateArray(dsDatatype, (int) lsize); 1203 } 1204 catch (OutOfMemoryError e) { 1205 log.debug("AttributeCommonIO():read ioType out of memory", e); 1206 theData = null; 1207 } 1208 if (attr_data == null) 1209 log.debug("AttributeCommonIO():read ioType allocateArray returned null"); 1210 1211 log.trace("AttributeCommonIO():read ioType H5Aread isArray()={}", dsDatatype.isArray()); 1212 try { 1213 H5.H5Aread(attr_id, tid, attr_data); 1214 } 1215 catch (Exception ex) { 1216 log.debug("AttributeCommonIO():read ioType H5Aread failure: ", ex); 1217 ex.printStackTrace(); 1218 } 1219 1220 /* 1221 * Perform any necessary data conversions. 
1222 */ 1223 if (dsDatatype.isText() && convertByteToString && (attr_data instanceof byte[])) { 1224 log.trace("AttributeCommonIO():read ioType isText: converting byte array to string array"); 1225 theData = byteToString((byte[]) attr_data, (int) dsDatatype.getDatatypeSize()); 1226 } 1227 else if (dsDatatype.isFloat() && dt_size == 16) { 1228 log.trace("AttributeCommonIO():read ioType isFloat: converting byte array to BigDecimal array"); 1229 theData = dsDatatype.byteToBigDecimal(0, (int)nPoints, (byte[]) attr_data); 1230 } 1231 else if (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isFloat() && dsDatatype.getDatatypeBase().getDatatypeSize() == 16) { 1232 log.trace("AttributeCommonIO():read ioType isArray and isFloat: converting byte array to BigDecimal array"); 1233 long[] arrayDims = dsDatatype.getArrayDims(); 1234 int asize = (int)nPoints; 1235 for (int j = 0; j < arrayDims.length; j++) { 1236 asize *= arrayDims[j]; 1237 } 1238 theData = ((H5Datatype)dsDatatype.getDatatypeBase()).byteToBigDecimal(0, asize, (byte[]) attr_data); 1239 } 1240 else if (dsDatatype.isRef() && (attr_data instanceof byte[])) { 1241 log.trace("AttributeCommonIO():read ioType isRef: converting byte array to List of bytes"); 1242 theData = new ArrayList<byte[]>((int)lsize); 1243 for (int m = 0; m < (int) lsize; m++) { 1244 byte[] curBytes = new byte[(int)dsDatatype.getDatatypeSize()]; 1245 try { 1246 System.arraycopy(attr_data, m * (int)dt_size, curBytes, 0, (int)dsDatatype.getDatatypeSize()); 1247 ((ArrayList<byte[]>)theData).add(curBytes); 1248 } 1249 catch (Exception err) { 1250 log.trace("AttributeCommonIO(): arraycopy failure: ", err); 1251 } 1252 } 1253 } 1254 else 1255 theData = attr_data; 1256 } 1257 } 1258 catch (HDF5DataFiltersException exfltr) { 1259 log.debug("AttributeCommonIO():read ioType read failure: ", exfltr); 1260 throw new Exception("Filter not available exception: " + exfltr.getMessage(), exfltr); 1261 } 1262 catch (Exception ex) { 1263 log.debug("AttributeCommonIO():read ioType read failure: ", ex); 1264 throw new Exception(ex.getMessage(), ex); 1265 } 1266 finally { 1267 dsDatatype.close(tid); 1268 } 1269 log.trace("AttributeCommonIO():read ioType data: {}", theData); 1270 originalBuf = theData; 1271 isDataLoaded = true; 1272 } // H5File.IO_TYPE.READ 1273 else { 1274 /* 1275 * Perform any necessary data conversions before writing the data. 1276 * 1277 * Note that v-len strings do not get converted, regardless of 1278 * conversion request type. 
1279 */ 1280 Object tmpData = objBuf; 1281 try { 1282 // Check if we need to convert integer data 1283 String cname = objBuf.getClass().getName(); 1284 char dname = cname.charAt(cname.lastIndexOf("[") + 1); 1285 boolean doIntConversion = (((dt_size == 1) && (dname == 'S')) || ((dt_size == 2) && (dname == 'I')) 1286 || ((dt_size == 4) && (dname == 'J')) || (dsDatatype.isUnsigned() && unsignedConverted)); 1287 1288 if (doIntConversion) { 1289 log.trace("AttributeCommonIO(): converting integer data to unsigned C-type integers"); 1290 tmpData = convertToUnsignedC(objBuf, null); 1291 } 1292 else if (dsDatatype.isText() && !dsDatatype.isVarStr() && convertByteToString && !(objBuf instanceof byte[])) { 1293 log.trace("AttributeCommonIO(): converting string array to byte array"); 1294 tmpData = stringToByte((String[]) objBuf, (int)dt_size); 1295 } 1296 else if (dsDatatype.isEnum() && (Array.get(objBuf, 0) instanceof String)) { 1297 log.trace("AttributeCommonIO(): converting enum names to values"); 1298 tmpData = dsDatatype.convertEnumNameToValue((String[]) objBuf); 1299 } 1300 else if (dsDatatype.isFloat() && dsDatatype.getDatatypeSize() == 16) { 1301 log.trace("AttributeCommonIO(): isFloat: converting BigDecimal array to byte array"); 1302 throw new Exception("data conversion failure: cannot write BigDecimal values"); 1303 //tmpData = dsDatatype.bigDecimalToByte(0, (int)nPoints, (BigDecimal[]) objBuf); 1304 } 1305 } 1306 catch (Exception ex) { 1307 log.debug("AttributeCommonIO(): data conversion failure: ", ex); 1308 throw new Exception("data conversion failure: " + ex.getMessage()); 1309 } 1310 1311 /* 1312 * Actually write the data now that everything has been setup. 1313 */ 1314 try { 1315 if (dsDatatype.isVarStr()) { 1316 log.trace("AttributeCommonIO(): H5Awrite_VLStrings aid={} tid={}", attr_id, tid); 1317 1318 H5.H5Awrite_VLStrings(attr_id, tid, (Object[]) tmpData); 1319 } 1320 else if (dsDatatype.isVLEN() || (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isVLEN())) { 1321 log.trace("AttributeCommonIO(): H5AwriteVL aid={} tid={}", attr_id, tid); 1322 1323 H5.H5AwriteVL(attr_id, tid, (Object[]) tmpData); 1324 } 1325 else { 1326 log.trace("AttributeCommonIO(): dsDatatype.isRef()={} data is String={}", dsDatatype.isRef(), tmpData instanceof String); 1327 if (dsDatatype.isRef() && tmpData instanceof String) { 1328 // reference is a path+name to the object 1329 log.trace("AttributeCommonIO(): Attribute class is CLASS_REFERENCE"); 1330 log.trace("AttributeCommonIO(): H5Awrite aid={} tid={}", attr_id, tid); 1331 byte[] refBuf = H5.H5Rcreate_object(getFID(), (String) tmpData, HDF5Constants.H5P_DEFAULT); 1332 if (refBuf != null) { 1333 H5.H5Awrite(attr_id, tid, refBuf); 1334 H5.H5Rdestroy(refBuf); 1335 } 1336 } 1337 else if (Array.get(tmpData, 0) instanceof String) { 1338 int len = ((String[]) tmpData).length; 1339 byte[] bval = Dataset.stringToByte((String[]) tmpData, (int)dt_size); 1340 if (bval != null && bval.length == dt_size * len) { 1341 bval[bval.length - 1] = 0; 1342 tmpData = bval; 1343 } 1344 log.trace("AttributeCommonIO(): String={}: {}", tmpData); 1345 log.trace("AttributeCommonIO(): H5Awrite aid={} tid={}", attr_id, tid); 1346 H5.H5Awrite(attr_id, tid, tmpData); 1347 } 1348 else { 1349 log.trace("AttributeCommonIO(): H5Awrite aid={} tid={}", attr_id, tid); 1350 H5.H5Awrite(attr_id, tid, tmpData); 1351 } 1352 } 1353 } 1354 catch (Exception ex) { 1355 log.debug("AttributeCommonIO(): write failure: ", ex); 1356 throw new Exception(ex.getMessage()); 1357 } 1358 finally { 1359 
dsDatatype.close(tid); 1360 } 1361 } // H5File.IO_TYPE.WRITE 1362 1363 return theData; 1364 } 1365 1366 /** 1367 * Read a subset of an attribute for hdf5 object data. 1368 * 1369 * @return the selected attribute data 1370 * 1371 * @throws Exception 1372 * if the data can not be retrieved 1373 */ 1374 @Override 1375 public Object AttributeSelection() throws Exception { 1376 H5Datatype dsDatatype = (H5Datatype) getDatatype(); 1377 int dsSize = (int)dsDatatype.getDatatypeSize(); 1378 if (dsDatatype.isArray()) 1379 dsSize = (int)dsDatatype.getDatatypeBase().getDatatypeSize(); 1380 Object theData = H5Datatype.allocateArray(dsDatatype, (int)nPoints); 1381 if (dsDatatype.isText() && convertByteToString && (theData instanceof byte[])) { 1382 log.trace("scalarAttributeSelection(): isText: converting byte array to string array"); 1383 theData = byteToString((byte[]) theData, dsSize); 1384 } 1385 else if (dsDatatype.isFloat() && dsSize == 16) { 1386 log.trace("scalarAttributeSelection(): isFloat: converting byte array to BigDecimal array"); 1387 theData = dsDatatype.byteToBigDecimal(0, (int)nPoints, (byte[]) theData); 1388 } 1389 else if (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isFloat() && dsSize == 16) { 1390 log.trace("scalarAttributeSelection(): isArray and isFloat: converting byte array to BigDecimal array"); 1391 long[] arrayDims = dsDatatype.getArrayDims(); 1392 int asize = (int)nPoints; 1393 for (int j = 0; j < arrayDims.length; j++) { 1394 asize *= arrayDims[j]; 1395 } 1396 theData = ((H5Datatype)dsDatatype.getDatatypeBase()).byteToBigDecimal(0, asize, (byte[]) theData); 1397 } 1398 Object theOrig = originalBuf; 1399 log.trace("scalarAttributeSelection(): originalBuf={} with datatype size={}", originalBuf, dsSize); 1400 1401 //Copy the selection from originalBuf to theData 1402 //Only three dims are involved and selected data is 2 dimensions 1403 // getHeight() is the row dimension 1404 // getWidth() is the col dimension 1405 // getDepth() is the frame dimension 1406 long[] start = getStartDims(); 1407 long curFrame = start[selectedIndex[2]]; 1408 int k = (int)startDims[selectedIndex[2]] * (int)getDepth(); 1409 for (int col = 0; col < (int)getWidth(); col++) { 1410 for (int row = 0; row < (int)getHeight(); row++) { 1411 int index = row * (int)getWidth() + col; 1412 log.trace("scalarAttributeSelection(): point[{}] row:col:k={}:{}:{}", curFrame, row, col, k); 1413 int fromIndex = ((int)curFrame * (int)getWidth() * (int)getHeight() + 1414 col * (int)getHeight() + row); 1415 int toIndex = (col * (int)getHeight() + row); 1416 int objSize = 1; 1417 if (dsDatatype.isArray()) { 1418 long[] arrayDims = dsDatatype.getArrayDims(); 1419 objSize = arrayDims.length; 1420 try { 1421 System.arraycopy(theOrig, fromIndex, theData, toIndex, objSize); 1422 } 1423 catch (Exception err) { 1424 log.debug("scalarAttributeSelection(): arraycopy failure: ", err); 1425 } 1426 } 1427 else if (dsDatatype.isStdRef()) { 1428 objSize = (int)HDF5Constants.H5R_REF_BUF_SIZE; 1429 fromIndex = fromIndex * HDF5Constants.H5R_REF_BUF_SIZE; 1430 toIndex = toIndex * HDF5Constants.H5R_REF_BUF_SIZE; 1431 try { 1432 System.arraycopy(theOrig, fromIndex, theData, toIndex, objSize); 1433 } 1434 catch (Exception err) { 1435 log.debug("scalarAttributeSelection(): arraycopy failure: ", err); 1436 } 1437 } 1438 else { 1439 if (theOrig instanceof ArrayList) { 1440 if (dsDatatype.isRef()) { 1441 byte[] rElements = (byte[]) ((ArrayList) theOrig).get(fromIndex); 1442 try { 1443 System.arraycopy(rElements, 0, theData, toIndex * 
dsSize, dsSize); 1444 } 1445 catch (Exception err) { 1446 log.trace("scalarAttributeSelection(): refarraycopy failure: ", err); 1447 } 1448 } 1449 else { 1450 Object value = Array.get(theOrig, fromIndex); 1451 log.trace("scalarAttributeSelection(): value={}", value); 1452 ((ArrayList<Object>)theData).add(toIndex, value); 1453 } 1454 } 1455 else 1456 theData = theOrig; 1457 } 1458 } 1459 } 1460 1461 log.trace("scalarAttributeSelection(): theData={}", theData); 1462 return theData; 1463 } 1464}