/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the COPYING file, which can be found  *
 * at the root of the source code distribution tree,                         *
 * or in https://www.hdfgroup.org/licenses.                                  *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.h5;

import java.lang.reflect.Array;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.text.DecimalFormat;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Vector;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
import hdf.hdf5lib.HDFNativeData;
import hdf.hdf5lib.exceptions.HDF5DataFiltersException;
import hdf.hdf5lib.exceptions.HDF5Exception;
import hdf.hdf5lib.exceptions.HDF5LibraryException;

import hdf.object.Attribute;
import hdf.object.DataFormat;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.MetaDataContainer;
import hdf.object.ScalarDS;

import hdf.object.h5.H5Attribute;
import hdf.object.h5.H5Datatype;
import hdf.object.h5.H5ReferenceType;

/**
 * An attribute is a (name, value) pair of metadata attached to a primary data object such as a dataset,
 * group or named datatype.
 *
 * Like a dataset, an attribute has a name, datatype and dataspace.
 *
 * For more details on attributes, see <a href="https://hdfgroup.github.io/hdf5/_h5_a__u_g.html#sec_attribute">HDF5
 * Attributes in the HDF5 User Guide</a>.
 *
 * The following code is an example of an attribute with a 1D integer array of two elements.
 *
 * <pre>
 * // Example of creating a new attribute
 * // The name of the new attribute
 * String name = "Data range";
 * // Creating an unsigned 1-byte integer datatype
 * Datatype type = new H5Datatype(Datatype.CLASS_INTEGER, // class
 *                                1,                      // size in bytes
 *                                Datatype.ORDER_LE,      // byte order
 *                                Datatype.SIGN_NONE);    // unsigned
 * // 1-D array of size two
 * long[] dims = {2};
 * // The value of the attribute
 * int[] value = {0, 255};
 * // Create a new attribute; parentObj is the dataset, group or named datatype
 * // to which the attribute is attached
 * Attribute dataRange = new H5ScalarAttr(parentObj, name, type, dims);
 * // Set the attribute value
 * dataRange.setValue(value);
 * // See FileFormat.writeAttribute() for how to attach an attribute to an object,
 * @see hdf.object.FileFormat#writeAttribute(HObject, Attribute, boolean)
 * </pre>
 *
 *
 * For an atomic datatype, the value of an Attribute will be a 1D array of integers, floats, or strings.
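 *
 * As a sketch of reading a value back (assuming {@code attr} already refers to an
 * H5ScalarAttr obtained from the parent object's metadata), the buffer returned by
 * getAttributeData() can be cast to the matching Java array type:
 *
 * <pre>
 * try {
 *     Object value = attr.getAttributeData(); // lazily reads the value from the file
 *     if (value instanceof int[]) {
 *         int[] ivalue = (int[]) value;       // the element type depends on the HDF5 datatype
 *         System.out.println(ivalue[0] + ", " + ivalue[1]);
 *     }
 * }
 * catch (Exception ex) {
 *     ex.printStackTrace();
 * }
 * </pre>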
 *
 * @see hdf.object.Datatype
 *
 * @version 1.0 6/15/2021
 * @author Allen Byrne
 */
public class H5ScalarAttr extends ScalarDS implements H5Attribute
{
    private static final long serialVersionUID = 2072473407027648309L;

    private static final Logger log = LoggerFactory.getLogger(H5ScalarAttr.class);

    /** The HObject to which this H5ScalarAttr is attached, Attribute interface */
    protected HObject parentObject;

    /** additional information and properties for the attribute, Attribute interface */
    private transient Map<String, Object> properties;

    /**
     * flag to indicate if the datatype in file is the same as datatype in memory
     */
    protected boolean isNativeDatatype = false;

    /**
     * Create an attribute with specified name, data type and dimension sizes.
     *
     * For a scalar attribute, the dimension size can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general class
     * independent of the file format; for example, the attribute implementation
     * applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = "CLASS";
     * String[] classValue = { "IMAGE" };
     * Datatype attrType = null;
     * try {
     *     attrType = new H5Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * Attribute attr = new H5ScalarAttr(parentObj, attrName, attrType, attrDims);
     * attr.setValue(classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this H5ScalarAttr is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for scalar attribute
     *
     * @see hdf.object.Datatype
     */
    public H5ScalarAttr(HObject parentObj, String attrName, Datatype attrType, long[] attrDims) {
        this(parentObj, attrName, attrType, attrDims, null);
    }

    /**
     * Create an attribute with specified name and value.
     *
     * For a scalar attribute, the dimension size can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general class
     * independent of the file format; for example, the attribute implementation
     * applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = "CLASS";
     * String[] classValue = { "IMAGE" };
     * Datatype attrType = null;
     * try {
     *     attrType = new H5Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * Attribute attr = new H5ScalarAttr(parentObj, attrName, attrType, attrDims, classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this H5ScalarAttr is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
182 * @param attrDims 183 * the dimension sizes of the attribute, null for scalar attribute 184 * @param attrValue 185 * the value of the attribute, null if no value 186 * 187 * @see hdf.object.Datatype 188 */ 189 @SuppressWarnings({ "rawtypes", "unchecked", "deprecation" }) 190 public H5ScalarAttr(HObject parentObj, String attrName, Datatype attrType, long[] attrDims, Object attrValue) { 191 super((parentObj == null) ? null : parentObj.getFileFormat(), attrName, 192 (parentObj == null) ? null : parentObj.getFullName(), null); 193 194 log.trace("H5ScalarAttr: start {}", parentObj); 195 this.parentObject = parentObj; 196 197 datatype = attrType; 198 199 if (attrValue != null) { 200 data = attrValue; 201 originalBuf = attrValue; 202 isDataLoaded = true; 203 } 204 properties = new HashMap(); 205 206 if (attrDims == null) { 207 rank = 1; 208 dims = new long[] { 1 }; 209 isScalar = true; 210 } 211 else { 212 dims = attrDims; 213 rank = dims.length; 214 isScalar = false; 215 } 216 217 selectedDims = new long[rank]; 218 startDims = new long[rank]; 219 selectedStride = new long[rank]; 220 221 log.trace("attrName={}, attrType={}, attrValue={}, rank={}, isUnsigned={}", 222 attrName, attrType.getDescription(), data, rank, getDatatype().isUnsigned()); 223 224 resetSelection(); 225 } 226 227 /* 228 * (non-Javadoc) 229 * 230 * @see hdf.object.HObject#open() 231 */ 232 @Override 233 public long open() { 234 if (parentObject == null) { 235 log.debug("open(): attribute's parent object is null"); 236 return HDF5Constants.H5I_INVALID_HID; 237 } 238 239 long aid = HDF5Constants.H5I_INVALID_HID; 240 long pObjID = HDF5Constants.H5I_INVALID_HID; 241 242 try { 243 pObjID = parentObject.open(); 244 if (pObjID >= 0) { 245 if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5))) { 246 log.trace("open(): FILE_TYPE_HDF5"); 247 if (H5.H5Aexists(pObjID, getName())) 248 aid = H5.H5Aopen(pObjID, getName(), HDF5Constants.H5P_DEFAULT); 249 } 250 } 251 252 log.trace("open(): aid={}", aid); 253 } 254 catch (Exception ex) { 255 log.debug("open(): Failed to open attribute {}: ", getName(), ex); 256 aid = HDF5Constants.H5I_INVALID_HID; 257 } 258 finally { 259 parentObject.close(pObjID); 260 } 261 262 return aid; 263 } 264 265 /* 266 * (non-Javadoc) 267 * 268 * @see hdf.object.HObject#close(int) 269 */ 270 @Override 271 public void close(long aid) { 272 if (aid >= 0) { 273 if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5))) { 274 log.trace("close(): FILE_TYPE_HDF5"); 275 try { 276 H5.H5Aclose(aid); 277 } 278 catch (HDF5Exception ex) { 279 log.debug("close(): H5Aclose({}) failure: ", aid, ex); 280 } 281 } 282 } 283 } 284 285 /** 286 * Retrieves datatype and dataspace information from file and sets the attribute 287 * in memory. 288 * 289 * The init() is designed to support lazy operation in a attribute object. When a 290 * data object is retrieved from file, the datatype, dataspace and raw data are 291 * not loaded into memory. When it is asked to read the raw data from file, 292 * init() is first called to get the datatype and dataspace information, then 293 * load the raw data from file. 294 */ 295 @Override 296 public void init() { 297 if (inited) { 298 // already called. 
Initialize only once 299 resetSelection(); 300 log.trace("init(): H5ScalarAttr already initialized"); 301 return; 302 } 303 304 long aid = HDF5Constants.H5I_INVALID_HID; 305 long tid = HDF5Constants.H5I_INVALID_HID; 306 long sid = HDF5Constants.H5I_INVALID_HID; 307 long nativeTID = HDF5Constants.H5I_INVALID_HID; 308 309 aid = open(); 310 if (aid >= 0) { 311 try { 312 sid = H5.H5Aget_space(aid); 313 rank = H5.H5Sget_simple_extent_ndims(sid); 314 space_type = H5.H5Sget_simple_extent_type(sid); 315 if (space_type == HDF5Constants.H5S_NULL) 316 isNULL = true; 317 else 318 isNULL = false; 319 tid = H5.H5Aget_type(aid); 320 log.trace("init(): tid={} sid={} rank={} space_type={}", tid, sid, rank, space_type); 321 322 if (rank == 0) { 323 // a scalar data point 324 isScalar = true; 325 rank = 1; 326 dims = new long[] { 1 }; 327 log.trace("init(): rank is a scalar data point"); 328 } 329 else { 330 isScalar = false; 331 dims = new long[rank]; 332 maxDims = new long[rank]; 333 H5.H5Sget_simple_extent_dims(sid, dims, maxDims); 334 log.trace("init(): rank={}, dims={}, maxDims={}", rank, dims, maxDims); 335 } 336 337 if (datatype == null) { 338 try { 339 int nativeClass = H5.H5Tget_class(tid); 340 if (nativeClass == HDF5Constants.H5T_REFERENCE) { 341 long lsize = 1; 342 if (rank > 0) { 343 log.trace("init(): rank={}, dims={}", rank, dims); 344 for (int j = 0; j < dims.length; j++) { 345 lsize *= dims[j]; 346 } 347 } 348 datatype = new H5ReferenceType(getFileFormat(), lsize, tid); 349 } 350 else 351 datatype = new H5Datatype(getFileFormat(), tid); 352 353 log.trace("init(): tid={} is tclass={} has isText={} : isNamed={} : isVLEN={} : isEnum={} : isUnsigned={} : isRegRef={}", 354 tid, datatype.getDatatypeClass(), ((H5Datatype) datatype).isText(), datatype.isNamed(), datatype.isVLEN(), 355 datatype.isEnum(), datatype.isUnsigned(), ((H5Datatype) datatype).isRegRef()); 356 } 357 catch (Exception ex) { 358 log.debug("init(): failed to create datatype for attribute: ", ex); 359 datatype = null; 360 } 361 } 362 363 // Check if the datatype in the file is the native datatype 364 try { 365 nativeTID = H5.H5Tget_native_type(tid); 366 isNativeDatatype = H5.H5Tequal(tid, nativeTID); 367 log.trace("init(): isNativeDatatype={}", isNativeDatatype); 368 } 369 catch (Exception ex) { 370 log.debug("init(): check if native type failure: ", ex); 371 } 372 373 inited = true; 374 } 375 catch (HDF5Exception ex) { 376 log.debug("init(): ", ex); 377 } 378 finally { 379 try { 380 H5.H5Tclose(nativeTID); 381 } 382 catch (Exception ex2) { 383 log.debug("init(): H5Tclose(nativeTID {}) failure: ", nativeTID, ex2); 384 } 385 try { 386 H5.H5Tclose(tid); 387 } 388 catch (HDF5Exception ex2) { 389 log.debug("init(): H5Tclose(tid {}) failure: ", tid, ex2); 390 } 391 try { 392 H5.H5Sclose(sid); 393 } 394 catch (HDF5Exception ex2) { 395 log.debug("init(): H5Sclose(sid {}) failure: ", sid, ex2); 396 } 397 398 } 399 400 close(aid); 401 402 startDims = new long[rank]; 403 selectedDims = new long[rank]; 404 405 resetSelection(); 406 } 407 else { 408 log.debug("init(): failed to open attribute"); 409 } 410 } 411 412 /** 413 * Returns the datatype of the data object. 414 * 415 * @return the datatype of the data object. 
416 */ 417 @Override 418 public Datatype getDatatype() { 419 if (!inited) 420 init(); 421 422 if (datatype == null) { 423 long aid = HDF5Constants.H5I_INVALID_HID; 424 long tid = HDF5Constants.H5I_INVALID_HID; 425 426 aid = open(); 427 if (aid >= 0) { 428 try { 429 tid = H5.H5Aget_type(aid); 430 log.trace("getDatatype(): isNativeDatatype={}", isNativeDatatype); 431 if (!isNativeDatatype) { 432 long tmptid = -1; 433 try { 434 tmptid = H5Datatype.toNative(tid); 435 if (tmptid >= 0) { 436 try { 437 H5.H5Tclose(tid); 438 } 439 catch (Exception ex2) { 440 log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex2); 441 } 442 tid = tmptid; 443 } 444 } 445 catch (Exception ex) { 446 log.debug("getDatatype(): toNative: ", ex); 447 } 448 } 449 int nativeClass = H5.H5Tget_class(tid); 450 if (nativeClass == HDF5Constants.H5T_REFERENCE) { 451 long lsize = 1; 452 long sid = H5.H5Aget_space(aid); 453 int rank = H5.H5Sget_simple_extent_ndims(sid); 454 if (rank > 0) { 455 long dims[] = new long[rank]; 456 H5.H5Sget_simple_extent_dims(sid, dims, null); 457 log.trace("getDatatype(): rank={}, dims={}", rank, dims); 458 for (int j = 0; j < dims.length; j++) { 459 lsize *= dims[j]; 460 } 461 } 462 datatype = new H5ReferenceType(getFileFormat(), lsize, tid); 463 } 464 else 465 datatype = new H5Datatype(getFileFormat(), tid); 466 } 467 catch (Exception ex) { 468 log.debug("getDatatype(): ", ex); 469 } 470 finally { 471 try { 472 H5.H5Tclose(tid); 473 } 474 catch (HDF5Exception ex) { 475 log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex); 476 } 477 try { 478 H5.H5Aclose(aid); 479 } 480 catch (HDF5Exception ex) { 481 log.debug("getDatatype(): H5Aclose(aid {}) failure: ", aid, ex); 482 } 483 } 484 } 485 } 486 487 return datatype; 488 } 489 490 /** 491 * Returns the data buffer of the attribute in memory. 492 * 493 * If data is already loaded into memory, returns the data; otherwise, calls 494 * read() to read data from file into a memory buffer and returns the memory 495 * buffer. 496 * 497 * The whole attribute is read into memory. Users can also select 498 * a subset from the whole data. Subsetting is done in an implicit way. 499 * 500 * <b>How to Select a Subset</b> 501 * 502 * A selection is specified by three arrays: start, stride and count. 503 * <ol> 504 * <li>start: offset of a selection 505 * <li>stride: determines how many elements to move in each dimension 506 * <li>count: number of elements to select in each dimension 507 * </ol> 508 * getStartDims(), getStride() and getSelectedDims() returns the start, 509 * stride and count arrays respectively. Applications can make a selection 510 * by changing the values of the arrays. 511 * 512 * The following example shows how to make a subset. In the example, the 513 * attribute is a 4-dimensional array of [200][100][50][10], i.e. 
dims[0]=200;
     * dims[1]=100; dims[2]=50; dims[3]=10; <br>
     * We want to select every other data point in dims[1] and dims[2].
     *
     * <pre>
     * int rank = attribute.getRank(); // number of dimensions of the attribute
     * long[] dims = attribute.getDims(); // the dimension sizes of the attribute
     * long[] selected = attribute.getSelectedDims(); // the selected size of the attribute
     * long[] start = attribute.getStartDims(); // the offset of the selection
     * long[] stride = attribute.getStride(); // the stride of the attribute
     * int[] selectedIndex = attribute.getSelectedIndex(); // the selected dimensions for display
     *
     * // select dim1 and dim2 as 2D data for display, and slice through dim0
     * selectedIndex[0] = 1;
     * selectedIndex[1] = 2;
     * selectedIndex[2] = 0;
     *
     * // reset the selection arrays
     * for (int i = 0; i < rank; i++) {
     *     start[i] = 0;
     *     selected[i] = 1;
     *     stride[i] = 1;
     * }
     *
     * // set stride to 2 on dim1 and dim2 so that every other data point is
     * // selected.
     * stride[1] = 2;
     * stride[2] = 2;
     *
     * // set the selection size of dim1 and dim2
     * selected[1] = dims[1] / stride[1];
     * selected[2] = dims[2] / stride[2];
     *
     * // when H5ScalarAttr.getData() is called, the selection above will be used since
     * // the dimension arrays are passed by reference. Changes to these arrays
     * // outside the attribute object directly change the values of the arrays
     * // in the attribute object.
     * </pre>
     *
     * For H5ScalarAttr, the memory data buffer is a one-dimensional array of byte,
     * short, int, float, double or String type based on the datatype of the
     * attribute.
     *
     * @return the memory buffer of the attribute.
557 * 558 * @throws Exception if object can not be read 559 * @throws OutOfMemoryError if memory is exhausted 560 */ 561 @Override 562 public Object getData() throws Exception, OutOfMemoryError { 563 log.trace("getData(): isDataLoaded={}", isDataLoaded); 564 if (!isDataLoaded) 565 data = read(); // load the data, attributes read all data 566 567 nPoints = 1; 568 log.trace("getData(): selectedDims length={}", selectedDims.length); 569 int point_len = selectedDims.length; 570 //Partial data for 3 or more dimensions 571 if (rank > 2) 572 point_len = 3; 573 for (int j = 0; j < point_len; j++) { 574 log.trace("getData(): selectedDims[{}]={}", j, selectedDims[j]); 575 nPoints *= selectedDims[j]; 576 } 577 log.trace("getData: read {}", nPoints); 578 579 // apply the selection for 3 or more dimensions 580 // selection only expects to use 3 selectedDims 581 // where selectedIndex[0] is the row dimension 582 // where selectedIndex[1] is the col dimension 583 // where selectedIndex[2] is the frame dimension 584 if (rank > 2) 585 data = AttributeSelection(); 586 587 return data; 588 } 589 590 /* 591 * (non-Javadoc) 592 * @see hdf.object.Dataset#copy(hdf.object.Group, java.lang.String, long[], java.lang.Object) 593 */ 594 @Override 595 public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff) throws Exception { 596 // not supported 597 throw new UnsupportedOperationException("copy operation unsupported for H5."); 598 } 599 600 /* 601 * (non-Javadoc) 602 * 603 * @see hdf.object.Attribute#readBytes() 604 */ 605 @Override 606 public byte[] readBytes() throws HDF5Exception { 607 byte[] theData = null; 608 609 if (!isInited()) 610 init(); 611 612 long aid = open(); 613 if (aid >= 0) { 614 long tid = HDF5Constants.H5I_INVALID_HID; 615 616 try { 617 long[] lsize = { 1 }; 618 for (int j = 0; j < selectedDims.length; j++) 619 lsize[0] *= selectedDims[j]; 620 621 tid = H5.H5Aget_type(aid); 622 long size = H5.H5Tget_size(tid) * lsize[0]; 623 log.trace("readBytes(): size = {}", size); 624 625 if (size < Integer.MIN_VALUE || size > Integer.MAX_VALUE) 626 throw new Exception("Invalid int size"); 627 628 theData = new byte[(int)size]; 629 630 log.trace("readBytes(): read attribute id {} of size={}", tid, lsize); 631 H5.H5Aread(aid, tid, theData); 632 } 633 catch (Exception ex) { 634 log.debug("readBytes(): failed to read data: ", ex); 635 } 636 finally { 637 try { 638 H5.H5Tclose(tid); 639 } 640 catch (HDF5Exception ex2) { 641 log.debug("readBytes(): H5Tclose(tid {}) failure: ", tid, ex2); 642 } 643 close(aid); 644 } 645 } 646 647 return theData; 648 } 649 650 /** 651 * Reads the data from file. 652 * 653 * read() reads the data from file to a memory buffer and returns the memory 654 * buffer. The attribute object does not hold the memory buffer. To store the 655 * memory buffer in the attribute object, one must call getData(). 656 * 657 * By default, the whole attribute is read into memory. 658 * 659 * For ScalarAttr, the memory data buffer is a one-dimensional array of byte, 660 * short, int, float, double or String type based on the datatype of the 661 * attribute. 662 * 663 * @return the data read from file. 
664 * 665 * @see #getData() 666 * @see hdf.object.DataFormat#read() 667 * 668 * @throws Exception 669 * if object can not be read 670 */ 671 @Override 672 public Object read() throws Exception { 673 Object readData = null; 674 675 if (!isInited()) 676 init(); 677 678 try { 679 readData = scalarAttributeCommonIO(H5File.IO_TYPE.READ, null); 680 } 681 catch (Exception ex) { 682 log.debug("read(): failed to read scalar attribute: ", ex); 683 throw new Exception("failed to read scalar attribute: " + ex.getMessage(), ex); 684 } 685 686 return readData; 687 } 688 689 /** 690 * Writes the given data buffer into this attribute in a file. 691 * 692 * @param buf 693 * The buffer that contains the data values. 694 * 695 * @throws Exception 696 * If there is an error at the HDF5 library level. 697 */ 698 @Override 699 public void write(Object buf) throws Exception { 700 if (this.getFileFormat().isReadOnly()) 701 throw new Exception("cannot write to scalar attribute in file opened as read-only"); 702 703 if (!buf.equals(data)) 704 setData(buf); 705 706 if (parentObject == null) { 707 log.debug("write(Object): parent object is null; nowhere to write attribute to"); 708 return; 709 } 710 711 ((MetaDataContainer) getParentObject()).writeMetadata(this); 712 713 try { 714 scalarAttributeCommonIO(H5File.IO_TYPE.WRITE, buf); 715 } 716 catch (Exception ex) { 717 log.debug("write(Object): failed to write to scalar attribute: ", ex); 718 throw new Exception("failed to write to scalar attribute: " + ex.getMessage(), ex); 719 } 720 resetSelection(); 721 } 722 723 private Object scalarAttributeCommonIO(H5File.IO_TYPE ioType, Object writeBuf) throws Exception { 724 H5Datatype dsDatatype = (H5Datatype)getDatatype(); 725 Object theData = null; 726 727 /* 728 * I/O type-specific pre-initialization. 729 */ 730 if (ioType == H5File.IO_TYPE.WRITE) { 731 if (writeBuf == null) { 732 log.debug("scalarAttributeCommonIO(): writeBuf is null"); 733 throw new Exception("write buffer is null"); 734 } 735 } 736 737 long aid = open(); 738 if (aid >= 0) { 739 log.trace("scalarAttributeCommonIO(): isDataLoaded={}", isDataLoaded); 740 try { 741 theData = AttributeCommonIO(aid, ioType, writeBuf); 742 } 743 finally { 744 close(aid); 745 } 746 } 747 else 748 log.debug("scalarAttributeCommonIO(): failed to open attribute"); 749 750 return theData; 751 } 752 753 /* Implement interface Attribute */ 754 755 /** 756 * Returns the HObject to which this Attribute is currently "attached". 757 * 758 * @return the HObject to which this Attribute is currently "attached". 759 */ 760 @Override 761 public HObject getParentObject() { 762 return parentObject; 763 } 764 765 /** 766 * Sets the HObject to which this Attribute is "attached". 767 * 768 * @param pObj 769 * the new HObject to which this Attribute is "attached". 770 */ 771 @Override 772 public void setParentObject(HObject pObj) { 773 parentObject = pObj; 774 } 775 776 /** 777 * set a property for the attribute. 778 * 779 * @param key the attribute Map key 780 * @param value the attribute Map value 781 */ 782 @Override 783 public void setProperty(String key, Object value) { 784 properties.put(key, value); 785 } 786 787 /** 788 * get a property for a given key. 789 * 790 * @param key the attribute Map key 791 * 792 * @return the property 793 */ 794 @Override 795 public Object getProperty(String key) { 796 return properties.get(key); 797 } 798 799 /** 800 * get all property keys. 
801 * 802 * @return the Collection of property keys 803 */ 804 @Override 805 public Collection<String> getPropertyKeys() { 806 return properties.keySet(); 807 } 808 809 /** 810 * Returns the name of the object. For example, "Raster Image #2". 811 * 812 * @return The name of the object. 813 */ 814 @Override 815 public final String getAttributeName() { 816 return getName(); 817 } 818 819 /** 820 * Retrieves the attribute data from the file. 821 * 822 * @return the attribute data. 823 * 824 * @throws Exception 825 * if the data can not be retrieved 826 */ 827 @Override 828 public final Object getAttributeData() throws Exception, OutOfMemoryError { 829 return getData(); 830 } 831 832 /** 833 * Returns the datatype of the attribute. 834 * 835 * @return the datatype of the attribute. 836 */ 837 @Override 838 public final Datatype getAttributeDatatype() { 839 return getDatatype(); 840 } 841 842 /** 843 * Returns the space type for the attribute. It returns a 844 * negative number if it failed to retrieve the type information from 845 * the file. 846 * 847 * @return the space type for the attribute. 848 */ 849 @Override 850 public final int getAttributeSpaceType() { 851 return getSpaceType(); 852 } 853 854 /** 855 * Returns the rank (number of dimensions) of the attribute. It returns a 856 * negative number if it failed to retrieve the dimension information from 857 * the file. 858 * 859 * @return the number of dimensions of the attribute. 860 */ 861 @Override 862 public final int getAttributeRank() { 863 return getRank(); 864 } 865 866 /** 867 * Returns the selected size of the rows and columns of the attribute. It returns a 868 * negative number if it failed to retrieve the size information from 869 * the file. 870 * 871 * @return the selected size of the rows and colums of the attribute. 872 */ 873 @Override 874 public final int getAttributePlane() { 875 return (int)getWidth() * (int)getHeight(); 876 } 877 878 /** 879 * Returns the array that contains the dimension sizes of the data value of 880 * the attribute. It returns null if it failed to retrieve the dimension 881 * information from the file. 882 * 883 * @return the dimension sizes of the attribute. 884 */ 885 @Override 886 public final long[] getAttributeDims() { 887 return getDims(); 888 } 889 890 /** 891 * @return true if the dataspace is a NULL; otherwise, returns false. 892 */ 893 @Override 894 public boolean isAttributeNULL() { 895 return isNULL(); 896 } 897 898 /** 899 * @return true if the data is a single scalar point; otherwise, returns false. 900 */ 901 @Override 902 public boolean isAttributeScalar() { 903 return isScalar(); 904 } 905 906 /** 907 * Not for public use in the future. 908 * 909 * setData() is not safe to use because it changes memory buffer 910 * of the dataset object. Dataset operations such as write/read 911 * will fail if the buffer type or size is changed. 912 * 913 * @param d the object data -must be an array of Objects 914 */ 915 @Override 916 public void setAttributeData(Object d) { 917 setData(d); 918 } 919 920 /** 921 * Writes the memory buffer of this dataset to file. 922 * 923 * @throws Exception if buffer can not be written 924 */ 925 @Override 926 public void writeAttribute() throws Exception { 927 write(); 928 } 929 930 /** 931 * Writes the given data buffer into this attribute in a file. 932 * 933 * The data buffer is a vector that contains the data values of compound fields. The data is written 934 * into file as one data blob. 
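     *
     * For example, the value of a two-element integer attribute could be replaced as
     * follows (a sketch; the new values are illustrative and the file must have been
     * opened with write access):
     *
     * <pre>
     * int[] newValue = { 0, 127 };
     * attr.writeAttribute(newValue); // updates the memory buffer and writes it to the file
     * </pre>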
935 * 936 * @param buf 937 * The vector that contains the data values of compound fields. 938 * 939 * @throws Exception 940 * If there is an error at the library level. 941 */ 942 @Override 943 public void writeAttribute(Object buf) throws Exception { 944 write(buf); 945 } 946 947 /** 948 * Returns a string representation of the data value. For 949 * example, "0, 255". 950 * 951 * For a compound datatype, it will be a 1D array of strings with field 952 * members separated by the delimiter. For example, 953 * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int, 954 * float} of three data points. 955 * 956 * @param delimiter 957 * The delimiter used to separate individual data points. It 958 * can be a comma, semicolon, tab or space. For example, 959 * toString(",") will separate data by commas. 960 * 961 * @return the string representation of the data values. 962 */ 963 @Override 964 public String toAttributeString(String delimiter) { 965 return toString(delimiter, -1); 966 } 967 968 /** 969 * Returns a string representation of the data value. For 970 * example, "0, 255". 971 * 972 * For a compound datatype, it will be a 1D array of strings with field 973 * members separated by the delimiter. For example, 974 * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int, 975 * float} of three data points. 976 * 977 * @param delimiter 978 * The delimiter used to separate individual data points. It 979 * can be a comma, semicolon, tab or space. For example, 980 * toString(",") will separate data by commas. 981 * @param maxItems 982 * The maximum number of Array values to return 983 * 984 * @return the string representation of the data values. 985 */ 986 @Override 987 public String toAttributeString(String delimiter, int maxItems) { 988 Object theData = originalBuf; 989 if (theData == null) { 990 log.debug("toAttributeString: value is null"); 991 return null; 992 } 993 994 Class<? extends Object> valClass = theData.getClass(); 995 if (!valClass.isArray() && !getDatatype().isRef()) { 996 log.trace("toAttributeString: finish - not array"); 997 String strValue = theData.toString(); 998 if (maxItems > 0 && strValue.length() > maxItems) 999 // truncate the extra characters 1000 strValue = strValue.substring(0, maxItems); 1001 return strValue; 1002 } 1003 1004 int n = 0; 1005 Datatype dtype = getDatatype(); 1006 // value is an array 1007 if (valClass.isArray()) { 1008 n = Array.getLength(theData); 1009 if (dtype.isRef()) 1010 n /= (int)dtype.getDatatypeSize(); 1011 } 1012 else 1013 n = ((ArrayList<Object[]>)theData).size(); 1014 if ((maxItems > 0) && (n > maxItems)) 1015 n = maxItems; 1016 1017 return toString(theData, dtype, delimiter, n); 1018 } 1019 1020 @Override 1021 protected String toString(Object theData, Datatype theType, String delimiter, int count) { 1022 log.trace("toString: is_enum={} is_unsigned={} count={}", theType.isEnum(), 1023 theType.isUnsigned(), count); 1024 StringBuilder sb = new StringBuilder(); 1025 Class<? 
extends Object> valClass = theData.getClass(); 1026 log.trace("toString:valClass={}", valClass); 1027 1028 H5Datatype dtype = (H5Datatype)theType; 1029 log.trace("toString: count={} isStdRef={}", count, dtype.isStdRef()); 1030 if (dtype.isStdRef()) { 1031 return ((H5ReferenceType)dtype).toString(delimiter, count); 1032 } 1033 else if (dtype.isVLEN() && !dtype.isVarStr()) { 1034 log.trace("toString: vlen"); 1035 String strValue; 1036 1037 for (int k = 0; k < count; k++) { 1038 Object value = Array.get(theData, k); 1039 if (value == null) 1040 strValue = "null"; 1041 else { 1042 if (dtype.getDatatypeBase().isRef()) { 1043 ArrayList<byte[]> ref_value = (ArrayList<byte[]>)value; 1044 log.trace("toString: vlen value={}", ref_value); 1045 strValue = "{"; 1046 for (int m = 0; m < ref_value.size(); m++) { 1047 byte[] curBytes = ref_value.get(m); 1048 if (m > 0) 1049 strValue += ", "; 1050 if (H5ReferenceType.zeroArrayCheck(curBytes)) 1051 strValue += "NULL"; 1052 else { 1053 if (((H5Datatype)dtype.getDatatypeBase()).isStdRef()) { 1054 strValue += H5.H5Rget_obj_name(curBytes, HDF5Constants.H5P_DEFAULT); 1055 } 1056 else if (dtype.getDatatypeBase().getDatatypeSize() == HDF5Constants.H5R_DSET_REG_REF_BUF_SIZE) { 1057 try { 1058 strValue += H5Datatype.descRegionDataset(parentObject.getFileFormat().getFID(), curBytes); 1059 } 1060 catch (Exception ex) { 1061 ex.printStackTrace(); 1062 } 1063 } 1064 else if (dtype.getDatatypeBase().getDatatypeSize() == HDF5Constants.H5R_OBJ_REF_BUF_SIZE) { 1065 try { 1066 strValue += H5Datatype.descReferenceObject(parentObject.getFileFormat().getFID(), curBytes); 1067 } 1068 catch (Exception ex) { 1069 ex.printStackTrace(); 1070 } 1071 } 1072 } 1073 } 1074 strValue += "}"; 1075 } 1076 else 1077 strValue = value.toString(); 1078 } 1079 if (k > 0) 1080 sb.append(", "); 1081 sb.append(strValue); 1082 } 1083 } 1084 else if (dtype.isRef()) { 1085 log.trace("toString: ref"); 1086 String strValue = "NULL"; 1087 byte[] rElements = null; 1088 1089 for (int k = 0; k < count; k++) { 1090 // need to iterate if type is ArrayList 1091 if (theData instanceof ArrayList) 1092 rElements = (byte[]) ((ArrayList) theData).get(k); 1093 else 1094 rElements = (byte[]) Array.get(theData, k); 1095 1096 if (H5ReferenceType.zeroArrayCheck(rElements)) 1097 strValue = "NULL"; 1098 else { 1099 if (dtype.isStdRef()) { 1100 strValue = H5.H5Rget_obj_name(rElements, HDF5Constants.H5P_DEFAULT); 1101 } 1102 else if (dtype.getDatatypeSize() == HDF5Constants.H5R_DSET_REG_REF_BUF_SIZE) { 1103 try { 1104 strValue = H5Datatype.descRegionDataset(parentObject.getFileFormat().getFID(), rElements); 1105 } 1106 catch (Exception ex) { 1107 ex.printStackTrace(); 1108 } 1109 } 1110 else if (dtype.getDatatypeSize() == HDF5Constants.H5R_OBJ_REF_BUF_SIZE) { 1111 try { 1112 strValue = H5Datatype.descReferenceObject(parentObject.getFileFormat().getFID(), rElements); 1113 } 1114 catch (Exception ex) { 1115 ex.printStackTrace(); 1116 } 1117 } 1118 } 1119 if (k > 0) 1120 sb.append(", "); 1121 sb.append(strValue); 1122 } 1123 } 1124 else { 1125 return super.toString(theData, theType, delimiter, count); 1126 } 1127 1128 return sb.toString(); 1129 } 1130 1131 /* Implement interface H5Attribute */ 1132 1133 /** 1134 * The general read and write attribute operations for hdf5 object data. 
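     *
     * A read, for example, follows the same pattern used internally by {@link #read()}
     * (sketch only; {@code attr} is an initialized H5ScalarAttr):
     *
     * <pre>
     * long aid = attr.open();
     * try {
     *     Object data = attr.AttributeCommonIO(aid, H5File.IO_TYPE.READ, null); // null buffer for a read
     * }
     * finally {
     *     attr.close(aid);
     * }
     * </pre>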
1135 * 1136 * @param attr_id 1137 * the attribute to access 1138 * @param ioType 1139 * the type of IO operation 1140 * @param objBuf 1141 * the data buffer to use for write operation 1142 * 1143 * @return the attribute data 1144 * 1145 * @throws Exception 1146 * if the data can not be retrieved 1147 */ 1148 @Override 1149 public Object AttributeCommonIO(long attr_id, H5File.IO_TYPE ioType, Object objBuf) throws Exception { 1150 H5Datatype dsDatatype = (H5Datatype) getDatatype(); 1151 Object theData = null; 1152 1153 long dt_size = dsDatatype.getDatatypeSize(); 1154 log.trace("AttributeCommonIO(): create native"); 1155 long tid = dsDatatype.createNative(); 1156 1157 if (ioType == H5File.IO_TYPE.READ) { 1158 log.trace("AttributeCommonIO():read ioType isNamed={} isEnum={} isText={} isRefObj={}", dsDatatype.isNamed(), dsDatatype.isEnum(), dsDatatype.isText(), dsDatatype.isRefObj()); 1159 log.trace("AttributeCommonIO():read ioType isVLEN={}", dsDatatype.isVLEN()); 1160 1161 long lsize = 1; 1162 for (int j = 0; j < dims.length; j++) 1163 lsize *= dims[j]; 1164 log.trace("AttributeCommonIO():read ioType dt_size={} lsize={}", dt_size, lsize); 1165 1166 try { 1167 if (dsDatatype.isVarStr()) { 1168 String[] strs = new String[(int) lsize]; 1169 for (int j = 0; j < lsize; j++) 1170 strs[j] = ""; 1171 try { 1172 log.trace("AttributeCommonIO():read ioType H5Aread_VLStrings"); 1173 H5.H5Aread_VLStrings(attr_id, tid, strs); 1174 } 1175 catch (Exception ex) { 1176 log.debug("AttributeCommonIO():read ioType H5Aread_VLStrings failure: ", ex); 1177 ex.printStackTrace(); 1178 } 1179 theData = strs; 1180 } 1181 else if (dsDatatype.isCompound()) { 1182 String[] strs = new String[(int) lsize]; 1183 for (int j = 0; j < lsize; j++) 1184 strs[j] = ""; 1185 try { 1186 log.trace("AttributeCommonIO():read ioType H5AreadComplex"); 1187 H5.H5AreadComplex(attr_id, tid, strs); 1188 } 1189 catch (Exception ex) { 1190 ex.printStackTrace(); 1191 } 1192 theData = strs; 1193 } 1194 else if (dsDatatype.isVLEN()) { 1195 log.trace("AttributeCommonIO():read ioType:VLEN-REF H5Aread isArray()={}", dsDatatype.isArray()); 1196 theData = new ArrayList[(int)lsize]; 1197 for (int j = 0; j < lsize; j++) 1198 ((ArrayList[])theData)[j] = new ArrayList<byte[]>(); 1199 1200 try { 1201 H5.H5AreadVL(attr_id, tid, (Object[])theData); 1202 } 1203 catch (Exception ex) { 1204 log.debug("AttributeCommonIO():read ioType:VLEN-REF H5Aread failure: ", ex); 1205 ex.printStackTrace(); 1206 } 1207 } 1208 else { 1209 Object attr_data = null; 1210 try { 1211 attr_data = H5Datatype.allocateArray(dsDatatype, (int) lsize); 1212 } 1213 catch (OutOfMemoryError e) { 1214 log.debug("AttributeCommonIO():read ioType out of memory", e); 1215 theData = null; 1216 } 1217 if (attr_data == null) 1218 log.debug("AttributeCommonIO():read ioType allocateArray returned null"); 1219 1220 log.trace("AttributeCommonIO():read ioType H5Aread isArray()={}", dsDatatype.isArray()); 1221 try { 1222 H5.H5Aread(attr_id, tid, attr_data); 1223 } 1224 catch (Exception ex) { 1225 log.debug("AttributeCommonIO():read ioType H5Aread failure: ", ex); 1226 ex.printStackTrace(); 1227 } 1228 1229 /* 1230 * Perform any necessary data conversions. 
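                     * In this read path, the raw buffer returned by H5Aread may be post-processed:
                     * fixed-length text can be unpacked into a String[] via byteToString(), 16-byte
                     * (quad-precision) floats (including arrays of them) are converted to BigDecimal
                     * values, reference data is repackaged as an ArrayList of byte[] tokens, and any
                     * other buffer is used as-is.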
1231 */ 1232 if (dsDatatype.isText() && convertByteToString && (attr_data instanceof byte[])) { 1233 log.trace("AttributeCommonIO():read ioType isText: converting byte array to string array"); 1234 theData = byteToString((byte[]) attr_data, (int) dsDatatype.getDatatypeSize()); 1235 } 1236 else if (dsDatatype.isFloat() && dt_size == 16) { 1237 log.trace("AttributeCommonIO():read ioType isFloat: converting byte array to BigDecimal array"); 1238 theData = dsDatatype.byteToBigDecimal(0, (int)nPoints, (byte[]) attr_data); 1239 } 1240 else if (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isFloat() && dsDatatype.getDatatypeBase().getDatatypeSize() == 16) { 1241 log.trace("AttributeCommonIO():read ioType isArray and isFloat: converting byte array to BigDecimal array"); 1242 long[] arrayDims = dsDatatype.getArrayDims(); 1243 int asize = (int)nPoints; 1244 for (int j = 0; j < arrayDims.length; j++) { 1245 asize *= arrayDims[j]; 1246 } 1247 theData = ((H5Datatype)dsDatatype.getDatatypeBase()).byteToBigDecimal(0, asize, (byte[]) attr_data); 1248 } 1249 else if (dsDatatype.isRef() && (attr_data instanceof byte[])) { 1250 log.trace("AttributeCommonIO():read ioType isRef: converting byte array to List of bytes"); 1251 theData = new ArrayList<byte[]>((int)lsize); 1252 for (int m = 0; m < (int) lsize; m++) { 1253 byte[] curBytes = new byte[(int)dsDatatype.getDatatypeSize()]; 1254 try { 1255 System.arraycopy(attr_data, m * (int)dt_size, curBytes, 0, (int)dsDatatype.getDatatypeSize()); 1256 ((ArrayList<byte[]>)theData).add(curBytes); 1257 } 1258 catch (Exception err) { 1259 log.trace("AttributeCommonIO(): arraycopy failure: ", err); 1260 } 1261 } 1262 } 1263 else 1264 theData = attr_data; 1265 } 1266 } 1267 catch (HDF5DataFiltersException exfltr) { 1268 log.debug("AttributeCommonIO():read ioType read failure: ", exfltr); 1269 throw new Exception("Filter not available exception: " + exfltr.getMessage(), exfltr); 1270 } 1271 catch (Exception ex) { 1272 log.debug("AttributeCommonIO():read ioType read failure: ", ex); 1273 throw new Exception(ex.getMessage(), ex); 1274 } 1275 finally { 1276 dsDatatype.close(tid); 1277 } 1278 log.trace("AttributeCommonIO():read ioType data: {}", theData); 1279 originalBuf = theData; 1280 isDataLoaded = true; 1281 } // H5File.IO_TYPE.READ 1282 else { 1283 /* 1284 * Perform any necessary data conversions before writing the data. 1285 * 1286 * Note that v-len strings do not get converted, regardless of 1287 * conversion request type. 
1288 */ 1289 Object tmpData = objBuf; 1290 try { 1291 // Check if we need to convert integer data 1292 String cname = objBuf.getClass().getName(); 1293 char dname = cname.charAt(cname.lastIndexOf("[") + 1); 1294 boolean doIntConversion = (((dt_size == 1) && (dname == 'S')) || ((dt_size == 2) && (dname == 'I')) 1295 || ((dt_size == 4) && (dname == 'J')) || (dsDatatype.isUnsigned() && unsignedConverted)); 1296 1297 if (doIntConversion) { 1298 log.trace("AttributeCommonIO(): converting integer data to unsigned C-type integers"); 1299 tmpData = convertToUnsignedC(objBuf, null); 1300 } 1301 else if (dsDatatype.isText() && !dsDatatype.isVarStr() && convertByteToString && !(objBuf instanceof byte[])) { 1302 log.trace("AttributeCommonIO(): converting string array to byte array"); 1303 tmpData = stringToByte((String[]) objBuf, (int)dt_size); 1304 } 1305 else if (dsDatatype.isEnum() && (Array.get(objBuf, 0) instanceof String)) { 1306 log.trace("AttributeCommonIO(): converting enum names to values"); 1307 tmpData = dsDatatype.convertEnumNameToValue((String[]) objBuf); 1308 } 1309 else if (dsDatatype.isFloat() && dsDatatype.getDatatypeSize() == 16) { 1310 log.trace("AttributeCommonIO(): isFloat: converting BigDecimal array to byte array"); 1311 throw new Exception("data conversion failure: cannot write BigDecimal values"); 1312 //tmpData = dsDatatype.bigDecimalToByte(0, (int)nPoints, (BigDecimal[]) objBuf); 1313 } 1314 } 1315 catch (Exception ex) { 1316 log.debug("AttributeCommonIO(): data conversion failure: ", ex); 1317 throw new Exception("data conversion failure: " + ex.getMessage()); 1318 } 1319 1320 /* 1321 * Actually write the data now that everything has been setup. 1322 */ 1323 try { 1324 if (dsDatatype.isVarStr()) { 1325 log.trace("AttributeCommonIO(): H5Awrite_VLStrings aid={} tid={}", attr_id, tid); 1326 1327 H5.H5Awrite_VLStrings(attr_id, tid, (Object[]) tmpData); 1328 } 1329 else if (dsDatatype.isVLEN() || (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isVLEN())) { 1330 log.trace("AttributeCommonIO(): H5AwriteVL aid={} tid={}", attr_id, tid); 1331 1332 H5.H5AwriteVL(attr_id, tid, (Object[]) tmpData); 1333 } 1334 else { 1335 log.trace("AttributeCommonIO(): dsDatatype.isRef()={} data is String={}", dsDatatype.isRef(), tmpData instanceof String); 1336 if (dsDatatype.isRef() && tmpData instanceof String) { 1337 // reference is a path+name to the object 1338 log.trace("AttributeCommonIO(): Attribute class is CLASS_REFERENCE"); 1339 log.trace("AttributeCommonIO(): H5Awrite aid={} tid={}", attr_id, tid); 1340 byte[] refBuf = H5.H5Rcreate_object(getFID(), (String) tmpData, HDF5Constants.H5P_DEFAULT); 1341 if (refBuf != null) { 1342 H5.H5Awrite(attr_id, tid, refBuf); 1343 H5.H5Rdestroy(refBuf); 1344 } 1345 } 1346 else if (Array.get(tmpData, 0) instanceof String) { 1347 int len = ((String[]) tmpData).length; 1348 byte[] bval = Dataset.stringToByte((String[]) tmpData, (int)dt_size); 1349 if (bval != null && bval.length == dt_size * len) { 1350 bval[bval.length - 1] = 0; 1351 tmpData = bval; 1352 } 1353 log.trace("AttributeCommonIO(): String={}: {}", tmpData); 1354 log.trace("AttributeCommonIO(): H5Awrite aid={} tid={}", attr_id, tid); 1355 H5.H5Awrite(attr_id, tid, tmpData); 1356 } 1357 else { 1358 log.trace("AttributeCommonIO(): H5Awrite aid={} tid={}", attr_id, tid); 1359 H5.H5Awrite(attr_id, tid, tmpData); 1360 } 1361 } 1362 } 1363 catch (Exception ex) { 1364 log.debug("AttributeCommonIO(): write failure: ", ex); 1365 throw new Exception(ex.getMessage()); 1366 } 1367 finally { 1368 
dsDatatype.close(tid); 1369 } 1370 } // H5File.IO_TYPE.WRITE 1371 1372 return theData; 1373 } 1374 1375 /** 1376 * Read a subset of an attribute for hdf5 object data. 1377 * 1378 * @return the selected attribute data 1379 * 1380 * @throws Exception 1381 * if the data can not be retrieved 1382 */ 1383 @Override 1384 public Object AttributeSelection() throws Exception { 1385 H5Datatype dsDatatype = (H5Datatype) getDatatype(); 1386 int dsSize = (int)dsDatatype.getDatatypeSize(); 1387 if (dsDatatype.isArray()) 1388 dsSize = (int)dsDatatype.getDatatypeBase().getDatatypeSize(); 1389 Object theData = H5Datatype.allocateArray(dsDatatype, (int)nPoints); 1390 if (dsDatatype.isText() && convertByteToString && (theData instanceof byte[])) { 1391 log.trace("scalarAttributeSelection(): isText: converting byte array to string array"); 1392 theData = byteToString((byte[]) theData, dsSize); 1393 } 1394 else if (dsDatatype.isFloat() && dsSize == 16) { 1395 log.trace("scalarAttributeSelection(): isFloat: converting byte array to BigDecimal array"); 1396 theData = dsDatatype.byteToBigDecimal(0, (int)nPoints, (byte[]) theData); 1397 } 1398 else if (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isFloat() && dsSize == 16) { 1399 log.trace("scalarAttributeSelection(): isArray and isFloat: converting byte array to BigDecimal array"); 1400 long[] arrayDims = dsDatatype.getArrayDims(); 1401 int asize = (int)nPoints; 1402 for (int j = 0; j < arrayDims.length; j++) { 1403 asize *= arrayDims[j]; 1404 } 1405 theData = ((H5Datatype)dsDatatype.getDatatypeBase()).byteToBigDecimal(0, asize, (byte[]) theData); 1406 } 1407 Object theOrig = originalBuf; 1408 log.trace("scalarAttributeSelection(): originalBuf={} with datatype size={}", originalBuf, dsSize); 1409 1410 //Copy the selection from originalBuf to theData 1411 //Only three dims are involved and selected data is 2 dimensions 1412 // getHeight() is the row dimension 1413 // getWidth() is the col dimension 1414 // getDepth() is the frame dimension 1415 long[] start = getStartDims(); 1416 long curFrame = start[selectedIndex[2]]; 1417 int k = (int)startDims[selectedIndex[2]] * (int)getDepth(); 1418 for (int col = 0; col < (int)getWidth(); col++) { 1419 for (int row = 0; row < (int)getHeight(); row++) { 1420 int index = row * (int)getWidth() + col; 1421 log.trace("scalarAttributeSelection(): point[{}] row:col:k={}:{}:{}", curFrame, row, col, k); 1422 int fromIndex = ((int)curFrame * (int)getWidth() * (int)getHeight() + 1423 col * (int)getHeight() + row); 1424 int toIndex = (col * (int)getHeight() + row); 1425 int objSize = 1; 1426 if (dsDatatype.isArray()) { 1427 long[] arrayDims = dsDatatype.getArrayDims(); 1428 objSize = arrayDims.length; 1429 try { 1430 System.arraycopy(theOrig, fromIndex, theData, toIndex, objSize); 1431 } 1432 catch (Exception err) { 1433 log.debug("scalarAttributeSelection(): arraycopy failure: ", err); 1434 } 1435 } 1436 else if (dsDatatype.isStdRef()) { 1437 objSize = (int)HDF5Constants.H5R_REF_BUF_SIZE; 1438 fromIndex = fromIndex * HDF5Constants.H5R_REF_BUF_SIZE; 1439 toIndex = toIndex * HDF5Constants.H5R_REF_BUF_SIZE; 1440 try { 1441 System.arraycopy(theOrig, fromIndex, theData, toIndex, objSize); 1442 } 1443 catch (Exception err) { 1444 log.debug("scalarAttributeSelection(): arraycopy failure: ", err); 1445 } 1446 } 1447 else { 1448 if (theOrig instanceof ArrayList) { 1449 if (dsDatatype.isRef()) { 1450 byte[] rElements = (byte[]) ((ArrayList) theOrig).get(fromIndex); 1451 try { 1452 System.arraycopy(rElements, 0, theData, toIndex * 
dsSize, dsSize); 1453 } 1454 catch (Exception err) { 1455 log.trace("scalarAttributeSelection(): refarraycopy failure: ", err); 1456 } 1457 } 1458 else { 1459 Object value = Array.get(theOrig, fromIndex); 1460 log.trace("scalarAttributeSelection(): value={}", value); 1461 ((ArrayList<Object>)theData).add(toIndex, value); 1462 } 1463 } 1464 else 1465 theData = theOrig; 1466 } 1467 } 1468 } 1469 1470 log.trace("scalarAttributeSelection(): theData={}", theData); 1471 return theData; 1472 } 1473}
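
/*
 * Usage sketch (illustrative only): create a small integer attribute on an existing
 * object and attach it through the file format layer, then read the value back.
 * The variables "file" (an open, writable H5File) and "dset" (an HObject in that
 * file) are assumed to exist; see FileFormat.writeAttribute() for the meaning of
 * its boolean argument.
 *
 *     Datatype type = new H5Datatype(Datatype.CLASS_INTEGER, 1, Datatype.ORDER_LE, Datatype.SIGN_NONE);
 *     H5ScalarAttr range = new H5ScalarAttr(dset, "Data range", type, new long[] { 2 });
 *     range.setAttributeData(new int[] { 0, 255 });
 *     file.writeAttribute(dset, range, false);
 *     Object back = range.getAttributeData();
 */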