/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,  *
 * and redistribution, is contained in the files COPYING and Copyright.html. *
 * COPYING can be found at the root of the source code distribution tree.    *
 * Or, see https://support.hdfgroup.org/products/licenses.html               *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.h4;

import java.lang.reflect.Array;
import java.math.BigInteger;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import hdf.object.Attribute;
import hdf.object.CompoundDataFormat;
import hdf.object.CompoundDS;
import hdf.object.DataFormat;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.MetaDataContainer;

/**
 * An attribute is a (name, value) pair of metadata attached to a primary data object such as a
 * dataset, group or named datatype.
 *
 * Like a dataset, an attribute has a name, datatype and dataspace.
 *
 * For more details on attributes, <a href=
 * "https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5
 * User's Guide</a>
 *
 * The following code is an example of an attribute with 1D integer array of two elements.
 *
 * <pre>
 * // Example of creating a new attribute
 * // The name of the new attribute
 * String name = "Data range";
 * // Creating an unsigned 1-byte integer datatype
 * Datatype type = new Datatype(Datatype.CLASS_INTEGER, // class
 *                              1,                      // size in bytes
 *                              Datatype.ORDER_LE,      // byte order
 *                              Datatype.SIGN_NONE);    // unsigned
 * // 1-D array of size two
 * long[] dims = {2};
 * // The value of the attribute
 * int[] value = {0, 255};
 * // Create a new attribute
 * Attribute dataRange = new Attribute(name, type, dims);
 * // Set the attribute value
 * dataRange.setValue(value);
 * // See FileFormat.writeAttribute() for how to attach an attribute to an object,
 * &#64;see hdf.object.FileFormat#writeAttribute(HObject, Attribute, boolean)
 * </pre>
 *
 * For a compound datatype, the value of an H4CompoundAttribute will be a 1D array of strings with
 * field members separated by a comma. For example, "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a
 * compound attribute of {int, float} of three data points.
 *
 * @see hdf.object.Datatype
 *
 * @version 2.0 4/2/2018
 * @author Peter X. Cao, Jordan T. Henderson
 */
public class H4CompoundAttribute extends CompoundDS implements Attribute {

    private static final long serialVersionUID = 2072473407027648309L;

    private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H4CompoundAttribute.class);

    /** The HObject to which this H4CompoundAttribute is attached, Attribute interface */
    protected HObject parentObject;

    /** additional information and properties for the attribute, Attribute interface */
    private transient Map<String, Object> properties;

    /**
     * Create an attribute with specified name, data type and dimension sizes.
     *
     * For scalar attribute, the dimension size can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general class
     * and is independent of file format, e.g., the implementation of attribute
     * applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = &quot;CLASS&quot;;
     * String[] classValue = { &quot;IMAGE&quot; };
     * Datatype attrType = null;
     * try {
     *     attrType = new H4Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * Attribute attr = new Attribute(attrName, attrType, attrDims);
     * attr.setValue(classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this Attribute is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for scalar attribute
     *
     * @see hdf.object.Datatype
     */
    public H4CompoundAttribute(HObject parentObj, String attrName, Datatype attrType, long[] attrDims) {
        this(parentObj, attrName, attrType, attrDims, null);
    }

    /**
     * Create an attribute with specific name and value.
     *
     * For scalar attribute, the dimension size can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general class
     * and is independent of file format, e.g., the implementation of attribute
     * applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = &quot;CLASS&quot;;
     * String[] classValue = { &quot;IMAGE&quot; };
     * Datatype attrType = null;
     * try {
     *     attrType = new H4Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * Attribute attr = new Attribute(attrName, attrType, attrDims, classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this Attribute is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for scalar attribute
     * @param attrValue
     *            the value of the attribute, null if no value
     *
     * @see hdf.object.Datatype
     */
    @SuppressWarnings("deprecation")
    public H4CompoundAttribute(HObject parentObj, String attrName, Datatype attrType, long[] attrDims, Object attrValue) {
        super((parentObj == null) ? null : parentObj.getFileFormat(), attrName,
                (parentObj == null) ? null : parentObj.getFullName(), null);

        log.trace("H4CompoundAttribute: start {}", parentObj);

        this.parentObject = parentObj;

        datatype = attrType;

        if (attrValue != null) {
            data = attrValue;
            originalBuf = attrValue;
            isDataLoaded = true;
        }
        // Diamond operator instead of the former raw-type HashMap; lets the
        // blanket rawtypes/unchecked suppression be narrowed to "deprecation".
        properties = new HashMap<>();

        // A null dims array means a scalar attribute: treat it as a 1-D array of one element.
        if (attrDims == null) {
            rank = 1;
            dims = new long[] { 1 };
        }
        else {
            dims = attrDims;
            rank = dims.length;
        }

        selectedDims = new long[rank];
        startDims = new long[rank];
        selectedStride = new long[rank];

        log.trace("attrName={}, attrType={}, attrValue={}, rank={}, isUnsigned={}",
                attrName, getDatatype().getDescription(), data, rank, getDatatype().isUnsigned());

        resetSelection();
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#open()
     */
    @Override
    public long open() {
        if (parentObject == null) {
            log.debug("open(): attribute's parent object is null");
            return -1;
        }

        long aid = -1;
        long pObjID = -1;

        try {
            pObjID = parentObject.open();
            if (pObjID >= 0) {
                if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF4))) {
                    log.trace("open(): FILE_TYPE_HDF4");
                    /*
                     * TODO: Get type of HDF4 object this is attached to and retrieve attribute info.
                     */
                }
            }

            log.trace("open(): aid={}", aid);
        }
        catch (Exception ex) {
            log.debug("open(): Failed to open attribute {}: ", getName(), ex);
            aid = -1;
        }
        finally {
            parentObject.close(pObjID);
        }

        return aid;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#close(int)
     */
    @Override
    public void close(long aid) {
        if (aid >= 0) {
            if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF4))) {
                log.trace("close(): FILE_TYPE_HDF4");
                /*
                 * TODO: Get type of HDF4 object this is attached to and close attribute.
                 */
            }
        }
    }

    @Override
    public void init() {
        if (inited) {
            resetSelection();
            log.trace("init(): Attribute already inited");
            return;
        }

        if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF4))) {
            log.trace("init(): FILE_TYPE_HDF4");
            /*
             * TODO: If HDF4 attribute object needs to init dependent objects.
             */
            inited = true;
        }

        resetSelection();
    }

    /**
     * Reads the data from file.
     *
     * read() reads the data from file to a memory buffer and returns the memory
     * buffer. The dataset object does not hold the memory buffer. To store the
     * memory buffer in the dataset object, one must call getData().
     *
     * By default, the whole dataset is read into memory. Users can also select
     * a subset to read. Subsetting is done in an implicit way.
     *
     * @return the data read from file.
     *
     * @see #getData()
     *
     * @throws Exception
     *             if object can not be read
     * @throws OutOfMemoryError
     *             if memory is exhausted
     */
    @Override
    public Object read() throws Exception, OutOfMemoryError {
        if (!inited)
            init();

        /*
         * TODO: For now, convert a compound Attribute's data (String[]) into a List for
         * convenient processing
         */
        // Guard with instanceof instead of a blind cast: the previous
        // unconditional (String[]) cast threw ClassCastException for any
        // non-String[] compound buffer.
        if (getDatatype().isCompound() && !(data instanceof List) && (data instanceof String[])) {
            List<String> valueList = Arrays.asList((String[]) data);

            data = valueList;
        }

        return data;
    }

    /* Implement abstract Dataset */

    /**
     * Writes a memory buffer to the object in the file.
     *
     * @param buf
     *            The buffer that contains the data values.
     *
     * @throws Exception
     *             if data can not be written
     */
    @Override
    public void write(Object buf) throws Exception {
        log.trace("function of dataset: write(Object) start");
        // Null-safe comparison: the previous buf.equals(data) threw
        // NullPointerException when a null buffer was passed.
        if (buf != null && !buf.equals(data))
            setData(buf);

        init();

        if (parentObject == null) {
            log.debug("write(Object): parent object is null; nowhere to write attribute to");
            return;
        }

        ((MetaDataContainer) getParentObject()).writeMetadata(this);
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#copy(hdf.object.Group, java.lang.String, long[], java.lang.Object)
     */
    @Override
    public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff) throws Exception {
        // not supported
        throw new UnsupportedOperationException("copy operation unsupported for H4.");
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#readBytes()
     */
    @Override
    public byte[] readBytes() throws Exception {
        // not supported
        throw new UnsupportedOperationException("readBytes operation unsupported for H4.");
    }

    /**
     * Given an array of bytes representing a compound Datatype and a start index
     * and length, converts len number of bytes into the correct Object type and
     * returns it.
     *
     * NOTE(review): currently a stub that always returns null — confirm whether
     * callers handle a null member value.
     *
     * @param data
     *            The byte array representing the data of the compound Datatype
     * @param data_type
     *            The type of data to convert the bytes to
     * @param start
     *            The start index of the bytes to get
     * @param len
     *            The number of bytes to convert
     * @return The converted type of the bytes
     */
    protected Object convertCompoundByteMember(byte[] data, long data_type, long start, long len) {
        return null;
    }

    /**
     * Converts the data values of this data object to appropriate Java integers if
     * they are unsigned integers.
     *
     * @see hdf.object.Dataset#convertToUnsignedC(Object)
     * @see hdf.object.Dataset#convertFromUnsignedC(Object, Object)
     *
     * @return the converted data buffer.
     */
    @Override
    public Object convertFromUnsignedC() {
        // Message previously named H5CompoundDS (copy-paste from the HDF5 class).
        throw new UnsupportedOperationException("H4CompoundAttribute:convertFromUnsignedC Unsupported operation.");
    }

    /**
     * Converts Java integer data values of this data object back to unsigned C-type
     * integer data if they are unsigned integers.
     *
     * @see hdf.object.Dataset#convertToUnsignedC(Object)
     * @see hdf.object.Dataset#convertToUnsignedC(Object, Object)
     *
     * @return the converted data buffer.
     */
    @Override
    public Object convertToUnsignedC() {
        // Message previously named H5CompoundDS (copy-paste from the HDF5 class).
        throw new UnsupportedOperationException("H4CompoundAttribute:convertToUnsignedC Unsupported operation.");
    }

    /* Implement interface Attribute */

    /**
     * Returns the HObject to which this Attribute is currently "attached".
     *
     * @return the HObject to which this Attribute is currently "attached".
     */
    public HObject getParentObject() {
        return parentObject;
    }

    /**
     * Sets the HObject to which this Attribute is "attached".
     *
     * @param pObj
     *            the new HObject to which this Attribute is "attached".
     */
    public void setParentObject(HObject pObj) {
        parentObject = pObj;
    }

    /**
     * set a property for the attribute.
     *
     * @param key the attribute Map key
     * @param value the attribute Map value
     */
    public void setProperty(String key, Object value) {
        properties.put(key, value);
    }

    /**
     * get a property for a given key.
     *
     * @param key the attribute Map key
     *
     * @return the property
     */
    public Object getProperty(String key) {
        return properties.get(key);
    }

    /**
     * get all property keys.
     *
     * @return the Collection of property keys
     */
    public Collection<String> getPropertyKeys() {
        return properties.keySet();
    }

    /**
     * Returns the name of the object. For example, "Raster Image #2".
     *
     * @return The name of the object.
     */
    public final String getAttributeName() {
        return getName();
    }

    /**
     * Retrieves the attribute data from the file.
     *
     * @return the attribute data.
     *
     * @throws Exception
     *             if the data can not be retrieved
     */
    public final Object getAttributeData() throws Exception, OutOfMemoryError {
        return getData();
    }

    /**
     * Returns the datatype of the attribute.
     *
     * @return the datatype of the attribute.
     */
    public final Datatype getAttributeDatatype() {
        return getDatatype();
    }

    /**
     * Returns the space type for the attribute. It returns a
     * negative number if it failed to retrieve the type information from
     * the file.
     *
     * @return the space type for the attribute.
     */
    public final int getAttributeSpaceType() {
        return getSpaceType();
    }

    /**
     * Returns the rank (number of dimensions) of the attribute. It returns a
     * negative number if it failed to retrieve the dimension information from
     * the file.
     *
     * @return the number of dimensions of the attribute.
     */
    public final int getAttributeRank() {
        return getRank();
    }

    /**
     * Returns the selected size of the rows and columns of the attribute. It returns a
     * negative number if it failed to retrieve the size information from
     * the file.
     *
     * @return the selected size of the rows and columns of the attribute.
     */
    public final int getAttributePlane() {
        return (int) getWidth() * (int) getHeight();
    }

    /**
     * Returns the array that contains the dimension sizes of the data value of
     * the attribute. It returns null if it failed to retrieve the dimension
     * information from the file.
     *
     * @return the dimension sizes of the attribute.
     */
    public final long[] getAttributeDims() {
        return getDims();
    }

    /**
     * @return true if the data is a single scalar point; otherwise, returns
     *         false.
     */
    public boolean isAttributeScalar() {
        return isScalar();
    }

    /**
     * Not for public use in the future.
     *
     * setData() is not safe to use because it changes memory buffer
     * of the dataset object. Dataset operations such as write/read
     * will fail if the buffer type or size is changed.
     *
     * @param d the object data -must be an array of Objects
     */
    public void setAttributeData(Object d) {
        setData(d);
    }

    /**
     * Writes the memory buffer of this dataset to file.
     *
     * @throws Exception if buffer can not be written
     */
    public void writeAttribute() throws Exception {
        write();
    }

    /**
     * Writes the given data buffer into this attribute in a file.
     *
     * The data buffer is a vector that contains the data values of compound fields. The data is written
     * into file as one data blob.
     *
     * @param buf
     *            The vector that contains the data values of compound fields.
     *
     * @throws Exception
     *             If there is an error at the library level.
     */
    public void writeAttribute(Object buf) throws Exception {
        write(buf);
    }

    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter. For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} of three data points.
     *
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     *
     * @return the string representation of the data values.
     */
    public String toAttributeString(String delimiter) {
        return toString(delimiter, -1);
    }

    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter. For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} of three data points.
     *
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     * @param maxItems
     *            The maximum number of Array values to return
     *
     * @return the string representation of the data values.
     */
    public String toAttributeString(String delimiter, int maxItems) {
        return toString(delimiter, maxItems);
    }
}