/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the files COPYING and Copyright.html. *
 * COPYING can be found at the root of the source code distribution tree.    *
 * Or, see https://support.hdfgroup.org/products/licenses.html               *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.h4;

import java.lang.reflect.Array;
import java.math.BigInteger;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import hdf.object.Attribute;
import hdf.object.DataFormat;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.MetaDataContainer;
import hdf.object.ScalarDS;

/**
 * An attribute is a (name, value) pair of metadata attached to a primary data object such as a
 * dataset, group or named datatype.
 *
 * Like a dataset, an attribute has a name, datatype and dataspace.
 *
 * For more details on attributes, see the <a href=
 * "https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5
 * User's Guide</a>.
 *
 * The following code is an example of an attribute with a 1D integer array of two elements.
 *
 * <pre>
 * // Example of creating a new attribute
 * // The name of the new attribute
 * String name = "Data range";
 * // Creating an unsigned 1-byte integer datatype
 * Datatype type = new Datatype(Datatype.CLASS_INTEGER, // class
 *                              1,                      // size in bytes
 *                              Datatype.ORDER_LE,      // byte order
 *                              Datatype.SIGN_NONE);    // unsigned
 * // 1-D array of size two
 * long[] dims = {2};
 * // The value of the attribute
 * int[] value = {0, 255};
 * // Create a new attribute
 * Attribute dataRange = new Attribute(name, type, dims);
 * // Set the attribute value
 * dataRange.setValue(value);
 * // See FileFormat.writeAttribute() for how to attach an attribute to an object.
 * &#64;see hdf.object.FileFormat#writeAttribute(HObject, Attribute, boolean)
 * </pre>
 *
 * For an atomic datatype, the value of an H4ScalarAttribute will be a 1D array of integers, floats or
 * strings.
 *
 * @see hdf.object.Datatype
 *
 * @version 2.0 4/2/2018
 * @author Peter X. Cao, Jordan T. Henderson
 */
public class H4ScalarAttribute extends ScalarDS implements Attribute {

    private static final long serialVersionUID = 2072473407027648309L;

    private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H4ScalarAttribute.class);

    /** The HObject to which this H4ScalarAttribute is attached, Attribute interface */
    protected HObject parentObject;

    /** Additional information and properties for the attribute, Attribute interface */
    private transient Map<String, Object> properties;
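
    /*
     * Retrieval sketch (illustrative, with assumed context): existing attributes of an
     * HDF4 object are normally obtained from the object's metadata list rather than
     * constructed by hand. "dset" is a hypothetical HDF4 object (for example an H4SDS)
     * that implements MetaDataContainer and has already been opened from a file.
     *
     *   try {
     *       List<?> attrList = ((MetaDataContainer) dset).getMetadata();
     *       for (Object meta : attrList) {
     *           if (meta instanceof H4ScalarAttribute)
     *               System.out.println(((H4ScalarAttribute) meta).getAttributeName());
     *       }
     *   }
     *   catch (Exception ex) {
     *       // handle the failure to read the object's metadata
     *   }
     */
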
    /**
     * Create an attribute with the specified name, datatype and dimension sizes.
     *
     * For a scalar attribute, the dimension sizes can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general interface
     * and is independent of the file format, e.g., the implementation of an attribute
     * applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = "CLASS";
     * String[] classValue = { "IMAGE" };
     * Datatype attrType = null;
     * try {
     *     attrType = new H4Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * Attribute attr = new H4ScalarAttribute(parentObj, attrName, attrType, attrDims);
     * attr.setAttributeData(classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this Attribute is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for a scalar attribute
     *
     * @see hdf.object.Datatype
     */
    public H4ScalarAttribute(HObject parentObj, String attrName, Datatype attrType, long[] attrDims) {
        this(parentObj, attrName, attrType, attrDims, null);
    }

    /**
     * Create an attribute with a specific name and value.
     *
     * For a scalar attribute, the dimension sizes can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general interface
     * and is independent of the file format, e.g., the implementation of an attribute
     * applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = "CLASS";
     * String[] classValue = { "IMAGE" };
     * Datatype attrType = null;
     * try {
     *     attrType = new H4Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * Attribute attr = new H4ScalarAttribute(parentObj, attrName, attrType, attrDims, classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this Attribute is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for a scalar attribute
     * @param attrValue
     *            the value of the attribute, null if no value
     *
     * @see hdf.object.Datatype
     */
    @SuppressWarnings({ "rawtypes", "unchecked", "deprecation" })
    public H4ScalarAttribute(HObject parentObj, String attrName, Datatype attrType, long[] attrDims, Object attrValue) {
        super((parentObj == null) ? null : parentObj.getFileFormat(), attrName,
                (parentObj == null) ? null : parentObj.getFullName(), null);

        log.trace("H4ScalarAttribute: start {}", parentObj);
        this.parentObject = parentObj;

        unsignedConverted = false;

        datatype = attrType;

        if (attrValue != null) {
            data = attrValue;
            originalBuf = attrValue;
            isDataLoaded = true;
        }
        properties = new HashMap();

        if (attrDims == null) {
            rank = 1;
            dims = new long[] { 1 };
        }
        else {
            dims = attrDims;
            rank = dims.length;
        }

        selectedDims = new long[rank];
        startDims = new long[rank];
        selectedStride = new long[rank];

        log.trace("attrName={}, attrType={}, attrValue={}, rank={}, isUnsigned={}",
                attrName, getDatatype().getDescription(), data, rank, getDatatype().isUnsigned());

        resetSelection();
    }
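
    /*
     * Creation sketch (illustrative, with assumed context): building a two-element
     * unsigned 8-bit integer attribute with its value supplied directly to this
     * constructor. "dset" is a hypothetical HDF4 object (for example an H4SDS) that the
     * attribute will be attached to; writeAttribute() then persists it to the file.
     *
     *   try {
     *       Datatype u8 = new H4Datatype(Datatype.CLASS_INTEGER, 1, Datatype.ORDER_LE, Datatype.SIGN_NONE);
     *       H4ScalarAttribute range = new H4ScalarAttribute(dset, "valid_range", u8,
     *               new long[] { 2 }, new int[] { 0, 255 });
     *       range.writeAttribute();
     *   }
     *   catch (Exception ex) {
     *       // handle the failure to create or write the attribute
     *   }
     */
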
    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#open()
     */
    @Override
    public long open() {
        if (parentObject == null) {
            log.debug("open(): attribute's parent object is null");
            return -1;
        }

        long aid = -1;
        long pObjID = -1;

        try {
            pObjID = parentObject.open();
            if (pObjID >= 0) {
                if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF4))) {
                    log.trace("open(): FILE_TYPE_HDF4");
                    /*
                     * TODO: Get type of HDF4 object this is attached to and retrieve attribute info.
                     */
                }
            }

            log.trace("open(): aid={}", aid);
        }
        catch (Exception ex) {
            log.debug("open(): Failed to open attribute {}: ", getName(), ex);
            aid = -1;
        }
        finally {
            parentObject.close(pObjID);
        }

        return aid;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#close(int)
     */
    @Override
    public void close(long aid) {
        if (aid >= 0) {
            if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF4))) {
                log.trace("close(): FILE_TYPE_HDF4");
                /*
                 * TODO: Get type of HDF4 object this is attached to and close attribute.
                 */
            }
        }
    }

    @Override
    public void init() {
        if (inited) {
            resetSelection();
            log.trace("init(): Attribute already inited");
            return;
        }

        if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF4))) {
            log.trace("init(): FILE_TYPE_HDF4");
            /*
             * TODO: If HDF4 attribute object needs to init dependent objects.
             */
            inited = true;
        }

        resetSelection();
    }

    /**
     * Reads the data from the file.
     *
     * read() reads the data from the file into a memory buffer and returns the memory
     * buffer. The dataset object does not hold the memory buffer. To store the
     * memory buffer in the dataset object, one must call getData().
     *
     * By default, the whole dataset is read into memory. Users can also select
     * a subset to read. Subsetting is done in an implicit way.
     *
     * @return the data read from the file.
     *
     * @see #getData()
     *
     * @throws Exception
     *             if the object cannot be read
     * @throws OutOfMemoryError
     *             if memory is exhausted
     */
    @Override
    public Object read() throws Exception, OutOfMemoryError {
        if (!inited)
            init();

        return data;
    }
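
    /*
     * Read sketch (illustrative, with assumed context): "attr" is a hypothetical
     * H4ScalarAttribute obtained from an object's metadata list. getAttributeData()
     * loads the value on demand and, for an atomic datatype, returns a 1D Java array
     * (e.g. int[], float[] or String[]).
     *
     *   try {
     *       Object value = attr.getAttributeData();
     *       if (value instanceof int[]) {
     *           int[] ints = (int[]) value;
     *           System.out.println("first element: " + ints[0]);
     *       }
     *   }
     *   catch (Exception ex) {
     *       // handle the failure to read the attribute value
     *   }
     */
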
    /* Implement abstract Dataset */

    /**
     * Writes a memory buffer to the object in the file.
     *
     * @param buf
     *            The buffer that contains the data values.
     *
     * @throws Exception
     *             if the data cannot be written
     */
    @Override
    public void write(Object buf) throws Exception {
        log.trace("write(Object): start");
        if (!buf.equals(data))
            setData(buf);

        init();

        if (parentObject == null) {
            log.debug("write(Object): parent object is null; nowhere to write attribute to");
            return;
        }

        ((MetaDataContainer) getParentObject()).writeMetadata(this);
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#copy(hdf.object.Group, java.lang.String, long[], java.lang.Object)
     */
    @Override
    public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff) throws Exception {
        // not supported
        throw new UnsupportedOperationException("copy operation unsupported for H4.");
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#readBytes()
     */
    @Override
    public byte[] readBytes() throws Exception {
        // not supported
        throw new UnsupportedOperationException("readBytes operation unsupported for H4.");
    }

    /* Implement interface Attribute */

    /**
     * Returns the HObject to which this Attribute is currently "attached".
     *
     * @return the HObject to which this Attribute is currently "attached".
     */
    public HObject getParentObject() {
        return parentObject;
    }

    /**
     * Sets the HObject to which this Attribute is "attached".
     *
     * @param pObj
     *            the new HObject to which this Attribute is "attached".
     */
    public void setParentObject(HObject pObj) {
        parentObject = pObj;
    }

    /**
     * Sets a property for the attribute.
     *
     * @param key the attribute Map key
     * @param value the attribute Map value
     */
    public void setProperty(String key, Object value) {
        properties.put(key, value);
    }

    /**
     * Gets a property for a given key.
     *
     * @param key the attribute Map key
     *
     * @return the property
     */
    public Object getProperty(String key) {
        return properties.get(key);
    }

    /**
     * Gets all property keys.
     *
     * @return the Collection of property keys
     */
    public Collection<String> getPropertyKeys() {
        return properties.keySet();
    }
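
    /*
     * Property sketch (illustrative): the property map holds additional, in-memory
     * information about the attribute object (the field is transient and is not part
     * of the data written to the file). "attr" is a hypothetical H4ScalarAttribute
     * instance.
     *
     *   attr.setProperty("origin", "converted from netCDF");
     *   for (String key : attr.getPropertyKeys())
     *       System.out.println(key + " = " + attr.getProperty(key));
     */
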
    /**
     * Returns the name of the object. For example, "Raster Image #2".
     *
     * @return The name of the object.
     */
    public final String getAttributeName() {
        return getName();
    }

    /**
     * Retrieves the attribute data from the file.
     *
     * @return the attribute data.
     *
     * @throws Exception
     *             if the data cannot be retrieved
     */
    public final Object getAttributeData() throws Exception, OutOfMemoryError {
        return getData();
    }

    /**
     * Returns the datatype of the attribute.
     *
     * @return the datatype of the attribute.
     */
    public final Datatype getAttributeDatatype() {
        return getDatatype();
    }

    /**
     * Returns the space type for the attribute. It returns a
     * negative number if it failed to retrieve the type information from
     * the file.
     *
     * @return the space type for the attribute.
     */
    public final int getAttributeSpaceType() {
        return getSpaceType();
    }

    /**
     * Returns the rank (number of dimensions) of the attribute. It returns a
     * negative number if it failed to retrieve the dimension information from
     * the file.
     *
     * @return the number of dimensions of the attribute.
     */
    public final int getAttributeRank() {
        return getRank();
    }

    /**
     * Returns the selected size of the rows and columns of the attribute. It returns a
     * negative number if it failed to retrieve the size information from
     * the file.
     *
     * @return the selected size of the rows and columns of the attribute.
     */
    public final int getAttributePlane() {
        return (int)getWidth() * (int)getHeight();
    }

    /**
     * Returns the array that contains the dimension sizes of the data value of
     * the attribute. It returns null if it failed to retrieve the dimension
     * information from the file.
     *
     * @return the dimension sizes of the attribute.
     */
    public final long[] getAttributeDims() {
        return getDims();
    }

    /**
     * @return true if the data is a single scalar point; otherwise, returns
     *         false.
     */
    public boolean isAttributeScalar() {
        return isScalar();
    }

    /**
     * Not for public use in the future.
     *
     * setData() is not safe to use because it changes the memory buffer
     * of the dataset object. Dataset operations such as write/read
     * will fail if the buffer type or size is changed.
     *
     * @param d the object data, which must be an array of Objects
     */
    public void setAttributeData(Object d) {
        setData(d);
    }

    /**
     * Writes the memory buffer of this dataset to the file.
     *
     * @throws Exception if the buffer cannot be written
     */
    public void writeAttribute() throws Exception {
        write();
    }

    /**
     * Writes the given data buffer into this attribute in a file.
     *
     * The data buffer is an array that contains the data values of the attribute. The data is written
     * into the file as one data blob.
     *
     * @param buf
     *            The buffer that contains the data values of the attribute.
     *
     * @throws Exception
     *             If there is an error at the library level.
     */
    public void writeAttribute(Object buf) throws Exception {
        write(buf);
    }
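
    /*
     * Update sketch (illustrative, with assumed context): replacing the value of an
     * existing attribute. "attr" is a hypothetical H4ScalarAttribute that is already
     * attached to an HDF4 object; writeAttribute(Object) stores the new buffer and
     * writes it back to the file through the parent object's metadata.
     *
     *   try {
     *       int[] newRange = { 0, 1023 };
     *       attr.writeAttribute(newRange);
     *       System.out.println(attr.toAttributeString(", "));
     *   }
     *   catch (Exception ex) {
     *       // handle the failure to update the attribute
     *   }
     */
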
    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter. For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} of three data points.
     *
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     *
     * @return the string representation of the data values.
     */
    public String toAttributeString(String delimiter) {
        return toString(delimiter, -1);
    }

    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter. For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} of three data points.
     *
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     * @param maxItems
     *            The maximum number of Array values to return
     *
     * @return the string representation of the data values.
     */
    public String toAttributeString(String delimiter, int maxItems) {
        return toString(delimiter, maxItems);
    }
}