/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the files COPYING and Copyright.html. *
 * COPYING can be found at the root of the source code distribution tree.    *
 * Or, see https://support.hdfgroup.org/products/licenses.html               *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 *****************************************************************************/

package hdf.object.nc2;

import java.lang.reflect.Array;
import java.math.BigInteger;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import hdf.object.Attribute;
import hdf.object.DataFormat;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.ScalarDS;

/**
 * An attribute is a (name, value) pair of metadata attached to a primary data object such as a
 * dataset, group or named datatype.
 *
 * Like a dataset, an attribute has a name, datatype and dataspace.
 *
 * For more details on attributes, see the <a href=
 * "https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5
 * User's Guide</a>.
 *
 * The following code is an example of an attribute with a 1D integer array of two elements.
 *
 * <pre>
 * // Example of creating a new attribute
 * // The name of the new attribute
 * String name = "Data range";
 * // Creating an unsigned 1-byte integer datatype
 * Datatype type = new NC2Datatype(Datatype.CLASS_INTEGER, // class
 *                                 1,                      // size in bytes
 *                                 Datatype.ORDER_LE,      // byte order
 *                                 Datatype.SIGN_NONE);    // unsigned
 * // 1-D array of size two
 * long[] dims = {2};
 * // The value of the attribute
 * int[] value = {0, 255};
 * // Create a new attribute, where parentObj is the HObject the attribute is attached to
 * Attribute dataRange = new NC2Attribute(parentObj, name, type, dims);
 * // Set the attribute value
 * dataRange.setValue(value);
 * // See FileFormat.writeAttribute() for how to attach an attribute to an object,
 * // @see hdf.object.FileFormat#writeAttribute(HObject, Attribute, boolean)
 * </pre>
 *
 * For an atomic datatype, the value of an Attribute will be a 1D array of integers, floats or
 * strings. For a compound datatype, it will be a 1D array of strings with field members separated
 * by a comma. For example, "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
 * float} of three data points.
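 *
 * As a rough sketch (assuming {@code attr} is an NC2Attribute that has been read from a file;
 * the variable name is illustrative only), an atomic integer value can be retrieved and cast:
 *
 * <pre>
 * try {
 *     Object rawValue = attr.getAttributeData();
 *     // for an atomic integer attribute the value is a 1D primitive array
 *     if (rawValue instanceof int[]) {
 *         int[] range = (int[]) rawValue;
 *         System.out.println("min=" + range[0] + ", max=" + range[1]);
 *     }
 * }
 * catch (Exception ex) {}
 * </pre>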
 *
 * @see hdf.object.Datatype
 *
 * @version 2.0 4/2/2018
 * @author Peter X. Cao, Jordan T. Henderson
 */
public class NC2Attribute extends ScalarDS implements Attribute
{
    private static final long serialVersionUID = 2072473407027648309L;

    private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(NC2Attribute.class);

    /** The HObject to which this NC2Attribute is attached, Attribute interface */
    protected HObject parentObject;

    /** additional information and properties for the attribute, Attribute interface */
    private transient Map<String, Object> properties;

    /**
     * Create an attribute with the specified name, datatype and dimension sizes.
     *
     * For a scalar attribute, the dimension size can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general class
     * and is independent of the file format, e.g., the implementation of attribute
     * applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = "CLASS";
     * String[] classValue = { "IMAGE" };
     * Datatype attrType = null;
     * try {
     *     attrType = new NC2Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * NC2Attribute attr = new NC2Attribute(parentObj, attrName, attrType, attrDims);
     * attr.setValue(classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this Attribute is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for a scalar attribute
     *
     * @see hdf.object.Datatype
     */
    public NC2Attribute(HObject parentObj, String attrName, Datatype attrType, long[] attrDims) {
        this(parentObj, attrName, attrType, attrDims, null);
    }

    /**
     * Create an attribute with a specific name and value.
     *
     * For a scalar attribute, the dimension size can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general class
     * and is independent of the file format, e.g., the implementation of attribute
     * applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = "CLASS";
     * String[] classValue = { "IMAGE" };
     * Datatype attrType = null;
     * try {
     *     attrType = new NC2Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * NC2Attribute attr = new NC2Attribute(parentObj, attrName, attrType, attrDims, classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this Attribute is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for a scalar attribute
     * @param attrValue
     *            the value of the attribute, null if no value
     *
     * @see hdf.object.Datatype
     */
    @SuppressWarnings({ "rawtypes", "unchecked", "deprecation" })
    public NC2Attribute(HObject parentObj, String attrName, Datatype attrType, long[] attrDims, Object attrValue) {
        super((parentObj == null) ? null : parentObj.getFileFormat(), attrName,
              (parentObj == null) ? null : parentObj.getFullName(), null);

        log.trace("NC2Attribute: start {}", parentObj);
        this.parentObject = parentObj;

        unsignedConverted = false;

        datatype = attrType;

        if (attrValue != null) {
            data = attrValue;
            originalBuf = attrValue;
            isDataLoaded = true;
        }
        properties = new HashMap();

        if (attrDims == null) {
            rank = 1;
            dims = new long[] { 1 };
        }
        else {
            dims = attrDims;
            rank = dims.length;
        }

        selectedDims = new long[rank];
        startDims = new long[rank];
        selectedStride = new long[rank];

        log.trace("attrName={}, attrType={}, attrValue={}, rank={}, isUnsigned={}",
                  attrName, getDatatype().getDescription(), data, rank, getDatatype().isUnsigned());

        resetSelection();
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#open()
     */
    @Override
    public long open() {
        long aid = -1;
        long pObjID = -1;

        if (parentObject == null) {
            log.debug("open(): attribute's parent object is null");
            return -1;
        }

        try {
            pObjID = parentObject.open();
            if (pObjID >= 0) {
                if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_NC3))) {
                    log.trace("open(): FILE_TYPE_NC3");
                    /*
                     * TODO: Get type of netcdf3 object this is attached to and retrieve attribute info.
                     */
                }
            }

            log.trace("open(): aid={}", aid);
        }
        catch (Exception ex) {
            log.debug("open(): Failed to open attribute {}: ", getName(), ex);
            aid = -1;
        }
        finally {
            parentObject.close(pObjID);
        }

        return aid;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#close(int)
     */
    @Override
    public void close(long aid) {
        if (aid >= 0) {
            if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_NC3))) {
                log.trace("close(): FILE_TYPE_NC3");
                /*
                 * TODO: Get type of netcdf3 object this is attached to and close attribute.
                 */
            }
        }
    }

    @Override
    public void init() {
        if (inited) {
            resetSelection();
            log.trace("init(): NC2Attribute already inited");
            return;
        }

        if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_NC3))) {
            log.trace("init(): FILE_TYPE_NC3");
            /*
             * TODO: If netcdf3 attribute object needs to init dependent objects.
             */
            inited = true;
        }

        resetSelection();
    }

    /**
     * Reads the data from file.
     *
     * read() reads the data from file to a memory buffer and returns the memory
     * buffer. The dataset object does not hold the memory buffer. To store the
     * memory buffer in the dataset object, one must call getData().
     *
     * By default, the whole dataset is read into memory. Users can also select
     * a subset to read. Subsetting is done in an implicit way.
     *
     * @return the data read from file.
     *
     * @see #getData()
     *
     * @throws Exception
     *             if object can not be read
     * @throws OutOfMemoryError
     *             if memory is exhausted
     */
    @Override
    public Object read() throws Exception, OutOfMemoryError {
        if (!inited)
            init();

        return data;
    }

    /* Implement abstract Dataset */

    /**
     * Writes a memory buffer to the object in the file.
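     *
     * As a minimal sketch (assuming {@code attr} is an initialized NC2Attribute and the new
     * buffer matches the attribute's datatype and size):
     *
     * <pre>
     * int[] newValue = { 0, 100 };
     * try {
     *     attr.write(newValue);
     * }
     * catch (Exception ex) {}
     * </pre>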
     *
     * @param buf
     *            the data to write
     *
     * @throws Exception
     *             if data can not be written
     */
    @Override
    public void write(Object buf) throws Exception {
        log.trace("function of dataset: write(Object) start");
        if (!buf.equals(data))
            setData(buf);

        init();

        if (parentObject == null) {
            log.debug("write(Object): parent object is null; nowhere to write attribute to");
            return;
        }
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#copy(hdf.object.Group, java.lang.String, long[], java.lang.Object)
     */
    @Override
    public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff) throws Exception {
        // not supported
        throw new UnsupportedOperationException("copy operation unsupported for NC2.");
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#readBytes()
     */
    @Override
    public byte[] readBytes() throws Exception {
        // not supported
        throw new UnsupportedOperationException("readBytes operation unsupported for NC2.");
    }

    /* Implement interface Attribute */

    /**
     * Returns the HObject to which this Attribute is currently "attached".
     *
     * @return the HObject to which this Attribute is currently "attached".
     */
    public HObject getParentObject() {
        return parentObject;
    }

    /**
     * Sets the HObject to which this Attribute is "attached".
     *
     * @param pObj
     *            the new HObject to which this Attribute is "attached".
     */
    public void setParentObject(HObject pObj) {
        parentObject = pObj;
    }

    /**
     * Set a property for the attribute.
     *
     * @param key the attribute Map key
     * @param value the attribute Map value
     */
    public void setProperty(String key, Object value) {
        properties.put(key, value);
    }

    /**
     * Get a property for a given key.
     *
     * @param key the attribute Map key
     *
     * @return the property
     */
    public Object getProperty(String key) {
        return properties.get(key);
    }

    /**
     * Get all property keys.
     *
     * @return the Collection of property keys
     */
    public Collection<String> getPropertyKeys() {
        return properties.keySet();
    }

    /**
     * Returns the name of the object. For example, "Raster Image #2".
     *
     * @return The name of the object.
     */
    public final String getAttributeName() {
        return getName();
    }

    /**
     * Retrieves the attribute data from the file.
     *
     * @return the attribute data.
     *
     * @throws Exception
     *             if the data can not be retrieved
     */
    public final Object getAttributeData() throws Exception, OutOfMemoryError {
        return getData();
    }

    /**
     * Returns the datatype of the attribute.
     *
     * @return the datatype of the attribute.
     */
    public final Datatype getAttributeDatatype() {
        return getDatatype();
    }

    /**
     * Returns the space type for the attribute. It returns a
     * negative number if it failed to retrieve the type information from
     * the file.
     *
     * @return the space type for the attribute.
     */
    public final int getAttributeSpaceType() {
        return getSpaceType();
    }

    /**
     * Returns the rank (number of dimensions) of the attribute. It returns a
     * negative number if it failed to retrieve the dimension information from
     * the file.
     *
     * @return the number of dimensions of the attribute.
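     *
     * A small sketch of querying the attribute's metadata (assuming {@code attr} is an
     * initialized NC2Attribute):
     *
     * <pre>
     * int rank = attr.getAttributeRank();
     * long[] dims = attr.getAttributeDims();
     * Datatype type = attr.getAttributeDatatype();
     * </pre>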
     */
    public final int getAttributeRank() {
        return getRank();
    }

    /**
     * Returns the selected size of the rows and columns of the attribute. It returns a
     * negative number if it failed to retrieve the size information from
     * the file.
     *
     * @return the selected size of the rows and columns of the attribute.
     */
    public final int getAttributePlane() {
        return (int)getWidth() * (int)getHeight();
    }

    /**
     * Returns the array that contains the dimension sizes of the data value of
     * the attribute. It returns null if it failed to retrieve the dimension
     * information from the file.
     *
     * @return the dimension sizes of the attribute.
     */
    public final long[] getAttributeDims() {
        return getDims();
    }

    /**
     * @return true if the data is a single scalar point; otherwise, returns
     *         false.
     */
    public boolean isAttributeScalar() {
        return isScalar();
    }

    /**
     * Not for public use in the future.
     *
     * setData() is not safe to use because it changes the memory buffer
     * of the dataset object. Dataset operations such as write/read
     * will fail if the buffer type or size is changed.
     *
     * @param d the object data - must be an array of Objects
     */
    public void setAttributeData(Object d) {
        setData(d);
    }

    /**
     * Writes the memory buffer of this dataset to file.
     *
     * @throws Exception if buffer can not be written
     */
    public void writeAttribute() throws Exception {
        write();
    }

    /**
     * Writes the given data buffer into this attribute in a file.
     *
     * The data buffer is a vector that contains the data values of compound fields. The data is written
     * into the file as one data blob.
     *
     * @param buf
     *            The vector that contains the data values of compound fields.
     *
     * @throws Exception
     *             If there is an error at the library level.
     */
    public void writeAttribute(Object buf) throws Exception {
        write(buf);
    }

    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter. For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} of three data points.
     *
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     *
     * @return the string representation of the data values.
     */
    public String toAttributeString(String delimiter) {
        return toString(delimiter, -1);
    }

    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter. For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} of three data points.
     *
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     * @param maxItems
     *            The maximum number of Array values to return
     *
     * @return the string representation of the data values.
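     *
     * As a brief sketch (assuming {@code attr} is an initialized NC2Attribute holding at least
     * two data points):
     *
     * <pre>
     * // Print at most two values, separated by commas, e.g. "0, 255"
     * System.out.println(attr.toAttributeString(", ", 2));
     * </pre>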
     */
    public String toAttributeString(String delimiter, int maxItems) {
        return toString(delimiter, maxItems);
    }
}