/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the files COPYING and Copyright.html. *
 * COPYING can be found at the root of the source code distribution tree.    *
 * Or, see https://support.hdfgroup.org/products/licenses.html               *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.fits;

import java.lang.reflect.Array;
import java.math.BigInteger;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import hdf.object.Attribute;
import hdf.object.DataFormat;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.ScalarDS;

/**
 * An attribute is a (name, value) pair of metadata attached to a primary data object such as a
 * dataset, group or named datatype.
 *
 * Like a dataset, an attribute has a name, datatype and dataspace.
 *
 * For more details on attributes, see the <a href=
 * "https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html">HDF5
 * User's Guide</a>.
 *
 * The following code is an example of an attribute with a 1D integer array of two elements.
 *
 * <pre>
 * // Example of creating a new attribute
 * // The name of the new attribute
 * String name = "Data range";
 * // Creating an unsigned 1-byte integer datatype
 * Datatype type = new Datatype(Datatype.CLASS_INTEGER, // class
 *                              1,                      // size in bytes
 *                              Datatype.ORDER_LE,      // byte order
 *                              Datatype.SIGN_NONE);    // unsigned
 * // 1-D array of size two
 * long[] dims = {2};
 * // The value of the attribute
 * int[] value = {0, 255};
 * // Create a new attribute attached to the parent object
 * FitsAttribute dataRange = new FitsAttribute(parentObj, name, type, dims);
 * // Set the attribute value
 * dataRange.setValue(value);
 * // See FileFormat.writeAttribute() for how to attach an attribute to an object.
 * </pre>
 *
 * For an atomic datatype, the value of a FitsAttribute will be a 1D array of integers, floats or
 * strings. For a compound datatype, it will be a 1D array of strings with field members separated
 * by a comma. For example, "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
 * float} of three data points.
 *
 * @see hdf.object.Datatype
 * @see hdf.object.FileFormat#writeAttribute(HObject, Attribute, boolean)
 *
 * @version 2.0 4/2/2018
 * @author Peter X. Cao, Jordan T. Henderson
 */
public class FitsAttribute extends ScalarDS implements Attribute {

    private static final long serialVersionUID = 2072473407027648309L;

    private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(FitsAttribute.class);

    /** The HObject to which this FitsAttribute is attached, Attribute interface */
    protected HObject parentObject;

    /** Additional information and properties for the attribute, Attribute interface */
    private transient Map<String, Object> properties;

    /**
     * Create an attribute with the specified name, datatype and dimension sizes.
     *
     * For a scalar attribute, the dimension size can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general class
     * and is independent of the file format, e.g., the implementation of attribute
     * applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = "CLASS";
     * String[] classValue = { "IMAGE" };
     * Datatype attrType = null;
     * try {
     *     attrType = new FitsDatatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * FitsAttribute attr = new FitsAttribute(parentObj, attrName, attrType, attrDims);
     * attr.setValue(classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this Attribute is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for a scalar attribute
     *
     * @see hdf.object.Datatype
     */
    public FitsAttribute(HObject parentObj, String attrName, Datatype attrType, long[] attrDims) {
        this(parentObj, attrName, attrType, attrDims, null);
    }

    /**
     * Create an attribute with a specific name and value.
     *
     * For a scalar attribute, the dimension size can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general class
     * and is independent of the file format, e.g., the implementation of attribute
     * applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = "CLASS";
     * String[] classValue = { "IMAGE" };
     * Datatype attrType = null;
     * try {
     *     attrType = new FitsDatatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * FitsAttribute attr = new FitsAttribute(parentObj, attrName, attrType, attrDims, classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this FitsAttribute is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for a scalar attribute
     * @param attrValue
     *            the value of the attribute, null if no value
     *
     * @see hdf.object.Datatype
     */
    @SuppressWarnings({ "rawtypes", "unchecked", "deprecation" })
    public FitsAttribute(HObject parentObj, String attrName, Datatype attrType, long[] attrDims, Object attrValue) {
        super((parentObj == null) ? null : parentObj.getFileFormat(), attrName,
              (parentObj == null) ? null : parentObj.getFullName(), null);

        log.trace("FitsAttribute: start {}", parentObj);
        this.parentObject = parentObj;

        unsignedConverted = false;

        datatype = attrType;

        if (attrValue != null) {
            data = attrValue;
            originalBuf = attrValue;
            isDataLoaded = true;
        }
        properties = new HashMap();

        if (attrDims == null) {
            rank = 1;
            dims = new long[] { 1 };
        }
        else {
            dims = attrDims;
            rank = dims.length;
        }

        selectedDims = new long[rank];
        startDims = new long[rank];
        selectedStride = new long[rank];

        log.trace("attrName={}, attrType={}, attrValue={}, rank={}, isUnsigned={}",
                attrName, getDatatype().getDescription(), data, rank, getDatatype().isUnsigned());

        resetSelection();
    }

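    /*
     * Illustrative sketch only (not part of the class API): constructing a scalar FitsAttribute
     * with a null dimension array, which the constructor above defaults to rank 1 and dims = {1}.
     * The parent object "hdu" is a hypothetical HObject obtained from an opened FITS file.
     *
     *     Datatype intType = null;
     *     try {
     *         intType = new FitsDatatype(Datatype.CLASS_INTEGER, 4, Datatype.NATIVE, Datatype.NATIVE);
     *     }
     *     catch (Exception ex) {}
     *     FitsAttribute naxis = new FitsAttribute(hdu, "NAXIS", intType, null, new int[] { 2 });
     *     // naxis.getAttributeRank() == 1 and naxis.getAttributeDims() is {1}
     */
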
    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#open()
     */
    @Override
    public long open() {
        if (parentObject == null) {
            log.debug("open(): attribute's parent object is null");
            return -1;
        }

        return -1;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#close(int)
     */
    @Override
    public void close(long aid) {
    }

    @Override
    public void init() {
        if (inited) {
            resetSelection();
            log.trace("init(): FitsAttribute already inited");
            return;
        }
    }

    /**
     * Reads the data from file.
     *
     * read() reads the data from file to a memory buffer and returns the memory
     * buffer. The dataset object does not hold the memory buffer. To store the
     * memory buffer in the dataset object, one must call getData().
     *
     * By default, the whole dataset is read into memory. Users can also select
     * a subset to read. Subsetting is done in an implicit way.
     *
     * @return the data read from file.
     *
     * @see #getData()
     *
     * @throws Exception
     *             if object can not be read
     * @throws OutOfMemoryError
     *             if memory is exhausted
     */
    @Override
    public Object read() throws Exception, OutOfMemoryError {
        if (!inited) init();

        return data;
    }

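    /*
     * Usage sketch (assumptions: "attr" is a hypothetical FitsAttribute taken from an object's
     * metadata; exception handling omitted): for a FITS attribute the value is already held in
     * memory, so read() simply returns the buffer, while getData()/getAttributeData() cache it
     * in this object.
     *
     *     Object value = attr.getAttributeData(); // equivalent to getData(), which calls read() as needed
     *     if (value instanceof int[]) {
     *         int[] ints = (int[]) value;
     *         System.out.println(attr.getAttributeName() + "[0] = " + ints[0]);
     *     }
     */
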
    /* Implement abstract Dataset */

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#copy(hdf.object.Group, java.lang.String, long[], java.lang.Object)
     */
    @Override
    public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff)
            throws Exception {
        // not supported
        throw new UnsupportedOperationException("copy operation unsupported for FITS.");
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#readBytes()
     */
    @Override
    public byte[] readBytes() throws Exception {
        // not supported
        throw new UnsupportedOperationException("readBytes operation unsupported for FITS.");
    }

    /**
     * Writes a memory buffer to the object in the file.
     *
     * @param buf
     *            the data to write
     *
     * @throws Exception
     *             if data can not be written
     */
    @Override
    public void write(Object buf) throws Exception {
        // not supported
        throw new UnsupportedOperationException("write operation unsupported for FITS.");
    }

    /**
     * Returns the HObject to which this Attribute is currently "attached".
     *
     * @return the HObject to which this Attribute is currently "attached".
     */
    public HObject getParentObject() {
        return parentObject;
    }

    /**
     * Sets the HObject to which this Attribute is "attached".
     *
     * @param pObj
     *            the new HObject to which this Attribute is "attached".
     */
    public void setParentObject(HObject pObj) {
        parentObject = pObj;
    }

    /**
     * Set a property for the attribute.
     *
     * @param key the attribute Map key
     * @param value the attribute Map value
     */
    public void setProperty(String key, Object value) {
        properties.put(key, value);
    }

    /**
     * Get a property for a given key.
     *
     * @param key the attribute Map key
     *
     * @return the property
     */
    public Object getProperty(String key) {
        return properties.get(key);
    }

    /**
     * Get all property keys.
     *
     * @return the Collection of property keys
     */
    public Collection<String> getPropertyKeys() {
        return properties.keySet();
    }

    /**
     * Returns the name of the object. For example, "Raster Image #2".
     *
     * @return The name of the object.
     */
    public final String getAttributeName() {
        return getName();
    }

    /**
     * Retrieves the attribute data from the file.
     *
     * @return the attribute data.
     *
     * @throws Exception
     *             if the data can not be retrieved
     */
    public final Object getAttributeData() throws Exception, OutOfMemoryError {
        return getData();
    }

    /**
     * Returns the datatype of the attribute.
     *
     * @return the datatype of the attribute.
     */
    public final Datatype getAttributeDatatype() {
        return getDatatype();
    }

    /**
     * Returns the space type for the attribute. It returns a
     * negative number if it failed to retrieve the type information from
     * the file.
     *
     * @return the space type for the attribute.
     */
    public final int getAttributeSpaceType() {
        return getSpaceType();
    }

    /**
     * Returns the rank (number of dimensions) of the attribute. It returns a
     * negative number if it failed to retrieve the dimension information from
     * the file.
     *
     * @return the number of dimensions of the attribute.
     */
    public final int getAttributeRank() {
        return getRank();
    }

    /**
     * Returns the selected size of the rows and columns of the attribute. It returns a
     * negative number if it failed to retrieve the size information from
     * the file.
     *
     * @return the selected size of the rows and columns of the attribute.
     */
    public final int getAttributePlane() {
        return (int)getWidth() * (int)getHeight();
    }

    /**
     * Returns the array that contains the dimension sizes of the data value of
     * the attribute. It returns null if it failed to retrieve the dimension
     * information from the file.
     *
     * @return the dimension sizes of the attribute.
     */
    public final long[] getAttributeDims() {
        return getDims();
    }

    /**
     * @return true if the data is a single scalar point; otherwise, returns
     *         false.
     */
    public boolean isAttributeScalar() {
        return isScalar();
    }

    /**
     * Not for public use in the future.
     *
     * setData() is not safe to use because it changes the memory buffer
     * of the dataset object. Dataset operations such as write/read
     * will fail if the buffer type or size is changed.
     *
     * @param d the object data - must be an array of Objects
     */
    public void setAttributeData(Object d) {
        setData(d);
    }

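    /*
     * Illustrative sketch (assumption: "attr" is a hypothetical FitsAttribute instance): dumping
     * an attribute's metadata with the accessors above; java.util.Arrays is used only for printing.
     *
     *     System.out.println("name = " + attr.getAttributeName());
     *     System.out.println("type = " + attr.getAttributeDatatype().getDescription());
     *     System.out.println("rank = " + attr.getAttributeRank());
     *     System.out.println("dims = " + java.util.Arrays.toString(attr.getAttributeDims()));
     */
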
    /**
     * Writes the memory buffer of this dataset to file.
     *
     * @throws Exception if buffer can not be written
     */
    public void writeAttribute() throws Exception {
        write();
    }

    /**
     * Writes the given data buffer into this attribute in a file.
     *
     * The data buffer is a vector that contains the data values of compound fields. The data is written
     * into file as one data blob.
     *
     * @param buf
     *            The vector that contains the data values of compound fields.
     *
     * @throws Exception
     *             If there is an error at the library level.
     */
    public void writeAttribute(Object buf) throws Exception {
        write(buf);
    }

    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter. For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} of three data points.
     *
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     *
     * @return the string representation of the data values.
     */
    public String toAttributeString(String delimiter) {
        return toString(delimiter, -1);
    }

    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter. For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} of three data points.
     *
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     * @param maxItems
     *            The maximum number of Array values to return
     *
     * @return the string representation of the data values.
     */
    public String toAttributeString(String delimiter, int maxItems) {
        return toString(delimiter, maxItems);
    }
}
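/*
 * Formatting sketch for the toAttributeString() overloads above, assuming "attr" holds the 1D
 * integer value {0, 255} from the class-level example; the exact output depends on the inherited
 * toString() implementation.
 *
 *     String all = attr.toAttributeString(", ");   // expected to yield something like "0, 255"
 *     String one = attr.toAttributeString(", ", 1); // formats at most one array value
 */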