/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the files COPYING and Copyright.html. *
 * COPYING can be found at the root of the source code distribution tree.    *
 * Or, see https://support.hdfgroup.org/products/licenses.html               *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.fits;

import java.lang.reflect.Array;
import java.util.Iterator;
import java.util.List;
import java.util.Vector;

import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.ScalarDS;
import hdf.object.MetaDataContainer;

import hdf.object.fits.FitsAttribute;

import nom.tam.fits.BasicHDU;
import nom.tam.fits.Header;
import nom.tam.fits.HeaderCard;

/**
 * FitsDataset describes a multi-dimensional array of scalar or atomic data
 * types, such as byte, int, short, long, float, double and string,
 * and the operations performed on such a dataset.
 *
 * The library predefines a modest number of datatypes. For details, read <a
 * href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/HDF5_Users_Guide/Datatypes/HDF5_Datatypes.htm">
 * The Datatype Interface (H5T)</a>
 *
 * @version 1.1 9/4/2007
 * @author Peter X. Cao
 */
public class FitsDataset extends ScalarDS implements MetaDataContainer
{
    private static final long serialVersionUID = 3944770379558335171L;

    private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(FitsDataset.class);

    /**
     * The list of attributes of this data object. Members of the list are
     * instances of FitsAttribute. Lazily populated by getMetadata().
     */
    private List<FitsAttribute> attributeList;

    /** The native FITS HDU backing this dataset; may be null. */
    private BasicHDU nativeDataset;

    /**
     * Constructs a FitsDataset object for a specific FITS HDU.
     *
     * @param fileFormat the FITS file that contains this dataset.
     * @param hdu the BasicHDU backing this dataset.
     * @param dName the name for this dataset.
     * @param oid the unique identifier for this dataset.
     */
    public FitsDataset(FileFormat fileFormat, BasicHDU hdu, String dName, long[] oid) {
        super(fileFormat, dName, HObject.SEPARATOR, oid);
        unsignedConverted = false;
        nativeDataset = hdu;
    }

    /**
     * Check if the object has any attributes attached.
     *
     * @return true if it has any attributes, false otherwise.
     */
    @Override
    public boolean hasAttribute() {
        // NOTE(review): always false even though getMetadata() can return header
        // cards as attributes — preserved as-is; confirm whether this is intended.
        return false;
    }

    // Implementing Dataset
    @Override
    public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff) throws Exception {
        // not supported
        throw new UnsupportedOperationException("copy operation unsupported for FITS.");
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#readBytes()
     */
    @Override
    public byte[] readBytes() throws Exception {
        // not supported
        throw new UnsupportedOperationException("readBytes operation unsupported for FITS.");
    }

    /**
     * Reads the data from file.
     *
     * read() reads the data from file to a memory buffer and returns the memory
     * buffer. The dataset object does not hold the memory buffer. To store the
     * memory buffer in the dataset object, one must call getData().
     *
     * By default, the whole dataset is read into memory. Users can also select
     * a subset to read. Subsetting is done in an implicit way.
     *
     * @return the data read from file, flattened into a 1D array whose element
     *         type is derived from the HDU's BITPIX value; null if there is no
     *         native dataset.
     *
     * @see #getData()
     *
     * @throws Exception
     *             if object can not be read
     * @throws OutOfMemoryError
     *             if memory is exhausted
     */
    @Override
    public Object read() throws Exception {
        Object theData = null;
        Object fitsData = null;

        if (nativeDataset == null)
            return null;

        try {
            fitsData = nativeDataset.getData().getData();
        }
        catch (Exception ex) {
            throw new UnsupportedOperationException("This implementation only supports integer and float dataset. " +
                    "It may not work for other datatypes. \n"+ex);
        }

        // Count the total number of elements, allocate a matching flat array,
        // then copy the (possibly multi-dimensional) FITS data into it.
        int n = get1DLength(fitsData);

        theData = FitsDatatype.allocateArray(nativeDataset.getBitPix(), n);

        to1Darray(fitsData, theData, 0);

        return theData;
    }

    /**
     * Writes a memory buffer to the object in the file.
     *
     * @param buf
     *            the data to write
     *
     * @throws Exception
     *             if data can not be written
     */
    @Override
    public void write(Object buf) throws Exception {
        // not supported
        throw new UnsupportedOperationException("write operation unsupported for FITS.");
    }

    /**
     * Retrieves the object's metadata, such as attributes, from the file.
     *
     * Each FITS header card is exposed as a FitsAttribute whose string value is
     * "value / comment". The list is built once and cached.
     *
     * @return the list of metadata objects, or null if there is no native
     *         dataset or it has no header.
     *
     * @throws Exception
     *             if the metadata can not be retrieved
     */
    @SuppressWarnings("rawtypes")
    public List getMetadata() throws Exception {
        if (attributeList != null)
            return attributeList;

        if (nativeDataset == null)
            return null;

        Header header = nativeDataset.getHeader();
        if (header == null)
            return null;

        attributeList = new Vector<>();
        HeaderCard hc = null;
        Iterator it = header.iterator();
        FitsAttribute attr = null;
        // All header cards are exposed as fixed-length (80-char) strings,
        // matching the FITS card width.
        Datatype dtype = new FitsDatatype(Datatype.CLASS_STRING, 80, 0, 0);
        long[] dims = {1};
        String value = null;
        while (it.hasNext()) {
            value = "";
            hc = (HeaderCard)it.next();
            attr = new FitsAttribute(this, hc.getKey(), dtype, dims);
            String tvalue = hc.getValue();
            if (tvalue != null)
                value += tvalue;
            tvalue = hc.getComment();
            if (tvalue != null)
                value += " / " + tvalue;
            attr.setAttributeData(value);
            attributeList.add(attr);
        }

        return attributeList;
    }

    /**
     * Writes a specific piece of metadata (such as an attribute) into the file.
     *
     * If an HDF(4&amp;5) attribute exists in the file, this method updates its
     * value. If the attribute does not exist in the file, it creates the
     * attribute in the file and attaches it to the object. It will fail to
     * write a new attribute to the object where an attribute with the same name
     * already exists. To update the value of an existing attribute in the file,
     * one needs to get the instance of the attribute by getMetadata(), change
     * its values, then use writeMetadata() to write the value.
     *
     * @param info
     *            the metadata to write.
     *
     * @throws Exception
     *             if the metadata can not be written
     */
    public void writeMetadata(Object info) throws Exception {
        // not supported
        throw new UnsupportedOperationException("writeMetadata operation unsupported for FITS.");
    }

    /**
     * Deletes an existing piece of metadata from this object.
     *
     * @param info
     *            the metadata to delete.
     *
     * @throws Exception
     *             if the metadata can not be removed
     */
    public void removeMetadata(Object info) throws Exception {
        // not supported
        throw new UnsupportedOperationException("removeMetadata operation unsupported for FITS.");
    }

    /**
     * Updates an existing piece of metadata attached to this object.
     *
     * @param info
     *            the metadata to update.
     *
     * @throws Exception
     *             if the metadata can not be updated
     */
    public void updateMetadata(Object info) throws Exception {
        // not supported
        throw new UnsupportedOperationException("updateMetadata operation unsupported for FITS.");
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.HObject#open()
     */
    @Override
    public long open() {
        // FITS objects have no native handle to open.
        return -1;
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.HObject#close(int)
     */
    @Override
    public void close(long did) {
        // Nothing to implement
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#init()
     */
    @Override
    public void init() {
        if (nativeDataset == null)
            return;

        if (inited)
            return; // already called. Initialize only once

        int[] axes= null;
        try {
            axes = nativeDataset.getAxes();
        }
        catch (Exception ex) {
            log.debug("nativeDataset.getAxes():", ex);
        }

        if (axes == null)
            return;

        rank = axes.length;
        if (rank == 0) {
            // a scalar data point
            isScalar = true;
            rank = 1;
            dims = new long[] { 1 };
        }
        else {
            isScalar = false;
            dims = new long[rank];
            for (int i=0; i<rank; i++)
                dims[i] = axes[i];
        }

        startDims = new long[rank];
        selectedDims = new long[rank];
        for (int i=0; i<rank; i++) {
            startDims[i] = 0;
            selectedDims[i] = 1;
        }

        // Default selection: the whole array for rank<=2; for rank>2 a 2D slice
        // (the remaining selectedDims stay at 1 from the loop above).
        if (rank == 1) {
            selectedIndex[0] = 0;
            selectedDims[0] = dims[0];
        }
        else if (rank == 2) {
            selectedIndex[0] = 0;
            selectedIndex[1] = 1;
            selectedDims[0] = dims[0];
            selectedDims[1] = dims[1];
        }
        else if (rank > 2) {
            selectedIndex[0] = 0;
            selectedIndex[1] = 1;
            selectedIndex[2] = 2;
            selectedDims[0] = dims[0];
            selectedDims[1] = dims[1];
        }

        if ((rank > 1) && isText)
            selectedDims[1] = 1;

        inited = true;
    }

    /* Implement abstract ScalarDS */

    /**
     * Creates a new dataset.
     *
     * @param name the name of the dataset to create.
     * @param pgroup the parent group of the new dataset.
     * @param type the datatype of the dataset.
     * @param dims the dimension size of the dataset.
     * @param maxdims the max dimension size of the dataset.
     * @param chunks the chunk size of the dataset.
     * @param gzip the level of the gzip compression.
     * @param data the array of data values.
     *
     * @return the new dataset if successful. Otherwise returns null.
     *
     * @throws Exception
     *             if there is an error
     */
    public static FitsDataset create(String name, Group pgroup, Datatype type,
            long[] dims, long[] maxdims, long[] chunks, int gzip, Object data) throws Exception {
        // not supported
        throw new UnsupportedOperationException("Unsupported operation for FITS.");
    }

    /**
     * Returns the datatype of the data object.
     *
     * @return the datatype of the data object, derived from the HDU's BITPIX
     *         value; null if it cannot be created (including when there is no
     *         native dataset — the resulting NPE is caught and logged).
     */
    @Override
    public Datatype getDatatype() {
        if (datatype == null) {
            try {
                datatype = new FitsDatatype(nativeDataset.getBitPix());
            }
            catch (Exception ex) {
                log.debug("getDatatype(): failed to create datatype: ", ex);
                datatype = null;
            }
        }

        return datatype;
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.HObject#setName(java.lang.String)
     */
    @Override
    public void setName (String newName) throws Exception {
        // not supported
        throw new UnsupportedOperationException("Unsupported operation for FITS.");
    }

    /**
     * Counts the total number of leaf elements in a (possibly nested) array.
     * A non-array object counts as a single element.
     */
    private int get1DLength(Object data) throws Exception {
        if (!data.getClass().isArray())
            return 1;

        int len = Array.getLength(data);

        // Fast path: a leaf array (e.g. int[], float[]) contributes exactly its
        // length. This avoids boxing every element via Array.get() during the
        // recursion below.
        if (!data.getClass().getComponentType().isArray())
            return len;

        int total = 0;
        for (int i = 0; i < len; i++)
            total += get1DLength(Array.get(data, i));

        return total;
    }

    /** copy multi-dimension array of fits data into 1D array */
    private int to1Darray(Object dataIn, Object dataOut, int offset) throws Exception {
        Class component = dataIn.getClass().getComponentType();
        if (component == null)
            return offset;

        int size = Array.getLength(dataIn);
        if (!component.isArray()) {
            System.arraycopy(dataIn, 0, dataOut, offset, size);
            return offset+size;
        }

        // NOTE(review): sub-arrays are copied in REVERSE index order. This looks
        // deliberate (possibly to match FITS axis ordering) — confirm before
        // changing to a forward loop.
        for (int i = size - 1; i >= 0; i--)
            offset = to1Darray(Array.get(dataIn, i), dataOut, offset);

        return offset;
    }

    //Implementing DataFormat
    /* FITS does not support metadata */
    /**
     * Retrieves the object's metadata, such as attributes, from the file.
     *
     * Metadata, such as attributes, is stored in a List.
     *
     * @param attrPropList
     *            the list of properties to get
     *
     * @return the list of metadata objects.
     *
     * @throws Exception
     *             if the metadata can not be retrieved
     */
    public List getMetadata(int... attrPropList) throws Exception {
        throw new UnsupportedOperationException("getMetadata(int... attrPropList) is not supported");
    }
}