/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the COPYING file, which can be found  *
 * at the root of the source code distribution tree,                         *
 * or in https://www.hdfgroup.org/licenses.                                  *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.nc2;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
import java.util.Vector;

import hdf.object.Attribute;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import ucar.nc2.NetcdfFile;
import ucar.nc2.Variable;
import ucar.nc2.iosp.netcdf3.N3header;

/**
 * This class provides file-level APIs. File access APIs include retrieving the
 * file hierarchy, opening and closing files, and writing file content to disk.
 * The NetCDF3 implementation is read-only; operations that create or modify
 * objects throw UnsupportedOperationException.
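 *
 * <p>
 * A minimal usage sketch (the file path below is a placeholder, not part of
 * the original source; exception handling is omitted):
 *
 * <pre>
 * NC2File ncfile = new NC2File("/path/to/data.nc");
 * ncfile.open();
 * HObject root = ncfile.getRootObject();
 * // ... browse the members of the root group ...
 * ncfile.close();
 * </pre>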
 *
 * @version 2.4 9/4/2007
 * @author Peter X. Cao
 */
public class NC2File extends FileFormat {
    private static final long serialVersionUID = 6941235662108358451L;

    private static final Logger log = LoggerFactory.getLogger(NC2File.class);

    /**
     * The root object of this file.
     */
    private HObject rootObject;

    /**
     * The list of unique (tag, ref) pairs. It is used to avoid duplicate
     * objects in memory.
     */
    @SuppressWarnings("rawtypes")
    private List objList;

    /** The NetCDF file. */
    private NetcdfFile ncFile;

    private static boolean isFileOpen;

    /**
     * Constructs an empty NC2File with read-only access.
     */
    public NC2File() { this(""); }

    /**
     * Creates an NC2File object of the given file name with read-only access.
     *
     * @param fileName
     *            A valid file name, with a relative or absolute path.
     */
    public NC2File(String fileName)
    {
        super(fileName);

        isFileOpen = false;
        isReadOnly = true;
        objList = new Vector();
        ncFile = null;

        this.fid = -1;

        if ((fullFileName != null) && (fullFileName.length() > 0)) {
            try {
                log.trace("NetcdfFile:{}", fullFileName);
                ncFile = NetcdfFile.open(fullFileName);
                this.fid = 1;
            }
            catch (Exception ex) {
                log.trace("NC2File:{}", fullFileName, ex);
            }
        }
    }

    /**
     * Checks if the given file format is the NetCDF3 file format.
     *
     * @param fileformat
     *            the fileformat to be checked.
     *
     * @return true if the given file format is a NetCDF3 file format; otherwise returns false.
     */
    @Override
    public boolean isThisType(FileFormat fileformat)
    {
        return (fileformat instanceof NC2File);
    }

    /**
     * Checks if the given file is a NetCDF file.
     *
     * @param filename
     *            the file to be checked.
     *
     * @return true if the given file is a NetCDF file; otherwise returns false.
     */
    @Override
    public boolean isThisType(String filename)
    {
        boolean isNetcdf = false;
        ucar.unidata.io.RandomAccessFile raf = null;

        try {
            raf = new ucar.unidata.io.RandomAccessFile(filename, "r");
        }
        catch (Exception ex) {
            log.trace("raf null - exit", ex);
            raf = null;
        }

        if (raf == null) {
            return false;
        }

        try {
            isNetcdf = N3header.isValidFile(raf);
        }
        catch (IOException e) {
            log.trace("raf isValidFile - failure", e);
            return false;
        }

        try {
            raf.close();
        }
        catch (Exception ex) {
            log.trace("raf close:", ex);
        }

        log.trace("{} - isNetcdf:{}", filename, isNetcdf);
        return isNetcdf;
    }

    /**
     * Creates an NC2File instance with the specified file name and READ access.
     * Regardless of the specified access, the NC2File implementation uses READ.
     *
     * @see hdf.object.FileFormat#createInstance(java.lang.String, int)
     */
    @Override
    public FileFormat createInstance(String filename, int access) throws Exception
    {
        return new NC2File(filename);
    }

    // Implementing FileFormat
    @Override
    public long open() throws Exception
    {
        log.trace("open(): start isFileOpen={}", isFileOpen);

        if (!isFileOpen) {
            isFileOpen = true;
            rootObject = loadTree();
        }

        return 0;
    }

    /**
     * Builds the in-memory object tree: a root group containing one NC2Dataset
     * for each variable in the NetCDF file.
     */
    private HObject loadTree()
    {
        long[] oid = {0};
        // root object does not have a parent path or a parent node
        NC2Group rootGroup = new NC2Group(this, "/", null, null, oid);

        if (ncFile == null) {
            return rootGroup;
        }

        log.trace("loadTree(): iterate members");
        Iterator it = ncFile.getVariables().iterator();
        Variable ncDataset = null;
        NC2Dataset d = null;
        while (it.hasNext()) {
            ncDataset = (Variable)it.next();
            oid[0] = ncDataset.hashCode();
            d = new NC2Dataset(this, ncDataset, oid);
            rootGroup.addToMemberList(d);
        }

        return rootGroup;
    }

    // Implementing FileFormat
    @Override
    public void close() throws IOException
    {
        if (ncFile != null) {
            ncFile.close();
        }

        isFileOpen = false;
        fid = -1;
        objList = null;
    }

    // Implementing FileFormat
    @Override
    public HObject getRootObject()
    {
        return rootObject;
    }

    /**
     * Gets the NetCDF file.
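     *
     * <p>
     * The returned object gives direct access to the underlying ucar.nc2 API,
     * for example (sketch only; {@code ncfile} stands for an open NC2File
     * instance):
     *
     * <pre>
     * NetcdfFile nc = ncfile.getNetcdfFile();
     * int numberOfVariables = nc.getVariables().size();
     * </pre>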
     *
     * @return the NetCDF file.
     */
    public NetcdfFile getNetcdfFile() { return ncFile; }

    @Override
    public Group createGroup(String name, Group pgroup) throws Exception
    {
        throw new UnsupportedOperationException("Unsupported operation - create group.");
    }

    @Override
    public Datatype createDatatype(int tclass, int tsize, int torder, int tsign) throws Exception
    {
        throw new UnsupportedOperationException("Unsupported operation - create datatype.");
    }

    @Override
    public Datatype createDatatype(int tclass, int tsize, int torder, int tsign, Datatype tbase)
        throws Exception
    {
        throw new UnsupportedOperationException("Unsupported operation - create datatype.");
    }

    @Override
    public Datatype createNamedDatatype(Datatype tnative, String name) throws Exception
    {
        throw new UnsupportedOperationException("netcdf3 does not support named datatype.");
    }

    @Override
    public Dataset createScalarDS(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims,
                                  long[] chunks, int gzip, Object fillValue, Object data) throws Exception
    {
        throw new UnsupportedOperationException("Unsupported operation create dataset.");
    }

    @Override
    public Dataset createImage(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims,
                               long[] chunks, int gzip, int ncomp, int interlace, Object data) throws Exception
    {
        throw new UnsupportedOperationException("Unsupported operation create image.");
    }

    @Override
    public void delete(HObject obj) throws Exception
    {
        throw new UnsupportedOperationException("Unsupported operation.");
    }

    @Override
    public HObject copy(HObject srcObj, Group dstGroup, String dstName) throws Exception
    {
        throw new UnsupportedOperationException("Unsupported operation - copy.");
    }

    @Override
    public void writeAttribute(HObject obj, hdf.object.Attribute attr, boolean attrExisted) throws Exception
    {
        throw new UnsupportedOperationException("Unsupported operation - write attribute.");
    }

    private HObject copyGroup(NC2Group srcGroup, NC2Group pgroup) throws Exception
    {
        throw new UnsupportedOperationException("Unsupported operation - copy group.");
    }

    private void copyDataset(Dataset srcDataset, NC2Group pgroup) throws Exception
    {
        throw new UnsupportedOperationException("Unsupported operation - copy dataset.");
    }

    /**
     * Copies the attributes of one object to another object.
     *
     * NC3 does not support attribute copying.
     *
     * @param src
     *            The source object.
     * @param dst
     *            The destination object.
     */
    public void copyAttributes(HObject src, HObject dst)
    {
        throw new UnsupportedOperationException("Unsupported operation copy attributes with HObject.");
    }

    /**
     * Copies the attributes of one object to another object.
     *
     * NC3 does not support attribute copying.
     *
     * @param srcID
     *            The source identifier.
     * @param dstID
     *            The destination identifier.
     */
    public void copyAttributes(int srcID, int dstID)
    {
        throw new UnsupportedOperationException("Unsupported operation - copy attributes.");
    }

    /**
     * Converts a ucar.nc2.Attribute object into an hdf.object.nc2.NC2Attribute.
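     *
     * <p>
     * Sketch of a typical call ({@code ncfile} and {@code parentObject} stand
     * for an open NC2File and a parent HObject; the attribute name is
     * illustrative only):
     *
     * <pre>
     * ucar.nc2.Attribute ncAttr = ncfile.getNetcdfFile().findGlobalAttribute("title");
     * NC2Attribute attr = NC2File.convertAttribute(parentObject, ncAttr);
     * </pre>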
     *
     * @param parent
     *            the parent object.
     * @param netcdfAttr
     *            the ucar.nc2.Attribute object.
     *
     * @return the hdf.object.nc2.NC2Attribute if successful
     */
    public static hdf.object.nc2.NC2Attribute convertAttribute(HObject parent, ucar.nc2.Attribute netcdfAttr)
    {
        hdf.object.nc2.NC2Attribute ncsaAttr = null;

        if (netcdfAttr == null) {
            return null;
        }

        String attrName = netcdfAttr.getShortName();
        long[] attrDims = {netcdfAttr.getLength()};
        log.trace("convertAttribute(): attrName={} len={}", attrName, netcdfAttr.getLength());
        Datatype attrType = null;
        try {
            attrType = new NC2Datatype(netcdfAttr.getDataType());
        }
        catch (Exception ex) {
            attrType = null;
        }
        ncsaAttr = new hdf.object.nc2.NC2Attribute(parent, attrName, attrType, attrDims);
        // only the first value of the attribute is converted
        Object[] attrValues = {netcdfAttr.getValue(0)};
        ncsaAttr.setData(attrValues);

        log.trace("convertAttribute(): finish data={}", netcdfAttr.getValue(0));
        return ncsaAttr;
    }

    /**
     * Retrieves the file structure from disk and loads it into memory.
     *
     * It first gets the top-level objects, i.e. objects that do not belong to
     * any group. If a top-level object is a group, depth_first() is called to
     * retrieve the sub-tree of that group recursively.
     */
    private void loadIntoMemory()
    {
        if (fid < 0) {
            log.debug("loadIntoMemory(): Invalid File Id");
            return;
        }
    }

    /**
     * Retrieves the tree structure of the file in depth-first order. The
     * current implementation only retrieves groups and datasets.
     *
     * @param parentObj
     *            the parent object.
     */
    private void depth_first(HObject parentObj)
    {
        log.trace("depth_first(pobj = {})", parentObj);

        if (parentObj == null) {
            log.debug("depth_first(): Parent object is null");
            return;
        }
    } // private depth_first()

    /**
     * Returns a list of all the members of this NetCDF3 file, in breadth-first
     * order, rooted at the specified object.
     */
    private static List<HObject> getMembersBreadthFirst(HObject obj)
    {
        List<HObject> allMembers = new ArrayList<>();
        Queue<HObject> queue = new LinkedList<>();
        HObject currentObject = obj;

        queue.add(currentObject);

        while (!queue.isEmpty()) {
            currentObject = queue.remove();
            allMembers.add(currentObject);

            if (currentObject instanceof Group) {
                queue.addAll(((Group)currentObject).getMemberList());
            }
        }

        return allMembers;
    }

    /**
     * Returns the version of the library.
     */
    @Override
    public String getLibversion()
    {
        return "NetCDF Java (version 4.3)";
    }

    // implementing FileFormat
    @Override
    public HObject get(String path) throws Exception
    {
        throw new UnsupportedOperationException("get() is not supported");
    }
}