/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the files COPYING and Copyright.html. *
 * COPYING can be found at the root of the source code distribution tree.    *
 * Or, see https://support.hdfgroup.org/products/licenses.html               *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.nc2;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
import java.util.Vector;

import ucar.nc2.NetcdfFile;
import ucar.nc2.Variable;
import ucar.nc2.iosp.netcdf3.N3header;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;

/**
 * This class provides file level APIs. File access APIs include retrieving the
 * file hierarchy, opening and closing file, and writing file content to disk.
 * <p>
 * NetCDF3 access through this class is strictly read-only: all create, copy,
 * delete and write operations throw {@link UnsupportedOperationException}.
 *
 * @version 2.4 9/4/2007
 * @author Peter X. Cao
 */
public class NC2File extends FileFormat {
    private static final long serialVersionUID = 6941235662108358451L;

    private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(NC2File.class);

    /**
     * The root object of this file.
     */
    private HObject rootObject;

    /**
     * The list of unique (tag, ref) pairs. It is used to avoid duplicate
     * objects in memory.
     */
    private List<HObject> objList;

    /** the netcdf file */
    private NetcdfFile ncFile;

    /**
     * Whether this file's object tree has already been loaded by {@link #open()}.
     * NOTE: this was previously declared {@code static}, which made all NC2File
     * instances share open state — opening a second file would silently skip
     * loading its tree and leave its root object null. It must be per-instance.
     */
    private boolean isFileOpen;

    /**
     * Constructs an empty NC2File with read-only access.
     */
    public NC2File() {
        this("");
    }

    /**
     * Creates an NC2File object of given file name with read-only access.
     *
     * @param fileName
     *            A valid file name, with a relative or absolute path.
     */
    public NC2File(String fileName) {
        super(fileName);

        isFileOpen = false;
        isReadOnly = true;
        objList = new Vector<>();
        ncFile = null;

        // fid is set to 1 on a successful open as a simple "is open" sentinel;
        // NetCDF-Java does not expose a numeric file identifier.
        this.fid = -1;

        if ((fullFileName != null) && (fullFileName.length() > 0)) {
            try {
                log.trace("NetcdfFile:{}", fullFileName);
                ncFile = NetcdfFile.open(fullFileName);
                this.fid = 1;
            }
            catch (Exception ex) {
                // Best effort: an unreadable file simply leaves fid == -1.
                log.trace("NC2File:{}", fullFileName, ex);
            }
        }
    }

    /**
     * Checks if the given file format is a NetCDF3 file.
     *
     * @param fileformat
     *            the fileformat to be checked.
     *
     * @return true if the given file is a NetCDF3 file; otherwise returns false.
     */
    @Override
    public boolean isThisType(FileFormat fileformat) {
        return (fileformat instanceof NC2File);
    }

    /**
     * Checks if the given file is a NetCDF file.
     *
     * @param filename
     *            the file to be checked.
     *
     * @return true if the given file is a NetCDF file; otherwise returns false.
     */
    @Override
    public boolean isThisType(String filename) {
        boolean isNetcdf = false;
        ucar.unidata.io.RandomAccessFile raf = null;

        try {
            raf = new ucar.unidata.io.RandomAccessFile(filename, "r");
        }
        catch (Exception ex) {
            log.trace("raf null - exit", ex);
            raf = null;
        }

        if (raf == null) {
            return false;
        }

        // Close raf in a finally block so the file handle is not leaked when
        // N3header.isValidFile() throws (the previous code returned early
        // from the IOException catch without closing).
        try {
            isNetcdf = N3header.isValidFile(raf);
        }
        catch (IOException e) {
            log.trace("raf isValidFile - failure", e);
            isNetcdf = false;
        }
        finally {
            try {
                raf.close();
            }
            catch (Exception ex) {
                log.trace("raf close:", ex);
            }
        }

        log.trace("{} - isNetcdf:{}", filename, isNetcdf);
        return isNetcdf;
    }

    /**
     * Creates a NC2File instance with specified file name and READ access.
     * Regardless of specified access, the NC2File implementation uses READ.
     *
     * @see hdf.object.FileFormat#createInstance(java.lang.String, int)
     */
    @Override
    public FileFormat createInstance(String filename, int access)
            throws Exception {
        return new NC2File(filename);
    }

    // Implementing FileFormat
    @Override
    public long open() throws Exception {
        log.trace("open(): start isFileOpen={}", isFileOpen);

        // The tree is loaded at most once per instance; subsequent calls are no-ops.
        if (!isFileOpen) {
            isFileOpen = true;
            rootObject = loadTree();
        }

        return 0;
    }

    /**
     * Builds the in-memory object tree: a single root group whose members are
     * one NC2Dataset per NetCDF variable (NetCDF3 has no nested groups).
     *
     * @return the root group, empty if no underlying NetcdfFile is open.
     */
    private HObject loadTree() {
        long[] oid = { 0 };
        // root object does not have a parent path or a parent node
        NC2Group rootGroup = new NC2Group(this, "/", null, null, oid);

        if (ncFile == null) {
            return rootGroup;
        }

        log.trace("loadTree(): iterate members");
        for (Variable ncDataset : ncFile.getVariables()) {
            // The variable's hashCode stands in for an object identifier.
            oid[0] = ncDataset.hashCode();
            rootGroup.addToMemberList(new NC2Dataset(this, ncDataset, oid));
        }

        return rootGroup;
    }

    // Implementing FileFormat
    @Override
    public void close() throws IOException {
        if (ncFile != null) {
            ncFile.close();
        }

        isFileOpen = false;
        fid = -1;
        objList = null;
    }

    // Implementing FileFormat
    @Override
    public HObject getRootObject() {
        return rootObject;
    }

    /**
     * @return the underlying NetCDF-Java file handle, or null if the file
     *         could not be opened.
     */
    public NetcdfFile getNetcdfFile() {
        return ncFile;
    }

    @Override
    public Group createGroup(String name, Group pgroup) throws Exception {
        throw new UnsupportedOperationException("Unsupported operation - create group.");
    }

    @Override
    public Datatype createDatatype(int tclass, int tsize, int torder, int tsign)
            throws Exception {
        throw new UnsupportedOperationException("Unsupported operation - create datatype.");
    }

    @Override
    public Datatype createDatatype(int tclass, int tsize, int torder,
            int tsign, Datatype tbase) throws Exception {
        throw new UnsupportedOperationException("Unsupported operation - create datatype.");
    }

    @Override
    public Datatype createNamedDatatype(Datatype tnative, String name) throws Exception {
        throw new UnsupportedOperationException("netcdf3 does not support named datatype.");
    }

    @Override
    public Dataset createScalarDS(String name, Group pgroup, Datatype type,
            long[] dims, long[] maxdims, long[] chunks,
            int gzip, Object fillValue, Object data) throws Exception {
        throw new UnsupportedOperationException("Unsupported operation create dataset.");
    }

    @Override
    public Dataset createImage(String name, Group pgroup, Datatype type,
            long[] dims, long[] maxdims, long[] chunks,
            int gzip, int ncomp, int interlace, Object data) throws Exception {
        throw new UnsupportedOperationException("Unsupported operation create image.");
    }

    @Override
    public void delete(HObject obj) throws Exception {
        throw new UnsupportedOperationException("Unsupported operation.");
    }

    @Override
    public HObject copy(HObject srcObj, Group dstGroup, String dstName)
            throws Exception {
        throw new UnsupportedOperationException("Unsupported operation - copy.");
    }

    @Override
    public void writeAttribute(HObject obj, hdf.object.Attribute attr, boolean attrExisted) throws Exception {
        throw new UnsupportedOperationException("Unsupported operation - write attribute.");
    }

    private HObject copyGroup(NC2Group srcGroup, NC2Group pgroup)
            throws Exception {
        throw new UnsupportedOperationException("Unsupported operation - copy group.");
    }

    private void copyDataset(Dataset srcDataset, NC2Group pgroup)
            throws Exception {
        throw new UnsupportedOperationException("Unsupported operation - copy dataset.");
    }

    /*
     * Copy attributes of the source object to the destination object.
     */
    public void copyAttributes(HObject src, HObject dst) {
        throw new UnsupportedOperationException("Unsupported operation copy attributes with HObject.");
    }

    /*
     * Copy attributes of the source object to the destination object.
     */
    public void copyAttributes(int srcID, int dstID) {
        throw new UnsupportedOperationException("Unsupported operation - copy attributes.");
    }

    /**
     * Converts a ucar.nc2.Attribute into an hdf.object.Attribute.
     *
     * @param parent
     *            the HObject the converted attribute is attached to.
     * @param netcdfAttr
     *            the NetCDF-Java attribute to convert; may be null.
     *
     * @return the converted attribute, or null if netcdfAttr is null.
     */
    public static hdf.object.Attribute convertAttribute(HObject parent, ucar.nc2.Attribute netcdfAttr) {
        if (netcdfAttr == null) {
            return null;
        }

        String attrName = netcdfAttr.getShortName();
        long[] attrDims = { netcdfAttr.getLength() };
        log.trace("convertAttribute(): attrName={} len={}", attrName, netcdfAttr.getLength());

        Datatype attrType = null;
        try {
            attrType = new NC2Datatype(netcdfAttr.getDataType());
        }
        catch (Exception ex) {
            // An unmappable datatype leaves attrType null; the Attribute is
            // still created so the name/value remain visible to callers.
            attrType = null;
        }

        hdf.object.Attribute ncsaAttr = new hdf.object.Attribute(parent, attrName, attrType, attrDims);
        // NOTE(review): only the first value is copied even for multi-valued
        // attributes — presumably intentional for NetCDF3; confirm.
        Object[] attrValues = { netcdfAttr.getValue(0) };
        ncsaAttr.setData(attrValues);

        log.trace("convertAttribute(): finish data={}", netcdfAttr.getValue(0));
        return ncsaAttr;
    }

    /**
     * Retrieves the file structure from disk and returns the root object.
     * <p>
     * First gets the top level objects or objects that do not belong to any
     * groups. If a top level object is a group, call the depth_first() to
     * retrieve the sub-tree of that group, recursively.
     * <p>
     * Currently a stub: only validates the file id. Tree construction is done
     * in loadTree().
     */
    private void loadIntoMemory() {
        if (fid < 0) {
            log.debug("loadIntoMemory(): Invalid File Id");
            return;
        }
    }

    /**
     * Retrieves the tree structure of the file by depth-first order. The
     * current implementation only retrieves groups and datasets.
     * Currently a stub: only validates its argument.
     *
     * @param parentObj
     *            the parent object.
     */
    private void depth_first(HObject parentObj) {
        log.trace("depth_first(pobj = {})", parentObj);

        if (parentObj == null) {
            log.debug("depth_first(): Parent object is null");
            return;
        }
    } // private depth_first()

    /**
     * Returns a list of all the members of this NetCDF3 in a
     * breadth-first ordering that are rooted at the specified
     * object.
     *
     * @param obj
     *            the object the traversal starts from (included in the result).
     *
     * @return all reachable objects in breadth-first order.
     */
    private static List<HObject> getMembersBreadthFirst(HObject obj) {
        List<HObject> allMembers = new ArrayList<>();
        Queue<HObject> queue = new LinkedList<>();

        queue.add(obj);

        while (!queue.isEmpty()) {
            HObject currentObject = queue.remove();
            allMembers.add(currentObject);

            if (currentObject instanceof Group) {
                queue.addAll(((Group) currentObject).getMemberList());
            }
        }

        return allMembers;
    }

    /**
     * Returns the version of the library.
     */
    @Override
    public String getLibversion() {
        return "NetCDF Java (version 4.3)";
    }

    // implementing FileFormat
    @Override
    public HObject get(String path) throws Exception {
        throw new UnsupportedOperationException("get() is not supported");
    }
}