/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the files COPYING and Copyright.html. *
 * COPYING can be found at the root of the source code distribution tree.    *
 * Or, see https://support.hdfgroup.org/products/licenses.html               *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.nc2;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
import java.util.Vector;

import ucar.nc2.NetcdfFile;
import ucar.nc2.Variable;
import ucar.nc2.iosp.netcdf3.N3header;
import hdf.object.Attribute;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;

/**
 * This class provides file-level APIs. The file access APIs include retrieving
 * the file hierarchy, opening and closing the file, and writing file content to
 * disk.
 *
 * @version 2.4 9/4/2007
 * @author Peter X. Cao
 */
public class NC2File extends FileFormat {
    private static final long serialVersionUID = 6941235662108358451L;

    private static final org.slf4j.Logger   log = org.slf4j.LoggerFactory.getLogger(NC2File.class);

    /**
     * The root object of this file.
     */
    private HObject                         rootObject;

    /**
     * The list of unique (tag, ref) pairs. It is used to avoid duplicate
     * objects in memory.
     */
    @SuppressWarnings("rawtypes")
    private List                            objList;

    /** the netcdf file */
    private NetcdfFile                      ncFile;

    private static boolean isFileOpen;

    /**
     * Constructs an empty NC2File with read-only access.
     */
    public NC2File() {
        this("");
    }

    /**
     * Creates an NC2File object for the given file name with read-only access.
     *
     * @param fileName
     *            A valid file name, with a relative or absolute path.
     */
    public NC2File(String fileName) {
        super(fileName);

        isFileOpen = false;
        isReadOnly = true;
        objList = new Vector();
        ncFile = null;

        this.fid = -1;

        if ((fullFileName != null) && (fullFileName.length() > 0)) {
            try {
                log.trace("NetcdfFile:{}", fullFileName);
                ncFile = NetcdfFile.open(fullFileName);
                this.fid = 1;
            }
            catch (Exception ex) {
                log.trace("NC2File:{}", fullFileName, ex);
            }
        }
    }
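
    /*
     * Usage sketch (illustrative only; the file path below is hypothetical):
     * a typical read-only session opens the file, walks the tree from the
     * root object, and then closes it. Callers must handle the checked
     * exceptions declared by open() and close().
     *
     *   NC2File ncfile = new NC2File("/path/to/data.nc");
     *   ncfile.open();                         // builds the in-memory object tree
     *   HObject root = ncfile.getRootObject();
     *   ncfile.close();
     */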

    /**
     * Checks if the given file format is a NetCDF3 file.
     *
     * @param fileformat
     *            the fileformat to be checked.
     *
     * @return true if the given file is a NetCDF3 file; otherwise returns false.
     */
    @Override
    public boolean isThisType(FileFormat fileformat) {
        return (fileformat instanceof NC2File);
    }

    /**
     * Checks if the given file is a NetCDF-3 file.
     *
     * @param filename
     *            the file to be checked.
     *
     * @return true if the given file is a NetCDF-3 file; otherwise returns false.
     */
    @Override
    public boolean isThisType(String filename) {
        boolean isNetcdf = false;
        ucar.unidata.io.RandomAccessFile raf = null;

        try {
            raf = new ucar.unidata.io.RandomAccessFile(filename, "r");
        }
        catch (Exception ex) {
            log.trace("raf null - exit", ex);
            raf = null;
        }

        if (raf == null) {
            return false;
        }

        try {
            isNetcdf = N3header.isValidFile(raf);
        }
        catch (IOException e) {
            log.trace("raf isValidFile - failure", e);
            // close the file before giving up on a read failure
            try {
                raf.close();
            }
            catch (Exception ex) {
                log.trace("raf close:", ex);
            }
            return false;
        }

        try {
            raf.close();
        }
        catch (Exception ex) {
            log.trace("raf close:", ex);
        }

        log.trace("{} - isNetcdf:{}", filename, isNetcdf);
        return isNetcdf;
    }
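
    /*
     * Example (sketch; the file name is hypothetical): probing a candidate
     * file for a valid NetCDF-3 header before constructing an NC2File for it.
     *
     *   FileFormat nc = new NC2File();
     *   if (nc.isThisType("/path/to/candidate.nc")) {
     *       // the file can be opened as NetCDF-3
     *   }
     */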

    /**
     * Creates an NC2File instance with the specified file name and READ access.
     * Regardless of the specified access, the NC2File implementation uses READ.
     *
     * @see hdf.object.FileFormat#createInstance(java.lang.String, int)
     */
    @Override
    public FileFormat createInstance(String filename, int access)
            throws Exception {
        return new NC2File(filename);
    }

    // Implementing FileFormat
    @Override
    public long open() throws Exception {
        log.trace("open(): start isFileOpen={}", isFileOpen);

        if (!isFileOpen) {
            isFileOpen = true;
            rootObject = loadTree();
        }

        return 0;
    }

    private HObject loadTree() {
        long[] oid = { 0 };
        // root object does not have a parent path or a parent node
        NC2Group rootGroup = new NC2Group(this, "/", null, null, oid);

        if (ncFile == null) {
            return rootGroup;
        }

        log.trace("loadTree(): iterate members");
        Iterator<Variable> it = ncFile.getVariables().iterator();
        Variable ncDataset = null;
        NC2Dataset d = null;
        while (it.hasNext()) {
            ncDataset = it.next();
            oid[0] = ncDataset.hashCode();
            d = new NC2Dataset(this, ncDataset, oid);
            rootGroup.addToMemberList(d);
        }

        return rootGroup;
    }
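
    /*
     * Sketch (assuming ncfile is an opened NC2File; names are illustrative):
     * after open(), every NetCDF variable in the file appears as an NC2Dataset
     * member of the root group, so the flat member list can be inspected
     * directly.
     *
     *   NC2Group root = (NC2Group) ncfile.getRootObject();
     *   for (HObject member : root.getMemberList()) {
     *       System.out.println(member.getName());
     *   }
     */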

    // Implementing FileFormat
    @Override
    public void close() throws IOException {
        if (ncFile != null) {
            ncFile.close();
        }

        isFileOpen = false;
        fid = -1;
        objList = null;
    }

    // Implementing FileFormat
    @Override
    public HObject getRootObject() {
        return rootObject;
    }

    /**
     * @return the NetCDF file.
     */
    public NetcdfFile getNetcdfFile() {
        return ncFile;
    }

    @Override
    public Group createGroup(String name, Group pgroup) throws Exception {
        throw new UnsupportedOperationException("Unsupported operation - create group.");
    }

    @Override
    public Datatype createDatatype(int tclass, int tsize, int torder, int tsign)
            throws Exception {
        throw new UnsupportedOperationException("Unsupported operation - create datatype.");
    }

    @Override
    public Datatype createDatatype(int tclass, int tsize, int torder,
            int tsign, Datatype tbase) throws Exception {
        throw new UnsupportedOperationException("Unsupported operation - create datatype.");
    }

    @Override
    public Datatype createNamedDatatype(Datatype tnative, String name) throws Exception {
        throw new UnsupportedOperationException("netcdf3 does not support named datatype.");
    }

    @Override
    public Dataset createScalarDS(String name, Group pgroup, Datatype type,
            long[] dims, long[] maxdims, long[] chunks,
            int gzip, Object fillValue, Object data) throws Exception {
        throw new UnsupportedOperationException("Unsupported operation - create dataset.");
    }

    @Override
    public Dataset createImage(String name, Group pgroup, Datatype type,
            long[] dims, long[] maxdims, long[] chunks,
            int gzip, int ncomp, int interlace, Object data) throws Exception {
        throw new UnsupportedOperationException("Unsupported operation - create image.");
    }

    @Override
    public void delete(HObject obj) throws Exception {
        throw new UnsupportedOperationException("Unsupported operation - delete.");
    }

    @Override
    public HObject copy(HObject srcObj, Group dstGroup, String dstName)
            throws Exception {
        throw new UnsupportedOperationException("Unsupported operation - copy.");
    }

    @Override
    public void writeAttribute(HObject obj, hdf.object.Attribute attr, boolean attrExisted) throws Exception {
        throw new UnsupportedOperationException("Unsupported operation - write attribute.");
    }

    private HObject copyGroup(NC2Group srcGroup, NC2Group pgroup)
            throws Exception {
        throw new UnsupportedOperationException("Unsupported operation - copy group.");
    }

    private void copyDataset(Dataset srcDataset, NC2Group pgroup)
            throws Exception {
        throw new UnsupportedOperationException("Unsupported operation - copy dataset.");
    }

    /**
     * Copies the attributes of one object to another object.
     *
     * NetCDF-3 does not support attribute copying.
     *
     * @param src
     *            The source object.
     * @param dst
     *            The destination object.
     */
    public void copyAttributes(HObject src, HObject dst) {
        throw new UnsupportedOperationException("Unsupported operation - copy attributes with HObject.");
    }

    /**
     * Copies the attributes of one object to another object.
     *
     * NetCDF-3 does not support attribute copying.
     *
     * @param srcID
     *            The source identifier.
     * @param dstID
     *            The destination identifier.
     */
    public void copyAttributes(int srcID, int dstID) {
        throw new UnsupportedOperationException("Unsupported operation - copy attributes.");
    }

    /**
     * Converts a ucar.nc2.Attribute into an hdf.object.nc2.NC2Attribute.
     *
     * @param parent
     *            the parent object.
     * @param netcdfAttr
     *            the ucar.nc2.Attribute object.
     *
     * @return the hdf.object.nc2.NC2Attribute if successful; null otherwise.
     */
    public static hdf.object.nc2.NC2Attribute convertAttribute(HObject parent, ucar.nc2.Attribute netcdfAttr) {
        hdf.object.nc2.NC2Attribute ncsaAttr = null;

        if (netcdfAttr == null) {
            return null;
        }

        String attrName = netcdfAttr.getShortName();
        long[] attrDims = { netcdfAttr.getLength() };
        log.trace("convertAttribute(): attrName={} len={}", attrName, netcdfAttr.getLength());
        Datatype attrType = null;
        try {
            attrType = new NC2Datatype(netcdfAttr.getDataType());
        }
        catch (Exception ex) {
            attrType = null;
        }
        ncsaAttr = new hdf.object.nc2.NC2Attribute(parent, attrName, attrType, attrDims);
        Object[] attrValues = { netcdfAttr.getValue(0) };
        ncsaAttr.setData(attrValues);

        log.trace("convertAttribute(): finish data={}", netcdfAttr.getValue(0));
        return ncsaAttr;
    }
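
    /*
     * Example (sketch, as it might be used from within this class; it assumes
     * the netcdf-java getGlobalAttributes() call and illustrative names):
     * converting the file's global attributes with the helper above. Note
     * that only the first value of each attribute is carried over.
     *
     *   for (ucar.nc2.Attribute ncAttr : ncFile.getGlobalAttributes()) {
     *       NC2Attribute attr = convertAttribute(rootObject, ncAttr);
     *   }
     */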

    /**
     * Retrieves the file structure from disk into memory.
     *
     * First gets the top-level objects, i.e. objects that do not belong to any
     * group. If a top-level object is a group, depth_first() is called to
     * retrieve the sub-tree of that group, recursively.
     */
    private void loadIntoMemory() {
        if (fid < 0) {
            log.debug("loadIntoMemory(): Invalid File Id");
            return;
        }
    }

    /**
     * Retrieves the tree structure of the file in depth-first order. The
     * current implementation only retrieves groups and datasets.
     *
     * @param parentObj
     *            the parent object.
     */
    private void depth_first(HObject parentObj) {
        log.trace("depth_first(pobj = {})", parentObj);

        if (parentObj == null) {
            log.debug("depth_first(): Parent object is null");
            return;
        }
    } // private depth_first()

    /**
     * Returns a list of all the members of this NetCDF3 file, in breadth-first
     * order, rooted at the specified object.
     */
    private static List<HObject> getMembersBreadthFirst(HObject obj) {
        List<HObject> allMembers = new ArrayList<>();
        Queue<HObject> queue = new LinkedList<>();
        HObject currentObject = obj;

        queue.add(currentObject);

        while (!queue.isEmpty()) {
            currentObject = queue.remove();
            allMembers.add(currentObject);

            if (currentObject instanceof Group) {
                queue.addAll(((Group) currentObject).getMemberList());
            }
        }

        return allMembers;
    }
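
    /*
     * Illustration: for a root group "/" with members {g1, d1} and a subgroup
     * g1 containing {d2}, the breadth-first ordering returned above is
     * [/, g1, d1, d2].
     */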

    /**
     * Returns the version of the library.
     */
    @Override
    public String getLibversion() {
        return "NetCDF Java (version 4.3)";
    }

    // implementing FileFormat
    @Override
    public HObject get(String path) throws Exception {
        throw new UnsupportedOperationException("get() is not supported");
    }
}