/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the COPYING file, which can be found  *
 * at the root of the source code distribution tree,                         *
 * or in https://www.hdfgroup.org/licenses.                                  *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.nc2;

import java.util.List;
import java.util.Vector;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.MetaDataContainer;
import hdf.object.ScalarDS;

import ucar.ma2.DataType;
import ucar.nc2.Variable;
/**
 * NC2Dataset describes a multi-dimensional array of scalar or atomic data types, such as byte, int, short, long,
 * float, double and string, and the operations performed on the scalar dataset.
 *
 * The library predefines a modest number of datatypes. For details, read
 * <a href="https://hdfgroup.github.io/hdf5/_h5_t__u_g.html#sec_datatype">HDF5 Datatypes in HDF5 User Guide</a>
 *
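 * A minimal usage sketch; the file name and dataset path below are hypothetical
 * and the file is assumed to be a netCDF file readable by NC2File:
 * <pre>{@code
 * NC2File ncFile = new NC2File("sample.nc");
 * ncFile.open();
 * NC2Dataset dset = (NC2Dataset) ncFile.get("/temperature");
 * dset.init();
 * Object data = dset.getData();
 * ncFile.close();
 * }</pre>
 *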
 * @version 1.1 9/4/2007
 * @author Peter X. Cao
 */
public class NC2Dataset extends ScalarDS implements MetaDataContainer {
    private static final long serialVersionUID = -6031051694304457461L;

    private static final Logger   log = LoggerFactory.getLogger(NC2Dataset.class);

    /** Tag for netCDF datasets.
     *  The HDF4 library supports netCDF version 2.3.2, but only through the SDS APIs.
     */
    // magic number for netCDF: the decimal ASCII codes of 'C'(67), 'D'(68), 'F'(70) and the byte '\001', concatenated
    public static final int                 DFTAG_NDG_NETCDF = 67687001;

    /**
     * The list of attributes of this data object. Members of the list are
     * instances of Attribute.
     */
    @SuppressWarnings("rawtypes")
    private List                            attributeList;

    /** the native netCDF variable that backs this dataset */
    private Variable nativeDataset;

    /**
     * Constructs an NC2Dataset object for a specific netCDF variable.
     *
     * @param fileFormat
     *            the netCDF file.
     * @param ncDataset
     *            the netCDF variable.
     * @param oid
     *            the unique identifier of this data object.
     */
    public NC2Dataset(FileFormat fileFormat, Variable ncDataset, long[] oid) {
        super(fileFormat, ncDataset.getName(), HObject.SEPARATOR, oid);
        unsignedConverted = false;
        nativeDataset = ncDataset;
    }

    /**
     * Check if the object has any attributes attached.
     *
     * @return true if it has any attributes, false otherwise.
     */
    @Override
    public boolean hasAttribute() {
        return false;
    }

    // Implementing Dataset
    @Override
    public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff) throws Exception {
        // not supported
        throw new UnsupportedOperationException("Unsupported operation for NetCDF.");
    }

    // implementing Dataset
    @Override
    public byte[] readBytes() throws Exception {
        // not supported
        throw new UnsupportedOperationException("Unsupported operation for NetCDF.");
    }

    // Implementing DataFormat
    /**
     * Reads the data from file.
     *
     * read() reads the data from file to a memory buffer and returns the memory
     * buffer. The dataset object does not hold the memory buffer. To store the
     * memory buffer in the dataset object, one must call getData().
     *
     * By default, the whole dataset is read into memory. Users can also select
     * a subset to read. Subsetting is done in an implicit way.
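     *
     * For example, after init() has been called, a contiguous block can be
     * selected through the arrays returned by getStartDims() and
     * getSelectedDims() before calling read(). A minimal sketch, with
     * hypothetical offsets and sizes:
     * <pre>{@code
     * long[] start = dset.getStartDims();       // offset of the selection
     * long[] selected = dset.getSelectedDims(); // size of the selection
     * start[0] = 2;      start[1] = 0;          // skip the first two rows
     * selected[0] = 10;  selected[1] = 5;       // read a 10 x 5 block
     * Object buf = dset.read();
     * }</pre>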
     *
     * @return the data read from file.
     *
     * @see #getData()
     *
     * @throws Exception
     *             if the object cannot be read
     * @throws OutOfMemoryError
     *             if memory is exhausted
     */
    @Override
    public Object read() throws Exception {
        Object theData = null;

        if (nativeDataset == null)
            return null;

        int[] origin = new int[rank];
        int[] shape = new int[rank];

        for (int i = 0; i < rank; i++) {
            origin[i] = (int) startDims[i];
            shape[i] = (int) selectedDims[i];
            log.trace("read(): origin-shape [{}]={}-{}", i, origin[i], shape[i]);
        }

        ucar.ma2.Array ncArray = null;

        try {
            ncArray = nativeDataset.read(origin, shape);
        }
        catch (Exception ex) {
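            // the subset read failed; fall back to reading the whole variable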
            ncArray = nativeDataset.read();
        }
        Object oneD = ncArray.copyTo1DJavaArray();

        if (oneD == null)
            return null;

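        // A character array is reassembled into fixed-length strings, one per
        // selected row, using the selected width (columns) as the string length.
        // All other types are returned as the flattened 1-D Java array.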
        if (oneD.getClass().getName().startsWith("[C")) {
            char[] charA = (char[]) oneD;
            int nCols = (int) getWidth();
            int nRows = (int) getHeight();

            String[] strA = new String[nRows];
            String allStr = new String(charA);

            int indx0 = 0;
            for (int i = 0; i < nRows; i++) {
                indx0 = i * nCols;
                strA[i] = allStr.substring(indx0, indx0 + nCols);
            }
            theData = strA;
        }
        else {
            theData = oneD;
        }

        return theData;
    }

    // Implementing DataFormat
    /**
     * Writes a memory buffer to the object in the file.
     *
     * @param buf
     *            the data to write
     *
     * @throws Exception
     *             if the data cannot be written
     */
    @Override
    public void write(Object buf) throws Exception {
        // not supported
        throw new UnsupportedOperationException("Unsupported operation for NetCDF.");
    }

    // Implementing DataFormat
    /**
     * Retrieves the object's metadata, such as attributes, from the file.
     *
     * Metadata, such as attributes, is stored in a List.
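     *
     * A minimal sketch of listing the attributes; each list member is an
     * attribute object produced by NC2File.convertAttribute():
     * <pre>{@code
     * List attrs = dset.getMetadata();
     * for (Object attr : attrs) {
     *     System.out.println(attr);
     * }
     * }</pre>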
     *
     * @return the list of metadata objects.
     *
     * @throws Exception
     *             if the metadata cannot be retrieved
     */
    @Override
    @SuppressWarnings({"rawtypes", "unchecked"})
    public List getMetadata() throws Exception {
        if (attributeList != null)
            return attributeList;

        if (nativeDataset == null)
            return (attributeList = null);

        List ncAttrList = nativeDataset.getAttributes();
        if (ncAttrList == null)
            return (attributeList = null);

        int n = ncAttrList.size();
        attributeList = new Vector(n);
        ucar.nc2.Attribute ncAttr = null;
        for (int i = 0; i < n; i++) {
            ncAttr = (ucar.nc2.Attribute) ncAttrList.get(i);
            log.trace("getMetadata(): Attribute[{}]:{}", i, ncAttr.toString());
            attributeList.add(NC2File.convertAttribute(this, ncAttr));
        }

        return attributeList;
    }

    // implementing DataFormat
    /**
     * Writes a specific piece of metadata (such as an attribute) into the file.
     *
     * If an HDF(4&amp;5) attribute exists in the file, this method updates its
     * value. If the attribute does not exist in the file, it creates the
     * attribute in the file and attaches it to the object. It will fail to
     * write a new attribute to the object where an attribute with the same name
     * already exists. To update the value of an existing attribute in the file,
     * one needs to get the instance of the attribute by getMetadata(), change
     * its values, then use writeMetadata() to write the value.
     *
     * @param info
     *            the metadata to write.
     *
     * @throws Exception
     *             if the metadata cannot be written
     */
    @Override
    public void writeMetadata(Object info) throws Exception {
        // not supported
        throw new UnsupportedOperationException("Unsupported operation for NetCDF.");
    }

    // implementing DataFormat
    /**
     * Deletes an existing piece of metadata from this object.
     *
     * @param info
     *            the metadata to delete.
     *
     * @throws Exception
     *             if the metadata cannot be removed
     */
    @Override
    public void removeMetadata(Object info) throws Exception {
        // not supported
        throw new UnsupportedOperationException("Unsupported operation for NetCDF.");
    }

    /**
     * Updates an existing piece of metadata attached to this object.
     *
     * @param info
     *            the metadata to update.
     *
     * @throws Exception
     *             if the metadata cannot be updated
     */
    @Override
    public void updateMetadata(Object info) throws Exception {
        // not supported
        throw new UnsupportedOperationException("Unsupported operation for NetCDF.");
    }

    // Implementing HObject
    @Override
    public long open() {
        return -1;
    }

    // Implementing HObject
    @Override
    public void close(long did) {
    }

    /**
     * Retrieves and initializes the dimensions and member information.
     */
    @Override
    public void init() {
        if (nativeDataset == null)
            return;

        if (inited)
            return; // already called. Initialize only once

        isText = nativeDataset.getDataType().equals(DataType.STRING);
        boolean isChar = nativeDataset.getDataType().equals(DataType.CHAR);

        rank = nativeDataset.getRank();
        log.trace("init(): rank:{}", rank);

        if (rank == 0) {
            // a scalar data point
            isScalar = true;
            rank = 1;
            dims = new long[] { 1 };
        }
        else {
            isScalar = false;
            dims = new long[rank];
            for (int i = 0; i < rank; i++)
                dims[i] = nativeDataset.getDimension(i).getLength();
        }

        startDims = new long[rank];
        selectedDims = new long[rank];
        for (int i = 0; i < rank; i++) {
            startDims[i] = 0;
            selectedDims[i] = 1;
        }

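        // Default selection: expose the data as a 2-D slice. The first two
        // dimensions are selected in full; for rank > 2 the remaining
        // dimensions keep a selection size of 1 (a single frame).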
        if (rank == 1) {
            selectedIndex[0] = 0;
            selectedDims[0] = dims[0];
        }
        else if (rank == 2) {
            selectedIndex[0] = 0;
            selectedIndex[1] = 1;
            selectedDims[0] = dims[0];
            selectedDims[1] = dims[1];
        }
        else if (rank > 2) {
            selectedIndex[0] = 0;
            selectedIndex[1] = 1;
            selectedIndex[2] = 2;
            selectedDims[0] = dims[0];
            selectedDims[1] = dims[1];
        }

        if ((rank > 1) && isText)
            selectedDims[1] = 1;

        inited = true;
    }

    /**
     * Creates a new dataset.
     *
     * @param name
     *            the name of the dataset to create.
     * @param pgroup
     *            the parent group of the new dataset.
     * @param type
     *            the datatype of the dataset.
     * @param dims
     *            the dimension size of the dataset.
     * @param maxdims
     *            the max dimension size of the dataset.
     * @param chunks
     *            the chunk size of the dataset.
     * @param gzip
     *            the level of the gzip compression.
     * @param data
     *            the array of data values.
     *
     * @return the new dataset if successful. Otherwise returns null.
     *
     * @throws Exception
     *            if there is an error
     */
    public static NC2Dataset create(String name, Group pgroup, Datatype type,
            long[] dims, long[] maxdims, long[] chunks, int gzip, Object data) throws Exception {
        // not supported
        throw new UnsupportedOperationException("Unsupported operation for NetCDF.");
    }

    // implementing ScalarDS
    /**
     * Returns the datatype of the data object.
     *
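     * A brief sketch; the printed description string depends on the underlying
     * netCDF data type:
     * <pre>{@code
     * Datatype dtype = dset.getDatatype();
     * System.out.println(dtype.getDescription());
     * }</pre>
     *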
     * @return the datatype of the data object.
     */
    @Override
    public Datatype getDatatype() {
        if (datatype == null) {
            try {
                datatype = new NC2Datatype(nativeDataset.getDataType());
            }
            catch (Exception ex) {
                log.debug("getDatatype(): failed to create the NC2Datatype: ", ex);
                datatype = null;
            }
        }

        return datatype;
    }

    /**
     * Sets the name of the data object.
     *
     * @param newName
     *            the new name of the object.
     */
    @Override
    public void setName(String newName) throws Exception {
        // not supported
        throw new UnsupportedOperationException("Unsupported operation for NetCDF.");
    }

    // Implementing DataFormat
    /**
     * Retrieves the object's metadata, such as attributes, from the file.
     *
     * Metadata, such as attributes, is stored in a List.
     *
     * @param attrPropList
     *             the list of properties to get
     *
     * @return the list of metadata objects.
     *
     * @throws Exception
     *             if the metadata cannot be retrieved
     */
    public List getMetadata(int... attrPropList) throws Exception {
        throw new UnsupportedOperationException("getMetadata(int... attrPropList) is not supported");
    }
}