/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the files COPYING and Copyright.html. *
 * COPYING can be found at the root of the source code distribution tree.    *
 * Or, see https://support.hdfgroup.org/products/licenses.html               *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.nc2;

import java.util.List;
import java.util.Vector;

import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.ScalarDS;
import ucar.ma2.DataType;
import ucar.nc2.Variable;

/**
 * NC2Dataset describes a multi-dimensional array of scalar or atomic data
 * types, such as byte, int, short, long, float, double and string, and the
 * operations performed on the dataset.
 * <p>
 * The library predefines a modest number of datatypes. For details, read <a
 * href="https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/HDF5_Users_Guide/Datatypes/HDF5_Datatypes.htm">HDF5 Datatypes</a>.
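 * <p>
 * A minimal usage sketch (the file name and variable path below are
 * illustrative, not part of this API):
 * <pre>{@code
 * NC2File ncFile = new NC2File("sample.nc");
 * ncFile.open();
 * NC2Dataset dset = (NC2Dataset) ncFile.get("/temperature");
 * dset.init();               // set up rank, dims and the default selection
 * Object data = dset.read(); // 1D Java array of the selected data
 * }</pre>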
 *
 * @version 1.1 9/4/2007
 * @author Peter X. Cao
 */
public class NC2Dataset extends ScalarDS {
    private static final long serialVersionUID = -6031051694304457461L;

    private static final org.slf4j.Logger   log = org.slf4j.LoggerFactory.getLogger(NC2Dataset.class);

    /** Tag for netCDF datasets.
     *  The HDF4 library supports netCDF version 2.3.2, but only through the SDS APIs.
     */
    // magic number for netCDF: "C(67) D(68) F(70) '\001'"
    public static final int                 DFTAG_NDG_NETCDF = 67687001;

    /**
     * The list of attributes of this data object. Members of the list are
     * instances of Attribute.
     */
    @SuppressWarnings("rawtypes")
    private List                            attributeList;

    /** The underlying netCDF variable that holds this dataset's data. */
    private Variable nativeDataset;

    /**
     * Constructs an NC2Dataset object from a specific netCDF variable.
     *
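     * Instances are typically created by NC2File while it loads the file
     * structure, rather than constructed directly by applications.
     *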
     * @param fileFormat
     *            the netCDF file that contains the variable.
     * @param ncDataset
     *            the netCDF variable.
     * @param oid
     *            the unique identifier of this data object.
     */
    public NC2Dataset(FileFormat fileFormat, Variable ncDataset, long[] oid) {
        super(fileFormat, ncDataset.getName(), HObject.SEPARATOR, oid);
        unsignedConverted = false;
        nativeDataset = ncDataset;
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.DataFormat#hasAttribute()
     */
    @Override
    public boolean hasAttribute() {
        return false;
    }

    // Implementing Dataset
    @Override
    public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff)
            throws Exception {
        // not supported
        throw new UnsupportedOperationException("Unsupported operation for NetCDF.");
    }

    // Implementing Dataset
    @Override
    public byte[] readBytes() throws Exception {
        // not supported
        throw new UnsupportedOperationException("Unsupported operation for NetCDF.");
    }

    // Implementing DataFormat
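    /**
     * Reads the current data selection of this dataset into a one-dimensional
     * Java array.
     * <p>
     * A sketch of handling the result (the element type depends on the
     * variable's datatype; float is only an example):
     * <pre>{@code
     * Object data = dset.read();
     * if (data instanceof float[]) {
     *     float[] values = (float[]) data;
     *     // process values ...
     * }
     * }</pre>
     */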
    @Override
    public Object read() throws Exception {
        Object theData = null;

        if (nativeDataset == null) {
            return null;
        }

        int[] origin = new int[rank];
        int[] shape = new int[rank];

        for (int i = 0; i < rank; i++) {
            origin[i] = (int) startDims[i];
            shape[i] = (int) selectedDims[i];
            log.trace("read(): origin-shape [{}]={}-{}", i, origin[i], shape[i]);
        }

        ucar.ma2.Array ncArray = null;

        try {
            ncArray = nativeDataset.read(origin, shape);
        }
        catch (Exception ex) {
            // fall back to reading the entire variable if the subset read fails
            ncArray = nativeDataset.read();
        }
        Object oneD = ncArray.copyTo1DJavaArray();

        if (oneD == null) {
            return null;
        }

        if (oneD.getClass().getName().startsWith("[C")) {
            // repack a char array into fixed-length strings, one per row of
            // the selection
            char[] charA = (char[]) oneD;
            int nCols = (int) selectedDims[selectedIndex[1]];
            int nRows = (int) selectedDims[selectedIndex[0]];

            String[] strA = new String[nRows];
            String allStr = new String(charA);

            int indx0 = 0;
            for (int i = 0; i < nRows; i++) {
                indx0 = i * nCols;
                strA[i] = allStr.substring(indx0, indx0 + nCols);
            }
            theData = strA;
        }
        else {
            theData = oneD;
        }

        return theData;
    }

    // Implementing DataFormat
    @Override
    public void write(Object buf) throws Exception {
        // not supported
        throw new UnsupportedOperationException("Unsupported operation for NetCDF.");
    }

    // Implementing DataFormat
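    /**
     * Returns the list of attributes of this dataset, converted from the
     * underlying netCDF attributes on first access and cached afterwards.
     * <p>
     * A short sketch of listing the attributes (printing relies only on
     * Object.toString()):
     * <pre>{@code
     * List attrs = dset.getMetadata();
     * if (attrs != null) {
     *     for (Object attr : attrs)
     *         System.out.println(attr);
     * }
     * }</pre>
     */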
    @Override
    public List getMetadata() throws Exception {
        if (attributeList != null) {
            return attributeList;
        }

        if (nativeDataset == null) {
            return (attributeList = null);
        }

        List ncAttrList = nativeDataset.getAttributes();
        if (ncAttrList == null) {
            return (attributeList = null);
        }

        int n = ncAttrList.size();
        attributeList = new Vector(n);
        ucar.nc2.Attribute ncAttr = null;
        for (int i = 0; i < n; i++) {
            ncAttr = (ucar.nc2.Attribute) ncAttrList.get(i);
            log.trace("getMetadata(): Attribute[{}]:{}", i, ncAttr.toString());
            // convert each netCDF attribute into an hdf.object Attribute
            attributeList.add(NC2File.convertAttribute(this, ncAttr));
        }

        return attributeList;
    }

    // Implementing DataFormat
    @Override
    public void writeMetadata(Object info) throws Exception {
        // not supported
        throw new UnsupportedOperationException("Unsupported operation for NetCDF.");
    }

    // Implementing DataFormat
    @Override
    public void removeMetadata(Object info) throws Exception {
        // not supported
        throw new UnsupportedOperationException("Unsupported operation for NetCDF.");
    }

    // Implementing DataFormat
    @Override
    public void updateMetadata(Object info) throws Exception {
        // not supported
        throw new UnsupportedOperationException("Unsupported operation for NetCDF.");
    }

    // Implementing HObject
    @Override
    public long open() {
        return -1;
    }

    // Implementing HObject
    @Override
    public void close(long did) {
    }

    /**
     * Retrieves and initializes the rank, dimension sizes, and default data
     * selection of this dataset.
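     * <p>
     * A sketch of narrowing the selection after initialization (the offsets
     * and sizes below are illustrative):
     * <pre>{@code
     * dset.init();
     * long[] start = dset.getStartDims();       // selection offsets
     * long[] selected = dset.getSelectedDims(); // selection sizes
     * start[0] = 10;     // skip the first 10 rows
     * selected[0] = 100; // read 100 rows
     * Object data = dset.read();
     * }</pre>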
     */
    @Override
    public void init() {
        if (nativeDataset == null) {
            return;
        }

        if (inited) {
            return; // already called. Initialize only once
        }

        isText = nativeDataset.getDataType().equals(DataType.STRING);
        boolean isChar = nativeDataset.getDataType().equals(DataType.CHAR);

        rank = nativeDataset.getRank();
        log.trace("init(): rank:{}", rank);

        if (rank == 0) {
            // a scalar data point
            rank = 1;
            dims = new long[1];
            dims[0] = 1;
        }
        else {
            dims = new long[rank];
            for (int i = 0; i < rank; i++) {
                dims[i] = (nativeDataset.getDimension(i).getLength());
            }
        }

        startDims = new long[rank];
        selectedDims = new long[rank];
        for (int i = 0; i < rank; i++) {
            startDims[i] = 0;
            selectedDims[i] = 1;
        }

        // by default, select the whole dataset for rank <= 2 and the first
        // 2D plane for higher ranks
        if (rank == 1) {
            selectedIndex[0] = 0;
            selectedDims[0] = dims[0];
        }
        else if (rank == 2) {
            selectedIndex[0] = 0;
            selectedIndex[1] = 1;
            selectedDims[0] = dims[0];
            selectedDims[1] = dims[1];
        }
        else if (rank > 2) {
            selectedIndex[0] = 0;
            selectedIndex[1] = 1;
            selectedIndex[2] = 2;
            selectedDims[0] = dims[0];
            selectedDims[1] = dims[1];
        }

        // for string data, default to a single element along the second dimension
        if ((rank > 1) && isText) {
            selectedDims[1] = 1;
        }

        inited = true;
    }

    // Implementing ScalarDS
    @Override
    public byte[][] getPalette() {
        if (palette == null) {
            palette = readPalette(0);
        }

        return palette;
    }

    /**
     * Reads a specific image palette from the file.
     * <p>
     * NetCDF datasets do not carry image palettes, so this implementation
     * always returns null.
     *
     * @param idx
     *            the index of the palette to read
     * @return the palette data as a two-dimensional byte array (byte[3][256]),
     *         or null if there is no palette
     */
    @Override
    public byte[][] readPalette(int idx) {
        return null;
    }

    /**
     * Creates a new dataset.
     *
     * @param name
     *            the name of the dataset to create.
     * @param pgroup
     *            the parent group of the new dataset.
     * @param type
     *            the datatype of the dataset.
     * @param dims
     *            the dimension sizes of the dataset.
     * @param maxdims
     *            the max dimension sizes of the dataset.
     * @param chunks
     *            the chunk sizes of the dataset.
     * @param gzip
     *            the level of the gzip compression.
     * @param data
     *            the array of data values.
     *
     * @return the new dataset if successful. Otherwise returns null.
     *
     * @throws Exception
     *            always, because creating datasets is not supported for NetCDF
     */
    public static NC2Dataset create(String name, Group pgroup, Datatype type,
            long[] dims, long[] maxdims, long[] chunks, int gzip, Object data)
            throws Exception {
        // not supported
        throw new UnsupportedOperationException("Unsupported operation for NetCDF.");
    }

    /**
     * Returns the byte array of palette refs, or null if there is no palette
     * attribute attached to this dataset.
     */
    @Override
    public byte[] getPaletteRefs() {
        return null;
    }

    // Implementing ScalarDS
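    /**
     * Returns the datatype of this dataset, lazily constructed from the
     * underlying netCDF variable's native type.
     * <p>
     * A short sketch of inspecting the result (a null check is needed because
     * the type mapping can fail):
     * <pre>{@code
     * Datatype dtype = dset.getDatatype();
     * if (dtype != null)
     *     System.out.println(dtype.getDatatypeDescription());
     * }</pre>
     */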
    @Override
    public Datatype getDatatype() {
        if (datatype == null) {
            try {
                datatype = new NC2Datatype(nativeDataset.getDataType());
            }
            catch (Exception ex) {
                // the netCDF type could not be mapped to an NC2Datatype
                datatype = null;
            }
        }

        return datatype;
    }

    /**
     * Sets the name of the data object.
     *
     * @param newName
     *            the new name of the object.
     *
     * @throws Exception
     *            always; renaming is not supported for NetCDF.
     */
    @Override
    public void setName(String newName) throws Exception {
        // not supported
        throw new UnsupportedOperationException("Unsupported operation for NetCDF.");
    }

    // Implementing DataFormat
    public List getMetadata(int... attrPropList) throws Exception {
        throw new UnsupportedOperationException("getMetadata(int... attrPropList) is not supported");
    }

}