hdf-java/ 0000755 0051151 0001133 00000000000 11352224122 011000 5 ustar xcao hdf hdf-java/test/ 0000755 0051151 0001133 00000000000 11352224122 011757 5 ustar xcao hdf hdf-java/test/unittests/ 0000755 0051151 0001133 00000000000 11352224121 014020 5 ustar xcao hdf hdf-java/test/unittests/H5TestFile.java 0000755 0051151 0001133 00000026273 11352224121 016614 0 ustar xcao hdf /**
*
*/
package test.unittests;
import java.util.Vector;
import ncsa.hdf.object.*;
import ncsa.hdf.object.h5.*;
/**
* Creates an HDF5 file for unit tests.
*
* @author xcao
*
*/
public class H5TestFile {
    // ---------------------------------------------------------------------
    // Names (absolute HDF5 paths) of the objects created in the test file.
    // ---------------------------------------------------------------------
    // the test file itself
    public final static String NAME_FILE_H5="TestHDF5.h5";
    // groups
    public final static String NAME_GROUP = "/g0";
    public final static String NAME_GROUP_ATTR = "/g0_attr";
    public final static String NAME_GROUP_SUB = NAME_GROUP+"/g00";
    // scalar datasets at the root
    public final static String NAME_DATASET_INT = "/dataset_int";
    public final static String NAME_DATASET_FLOAT = "/dataset_float";
    public final static String NAME_DATASET_CHAR = "/dataset_byte";
    public final static String NAME_DATASET_STR = "/dataset_str";
    public final static String NAME_DATASET_ENUM = "/dataset_enum";
    public final static String NAME_DATASET_IMAGE = "/dataset_image";
    // NOTE(review): "palete" spelling kept — it is the on-disk object name.
    public final static String NAME_DATASET_IMAGE_PALETTE = "/wave_palete";
    public final static String NAME_DATASET_OBJ_REF = "/dataset_obj_ref";
    public final static String NAME_DATASET_COMPOUND = "/dataset_comp";
    // datasets nested inside groups
    public final static String NAME_DATASET_INT_SUB = NAME_GROUP + "/dataset_int";
    public final static String NAME_DATASET_FLOAT_SUB_SUB = NAME_GROUP_SUB+ "/dataset_float";
    public final static String NAME_DATASET_COMPOUND_SUB = NAME_GROUP + "/dataset_comp";
    // named (committed) datatypes
    public final static String NAME_DATATYPE_INT = NAME_GROUP + "/datatype_int";
    public final static String NAME_DATATYPE_UINT = NAME_GROUP + "/datatype_uint";
    public final static String NAME_DATATYPE_FLOAT = NAME_GROUP + "/datatype_float";
    public final static String NAME_DATATYPE_STR = NAME_GROUP + "/datatype_str";
    // hard link to the image dataset
    public final static String NAME_HARD_LINK_TO_IMAGE = "a_link_to_the_image";
    // every named object above, for tests that walk the whole file
    public final static String OBJ_NAMES[] = {NAME_GROUP, NAME_GROUP_ATTR, NAME_GROUP_SUB,
            NAME_DATASET_INT, NAME_DATASET_FLOAT, NAME_DATASET_CHAR, NAME_DATASET_STR,
            NAME_DATASET_ENUM, NAME_DATASET_IMAGE, NAME_DATASET_COMPOUND, NAME_DATASET_INT_SUB,
            NAME_DATASET_FLOAT_SUB_SUB, NAME_DATASET_COMPOUND_SUB, NAME_DATATYPE_INT,
            NAME_DATATYPE_UINT, NAME_DATATYPE_FLOAT, NAME_DATATYPE_STR, NAME_DATASET_OBJ_REF};
// data space information
public final static int DATATYPE_SIZE = 4;
public final static int RANK = 2;
public final static long DIM1 = 50;
public final static long DIM2 = 10;
public static final long DIM3 = 20;
public final static long[] DIMs = {DIM1, DIM2};
public final static long[] CHUNKs = {DIM1/2, DIM2/2};
public final static int STR_LEN = 20;
public final static int DIM_SIZE = (int)(DIM1*DIM2);;
    /* testing data */
    // One flat array of DIM_SIZE (= DIM1*DIM2) elements per dataset type.
    // NOTE(review): the arrays are only allocated here — presumably filled
    // elsewhere in this class (the initializing code is outside this view).
    public final static int[] DATA_INT = new int[DIM_SIZE];
    public final static long[] DATA_LONG = new long[DIM_SIZE];
    public final static float[] DATA_FLOAT = new float[DIM_SIZE];
    public final static byte[] DATA_BYTE = new byte[DIM_SIZE];
    public final static String[] DATA_STR = new String[DIM_SIZE];
    public final static int[] DATA_ENUM = new int[DIM_SIZE];
    // Raw Vector holding the compound dataset's data; contents populated
    // elsewhere (not visible here).
    public final static Vector DATA_COMP = new Vector(3);
    // 256-entry RGB palette for the image dataset, flattened to 768 bytes
    // (see createWavePalette()).
    public final static byte[] DATA_PALETTE = createWavePalette();
    // compound names and datatypes (member i of the compound dataset has
    // name COMPOUND_MEMBER_NAMES[i] and type COMPOUND_MEMBER_DATATYPES[i])
    public final static String[] COMPOUND_MEMBER_NAMES = {"int32", "float32", "string", "uint32"};
    public final static H5Datatype[] COMPOUND_MEMBER_DATATYPES = {
            new H5Datatype(Datatype.CLASS_INTEGER, DATATYPE_SIZE, -1, -1),
            new H5Datatype(Datatype.CLASS_FLOAT, DATATYPE_SIZE, -1, -1),
            new H5Datatype(Datatype.CLASS_STRING, STR_LEN, -1, -1),
            new H5Datatype(Datatype.CLASS_INTEGER, DATATYPE_SIZE, -1, Datatype.SIGN_NONE)};
    // attributes attached to test objects
    // a single fixed-length string attribute
    public final static Attribute ATTRIBUTE_STR = new Attribute(
            "strAttr",
            new H5Datatype(Datatype.CLASS_STRING, STR_LEN, -1, -1),
            new long[] {1},
            new String[] {"String attribute."});
    // a 10-element int attribute holding the values 1..10
    public final static Attribute ATTRIBUTE_INT_ARRAY = new Attribute(
            "arrayInt",
            new H5Datatype(Datatype.CLASS_INTEGER, DATATYPE_SIZE, -1, -1),
            new long[] {10},
            new int[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10});
/**
* Creates an HDF5 test file.
*
* The test file contains the following objects:
*
/dataset_byte Dataset {50, 10}
/dataset_comp Dataset {50, 10}
/dataset_enum Dataset {50, 10}
/dataset_float Dataset {50, 10}
/dataset_int Dataset {50, 10}
/dataset_image Dataset {50, 10}
/dataset_str Dataset {50, 10}
/g0 Group
/g0/dataset_int Dataset {50, 10}
/g0/g00 Group
/g0/g00/dataset_float Dataset {50, 10}
/g0_attr Group
*
* @throws Exception
*/
public static final H5File createTestFile(String fileName) throws Exception
{
H5File file=null;
Group g0, g1, g00;
final Dataset[] dsets = new Dataset[11];
if ((fileName == null) || (fileName.length()<1)) {
fileName = NAME_FILE_H5;
}
final H5Datatype typeInt = new H5Datatype(Datatype.CLASS_INTEGER, DATATYPE_SIZE, -1, -1);
final H5Datatype typeByte = new H5Datatype(Datatype.CLASS_INTEGER, 1, -1, Datatype.SIGN_NONE);
final H5Datatype typeFloat = new H5Datatype(Datatype.CLASS_FLOAT, DATATYPE_SIZE, -1, -1);
final H5Datatype typeStr = new H5Datatype(Datatype.CLASS_STRING, STR_LEN, -1, -1);
final H5Datatype typeChar = new H5Datatype(Datatype.CLASS_CHAR, 1, -1, -1);
final H5Datatype typeEnum = new H5Datatype(Datatype.CLASS_ENUM, DATATYPE_SIZE, -1, -1);
final H5Datatype typeRef = new H5Datatype(Datatype.CLASS_REFERENCE, -1, -1, -1);
for (int i=0; i
* The palette values are stored in a two-dimensional byte array and arrange
* by color components of red, green and blue. palette[][] = byte[3][256],
* where, palette[0][], palette[1][] and palette[2][] are the red, green and
* blue components respectively.
* @return the wave palette in the form of byte[3][256]
*/
/**
 * Builds the "wave" palette as a flat array of 256 interleaved RGB triplets
 * (768 bytes total): entry i occupies indices 3*i .. 3*i+2. Entry 0 is
 * forced to black and entry 255 to white; the 254 entries in between are
 * generated from sine waves over the entry index.
 *
 * @return the palette as a 768-byte array of interleaved R,G,B values
 */
private static final byte[] createWavePalette()
{
    final byte[] palette = new byte[768]; // 256 entries * 3 components
    int idx = 3;                          // start at entry 1; entry 0 stays black
    for (int entry = 1; entry < 255; entry++) {
        final double t = (double) entry / 40;
        palette[idx++] = (byte) ((Math.sin(t - 3.2) + 1) * 128);            // red
        palette[idx++] = (byte) ((1 - Math.sin(entry / 2.55 - 3.1)) * 70 + 30); // green
        palette[idx++] = (byte) ((1 - Math.sin(t - 3.1)) * 128);            // blue
    }
    // pin the endpoints: entry 0 = black, entry 255 = white
    palette[0] = palette[1] = palette[2] = 0;
    palette[765] = palette[766] = palette[767] = (byte) 255;
    return palette;
}
}
hdf-java/test/unittests/DataFormatTest.java 0000755 0051151 0001133 00000012243 11352224121 017552 0 ustar xcao hdf /**
*
*/
package test.unittests;
import ncsa.hdf.hdf5lib.H5;
import ncsa.hdf.hdf5lib.HDF5Constants;
import ncsa.hdf.object.Attribute;
import ncsa.hdf.object.DataFormat;
import ncsa.hdf.object.Datatype;
import ncsa.hdf.object.FileFormat;
import ncsa.hdf.object.h5.H5Datatype;
import ncsa.hdf.object.h5.H5File;
import junit.framework.TestCase;
import java.util.List;
/**
* @author rsinha
*
*/
/**
 * Unit tests for the {@link ncsa.hdf.object.DataFormat} interface, exercised
 * through the attributed group (/g0_attr) of the HDF5 test file created by
 * {@link H5TestFile}.
 *
 * NOTE(review): the metadata tests are order-dependent — they assume the
 * attribute list order (string attribute at index 0, int-array attribute at
 * index 1), and testRemoveMetadata assumes the "CLASS" attribute written by
 * testWriteMetadata is present at index 2. Confirm the test runner preserves
 * method order before reordering or running methods in isolation.
 *
 * @author rsinha
 */
public class DataFormatTest extends TestCase {
    // Factory instance used only to open the test file.
    private static final H5File H5FILE = new H5File();
    // File under test; opened read-write in setUp() and closed in tearDown().
    private H5File testFile = null;
    // The attributed group /g0_attr, viewed through the DataFormat interface.
    private DataFormat testGroup = null;

    /**
     * @param arg0 name of the test method to run
     */
    public DataFormatTest(String arg0) {
        super(arg0);
    }

    /* (non-Javadoc)
     * Opens the test file read-write and fetches the attributed group.
     * @see junit.framework.TestCase#setUp()
     */
    protected void setUp() throws Exception {
        super.setUp();
        testFile = (H5File)H5FILE.open(H5TestFile.NAME_FILE_H5, FileFormat.WRITE);
        assertNotNull(testFile);
        testGroup = testFile.get(H5TestFile.NAME_GROUP_ATTR);
        assertNotNull(testGroup);
    }

    /* (non-Javadoc)
     * Checks that no HDF5 handles besides the file id itself remain open,
     * then closes the file.
     * @see junit.framework.TestCase#tearDown()
     */
    protected void tearDown() throws Exception {
        super.tearDown();
        final int fid = testFile.getFID();
        if (fid > 0) {
            int nObjs = 0;
            try { nObjs = H5.H5Fget_obj_count(fid, HDF5Constants.H5F_OBJ_ALL); }
            catch (final Exception ex) { fail("H5.H5Fget_obj_count() failed. "+ ex); }
            assertEquals(1, nObjs); // file id should be the only one left open
        }
        if (testFile != null) {
            try { testFile.close(); } catch (final Exception ex) {} // best-effort close
            testFile = null;
        }
    }

    /**
     * Test method for {@link ncsa.hdf.object.DataFormat#getFile()}.
     *
     * Checks that the group reports the file name it was read from.
     */
    public final void testGetFile() {
        if (!testGroup.getFile().equals(H5TestFile.NAME_FILE_H5)) {
            fail("getFile() fails.");
        }
    }

    /**
     * Test method for {@link ncsa.hdf.object.DataFormat#getMetadata()}.
     *
     * Reads the attributes of /g0_attr and checks their values: index 0 is
     * the string attribute, index 1 the 10-element int-array attribute
     * (matching H5TestFile.ATTRIBUTE_STR / ATTRIBUTE_INT_ARRAY).
     */
    public final void testGetMetadata() {
        Attribute strAttr = null;
        Attribute arrayIntAttr = null;
        List mdataList = null;
        try {
            mdataList = testGroup.getMetadata();
        }
        catch (final Exception ex) {
            fail("getMetadata() failed. " + ex );
        }
        // assumed list order: string attribute first, int array second
        strAttr = (Attribute) mdataList.get(0);
        arrayIntAttr = (Attribute) mdataList.get(1);
        String[] value = (String[]) strAttr.getValue();
        if (!value[0].equals("String attribute.")) {
            fail("getMdata() failed.");
        }
        int[] intValue = (int[]) arrayIntAttr.getValue();
        long[] dims = arrayIntAttr.getDataDims();
        for (int i = 0; i < dims[0]; i++) {
            if (intValue[i] != i+1) { // expected contents are 1..10
                fail("getValue() failed");
            }
        }
    }

    /**
     * Test method for {@link ncsa.hdf.object.DataFormat#writeMetadata(java.lang.Object)}.
     *
     * Writes a new "CLASS" = "IMAGE" string attribute, then re-reads the
     * metadata and checks that the two original attributes are intact and the
     * new attribute appears at index 2.
     */
    public final void testWriteMetadata() {
        long[] attrDims = {1};
        String attrName = "CLASS";
        String[] classValue = {"IMAGE"};
        // length()+1: room for the null terminator of the fixed-length string
        Datatype attrType = new H5Datatype(Datatype.CLASS_STRING, classValue[0].length()+1, -1, -1);
        Attribute attr = new Attribute(attrName, attrType, attrDims);
        assertNotNull(testGroup);
        assertNotNull(attr);
        attr.setValue(classValue);
        try {
            testGroup.writeMetadata(attr);
        } catch (Exception ex) {
            fail("writeMetadata() failed " + ex.getMessage());
        }
        List mdataList = null;
        try {
            mdataList = testGroup.getMetadata();
        }
        catch (final Exception ex) {
            fail("getMetadata() failed. " + ex );
        }
        assertEquals(3, mdataList.size()); // two original attributes + "CLASS"
        Attribute strAttr = null;
        Attribute arrayIntAttr = null;
        strAttr = (Attribute) mdataList.get(0);
        arrayIntAttr = (Attribute) mdataList.get(1);
        String[] value = (String[]) strAttr.getValue();
        if (!value[0].equals("String attribute.")) {
            fail("writeMdata() failed.");
        }
        int[] intValue = (int[]) arrayIntAttr.getValue();
        long[] dims = arrayIntAttr.getDataDims();
        for (int i = 0; i < dims[0]; i++) {
            if (intValue[i] != i+1) {
                fail("writeValue() failed");
            }
        }
        strAttr = (Attribute) mdataList.get(2);
        value = (String[]) strAttr.getValue();
        if (!value[0].equals("IMAGE")) {
            fail("writeMetadata() failed.");
        }
    }

    /**
     * Test method for {@link ncsa.hdf.object.DataFormat#removeMetadata(java.lang.Object)}.
     *
     * Removes the attribute at index 2 (the "CLASS" attribute added by
     * testWriteMetadata) and checks the list shrinks to 2 entries.
     * NOTE(review): the size assertion is made on the list fetched before the
     * removal — it assumes removeMetadata() mutates the cached list returned
     * by getMetadata(); confirm against the implementation.
     */
    public final void testRemoveMetadata() {
        List mdataList = null;
        try {
            mdataList = testGroup.getMetadata();
        }
        catch (final Exception ex) {
            fail("getMetadata() failed. " + ex.getMessage());
        }
        Attribute strAttr = (Attribute) mdataList.get(2);
        try {
            testGroup.removeMetadata(strAttr);
        } catch (Exception e) {
            fail("removeMetadata() failed " + e.getMessage());
        }
        assertEquals(2, mdataList.size());
    }
}
hdf-java/test/unittests/ScalarDSTest.java 0000755 0051151 0001133 00000011400 11352224121 017156 0 ustar xcao hdf /**
*
*/
package test.unittests;
import ncsa.hdf.hdf5lib.H5;
import ncsa.hdf.hdf5lib.HDF5Constants;
import ncsa.hdf.object.FileFormat;
import ncsa.hdf.object.h5.H5File;
import ncsa.hdf.object.h5.H5Group;
import junit.framework.TestCase;
import ncsa.hdf.object.ScalarDS;
/**
* @author rsinha
*
*/
/**
 * Unit tests for {@link ncsa.hdf.object.ScalarDS}, exercised against the
 * scalar datasets of the HDF5 test file created by {@link H5TestFile}.
 *
 * @author rsinha
 */
public class ScalarDSTest extends TestCase {
    // Factory instance used only to open the test file.
    private static final H5File H5FILE = new H5File();
    // File under test; opened read-only in setUp() and closed in tearDown().
    private H5File testFile = null;
    private H5Group testGroup = null;
    // One field per scalar dataset in the test file.
    private ScalarDS intDset = null;
    private ScalarDS floatDset = null;
    private ScalarDS charDset = null;
    private ScalarDS strDset = null;
    private ScalarDS enumDset = null;
    private ScalarDS imageDset = null;
    private ScalarDS imagePalete = null;
    private ScalarDS ORDset = null;

    /**
     * @param arg0 name of the test method to run
     */
    public ScalarDSTest(String arg0) {
        super(arg0);
    }

    /** Fetches a dataset by path, asserts it exists, and initializes it. */
    private ScalarDS fetchAndInit(final String path) throws Exception {
        final ScalarDS ds = (ScalarDS) testFile.get(path);
        assertNotNull(ds);
        ds.init();
        return ds;
    }

    /* (non-Javadoc)
     * Opens the test file read-only and loads every dataset under test.
     * @see junit.framework.TestCase#setUp()
     */
    protected void setUp() throws Exception {
        super.setUp();
        testFile = (H5File) H5FILE.open(H5TestFile.NAME_FILE_H5, FileFormat.READ);
        assertNotNull(testFile);
        testGroup = (H5Group) testFile.get(H5TestFile.NAME_GROUP_ATTR);
        assertNotNull(testGroup);
        intDset = fetchAndInit(H5TestFile.NAME_DATASET_INT);
        floatDset = fetchAndInit(H5TestFile.NAME_DATASET_FLOAT);
        charDset = fetchAndInit(H5TestFile.NAME_DATASET_CHAR);
        strDset = fetchAndInit(H5TestFile.NAME_DATASET_STR);
        enumDset = fetchAndInit(H5TestFile.NAME_DATASET_ENUM);
        imageDset = fetchAndInit(H5TestFile.NAME_DATASET_IMAGE);
        ORDset = fetchAndInit(H5TestFile.NAME_DATASET_OBJ_REF);
        imagePalete = fetchAndInit(H5TestFile.NAME_DATASET_IMAGE_PALETTE);
    }

    /* (non-Javadoc)
     * Checks that no HDF5 handles besides the file id itself remain open,
     * then closes the file.
     * @see junit.framework.TestCase#tearDown()
     */
    protected void tearDown() throws Exception {
        super.tearDown();
        final int fid = testFile.getFID();
        if (fid > 0) {
            int nObjs = 0;
            try {
                nObjs = H5.H5Fget_obj_count(fid, HDF5Constants.H5F_OBJ_ALL);
            }
            catch (final Exception ex) {
                fail("H5.H5Fget_obj_count() failed. "+ ex);
            }
            assertEquals(1, nObjs); // file id should be the only one left open
        }
        if (testFile != null) {
            try {
                testFile.close();
            }
            catch (final Exception ex) {
                // best-effort close; ignored
            }
            testFile = null;
        }
    }

    /**
     * What to test:
     * - only the image dataset reports isImage() and isImageDisplay() true;
     * - the image dataset has attributes, no interlace, and is not true color;
     * - setIsImageDisplay() can toggle the display flag on any dataset.
     */
    public final void testImageFunctionality() {
        // the image dataset itself
        assertTrue(imageDset.hasAttribute());
        assertTrue(imageDset.isImage());
        assertTrue(imageDset.isImageDisplay());
        assertEquals(imageDset.getInterlace(), -1);
        assertFalse(imageDset.isTrueColor());
        // none of the remaining datasets (palette included) is an image
        final ScalarDS[] others = {intDset, floatDset, charDset, enumDset, imagePalete, ORDset};
        for (int i = 0; i < others.length; i++) {
            assertFalse(others[i].isImage());
        }
        for (int i = 0; i < others.length; i++) {
            assertFalse(others[i].isImageDisplay());
        }
        // the display flag is settable and clearable on an arbitrary dataset
        intDset.setIsImageDisplay(true);
        assertTrue(intDset.isImageDisplay());
        intDset.setIsImageDisplay(false);
        assertFalse(intDset.isImageDisplay());
    }

    /**
     * What to test:
     * - only the string dataset reports isText() true; every other dataset
     *   in the file reports false.
     */
    public final void testIsText() {
        assertTrue(strDset.isText());
        final ScalarDS[] nonText = {imageDset, intDset, floatDset, charDset, enumDset, ORDset, imagePalete};
        for (int i = 0; i < nonText.length; i++) {
            assertFalse(nonText[i].isText());
        }
    }
}
hdf-java/test/unittests/FileFormatTest.java 0000755 0051151 0001133 00000011755 11352224121 017567 0 ustar xcao hdf /**
*
*/
package test.unittests;
import java.util.Enumeration;
import junit.framework.TestCase;
import ncsa.hdf.hdf5lib.H5;
import ncsa.hdf.hdf5lib.HDF5Constants;
import ncsa.hdf.object.FileFormat;
import ncsa.hdf.object.h5.H5File;
/**
* @author rsinha
*
*/
/**
 * Unit tests for the public methods of {@link ncsa.hdf.object.FileFormat},
 * exercised against the HDF5 test file created by {@link H5TestFile}.
 *
 * @author rsinha
 */
public class FileFormatTest extends TestCase {
    // Factory instance used to open the HDF5 test file.
    private static final H5File H5FILE = new H5File();
    // File under test; opened in setUp() and closed in tearDown().
    private FileFormat testFile = null;

    /**
     * @param arg0 name of the test method to run
     */
    public FileFormatTest(String arg0) {
        super(arg0);
    }

    /* (non-Javadoc)
     * Opens the test file read-write before each test.
     * @see junit.framework.TestCase#setUp()
     */
    protected void setUp() throws Exception {
        super.setUp();
        testFile = H5FILE.open(H5TestFile.NAME_FILE_H5, FileFormat.WRITE);
        assertNotNull(testFile);
        testFile.open();
    }

    /* (non-Javadoc)
     * Checks that no HDF5 handles besides the file id itself remain open,
     * then closes the file.
     * @see junit.framework.TestCase#tearDown()
     */
    protected void tearDown() throws Exception {
        super.tearDown();
        final int fid = testFile.getFID();
        if (fid > 0) {
            int nObjs = 0;
            try { nObjs = H5.H5Fget_obj_count(fid, HDF5Constants.H5F_OBJ_ALL); }
            catch (final Exception ex) { fail("H5.H5Fget_obj_count() failed. "+ ex); }
            assertEquals(1, nObjs); // file id should be the only one left open
        }
        if (testFile != null) {
            try { testFile.close(); } catch (final Exception ex) {} // best-effort close
            testFile = null;
        }
    }

    /**
     * Test method for {@link ncsa.hdf.object.FileFormat#create(java.lang.String, int)}.
     *
     * What to test:
     * - Create a file that is already created with option FILE_CREATE_OPEN.
     * - Create a file that is already created and opened with option FILE_CREATE_DELETE.
     * - Create a file that is already created and not opened with FILE_CREATE_DELETE.
     * - Create a file that is new with FILE_CREATE_DELETE.
     * - Create a file that is new with FILE_CREATE_OPEN.
     */
    /*
     * RUTH - come back and update this with new method, createInstance
    public final void testCreateStringInt() {
        FileFormat f = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
        try {
            f.create(H5TestFile.NAME_FILE_H5, FileFormat.FILE_CREATE_OPEN);
        } catch (Exception ex) {
            fail("Create Failed " + ex.getMessage());
        }
        try {
            f.create(H5TestFile.NAME_FILE_H5, FileFormat.FILE_CREATE_DELETE);
        } catch (Exception ex) {
            ; //Expected to fail.
        }
        try {
            f.create("simpleFile", FileFormat.FILE_CREATE_DELETE);
        } catch (Exception ex) {
            fail("Create failed " + ex.getMessage());
        }
        try {
            f.create("testFile", FileFormat.FILE_CREATE_DELETE);
        } catch (Exception ex) {
            fail("Create failed " + ex.getMessage());
        }
        try {
            f.create("testFile", FileFormat.FILE_CREATE_OPEN);
        } catch (Exception ex) {
            fail("Create failed " + ex.getMessage());
        }
    }
    */

    /**
     * Test method for {@link ncsa.hdf.object.FileFormat#getNumberOfMembers()}.
     *
     * Checks the total number of objects in the test file.
     * NOTE(review): 21 is a magic count tied to the layout H5TestFile creates
     * — update it whenever the test-file layout changes.
     */
    public final void testGetNumberOfMembers() {
        // JUnit convention: expected value first, actual second.
        assertEquals(21, testFile.getNumberOfMembers());
    }

    /**
     * Test method for {@link ncsa.hdf.object.FileFormat#getFileFormat(java.lang.String)}.
     *
     * Checks that the "HDF5" file format is registered.
     */
    public final void testGetFileFormat() {
        FileFormat f = FileFormat.getFileFormat("HDF5");
        assertNotNull(f);
    }

    /**
     * Test method for {@link ncsa.hdf.object.FileFormat#getFileFormatKeys()}.
     *
     * The currently registered file format keys are HDF5 and HDF4, in that
     * order.
     */
    public final void testGetFileFormatKeys() {
        Enumeration e = FileFormat.getFileFormatKeys();
        String keys[] = {"HDF5", "HDF4"};
        int pos = 0;
        while (e.hasMoreElements()) {
            assertEquals(keys[pos++], e.nextElement());
        }
    }

    /**
     * Test method for {@link ncsa.hdf.object.FileFormat#getFID()}.
     *
     * Makes sure the file id of the opened file is valid (not -1).
     */
    public final void testGetFID() {
        assertTrue((testFile.getFID() != -1));
    }

    /**
     * Test method for {@link ncsa.hdf.object.FileFormat#getInstance(java.lang.String)}.
     *
     * - Opening a non-existing file must not produce an instance.
     * - Opening an existing file must produce a non-null instance.
     */
    public final void testGetInstance() {
        H5File f = null;
        try {
            f = (H5File) FileFormat.getInstance("test_hdf5.h5");
        }
        catch (Exception ex) {
            ; // expected: the file does not exist
        }
        assertNull(f);
        try {
            f = (H5File) FileFormat.getInstance(H5TestFile.NAME_FILE_H5);
        }
        catch (Exception ex){
            fail("getInstance() failed" + ex.getMessage());
        }
        assertNotNull(f);
    }

    /**
     * Test method for {@link ncsa.hdf.object.FileFormat#getFileFormats()}.
     *
     * Checks that a FileFormat object exists for HDF5 but not for the bogus
     * key "ALL".
     */
    public final void testGetFileFormats() {
        FileFormat f = FileFormat.getFileFormat("HDF5");
        assertNotNull(f);
        FileFormat f1 = FileFormat.getFileFormat("ALL");
        assertNull(f1);
    }
}
hdf-java/test/unittests/CompoundDSTest.java 0000755 0051151 0001133 00000012170 11352224121 017542 0 ustar xcao hdf /**
*
*/
package test.unittests;
import ncsa.hdf.hdf5lib.H5;
import ncsa.hdf.hdf5lib.HDF5Constants;
import ncsa.hdf.object.Datatype;
import ncsa.hdf.object.FileFormat;
import ncsa.hdf.object.h5.H5File;
import junit.framework.TestCase;
import ncsa.hdf.object.CompoundDS;
/**
* @author rsinha
*
*/
/**
 * Unit tests for {@link ncsa.hdf.object.CompoundDS}, exercised against the
 * compound dataset (/dataset_comp) of the HDF5 test file created by
 * {@link H5TestFile}.
 *
 * @author rsinha
 */
public class CompoundDSTest extends TestCase {
    // Factory instance used to open the HDF5 test file.
    private static final H5File H5FILE = new H5File();
    // File under test; opened read-write in setUp() and closed in tearDown().
    private H5File testFile = null;
    // The compound dataset under test.
    private CompoundDS testDS = null;

    /**
     * @param arg0 name of the test method to run
     */
    public CompoundDSTest(String arg0) {
        super(arg0);
    }

    /* (non-Javadoc)
     * Opens the test file and initializes the compound dataset.
     * @see junit.framework.TestCase#setUp()
     */
    protected void setUp() throws Exception {
        super.setUp();
        testFile = (H5File)H5FILE.open(H5TestFile.NAME_FILE_H5, FileFormat.WRITE);
        assertNotNull(testFile);
        testDS = (CompoundDS) testFile.get(H5TestFile.NAME_DATASET_COMPOUND);
        assertNotNull(testDS);
        testDS.init();
    }

    /* (non-Javadoc)
     * Checks that no HDF5 handles besides the file id itself remain open,
     * then closes the file.
     * @see junit.framework.TestCase#tearDown()
     */
    protected void tearDown() throws Exception {
        super.tearDown();
        final int fid = testFile.getFID();
        if (fid > 0) {
            int nObjs = 0;
            try { nObjs = H5.H5Fget_obj_count(fid, HDF5Constants.H5F_OBJ_ALL); }
            catch (final Exception ex) { fail("H5.H5Fget_obj_count() failed. "+ ex); }
            assertEquals(1, nObjs); // file id should be the only one left open
        }
        if (testFile != null) {
            try { testFile.close(); } catch (final Exception ex) {} // best-effort close
            testFile = null;
        }
    }

    /**
     * For the compound dataset in the test file, checks:
     * - the member count,
     * - the name of each member,
     * - the datatype of each member,
     * - the order of each member (all scalar, so 1),
     * - the dims of each member (all scalar, so null).
     */
    public final void testFieldsHaveCorrectNameTypeOrderAndDims() {
        int correctMemberCount = H5TestFile.COMPOUND_MEMBER_NAMES.length;
        // JUnit convention: expected value first, actual second.
        assertEquals(correctMemberCount, testDS.getMemberCount());
        String[] names = testDS.getMemberNames();
        for (int i = 0; i < correctMemberCount; i++) {
            if (!names[i].equals(H5TestFile.COMPOUND_MEMBER_NAMES[i])) {
                fail("Member Name at position " + i + " should be " + H5TestFile.COMPOUND_MEMBER_NAMES[i] +
                        ", while getMemberNames returns " + names[i]);
            }
        }
        Datatype[] types = testDS.getMemberTypes();
        for (int i = 0; i < correctMemberCount; i++) {
            if (!types[i].getDatatypeDescription().equals(H5TestFile.COMPOUND_MEMBER_DATATYPES[i].getDatatypeDescription())) {
                fail("Member Type at position " + i + " should be " +
                        H5TestFile.COMPOUND_MEMBER_DATATYPES[i].getDatatypeDescription() +
                        ", while getMemberTypes returns " + types[i].getDatatypeDescription());
            }
        }
        int[] orders = testDS.getMemberOrders();
        for (int i = 0; i < correctMemberCount; i++) {
            if (orders[i] != 1) {
                fail("Member Order at position " + i + " should be " + 1 + ", while getMemberOrders returns " + orders[i]);
            }
        }
        for (int i = 0; i < correctMemberCount; i++) {
            // getMemeberDims: [sic] the API method name is misspelled upstream
            assertNull(testDS.getMemeberDims(i)); // all scalar data
        }
    }

    /**
     * For the compound dataset in the test file, checks member selection:
     * - the selected-member count right after init (everything selected),
     * - deselecting all members,
     * - explicitly selecting all members,
     * - adding one member at a time and verifying the selection state, order
     *   and type of every member selected so far.
     */
    public final void testSelectionDeselectionCountWorks() {
        if (testDS.getSelectedMemberCount() != H5TestFile.COMPOUND_MEMBER_NAMES.length) {
            fail("Right after init getSelectedMemberCount returns " + testDS.getSelectedMemberCount()
                    + ", when it should return " + H5TestFile.COMPOUND_MEMBER_NAMES.length);
        }
        testDS.setMemberSelection(false);
        assertEquals(0, testDS.getSelectedMemberCount());
        testDS.setMemberSelection(true);
        assertEquals(H5TestFile.COMPOUND_MEMBER_NAMES.length, testDS.getSelectedMemberCount());
        testDS.setMemberSelection(false);
        assertEquals(0, testDS.getSelectedMemberCount());
        for (int i = 0; i < testDS.getMemberCount(); i++) {
            testDS.selectMember(i);
            int[] orders = testDS.getSelectedMemberOrders();
            Datatype[] types = testDS.getMemberTypes();
            for (int j = 0; j <= i; j++) {
                if (!testDS.isMemberSelected(j)) {
                    fail("Member " + j + " should be selected but isMemberSelected() says it is not.");
                }
                if (orders[j] != 1) {
                    fail("Member Order at position " + j + " should be " + 1 + ", while getMemberOrders returns " + orders[j]);
                }
                if (!types[j].getDatatypeDescription().equals(H5TestFile.COMPOUND_MEMBER_DATATYPES[j].getDatatypeDescription())) {
                    fail("Member Type at position " + j + " should be " +
                            H5TestFile.COMPOUND_MEMBER_DATATYPES[j].getDatatypeDescription() +
                            ", while getMemberTypes returns " + types[j].getDatatypeDescription());
                }
            }
        }
    }
}
hdf-java/test/unittests/H5BugFixTest.java 0000755 0051151 0001133 00000026712 11352224121 017117 0 ustar xcao hdf /**
*
*/
package test.unittests;
import java.util.Vector;
import ncsa.hdf.hdf5lib.H5;
import ncsa.hdf.hdf5lib.HDF5Constants;
import ncsa.hdf.object.*;
import ncsa.hdf.object.h5.*;
import junit.framework.TestCase;
/**
* TestCase for bug fixes.
*
* This class tests all the public methods in H5CompoundDS class.
*
* The test file contains the following objects.
*
*
/dataset_byte Dataset {50, 10}
/dataset_comp Dataset {50, 10}
/dataset_enum Dataset {50, 10}
/dataset_float Dataset {50, 10}
/dataset_image Dataset {50, 10}
/dataset_int Dataset {50, 10}
/dataset_str Dataset {50, 10}
/g0 Group
/g0/dataset_comp Dataset {50, 10}
/g0/dataset_int Dataset {50, 10}
/g0/datatype_float Type
/g0/datatype_int Type
/g0/datatype_str Type
/g0/g00 Group
/g0/g00/dataset_float Dataset {50, 10}
/g0_attr Group
*
*
*@author Peter Cao, The HDF Group
*/
public class H5BugFixTest extends TestCase {
    // Number of iterations for the regression loops below.
    private static final int NLOOPS = 10;
    // Factory instance used only to open the test file.
    private static final H5File H5FILE = new H5File();
    // File under test; opened in setUp() and closed in tearDown().
    private H5File testFile = null;
    /**
     * @param arg0 the name of the test case method to run
     */
    public H5BugFixTest(final String arg0) {
        super(arg0);
    }
    /**
     * Best-effort request for garbage collection and finalization, so that
     * unreachable HDF5 wrapper objects can release their native resources
     * before open-handle counts are inspected. Any failure is printed and
     * otherwise ignored.
     */
    private static void collectGarbage() {
        try {
            System.gc();
            Thread.sleep(100);
            System.runFinalization();
            Thread.sleep(100);
        }
        catch (final Exception ex){
            ex.printStackTrace();
        }
    }
    /* (non-Javadoc)
     * Opens the HDF5 test file read-write before each test.
     * @see junit.framework.TestCase#setUp()
     */
    protected void setUp() throws Exception {
        super.setUp();
        testFile = (H5File)H5FILE.open(H5TestFile.NAME_FILE_H5, FileFormat.WRITE);
        assertNotNull(testFile);
    }
    /* (non-Javadoc)
     * Closes the test file after each test; close failures are ignored.
     * @see junit.framework.TestCase#tearDown()
     */
    protected void tearDown() throws Exception {
        super.tearDown();
        if (testFile != null) {
            try { testFile.close(); } catch (final Exception ex) {} // best-effort close
            testFile = null;
        }
    }
/**
* The following program fails because dataset.init() does not reset the selection of dataspace.
*
* The bug appears on hdf-java 2.4 beta04 or earlier version. It is fixed at later version.
*
*
1) read the table cell (using dataset selection to select only that row of the table)
2) re-initialize the Dataset
3) call 'Dataset.clearData()'
4) call 'Dataset.getData()'
5) change the correct column/row **
6) call 'Dataset.write()'
7) close the file
8) reopen the file and read the table cell as in step 1
9) assert that the value has been changed and is correct
This sequence of actions works correctly on the hdf-java library built for
64-bit solaris that we received in August 2006. On the latest (beta-d), This
fails when attempting to change the value of the 1st and 4th rows (however, it
works for the 0th row).
*
*
*/
public final void testBug847() throws Exception {
Vector data = null;
final H5CompoundDS dset = (H5CompoundDS)testFile.get(H5TestFile.NAME_DATASET_COMPOUND);
assertNotNull(dset);
for (int loop=0; loop 0);
// check the data values
int[] ints = (int[])data.get(0);
float[] floats = (float[])data.get(1);
String[] strs = (String[])data.get(2);
assertNotNull(ints);
assertNotNull(floats);
assertNotNull(strs);
for (int i=0; i
* The bug appears on hdf-java 2.4 beta05 or earlier version. It is fixed at later version.
*
*
while (true)
{
H5File file = new H5File(H5TestFile.NAME_FILE_H5, H5File.READ);
//file.open();
file.get("/Table0");
file.get("/Group0");
int n = H5.H5Fget_obj_count(file.getFID(), HDF5Constants.H5F_OBJ_ALL);
if (n>1)
System.out.println("*** Possible memory leak!!!");
file.close();
}
*
*/
public final void testBug863() throws Exception {
int nObjs = 0; // number of object left open
Dataset dset =null;
final String dnames[] = {
H5TestFile.NAME_DATASET_CHAR, H5TestFile.NAME_DATASET_COMPOUND,
H5TestFile.NAME_DATASET_COMPOUND_SUB, H5TestFile.NAME_DATASET_ENUM,
H5TestFile.NAME_DATASET_FLOAT, H5TestFile.NAME_DATASET_IMAGE,
H5TestFile.NAME_DATASET_INT, H5TestFile.NAME_DATASET_STR,
H5TestFile.NAME_DATASET_INT_SUB, H5TestFile.NAME_DATASET_FLOAT_SUB_SUB};
// test two open options: open full tree or open individual object only
for (int openOption=0; openOption<2; openOption++){
for (int i=0; i