#!/bin/sh
#
# Copyright by The HDF Group and
# The Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of H4H5TOOLS. The full H4H5TOOLS copyright notice,
# including terms governing use, modification, and redistribution, is
# contained in the files COPYING and Copyright.html. COPYING can be found
# at the root of the source code distribution tree; Copyright.html can be
# found at the root level of an installed copy of the electronic H4H5TOOLS
# document set, is linked from the top-level documents page, and can be
# found at http://www.hdfgroup.org/h4toh5/Copyright.html. If you do not
# have access to either file, you may request a copy from help@hdfgroup.org.
#
# Purpose:
#
# Test script for the h4toh5 tests.
#
# Use the h4toh5 converter to convert a pre-created HDF4 file to an HDF5
# file (the output file), then compare it with a pre-created corresponding
# HDF5 file (the expected file). If the two files are identical, that
# particular test passes. If they are not, the output file and the expected
# file are compared with the h5diff tool. If h5diff reports no differences,
# the test passes; otherwise the differences are shown and the test is
# reported as failed.
#
# The h5diff tool is taken from the HDF5 installation configured at build
# time (@HDF5PATH@).
#
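# For reference, a minimal sketch of what each test below does (file and
# directory names here are illustrative; the real paths are derived from
# $srcdir and set up later in this script):
#
#   h4toh5 [options] foo.hdf foo.h5                      # run inside Results/
#   cmp -s Expected/foo.h5 Results/foo.h5 \
#       || h5diff -q Expected/foo.h5 Results/foo.h5      # fall back to h5diff
#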
srcdir=@srcdir@

#H4TOH5='h4toh5 -ospe'          # The tool name
H4TOH5='h4toh5'                 # The tool name
H4TOH5_BIN=`pwd`/$H4TOH5        # The path of the tool binary

h5path=@HDF5PATH@
export LD_LIBRARY_PATH=@LD_LIBRARY_PATH@
H5SZIP=@H5_USE_FILTER_SZIP@
H5SZIP_ENCODE=@H5_USE_SZIP_ENCODER@
H5ZLIB=@H5_USE_FILTER_ZLIB@
H4SZIP=@H4_USE_FILTER_SZIP@
H5_USE_HDFEOS2=@H5_USE_HDFEOS2@

CMP='cmp -s'
DIFF='diff -c'
CP='cp'
DIRNAME='dirname'
LS='ls'
AWK='awk'
RM='rm -f'
SED='sed '
H5DIFF="$h5path/h5diff -q"

# Verify that $H5DIFF is a valid command.
tmpfile=/tmp/testh4toh5.$$
$H5DIFF -h > $tmpfile
if test -s "$tmpfile"; then
    :
else
    echo "    Could not run the '$H5DIFF' command. The test can still proceed"
    echo "    but it may fail if '$H5DIFF' is needed to verify the output."
    echo "    Make sure h5diff is installed under '$h5path' and run the test"
    echo "    again. You may also visit http://hdf.ncsa.uiuc.edu/"
    echo "    or email hdfhelp@ncsa.uiuc.edu for more information."
    H5DIFF=:
fi
$RM $tmpfile

# The build (current) directory might be different from the source directory.
if test -z "$srcdir"; then
    srcdir=.
fi

mkdir ../testfiles >/dev/null 2>&1

SRCDIR="$srcdir/../testfiles"
TESTDIR="../testfiles"
OUTDIR="../testfiles/Results"
test -d "$OUTDIR" || mkdir $OUTDIR

nerrors=0
verbose=yes
EXIT_FAILURE=1          # exit code used when a required test file cannot be copied

######################################################################
# test files
# --------------------------------------------------------------------
# All test files are copied from the source directory to the test directory.
# NOTE: Keep this framework to add/remove test files.
#       Any test files from other tools can be used in this framework.
#       This list is also used to check that the files exist.
#       A line starting with '#' (no leading space) is treated as a comment.
# --------------------------------------------------------------------
LIST_HDF_TEST_FILES="
$SRCDIR/gr_typ_test_ds.hdf
$SRCDIR/sds_attr_test_ds.hdf
$SRCDIR/sds_dim_test_ds.hdf
$SRCDIR/sds_dim_test2_ds.hdf
$SRCDIR/sds_puredim_test_ds.hdf
$SRCDIR/sds_dim_attr_ds.hdf
$SRCDIR/sds_mix_dim_ds.hdf
$SRCDIR/sds_typ_test_ds.hdf
$SRCDIR/sdsnameclash_test_ds.hdf
$SRCDIR/vg_all_test_ds.hdf
$SRCDIR/anno_test.hdf
$SRCDIR/gr_typ_test.hdf
$SRCDIR/grnameclash_test.hdf
$SRCDIR/gr_comp_test.hdf
$SRCDIR/image_attr_test.hdf
$SRCDIR/ras24il.hdf
$SRCDIR/ras_24_test.hdf
$SRCDIR/ras_8_test.hdf
$SRCDIR/sds_attr_test.hdf
$SRCDIR/sds_dim_test.hdf
$SRCDIR/sds_typ_test.hdf
$SRCDIR/sdsnameclash_test.hdf
$SRCDIR/sds_comp_test.hdf
$SRCDIR/vdata_test.hdf
$SRCDIR/vdnameclash_test.hdf
$SRCDIR/vg_hl_test.hdf
$SRCDIR/vg_loop_test.hdf
$SRCDIR/vgnameclash_test.hdf
$SRCDIR/vg_all_test.hdf
$SRCDIR/vdata_1f_test.hdf
"

#
# copy test files and expected output files from source dirs to test dir
#
COPY_TESTFILES="$LIST_HDF_TEST_FILES"

COPY_TESTFILES_TO_TESTDIR()
{
    # Copy test files. Use -f to make sure we get a fresh copy.
    for tstfile in $COPY_TESTFILES
    do
        # ignore '#' comment
        echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
        RET=$?
        if [ $RET -eq 1 ]; then
            # Skip cp if srcdir is the same as destdir; this occurs when the
            # build/test is performed in the source dir and would make cp fail.
            SDIR=`$DIRNAME $tstfile`
            INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
            INODE_DDIR=`$LS -i -d $TESTDIR | $AWK -F' ' '{print $1}'`
            if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
                $CP -f $tstfile $TESTDIR
                if [ $? -ne 0 ]; then
                    echo "Error: FAILED to copy $tstfile ."
                    # Comment out this to CREATE expected file
                    exit $EXIT_FAILURE
                fi
            fi
        fi
    done
}

CLEAN_TESTFILES_AND_TESTDIR()
{
    # Skip rm if srcdir is the same as destdir; this occurs when the
    # build/test is performed in the source dir and would make cp fail.
    SDIR=$SRCDIR
    INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
    INODE_DDIR=`$LS -i -d $TESTDIR | $AWK -F' ' '{print $1}'`
    if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
        $RM -r $TESTDIR
    fi
}

# Print a one-line message left justified in a field of 70 characters
# beginning with the word "Testing".
TESTING()
{
    SPACES="                                                               "
    echo "Testing $* $SPACES" | cut -c1-70 | tr -d '\012'
}

# Print a message indicating that a test was skipped
SKIP()
{
    TESTING $H4TOH5 $@
    echo " -SKIP-"
}

# Run a test and print PASS or *FAIL*. If a test fails, increment the
# 'nerrors' global variable and (if $verbose is set) display the difference
# between the actual and the expected HDF5 files. The expected HDF5 files
# are in the testfiles/Expected directory. The actual HDF5 file is not
# removed if $H4H5_NOCLEANUP is set to a non-null value.
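#
# The first argument of CONVERT selects the conversion mode (this summary is
# taken from the case statement inside the function); the remaining arguments
# are the input HDF4 file(s) and, for the two-argument forms, the output HDF5
# file:
#
#   CONVERT    in.hdf [out.h5]      default conversion
#   CONVERT -m f1.hdf f2.hdf ...    convert several files in one test
#   CONVERT -s in.hdf out.h5        old specification          (h4toh5 -ospe)
#   CONVERT -v in.hdf out.h5        one-field vdata to an
#                                   atomic HDF5 dataset        (h4toh5 -sv)
#   CONVERT -e in.hdf out.h5        HDF-EOS2, strict mapping   (h4toh5 -eos -nc4strict)
#   CONVERT -n in.hdf out.h5        HDF-EOS2, non-strict       (h4toh5 -eos -nc4)
#   CONVERT -f in.hdf out.h5        HDF-EOS2 conversion that is expected to fail
#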
CONVERT()
{
    # Run h4toh5 convert.
    TESTING $H4TOH5 $@

    #
    # Set up arguments to run the conversion test.
    # The converter assumes all HDF4 files have the .hdf suffix, as in
    # foo.hdf, and creates the corresponding HDF5 files with the .h5 suffix,
    # as in foo.h5. One exception is that if exactly two file names are
    # given, it treats the first argument as an HDF4 file and creates the
    # corresponding HDF5 file with the name given as the second argument,
    # WITHOUT any consideration of the suffix. (For this test script, in
    # order to match the output HDF5 file with the expected HDF5 file, the
    # second file of a two-file test is expected to have the .h5 suffix too.)
    #
    # If SRCDIR != OUTDIR, the input HDF4 files need to be copied from
    # SRCDIR to OUTDIR and the input file pathname transformed because of
    # the suffix convention mentioned above. This way, the HDF5 files are
    # always created in the OUTDIR directory.
    #
    INFILES=""
    OUTFILES=""
    MULTIRUN=""
    H4TOH5OPT=""
    SHOULDFAIL=""
    H4TOH5FAILED=""
    case "$1" in
    -m)     # multiple files conversion
        MULTIRUN="-m"
        shift
        for f in $*; do
            if test "$SRCDIR" != "$OUTDIR"; then
                cp $SRCDIR/$f $OUTDIR/$f
            fi
            INFILES="$INFILES $f"
            OUTFILES="$OUTFILES `basename $f .hdf`.h5"
            shift
        done
        ;;
    -s)     # HDF4 file conversion following the old specification
        OUTPATH=`dirname $2`
        if test "$SRCDIR" != "$OUTDIR"; then
            test -d "$OUTDIR/$OUTPATH" || mkdir $OUTDIR/$OUTPATH
            cp $SRCDIR/$2 $OUTDIR/$2
        fi
        INFILES="$2"
        OUTFILES="$3"
        H4TOH5OPT="-ospe"
        ;;
    -v)     # HDF4 file conversion, one-field vdata to atomic HDF5 dataset
        OUTPATH=`dirname $2`
        if test "$SRCDIR" != "$OUTDIR"; then
            test -d "$OUTDIR/$OUTPATH" || mkdir $OUTDIR/$OUTPATH
            cp $SRCDIR/$2 $OUTDIR/$2
        fi
        INFILES="$2"
        OUTFILES="$3"
        H4TOH5OPT="-sv"
        ;;
    -n)     # HDF-EOS2 file conversion (non-strict)
        OUTPATH=`dirname $2`
        if test "$SRCDIR" != "$OUTDIR"; then
            test -d "$OUTDIR/$OUTPATH" || mkdir $OUTDIR/$OUTPATH
            cp $SRCDIR/$2 $OUTDIR/$2
        fi
        INFILES="$2"
        OUTFILES="$3"
        H4TOH5OPT="-eos -nc4"
        ;;
    -e)     # HDF-EOS2 file conversion (strict)
        OUTPATH=`dirname $2`
        if test "$SRCDIR" != "$OUTDIR"; then
            test -d "$OUTDIR/$OUTPATH" || mkdir $OUTDIR/$OUTPATH
            cp $SRCDIR/$2 $OUTDIR/$2
        fi
        INFILES="$2"
        OUTFILES="$3"
        H4TOH5OPT="-eos -nc4strict"
        ;;
    -f)     # HDF-EOS2 file conversion that should fail
        SHOULDFAIL="yes"
        OUTPATH=`dirname $2`
        if test "$SRCDIR" != "$OUTDIR"; then
            test -d "$OUTDIR/$OUTPATH" || mkdir $OUTDIR/$OUTPATH
            cp $SRCDIR/$2 $OUTDIR/$2
        fi
        INFILES="$2"
        OUTFILES="$3"
        H4TOH5OPT="-eos -nc4strict"
        ;;
    *)      # Single file conversion
        case $# in
        1)
            if test "$SRCDIR" != "$OUTDIR"; then
                cp $SRCDIR/$1 $OUTDIR/$1
            fi
            INFILES="$1"
            OUTFILES="`basename $1 .hdf`.h5"
            ;;
        2)      # hdf4 file specified
            if test "$SRCDIR" != "$OUTDIR"; then
                cp $SRCDIR/$1 $OUTDIR/$1
            fi
            INFILES="$1"
            OUTFILES="$2"
            ;;
        *)      # Illegal
            echo "Illegal arguments"
            exit 1
            ;;
        esac
        ;;
    esac

    FORCESUCCESS=""

    # run the conversion and remove input files that have been copied over
    curdir=`pwd`
    {
        cd $OUTDIR
        $H4TOH5_BIN $H4TOH5OPT $INFILES $OUTFILES 2>/dev/null
        CONV_RET_VALUE=$?
        if [ $CONV_RET_VALUE -eq 0 ]; then
            :
        else
            if [ "$SHOULDFAIL" = "yes" ] ; then
                FORCESUCCESS="yes"
            fi
            H4TOH5FAILED="yes"
        fi
        if test "$SRCDIR" != "$OUTDIR"; then
            $RM $INFILES
        fi
        if [ "$H4TOH5FAILED" = "yes" ] ; then
            if [ "$FORCESUCCESS" = "yes" ] ; then
                echo " PASSED"
            else
                echo "*FAILED*"
                nerrors="`expr $nerrors + 1`"
            fi
        else
            NEEDCMP="yes"
        fi
    }
    cd $curdir

    if [ "$H4TOH5FAILED" != "yes" ] ; then
        # Verify results
        result="passed"
        for f in $OUTFILES; do
            if $CMP $SRCDIR/Expected/$f $OUTDIR/$f; then
                echo " PASSED"
            else
                # Use h5diff to compare the HDF5 files.
                # Now, testfiles directory has subdirectories; do not tear off path
                # outfile=`basename $f .h5`
                outfile=$f
                expect_out=$outfile.expect
                actual_out=$outfile.actual
                $H5DIFF $SRCDIR/Expected/$outfile $OUTDIR/$outfile
                RET=$?
                if [ $RET -ne 0 ] ; then
                    echo "*FAILED*"
                    nerrors="`expr $nerrors + 1`"
                else
                    echo " PASSED"
                fi
            fi

            # Clean up output file
            if test -z "$H4H5_NOCLEANUP"; then
                $RM $expect_out $actual_out
                $RM $OUTDIR/$f
            fi
        done
    fi
}
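# Example (illustrative): "CONVERT -s anno_test.hdf anno_test.h5" copies
# anno_test.hdf into $OUTDIR, runs "h4toh5 -ospe anno_test.hdf anno_test.h5"
# there, and then compares the result against $SRCDIR/Expected/anno_test.h5,
# first with 'cmp -s' and, if the files differ, with 'h5diff -q'.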
##############################################################################
##############################################################################
###                         T H E   T E S T S                              ###
##############################################################################
##############################################################################

$RM $OUTDIR/*.hdf $OUTDIR/*.tmp

#
# The HDF5 filenames are created based upon the HDF4 filenames
# without the extension.
#

# test for converting HDF4 Vgroups to HDF5 groups.
#CONVERT vg.hdf

#
# The conversion tests below are the same as above, with the only difference
# being that the HDF5 filenames are given explicitly.
#
$RM $OUTDIR/*.tmp
CONVERT gr_typ_test_ds.hdf gr_typ_test_ds.h5
CONVERT sds_attr_test_ds.hdf sds_attr_test_ds.h5
CONVERT sds_dim_test_ds.hdf sds_dim_test_ds.h5
CONVERT sds_dim_test2_ds.hdf sds_dim_test2_ds.h5
CONVERT sds_puredim_test_ds.hdf sds_puredim_test_ds.h5
CONVERT sds_dim_attr_ds.hdf sds_dim_attr_ds.h5
CONVERT sds_mix_dim_ds.hdf sds_mix_dim_ds.h5
CONVERT sds_typ_test_ds.hdf sds_typ_test_ds.h5
CONVERT sdsnameclash_test_ds.hdf sdsnameclash_test_ds.h5
CONVERT vg_all_test_ds.hdf vg_all_test_ds.h5

$RM $OUTDIR/*.tmp
CONVERT -s anno_test.hdf anno_test.h5
CONVERT -s gr_typ_test.hdf gr_typ_test.h5
CONVERT -s grnameclash_test.hdf grnameclash_test.h5
if test "$H5SZIP" != "yes" -o "$H5SZIP_ENCODE" != "yes" -o "$H5ZLIB" != "yes" -o "$H4SZIP" != "yes"; then
    SKIP gr_comp_test.hdf gr_comp_test.h5
else
    CONVERT -s gr_comp_test.hdf gr_comp_test.h5
fi
CONVERT -s image_attr_test.hdf image_attr_test.h5
#CONVERT -s image_maxsize.hdf image_maxsize.h5
CONVERT -s ras24il.hdf ras24il.h5
CONVERT -s ras_24_test.hdf ras_24_test.h5
CONVERT -s ras_8_test.hdf ras_8_test.h5
CONVERT -s sds_attr_test.hdf sds_attr_test.h5
CONVERT -s sds_dim_test.hdf sds_dim_test.h5
CONVERT -s sds_typ_test.hdf sds_typ_test.h5
CONVERT -s sdsnameclash_test.hdf sdsnameclash_test.h5
if test "$H5SZIP" != "yes" -o "$H5SZIP_ENCODE" != "yes" -o "$H5ZLIB" != "yes" -o "$H4SZIP" != "yes"; then
    SKIP sds_comp_test.hdf sds_comp_test.h5
else
    CONVERT -s sds_comp_test.hdf sds_comp_test.h5
fi
CONVERT -s vdata_test.hdf vdata_test.h5
CONVERT -s vdnameclash_test.hdf vdnameclash_test.h5
CONVERT -s vg_hl_test.hdf vg_hl_test.h5
CONVERT -s vg_loop_test.hdf vg_loop_test.h5
CONVERT -s vgnameclash_test.hdf vgnameclash_test.h5
CONVERT -s vg_all_test.hdf vg_all_test.h5

# One-field vdata conversion. Like the default conversion, except that a
# one-field vdata is converted to an atomic HDF5 dataset.
CONVERT -v vg_all_test_ds.hdf vg_all_test_ds.h5
CONVERT -v vdata_1f_test.hdf vdata_1f_test.h5

#
#
$RM $OUTDIR/*.hdf $OUTDIR/*.tmp

# test HDF-EOS2 conversion
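# The tests below exercise the HDF-EOS2 modes: -e (strict netCDF-4 mapping),
# -n (non-strict mapping) and -f (conversions that are expected to fail).
# A few cases are SKIPped instead of being run with -f because the failing
# conversion leaks memory (see the commented-out "CONVERT -f" lines).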
if [ "$H5_USE_HDFEOS2" != "yes" ] ; then
    :
else
    CONVERT -e grid/generic_type_grid.hdf grid/generic_type_grid.h5
    #CONVERT -f grid/grid_sharedname.hdf grid/grid_sharedname.h5 # Failure causes memory leaking. SKIP
    SKIP grid/grid_sharedname.hdf grid/grid_sharedname.h5
    CONVERT -e swath/generic_type_swath.hdf swath/generic_type_swath.h5
    CONVERT -e swath/swath_sharedname.hdf swath/swath_sharedname.h5
    CONVERT -e grid/grid_badname.hdf grid/grid_badname.h5
    CONVERT -e swath/swath_badname.hdf swath/swath_badname.h5
    CONVERT -e grid/grid_geo_xy.hdf grid/grid_geo_xy.h5
    CONVERT -e grid/grid_geo_yx.hdf grid/grid_geo_yx.h5
    CONVERT -e grid/grid_geo_timexy.hdf grid/grid_geo_timexy.h5
    CONVERT -e grid/grid_geo_timeyx.hdf grid/grid_geo_timeyx.h5
    CONVERT -e grid/grid_utm_xy.hdf grid/grid_utm_xy.h5
    CONVERT -e grid/grid_utm_yx.hdf grid/grid_utm_yx.h5
    CONVERT -e grid/grid_utm_timexy.hdf grid/grid_utm_timexy.h5
    CONVERT -e grid/grid_utm_timeyx.hdf grid/grid_utm_timeyx.h5
    CONVERT -e grid/grid_geo_xyxy.hdf grid/grid_geo_xyxy.h5
    CONVERT -e grid/grid_geo_yxyx.hdf grid/grid_geo_yxyx.h5
    #CONVERT -f grid/grid_geo_xyyx.hdf grid/grid_geo_xyyx.h5 # Failure causes memory leaking. SKIP
    SKIP grid/grid_geo_xyyx.hdf grid/grid_geo_xyyx.h5
    CONVERT -n grid/grid_geo_xyyx.hdf grid/grid_geo_xyyx.h5
    CONVERT -f grid/grid_geo_nil.hdf grid/grid_geo_nil.h5
    CONVERT -e grid/grid_utm_xyxy.hdf grid/grid_utm_xyxy.h5
    CONVERT -e grid/grid_utm_yxyx.hdf grid/grid_utm_yxyx.h5
    #CONVERT -f grid/grid_utm_xyyx.hdf grid/grid_utm_xyyx.h5 # Failure causes memory leaking. SKIP
    SKIP grid/grid_utm_xyyx.hdf grid/grid_utm_xyyx.h5
    CONVERT -n grid/grid_utm_xyyx.hdf grid/grid_utm_xyyx.h5
    CONVERT -f grid/grid_utm_nil.hdf grid/grid_utm_nil.h5
    SKIP grid/grid_multgrids.hdf grid/grid_multgrids.h5
    CONVERT -e swath/swath_basic.hdf swath/swath_basic.h5
    CONVERT -e swath/swath_0101.hdf swath/swath_0101.h5
    CONVERT -e swath/swath_1111.hdf swath/swath_1111.h5
    CONVERT -e swath/swath_2121.hdf swath/swath_2121.h5
    CONVERT -e swath/swath_n11n11.hdf swath/swath_n11n11.h5
    CONVERT -e swath/swath_n11n11_tight.hdf swath/swath_n11n11_tight.h5
    CONVERT -e swath/swath_n21n21.hdf swath/swath_n21n21.h5
    CONVERT -e swath/swath_0202.hdf swath/swath_0202.h5
    CONVERT -e swath/swath_1212.hdf swath/swath_1212.h5
    CONVERT -e swath/swath_2222.hdf swath/swath_2222.h5
    CONVERT -e swath/swath_n12n12.hdf swath/swath_n12n12.h5
    CONVERT -e swath/swath_n22n22.hdf swath/swath_n22n22.h5
    CONVERT -e swath/swath_0n20n2.hdf swath/swath_0n20n2.h5
    CONVERT -e swath/swath_1n21n2.hdf swath/swath_1n21n2.h5
    CONVERT -e swath/swath_2n22n2.hdf swath/swath_2n22n2.h5
    CONVERT -e swath/swath_n1n2n1n2.hdf swath/swath_n1n2n1n2.h5
    CONVERT -e swath/swath_n2n2n2n2.hdf swath/swath_n2n2n2n2.h5
    CONVERT -e swath/swath_qdd.hdf swath/swath_qdd.h5
    CONVERT -e swath/swath_dqd.hdf swath/swath_dqd.h5
    CONVERT -e swath/swath_ddq.hdf swath/swath_ddq.h5
    CONVERT -e swath/swath_qgg.hdf swath/swath_qgg.h5
    CONVERT -e swath/swath_gqg.hdf swath/swath_gqg.h5
    CONVERT -e swath/swath_ggq.hdf swath/swath_ggq.h5
    CONVERT -e swath/swath_tll.hdf swath/swath_tll.h5
    CONVERT -e swath/swath_tll02.hdf swath/swath_tll02.h5
    SKIP swath/swath_multswaths.hdf swath/swath_multswaths.h5
fi

if test $nerrors -eq 0 ; then
    echo "All h4toh5 tests passed."
fi

if test -z "$H4H5_NOCLEANUP"; then
    $RM -r $OUTDIR
    # Clean up temporary files/directories
    CLEAN_TESTFILES_AND_TESTDIR
fi

exit $nerrors