001/***************************************************************************** 002 * Copyright by The HDF Group. * 003 * Copyright by the Board of Trustees of the University of Illinois. * 004 * All rights reserved. * 005 * * 006 * This file is part of the HDF Java Products distribution. * 007 * The full copyright notice, including terms governing use, modification, * 008 * and redistribution, is contained in the file COPYING. * 009 * COPYING can be found at the root of the source code distribution tree. * 010 * If you do not have access to this file, you may request a copy from * 011 * help@hdfgroup.org. * 012 ****************************************************************************/ 013 014package hdf.object.h5; 015 016import java.io.File; 017import java.lang.reflect.Array; 018import java.util.Enumeration; 019import java.util.Hashtable; 020import java.util.List; 021import java.util.Vector; 022 023import javax.swing.tree.DefaultMutableTreeNode; 024import javax.swing.tree.MutableTreeNode; 025import javax.swing.tree.TreeNode; 026 027import hdf.hdf5lib.H5; 028import hdf.hdf5lib.HDF5Constants; 029import hdf.hdf5lib.HDFNativeData; 030import hdf.hdf5lib.exceptions.HDF5Exception; 031import hdf.hdf5lib.structs.H5G_info_t; 032import hdf.hdf5lib.structs.H5L_info_t; 033import hdf.hdf5lib.structs.H5O_info_t; 034import hdf.object.Attribute; 035import hdf.object.Dataset; 036import hdf.object.Datatype; 037import hdf.object.FileFormat; 038import hdf.object.Group; 039import hdf.object.HObject; 040import hdf.object.ScalarDS; 041 042/** 043 * H5File is an implementation of the FileFormat class for HDF5 files. 044 * <p> 045 * The HDF5 file structure is stored in a tree that is made up of Java TreeNode objects. Each tree node represents an 046 * HDF5 object: a Group, Dataset, or Named Datatype. Starting from the root of the tree, <i>rootNode</i>, the tree can 047 * be traversed to find a specific object. 
 * <p>
 * The following example shows the implementation of finding an object for a given path in FileFormat. User applications
 * can directly call the static method FileFormat.findObject(file, objPath) to get the object.
 *
 * <pre>
 * HObject findObject(FileFormat file, String path) {
 *     if (file == null || path == null)
 *         return null;
 *     if (!path.endsWith("/"))
 *         path = path + "/";
 *     DefaultMutableTreeNode theRoot = (DefaultMutableTreeNode) file.getRootNode();
 *     if (theRoot == null)
 *         return null;
 *     else if (path.equals("/"))
 *         return (HObject) theRoot.getUserObject();
 *
 *     Enumeration local_enum = ((DefaultMutableTreeNode) theRoot).breadthFirstEnumeration();
 *     DefaultMutableTreeNode theNode = null;
 *     HObject theObj = null;
 *     while (local_enum.hasMoreElements()) {
 *         theNode = (DefaultMutableTreeNode) local_enum.nextElement();
 *         theObj = (HObject) theNode.getUserObject();
 *         String fullPath = theObj.getFullName() + "/";
 *         if (path.equals(fullPath) && theObj.getPath() != null) {
 *             return theObj;
 *         }
 *     }
 *     return null;
 * }
 * </pre>
 *
 * @author Peter X. Cao
 * @version 2.4 9/4/2007
 */
public class H5File extends FileFormat {
    private static final long serialVersionUID = 6247335559471526045L;

    private final static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5File.class);

    /**
     * The file access flag. Valid values are HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5F_ACC_RDWR and
     * HDF5Constants.H5F_ACC_CREAT.
     */
    private int flag;

    /**
     * The index type used when iterating over links/attributes. Valid values are HDF5Constants.H5_INDEX_NAME,
     * HDF5Constants.H5_INDEX_CRT_ORDER.
     */
    private int indexType = HDF5Constants.H5_INDEX_NAME;

    /**
     * The index traversal order. Valid values are HDF5Constants.H5_ITER_INC, HDF5Constants.H5_ITER_DEC.
     */
    private int indexOrder = HDF5Constants.H5_ITER_INC;

    /**
     * The root node of the file hierarchy. Remains null until the file structure is loaded.
     */
    private DefaultMutableTreeNode rootNode;

    /**
     * Maximum number of characters in an attribute name read back from the file; longer names are truncated.
     */
    private static final int attrNameLen = 256;

    /**
     * The library version bounds: [low, high].
     */
    private int[] libver;

    // NOTE(review): purpose not visible in this chunk — presumably caches whether attributes
    // have been loaded for this file; confirm against the rest of the class.
    private boolean attrFlag;

    /***************************************************************************
     * Constructor
     **************************************************************************/
    /**
     * Constructs an H5File instance with an empty file name and read-only access.
     */
    public H5File() {
        this("", READ);
    }

    /**
     * Constructs an H5File instance with specified file name and read/write access.
     * <p>
     * This constructor does not open the file for access, nor does it confirm that the file can be opened read/write.
     *
     * @param fileName
     *            A valid file name, with a relative or absolute path.
     *
     * @throws NullPointerException
     *             If the <code>fileName</code> argument is <code>null</code>.
     */
    public H5File(String fileName) {
        this(fileName, WRITE);
    }

    /**
     * Constructs an H5File instance with specified file name and access.
     * <p>
     * The access parameter values and corresponding behaviors:
     * <ul>
     * <li>READ: Read-only access; open() will fail if file doesn't exist.</li>
     * <li>WRITE: Read/Write access; open() will fail if file doesn't exist or if file can't be opened with read/write
     * access.</li>
     * <li>CREATE: Read/Write access; create a new file or truncate an existing one; open() will fail if file can't be
     * created or if file exists but can't be opened read/write.</li>
     * </ul>
     * <p>
     * This constructor does not open the file for access, nor does it confirm that the file can later be opened
     * read/write or created.
     * <p>
     * The flag returned by {@link #isReadOnly()} is set to true if the access parameter value is READ, even though the
     * file isn't yet open.
     *
     * @param fileName
     *            A valid file name, with a relative or absolute path.
     * @param access
     *            The file access flag, which determines behavior when file is opened. Acceptable values are
     *            <code> READ, WRITE, </code> and <code>CREATE</code>.
     *
     * @throws NullPointerException
     *             If the <code>fileName</code> argument is <code>null</code>.
     */
    public H5File(String fileName, int access) {
        // Call FileFormat ctor to set absolute path name
        super(fileName);
        libver = new int[2];
        attrFlag = false;

        // set metadata for the instance
        rootNode = null;
        this.fid = -1;
        isReadOnly = (access == READ);

        // At this point we just set up the flags for what happens later.
        // We just pass unexpected access values on... subclasses may have
        // their own values.
        if (access == READ) {
            flag = HDF5Constants.H5F_ACC_RDONLY;
        }
        else if (access == WRITE) {
            flag = HDF5Constants.H5F_ACC_RDWR;
        }
        else if (access == CREATE) {
            flag = HDF5Constants.H5F_ACC_CREAT;
        }
        else {
            flag = access;
        }
    }

    /***************************************************************************
     * Class methods
     **************************************************************************/

    /**
     * Copies the attributes of one object to another object.
     * <p>
     * This method copies all the attributes from one object (source object) to another (destination object). If an
     * attribute already exists in the destination object, the attribute will not be copied. Attribute names exceeding
     * 256 characters will be truncated in the destination object.
     * <p>
     * The object can be an H5Group, an H5Dataset, or a named H5Datatype.
This method is in the H5File class because 213 * there is no H5Object class and it is specific to HDF5 objects. 214 * <p> 215 * The copy can fail for a number of reasons, including an invalid source or destination object, but no exceptions 216 * are thrown. The actual copy is carried out by the method: {@link #copyAttributes(int, int)} 217 * 218 * @param src 219 * The source object. 220 * @param dst 221 * The destination object. 222 * 223 * @see #copyAttributes(int, int) 224 */ 225 public static final void copyAttributes(HObject src, HObject dst) { 226 if ((src != null) && (dst != null)) { 227 int srcID = src.open(); 228 int dstID = dst.open(); 229 230 if ((srcID >= 0) && (dstID >= 0)) { 231 copyAttributes(srcID, dstID); 232 } 233 234 if (srcID >= 0) { 235 src.close(srcID); 236 } 237 238 if (dstID >= 0) { 239 dst.close(dstID); 240 } 241 } 242 } 243 244 /** 245 * Copies the attributes of one object to another object. 246 * <p> 247 * This method copies all the attributes from one object (source object) to another (destination object). If an 248 * attribute already exists in the destination object, the attribute will not be copied. Attribute names exceeding 249 * 256 characters will be truncated in the destination object. 250 * <p> 251 * The object can be an H5Group, an H5Dataset, or a named H5Datatype. This method is in the H5File class because 252 * there is no H5Object class and it is specific to HDF5 objects. 253 * <p> 254 * The copy can fail for a number of reasons, including an invalid source or destination object identifier, but no 255 * exceptions are thrown. 256 * 257 * @param src_id 258 * The identifier of the source object. 259 * @param dst_id 260 * The identifier of the destination object. 
261 */ 262 public static final void copyAttributes(int src_id, int dst_id) { 263 log.trace("copyAttributes(): start: src_id={} dst_id={}", src_id, dst_id); 264 int aid_src = -1; 265 int aid_dst = -1; 266 int asid = -1; 267 int atid = -1; 268 String aName = null; 269 H5O_info_t obj_info = null; 270 271 try { 272 obj_info = H5.H5Oget_info(src_id); 273 } 274 catch (Exception ex) { 275 obj_info.num_attrs = -1; 276 } 277 278 if (obj_info.num_attrs < 0) { 279 log.debug("copyAttributes(): no attributes"); 280 log.trace("copyAttributes(): finish"); 281 return; 282 } 283 284 for (int i = 0; i < obj_info.num_attrs; i++) { 285 try { 286 aid_src = H5.H5Aopen_by_idx(src_id, ".", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 287 i, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 288 aName = H5.H5Aget_name(aid_src); 289 atid = H5.H5Aget_type(aid_src); 290 asid = H5.H5Aget_space(aid_src); 291 292 aid_dst = H5.H5Acreate(dst_id, aName, atid, asid, HDF5Constants.H5P_DEFAULT, 293 HDF5Constants.H5P_DEFAULT); 294 295 // use native data copy 296 H5.H5Acopy(aid_src, aid_dst); 297 298 } 299 catch (Exception ex) { 300 log.debug("copyAttributes(): Attribute[{}] failure: ", i, ex); 301 } 302 303 try { 304 H5.H5Sclose(asid); 305 } 306 catch (Exception ex) { 307 log.debug("copyAttributes(): Attribute[{}] H5Sclose(asid {}) failure: ", i, asid, ex); 308 } 309 try { 310 H5.H5Tclose(atid); 311 } 312 catch (Exception ex) { 313 log.debug("copyAttributes(): Attribute[{}] H5Tclose(atid {}) failure: ", i, atid, ex); 314 } 315 try { 316 H5.H5Aclose(aid_src); 317 } 318 catch (Exception ex) { 319 log.debug("copyAttributes(): Attribute[{}] H5Aclose(aid_src {}) failure: ", i, aid_src, ex); 320 } 321 try { 322 H5.H5Aclose(aid_dst); 323 } 324 catch (Exception ex) { 325 log.debug("copyAttributes(): Attribute[{}] H5Aclose(aid_dst {}) failure: ", i, aid_dst, ex); 326 } 327 328 } // for (int i=0; i<num_attr; i++) 329 } 330 331 /** 332 * Returns a list of attributes for the specified object. 
333 * <p> 334 * This method returns a list containing the attributes associated with the identified object. If there are no 335 * associated attributes, an empty list will be returned. 336 * <p> 337 * Attribute names exceeding 256 characters will be truncated in the returned list. 338 * 339 * @param objID 340 * The identifier for the object whose attributes are to be returned. 341 * 342 * @return The list of the object's attributes. 343 * 344 * @throws HDF5Exception 345 * If an underlying HDF library routine is unable to perform a step necessary to retrieve the 346 * attributes. A variety of failures throw this exception. 347 * 348 * @see #getAttribute(int,int,int) 349 */ 350 public static final List<Attribute> getAttribute(int objID) throws HDF5Exception { 351 return H5File.getAttribute(objID, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC); 352 } 353 354 /** 355 * Returns a list of attributes for the specified object, in creation or alphabetical order. 356 * <p> 357 * This method returns a list containing the attributes associated with the identified object. If there are no 358 * associated attributes, an empty list will be returned. The list of attributes returned can be in increasing or 359 * decreasing, creation or alphabetical order. 360 * <p> 361 * Attribute names exceeding 256 characters will be truncated in the returned list. 362 * 363 * @param objID 364 * The identifier for the object whose attributes are to be returned. 365 * @param idx_type 366 * The type of index. Valid values are: 367 * <ul> 368 * <li>H5_INDEX_NAME: An alpha-numeric index by attribute name <li>H5_INDEX_CRT_ORDER: An index by 369 * creation order 370 * </ul> 371 * @param order 372 * The index traversal order. Valid values are: 373 * <ul> 374 * <li>H5_ITER_INC: A top-down iteration incrementing the index position at each step. <li>H5_ITER_DEC: A 375 * bottom-up iteration decrementing the index position at each step. 
376 * </ul> 377 * 378 * @return The list of the object's attributes. 379 * 380 * @throws HDF5Exception 381 * If an underlying HDF library routine is unable to perform a step necessary to retrieve the 382 * attributes. A variety of failures throw this exception. 383 */ 384 385 public static final List<Attribute> getAttribute(int objID, int idx_type, int order) throws HDF5Exception { 386 log.trace("getAttribute(): start: objID={} idx_type={} order={}", objID, idx_type, order); 387 List<Attribute> attributeList = null; 388 int aid = -1; 389 int sid = -1; 390 int tid = -1; 391 H5O_info_t obj_info = null; 392 393 try { 394 obj_info = H5.H5Oget_info(objID); 395 } 396 catch (Exception ex) { 397 log.debug("getAttribute(): H5Oget_info(objID {}) failure: ", objID, ex); 398 } 399 if (obj_info.num_attrs <= 0) { 400 log.debug("getAttribute(): no attributes"); 401 log.trace("getAttribute(): finish"); 402 return (attributeList = new Vector<Attribute>()); 403 } 404 405 int n = (int) obj_info.num_attrs; 406 attributeList = new Vector<Attribute>(n); 407 log.trace("getAttribute(): num_attrs={}", n); 408 409 for (int i = 0; i < n; i++) { 410 long lsize = 1; 411 log.trace("getAttribute(): attribute[{}]", i); 412 413 try { 414 aid = H5.H5Aopen_by_idx(objID, ".", idx_type, order, i, HDF5Constants.H5P_DEFAULT, 415 HDF5Constants.H5P_DEFAULT); 416 sid = H5.H5Aget_space(aid); 417 418 long dims[] = null; 419 int rank = H5.H5Sget_simple_extent_ndims(sid); 420 421 if (rank > 0) { 422 dims = new long[rank]; 423 H5.H5Sget_simple_extent_dims(sid, dims, null); 424 log.trace("getAttribute(): Attribute[{}] rank={}, dims={}", i, rank, dims); 425 for (int j = 0; j < dims.length; j++) { 426 lsize *= dims[j]; 427 } 428 } 429 String nameA = H5.H5Aget_name(aid); 430 log.trace("getAttribute(): Attribute[{}] is {}", i, nameA); 431 432 int tmptid = -1; 433 try { 434 tmptid = H5.H5Aget_type(aid); 435 tid = H5.H5Tget_native_type(tmptid); 436 log.trace("getAttribute(): Attribute[{}] tid={} native tmptid={} from 
aid={}", i, tid, tmptid, aid); 437 } 438 finally { 439 try { 440 H5.H5Tclose(tmptid); 441 } 442 catch (Exception ex) { 443 log.debug("getAttribute(): Attribute[{}] H5Tclose(tmptid {}) failure: ", i, tmptid, ex); 444 } 445 } 446 Datatype attrType = new H5Datatype(tid); 447 Attribute attr = new Attribute(nameA, attrType, dims); 448 attributeList.add(attr); 449 log.trace("getAttribute(): Attribute[{}] Datatype={}", i, attrType.getDatatypeDescription()); 450 451 boolean is_variable_str = false; 452 boolean isVLEN = false; 453 boolean isCompound = false; 454 boolean isScalar = false; 455 int tclass = H5.H5Tget_class(tid); 456 457 if (dims == null) 458 isScalar = true; 459 try { 460 is_variable_str = H5.H5Tis_variable_str(tid); 461 } 462 catch (Exception ex) { 463 log.debug("getAttribute(): Attribute[{}] H5Tis_variable_str(tid {}) failure: ", i, tid, ex); 464 } 465 isVLEN = (tclass == HDF5Constants.H5T_VLEN); 466 isCompound = (tclass == HDF5Constants.H5T_COMPOUND); 467 log.trace( 468 "getAttribute(): Attribute[{}] has size={} isCompound={} isScalar={} is_variable_str={} isVLEN={}", 469 i, lsize, isCompound, isScalar, is_variable_str, isVLEN); 470 471 // retrieve the attribute value 472 if (lsize <= 0) { 473 log.debug("getAttribute(): Attribute[{}] lsize <= 0", i); 474 log.trace("getAttribute(): Attribute[{}] continue", i); 475 continue; 476 } 477 478 if (lsize < Integer.MIN_VALUE || lsize > Integer.MAX_VALUE) { 479 log.debug("getAttribute(): Attribute[{}] lsize outside valid Java int range; unsafe cast", i); 480 log.trace("getAttribute(): Attribute[{}] continue", i); 481 continue; 482 } 483 484 Object value = null; 485 if (is_variable_str) { 486 String[] strs = new String[(int) lsize]; 487 for (int j = 0; j < lsize; j++) { 488 strs[j] = ""; 489 } 490 try { 491 log.trace("getAttribute(): Attribute[{}] H5AreadVL", i); 492 H5.H5AreadVL(aid, tid, strs); 493 } 494 catch (Exception ex) { 495 log.debug("getAttribute(): Attribute[{}] H5AreadVL failure: ", i, ex); 496 
ex.printStackTrace(); 497 } 498 value = strs; 499 } 500 else if (isCompound || (isScalar && tclass == HDF5Constants.H5T_ARRAY)) { 501 String[] strs = new String[(int) lsize]; 502 for (int j = 0; j < lsize; j++) { 503 strs[j] = ""; 504 } 505 try { 506 log.trace("getAttribute: attribute[{}] H5AreadComplex", i); 507 H5.H5AreadComplex(aid, tid, strs); 508 } 509 catch (Exception ex) { 510 ex.printStackTrace(); 511 } 512 value = strs; 513 } 514 else if (isVLEN) { 515 String[] strs = new String[(int) lsize]; 516 for (int j = 0; j < lsize; j++) { 517 strs[j] = ""; 518 } 519 try { 520 log.trace("getAttribute(): Attribute[{}] H5AreadVL", i); 521 H5.H5AreadComplex(aid, tid, strs); 522 } 523 catch (Exception ex) { 524 log.debug("getAttribute(): Attribute[{}] H5AreadVL failure: ", i, ex); 525 ex.printStackTrace(); 526 } 527 value = strs; 528 } 529 else { 530 value = H5Datatype.allocateArray(tid, (int) lsize); 531 if (value == null) { 532 log.debug("getAttribute(): Attribute[{}] allocateArray returned null", i); 533 log.trace("getAttribute(): Attribute[{}] continue", i); 534 continue; 535 } 536 537 if (tclass == HDF5Constants.H5T_ARRAY) { 538 int tmptid1 = -1; 539 int tmptid2 = -1; 540 try { 541 log.trace("getAttribute(): Attribute[{}] H5Aread ARRAY tid={}", i, tid); 542 H5.H5Aread(aid, tid, value); 543 } 544 catch (Exception ex) { 545 log.debug("getAttribute(): Attribute[{}] H5Aread failure: ", i, ex); 546 ex.printStackTrace(); 547 } 548 finally { 549 try { 550 H5.H5Tclose(tmptid1); 551 } 552 catch (Exception ex) { 553 log.debug("getAttribute(): Attribute[{}] H5Tclose(tmptid {}) failure: ", i, tmptid1, ex); 554 } 555 try { 556 H5.H5Tclose(tmptid2); 557 } 558 catch (Exception ex) { 559 log.debug("getAttribute(): Attribute[{}] H5Tclose(tmptid {}) failure: ", i, tmptid2, ex); 560 } 561 } 562 } 563 else { 564 log.trace("getAttribute(): Attribute[{}] H5Aread", i); 565 H5.H5Aread(aid, tid, value); 566 } 567 568 if (tclass == HDF5Constants.H5T_STRING) { 569 log.trace("getAttribute(): 
Attribute[{}] byteToString", i); 570 value = Dataset.byteToString((byte[]) value, (int)H5.H5Tget_size(tid)); 571 } 572 else if (tclass == HDF5Constants.H5T_REFERENCE) { 573 log.trace("getAttribute(): Attribute[{}] byteToLong", i); 574 value = HDFNativeData.byteToLong((byte[]) value); 575 } 576 } 577 578 attr.setValue(value); 579 580 } 581 catch (HDF5Exception ex) { 582 log.debug("getAttribute(): Attribute[{}] inspection failure: ", i, ex); 583 } 584 finally { 585 try { 586 H5.H5Tclose(tid); 587 } 588 catch (Exception ex) { 589 log.debug("getAttribute(): Attribute[{}] H5Tclose(tid {}) failure: ", i, tid, ex); 590 } 591 try { 592 H5.H5Sclose(sid); 593 } 594 catch (Exception ex) { 595 log.debug("getAttribute(): Attribute[{}] H5Sclose(sid {}) failure: ", i, sid, ex); 596 } 597 try { 598 H5.H5Aclose(aid); 599 } 600 catch (Exception ex) { 601 log.debug("getAttribute(): Attribute[{}] H5Aclose(aid {}) failure: ", i, aid, ex); 602 } 603 } 604 } // for (int i=0; i<obj_info.num_attrs; i++) 605 606 log.trace("getAttribute(): finish"); 607 return attributeList; 608 } 609 610 /** 611 * Creates attributes for an HDF5 image dataset. 612 * <p> 613 * This method creates attributes for two common types of HDF5 images. It provides a way of adding multiple 614 * attributes to an HDF5 image dataset with a single call. The {@link #writeAttribute(HObject, Attribute, boolean)} 615 * method may be used to write image attributes that are not handled by this method. 616 * <p> 617 * For more information about HDF5 image attributes, see the <a 618 * href="https://www.hdfgroup.org/HDF5/doc/ADGuide/ImageSpec.html"> HDF5 Image and Palette Specification</a>. 619 * <p> 620 * This method can be called to create attributes for 24-bit true color and indexed images. The 621 * <code>selectionFlag</code> parameter controls whether this will be an indexed or true color image. If 622 * <code>selectionFlag</code> is <code>-1</code>, this will be an indexed image. 
If the value is 623 * <code>ScalarDS.INTERLACE_PIXEL</code> or <code>ScalarDS.INTERLACE_PLANE</code>, it will be a 24-bit true color 624 * image with the indicated interlace mode. 625 * <p> 626 * <ul> 627 * The created attribute descriptions, names, and values are: 628 * <li>The image identifier: name="CLASS", value="IMAGE" 629 * <li>The version of image: name="IMAGE_VERSION", value="1.2" 630 * <li>The range of data values: name="IMAGE_MINMAXRANGE", value=[0, 255] 631 * <li>The type of the image: name="IMAGE_SUBCLASS", value="IMAGE_TRUECOLOR" or "IMAGE_INDEXED" 632 * <li>For IMAGE_TRUECOLOR, the interlace mode: name="INTERLACE_MODE", value="INTERLACE_PIXEL" or "INTERLACE_PLANE" 633 * <li>For IMAGE_INDEXED, the palettes to use in viewing the image: name="PALETTE", value= 1-d array of references 634 * to the palette datasets, with initial value of {-1} 635 * </ul> 636 * <p> 637 * This method is in the H5File class rather than H5ScalarDS because images are typically thought of at the File 638 * Format implementation level. 639 * 640 * @param dataset 641 * The image dataset the attributes are added to. 642 * @param selectionFlag 643 * Selects the image type and, for 24-bit true color images, the interlace mode. Valid values are: 644 * <ul> 645 * <li>-1: Indexed Image. <li>ScalarDS.INTERLACE_PIXEL: True Color Image. The component values for a 646 * pixel are stored contiguously. <li>ScalarDS.INTERLACE_PLANE: True Color Image. Each component is 647 * stored in a separate plane. 648 * </ul> 649 * 650 * @throws Exception 651 * If there is a problem creating the attributes, or if the selectionFlag is invalid. 
652 */ 653 private static final void createImageAttributes(Dataset dataset, int selectionFlag) throws Exception { 654 log.trace("createImageAttributes(): start: dataset={}", dataset.toString()); 655 String subclass = null; 656 String interlaceMode = null; 657 658 if (selectionFlag == ScalarDS.INTERLACE_PIXEL) { 659 log.trace("createImageAttributes(): subclass IMAGE_TRUECOLOR selectionFlag INTERLACE_PIXEL"); 660 subclass = "IMAGE_TRUECOLOR"; 661 interlaceMode = "INTERLACE_PIXEL"; 662 } 663 else if (selectionFlag == ScalarDS.INTERLACE_PLANE) { 664 log.trace("createImageAttributes(): subclass IMAGE_TRUECOLOR selectionFlag INTERLACE_PLANE"); 665 subclass = "IMAGE_TRUECOLOR"; 666 interlaceMode = "INTERLACE_PLANE"; 667 } 668 else if (selectionFlag == -1) { 669 log.trace("createImageAttributes(): subclass IMAGE_INDEXED"); 670 subclass = "IMAGE_INDEXED"; 671 } 672 else { 673 log.debug("createImageAttributes(): invalid selectionFlag"); 674 log.trace("createImageAttributes(): finish"); 675 throw new HDF5Exception("The selectionFlag is invalid."); 676 } 677 678 String attrName = "CLASS"; 679 String[] classValue = { "IMAGE" }; 680 Datatype attrType = new H5Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, -1, -1); 681 Attribute attr = new Attribute(attrName, attrType, null); 682 attr.setValue(classValue); 683 dataset.writeMetadata(attr); 684 685 attrName = "IMAGE_VERSION"; 686 String[] versionValue = { "1.2" }; 687 attrType = new H5Datatype(Datatype.CLASS_STRING, versionValue[0].length() + 1, -1, -1); 688 attr = new Attribute(attrName, attrType, null); 689 attr.setValue(versionValue); 690 dataset.writeMetadata(attr); 691 692 long[] attrDims = { 2 }; 693 attrName = "IMAGE_MINMAXRANGE"; 694 byte[] attrValueInt = { 0, (byte) 255 }; 695 attrType = new H5Datatype(Datatype.CLASS_CHAR, 1, Datatype.NATIVE, Datatype.SIGN_NONE); 696 attr = new Attribute(attrName, attrType, attrDims); 697 attr.setValue(attrValueInt); 698 dataset.writeMetadata(attr); 699 700 attrName = 
"IMAGE_SUBCLASS"; 701 String[] subclassValue = { subclass }; 702 attrType = new H5Datatype(Datatype.CLASS_STRING, subclassValue[0].length() + 1, -1, -1); 703 attr = new Attribute(attrName, attrType, null); 704 attr.setValue(subclassValue); 705 dataset.writeMetadata(attr); 706 707 if ((selectionFlag == ScalarDS.INTERLACE_PIXEL) || (selectionFlag == ScalarDS.INTERLACE_PLANE)) { 708 attrName = "INTERLACE_MODE"; 709 String[] interlaceValue = { interlaceMode }; 710 attrType = new H5Datatype(Datatype.CLASS_STRING, interlaceValue[0].length() + 1, -1, -1); 711 attr = new Attribute(attrName, attrType, null); 712 attr.setValue(interlaceValue); 713 dataset.writeMetadata(attr); 714 } 715 else { 716 attrName = "PALETTE"; 717 long[] palRef = { 0 }; // set ref to null 718 attrType = new H5Datatype(Datatype.CLASS_REFERENCE, 1, Datatype.NATIVE, Datatype.SIGN_NONE); 719 attr = new Attribute(attrName, attrType, null); 720 attr.setValue(palRef); 721 dataset.writeMetadata(attr); 722 } 723 log.trace("createImageAttributes(): finish"); 724 } 725 726 /** 727 * Updates values of scalar dataset object references in copied file. 728 * <p> 729 * This method has very specific functionality as documented below, and the user is advised to pay close attention 730 * when dealing with files that contain references. 731 * <p> 732 * When a copy is made from one HDF file to another, object references and dataset region references are copied, but 733 * the references in the destination file are not updated by the copy and are therefore invalid. 734 * <p> 735 * When an entire file is copied, this method updates the values of the object references and dataset region 736 * references that are in scalar datasets in the destination file so that they point to the correct object(s) in the 737 * destination file. The method does not update references that occur in objects other than scalar datasets. 
738 * <p> 739 * In the current release, the updating of object references is not handled completely as it was not required by the 740 * projects that funded development. There is no support for updates when the copy does not include the entire file. 741 * Nor is there support for updating objects other than scalar datasets in full-file copies. This functionality will 742 * be extended as funding becomes available or, possibly, when the underlying HDF library supports the reference 743 * updates itself. 744 * 745 * @param srcFile 746 * The file that was copied. 747 * @param dstFile 748 * The destination file where the object references will be updated. 749 * 750 * @throws Exception 751 * If there is a problem in the update process. 752 */ 753 public static final void updateReferenceDataset(H5File srcFile, H5File dstFile) throws Exception { 754 log.trace("updateReferenceDataset(): start"); 755 if ((srcFile == null) || (dstFile == null)) { 756 log.debug("updateReferenceDataset(): srcFile or dstFile is null"); 757 log.trace("updateReferenceDataset(): finish"); 758 return; 759 } 760 761 DefaultMutableTreeNode srcRoot = (DefaultMutableTreeNode) srcFile.getRootNode(); 762 DefaultMutableTreeNode newRoot = (DefaultMutableTreeNode) dstFile.getRootNode(); 763 764 Enumeration<?> srcEnum = srcRoot.breadthFirstEnumeration(); 765 Enumeration<?> newEnum = newRoot.breadthFirstEnumeration(); 766 767 // build one-to-one table between objects in 768 // the source file and new file 769 int did = -1; 770 int tid = -1; 771 HObject srcObj, newObj; 772 Hashtable<String, long[]> oidMap = new Hashtable<String, long[]>(); 773 List<ScalarDS> refDatasets = new Vector<ScalarDS>(); 774 while (newEnum.hasMoreElements() && srcEnum.hasMoreElements()) { 775 srcObj = (HObject) ((DefaultMutableTreeNode) srcEnum.nextElement()).getUserObject(); 776 newObj = (HObject) ((DefaultMutableTreeNode) newEnum.nextElement()).getUserObject(); 777 oidMap.put(String.valueOf((srcObj.getOID())[0]), newObj.getOID()); 
778 did = -1; 779 tid = -1; 780 781 // for Scalar DataSets in destination, if there is an object 782 // reference in the dataset, add it to the refDatasets list for 783 // later updating. 784 if (newObj instanceof ScalarDS) { 785 ScalarDS sd = (ScalarDS) newObj; 786 did = sd.open(); 787 if (did >= 0) { 788 try { 789 tid = H5.H5Dget_type(did); 790 if (H5.H5Tequal(tid, HDF5Constants.H5T_STD_REF_OBJ)) { 791 refDatasets.add(sd); 792 } 793 } 794 catch (Exception ex) { 795 log.debug("updateReferenceDataset(): ScalarDS reference failure: ", ex); 796 } 797 finally { 798 try { 799 H5.H5Tclose(tid); 800 } 801 catch (Exception ex) { 802 log.debug("updateReferenceDataset(): ScalarDS reference H5Tclose(tid {}) failure: ", tid, ex); 803 } 804 } 805 } 806 sd.close(did); 807 } // if (newObj instanceof ScalarDS) 808 } 809 810 // Update the references in the scalar datasets in the dest file. 811 H5ScalarDS d = null; 812 int sid = -1; 813 int size = 0; 814 int rank = 0; 815 int n = refDatasets.size(); 816 for (int i = 0; i < n; i++) { 817 log.trace("updateReferenceDataset(): Update the references in the scalar datasets in the dest file"); 818 d = (H5ScalarDS) refDatasets.get(i); 819 byte[] buf = null; 820 long[] refs = null; 821 822 try { 823 did = d.open(); 824 if (did >= 0) { 825 tid = H5.H5Dget_type(did); 826 sid = H5.H5Dget_space(did); 827 rank = H5.H5Sget_simple_extent_ndims(sid); 828 size = 1; 829 if (rank > 0) { 830 long[] dims = new long[rank]; 831 H5.H5Sget_simple_extent_dims(sid, dims, null); 832 log.trace("updateReferenceDataset(): rank={}, dims={}", rank, dims); 833 for (int j = 0; j < rank; j++) { 834 size *= (int) dims[j]; 835 } 836 dims = null; 837 } 838 839 buf = new byte[size * 8]; 840 H5.H5Dread(did, tid, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, buf); 841 842 // update the ref values 843 refs = HDFNativeData.byteToLong(buf); 844 size = refs.length; 845 for (int j = 0; j < size; j++) { 846 long[] theOID = 
oidMap.get(String.valueOf(refs[j])); 847 if (theOID != null) { 848 refs[j] = theOID[0]; 849 } 850 } 851 852 // write back to file 853 H5.H5Dwrite(did, tid, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, refs); 854 } 855 else { 856 log.debug("updateReferenceDataset(): dest file dataset failed to open"); 857 } 858 } 859 catch (Exception ex) { 860 log.debug("updateReferenceDataset(): Reference[{}] failure: ", i, ex); 861 log.trace("updateReferenceDataset(): Reference[{}] continue", i); 862 continue; 863 } 864 finally { 865 try { 866 H5.H5Tclose(tid); 867 } 868 catch (Exception ex) { 869 log.debug("updateReferenceDataset(): H5ScalarDS reference[{}] H5Tclose(tid {}) failure: ", i, tid, ex); 870 } 871 try { 872 H5.H5Sclose(sid); 873 } 874 catch (Exception ex) { 875 log.debug("updateReferenceDataset(): H5ScalarDS reference[{}] H5Sclose(sid {}) failure: ", i, sid, ex); 876 } 877 try { 878 H5.H5Dclose(did); 879 } 880 catch (Exception ex) { 881 log.debug("updateReferenceDataset(): H5ScalarDS reference[{}] H5Dclose(did {}) failure: ", i, did, ex); 882 } 883 } 884 885 refs = null; 886 buf = null; 887 } // for (int i=0; i<n; i++) 888 } 889 890 /*************************************************************************** 891 * Implementation Class methods. These methods are related to the implementing H5File class, but not to a particular 892 * instance of the class. Since we can't override class methods (they can only be shadowed in Java), these are 893 * instance methods. 894 **************************************************************************/ 895 896 /** 897 * Returns the version of the HDF5 library. 898 * 899 * @see hdf.object.FileFormat#getLibversion() 900 */ 901 @Override 902 public String getLibversion() { 903 int[] vers = new int[3]; 904 String ver = "HDF5 "; 905 906 try { 907 H5.H5get_libversion(vers); 908 } 909 catch (Throwable ex) { 910 ex.printStackTrace(); 911 } 912 913 ver += vers[0] + "." + vers[1] + "." 
+ vers[2]; 914 log.debug("getLibversion(): libversion is {}", ver); 915 916 return ver; 917 } 918 919 /** 920 * Checks if the specified FileFormat instance has the HDF5 format. 921 * 922 * @see hdf.object.FileFormat#isThisType(hdf.object.FileFormat) 923 */ 924 @Override 925 public boolean isThisType(FileFormat theFile) { 926 return (theFile instanceof H5File); 927 } 928 929 /** 930 * Checks if the specified file has the HDF5 format. 931 * 932 * @see hdf.object.FileFormat#isThisType(java.lang.String) 933 */ 934 @Override 935 public boolean isThisType(String filename) { 936 boolean isH5 = false; 937 938 try { 939 isH5 = H5.H5Fis_hdf5(filename); 940 } 941 catch (HDF5Exception ex) { 942 isH5 = false; 943 } 944 945 return isH5; 946 } 947 948 /** 949 * Creates an HDF5 file with the specified name and returns a new H5File instance associated with the file. 950 * 951 * @throws Exception 952 * If the file cannot be created or if createFlag has unexpected value. 953 * 954 * @see hdf.object.FileFormat#createFile(java.lang.String, int) 955 * @see #H5File(String, int) 956 */ 957 @Override 958 public FileFormat createFile(String filename, int createFlag) throws Exception { 959 log.trace("createFile(): start: filename={} createFlag={}", filename, createFlag); 960 // Flag if we need to create or truncate the file. 
961 Boolean doCreateFile = true; 962 963 // Won't create or truncate if CREATE_OPEN specified and file exists 964 if ((createFlag & FILE_CREATE_OPEN) == FILE_CREATE_OPEN) { 965 File f = new File(filename); 966 if (f.exists()) { 967 doCreateFile = false; 968 } 969 } 970 log.trace("createFile(): doCreateFile={}", doCreateFile); 971 972 if (doCreateFile) { 973 int fapl = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS); 974 975 if ((createFlag & FILE_CREATE_EARLY_LIB) != FILE_CREATE_EARLY_LIB) { 976 H5.H5Pset_libver_bounds(fapl, HDF5Constants.H5F_LIBVER_LATEST, HDF5Constants.H5F_LIBVER_LATEST); 977 } 978 979 int fileid = H5.H5Fcreate(filename, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, fapl); 980 try { 981 H5.H5Pclose(fapl); 982 H5.H5Fclose(fileid); 983 } 984 catch (HDF5Exception ex) { 985 log.debug("H5 file, {} failure: ", filename, ex); 986 } 987 } 988 989 log.trace("createFile(): finish"); 990 return new H5File(filename, WRITE); 991 } 992 993 /** 994 * Creates an H5File instance with specified file name and access. 995 * 996 * @see hdf.object.FileFormat#createInstance(java.lang.String, int) 997 * @see #H5File(String, int) 998 * 999 * @throws Exception 1000 * If there is a failure. 1001 */ 1002 @Override 1003 public FileFormat createInstance(String filename, int access) throws Exception { 1004 log.trace("createInstance start"); 1005 return new H5File(filename, access); 1006 } 1007 1008 /*************************************************************************** 1009 * Instance Methods 1010 * 1011 * These methods are related to the H5File class and to particular instances of objects with this class type. 1012 **************************************************************************/ 1013 1014 /** 1015 * Opens file and returns a file identifier. 1016 * 1017 * @see hdf.object.FileFormat#open() 1018 */ 1019 @Override 1020 public int open() throws Exception { 1021 return open(true); 1022 } 1023 1024 /** 1025 * Opens file and returns a file identifier. 
1026 * 1027 * @see hdf.object.FileFormat#open(int...) 1028 */ 1029 @Override 1030 public int open(int... indexList) throws Exception { 1031 setIndexType(indexList[0]); 1032 setIndexOrder(indexList[1]); 1033 return open(true); 1034 } 1035 1036 /** 1037 * Sets the bounds of library versions. 1038 * 1039 * @param low 1040 * The earliest version of the library. 1041 * @param high 1042 * The latest version of the library. 1043 * 1044 * @throws HDF5Exception 1045 * If there is an error at the HDF5 library level. 1046 */ 1047 public void setLibBounds(int low, int high) throws Exception { 1048 int fapl = HDF5Constants.H5P_DEFAULT; 1049 1050 if (fid < 0) 1051 return; 1052 1053 fapl = H5.H5Fget_access_plist(fid); 1054 1055 try { 1056 if (low < 0) 1057 low = HDF5Constants.H5F_LIBVER_EARLIEST; 1058 1059 if (high < 0) 1060 high = HDF5Constants.H5F_LIBVER_LATEST; 1061 1062 H5.H5Pset_libver_bounds(fapl, low, high); 1063 H5.H5Pget_libver_bounds(fapl, libver); 1064 } 1065 finally { 1066 try { 1067 H5.H5Pclose(fapl); 1068 } 1069 catch (Exception e) { 1070 log.debug("setLibBounds(): libver bounds H5Pclose(fapl {}) failure: ", fapl, e); 1071 } 1072 } 1073 } 1074 1075 /** 1076 * Gets the bounds of library versions. 1077 * 1078 * @return libver The earliest and latest version of the library. 1079 * 1080 * @throws HDF5Exception 1081 * If there is an error at the HDF5 library level. 1082 */ 1083 public int[] getLibBounds() throws Exception { 1084 return libver; 1085 } 1086 1087 /** 1088 * Closes file associated with this H5File instance. 1089 * 1090 * @see hdf.object.FileFormat#close() 1091 * 1092 * @throws HDF5Exception 1093 * If there is an error at the HDF5 library level. 
     */
    @Override
    public void close() throws HDF5Exception {
        log.trace("close(): start");
        if (fid < 0) {
            log.debug("close(): file {} is not open", fullFileName);
            log.trace("close(): finish");
            return;
        }
        // The current working directory may be changed at Dataset.read()
        // by System.setProperty("user.dir", newdir) to make it work for external
        // datasets. We need to set it back to the original current working
        // directory (when hdf-java application started) before the file
        // is closed/opened. Otherwise, relative path, e.g. "./test.h5" may
        // not work
        String rootPath = System.getProperty("hdfview.workdir");
        if (rootPath == null) {
            rootPath = System.getProperty("user.dir");
        }
        System.setProperty("user.dir", rootPath);//H5.H5Dchdir_ext(rootPath);

        // Clear cached data in every dataset/group under the root so the
        // in-memory tree does not hold stale values after the file is closed.
        if (rootNode != null) {
            DefaultMutableTreeNode theNode = null;
            HObject theObj = null;
            Enumeration<?> local_enum = (rootNode).breadthFirstEnumeration();
            while (local_enum.hasMoreElements()) {
                theNode = (DefaultMutableTreeNode) local_enum.nextElement();
                theObj = (HObject) theNode.getUserObject();

                if (theObj instanceof Dataset) {
                    log.trace("close(): clear Dataset {}", ((Dataset) theObj).toString());
                    ((Dataset) theObj).clear();
                }
                else if (theObj instanceof Group) {
                    log.trace("close(): clear Group {}", ((Group) theObj).toString());
                    ((Group) theObj).clear();
                }
            }
        }

        // Close all open objects associated with this file.
        try {
            int n = 0;
            int type = -1;
            int oids[];
            n = H5.H5Fget_obj_count(fid, HDF5Constants.H5F_OBJ_ALL);
            log.trace("close(): open objects={}", n);

            if (n > 0) {
                oids = new int[n];
                H5.H5Fget_obj_ids(fid, HDF5Constants.H5F_OBJ_ALL, n, oids);

                // Each identifier must be released with the close call matching
                // its identifier type; failures are logged and ignored so one
                // bad handle cannot prevent the file itself from closing.
                for (int i = 0; i < n; i++) {
                    log.trace("close(): object[{}] id={}", i, oids[i]);
                    type = H5.H5Iget_type(oids[i]);

                    if (HDF5Constants.H5I_DATASET == type) {
                        try {
                            H5.H5Dclose(oids[i]);
                        }
                        catch (Exception ex2) {
                            log.debug("close(): Object[{}] H5Dclose(oids[{}] {}) failure: ", i, i, oids[i], ex2);
                        }
                    }
                    else if (HDF5Constants.H5I_GROUP == type) {
                        try {
                            H5.H5Gclose(oids[i]);
                        }
                        catch (Exception ex2) {
                            log.debug("close(): Object[{}] H5Gclose(oids[{}] {}) failure: ", i, i, oids[i], ex2);
                        }
                    }
                    else if (HDF5Constants.H5I_DATATYPE == type) {
                        try {
                            H5.H5Tclose(oids[i]);
                        }
                        catch (Exception ex2) {
                            log.debug("close(): Object[{}] H5Tclose(oids[{}] {}) failure: ", i, i, oids[i], ex2);
                        }
                    }
                    else if (HDF5Constants.H5I_ATTR == type) {
                        try {
                            H5.H5Aclose(oids[i]);
                        }
                        catch (Exception ex2) {
                            log.debug("close(): Object[{}] H5Aclose(oids[{}] {}) failure: ", i, i, oids[i], ex2);
                        }
                    }
                } // for (int i=0; i<n; i++)
            } // if ( n>0)
        }
        catch (Exception ex) {
            log.debug("close(): failure: ", ex);
        }

        // Flush any pending writes before closing the file handle.
        try {
            H5.H5Fflush(fid, HDF5Constants.H5F_SCOPE_GLOBAL);
        }
        catch (Exception ex) {
            log.debug("close(): H5Fflush(fid {}) failure: ", fid, ex);
        }

        try {
            H5.H5Fclose(fid);
        }
        catch (Exception ex) {
            log.debug("close(): H5Fclose(fid {}) failure: ", fid, ex);
        }

        // Set fid to -1 but don't reset rootObject
        fid = -1;
        log.trace("close(): finish");
    }

    /**
     * Returns the root node of the open HDF5 File.
1211 * 1212 * @see hdf.object.FileFormat#getRootNode() 1213 */ 1214 @Override 1215 public TreeNode getRootNode() { 1216 return rootNode; 1217 } 1218 1219 /* 1220 * (non-Javadoc) 1221 * 1222 * @see hdf.object.FileFormat#get(java.lang.String) 1223 */ 1224 @Override 1225 public HObject get(String path) throws Exception { 1226 log.trace("get(): start"); 1227 HObject obj = null; 1228 1229 if ((path == null) || (path.length() <= 0)) { 1230 log.debug("get(): path is null or invalid path length"); 1231 System.err.println("(path == null) || (path.length() <= 0)"); 1232 log.trace("get(): finish"); 1233 return null; 1234 } 1235 1236 // replace the wrong slash and get rid of "//" 1237 path = path.replace('\\', '/'); 1238 path = "/" + path; 1239 path = path.replaceAll("//", "/"); 1240 log.trace("H5File:get path:{}", path); 1241 1242 // the whole file tree is loaded. find the object in the tree 1243 if (rootNode != null) { 1244 obj = findObject(this, path); 1245 } 1246 1247 // found object in memory 1248 if (obj != null) { 1249 log.trace("get(): Found object in memory"); 1250 log.trace("get(): finish"); 1251 return obj; 1252 } 1253 1254 // open only the requested object 1255 String name = null, pPath = null; 1256 if (path.equals("/")) { 1257 name = "/"; // the root 1258 } 1259 else { 1260 // separate the parent path and the object name 1261 if (path.endsWith("/")) { 1262 path = path.substring(0, path.length() - 1); 1263 } 1264 1265 int idx = path.lastIndexOf('/'); 1266 name = path.substring(idx + 1); 1267 if (idx == 0) { 1268 pPath = "/"; 1269 } 1270 else { 1271 pPath = path.substring(0, idx); 1272 } 1273 } 1274 1275 // do not open the full tree structure, only the file handler 1276 int fid_before_open = fid; 1277 log.trace("H5File:get fid_before_open with:{}", fid); 1278 fid = open(false); 1279 if (fid < 0) { 1280 log.debug("get(): Invalid FID"); 1281 log.trace("get(): finish"); 1282 System.err.println("Could not open file handler"); 1283 return null; 1284 } 1285 1286 try { 
1287 H5O_info_t info; 1288 int objType; 1289 int oid = H5.H5Oopen(fid, path, HDF5Constants.H5P_DEFAULT); 1290 log.trace("H5File:get H5Oopen:{}", oid); 1291 1292 if (oid >= 0) { 1293 info = H5.H5Oget_info(oid); 1294 log.trace("H5File:get H5Oget_info-type:{}", info.type); 1295 objType = info.type; 1296 if (objType == HDF5Constants.H5O_TYPE_DATASET) { 1297 int did = -1; 1298 try { 1299 log.trace("H5File:get H5O_TYPE_DATASET:{}-{}", name, pPath); 1300 did = H5.H5Dopen(fid, path, HDF5Constants.H5P_DEFAULT); 1301 obj = getDataset(did, name, pPath); 1302 } 1303 finally { 1304 try { 1305 H5.H5Dclose(did); 1306 } 1307 catch (Exception ex) { 1308 log.debug("get(): {} H5Dclose(did {}) failure: ", path, did, ex); 1309 } 1310 } 1311 } 1312 else if (objType == HDF5Constants.H5O_TYPE_GROUP) { 1313 int gid = -1; 1314 try { 1315 log.trace("H5File:get H5O_TYPE_GROUP:{}-{}", name, pPath); 1316 gid = H5.H5Gopen(fid, path, HDF5Constants.H5P_DEFAULT); 1317 H5Group pGroup = null; 1318 if (pPath != null) { 1319 pGroup = new H5Group(this, null, pPath, null); 1320 obj = getGroup(gid, name, pGroup); 1321 pGroup.addToMemberList(obj); 1322 } 1323 else { 1324 obj = getGroup(gid, name, pGroup); 1325 } 1326 } 1327 finally { 1328 try { 1329 H5.H5Gclose(gid); 1330 } 1331 catch (Exception ex) { 1332 log.debug("get(): {} H5Gclose(gid {}) failure: ", path, gid, ex); 1333 } 1334 } 1335 } 1336 else if (objType == HDF5Constants.H5O_TYPE_NAMED_DATATYPE) { 1337 obj = new H5Datatype(this, name, pPath); 1338 } 1339 } 1340 try { 1341 H5.H5Oclose(oid); 1342 } 1343 catch (Exception ex) { 1344 log.debug("get(): H5Oclose(oid {}) failure: ", oid, ex); 1345 ex.printStackTrace(); 1346 } 1347 } 1348 catch (Exception ex) { 1349 log.debug("get(): Exception finding obj {}", path, ex); 1350 obj = null; 1351 } 1352 finally { 1353 if ((fid_before_open <= 0) && (obj == null)) { 1354 // close the fid that is not attached to any object 1355 try { 1356 H5.H5Fclose(fid); 1357 } 1358 catch (Exception ex) { 1359 log.debug("get(): 
{} H5Fclose(fid {}) failure: ", path, fid, ex); 1360 } 1361 fid = fid_before_open; 1362 } 1363 } 1364 log.trace("H5File:get finish"); 1365 1366 return obj; 1367 } 1368 1369 /* 1370 * (non-Javadoc) 1371 * 1372 * @see hdf.object.FileFormat#createDatatype(int, int, int, int, java.lang.String) 1373 */ 1374 @Override 1375 public Datatype createDatatype(int tclass, int tsize, int torder, int tsign, String name) throws Exception { 1376 return createDatatype(tclass, tsize, torder, tsign, null, name); 1377 } 1378 1379 /* 1380 * (non-Javadoc) 1381 * 1382 * @see hdf.object.FileFormat#createDatatype(int, int, int, int, Datatype, java.lang.String) 1383 */ 1384 @Override 1385 public Datatype createDatatype(int tclass, int tsize, int torder, int tsign, Datatype tbase, String name) 1386 throws Exception { 1387 log.trace("createDatatype(): start: name={} class={} size={} order={} sign={}", name, tclass, tsize, torder, tsign); 1388 if (tbase != null) log.trace("createDatatype(): baseType is {}", tbase.getDatatypeDescription()); 1389 1390 int tid = -1; 1391 H5Datatype dtype = null; 1392 1393 try { 1394 H5Datatype t = (H5Datatype) createDatatype(tclass, tsize, torder, tsign, tbase); 1395 if ((tid = t.toNative()) < 0) { 1396 log.debug("createDatatype(): toNative failure"); 1397 log.trace("createDatatype(): finish"); 1398 throw new Exception("toNative failed"); 1399 } 1400 1401 H5.H5Tcommit(fid, name, tid, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, 1402 HDF5Constants.H5P_DEFAULT); 1403 1404 byte[] ref_buf = H5.H5Rcreate(fid, name, HDF5Constants.H5R_OBJECT, -1); 1405 long l = HDFNativeData.byteToLong(ref_buf, 0); 1406 1407 long[] oid = new long[1]; 1408 oid[0] = l; // save the object ID 1409 1410 dtype = new H5Datatype(this, null, name); 1411 } 1412 finally { 1413 H5.H5Tclose(tid); 1414 } 1415 1416 log.trace("createDatatype(): finish"); 1417 return dtype; 1418 } 1419 1420 /*************************************************************************** 1421 * Methods related to 
Datatypes and HObjects in HDF5 Files. Strictly speaking, these methods aren't related to
     * H5File and the actions could be carried out through the H5Group, H5Datatype and H5*DS classes. But, in some cases
     * they allow a null input and expect the generated object to be of HDF5 type. So, we put them in the H5File class
     * so that we create the proper type of HObject... H5Group for example.
     *
     * Here again, if there could be Implementation Class methods we'd use those. But, since we can't override class
     * methods (they can only be shadowed in Java), these are instance methods.
     *
     **************************************************************************/

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.FileFormat#createDatatype(int, int, int, int)
     */
    @Override
    public Datatype createDatatype(int tclass, int tsize, int torder, int tsign) throws Exception {
        log.trace("create datatype");
        // In-memory datatype only; nothing is committed to the file here.
        return new H5Datatype(tclass, tsize, torder, tsign);
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.FileFormat#createDatatype(int, int, int, int, Datatype)
     */
    @Override
    public Datatype createDatatype(int tclass, int tsize, int torder, int tsign, Datatype tbase) throws Exception {
        log.trace("create datatype with base");
        return new H5Datatype(tclass, tsize, torder, tsign, tbase);
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.FileFormat#createScalarDS(java.lang.String, hdf.object.Group, hdf.object.Datatype,
     * long[], long[], long[], int, java.lang.Object)
     */
    @Override
    public Dataset createScalarDS(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims, long[] chunks,
            int gzip, Object fillValue, Object data) throws Exception {
        log.trace("createScalarDS(): name={}", name);
        if (pgroup == null) {
            // create new dataset at the root group by default
            pgroup = (Group) get("/");
        }

        return H5ScalarDS.create(name, pgroup, type, dims, maxdims, chunks, gzip, fillValue, data);
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.FileFormat#createCompoundDS(java.lang.String, hdf.object.Group, long[], long[], long[],
     * int, java.lang.String[], hdf.object.Datatype[], int[], java.lang.Object)
     */
    @Override
    public Dataset createCompoundDS(String name, Group pgroup, long[] dims, long[] maxdims, long[] chunks, int gzip,
            String[] memberNames, Datatype[] memberDatatypes, int[] memberSizes, Object data) throws Exception {
        log.trace("createCompoundDS(): start: name={}", name);
        int nMembers = memberNames.length;
        int memberRanks[] = new int[nMembers];
        long memberDims[][] = new long[nMembers][1];
        Dataset ds = null;

        // Each member is treated as rank-1; memberSizes (when supplied)
        // provides the per-member element count, otherwise 1.
        for (int i = 0; i < nMembers; i++) {
            memberRanks[i] = 1;
            if (memberSizes == null) {
                memberDims[i][0] = 1;
            }
            else {
                memberDims[i][0] = memberSizes[i];
            }
        }

        if (pgroup == null) {
            // create new dataset at the root group by default
            pgroup = (Group) get("/");
        }
        log.trace("createCompoundDS name={}", name);
        ds = H5CompoundDS.create(name, pgroup, dims, maxdims, chunks, gzip, memberNames, memberDatatypes, memberRanks,
                memberDims, data);

        log.trace("createCompoundDS(): finish");
        return ds;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.FileFormat#createImage(java.lang.String, hdf.object.Group, hdf.object.Datatype,
     * long[], long[], long[], int, int, int, java.lang.Object)
     */
    @Override
    public Dataset createImage(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims, long[] chunks,
            int gzip, int ncomp, int interlace, Object data) throws Exception {
        log.trace("createImage(): start: name={}", name);
        if (pgroup == null) { // create at the root group by default
            pgroup = (Group) get("/");
        }

        H5ScalarDS
dataset = (H5ScalarDS)H5ScalarDS.create(name, pgroup, type, dims, maxdims, chunks, gzip, data); 1523 1524 try { 1525 H5File.createImageAttributes(dataset, interlace); 1526 dataset.setIsImage(true); 1527 } 1528 catch (Exception ex) { 1529 log.debug("createImage(): {} createImageAttributtes failure: ", name, ex); 1530 } 1531 1532 log.trace("createImage(): finish"); 1533 return dataset; 1534 } 1535 1536 /*** 1537 * Creates a new group with specified name in existing group. 1538 * 1539 * @see hdf.object.FileFormat#createGroup(java.lang.String, hdf.object.Group) 1540 */ 1541 @Override 1542 public Group createGroup(String name, Group pgroup) throws Exception { 1543 return this.createGroup(name, pgroup, HDF5Constants.H5P_DEFAULT); 1544 1545 } 1546 1547 /*** 1548 * Creates a new group with specified name in existing group and with the group creation properties list, gplist. 1549 * 1550 * @see hdf.object.h5.H5Group#create(java.lang.String, hdf.object.Group, int...) 1551 * 1552 */ 1553 public Group createGroup(String name, Group pgroup, int... gplist) throws Exception { 1554 // create new group at the root 1555 if (pgroup == null) { 1556 pgroup = (Group) this.get("/"); 1557 } 1558 1559 return H5Group.create(name, pgroup, gplist); 1560 } 1561 1562 /*** 1563 * Creates the group creation property list identifier, gcpl. This identifier is used when creating Groups. 1564 * 1565 * @see hdf.object.FileFormat#createGcpl(int, int, int) 1566 * 1567 */ 1568 public int createGcpl(int creationorder, int maxcompact, int mindense) throws Exception { 1569 log.trace("createGcpl(): start"); 1570 int gcpl = -1; 1571 try { 1572 gcpl = H5.H5Pcreate(HDF5Constants.H5P_GROUP_CREATE); 1573 if (gcpl >= 0) { 1574 // Set link creation order. 
1575 if (creationorder == Group.CRT_ORDER_TRACKED) { 1576 log.trace("createGcpl(): creation order ORDER_TRACKED"); 1577 H5.H5Pset_link_creation_order(gcpl, HDF5Constants.H5P_CRT_ORDER_TRACKED); 1578 } 1579 else if (creationorder == Group.CRT_ORDER_INDEXED) { 1580 log.trace("createGcpl(): creation order ORDER_INDEXED"); 1581 H5.H5Pset_link_creation_order(gcpl, HDF5Constants.H5P_CRT_ORDER_TRACKED 1582 + HDF5Constants.H5P_CRT_ORDER_INDEXED); 1583 } 1584 // Set link storage. 1585 H5.H5Pset_link_phase_change(gcpl, maxcompact, mindense); 1586 } 1587 } 1588 catch (Exception ex) { 1589 log.debug("createGcpl(): failure: ", ex); 1590 ex.printStackTrace(); 1591 } 1592 1593 log.trace("createGcpl(): finish"); 1594 return gcpl; 1595 } 1596 1597 /* 1598 * (non-Javadoc) 1599 * 1600 * @see hdf.object.FileFormat#createLink(hdf.object.Group, java.lang.String, hdf.object.HObject) 1601 */ 1602 @Override 1603 public HObject createLink(Group parentGroup, String name, Object currentObj) throws Exception { 1604 if (currentObj instanceof HObject) 1605 return this.createLink(parentGroup, name, (HObject) currentObj, Group.LINK_TYPE_HARD); 1606 else if (currentObj instanceof String) 1607 return this.createLink(parentGroup, name, (String) currentObj, Group.LINK_TYPE_HARD); 1608 1609 return null; 1610 } 1611 1612 /** 1613 * Creates a link to an object in the open file. 1614 * <p> 1615 * If parentGroup is null, the new link is created in the root group. 1616 * 1617 * @param parentGroup 1618 * The group where the link is created. 1619 * @param name 1620 * The name of the link. 1621 * @param currentObj 1622 * The existing object the new link will reference. 1623 * @param lType 1624 * The type of link to be created. It can be a hard link, a soft link or an external link. 1625 * 1626 * @return The object pointed to by the new link if successful; otherwise returns null. 1627 * 1628 * @throws Exception 1629 * The exceptions thrown vary depending on the implementing class. 
     */
    public HObject createLink(Group parentGroup, String name, HObject currentObj, int lType) throws Exception {
        log.trace("createLink(): start: name={}", name);
        HObject obj = null;
        int type = 0;
        String current_full_name = null, new_full_name = null, parent_path = null;

        if (currentObj == null) {
            log.debug("createLink(): Link target is null");
            log.trace("createLink(): finish");
            throw new HDF5Exception("The object pointed to by the link cannot be null.");
        }
        if ((parentGroup == null) || parentGroup.isRoot()) {
            parent_path = HObject.separator;
        }
        else {
            parent_path = parentGroup.getPath() + HObject.separator + parentGroup.getName() + HObject.separator;
        }

        new_full_name = parent_path + name;

        // Map the FileFormat link-type constant onto the HDF5 library constant.
        if (lType == Group.LINK_TYPE_HARD) {
            type = HDF5Constants.H5L_TYPE_HARD;
            log.trace("createLink(): type H5L_TYPE_HARD");
        }
        else if (lType == Group.LINK_TYPE_SOFT) {
            type = HDF5Constants.H5L_TYPE_SOFT;
            log.trace("createLink(): type H5L_TYPE_SOFT");
        }
        else if (lType == Group.LINK_TYPE_EXTERNAL) {
            type = HDF5Constants.H5L_TYPE_EXTERNAL;
            log.trace("createLink(): type H5L_TYPE_EXTERNAL");
        }

        // An existing link with the same name is replaced.
        if (H5.H5Lexists(fid, new_full_name, HDF5Constants.H5P_DEFAULT)) {
            H5.H5Ldelete(fid, new_full_name, HDF5Constants.H5P_DEFAULT);
        }

        if (type == HDF5Constants.H5L_TYPE_HARD) {
            if ((currentObj instanceof Group) && ((Group) currentObj).isRoot()) {
                log.debug("createLink(): cannot create link to root group");
                log.trace("createLink(): finish");
                throw new HDF5Exception("Cannot make a link to the root group.");
            }
            current_full_name = currentObj.getPath() + HObject.separator + currentObj.getName();

            H5.H5Lcreate_hard(fid, current_full_name, fid, new_full_name, HDF5Constants.H5P_DEFAULT,
                    HDF5Constants.H5P_DEFAULT);
        }

        else if (type == HDF5Constants.H5L_TYPE_SOFT) {
            H5.H5Lcreate_soft(currentObj.getFullName(), fid, new_full_name, HDF5Constants.H5P_DEFAULT,
                    HDF5Constants.H5P_DEFAULT);
        }

        else if (type == HDF5Constants.H5L_TYPE_EXTERNAL) {
            H5.H5Lcreate_external(currentObj.getFile(), currentObj.getFullName(), fid, new_full_name,
                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
        }

        // Wrap the new link in an HObject of the same kind as the target so
        // the caller gets a usable in-memory object back.
        if (currentObj instanceof Group) {
            log.trace("createLink(): Link target is type H5Group");
            obj = new H5Group(this, name, parent_path, parentGroup);
        }
        else if (currentObj instanceof H5Datatype) {
            log.trace("createLink(): Link target is type H5Datatype");
            obj = new H5Datatype(this, name, parent_path);
        }
        else if (currentObj instanceof H5CompoundDS) {
            log.trace("createLink(): Link target is type H5CompoundDS");
            obj = new H5CompoundDS(this, name, parent_path);
        }
        else if (currentObj instanceof H5ScalarDS) {
            log.trace("createLink(): Link target is type H5ScalarDS");
            obj = new H5ScalarDS(this, name, parent_path);
        }

        log.trace("createLink(): finish");
        return obj;
    }

    /**
     * Creates a soft or external link to object in a file that does not exist at the time the link is created.
     *
     * @param parentGroup
     *            The group where the link is created.
     * @param name
     *            The name of the link.
     * @param currentObj
     *            The name of the object the new link will reference. The object doesn't have to exist.
     * @param lType
     *            The type of link to be created.
     *
     * @return The H5Link object pointed to by the new link if successful; otherwise returns null.
     *
     * @throws Exception
     *             The exceptions thrown vary depending on the implementing class.
1727 */ 1728 public HObject createLink(Group parentGroup, String name, String currentObj, int lType) throws Exception { 1729 log.trace("createLink(): start: name={}", name); 1730 HObject obj = null; 1731 int type = 0; 1732 String new_full_name = null, parent_path = null; 1733 1734 if (currentObj == null) { 1735 log.debug("createLink(): Link target is null"); 1736 log.trace("createLink(): finish"); 1737 throw new HDF5Exception("The object pointed to by the link cannot be null."); 1738 } 1739 if ((parentGroup == null) || parentGroup.isRoot()) { 1740 parent_path = HObject.separator; 1741 } 1742 else { 1743 parent_path = parentGroup.getPath() + HObject.separator + parentGroup.getName() + HObject.separator; 1744 } 1745 1746 new_full_name = parent_path + name; 1747 1748 if (lType == Group.LINK_TYPE_HARD) { 1749 type = HDF5Constants.H5L_TYPE_HARD; 1750 log.trace("createLink(): type H5L_TYPE_HARD"); 1751 } 1752 else if (lType == Group.LINK_TYPE_SOFT) { 1753 type = HDF5Constants.H5L_TYPE_SOFT; 1754 log.trace("createLink(): type H5L_TYPE_SOFT"); 1755 } 1756 else if (lType == Group.LINK_TYPE_EXTERNAL) { 1757 type = HDF5Constants.H5L_TYPE_EXTERNAL; 1758 log.trace("createLink(): type H5L_TYPE_EXTERNAL"); 1759 } 1760 1761 if (H5.H5Lexists(fid, new_full_name, HDF5Constants.H5P_DEFAULT)) { 1762 H5.H5Ldelete(fid, new_full_name, HDF5Constants.H5P_DEFAULT); 1763 } 1764 1765 if (type == HDF5Constants.H5L_TYPE_SOFT) { 1766 H5.H5Lcreate_soft(currentObj, fid, new_full_name, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 1767 } 1768 1769 else if (type == HDF5Constants.H5L_TYPE_EXTERNAL) { 1770 String fileName = null; 1771 String objectName = null; 1772 1773 // separate the object name and the file name 1774 fileName = currentObj.substring(0, currentObj.lastIndexOf(FileFormat.FILE_OBJ_SEP)); 1775 objectName = currentObj.substring(currentObj.indexOf(FileFormat.FILE_OBJ_SEP)); 1776 objectName = objectName.substring(3); 1777 1778 H5.H5Lcreate_external(fileName, objectName, fid, 
new_full_name, HDF5Constants.H5P_DEFAULT, 1779 HDF5Constants.H5P_DEFAULT); 1780 } 1781 1782 if (name.startsWith(HObject.separator)) { 1783 name = name.substring(1); 1784 } 1785 obj = new H5Link(this, name, parent_path); 1786 1787 log.trace("createLink(): finish"); 1788 return obj; 1789 } 1790 1791 /** 1792 * reload the sub-tree structure from file. 1793 * <p> 1794 * reloadTree(Group g) is useful when the structure of the group in file is changed while the group structure in 1795 * memory is not changed. 1796 * 1797 * @param g 1798 * the group where the structure is to be reloaded in memory 1799 */ 1800 public void reloadTree(Group g) { 1801 if (fid < 0 || rootNode == null || g == null) { 1802 log.debug("reloadTree(): Invalid fid or null object"); 1803 return; 1804 } 1805 1806 HObject theObj = null; 1807 DefaultMutableTreeNode theNode = null; 1808 1809 if (g.equals(rootNode.getUserObject())) 1810 theNode = rootNode; 1811 else { 1812 Enumeration<?> local_enum = rootNode.breadthFirstEnumeration(); 1813 while (local_enum.hasMoreElements()) { 1814 theNode = (DefaultMutableTreeNode) local_enum.nextElement(); 1815 theObj = (HObject) theNode.getUserObject(); 1816 if (g.equals(theObj)) 1817 break; 1818 } 1819 } 1820 1821 theNode.removeAllChildren(); 1822 depth_first(theNode, Integer.MIN_VALUE); 1823 } 1824 1825 /* 1826 * (non-Javadoc) NOTE: Object references are copied but not updated by this method. 
1827 * 1828 * @see hdf.object.FileFormat#copy(hdf.object.HObject, hdf.object.Group, java.lang.String) 1829 */ 1830 @Override 1831 public TreeNode copy(HObject srcObj, Group dstGroup, String dstName) throws Exception { 1832 TreeNode newNode = null; 1833 log.trace("copy(): start: srcObj={} dstGroup={} dstName={}", srcObj, dstGroup, dstName); 1834 1835 if ((srcObj == null) || (dstGroup == null)) { 1836 log.debug("copy(): srcObj or dstGroup is null"); 1837 log.trace("copy(): finish"); 1838 return null; 1839 } 1840 1841 if (dstName == null) { 1842 dstName = srcObj.getName(); 1843 } 1844 1845 List<HObject> members = dstGroup.getMemberList(); 1846 int n = members.size(); 1847 for (int i = 0; i < n; i++) { 1848 HObject obj = (HObject) members.get(i); 1849 String name = obj.getName(); 1850 while (name.equals(dstName)) 1851 dstName += "~copy"; 1852 } 1853 1854 if (srcObj instanceof Dataset) { 1855 log.trace("copy(): srcObj instanceof Dataset"); 1856 newNode = copyDataset((Dataset) srcObj, (H5Group) dstGroup, dstName); 1857 } 1858 else if (srcObj instanceof H5Group) { 1859 log.trace("copy(): srcObj instanceof H5Group"); 1860 newNode = copyGroup((H5Group) srcObj, (H5Group) dstGroup, dstName); 1861 } 1862 else if (srcObj instanceof H5Datatype) { 1863 log.trace("copy(): srcObj instanceof H5Datatype"); 1864 newNode = copyDatatype((H5Datatype) srcObj, (H5Group) dstGroup, dstName); 1865 } 1866 1867 log.trace("copy(): finish"); 1868 return newNode; 1869 } 1870 1871 /* 1872 * (non-Javadoc) 1873 * 1874 * @see hdf.object.FileFormat#delete(hdf.object.HObject) 1875 */ 1876 @Override 1877 public void delete(HObject obj) throws Exception { 1878 if ((obj == null) || (fid < 0)) { 1879 log.debug("delete(): Invalid FID or object is null"); 1880 return; 1881 } 1882 1883 String name = obj.getPath() + obj.getName(); 1884 1885 H5.H5Ldelete(fid, name, HDF5Constants.H5P_DEFAULT); 1886 } 1887 1888 /* 1889 * (non-Javadoc) 1890 * 1891 * @see hdf.object.FileFormat#writeAttribute(hdf.object.HObject, 
     * hdf.object.Attribute, boolean)
     */
    @Override
    public void writeAttribute(HObject obj, Attribute attr, boolean attrExisted) throws HDF5Exception {
        log.trace("writeAttribute(): start");

        String obj_name = obj.getFullName();
        String name = attr.getName();
        int tid = -1;
        int sid = -1;
        int aid = -1;
        log.trace("writeAttribute(): name is {}", name);

        int objID = obj.open();
        if (objID < 0) {
            log.debug("writeAttribute(): Invalid Object ID");
            log.trace("writeAttribute(): finish");
            return;
        }

        if ((tid = attr.getType().toNative()) >= 0) {
            log.trace("writeAttribute(): tid {} from toNative", tid);
            try {
                // Scalar attributes get a scalar dataspace; otherwise a simple
                // dataspace with the attribute's rank and dimensions.
                if (attr.isScalar())
                    sid = H5.H5Screate(HDF5Constants.H5S_SCALAR);
                else
                    sid = H5.H5Screate_simple(attr.getRank(), attr.getDataDims(), null);

                // Open the existing attribute or create a new one.
                if (attrExisted) {
                    aid = H5.H5Aopen_by_name(objID, obj_name, name, HDF5Constants.H5P_DEFAULT,
                            HDF5Constants.H5P_DEFAULT);
                }
                else {
                    aid = H5.H5Acreate(objID, name, tid, sid, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
                }
                log.trace("writeAttribute(): aid {} opened/created", aid);

                // update value of the attribute
                Object attrValue = attr.getValue();
                log.trace("writeAttribute(): getValue");
                if (attrValue != null) {
                    // Variable-length data (including variable-length strings)
                    // takes the H5AwriteVL path below.
                    boolean isVlen = (H5.H5Tget_class(tid) == HDF5Constants.H5T_VLEN || H5.H5Tis_variable_str(tid));
                    if (isVlen) {
                        log.trace("writeAttribute(): isvlen={}", isVlen);
                        try {
                            /*
                             * must use native type to write attribute data to file (see bug 1069)
                             */
                            int tmptid = tid;
                            tid = H5.H5Tget_native_type(tmptid);
                            try {
                                H5.H5Tclose(tmptid);
                            }
                            catch (Exception ex) {
                                log.debug("writeAttribute(): H5Tclose(tmptid {}) failure: ", tmptid, ex);
                            }
                            log.trace("writeAttribute(): H5.H5AwriteVL", name);
                            if ((attrValue instanceof String) || (attr.getDataDims().length == 1)) {
                                H5.H5AwriteVL(aid, tid, (String[]) attrValue);
                            }
                            else {
                                log.info("writeAttribute(): Datatype is not a string, unable to write {} data", name);
                            }
                        }
                        catch (Exception ex) {
                            log.debug("writeAttribute(): native type failure: ", name, ex);
                        }
                    }
                    else {
                        if (attr.getType().getDatatypeClass() == Datatype.CLASS_REFERENCE && attrValue instanceof String) {
                            // reference is a path+name to the object
                            attrValue = H5.H5Rcreate(getFID(), (String) attrValue, HDF5Constants.H5R_OBJECT, -1);
                            log.trace("writeAttribute(): Attribute class is CLASS_REFERENCE");
                        }
                        else if (Array.get(attrValue, 0) instanceof String) {
                            // Fixed-size string data is flattened into a single
                            // null-terminated byte array before writing.
                            int size = H5.H5Tget_size(tid);
                            int len = ((String[]) attrValue).length;
                            byte[] bval = Dataset.stringToByte((String[]) attrValue, size);
                            if (bval != null && bval.length == size * len) {
                                bval[bval.length - 1] = 0;
                                attrValue = bval;
                            }
                            log.trace("writeAttribute(): Array", name);
                        }

                        try {
                            /*
                             * must use native type to write attribute data to file (see bug 1069)
                             */
                            int tmptid = tid;
                            tid = H5.H5Tget_native_type(tmptid);
                            try {
                                H5.H5Tclose(tmptid);
                            }
                            catch (Exception ex) {
                                log.debug("writeAttribute(): H5Tclose(tmptid {}) failure: ", tmptid, ex);
                            }
                            log.trace("writeAttribute(): H5.H5Awrite");
                            H5.H5Awrite(aid, tid, attrValue);
                        }
                        catch (Exception ex) {
                            log.debug("writeAttribute(): native type failure: ", ex);
                        }
                    }
                } // if (attrValue != null) {
            }
            finally {
                // Release datatype, dataspace and attribute identifiers;
                // failures are logged and ignored.
                try {
                    H5.H5Tclose(tid);
                }
                catch (Exception ex) {
                    log.debug("writeAttribute(): H5Tclose(tid {}) failure: ", tid, ex);
                }
                try {
                    H5.H5Sclose(sid);
                }
                catch (Exception ex) {
                    log.debug("writeAttribute(): H5Sclose(sid {}) failure: ", sid, ex);
                }
                try {
                    H5.H5Aclose(aid);
                }
                catch (Exception ex) {
                    log.debug("writeAttribute(): H5Aclose(aid {}) failure: ", aid, ex);
                }
            }
        }
        else {
            log.debug("writeAttribute(): toNative failure");
        }

        obj.close(objID);
        log.trace("writeAttribute(): finish");
    }

    /***************************************************************************
     * Implementations for methods specific to H5File
     **************************************************************************/

    /**
     * Opens a file with specific file access property list.
     * <p>
     * This function does the same as "int open()" except the you can also pass an HDF5 file access property to file
     * open. For example,
     *
     * <pre>
     * // All open objects remaining in the file are closed then file is closed
     * int plist = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
     * H5.H5Pset_fclose_degree(plist, HDF5Constants.H5F_CLOSE_STRONG);
     * int fid = open(plist);
     * </pre>
     *
     * @param plist
     *            a file access property list identifier.
     *
     * @return the file identifier if successful; otherwise returns negative value.
     *
     * @throws Exception
     *             If there is a failure.
     */
    public int open(int plist) throws Exception {
        return open(true, plist);
    }

    /***************************************************************************
     * Private methods.
     **************************************************************************/

    /**
     * Opens access to this file.
     *
     * @param loadFullHierarchy
     *            if true, load the full hierarchy into memory; otherwise just opens the file identifier.
     *
     * @return the file identifier if successful; otherwise returns negative value.
     *
     * @throws Exception
     *             If there is a failure.
2069 */ 2070 private int open(boolean loadFullHierarchy) throws Exception { 2071 int the_fid = -1; 2072 2073 int plist = HDF5Constants.H5P_DEFAULT; 2074 2075 /* 2076 * // BUG: HDF5Constants.H5F_CLOSE_STRONG does not flush cache try { //All open objects remaining in the file 2077 * are closed // then file is closed plist = H5.H5Pcreate (HDF5Constants.H5P_FILE_ACCESS); 2078 * H5.H5Pset_fclose_degree ( plist, HDF5Constants.H5F_CLOSE_STRONG); } catch (Exception ex) {;} the_fid = 2079 * open(loadFullHierarchy, plist); try { H5.H5Pclose(plist); } catch (Exception ex) {} 2080 */ 2081 2082 log.trace("open(): loadFull={}", loadFullHierarchy); 2083 the_fid = open(loadFullHierarchy, plist); 2084 2085 return the_fid; 2086 } 2087 2088 /** 2089 * Opens access to this file. 2090 * 2091 * @param loadFullHierarchy 2092 * if true, load the full hierarchy into memory; otherwise just opens the file identifier. 2093 * 2094 * @return the file identifier if successful; otherwise returns negative value. 2095 * 2096 * @throws Exception 2097 * If there is a failure. 2098 */ 2099 private int open(boolean loadFullHierarchy, int plist) throws Exception { 2100 log.trace("open(loadFullHierarchy = {}, plist = {}): start", loadFullHierarchy, plist); 2101 if (fid > 0) { 2102 log.trace("open(): FID already opened"); 2103 log.trace("open(): finish"); 2104 return fid; // file is opened already 2105 } 2106 2107 // The cwd may be changed at Dataset.read() by System.setProperty("user.dir", newdir) 2108 // to make it work for external datasets. We need to set it back 2109 // before the file is closed/opened. 
2110 String rootPath = System.getProperty("hdfview.workdir"); 2111 if (rootPath == null) { 2112 rootPath = System.getProperty("user.dir"); 2113 } 2114 System.setProperty("user.dir", rootPath);//H5.H5Dchdir_ext(rootPath); 2115 2116 // check for valid file access permission 2117 if (flag < 0) { 2118 log.debug("open(): Invalid access identifier -- " + flag); 2119 log.trace("open(): finish"); 2120 throw new HDF5Exception("Invalid access identifer -- " + flag); 2121 } 2122 else if (HDF5Constants.H5F_ACC_CREAT == flag) { 2123 // create a new file 2124 log.trace("open(): create file"); 2125 fid = H5.H5Fcreate(fullFileName, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, 2126 HDF5Constants.H5P_DEFAULT); 2127 H5.H5Fflush(fid, HDF5Constants.H5F_SCOPE_LOCAL); 2128 H5.H5Fclose(fid); 2129 flag = HDF5Constants.H5F_ACC_RDWR; 2130 } 2131 else if (!exists()) { 2132 log.debug("open(): File {} does not exist", fullFileName); 2133 log.trace("open(): finish"); 2134 throw new HDF5Exception("File does not exist -- " + fullFileName); 2135 } 2136 else if (((flag == HDF5Constants.H5F_ACC_RDWR) || (flag == HDF5Constants.H5F_ACC_CREAT)) && !canWrite()) { 2137 log.debug("open(): Cannot write file {}", fullFileName); 2138 log.trace("open(): finish"); 2139 throw new HDF5Exception("Cannot write file, try opening as read-only -- " + fullFileName); 2140 } 2141 else if ((flag == HDF5Constants.H5F_ACC_RDONLY) && !canRead()) { 2142 log.debug("open(): Cannot read file {}", fullFileName); 2143 log.trace("open(): finish"); 2144 throw new HDF5Exception("Cannot read file -- " + fullFileName); 2145 } 2146 2147 try { 2148 log.trace("open(): open file"); 2149 fid = H5.H5Fopen(fullFileName, flag, plist); 2150 } 2151 catch (Exception ex) { 2152 try { 2153 log.debug("open(): open failed, attempting to open file read-only"); 2154 fid = H5.H5Fopen(fullFileName, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); 2155 isReadOnly = true; 2156 } 2157 catch (Exception ex2) { 2158 // Attemp to open the 
file as a split file or family file 2159 try { 2160 File tmpf = new File(fullFileName); 2161 String tmpname = tmpf.getName(); 2162 int idx = tmpname.lastIndexOf("."); 2163 2164 if (tmpname.contains("-m")) { 2165 log.debug("open(): open read-only failed, attempting to open split file"); 2166 2167 while (idx > 0) { 2168 char c = tmpname.charAt(idx - 1); 2169 if (!(c == '-')) 2170 idx--; 2171 else 2172 break; 2173 } 2174 2175 if (idx > 0) { 2176 tmpname = tmpname.substring(0, idx - 1); 2177 log.trace("open(): attempting to open split file with name {}", tmpname); 2178 int pid = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS); 2179 H5.H5Pset_fapl_split(pid, "-m.h5", HDF5Constants.H5P_DEFAULT, "-r.h5", HDF5Constants.H5P_DEFAULT); 2180 fid = H5.H5Fopen(tmpf.getParent() + File.separator + tmpname, flag, pid); 2181 H5.H5Pclose(pid); 2182 } 2183 } 2184 else { 2185 log.debug("open(): open read-only failed, checking for file family"); 2186 // try to see if it is a file family, always open a family file 2187 // from the first one since other files will not be recognized 2188 // as an HDF5 file 2189 int cnt = idx; 2190 while (idx > 0) { 2191 char c = tmpname.charAt(idx - 1); 2192 if (Character.isDigit(c)) 2193 idx--; 2194 else 2195 break; 2196 } 2197 2198 if (idx > 0) { 2199 cnt -= idx; 2200 tmpname = tmpname.substring(0, idx) + "%0" + cnt + "d" + tmpname.substring(tmpname.lastIndexOf(".")); 2201 log.trace("open(): attempting to open file family with name {}", tmpname); 2202 int pid = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS); 2203 H5.H5Pset_fapl_family(pid, 0, HDF5Constants.H5P_DEFAULT); 2204 fid = H5.H5Fopen(tmpf.getParent() + File.separator + tmpname, flag, pid); 2205 H5.H5Pclose(pid); 2206 } 2207 } 2208 } 2209 catch (Exception ex3) { 2210 log.debug("open(): open failed: ", ex3); 2211 } 2212 } /* catch (Exception ex) { */ 2213 } 2214 2215 if ((fid >= 0) && loadFullHierarchy) { 2216 // load the hierarchy of the file 2217 rootNode = loadTree(); 2218 } 2219 2220 
log.trace("open(loadFullHeirarchy = {}, plist = {}): finish", loadFullHierarchy, plist); 2221 return fid; 2222 } 2223 2224 /** 2225 * Reads the file structure into memory (tree node) 2226 * 2227 * @return the root node of the file structure. 2228 */ 2229 private DefaultMutableTreeNode loadTree() { 2230 if (fid < 0) { 2231 log.debug("loadTree(): Invalid FID"); 2232 return null; 2233 } 2234 2235 DefaultMutableTreeNode root = null; 2236 2237 long[] rootOID = { 0 }; 2238 H5Group rootGroup = new H5Group(this, "/", null, // root node does not 2239 // have a parent path 2240 null); // root node does not have a parent node 2241 2242 root = new DefaultMutableTreeNode(rootGroup) { 2243 private static final long serialVersionUID = 991382067363411723L; 2244 2245 @Override 2246 public boolean isLeaf() { 2247 return false; 2248 } 2249 }; 2250 2251 depth_first(root, 0); // reload all 2252 2253 return root; 2254 } 2255 2256 /** 2257 * Retrieves the file structure by depth-first order, recursively. The current implementation retrieves groups and 2258 * datasets only. It does not include named datatypes and soft links. 2259 * <p> 2260 * It also detects and stops loops. A loop is detected if there exists an object with the same object ID by tracing 2261 * a path back up to the root. 2262 * 2263 * @param parentNode 2264 * the parent node. 
     * @param nTotal
     *            running count of objects loaded so far (compared against the
     *            start/max member limits).
     *
     * @return the updated running count of loaded objects.
     */
    private int depth_first(MutableTreeNode parentNode, int nTotal) {
        log.trace("depth_first({}): start", parentNode);

        int nelems;
        MutableTreeNode node = null;
        String fullPath = null;
        String ppath = null;
        DefaultMutableTreeNode pnode = (DefaultMutableTreeNode) parentNode;
        int gid = -1;

        H5Group parentObject = (H5Group) (pnode.getUserObject());
        ppath = parentObject.getPath();

        // A null parent path marks the root group.
        if (ppath == null) {
            fullPath = HObject.separator;
        }
        else {
            fullPath = ppath + parentObject.getName() + HObject.separator;
        }

        // Count the links (members) in this group.
        nelems = 0;
        try {
            gid = parentObject.open();
            H5G_info_t info = H5.H5Gget_info(gid);
            nelems = (int) info.nlinks;
        }
        catch (HDF5Exception ex) {
            nelems = -1;
            log.debug("depth_first({}): H5Gget_info(gid {}) failure: ", parentObject, gid, ex);
        }

        if (nelems <= 0) {
            parentObject.close(gid);
            log.debug("depth_first({}): nelems <= 0", parentObject);
            log.trace("depth_first({}): finish", parentObject);
            return nTotal;
        }

        // since each call of H5.H5Gget_objname_by_idx() takes about one second.
        // 1,000,000 calls take 12 days. Instead of calling it in a loop,
        // we use only one call to get all the information, which takes about
        // two seconds
        int[] objTypes = new int[nelems];
        long[] fNos = new long[nelems];
        long[] objRefs = new long[nelems];
        String[] objNames = new String[nelems];

        try {
            H5.H5Gget_obj_info_full(fid, fullPath, objNames, objTypes, null, fNos, objRefs, indexType, indexOrder);
        }
        catch (HDF5Exception ex) {
            log.debug("depth_first({}): failure: ", parentObject, ex);
            log.trace("depth_first({}): finish", parentObject);
            ex.printStackTrace();
            return nTotal;
        }

        // Optional windowing of the members to load (0/negative disables).
        int nStart = getStartMembers();
        int nMax = getMaxMembers();

        String obj_name;
        int obj_type;

        // Iterate through the file to see members of the group
        for (int i = 0; i < nelems; i++) {
            obj_name = objNames[i];
            obj_type = objTypes[i];
            log.trace("depth_first({}): obj_name={}, obj_type={}", parentObject, obj_name, obj_type);
            // Object identity: {object reference, file number} pair.
            long oid[] = { objRefs[i], fNos[i] };

            if (obj_name == null) {
                log.trace("depth_first({}): continue after null obj_name", parentObject);
                continue;
            }

            nTotal++;

            if (nMax > 0) {
                if ((nTotal - nStart) >= nMax)
                    break; // loaded enough objects
            }

            // Objects before the start index are skipped (but groups below are
            // still recursed into so the counter stays global).
            boolean skipLoad = false;
            if ((nTotal > 0) && (nTotal < nStart))
                skipLoad = true;

            // create a new group
            if (obj_type == HDF5Constants.H5O_TYPE_GROUP) {
                H5Group g = new H5Group(this, obj_name, fullPath, parentObject, oid); // deprecated!
                node = new DefaultMutableTreeNode(g) {
                    private static final long serialVersionUID = 5139629211215794015L;

                    @Override
                    public boolean isLeaf() {
                        return false;
                    }
                };
                pnode.add(node);
                parentObject.addToMemberList(g);

                // detect and stop loops
                // a loop is detected if there exists object with the same
                // object ID by tracing path back up to the root.
                boolean hasLoop = false;
                HObject tmpObj = null;
                DefaultMutableTreeNode tmpNode = pnode;

                while (tmpNode != null) {
                    tmpObj = (HObject) tmpNode.getUserObject();

                    if (tmpObj.equalsOID(oid) && !(tmpObj.getPath() == null)) {
                        hasLoop = true;
                        break;
                    }
                    else {
                        tmpNode = (DefaultMutableTreeNode) tmpNode.getParent();
                    }
                }

                // recursively go through the next group
                // stops if it has loop.
                if (!hasLoop) {
                    nTotal = depth_first(node, nTotal);
                }
            }
            else if (skipLoad) {
                continue;
            }
            else if (obj_type == HDF5Constants.H5O_TYPE_DATASET) {
                // Peek at the dataset's type class to decide between a
                // compound and a scalar dataset wrapper.
                int did = -1;
                int tid = -1;
                int tclass = -1;
                try {
                    did = H5.H5Dopen(fid, fullPath + obj_name, HDF5Constants.H5P_DEFAULT);
                    if (did >= 0) {
                        tid = H5.H5Dget_type(did);

                        tclass = H5.H5Tget_class(tid);
                        if ((tclass == HDF5Constants.H5T_ARRAY) || (tclass == HDF5Constants.H5T_VLEN)) {
                            // for ARRAY, the type is determined by the base type
                            int btid = H5.H5Tget_super(tid);

                            tclass = H5.H5Tget_class(btid);

                            try {
                                H5.H5Tclose(btid);
                            }
                            catch (Exception ex) {
                                log.debug("depth_first({})[{}] dataset {} H5Tclose(btid {}) failure: ", parentObject,
                                        i, obj_name, btid, ex);
                            }
                        }
                    }
                    else {
                        log.debug("depth_first({})[{}] {} dataset open failure", parentObject, i, obj_name);
                    }
                }
                catch (Exception ex) {
                    log.debug("depth_first({})[{}] {} dataset access failure: ", parentObject, i, obj_name, ex);
                }
                finally {
                    // Always release the datatype and dataset handles.
                    try {
                        H5.H5Tclose(tid);
                    }
                    catch (Exception ex) {
                        log.debug("depth_first({})[{}] daatset {} H5Tclose(tid {}) failure: ", parentObject, i,
                                obj_name, tid, ex);
                    }
                    try {
                        H5.H5Dclose(did);
                    }
                    catch (Exception ex) {
                        log.debug("depth_first({})[{}] dataset {} H5Dclose(did {}) failure: ", parentObject, i,
                                obj_name, did, ex);
                    }
                }
                Dataset d = null;
                if (tclass == HDF5Constants.H5T_COMPOUND) {
                    // create a new compound dataset
                    d = new H5CompoundDS(this, obj_name, fullPath, oid); // deprecated!
                }
                else {
                    // create a new scalar dataset
                    d = new H5ScalarDS(this, obj_name, fullPath, oid); // deprecated!
                }

                node = new DefaultMutableTreeNode(d);
                pnode.add(node);
                parentObject.addToMemberList(d);
            }
            else if (obj_type == HDF5Constants.H5O_TYPE_NAMED_DATATYPE) {
                Datatype t = new H5Datatype(this, obj_name, fullPath, oid); // deprecated!

                node = new DefaultMutableTreeNode(t);
                pnode.add(node);
                parentObject.addToMemberList(t);
            }
            else if (obj_type == HDF5Constants.H5O_TYPE_UNKNOWN) {
                // Unresolvable link (e.g. dangling soft/external link) -- keep
                // it visible in the tree as an H5Link placeholder.
                H5Link link = new H5Link(this, obj_name, fullPath, oid);

                node = new DefaultMutableTreeNode(link);
                pnode.add(node);
                parentObject.addToMemberList(link);
                continue; // do the next one, if the object is not identified.
            }
        } // for ( i = 0; i < nelems; i++)

        parentObject.close(gid);

        log.trace("depth_first({}): finish", parentObject);
        return nTotal;
    } // private depth_first()

    /**
     * Older variant of {@link #depth_first(MutableTreeNode, int)} that applies
     * the start/max member window per group instead of globally.
     * NOTE(review): appears to be retained for reference only -- no caller is
     * visible in this file chunk; confirm before removing.
     *
     * @param parentNode
     *            the parent node whose group members are loaded.
     */
    private void depth_first_old(MutableTreeNode parentNode) {
        int nelems;
        MutableTreeNode node = null;
        String fullPath = null;
        String ppath = null;
        DefaultMutableTreeNode pnode = (DefaultMutableTreeNode) parentNode;
        int gid = -1;
        log.trace("depth_first_old: start");

        H5Group pgroup = (H5Group) (pnode.getUserObject());
        ppath = pgroup.getPath();

        // A null parent path marks the root group.
        if (ppath == null) {
            fullPath = HObject.separator;
        }
        else {
            fullPath = ppath + pgroup.getName() + HObject.separator;
        }

        nelems = 0;
        try {
            gid = pgroup.open();
            H5G_info_t info = H5.H5Gget_info(gid);
            nelems = (int) info.nlinks;
        }
        catch (HDF5Exception ex) {
            nelems = -1;
        }

        if (nelems <= 0) {
            pgroup.close(gid);
            return;
        }

        // since each call of H5.H5Gget_objname_by_idx() takes about one second.
        // 1,000,000 calls take 12 days. Instead of calling it in a loop,
        // we use only one call to get all the information, which takes about
        // two seconds
        int[] objTypes = new int[nelems];
        long[] fNos = new long[nelems];
        long[] objRefs = new long[nelems];
        String[] objNames = new String[nelems];

        try {
            H5.H5Gget_obj_info_full(fid, fullPath, objNames, objTypes, null, fNos, objRefs, indexType, indexOrder);
        }
        catch (HDF5Exception ex) {
            ex.printStackTrace();
            return;
        }

        // Clamp the [startIndex, endIndex) window to the member count.
        int startIndex = Math.max(0, getStartMembers());
        int endIndex = getMaxMembers();
        if (endIndex >= nelems) {
            endIndex = nelems;
            startIndex = 0; // load all members
        }
        endIndex += startIndex;
        endIndex = Math.min(endIndex, nelems);

        String obj_name;
        int obj_type;
        // int lnk_type;

        // Iterate through the file to see members of the group
        for (int i = startIndex; i < endIndex; i++) {
            obj_name = objNames[i];
            obj_type = objTypes[i];
            log.trace("depth_first_old: obj_name={}, obj_type={}", obj_name, obj_type);
            long oid[] = { objRefs[i], fNos[i] };

            if (obj_name == null) {
                continue;
            }

            // we need to use the OID for this release. we will rewrite this so
            // that we do not use the deprecated constructor
            if (obj_type == HDF5Constants.H5O_TYPE_UNKNOWN) {
                H5Link link = new H5Link(this, obj_name, fullPath, oid);

                node = new DefaultMutableTreeNode(link);
                pnode.add(node);
                pgroup.addToMemberList(link);
                continue; // do the next one, if the object is not identified.
            }

            // create a new group
            if (obj_type == HDF5Constants.H5O_TYPE_GROUP) {
                H5Group g = new H5Group(this, obj_name, fullPath, pgroup, oid); // deprecated!
                node = new DefaultMutableTreeNode(g) {
                    private static final long serialVersionUID = 5139629211215794015L;

                    @Override
                    public boolean isLeaf() {
                        return false;
                    }
                };
                pnode.add(node);
                pgroup.addToMemberList(g);

                // detect and stop loops
                // a loop is detected if there exists object with the same
                // object ID by tracing path back up to the root.
                boolean hasLoop = false;
                HObject tmpObj = null;
                DefaultMutableTreeNode tmpNode = pnode;

                while (tmpNode != null) {
                    tmpObj = (HObject) tmpNode.getUserObject();

                    if (tmpObj.equalsOID(oid)) {
                        hasLoop = true;
                        break;
                    }
                    else {
                        tmpNode = (DefaultMutableTreeNode) tmpNode.getParent();
                    }
                }

                // recursively go through the next group
                // stops if it has loop.
                if (!hasLoop) {
                    depth_first_old(node);
                }
            }
            else if (obj_type == HDF5Constants.H5O_TYPE_DATASET) {
                int did = -1, tid = -1, tclass = -1;
                try {
                    did = H5.H5Dopen(fid, fullPath + obj_name, HDF5Constants.H5P_DEFAULT);
                    if (did >= 0) {
                        tid = H5.H5Dget_type(did);

                        tclass = H5.H5Tget_class(tid);
                        if ((tclass == HDF5Constants.H5T_ARRAY) || (tclass == HDF5Constants.H5T_VLEN)) {
                            // for ARRAY, the type is determined by the base type
                            int btid = H5.H5Tget_super(tid);
                            int tmpclass = H5.H5Tget_class(btid);

                            // cannot deal with ARRAY of COMPOUND in compound table
                            // viewer
                            if (tmpclass != HDF5Constants.H5T_COMPOUND)
                                tclass = H5.H5Tget_class(btid);

                            try {
                                H5.H5Tclose(btid);
                            }
                            catch (Exception ex) {
                                log.debug("depth_first_old[{}] {} dataset access H5Tclose failure: ", i, obj_name, ex);
                            }
                        }
                    }
                    else {
                        log.debug("depth_first_old[{}] {} dataset open failure", i, obj_name);
                    }
                }
                catch (HDF5Exception ex) {
                    log.debug("depth_first_old[{}] {} dataset access failure: ", i, obj_name, ex);
                }
                finally {
                    // Always release the datatype and dataset handles.
                    try {
                        H5.H5Tclose(tid);
                    }
                    catch (Exception ex) {
                        log.debug("depth_first_old[{}] {} dataset access H5Tclose failure: ", i, obj_name, ex);
                    }
                    try {
                        H5.H5Dclose(did);
                    }
                    catch (Exception ex) {
                        log.debug("depth_first_old[{}] {} dataset access H5Tclose failure: ", i, obj_name, ex);
                    }
                }
                Dataset d = null;
                if (tclass == HDF5Constants.H5T_COMPOUND) {
                    // create a new compound dataset
                    d = new H5CompoundDS(this, obj_name, fullPath, oid); // deprecated!
                }
                else {
                    // create a new scalar dataset
                    d = new H5ScalarDS(this, obj_name, fullPath, oid); // deprecated!
                }

                node = new DefaultMutableTreeNode(d);
                pnode.add(node);
                pgroup.addToMemberList(d);
            }
            else if (obj_type == HDF5Constants.H5O_TYPE_NAMED_DATATYPE) {
                Datatype t = new H5Datatype(this, obj_name, fullPath, oid); // deprecated!

                node = new DefaultMutableTreeNode(t);
                pnode.add(node);
                pgroup.addToMemberList(t);
            }
        } // for ( i = 0; i < nelems; i++)

        pgroup.close(gid);
        log.trace("depth_first_old: finish");
    } // private depth_first()

    /**
     * Copies a dataset into a destination group via H5Ocopy, expanding object
     * references in the copy, and wraps the result in a new tree node.
     *
     * @param srcDataset
     *            the dataset to copy.
     * @param pgroup
     *            the destination group.
     * @param dstName
     *            the name for the copy; falls back to the source name when
     *            null, "/" or empty.
     *
     * @return the tree node holding the new dataset.
     *
     * @throws Exception
     *             If there is a failure.
     */
    private TreeNode copyDataset(Dataset srcDataset, H5Group pgroup, String dstName) throws Exception {
        Dataset dataset = null;
        TreeNode newNode;
        int srcdid = -1;
        int dstdid = -1;
        int ocp_plist_id = -1;
        String dname = null, path = null;

        if (pgroup.isRoot()) {
            path = HObject.separator;
        }
        else {
            path = pgroup.getPath() + pgroup.getName() + HObject.separator;
        }

        if ((dstName == null) || dstName.equals(HObject.separator) || (dstName.length() < 1)) {
            dstName = srcDataset.getName();
        }
        dname = path + dstName;

        try {
            srcdid = srcDataset.open();
            dstdid = pgroup.open();

            try {
                // Expand object references so the copy does not point back
                // into the source file's objects.
                ocp_plist_id = H5.H5Pcreate(HDF5Constants.H5P_OBJECT_COPY);
                H5.H5Pset_copy_object(ocp_plist_id, HDF5Constants.H5O_COPY_EXPAND_REFERENCE_FLAG);
                H5.H5Ocopy(srcdid, ".", dstdid, dstName, ocp_plist_id, HDF5Constants.H5P_DEFAULT);
            }
            catch (Exception ex) {
                log.debug("copyDataset(): {} failure: ", dname, ex);
            }
            finally {
                try {
                    H5.H5Pclose(ocp_plist_id);
                }
                catch (Exception ex) {
                    log.debug("copyDataset(): {} H5Pclose(ocp_plist_id {}) failure: ", dname, ocp_plist_id, ex);
                }
            }

            // Wrap the copy with the same flavor of dataset object as the source.
            if (srcDataset instanceof H5ScalarDS) {
                dataset = new H5ScalarDS(pgroup.getFileFormat(), dstName, path);
            }
            else {
                dataset = new H5CompoundDS(pgroup.getFileFormat(), dstName, path);
            }

            pgroup.addToMemberList(dataset);
            newNode = new DefaultMutableTreeNode(dataset);
        }
        finally {
            try {
                srcDataset.close(srcdid);
            }
            catch (Exception ex) {
                log.debug("copyDataset(): {} srcDataset.close(srcdid {}) failure: ", dname, srcdid, ex);
            }
            try {
                pgroup.close(dstdid);
            }
            catch (Exception ex) {
                log.debug("copyDataset(): {} pgroup.close(dstdid {}) failure: ", dname, dstdid, ex);
            }
        }

        log.trace("copyDataset(): finish");
        return newNode;
    }

    /**
     * Constructs a dataset for specified dataset identifier.
     *
     * @param did
     *            the dataset identifier
     * @param name
     *            the name of the dataset
     * @param path
     *            the path of the dataset
     *
     * @return the dataset if successful; otherwise return null.
     *
     * @throws HDF5Exception
     *             If there is an error at the HDF5 library level.
2759 */ 2760 private Dataset getDataset(int did, String name, String path) throws HDF5Exception { 2761 log.trace("getDataset(): start"); 2762 Dataset dataset = null; 2763 if (did >= 0) { 2764 int tid = -1; 2765 int tclass = -1; 2766 try { 2767 tid = H5.H5Dget_type(did); 2768 tclass = H5.H5Tget_class(tid); 2769 if (tclass == HDF5Constants.H5T_ARRAY) { 2770 // for ARRAY, the type is determined by the base type 2771 int btid = H5.H5Tget_super(tid); 2772 tclass = H5.H5Tget_class(btid); 2773 try { 2774 H5.H5Tclose(btid); 2775 } 2776 catch (Exception ex) { 2777 log.debug("getDataset(): {} H5Tclose(btid {}) failure: ", name, btid, ex); 2778 } 2779 } 2780 } 2781 finally { 2782 try { 2783 H5.H5Tclose(tid); 2784 } 2785 catch (Exception ex) { 2786 log.debug("getDataset(): {} H5Tclose(tid {}) failure: ", name, tid, ex); 2787 } 2788 } 2789 2790 if (tclass == HDF5Constants.H5T_COMPOUND) { 2791 dataset = new H5CompoundDS(this, name, path); 2792 } 2793 else { 2794 dataset = new H5ScalarDS(this, name, path); 2795 } 2796 } 2797 else { 2798 log.debug("getDataset(): id failure"); 2799 } 2800 2801 log.trace("getDataset(): finish"); 2802 return dataset; 2803 } 2804 2805 /** 2806 * Copies a named datatype to another location. 2807 * 2808 * @param srcType 2809 * the source datatype 2810 * @param pgroup 2811 * the group which the new datatype is copied to 2812 * @param dstName 2813 * the name of the new dataype 2814 * 2815 * @throws Exception 2816 * If there is a failure. 
2817 */ 2818 private TreeNode copyDatatype(Datatype srcType, H5Group pgroup, String dstName) throws Exception { 2819 log.trace("copyDatatype(): start"); 2820 Datatype datatype = null; 2821 int tid_src = -1; 2822 int gid_dst = -1; 2823 String path = null; 2824 DefaultMutableTreeNode newNode = null; 2825 2826 if (pgroup.isRoot()) { 2827 path = HObject.separator; 2828 } 2829 else { 2830 path = pgroup.getPath() + pgroup.getName() + HObject.separator; 2831 } 2832 2833 if ((dstName == null) || dstName.equals(HObject.separator) || (dstName.length() < 1)) { 2834 dstName = srcType.getName(); 2835 } 2836 2837 try { 2838 tid_src = srcType.open(); 2839 gid_dst = pgroup.open(); 2840 2841 try { 2842 H5.H5Ocopy(tid_src, ".", gid_dst, dstName, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 2843 } 2844 catch (Exception ex) { 2845 log.debug("copyDatatype(): {} H5Ocopy(tid_src {}) failure: ", dstName, tid_src, ex); 2846 } 2847 datatype = new H5Datatype(pgroup.getFileFormat(), dstName, path); 2848 2849 pgroup.addToMemberList(datatype); 2850 newNode = new DefaultMutableTreeNode(datatype); 2851 } 2852 finally { 2853 try { 2854 srcType.close(tid_src); 2855 } 2856 catch (Exception ex) { 2857 log.debug("copyDatatype(): {} srcType.close(tid_src {}) failure: ", dstName, tid_src, ex); 2858 } 2859 try { 2860 pgroup.close(gid_dst); 2861 } 2862 catch (Exception ex) { 2863 log.debug("copyDatatype(): {} pgroup.close(gid_dst {}) failure: ", dstName, gid_dst, ex); 2864 } 2865 } 2866 2867 log.trace("copyDatatype(): finish"); 2868 return newNode; 2869 } 2870 2871 /** 2872 * Copies a group and its members to a new location. 2873 * 2874 * @param srcGroup 2875 * the source group 2876 * @param dstGroup 2877 * the location where the new group is located 2878 * @param dstName 2879 * the name of the new group 2880 * 2881 * @throws Exception 2882 * If there is a failure. 
2883 */ 2884 private TreeNode copyGroup(H5Group srcGroup, H5Group dstGroup, String dstName) throws Exception { 2885 log.trace("copyGroup(): start"); 2886 H5Group group = null; 2887 DefaultMutableTreeNode newNode = null; 2888 int srcgid = -1; 2889 int dstgid = -1; 2890 String gname = null; 2891 String path = null; 2892 2893 if (dstGroup.isRoot()) { 2894 path = HObject.separator; 2895 } 2896 else { 2897 path = dstGroup.getPath() + dstGroup.getName() + HObject.separator; 2898 } 2899 2900 if ((dstName == null) || dstName.equals(HObject.separator) || (dstName.length() < 1)) { 2901 dstName = srcGroup.getName(); 2902 } 2903 2904 try { 2905 srcgid = srcGroup.open(); 2906 dstgid = dstGroup.open(); 2907 try { 2908 H5.H5Ocopy(srcgid, ".", dstgid, dstName, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 2909 } 2910 catch (Exception ex) { 2911 log.debug("copyGroup(): {} H5Ocopy(srcgid {}) failure: ", dstName, srcgid, ex); 2912 } 2913 2914 group = new H5Group(dstGroup.getFileFormat(), dstName, path, dstGroup); 2915 newNode = new DefaultMutableTreeNode(group) { 2916 private static final long serialVersionUID = -4981107816640372359L; 2917 2918 @Override 2919 public boolean isLeaf() { 2920 return false; 2921 } 2922 }; 2923 depth_first(newNode, Integer.MIN_VALUE); // reload all 2924 dstGroup.addToMemberList(group); 2925 } 2926 2927 finally { 2928 try { 2929 srcGroup.close(srcgid); 2930 } 2931 catch (Exception ex) { 2932 log.debug("copyGroup(): {} srcGroup.close(srcgid {}) failure: ", dstName, srcgid, ex); 2933 } 2934 try { 2935 dstGroup.close(dstgid); 2936 } 2937 catch (Exception ex) { 2938 log.debug("copyGroup(): {} pgroup.close(dstgid {}) failure: ", dstName, dstgid, ex); 2939 } 2940 } 2941 2942 log.trace("copyGroup(): finish"); 2943 return newNode; 2944 } 2945 2946 /** 2947 * Constructs a group for specified group identifier and retrieves members. 2948 * 2949 * @param gid 2950 * The group identifier. 2951 * @param name 2952 * The group name. 
2953 * @param pGroup 2954 * The parent group, or null for the root group. 2955 * 2956 * @return The group if successful; otherwise returns false. 2957 * 2958 * @throws HDF5Exception 2959 * If there is an error at the HDF5 library level. 2960 */ 2961 private H5Group getGroup(int gid, String name, Group pGroup) throws HDF5Exception { 2962 log.trace("getGroup(): start"); 2963 String parentPath = null; 2964 String thisFullName = null; 2965 String memberFullName = null; 2966 2967 if (pGroup == null) { 2968 thisFullName = name = "/"; 2969 } 2970 else { 2971 parentPath = pGroup.getFullName(); 2972 if ((parentPath == null) || parentPath.equals("/")) { 2973 thisFullName = "/" + name; 2974 } 2975 else { 2976 thisFullName = parentPath + "/" + name; 2977 } 2978 } 2979 2980 // get rid of any extra "/" 2981 if (parentPath != null) { 2982 parentPath = parentPath.replaceAll("//", "/"); 2983 } 2984 if (thisFullName != null) { 2985 thisFullName = thisFullName.replaceAll("//", "/"); 2986 } 2987 2988 log.trace("getGroup(): fullName={}", thisFullName); 2989 2990 H5Group group = new H5Group(this, name, parentPath, pGroup); 2991 2992 H5G_info_t group_info = null; 2993 H5O_info_t obj_info = null; 2994 int oid = -1; 2995 String link_name = null; 2996 try { 2997 group_info = H5.H5Gget_info(gid); 2998 } 2999 catch (Exception ex) { 3000 log.debug("getGroup(): {} H5Gget_info(gid {}) failure: ", name, gid, ex); 3001 } 3002 try { 3003 oid = H5.H5Oopen(gid, thisFullName, HDF5Constants.H5P_DEFAULT); 3004 } 3005 catch (Exception ex) { 3006 log.debug("getGroup(): {} H5Oopen(gid {}) failure: ", name, gid, ex); 3007 } 3008 3009 // retrieve only the immediate members of the group, do not follow 3010 // subgroups 3011 for (int i = 0; i < group_info.nlinks; i++) { 3012 try { 3013 link_name = H5.H5Lget_name_by_idx(gid, thisFullName, indexType, indexOrder, i, 3014 HDF5Constants.H5P_DEFAULT); 3015 obj_info = H5 3016 .H5Oget_info_by_idx(oid, thisFullName, indexType, indexOrder, i, HDF5Constants.H5P_DEFAULT); 
3017 } 3018 catch (HDF5Exception ex) { 3019 log.debug("getGroup()[{}]: {} name,info failure: ", i, name, ex); 3020 log.trace("getGroup()[{}]: continue", i); 3021 // do not stop if accessing one member fails 3022 continue; 3023 } 3024 // create a new group 3025 if (obj_info.type == HDF5Constants.H5O_TYPE_GROUP) { 3026 H5Group g = new H5Group(this, link_name, thisFullName, group); 3027 group.addToMemberList(g); 3028 } 3029 else if (obj_info.type == HDF5Constants.H5O_TYPE_DATASET) { 3030 int did = -1; 3031 Dataset d = null; 3032 3033 if ((thisFullName == null) || thisFullName.equals("/")) { 3034 memberFullName = "/" + link_name; 3035 } 3036 else { 3037 memberFullName = thisFullName + "/" + link_name; 3038 } 3039 3040 try { 3041 did = H5.H5Dopen(fid, memberFullName, HDF5Constants.H5P_DEFAULT); 3042 d = getDataset(did, link_name, thisFullName); 3043 } 3044 finally { 3045 try { 3046 H5.H5Dclose(did); 3047 } 3048 catch (Exception ex) { 3049 log.debug("getGroup()[{}]: {} H5Dclose(did {}) failure: ", i, name, did, ex); 3050 } 3051 } 3052 group.addToMemberList(d); 3053 } 3054 else if (obj_info.type == HDF5Constants.H5O_TYPE_NAMED_DATATYPE) { 3055 Datatype t = new H5Datatype(this, link_name, thisFullName); 3056 group.addToMemberList(t); 3057 } 3058 } // End of for loop. 3059 try { 3060 if (oid >= 0) 3061 H5.H5Oclose(oid); 3062 } 3063 catch (Exception ex) { 3064 log.debug("getGroup(): {} H5Oclose(oid {}) failure: ", name, oid, ex); 3065 } 3066 log.trace("getGroup(): finish"); 3067 return group; 3068 } 3069 3070 /** 3071 * Retrieves the name of the target object that is being linked to. 3072 * 3073 * @param obj 3074 * The current link object. 3075 * 3076 * @return The name of the target object. 3077 * 3078 * @throws HDF5Exception 3079 * If there is an error at the HDF5 library level. 
3080 */ 3081 public static String getLinkTargetName(HObject obj) throws Exception { 3082 log.trace("getLinkTargetName(): start"); 3083 String[] link_value = { null, null }; 3084 String targetObjName = null; 3085 3086 if (obj == null) { 3087 log.debug("getLinkTargetName(): object is null"); 3088 log.trace("getLinkTargetName(): finish"); 3089 return null; 3090 } 3091 3092 if (obj.getFullName().equals("/")) { 3093 log.debug("getLinkTargetName(): object is root group, links not allowed"); 3094 log.trace("getLinkTargetName(): finish"); 3095 return null; 3096 } 3097 3098 H5L_info_t link_info = null; 3099 try { 3100 link_info = H5.H5Lget_info(obj.getFID(), obj.getFullName(), HDF5Constants.H5P_DEFAULT); 3101 } 3102 catch (Throwable err) { 3103 log.debug("getLinkTargetName(): H5Lget_info {} failure: ", obj.getFullName(), err); 3104 } 3105 if (link_info != null) { 3106 if ((link_info.type == HDF5Constants.H5L_TYPE_SOFT) || (link_info.type == HDF5Constants.H5L_TYPE_EXTERNAL)) { 3107 try { 3108 H5.H5Lget_val(obj.getFID(), obj.getFullName(), link_value, HDF5Constants.H5P_DEFAULT); 3109 } 3110 catch (Exception ex) { 3111 log.debug("H5Lget_val {} failure: ", obj.getFullName(), ex); 3112 } 3113 if (link_info.type == HDF5Constants.H5L_TYPE_SOFT) 3114 targetObjName = link_value[0]; 3115 else if (link_info.type == HDF5Constants.H5L_TYPE_EXTERNAL) { 3116 targetObjName = link_value[1] + FileFormat.FILE_OBJ_SEP + link_value[0]; 3117 } 3118 } 3119 } 3120 log.trace("getLinkTargetName(): finish"); 3121 return targetObjName; 3122 } 3123 3124 /** 3125 * Export dataset. 3126 * 3127 * @param file_export_name 3128 * The file name to export data into. 3129 * @param file_name 3130 * The name of the HDF5 file containing the dataset. 3131 * @param object_path 3132 * The full path of the dataset to be exported. 3133 * 3134 * @throws Exception 3135 * If there is a failure. 
3136 */ 3137 public void exportDataset(String file_export_name, String file_name, String object_path, int binary_order) 3138 throws Exception { 3139 H5.H5export_dataset(file_export_name, file_name, object_path, binary_order); 3140 } 3141 3142 /** 3143 * Renames an attribute. 3144 * 3145 * @param obj 3146 * The object whose attribute is to be renamed. 3147 * @param oldAttrName 3148 * The current name of the attribute. 3149 * @param newAttrName 3150 * The new name of the attribute. 3151 * 3152 * @throws HDF5Exception 3153 * If there is an error at the HDF5 library level. 3154 */ 3155 public void renameAttribute(HObject obj, String oldAttrName, String newAttrName) throws Exception { 3156 log.trace("renameAttribute(): rename {} to {}", oldAttrName, newAttrName); 3157 if (!attrFlag) { 3158 attrFlag = true; 3159 H5.H5Arename_by_name(obj.getFID(), obj.getName(), oldAttrName, newAttrName, HDF5Constants.H5P_DEFAULT); 3160 } 3161 } 3162 3163 /** 3164 * Rename the given object 3165 * 3166 * @param obj 3167 * the object to be renamed. 3168 * @param newName 3169 * the new name of the object. 3170 * 3171 * @throws Exception 3172 * If there is a failure. 
3173 */ 3174 public static void renameObject(HObject obj, String newName) throws Exception { 3175 String currentFullPath = obj.getPath() + obj.getName(); 3176 String newFullPath = obj.getPath() + newName; 3177 3178 currentFullPath = currentFullPath.replaceAll("//", "/"); 3179 newFullPath = newFullPath.replaceAll("//", "/"); 3180 3181 if (currentFullPath.equals("/")) { 3182 throw new HDF5Exception("Can't rename the root group."); 3183 } 3184 3185 if (currentFullPath.equals(newFullPath)) { 3186 throw new HDF5Exception("The new name is the same as the current name."); 3187 } 3188 3189 // Call the library to move things in the file 3190 H5.H5Lmove(obj.getFID(), currentFullPath, obj.getFID(), newFullPath, HDF5Constants.H5P_DEFAULT, 3191 HDF5Constants.H5P_DEFAULT); 3192 } 3193 3194 public static int getIndexTypeValue(String strtype) { 3195 if (strtype.compareTo("H5_INDEX_NAME") == 0) 3196 return HDF5Constants.H5_INDEX_NAME; 3197 if (strtype.compareTo("H5_INDEX_CRT_ORDER") == 0) 3198 return HDF5Constants.H5_INDEX_CRT_ORDER; 3199 if (strtype.compareTo("H5_INDEX_N") == 0) 3200 return HDF5Constants.H5_INDEX_N; 3201 return HDF5Constants.H5_INDEX_UNKNOWN; 3202 } 3203 3204 public static int getIndexOrderValue(String strorder) { 3205 if (strorder.compareTo("H5_ITER_INC") == 0) 3206 return HDF5Constants.H5_ITER_INC; 3207 if (strorder.compareTo("H5_ITER_DEC") == 0) 3208 return HDF5Constants.H5_ITER_DEC; 3209 if (strorder.compareTo("H5_ITER_NATIVE") == 0) 3210 return HDF5Constants.H5_ITER_NATIVE; 3211 if (strorder.compareTo("H5_ITER_N") == 0) 3212 return HDF5Constants.H5_ITER_N; 3213 return HDF5Constants.H5_ITER_UNKNOWN; 3214 } 3215 3216 public int getIndexType(String strtype) { 3217 if (strtype != null) { 3218 if (strtype.compareTo("H5_INDEX_NAME") == 0) 3219 return HDF5Constants.H5_INDEX_NAME; 3220 if (strtype.compareTo("H5_INDEX_CRT_ORDER") == 0) 3221 return HDF5Constants.H5_INDEX_CRT_ORDER; 3222 return HDF5Constants.H5_INDEX_UNKNOWN; 3223 } 3224 return getIndexType(); 3225 } 
3226 3227 public int getIndexType() { 3228 return indexType; 3229 } 3230 3231 public void setIndexType(int indexType) { 3232 this.indexType = indexType; 3233 } 3234 3235 public int getIndexOrder(String strorder) { 3236 if (strorder != null) { 3237 if (strorder.compareTo("H5_ITER_INC") == 0) 3238 return HDF5Constants.H5_ITER_INC; 3239 if (strorder.compareTo("H5_ITER_DEC") == 0) 3240 return HDF5Constants.H5_ITER_DEC; 3241 if (strorder.compareTo("H5_ITER_NATIVE") == 0) 3242 return HDF5Constants.H5_ITER_NATIVE; 3243 if (strorder.compareTo("H5_ITER_N") == 0) 3244 return HDF5Constants.H5_ITER_N; 3245 return HDF5Constants.H5_ITER_UNKNOWN; 3246 } 3247 return getIndexOrder(); 3248 } 3249 3250 public int getIndexOrder() { 3251 return indexOrder; 3252 } 3253 3254 public void setIndexOrder(int indexOrder) { 3255 this.indexOrder = indexOrder; 3256 } 3257}