/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the files COPYING and Copyright.html. *
 * COPYING can be found at the root of the source code distribution tree.    *
 * Or, see https://support.hdfgroup.org/products/licenses.html               *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.h5;

import java.io.File;
import java.lang.reflect.Array;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
import java.util.Vector;

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
import hdf.hdf5lib.HDFNativeData;
import hdf.hdf5lib.exceptions.HDF5Exception;
import hdf.hdf5lib.structs.H5G_info_t;
import hdf.hdf5lib.structs.H5L_info_t;
import hdf.hdf5lib.structs.H5O_info_t;
import hdf.object.Attribute;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.ScalarDS;


/**
 * H5File is an implementation of the FileFormat class for HDF5 files.
 * <p>
 * The HDF5 file structure is made up of HObjects stored in a tree-like fashion. Each tree node represents an
 * HDF5 object: a Group, Dataset, or Named Datatype. Starting from the root of the tree, <i>rootObject</i>, the
 * tree can be traversed to find a specific object.
 * <p>
 * The following example shows how an object can be located for a given path. User applications can directly
 * call the static method FileFormat.findObject(file, objPath) to get the object.
 *
 * <pre>
 * HObject findObject(FileFormat file, String path) {
 *     if (file == null || path == null)
 *         return null;
 *     if (!path.endsWith("/"))
 *         path = path + "/";
 *     HObject theRoot = file.getRootObject();
 *     if (theRoot == null)
 *         return null;
 *     else if (path.equals("/"))
 *         return theRoot;
 *
 *     Iterator local_it = ((Group) theRoot)
 *             .breadthFirstMemberList().iterator();
 *     HObject theObj = null;
 *     while (local_it.hasNext()) {
 *         HObject current = (HObject) local_it.next();
 *         String fullPath = current.getFullName() + "/";
 *         if (path.equals(fullPath) && current.getPath() != null) {
 *             theObj = current;
 *             break;
 *         }
 *     }
 *     return theObj;
 * }
 * </pre>
 *
 * @author Peter X. Cao
 * @version 2.4 9/4/2007
 */
public class H5File extends FileFormat {
    private static final long serialVersionUID = 6247335559471526045L;

    private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5File.class);

    /**
     * The file access flag. Valid values are HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5F_ACC_RDWR and
     * HDF5Constants.H5F_ACC_CREAT.
     */
    private int flag;

    /**
     * The index type. Valid values are HDF5Constants.H5_INDEX_NAME and HDF5Constants.H5_INDEX_CRT_ORDER.
     */
    private int indexType = HDF5Constants.H5_INDEX_NAME;

    /**
     * The index order.
     * Valid values are HDF5Constants.H5_ITER_INC and HDF5Constants.H5_ITER_DEC.
     */
    private int indexOrder = HDF5Constants.H5_ITER_INC;

    /**
     * The root object of the file hierarchy.
     */
    private HObject rootObject;

    /**
     * The maximum number of characters allowed in an attribute name.
     */
    private static final int attrNameLen = 256;

    /**
     * The library version bounds.
     */
    private int[] libver;
    public static final int LIBVER_LATEST = HDF5Constants.H5F_LIBVER_LATEST;
    public static final int LIBVER_EARLIEST = HDF5Constants.H5F_LIBVER_EARLIEST;
    public static final int LIBVER_V18 = HDF5Constants.H5F_LIBVER_V18;
    public static final int LIBVER_V110 = HDF5Constants.H5F_LIBVER_V110;

    /***************************************************************************
     * Constructor
     **************************************************************************/
    /**
     * Constructs an H5File instance with an empty file name and read-only access.
     */
    public H5File() {
        this("", READ);
    }

    /**
     * Constructs an H5File instance with the specified file name and read/write access.
     * <p>
     * This constructor does not open the file for access, nor does it confirm that the file can be opened read/write.
     *
     * @param fileName
     *            A valid file name, with a relative or absolute path.
     *
     * @throws NullPointerException
     *             If the <code>fileName</code> argument is <code>null</code>.
     */
    public H5File(String fileName) {
        this(fileName, WRITE);
    }

    /**
     * Constructs an H5File instance with the specified file name and access.
     * <p>
     * The access parameter values and corresponding behaviors:
     * <ul>
     * <li>READ: Read-only access; open() will fail if the file doesn't exist.</li>
     * <li>WRITE: Read/Write access; open() will fail if the file doesn't exist or if the file can't be opened with
     * read/write access.</li>
     * <li>CREATE: Read/Write access; create a new file or truncate an existing one; open() will fail if the file
     * can't be created or if the file exists but can't be opened read/write.</li>
     * </ul>
     * <p>
     * This constructor does not open the file for access, nor does it confirm that the file can later be opened
     * read/write or created.
     * <p>
     * The flag returned by {@link #isReadOnly()} is set to true if the access parameter value is READ, even though the
     * file isn't yet open.
     *
     * @param fileName
     *            A valid file name, with a relative or absolute path.
     * @param access
     *            The file access flag, which determines behavior when the file is opened. Acceptable values are
     *            <code>READ, WRITE,</code> and <code>CREATE</code>.
     *
     * @throws NullPointerException
     *             If the <code>fileName</code> argument is <code>null</code>.
     */
    public H5File(String fileName, int access) {
        // Call FileFormat ctor to set absolute path name
        super(fileName);
        libver = new int[2];

        if ((access & FILE_CREATE_OPEN) == FILE_CREATE_OPEN) {
            File f = new File(fileName);
            if (f.exists()) {
                access = WRITE;
            }
            else {
                access = CREATE;
            }
        }

        // set metadata for the instance
        rootObject = null;
        this.fid = -1;
        isReadOnly = (access == READ);

        // At this point we just set up the flags for what happens later.
        // We just pass unexpected access values on... subclasses may have
        // their own values.
195 if (access == READ) { 196 flag = HDF5Constants.H5F_ACC_RDONLY; 197 } 198 else if (access == WRITE) { 199 flag = HDF5Constants.H5F_ACC_RDWR; 200 } 201 else if (access == CREATE) { 202 flag = HDF5Constants.H5F_ACC_CREAT; 203 } 204 else { 205 flag = access; 206 } 207 } 208 209 /*************************************************************************** 210 * Class methods 211 **************************************************************************/ 212 213 /** 214 * Copies the attributes of one object to another object. 215 * <p> 216 * This method copies all the attributes from one object (source object) to another (destination object). If an 217 * attribute already exists in the destination object, the attribute will not be copied. Attribute names exceeding 218 * 256 characters will be truncated in the destination object. 219 * <p> 220 * The object can be an H5Group, an H5Dataset, or a named H5Datatype. This method is in the H5File class because 221 * there is no H5Object class and it is specific to HDF5 objects. 222 * <p> 223 * The copy can fail for a number of reasons, including an invalid source or destination object, but no exceptions 224 * are thrown. The actual copy is carried out by the method: {@link #copyAttributes(long, long)} 225 * 226 * @param src 227 * The source object. 228 * @param dst 229 * The destination object. 230 * 231 * @see #copyAttributes(long, long) 232 */ 233 public static final void copyAttributes(HObject src, HObject dst) { 234 if ((src != null) && (dst != null)) { 235 long srcID = src.open(); 236 long dstID = dst.open(); 237 238 if ((srcID >= 0) && (dstID >= 0)) { 239 copyAttributes(srcID, dstID); 240 } 241 242 if (srcID >= 0) { 243 src.close(srcID); 244 } 245 246 if (dstID >= 0) { 247 dst.close(dstID); 248 } 249 } 250 } 251 252 /** 253 * Copies the attributes of one object to another object. 254 * <p> 255 * This method copies all the attributes from one object (source object) to another (destination object). If an 256 * attribute already exists in the destination object, the attribute will not be copied. Attribute names exceeding 257 * 256 characters will be truncated in the destination object. 258 * <p> 259 * The object can be an H5Group, an H5Dataset, or a named H5Datatype. This method is in the H5File class because 260 * there is no H5Object class and it is specific to HDF5 objects. 261 * <p> 262 * The copy can fail for a number of reasons, including an invalid source or destination object identifier, but no 263 * exceptions are thrown. 264 * 265 * @param src_id 266 * The identifier of the source object. 267 * @param dst_id 268 * The identifier of the destination object. 
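     * <p>
     * A minimal usage sketch (srcObj and dstObj are illustrative names for two existing HObjects
     * in open files):
     *
     * <pre>
     * long srcId = srcObj.open();
     * long dstId = dstObj.open();
     * if ((srcId >= 0) && (dstId >= 0)) {
     *     H5File.copyAttributes(srcId, dstId);
     * }
     * srcObj.close(srcId);
     * dstObj.close(dstId);
     * </pre>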
     */
    public static final void copyAttributes(long src_id, long dst_id) {
        log.trace("copyAttributes(): start: src_id={} dst_id={}", src_id, dst_id);
        long aid_src = -1;
        long aid_dst = -1;
        long asid = -1;
        long atid = -1;
        String aName = null;
        H5O_info_t obj_info = null;

        try {
            obj_info = H5.H5Oget_info(src_id);
        }
        catch (Exception ex) {
            log.debug("copyAttributes(): H5Oget_info(src_id {}) failure: ", src_id, ex);
        }

        if ((obj_info == null) || (obj_info.num_attrs < 0)) {
            log.debug("copyAttributes(): no attributes");
            return;
        }

        for (int i = 0; i < obj_info.num_attrs; i++) {
            try {
                aid_src = H5.H5Aopen_by_idx(src_id, ".", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC,
                        i, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
                aName = H5.H5Aget_name(aid_src);
                atid = H5.H5Aget_type(aid_src);
                asid = H5.H5Aget_space(aid_src);

                aid_dst = H5.H5Acreate(dst_id, aName, atid, asid, HDF5Constants.H5P_DEFAULT,
                        HDF5Constants.H5P_DEFAULT);

                // use native data copy
                H5.H5Acopy(aid_src, aid_dst);
            }
            catch (Exception ex) {
                log.debug("copyAttributes(): Attribute[{}] failure: ", i, ex);
            }

            try {
                H5.H5Sclose(asid);
            }
            catch (Exception ex) {
                log.debug("copyAttributes(): Attribute[{}] H5Sclose(asid {}) failure: ", i, asid, ex);
            }
            try {
                H5.H5Tclose(atid);
            }
            catch (Exception ex) {
                log.debug("copyAttributes(): Attribute[{}] H5Tclose(atid {}) failure: ", i, atid, ex);
            }
            try {
                H5.H5Aclose(aid_src);
            }
            catch (Exception ex) {
                log.debug("copyAttributes(): Attribute[{}] H5Aclose(aid_src {}) failure: ", i, aid_src, ex);
            }
            try {
                H5.H5Aclose(aid_dst);
            }
            catch (Exception ex) {
                log.debug("copyAttributes(): Attribute[{}] H5Aclose(aid_dst {}) failure: ", i, aid_dst, ex);
            }
        } // (int i=0; i<num_attr; i++)
    }

    /**
     * Returns a list of attributes for the specified object.
     * <p>
     * This method returns a list containing the attributes associated with the
     * identified object. If there are no associated attributes, an empty list will
     * be returned.
     * <p>
     * Attribute names exceeding 256 characters will be truncated in the returned
     * list.
     *
     * @param obj
     *            The HObject whose attributes are to be returned.
     *
     * @return The list of the object's attributes.
     *
     * @throws HDF5Exception
     *             If an underlying HDF library routine is unable to perform a step
     *             necessary to retrieve the attributes. A variety of failures throw
     *             this exception.
     *
     * @see #getAttribute(HObject,int,int)
     */
    public static final List<Attribute> getAttribute(HObject obj) throws HDF5Exception {
        return H5File.getAttribute(obj, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC);
    }

    /**
     * Returns a list of attributes for the specified object, in creation or
     * alphabetical order.
     * <p>
     * This method returns a list containing the attributes associated with the
     * identified object. If there are no associated attributes, an empty list will
     * be returned. The list of attributes returned can be in increasing or
     * decreasing, creation or alphabetical order.
     * <p>
     * Attribute names exceeding 256 characters will be truncated in the returned
     * list.
     *
     * @param obj
     *            The HObject whose attributes are to be returned.
     * @param idx_type
     *            The type of index.
Valid values are: 380 * <ul> 381 * <li>H5_INDEX_NAME: An alpha-numeric index by attribute name 382 * <li>H5_INDEX_CRT_ORDER: An index by creation order 383 * </ul> 384 * @param order 385 * The index traversal order. Valid values are: 386 * <ul> 387 * <li>H5_ITER_INC: A top-down iteration incrementing the index 388 * position at each step. 389 * <li>H5_ITER_DEC: A bottom-up iteration decrementing the index 390 * position at each step. 391 * </ul> 392 * 393 * @return The list of the object's attributes. 394 * 395 * @throws HDF5Exception 396 * If an underlying HDF library routine is unable to perform a step 397 * necessary to retrieve the attributes. A variety of failures throw 398 * this exception. 399 */ 400 401 public static final List<Attribute> getAttribute(HObject obj, int idx_type, int order) throws HDF5Exception { 402 log.trace("getAttribute(): start: obj={} idx_type={} order={}", obj, idx_type, order); 403 List<Attribute> attributeList = null; 404 long objID = -1; 405 long aid = -1; 406 long sid = -1; 407 long tid = -1; 408 H5O_info_t obj_info = null; 409 410 objID = obj.open(); 411 if (objID >= 0) { 412 try { 413 try { 414 log.trace("getAttribute(): get obj_info"); 415 obj_info = H5.H5Oget_info(objID); 416 } 417 catch (Exception ex) { 418 log.debug("getAttribute(): H5Oget_info(objID {}) failure: ", objID, ex); 419 } 420 if (obj_info.num_attrs <= 0) { 421 log.trace("getAttribute(): no attributes"); 422 return (attributeList = new Vector<>()); 423 } 424 425 int n = (int) obj_info.num_attrs; 426 attributeList = new Vector<>(n); 427 log.trace("getAttribute(): num_attrs={}", n); 428 429 for (int i = 0; i < n; i++) { 430 long lsize = 1; 431 log.trace("getAttribute(): attribute[{}]", i); 432 433 try { 434 aid = H5.H5Aopen_by_idx(objID, ".", idx_type, order, i, HDF5Constants.H5P_DEFAULT, 435 HDF5Constants.H5P_DEFAULT); 436 sid = H5.H5Aget_space(aid); 437 log.trace("getAttribute(): Attribute[{}] aid={} sid={}", i, aid, sid); 438 439 long dims[] = null; 440 int rank = H5.H5Sget_simple_extent_ndims(sid); 441 442 log.trace("getAttribute(): Attribute[{}] isScalar={}", i, (rank == 0)); 443 444 if (rank > 0) { 445 dims = new long[rank]; 446 H5.H5Sget_simple_extent_dims(sid, dims, null); 447 log.trace("getAttribute(): Attribute[{}] rank={}, dims={}", i, rank, dims); 448 for (int j = 0; j < dims.length; j++) { 449 lsize *= dims[j]; 450 } 451 } 452 453 String nameA = H5.H5Aget_name(aid); 454 log.trace("getAttribute(): Attribute[{}] is {}", i, nameA); 455 456 long tmptid = -1; 457 try { 458 tmptid = H5.H5Aget_type(aid); 459 tid = H5.H5Tget_native_type(tmptid); 460 log.trace("getAttribute(): Attribute[{}] tid={} native tmptid={} from aid={}", i, tid, 461 tmptid, aid); 462 } 463 finally { 464 try { 465 H5.H5Tclose(tmptid); 466 } 467 catch (Exception ex) { 468 log.debug("getAttribute(): Attribute[{}] H5Tclose(tmptid {}) failure: ", i, tmptid, ex); 469 } 470 } 471 472 H5Datatype attrType = null; 473 try { 474 attrType = new H5Datatype(obj.getFileFormat(), tid); 475 476 log.trace("getAttribute(): Attribute[{}] Datatype={}", i, attrType.getDescription()); 477 log.trace("getAttribute(): Attribute[{}] has size={} isCompound={} is_variable_str={} isVLEN={}", 478 i, lsize, attrType.isCompound(), attrType.isVarStr(), attrType.isVLEN()); 479 } 480 catch (Exception ex) { 481 log.debug("getAttribute(): failed to create datatype for Attribute[{}]: ", i, ex); 482 attrType = null; 483 } 484 485 Attribute attr = new Attribute(obj, nameA, attrType, dims); 486 attributeList.add(attr); 487 488 // retrieve the attribute 
value 489 if (lsize <= 0) { 490 log.debug("getAttribute(): Attribute[{}] lsize <= 0", i); 491 continue; 492 } 493 494 if (lsize < Integer.MIN_VALUE || lsize > Integer.MAX_VALUE) { 495 log.debug("getAttribute(): Attribute[{}] lsize outside valid Java int range; unsafe cast", i); 496 continue; 497 } 498 499 Object value = null; 500 try { 501 if (attr.getDatatype().isVarStr()) { 502 String[] strs = new String[(int) lsize]; 503 for (int j = 0; j < lsize; j++) { 504 strs[j] = ""; 505 } 506 try { 507 log.trace("getAttribute(): Attribute[{}] H5AreadVL", i); 508 H5.H5AreadVL(aid, tid, strs); 509 } 510 catch (Exception ex) { 511 log.debug("getAttribute(): Attribute[{}] H5AreadVL failure: ", i, ex); 512 ex.printStackTrace(); 513 } 514 value = strs; 515 } 516 else if (attr.getDatatype().isCompound()) { 517 String[] strs = new String[(int) lsize]; 518 for (int j = 0; j < lsize; j++) { 519 strs[j] = ""; 520 } 521 try { 522 log.trace("getAttribute: attribute[{}] H5AreadComplex", i); 523 H5.H5AreadComplex(aid, tid, strs); 524 } 525 catch (Exception ex) { 526 ex.printStackTrace(); 527 } 528 value = strs; 529 } 530 else if (attr.getDatatype().isVLEN()) { 531 String[] strs = new String[(int) lsize]; 532 for (int j = 0; j < lsize; j++) { 533 strs[j] = ""; 534 } 535 try { 536 log.trace("getAttribute(): Attribute[{}] H5AreadVL", i); 537 H5.H5AreadVL(aid, tid, strs); 538 } 539 catch (Exception ex) { 540 log.debug("getAttribute(): Attribute[{}] H5AreadVL failure: ", i, ex); 541 ex.printStackTrace(); 542 } 543 value = strs; 544 } 545 else { 546 try { 547 value = H5Datatype.allocateArray(((H5Datatype) attr.getDatatype()), (int) lsize); 548 } 549 catch (OutOfMemoryError e) { 550 log.debug("getAttribute(): Attribute[{}] out of memory", i, e); 551 value = null; 552 } 553 if (value == null) { 554 log.debug("getAttribute(): Attribute[{}] allocateArray returned null", i); 555 continue; 556 } 557 558 if (attr.getDatatype().isArray()) { 559 try { 560 log.trace("getAttribute(): Attribute[{}] H5Aread ARRAY tid={}", i, tid); 561 H5.H5Aread(aid, tid, value); 562 } 563 catch (Exception ex) { 564 log.debug("getAttribute(): Attribute[{}] H5Aread failure: ", i, ex); 565 ex.printStackTrace(); 566 } 567 } 568 else { 569 log.trace("getAttribute(): Attribute[{}] H5Aread", i); 570 H5.H5Aread(aid, tid, value); 571 } 572 573 if (attr.getDatatype().isText() && value instanceof byte[]) { 574 log.trace("getAttribute(): isText: converting byte array to string array"); 575 value = Attribute.byteToString((byte[]) value, (int) attr.getDatatype().getDatatypeSize()); 576 } 577 else if (((H5Datatype)attr.getDatatype()).isRefObj()) { 578 log.trace("getAttribute(): Attribute[{}] isREF: converting byte array to long array", i); 579 value = HDFNativeData.byteToLong((byte[]) value); 580 } 581 } 582 } 583 catch (Exception ex) { 584 log.debug("getAttribute(): Attribute[{}] read failure: ", i, ex); 585 continue; 586 } 587 588 log.trace("getAttribute(): Attribute[{}] data: {}", i, value); 589 attr.setData(value); 590 } 591 catch (HDF5Exception ex) { 592 log.debug("getAttribute(): Attribute[{}] inspection failure: ", i, ex); 593 } 594 finally { 595 try { 596 H5.H5Tclose(tid); 597 } 598 catch (Exception ex) { 599 log.debug("getAttribute(): Attribute[{}] H5Tclose(tid {}) failure: ", i, tid, ex); 600 } 601 try { 602 H5.H5Sclose(sid); 603 } 604 catch (Exception ex) { 605 log.debug("getAttribute(): Attribute[{}] H5Sclose(sid {}) failure: ", i, sid, ex); 606 } 607 try { 608 H5.H5Aclose(aid); 609 } 610 catch (Exception ex) { 611 log.debug("getAttribute(): 
Attribute[{}] H5Aclose(aid {}) failure: ", i, aid, ex); 612 } 613 } 614 } // (int i=0; i<obj_info.num_attrs; i++) 615 } 616 finally { 617 obj.close(objID); 618 } 619 } 620 621 return attributeList; 622 } 623 624 /** 625 * Creates attributes for an HDF5 image dataset. 626 * <p> 627 * This method creates attributes for two common types of HDF5 images. It provides a way of adding multiple 628 * attributes to an HDF5 image dataset with a single call. The {@link #writeAttribute(HObject, Attribute, boolean)} 629 * method may be used to write image attributes that are not handled by this method. 630 * <p> 631 * For more information about HDF5 image attributes, see the 632 * <a href="https://support.hdfgroup.org/HDF5/doc/ADGuide/ImageSpec.html"> HDF5 Image and Palette Specification</a>. 633 * <p> 634 * This method can be called to create attributes for 24-bit true color and indexed images. The 635 * <code>selectionFlag</code> parameter controls whether this will be an indexed or true color image. If 636 * <code>selectionFlag</code> is <code>-1</code>, this will be an indexed image. If the value is 637 * <code>ScalarDS.INTERLACE_PIXEL</code> or <code>ScalarDS.INTERLACE_PLANE</code>, it will be a 24-bit true color 638 * image with the indicated interlace mode. 639 * <p> 640 * <ul> 641 * The created attribute descriptions, names, and values are: 642 * <li>The image identifier: name="CLASS", value="IMAGE" 643 * <li>The version of image: name="IMAGE_VERSION", value="1.2" 644 * <li>The range of data values: name="IMAGE_MINMAXRANGE", value=[0, 255] 645 * <li>The type of the image: name="IMAGE_SUBCLASS", value="IMAGE_TRUECOLOR" or "IMAGE_INDEXED" 646 * <li>For IMAGE_TRUECOLOR, the interlace mode: name="INTERLACE_MODE", value="INTERLACE_PIXEL" or "INTERLACE_PLANE" 647 * <li>For IMAGE_INDEXED, the palettes to use in viewing the image: name="PALETTE", value= 1-d array of references 648 * to the palette datasets, with initial value of {-1} 649 * </ul> 650 * <p> 651 * This method is in the H5File class rather than H5ScalarDS because images are typically thought of at the File 652 * Format implementation level. 653 * 654 * @param dataset 655 * The image dataset the attributes are added to. 656 * @param selectionFlag 657 * Selects the image type and, for 24-bit true color images, the interlace mode. Valid values are: 658 * <ul> 659 * <li>-1: Indexed Image. <li>ScalarDS.INTERLACE_PIXEL: True Color Image. The component values for a 660 * pixel are stored contiguously. <li>ScalarDS.INTERLACE_PLANE: True Color Image. Each component is 661 * stored in a separate plane. 662 * </ul> 663 * 664 * @throws Exception 665 * If there is a problem creating the attributes, or if the selectionFlag is invalid. 
666 */ 667 private static final void createImageAttributes(Dataset dataset, int selectionFlag) throws Exception { 668 log.trace("createImageAttributes(): start: dataset={}", dataset.toString()); 669 String subclass = null; 670 String interlaceMode = null; 671 672 if (selectionFlag == ScalarDS.INTERLACE_PIXEL) { 673 log.trace("createImageAttributes(): subclass IMAGE_TRUECOLOR selectionFlag INTERLACE_PIXEL"); 674 subclass = "IMAGE_TRUECOLOR"; 675 interlaceMode = "INTERLACE_PIXEL"; 676 } 677 else if (selectionFlag == ScalarDS.INTERLACE_PLANE) { 678 log.trace("createImageAttributes(): subclass IMAGE_TRUECOLOR selectionFlag INTERLACE_PLANE"); 679 subclass = "IMAGE_TRUECOLOR"; 680 interlaceMode = "INTERLACE_PLANE"; 681 } 682 else if (selectionFlag == -1) { 683 log.trace("createImageAttributes(): subclass IMAGE_INDEXED"); 684 subclass = "IMAGE_INDEXED"; 685 } 686 else { 687 log.debug("createImageAttributes(): invalid selectionFlag"); 688 throw new HDF5Exception("The selectionFlag is invalid."); 689 } 690 691 String attrName = "CLASS"; 692 String[] classValue = { "IMAGE" }; 693 Datatype attrType = new H5Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE); 694 Attribute attr = new Attribute(dataset, attrName, attrType, null); 695 attr.write(classValue); 696 697 attrName = "IMAGE_VERSION"; 698 String[] versionValue = { "1.2" }; 699 attrType = new H5Datatype(Datatype.CLASS_STRING, versionValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE); 700 attr = new Attribute(dataset, attrName, attrType, null); 701 attr.write(versionValue); 702 703 long[] attrDims = { 2 }; 704 attrName = "IMAGE_MINMAXRANGE"; 705 byte[] attrValueInt = { 0, (byte) 255 }; 706 attrType = new H5Datatype(Datatype.CLASS_CHAR, 1, Datatype.NATIVE, Datatype.SIGN_NONE); 707 attr = new Attribute(dataset, attrName, attrType, attrDims); 708 attr.write(attrValueInt); 709 710 attrName = "IMAGE_SUBCLASS"; 711 String[] subclassValue = { subclass }; 712 attrType = new H5Datatype(Datatype.CLASS_STRING, subclassValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE); 713 attr = new Attribute(dataset, attrName, attrType, null); 714 attr.write(subclassValue); 715 716 if ((selectionFlag == ScalarDS.INTERLACE_PIXEL) || (selectionFlag == ScalarDS.INTERLACE_PLANE)) { 717 attrName = "INTERLACE_MODE"; 718 String[] interlaceValue = { interlaceMode }; 719 attrType = new H5Datatype(Datatype.CLASS_STRING, interlaceValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE); 720 attr = new Attribute(dataset, attrName, attrType, null); 721 attr.write(interlaceValue); 722 } 723 else { 724 attrName = "PALETTE"; 725 long[] palRef = { 0 }; // set ref to null 726 attrType = new H5Datatype(Datatype.CLASS_REFERENCE, 1, Datatype.NATIVE, Datatype.SIGN_NONE); 727 attr = new Attribute(dataset, attrName, attrType, null); 728 attr.write(palRef); 729 } 730 } 731 732 /** 733 * Updates values of scalar dataset object references in copied file. 734 * <p> 735 * This method has very specific functionality as documented below, and the user is advised to pay close attention 736 * when dealing with files that contain references. 737 * <p> 738 * When a copy is made from one HDF file to another, object references and dataset region references are copied, but 739 * the references in the destination file are not updated by the copy and are therefore invalid. 
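     * <p>
     * A minimal usage sketch (file names are illustrative); the destination file is assumed to be a
     * complete copy of the source file, and both files are opened before the update:
     *
     * <pre>
     * H5File srcFile = new H5File("source.h5", FileFormat.READ);
     * H5File dstFile = new H5File("source_copy.h5", FileFormat.WRITE);
     * srcFile.open();
     * dstFile.open();
     * H5File.updateReferenceDataset(srcFile, dstFile);
     * </pre>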
740 * <p> 741 * When an entire file is copied, this method updates the values of the object references and dataset region 742 * references that are in scalar datasets in the destination file so that they point to the correct object(s) in the 743 * destination file. The method does not update references that occur in objects other than scalar datasets. 744 * <p> 745 * In the current release, the updating of object references is not handled completely as it was not required by the 746 * projects that funded development. There is no support for updates when the copy does not include the entire file. 747 * Nor is there support for updating objects other than scalar datasets in full-file copies. This functionality will 748 * be extended as funding becomes available or, possibly, when the underlying HDF library supports the reference 749 * updates itself. 750 * 751 * @param srcFile 752 * The file that was copied. 753 * @param dstFile 754 * The destination file where the object references will be updated. 755 * 756 * @throws Exception 757 * If there is a problem in the update process. 758 */ 759 public static final void updateReferenceDataset(H5File srcFile, H5File dstFile) throws Exception { 760 if ((srcFile == null) || (dstFile == null)) { 761 log.debug("updateReferenceDataset(): srcFile or dstFile is null"); 762 return; 763 } 764 765 HObject srcRoot = srcFile.getRootObject(); 766 HObject newRoot = dstFile.getRootObject(); 767 768 Iterator<HObject> srcIt = getMembersBreadthFirst(srcRoot).iterator(); 769 Iterator<HObject> newIt = getMembersBreadthFirst(newRoot).iterator(); 770 771 long did = -1; 772 // build one-to-one table of between objects in 773 // the source file and new file 774 long tid = -1; 775 HObject srcObj, newObj; 776 Hashtable<String, long[]> oidMap = new Hashtable<>(); 777 List<ScalarDS> refDatasets = new Vector<>(); 778 while (newIt.hasNext() && srcIt.hasNext()) { 779 srcObj = srcIt.next(); 780 newObj = newIt.next(); 781 oidMap.put(String.valueOf((srcObj.getOID())[0]), newObj.getOID()); 782 did = -1; 783 tid = -1; 784 785 // for Scalar DataSets in destination, if there is an object 786 // reference in the dataset, add it to the refDatasets list for 787 // later updating. 788 if (newObj instanceof ScalarDS) { 789 ScalarDS sd = (ScalarDS) newObj; 790 did = sd.open(); 791 if (did >= 0) { 792 try { 793 tid = H5.H5Dget_type(did); 794 if (H5.H5Tequal(tid, HDF5Constants.H5T_STD_REF_OBJ)) { 795 refDatasets.add(sd); 796 } 797 } 798 catch (Exception ex) { 799 log.debug("updateReferenceDataset(): ScalarDS reference failure: ", ex); 800 } 801 finally { 802 try { 803 H5.H5Tclose(tid); 804 } 805 catch (Exception ex) { 806 log.debug("updateReferenceDataset(): ScalarDS reference H5Tclose(tid {}) failure: ", tid, ex); 807 } 808 } 809 } 810 sd.close(did); 811 } // (newObj instanceof ScalarDS) 812 } 813 814 // Update the references in the scalar datasets in the dest file. 
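        // For each reference dataset collected above: read its raw reference values as bytes,
        // convert them to longs, remap each source object ID to the matching destination object
        // ID through oidMap, and write the corrected references back to the dataset.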
815 H5ScalarDS d = null; 816 long sid = -1; 817 int size = 0; 818 int rank = 0; 819 int n = refDatasets.size(); 820 for (int i = 0; i < n; i++) { 821 log.trace("updateReferenceDataset(): Update the references in the scalar datasets in the dest file"); 822 d = (H5ScalarDS) refDatasets.get(i); 823 byte[] buf = null; 824 long[] refs = null; 825 826 try { 827 did = d.open(); 828 if (did >= 0) { 829 tid = H5.H5Dget_type(did); 830 sid = H5.H5Dget_space(did); 831 rank = H5.H5Sget_simple_extent_ndims(sid); 832 size = 1; 833 if (rank > 0) { 834 long[] dims = new long[rank]; 835 H5.H5Sget_simple_extent_dims(sid, dims, null); 836 log.trace("updateReferenceDataset(): rank={}, dims={}", rank, dims); 837 for (int j = 0; j < rank; j++) { 838 size *= (int) dims[j]; 839 } 840 dims = null; 841 } 842 843 buf = new byte[size * 8]; 844 H5.H5Dread(did, tid, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, buf); 845 846 // update the ref values 847 refs = HDFNativeData.byteToLong(buf); 848 size = refs.length; 849 for (int j = 0; j < size; j++) { 850 long[] theOID = oidMap.get(String.valueOf(refs[j])); 851 if (theOID != null) { 852 refs[j] = theOID[0]; 853 } 854 } 855 856 // write back to file 857 H5.H5Dwrite(did, tid, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, refs); 858 } 859 else { 860 log.debug("updateReferenceDataset(): dest file dataset failed to open"); 861 } 862 } 863 catch (Exception ex) { 864 log.debug("updateReferenceDataset(): Reference[{}] failure: ", i, ex); 865 continue; 866 } 867 finally { 868 try { 869 H5.H5Tclose(tid); 870 } 871 catch (Exception ex) { 872 log.debug("updateReferenceDataset(): H5ScalarDS reference[{}] H5Tclose(tid {}) failure: ", i, tid, ex); 873 } 874 try { 875 H5.H5Sclose(sid); 876 } 877 catch (Exception ex) { 878 log.debug("updateReferenceDataset(): H5ScalarDS reference[{}] H5Sclose(sid {}) failure: ", i, sid, ex); 879 } 880 try { 881 H5.H5Dclose(did); 882 } 883 catch (Exception ex) { 884 log.debug("updateReferenceDataset(): H5ScalarDS reference[{}] H5Dclose(did {}) failure: ", i, did, ex); 885 } 886 } 887 888 refs = null; 889 buf = null; 890 } // (int i=0; i<n; i++) 891 } 892 893 /*************************************************************************** 894 * Implementation Class methods. These methods are related to the implementing H5File class, but not to a particular 895 * instance of the class. Since we can't override class methods (they can only be shadowed in Java), these are 896 * instance methods. 897 **************************************************************************/ 898 899 /** 900 * Returns the version of the HDF5 library. 901 * 902 * @see hdf.object.FileFormat#getLibversion() 903 */ 904 @Override 905 public String getLibversion() { 906 int[] vers = new int[3]; 907 String ver = "HDF5 "; 908 909 try { 910 H5.H5get_libversion(vers); 911 } 912 catch (Exception ex) { 913 ex.printStackTrace(); 914 } 915 916 ver += vers[0] + "." + vers[1] + "." + vers[2]; 917 log.debug("getLibversion(): libversion is {}", ver); 918 919 return ver; 920 } 921 922 /** 923 * Checks if the specified FileFormat instance has the HDF5 format. 924 * 925 * @see hdf.object.FileFormat#isThisType(hdf.object.FileFormat) 926 */ 927 @Override 928 public boolean isThisType(FileFormat theFile) { 929 return (theFile instanceof H5File); 930 } 931 932 /** 933 * Checks if the specified file has the HDF5 format. 
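     * <p>
     * A minimal usage sketch (the file name is illustrative):
     *
     * <pre>
     * H5File h5file = new H5File();
     * if (h5file.isThisType("test_hdf5.h5")) {
     *     // the file has the HDF5 format
     * }
     * </pre>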
934 * 935 * @see hdf.object.FileFormat#isThisType(java.lang.String) 936 */ 937 @Override 938 public boolean isThisType(String filename) { 939 boolean isH5 = false; 940 941 try { 942 isH5 = H5.H5Fis_hdf5(filename); 943 } 944 catch (HDF5Exception ex) { 945 isH5 = false; 946 } 947 948 return isH5; 949 } 950 951 /** 952 * Creates an HDF5 file with the specified name and returns a new H5File instance associated with the file. 953 * 954 * @throws Exception 955 * If the file cannot be created or if createFlag has unexpected value. 956 * 957 * @see hdf.object.FileFormat#createFile(java.lang.String, int) 958 * @see #H5File(String, int) 959 */ 960 @Override 961 public FileFormat createFile(String filename, int createFlag) throws Exception { 962 log.trace("createFile(): start: filename={} createFlag={}", filename, createFlag); 963 // Flag if we need to create or truncate the file. 964 Boolean doCreateFile = true; 965 966 // Won't create or truncate if CREATE_OPEN specified and file exists 967 if ((createFlag & FILE_CREATE_OPEN) == FILE_CREATE_OPEN) { 968 File f = new File(filename); 969 if (f.exists()) { 970 doCreateFile = false; 971 } 972 } 973 log.trace("createFile(): doCreateFile={}", doCreateFile); 974 975 if (doCreateFile) { 976 long fapl = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS); 977 978 if ((createFlag & FILE_CREATE_EARLY_LIB) == FILE_CREATE_EARLY_LIB) { 979 int[] newlibver = getLibBounds(); 980 H5.H5Pset_libver_bounds(fapl, newlibver[0], newlibver[1]); 981 } 982 983 long fileid = H5.H5Fcreate(filename, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, fapl); 984 try { 985 H5.H5Pclose(fapl); 986 H5.H5Fclose(fileid); 987 } 988 catch (HDF5Exception ex) { 989 log.debug("H5 file, {} failure: ", filename, ex); 990 } 991 } 992 993 return new H5File(filename, WRITE); 994 } 995 996 /** 997 * Creates an H5File instance with specified file name and access. 998 * 999 * @see hdf.object.FileFormat#createInstance(java.lang.String, int) 1000 * @see #H5File(String, int) 1001 * 1002 * @throws Exception 1003 * If there is a failure. 1004 */ 1005 @Override 1006 public FileFormat createInstance(String filename, int access) throws Exception { 1007 log.trace("createInstance() for {}", filename); 1008 return new H5File(filename, access); 1009 } 1010 1011 /*************************************************************************** 1012 * Instance Methods 1013 * 1014 * These methods are related to the H5File class and to particular instances of objects with this class type. 1015 **************************************************************************/ 1016 1017 /** 1018 * Opens file and returns a file identifier. 1019 * 1020 * @see hdf.object.FileFormat#open() 1021 */ 1022 @Override 1023 public long open() throws Exception { 1024 return open(true); 1025 } 1026 1027 /** 1028 * Opens file and returns a file identifier. 1029 * 1030 * @see hdf.object.FileFormat#open(int...) 1031 */ 1032 @Override 1033 public long open(int... indexList) throws Exception { 1034 setIndexType(indexList[0]); 1035 setIndexOrder(indexList[1]); 1036 return open(true); 1037 } 1038 1039 /** 1040 * Sets the bounds of new library versions. 1041 * 1042 * @param lowStr 1043 * The earliest version of the library. 1044 * @param highStr 1045 * The latest version of the library. 1046 * 1047 * @throws Exception 1048 * If there is an error at the HDF5 library level. 
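     * <p>
     * A minimal usage sketch (the file name is illustrative); recognized values include
     * "Earliest", "V18", "V110" and "Latest", and anything else falls back to the default bound:
     *
     * <pre>
     * H5File file = new H5File("test_hdf5.h5", FileFormat.CREATE);
     * file.setNewLibBounds("V18", "Latest");
     * </pre>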
1049 */ 1050 @Override 1051 public void setNewLibBounds(String lowStr, String highStr) throws Exception { 1052 int low = -1; 1053 int high = -1; 1054 1055 if (lowStr == null) { 1056 low = HDF5Constants.H5F_LIBVER_EARLIEST; 1057 } 1058 else if(lowStr.equals("Earliest")) { 1059 low = HDF5Constants.H5F_LIBVER_EARLIEST; 1060 } 1061 else if(lowStr.equals("V18")) { 1062 low = HDF5Constants.H5F_LIBVER_V18; 1063 } 1064 else if(lowStr.equals("V110")) { 1065 low = HDF5Constants.H5F_LIBVER_V110; 1066 } 1067 else if(lowStr.equals("Latest")) { 1068 low = HDF5Constants.H5F_LIBVER_LATEST; 1069 } 1070 else { 1071 low = HDF5Constants.H5F_LIBVER_EARLIEST; 1072 } 1073 1074 if (highStr == null) { 1075 high = HDF5Constants.H5F_LIBVER_LATEST; 1076 } 1077 else if(highStr.equals("V18")) { 1078 high = HDF5Constants.H5F_LIBVER_V18; 1079 } 1080 else if(highStr.equals("V110")) { 1081 high = HDF5Constants.H5F_LIBVER_V110; 1082 } 1083 else if(highStr.equals("Latest")) { 1084 high = HDF5Constants.H5F_LIBVER_LATEST; 1085 } 1086 else { 1087 high = HDF5Constants.H5F_LIBVER_LATEST; 1088 } 1089 libver[0] = low; 1090 libver[1] = high; 1091 } 1092 1093 /** 1094 * Sets the bounds of library versions. 1095 * 1096 * @param lowStr 1097 * The earliest version of the library. 1098 * @param highStr 1099 * The latest version of the library. 1100 * 1101 * @throws Exception 1102 * If there is an error at the HDF5 library level. 1103 */ 1104 @Override 1105 public void setLibBounds(String lowStr, String highStr) throws Exception { 1106 long fapl = HDF5Constants.H5P_DEFAULT; 1107 1108 if (fid < 0) 1109 return; 1110 1111 fapl = H5.H5Fget_access_plist(fid); 1112 1113 try { 1114 int low = -1; 1115 int high = -1; 1116 1117 if (lowStr == null) { 1118 low = HDF5Constants.H5F_LIBVER_EARLIEST; 1119 } 1120 else if(lowStr.equals("Earliest")) { 1121 low = HDF5Constants.H5F_LIBVER_EARLIEST; 1122 } 1123 else if(lowStr.equals("V18")) { 1124 low = HDF5Constants.H5F_LIBVER_V18; 1125 } 1126 else if(lowStr.equals("V110")) { 1127 low = HDF5Constants.H5F_LIBVER_V110; 1128 } 1129 else if(lowStr.equals("Latest")) { 1130 low = HDF5Constants.H5F_LIBVER_LATEST; 1131 } 1132 else { 1133 low = HDF5Constants.H5F_LIBVER_EARLIEST; 1134 } 1135 1136 if (highStr == null) { 1137 high = HDF5Constants.H5F_LIBVER_LATEST; 1138 } 1139 else if(highStr.equals("V18")) { 1140 high = HDF5Constants.H5F_LIBVER_V18; 1141 } 1142 else if(highStr.equals("V110")) { 1143 high = HDF5Constants.H5F_LIBVER_V110; 1144 } 1145 else if(highStr.equals("Latest")) { 1146 high = HDF5Constants.H5F_LIBVER_LATEST; 1147 } 1148 else { 1149 high = HDF5Constants.H5F_LIBVER_LATEST; 1150 } 1151 1152 H5.H5Pset_libver_bounds(fapl, low, high); 1153 H5.H5Pget_libver_bounds(fapl, libver); 1154 } 1155 finally { 1156 try { 1157 H5.H5Pclose(fapl); 1158 } 1159 catch (Exception e) { 1160 log.debug("setLibBounds(): libver bounds H5Pclose(fapl {}) failure: ", fapl, e); 1161 } 1162 } 1163 } 1164 1165 /** 1166 * Gets the bounds of library versions. 1167 * 1168 * @return libver The earliest and latest version of the library. 1169 * 1170 * @throws Exception 1171 * If there is an error at the HDF5 library level. 1172 */ 1173 @Override 1174 public int[] getLibBounds() throws Exception { 1175 return libver; 1176 } 1177 1178 /** 1179 * Gets the bounds of library versions as text. 1180 * 1181 * @return libversion The earliest and latest version of the library. 
1182 */ 1183 @Override 1184 public String getLibBoundsDescription() { 1185 String libversion = ""; 1186 1187 if (libver[0] == HDF5Constants.H5F_LIBVER_EARLIEST) { 1188 libversion = "Earliest and "; 1189 } 1190 else if (libver[0] == HDF5Constants.H5F_LIBVER_V18) { 1191 libversion = "V18 and "; 1192 } 1193 else if (libver[0] == HDF5Constants.H5F_LIBVER_V110) { 1194 libversion = "V110 and "; 1195 } 1196 else if (libver[0] == HDF5Constants.H5F_LIBVER_LATEST) { 1197 libversion = "Latest and "; 1198 } 1199 if (libver[1] == HDF5Constants.H5F_LIBVER_EARLIEST) { 1200 libversion += "Earliest"; 1201 } 1202 else if (libver[1] == HDF5Constants.H5F_LIBVER_V18) { 1203 libversion += "V18"; 1204 } 1205 else if (libver[1] == HDF5Constants.H5F_LIBVER_V110) { 1206 libversion += "V110"; 1207 } 1208 else if (libver[1] == HDF5Constants.H5F_LIBVER_LATEST) { 1209 libversion += "Latest"; 1210 } 1211 return libversion; 1212 } 1213 1214 /** 1215 * Closes file associated with this H5File instance. 1216 * 1217 * @see hdf.object.FileFormat#close() 1218 * 1219 * @throws HDF5Exception 1220 * If there is an error at the HDF5 library level. 1221 */ 1222 @Override 1223 public void close() throws HDF5Exception { 1224 if (fid < 0) { 1225 log.debug("close(): file {} is not open", fullFileName); 1226 return; 1227 } 1228 // The current working directory may be changed at Dataset.read() 1229 // by System.setProperty("user.dir", newdir) to make it work for external 1230 // datasets. We need to set it back to the original current working 1231 // directory (when hdf-java application started) before the file 1232 // is closed/opened. Otherwise, relative path, e.g. "./test.h5" may 1233 // not work 1234 String rootPath = System.getProperty("hdfview.workdir"); 1235 if (rootPath == null) { 1236 rootPath = System.getProperty("user.dir"); 1237 } 1238 System.setProperty("user.dir", rootPath);//H5.H5Dchdir_ext(rootPath); 1239 1240 // clean up unused objects 1241 if (rootObject != null) { 1242 HObject theObj = null; 1243 Iterator<HObject> it = getMembersBreadthFirst(rootObject).iterator(); 1244 while (it.hasNext()) { 1245 theObj = it.next(); 1246 1247 if (theObj instanceof Dataset) { 1248 log.trace("close(): clear Dataset {}", ((Dataset) theObj).toString()); 1249 ((Dataset) theObj).clear(); 1250 } 1251 else if (theObj instanceof Group) { 1252 log.trace("close(): clear Group {}", ((Group) theObj).toString()); 1253 ((Group) theObj).clear(); 1254 } 1255 } 1256 } 1257 1258 // Close all open objects associated with this file. 
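        // Enumerate every identifier still open against this file and close it by type
        // (dataset, group, named datatype, or attribute) before flushing and closing the
        // file identifier itself.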
1259 try { 1260 int type = -1; 1261 long[] oids; 1262 long n = H5.H5Fget_obj_count(fid, HDF5Constants.H5F_OBJ_ALL); 1263 log.trace("close(): open objects={}", n); 1264 1265 if (n > 0) { 1266 if (n < Integer.MIN_VALUE || n > Integer.MAX_VALUE) throw new Exception("Invalid int size"); 1267 1268 oids = new long[(int)n]; 1269 H5.H5Fget_obj_ids(fid, HDF5Constants.H5F_OBJ_ALL, n, oids); 1270 1271 for (int i = 0; i < (int)n; i++) { 1272 log.trace("close(): object[{}] id={}", i, oids[i]); 1273 type = H5.H5Iget_type(oids[i]); 1274 1275 if (HDF5Constants.H5I_DATASET == type) { 1276 try { 1277 H5.H5Dclose(oids[i]); 1278 } 1279 catch (Exception ex2) { 1280 log.debug("close(): Object[{}] H5Dclose(oids[{}] {}) failure: ", i, i, oids[i], ex2); 1281 } 1282 } 1283 else if (HDF5Constants.H5I_GROUP == type) { 1284 try { 1285 H5.H5Gclose(oids[i]); 1286 } 1287 catch (Exception ex2) { 1288 log.debug("close(): Object[{}] H5Gclose(oids[{}] {}) failure: ", i, i, oids[i], ex2); 1289 } 1290 } 1291 else if (HDF5Constants.H5I_DATATYPE == type) { 1292 try { 1293 H5.H5Tclose(oids[i]); 1294 } 1295 catch (Exception ex2) { 1296 log.debug("close(): Object[{}] H5Tclose(oids[{}] {}) failure: ", i, i, oids[i], ex2); 1297 } 1298 } 1299 else if (HDF5Constants.H5I_ATTR == type) { 1300 try { 1301 H5.H5Aclose(oids[i]); 1302 } 1303 catch (Exception ex2) { 1304 log.debug("close(): Object[{}] H5Aclose(oids[{}] {}) failure: ", i, i, oids[i], ex2); 1305 } 1306 } 1307 } // (int i=0; i<n; i++) 1308 } // ( n>0) 1309 } 1310 catch (Exception ex) { 1311 log.debug("close(): failure: ", ex); 1312 } 1313 1314 try { 1315 H5.H5Fflush(fid, HDF5Constants.H5F_SCOPE_GLOBAL); 1316 } 1317 catch (Exception ex) { 1318 log.debug("close(): H5Fflush(fid {}) failure: ", fid, ex); 1319 } 1320 1321 try { 1322 H5.H5Fclose(fid); 1323 } 1324 catch (Exception ex) { 1325 log.debug("close(): H5Fclose(fid {}) failure: ", fid, ex); 1326 } 1327 1328 // Set fid to -1 but don't reset rootObject 1329 fid = -1; 1330 } 1331 1332 /** 1333 * Returns the root object of the open HDF5 File. 1334 * 1335 * @see hdf.object.FileFormat#getRootObject() 1336 */ 1337 @Override 1338 public HObject getRootObject() { 1339 return rootObject; 1340 } 1341 1342 /* 1343 * (non-Javadoc) 1344 * 1345 * @see hdf.object.FileFormat#get(java.lang.String) 1346 */ 1347 @Override 1348 public HObject get(String path) throws Exception { 1349 log.trace("get({}): start", path); 1350 HObject obj = null; 1351 1352 if ((path == null) || (path.length() <= 0)) { 1353 log.debug("get(): path is null or invalid path length"); 1354 System.err.println("(path == null) || (path.length() <= 0)"); 1355 return null; 1356 } 1357 1358 // replace the wrong slash and get rid of "//" 1359 path = path.replace('\\', '/'); 1360 path = "/" + path; 1361 path = path.replaceAll("//", "/"); 1362 1363 // the whole file tree is loaded. 
find the object in the tree 1364 if (rootObject != null) { 1365 obj = findObject(this, path); 1366 } 1367 1368 // found object in memory 1369 if (obj != null) { 1370 log.trace("get(): Found object in memory"); 1371 return obj; 1372 } 1373 1374 // open only the requested object 1375 String name = null; 1376 String pPath = null; 1377 if (path.equals("/")) { 1378 name = "/"; // the root 1379 } 1380 else { 1381 // separate the parent path and the object name 1382 if (path.endsWith("/")) { 1383 path = path.substring(0, path.length() - 1); 1384 } 1385 1386 int idx = path.lastIndexOf('/'); 1387 name = path.substring(idx + 1); 1388 if (idx == 0) { 1389 pPath = "/"; 1390 } 1391 else { 1392 pPath = path.substring(0, idx); 1393 } 1394 } 1395 1396 // do not open the full tree structure, only the file handler 1397 long fid_before_open = fid; 1398 fid = open(false); 1399 if (fid < 0) { 1400 log.debug("get(): Invalid FID"); 1401 System.err.println("Could not open file handler"); 1402 return null; 1403 } 1404 1405 try { 1406 H5O_info_t info; 1407 int objType; 1408 long oid = H5.H5Oopen(fid, path, HDF5Constants.H5P_DEFAULT); 1409 1410 if (oid >= 0) { 1411 info = H5.H5Oget_info(oid); 1412 objType = info.type; 1413 if (objType == HDF5Constants.H5O_TYPE_DATASET) { 1414 long did = -1; 1415 try { 1416 did = H5.H5Dopen(fid, path, HDF5Constants.H5P_DEFAULT); 1417 obj = getDataset(did, name, pPath); 1418 } 1419 finally { 1420 try { 1421 H5.H5Dclose(did); 1422 } 1423 catch (Exception ex) { 1424 log.debug("get(): {} H5Dclose(did {}) failure: ", path, did, ex); 1425 } 1426 } 1427 } 1428 else if (objType == HDF5Constants.H5O_TYPE_GROUP) { 1429 long gid = -1; 1430 try { 1431 gid = H5.H5Gopen(fid, path, HDF5Constants.H5P_DEFAULT); 1432 H5Group pGroup = null; 1433 if (pPath != null) { 1434 pGroup = new H5Group(this, null, pPath, null); 1435 obj = getGroup(gid, name, pGroup); 1436 pGroup.addToMemberList(obj); 1437 } 1438 else { 1439 obj = getGroup(gid, name, pGroup); 1440 } 1441 } 1442 finally { 1443 try { 1444 H5.H5Gclose(gid); 1445 } 1446 catch (Exception ex) { 1447 log.debug("get(): {} H5Gclose(gid {}) failure: ", path, gid, ex); 1448 } 1449 } 1450 } 1451 else if (objType == HDF5Constants.H5O_TYPE_NAMED_DATATYPE) { 1452 obj = new H5Datatype(this, name, pPath); 1453 } 1454 } 1455 try { 1456 H5.H5Oclose(oid); 1457 } 1458 catch (Exception ex) { 1459 log.debug("get(): H5Oclose(oid {}) failure: ", oid, ex); 1460 ex.printStackTrace(); 1461 } 1462 } 1463 catch (Exception ex) { 1464 log.debug("get(): Exception finding obj {}", path, ex); 1465 obj = null; 1466 } 1467 finally { 1468 if ((fid_before_open <= 0) && (obj == null)) { 1469 // close the fid that is not attached to any object 1470 try { 1471 H5.H5Fclose(fid); 1472 } 1473 catch (Exception ex) { 1474 log.debug("get(): {} H5Fclose(fid {}) failure: ", path, fid, ex); 1475 } 1476 fid = fid_before_open; 1477 } 1478 } 1479 1480 return obj; 1481 } 1482 1483 1484 /** 1485 * Creates a named datatype in a file. 1486 * <p> 1487 * The following code creates a named datatype in a file. 
1488 * 1489 * <pre> 1490 * H5File file = (H5File) h5file.createInstance("test_hdf5.h5", FileFormat.WRITE); 1491 * Datatype dtype = file.createDatatype( 1492 * Datatype.CLASS_INTEGER, 1493 * 4, 1494 * Datatype.NATIVE, 1495 * Datatype.NATIVE, 1496 * basetype); 1497 * H5Datatype h5dtype = file.createNamedDatatype( 1498 * dtype, 1499 * null, 1500 * "Native Integer"); 1501 * </pre> 1502 * 1503 * @param tnative 1504 * native datatype previously created 1505 * @param name 1506 * name of the datatype to create, e.g. "Native Integer". 1507 * @return The new datatype if successful; otherwise returns null. 1508 * @throws Exception 1509 * The exceptions thrown vary depending on the implementing class. 1510 */ 1511 public Datatype createNamedDatatype(Datatype tnative, String name) throws Exception { 1512 log.trace("createNamedDatatype(): start: name={}", name); 1513 1514 H5Datatype dtype = null; 1515 1516 if (name != null ) { 1517 long tid = -1; 1518 log.trace("createNamedDatatype(): name={}", name); 1519 try { 1520 tnative.setFullname(name, null); 1521 } 1522 catch (Exception ex) { 1523 log.debug("createNamedDatatype():setName(): {} failure: {}", name, ex.getMessage()); 1524 } 1525 try { 1526 if ((tid = tnative.createNative()) < 0) { 1527 log.debug("createNamedDatatype(): createNative() failure"); 1528 throw new Exception("createNative() failed"); 1529 } 1530 log.trace("createNamedDatatype(): createNative gets id={}", tid); 1531 1532 H5.H5Tcommit(fid, name, tid, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 1533 1534 byte[] ref_buf = H5.H5Rcreate(fid, name, HDF5Constants.H5R_OBJECT, -1); 1535 long l = HDFNativeData.byteToLong(ref_buf, 0); 1536 1537 long[] oid = new long[1]; 1538 oid[0] = l; // save the object ID 1539 1540 dtype = new H5Datatype(this, name, null, oid); 1541 } 1542 finally { 1543 H5.H5Tclose(tid); 1544 } 1545 } 1546 else { 1547 dtype = (H5Datatype) tnative; 1548 } 1549 1550 return dtype; 1551 } 1552 1553 /*************************************************************************** 1554 * Methods related to Datatypes and HObjects in HDF5 Files. Strictly speaking, these methods aren't related to 1555 * H5File and the actions could be carried out through the H5Group, H5Datatype and H5*DS classes. But, in some cases 1556 * they allow a null input and expect the generated object to be of HDF5 type. So, we put them in the H5File class 1557 * so that we create the proper type of HObject... H5Group for example. 1558 * 1559 * Here again, if there could be Implementation Class methods we'd use those. But, since we can't override class 1560 * methods (they can only be shadowed in Java), these are instance methods. 
1561 * 1562 **************************************************************************/ 1563 1564 /* 1565 * (non-Javadoc) 1566 * 1567 * @see hdf.object.FileFormat#createDatatype(int, int, int, int) 1568 */ 1569 @Override 1570 public Datatype createDatatype(int tclass, int tsize, int torder, int tsign) throws Exception { 1571 return new H5Datatype(tclass, tsize, torder, tsign); 1572 } 1573 1574 /* 1575 * (non-Javadoc) 1576 * 1577 * @see hdf.object.FileFormat#createDatatype(int, int, int, int, Datatype) 1578 */ 1579 @Override 1580 public Datatype createDatatype(int tclass, int tsize, int torder, int tsign, Datatype tbase) throws Exception { 1581 return new H5Datatype(tclass, tsize, torder, tsign, tbase); 1582 } 1583 1584 /* 1585 * (non-Javadoc) 1586 * 1587 * @see hdf.object.FileFormat#createScalarDS(java.lang.String, hdf.object.Group, hdf.object.Datatype, 1588 * long[], long[], long[], int, java.lang.Object) 1589 */ 1590 @Override 1591 public Dataset createScalarDS(String name, Group pgroup, Datatype type, 1592 long[] dims, long[] maxdims, long[] chunks, 1593 int gzip, Object fillValue, Object data) throws Exception 1594 { 1595 log.trace("createScalarDS(): name={}", name); 1596 if (pgroup == null) { 1597 // create new dataset at the root group by default 1598 pgroup = (Group) get("/"); 1599 } 1600 1601 return H5ScalarDS.create(name, pgroup, type, dims, maxdims, chunks, gzip, fillValue, data); 1602 } 1603 1604 /* 1605 * (non-Javadoc) 1606 * 1607 * @see hdf.object.FileFormat#createCompoundDS(java.lang.String, hdf.object.Group, long[], long[], long[], 1608 * int, java.lang.String[], hdf.object.Datatype[], int[], java.lang.Object) 1609 */ 1610 @Override 1611 public Dataset createCompoundDS(String name, Group pgroup, 1612 long[] dims, long[] maxdims, long[] chunks, int gzip, 1613 String[] memberNames, Datatype[] memberDatatypes, int[] memberSizes, Object data) throws Exception 1614 { 1615 log.trace("createCompoundDS(): start: name={}", name); 1616 int nMembers = memberNames.length; 1617 int memberRanks[] = new int[nMembers]; 1618 long memberDims[][] = new long[nMembers][1]; 1619 Dataset ds = null; 1620 1621 for (int i = 0; i < nMembers; i++) { 1622 memberRanks[i] = 1; 1623 if (memberSizes == null) { 1624 memberDims[i][0] = 1; 1625 } 1626 else { 1627 memberDims[i][0] = memberSizes[i]; 1628 } 1629 } 1630 1631 if (pgroup == null) { 1632 // create new dataset at the root group by default 1633 pgroup = (Group) get("/"); 1634 } 1635 ds = H5CompoundDS.create(name, pgroup, dims, maxdims, chunks, gzip, 1636 memberNames, memberDatatypes, memberRanks, memberDims, data); 1637 1638 return ds; 1639 } 1640 1641 /* 1642 * (non-Javadoc) 1643 * 1644 * @see hdf.object.FileFormat#createImage(java.lang.String, hdf.object.Group, hdf.object.Datatype, 1645 * long[], long[], long[], int, int, int, java.lang.Object) 1646 */ 1647 @Override 1648 public Dataset createImage(String name, Group pgroup, Datatype type, 1649 long[] dims, long[] maxdims, long[] chunks, 1650 int gzip, int ncomp, int interlace, Object data) throws Exception 1651 { 1652 log.trace("createImage(): start: name={}", name); 1653 if (pgroup == null) { // create at the root group by default 1654 pgroup = (Group) get("/"); 1655 } 1656 1657 H5ScalarDS dataset = (H5ScalarDS)H5ScalarDS.create(name, pgroup, type, dims, maxdims, chunks, gzip, data); 1658 1659 try { 1660 H5File.createImageAttributes(dataset, interlace); 1661 dataset.setIsImage(true); 1662 } 1663 catch (Exception ex) { 1664 log.debug("createImage(): {} createImageAttributtes failure: ", name, ex); 
1665 } 1666 1667 return dataset; 1668 } 1669 1670 /*** 1671 * Creates a new group with specified name in existing group. 1672 * 1673 * @see hdf.object.FileFormat#createGroup(java.lang.String, hdf.object.Group) 1674 */ 1675 @Override 1676 public Group createGroup(String name, Group pgroup) throws Exception { 1677 return this.createGroup(name, pgroup, HDF5Constants.H5P_DEFAULT); 1678 } 1679 1680 /*** 1681 * Creates a new group with specified name in existing group and with the group creation properties list, gplist. 1682 * 1683 * @see hdf.object.h5.H5Group#create(java.lang.String, hdf.object.Group, long...) 1684 * 1685 */ 1686 @Override 1687 public Group createGroup(String name, Group pgroup, long... gplist) throws Exception { 1688 // create new group at the root 1689 if (pgroup == null) { 1690 pgroup = (Group) this.get("/"); 1691 } 1692 1693 return H5Group.create(name, pgroup, gplist); 1694 } 1695 1696 /*** 1697 * Creates the group creation property list identifier, gcpl. This identifier is used when creating Groups. 1698 * 1699 * @see hdf.object.FileFormat#createGcpl(int, int, int) 1700 * 1701 */ 1702 @Override 1703 public long createGcpl(int creationorder, int maxcompact, int mindense) throws Exception { 1704 long gcpl = -1; 1705 try { 1706 gcpl = H5.H5Pcreate(HDF5Constants.H5P_GROUP_CREATE); 1707 if (gcpl >= 0) { 1708 // Set link creation order. 1709 if (creationorder == Group.CRT_ORDER_TRACKED) { 1710 log.trace("createGcpl(): creation order ORDER_TRACKED"); 1711 H5.H5Pset_link_creation_order(gcpl, HDF5Constants.H5P_CRT_ORDER_TRACKED); 1712 } 1713 else if (creationorder == Group.CRT_ORDER_INDEXED) { 1714 log.trace("createGcpl(): creation order ORDER_INDEXED"); 1715 H5.H5Pset_link_creation_order(gcpl, HDF5Constants.H5P_CRT_ORDER_TRACKED + HDF5Constants.H5P_CRT_ORDER_INDEXED); 1716 } 1717 // Set link storage. 1718 H5.H5Pset_link_phase_change(gcpl, maxcompact, mindense); 1719 } 1720 } 1721 catch (Exception ex) { 1722 log.debug("createGcpl(): failure: ", ex); 1723 ex.printStackTrace(); 1724 } 1725 1726 return gcpl; 1727 } 1728 1729 /* 1730 * (non-Javadoc) 1731 * 1732 * @see hdf.object.FileFormat#createLink(hdf.object.Group, java.lang.String, hdf.object.HObject) 1733 */ 1734 @Override 1735 public HObject createLink(Group parentGroup, String name, Object currentObj) throws Exception { 1736 if (currentObj instanceof HObject) 1737 return this.createLink(parentGroup, name, (HObject) currentObj, Group.LINK_TYPE_HARD); 1738 else if (currentObj instanceof String) 1739 return this.createLink(parentGroup, name, (String) currentObj, Group.LINK_TYPE_HARD); 1740 1741 return null; 1742 } 1743 1744 /** 1745 * Creates a link to an object in the open file. 1746 * <p> 1747 * If parentGroup is null, the new link is created in the root group. 1748 * 1749 * @param parentGroup 1750 * The group where the link is created. 1751 * @param name 1752 * The name of the link. 1753 * @param currentObj 1754 * The existing object the new link will reference. 1755 * @param lType 1756 * The type of link to be created. It can be a hard link, a soft link or an external link. 1757 * 1758 * @return The object pointed to by the new link if successful; otherwise returns null. 1759 * 1760 * @throws Exception 1761 * The exceptions thrown vary depending on the implementing class. 
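     * <p>
     * A minimal usage sketch (file, grp and dset are illustrative names for an open H5File, an
     * existing group, and an existing dataset in that file):
     *
     * <pre>
     * HObject link = file.createLink(grp, "link_to_dset", dset, Group.LINK_TYPE_SOFT);
     * </pre>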
1762 */ 1763 @Override 1764 public HObject createLink(Group parentGroup, String name, HObject currentObj, int lType) throws Exception { 1765 log.trace("createLink(): start: name={}", name); 1766 HObject obj = null; 1767 int type = 0; 1768 String current_full_name = null; 1769 String new_full_name = null; 1770 String parent_path = null; 1771 1772 if (currentObj == null) { 1773 log.debug("createLink(): Link target is null"); 1774 throw new HDF5Exception("The object pointed to by the link cannot be null."); 1775 } 1776 if ((parentGroup == null) || parentGroup.isRoot()) { 1777 parent_path = HObject.SEPARATOR; 1778 } 1779 else { 1780 parent_path = parentGroup.getPath() + HObject.SEPARATOR + parentGroup.getName() + HObject.SEPARATOR; 1781 } 1782 1783 new_full_name = parent_path + name; 1784 1785 if (lType == Group.LINK_TYPE_HARD) { 1786 type = HDF5Constants.H5L_TYPE_HARD; 1787 log.trace("createLink(): type H5L_TYPE_HARD"); 1788 } 1789 else if (lType == Group.LINK_TYPE_SOFT) { 1790 type = HDF5Constants.H5L_TYPE_SOFT; 1791 log.trace("createLink(): type H5L_TYPE_SOFT"); 1792 } 1793 else if (lType == Group.LINK_TYPE_EXTERNAL) { 1794 type = HDF5Constants.H5L_TYPE_EXTERNAL; 1795 log.trace("createLink(): type H5L_TYPE_EXTERNAL"); 1796 } 1797 1798 if (H5.H5Lexists(fid, new_full_name, HDF5Constants.H5P_DEFAULT)) { 1799 H5.H5Ldelete(fid, new_full_name, HDF5Constants.H5P_DEFAULT); 1800 } 1801 1802 if (type == HDF5Constants.H5L_TYPE_HARD) { 1803 if ((currentObj instanceof Group) && ((Group) currentObj).isRoot()) { 1804 log.debug("createLink(): cannot create link to root group"); 1805 throw new HDF5Exception("Cannot make a link to the root group."); 1806 } 1807 current_full_name = currentObj.getPath() + HObject.SEPARATOR + currentObj.getName(); 1808 1809 H5.H5Lcreate_hard(fid, current_full_name, fid, new_full_name, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 1810 } 1811 1812 else if (type == HDF5Constants.H5L_TYPE_SOFT) { 1813 H5.H5Lcreate_soft(currentObj.getFullName(), fid, new_full_name, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 1814 } 1815 1816 else if (type == HDF5Constants.H5L_TYPE_EXTERNAL) { 1817 H5.H5Lcreate_external(currentObj.getFile(), currentObj.getFullName(), fid, new_full_name, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 1818 } 1819 1820 if (currentObj instanceof Group) { 1821 log.trace("createLink(): Link target is type H5Group"); 1822 obj = new H5Group(this, name, parent_path, parentGroup); 1823 } 1824 else if (currentObj instanceof H5Datatype) { 1825 log.trace("createLink(): Link target is type H5Datatype"); 1826 obj = new H5Datatype(this, name, parent_path); 1827 } 1828 else if (currentObj instanceof H5CompoundDS) { 1829 log.trace("createLink(): Link target is type H5CompoundDS"); 1830 obj = new H5CompoundDS(this, name, parent_path); 1831 } 1832 else if (currentObj instanceof H5ScalarDS) { 1833 log.trace("createLink(): Link target is type H5ScalarDS"); 1834 obj = new H5ScalarDS(this, name, parent_path); 1835 } 1836 1837 return obj; 1838 } 1839 1840 /** 1841 * Creates a soft or external link to object in a file that does not exist at the time the link is created. 1842 * 1843 * @param parentGroup 1844 * The group where the link is created. 1845 * @param name 1846 * The name of the link. 1847 * @param currentObj 1848 * The name of the object the new link will reference. The object doesn't have to exist. 1849 * @param lType 1850 * The type of link to be created. 
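     *            It can be a soft link or an external link; a hard link requires an existing target object.
     * <p>
     * For example, a sketch with placeholder names (assumes <code>h5file</code> is this file, open
     * with write access; the link targets need not exist yet):
     *
     * <pre>
     * Group root = (Group) h5file.get("/");
     * // dangling soft link to an object that may be created later
     * h5file.createLink(root, "future_link", "/not_yet_created", Group.LINK_TYPE_SOFT);
     * // external link to a dataset in another file: target is file + FILE_OBJ_SEP + object path
     * h5file.createLink(root, "ext_link", "other.h5" + FileFormat.FILE_OBJ_SEP + "/dset",
     *         Group.LINK_TYPE_EXTERNAL);
     * </pre>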
1851 * 1852 * @return The H5Link object pointed to by the new link if successful; otherwise returns null. 1853 * 1854 * @throws Exception 1855 * The exceptions thrown vary depending on the implementing class. 1856 */ 1857 @Override 1858 public HObject createLink(Group parentGroup, String name, String currentObj, int lType) throws Exception { 1859 log.trace("createLink(): start: name={}", name); 1860 HObject obj = null; 1861 int type = 0; 1862 String new_full_name = null; 1863 String parent_path = null; 1864 1865 if (currentObj == null) { 1866 log.debug("createLink(): Link target is null"); 1867 throw new HDF5Exception("The object pointed to by the link cannot be null."); 1868 } 1869 if ((parentGroup == null) || parentGroup.isRoot()) { 1870 parent_path = HObject.SEPARATOR; 1871 } 1872 else { 1873 parent_path = parentGroup.getPath() + HObject.SEPARATOR + parentGroup.getName() + HObject.SEPARATOR; 1874 } 1875 1876 new_full_name = parent_path + name; 1877 1878 if (lType == Group.LINK_TYPE_HARD) { 1879 type = HDF5Constants.H5L_TYPE_HARD; 1880 log.trace("createLink(): type H5L_TYPE_HARD"); 1881 } 1882 else if (lType == Group.LINK_TYPE_SOFT) { 1883 type = HDF5Constants.H5L_TYPE_SOFT; 1884 log.trace("createLink(): type H5L_TYPE_SOFT"); 1885 } 1886 else if (lType == Group.LINK_TYPE_EXTERNAL) { 1887 type = HDF5Constants.H5L_TYPE_EXTERNAL; 1888 log.trace("createLink(): type H5L_TYPE_EXTERNAL"); 1889 } 1890 1891 if (H5.H5Lexists(fid, new_full_name, HDF5Constants.H5P_DEFAULT)) { 1892 H5.H5Ldelete(fid, new_full_name, HDF5Constants.H5P_DEFAULT); 1893 } 1894 1895 if (type == HDF5Constants.H5L_TYPE_SOFT) { 1896 H5.H5Lcreate_soft(currentObj, fid, new_full_name, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 1897 } 1898 1899 else if (type == HDF5Constants.H5L_TYPE_EXTERNAL) { 1900 String fileName = null; 1901 String objectName = null; 1902 1903 // separate the object name and the file name 1904 fileName = currentObj.substring(0, currentObj.lastIndexOf(FileFormat.FILE_OBJ_SEP)); 1905 objectName = currentObj.substring(currentObj.indexOf(FileFormat.FILE_OBJ_SEP)); 1906 objectName = objectName.substring(3); 1907 1908 H5.H5Lcreate_external(fileName, objectName, fid, new_full_name, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 1909 } 1910 1911 if (name.startsWith(HObject.SEPARATOR)) { 1912 name = name.substring(1); 1913 } 1914 obj = new H5Link(this, name, parent_path); 1915 1916 return obj; 1917 } 1918 1919 /** 1920 * reload the sub-tree structure from file. 1921 * <p> 1922 * reloadTree(Group g) is useful when the structure of the group in file is changed while the group structure in 1923 * memory is not changed. 1924 * 1925 * @param g 1926 * the group where the structure is to be reloaded in memory 1927 */ 1928 public void reloadTree(Group g) { 1929 if (fid < 0 || rootObject == null || g == null) { 1930 log.debug("reloadTree(): Invalid fid or null object"); 1931 return; 1932 } 1933 1934 depth_first(g, Integer.MIN_VALUE); 1935 } 1936 1937 /* 1938 * (non-Javadoc) NOTE: Object references are copied but not updated by this method. 
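     *
     * A brief usage sketch (placeholder paths; the destination group is assumed to exist and the
     * file to be open with write access):
     *
     * <pre>
     * HObject src = h5file.get("/dset");
     * Group dst = (Group) h5file.get("/copies");
     * HObject copied = h5file.copy(src, dst, "dset_copy");
     * </pre>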
1939 * 1940 * @see hdf.object.FileFormat#copy(hdf.object.HObject, hdf.object.Group, java.lang.String) 1941 */ 1942 @Override 1943 public HObject copy(HObject srcObj, Group dstGroup, String dstName) throws Exception { 1944 log.trace("copy(): start: srcObj={} dstGroup={} dstName={}", srcObj, dstGroup, dstName); 1945 if ((srcObj == null) || (dstGroup == null)) { 1946 log.debug("copy(): srcObj or dstGroup is null"); 1947 return null; 1948 } 1949 1950 if (dstName == null) { 1951 dstName = srcObj.getName(); 1952 } 1953 1954 List<HObject> members = dstGroup.getMemberList(); 1955 int n = members.size(); 1956 for (int i = 0; i < n; i++) { 1957 HObject obj = members.get(i); 1958 String name = obj.getName(); 1959 while (name.equals(dstName)) 1960 dstName += "~copy"; 1961 } 1962 1963 HObject newObj = null; 1964 if (srcObj instanceof Dataset) { 1965 log.trace("copy(): srcObj instanceof Dataset"); 1966 newObj = copyDataset((Dataset) srcObj, (H5Group) dstGroup, dstName); 1967 } 1968 else if (srcObj instanceof H5Group) { 1969 log.trace("copy(): srcObj instanceof H5Group"); 1970 newObj = copyGroup((H5Group) srcObj, (H5Group) dstGroup, dstName); 1971 } 1972 else if (srcObj instanceof H5Datatype) { 1973 log.trace("copy(): srcObj instanceof H5Datatype"); 1974 newObj = copyDatatype((H5Datatype) srcObj, (H5Group) dstGroup, dstName); 1975 } 1976 1977 return newObj; 1978 } 1979 1980 /* 1981 * (non-Javadoc) 1982 * 1983 * @see hdf.object.FileFormat#delete(hdf.object.HObject) 1984 */ 1985 @Override 1986 public void delete(HObject obj) throws Exception { 1987 if ((obj == null) || (fid < 0)) { 1988 log.debug("delete(): Invalid FID or object is null"); 1989 return; 1990 } 1991 1992 String name = obj.getPath() + obj.getName(); 1993 1994 H5.H5Ldelete(fid, name, HDF5Constants.H5P_DEFAULT); 1995 } 1996 1997 /* 1998 * (non-Javadoc) 1999 * 2000 * @see hdf.object.FileFormat#writeAttribute(hdf.object.HObject, hdf.object.Attribute, boolean) 2001 */ 2002 @Override 2003 public void writeAttribute(HObject obj, Attribute attr, boolean attrExisted) throws HDF5Exception { 2004 String obj_name = obj.getFullName(); 2005 String name = attr.getName(); 2006 long tid = -1; 2007 long sid = -1; 2008 long aid = -1; 2009 log.trace("writeAttribute(): name is {}", name); 2010 2011 long objID = obj.open(); 2012 if (objID < 0) { 2013 log.debug("writeAttribute(): Invalid Object ID"); 2014 return; 2015 } 2016 2017 if ((tid = attr.getDatatype().createNative()) >= 0) { 2018 log.trace("writeAttribute(): tid {} from toNative :{}", tid, attr.getDatatype().getDescription()); 2019 try { 2020 if (attr.isScalar()) 2021 sid = H5.H5Screate(HDF5Constants.H5S_SCALAR); 2022 else 2023 sid = H5.H5Screate_simple(attr.getRank(), attr.getDims(), null); 2024 2025 if (attrExisted) { 2026 aid = H5.H5Aopen_by_name(objID, obj_name, name, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 2027 } 2028 else { 2029 aid = H5.H5Acreate(objID, name, tid, sid, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 2030 } 2031 log.trace("writeAttribute(): aid {} opened/created", aid); 2032 2033 // update value of the attribute 2034 Object attrValue; 2035 try { 2036 attrValue = attr.getData(); 2037 } 2038 catch (Exception ex) { 2039 attrValue = null; 2040 log.trace("writeAttribute(): getData() failure:", ex); 2041 } 2042 2043 log.trace("writeAttribute(): getValue"); 2044 if (attrValue != null) { 2045 if (attr.getDatatype().isVLEN()) { 2046 log.trace("writeAttribute(): isVLEN"); 2047 try { 2048 /* 2049 * must use native type to write attribute data to file (see bug 1069) 2050 */ 
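                        // Convert to the native datatype and release the original type identifier;
                        // the variable-length write below expects the native type id.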
2051 long tmptid = tid; 2052 tid = H5.H5Tget_native_type(tmptid); 2053 try { 2054 H5.H5Tclose(tmptid); 2055 } 2056 catch (Exception ex) { 2057 log.debug("writeAttribute(): H5Tclose(tmptid {}) failure: ", tmptid, ex); 2058 } 2059 log.trace("writeAttribute(): H5.H5AwriteVL, {} : {}", name, attr.getDatatype().getDescription()); 2060 if ((attrValue instanceof String) || (attr.getDims().length == 1)) { 2061 H5.H5AwriteVL(aid, tid, (String[]) attrValue); 2062 } 2063 else { 2064 log.info("writeAttribute(): Datatype is not a string, unable to write {} data", name); 2065 } 2066 } 2067 catch (Exception ex) { 2068 log.debug("writeAttribute(): {} native type failure: ", name, ex); 2069 } 2070 } 2071 else { 2072 if (attr.getDatatype().isRef() && attrValue instanceof String) { 2073 // reference is a path+name to the object 2074 attrValue = H5.H5Rcreate(getFID(), (String) attrValue, HDF5Constants.H5R_OBJECT, -1); 2075 log.trace("writeAttribute(): Attribute class is CLASS_REFERENCE"); 2076 } 2077 else if (Array.get(attrValue, 0) instanceof String) { 2078 long size = attr.getDatatype().getDatatypeSize(); 2079 int len = ((String[]) attrValue).length; 2080 byte[] bval = Dataset.stringToByte((String[]) attrValue, (int)size); 2081 if (bval != null && bval.length == size * len) { 2082 bval[bval.length - 1] = 0; 2083 attrValue = bval; 2084 } 2085 log.trace("writeAttribute(): String={}: {}", attrValue, name); 2086 } 2087 2088 try { 2089 /* 2090 * must use native type to write attribute data to file (see bug 1069) 2091 */ 2092 long tmptid = tid; 2093 tid = H5.H5Tget_native_type(tmptid); 2094 try { 2095 H5.H5Tclose(tmptid); 2096 } 2097 catch (Exception ex) { 2098 log.debug("writeAttribute(): H5Tclose(tmptid {}) failure: ", tmptid, ex); 2099 } 2100 log.trace("writeAttribute(): H5.H5Awrite, {} :{}", name, attr.getDatatype().getDescription()); 2101 H5.H5Awrite(aid, tid, attrValue); 2102 } 2103 catch (Exception ex) { 2104 log.debug("writeAttribute(): native type failure: ", ex); 2105 } 2106 } 2107 } // (attrValue != null) 2108 } 2109 finally { 2110 try { 2111 H5.H5Tclose(tid); 2112 } 2113 catch (Exception ex) { 2114 log.debug("writeAttribute(): H5Tclose(tid {}) failure: ", tid, ex); 2115 } 2116 try { 2117 H5.H5Sclose(sid); 2118 } 2119 catch (Exception ex) { 2120 log.debug("writeAttribute(): H5Sclose(sid {}) failure: ", sid, ex); 2121 } 2122 try { 2123 H5.H5Aclose(aid); 2124 } 2125 catch (Exception ex) { 2126 log.debug("writeAttribute(): H5Aclose(aid {}) failure: ", aid, ex); 2127 } 2128 } 2129 } 2130 else { 2131 log.debug("writeAttribute(): toNative failure"); 2132 } 2133 2134 obj.close(objID); 2135 } 2136 2137 /*************************************************************************** 2138 * Implementations for methods specific to H5File 2139 **************************************************************************/ 2140 2141 /** 2142 * Opens a file with a specific file access property list. 2143 * <p> 2144 * This function does the same as "long open()" except that you can also pass an HDF5 file access property to file 2145 * open. For example, 2146 * 2147 * <pre> 2148 * // All open objects remaining in the file are closed then file is closed 2149 * long plist = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS); 2150 * H5.H5Pset_fclose_degree(plist, HDF5Constants.H5F_CLOSE_STRONG); 2151 * long fid = open(plist); 2152 * </pre> 2153 * 2154 * @param plist 2155 * a file access property list identifier. 2156 * 2157 * @return the file identifier if successful; otherwise returns negative value.
2158 * 2159 * @throws Exception 2160 * If there is a failure. 2161 */ 2162 public long open(long plist) throws Exception { 2163 return open(true, plist); 2164 } 2165 2166 /*************************************************************************** 2167 * Private methods. 2168 **************************************************************************/ 2169 2170 /** 2171 * Opens access to this file. 2172 * 2173 * @param loadFullHierarchy 2174 * if true, load the full hierarchy into memory; otherwise just opens the file identifier. 2175 * 2176 * @return the file identifier if successful; otherwise returns negative value. 2177 * 2178 * @throws Exception 2179 * If there is a failure. 2180 */ 2181 private long open(boolean loadFullHierarchy) throws Exception { 2182 long the_fid = -1; 2183 2184 long plist = HDF5Constants.H5P_DEFAULT; 2185 2186 // BUG: HDF5Constants.H5F_CLOSE_STRONG does not flush cache 2187 /** 2188 * try { //All open objects remaining in the file are closed // then file is closed plist = 2189 * H5.H5Pcreate (HDF5Constants.H5P_FILE_ACCESS); H5.H5Pset_fclose_degree ( plist, 2190 * HDF5Constants.H5F_CLOSE_STRONG); } catch (Exception ex) {} the_fid = open(loadFullHierarchy, 2191 * plist); try { H5.H5Pclose(plist); } catch (Exception ex) {} 2192 */ 2193 2194 log.trace("open(): loadFull={}", loadFullHierarchy); 2195 the_fid = open(loadFullHierarchy, plist); 2196 2197 return the_fid; 2198 } 2199 2200 /** 2201 * Opens access to this file. 2202 * 2203 * @param loadFullHierarchy 2204 * if true, load the full hierarchy into memory; otherwise just opens the file identifier. 2205 * 2206 * @return the file identifier if successful; otherwise returns negative value. 2207 * 2208 * @throws Exception 2209 * If there is a failure. 2210 */ 2211 private long open(boolean loadFullHierarchy, long plist) throws Exception { 2212 log.trace("open(loadFullHierarchy = {}, plist = {}): start", loadFullHierarchy, plist); 2213 if (fid > 0) { 2214 log.trace("open(): FID already opened"); 2215 return fid; // file is opened already 2216 } 2217 2218 // The cwd may be changed at Dataset.read() by System.setProperty("user.dir", newdir) 2219 // to make it work for external datasets. We need to set it back 2220 // before the file is closed/opened. 
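        // Prefer the HDFView working directory property if it has been set; otherwise
        // fall back to the JVM's current working directory.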
2221 String rootPath = System.getProperty("hdfview.workdir"); 2222 if (rootPath == null) { 2223 rootPath = System.getProperty("user.dir"); 2224 } 2225 System.setProperty("user.dir", rootPath); 2226 2227 // check for valid file access permission 2228 if (flag < 0) { 2229 log.debug("open(): Invalid access identifier -- " + flag); 2230 throw new HDF5Exception("Invalid access identifier -- " + flag); 2231 } 2232 else if (HDF5Constants.H5F_ACC_CREAT == flag) { 2233 // create a new file 2234 log.trace("open(): create file"); 2235 fid = H5.H5Fcreate(fullFileName, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 2236 H5.H5Fflush(fid, HDF5Constants.H5F_SCOPE_LOCAL); 2237 H5.H5Fclose(fid); 2238 flag = HDF5Constants.H5F_ACC_RDWR; 2239 } 2240 else if (!exists()) { 2241 log.debug("open(): File {} does not exist", fullFileName); 2242 throw new HDF5Exception("File does not exist -- " + fullFileName); 2243 } 2244 else if (((flag == HDF5Constants.H5F_ACC_RDWR) || (flag == HDF5Constants.H5F_ACC_CREAT)) && !canWrite()) { 2245 log.debug("open(): Cannot write file {}", fullFileName); 2246 throw new HDF5Exception("Cannot write file, try opening as read-only -- " + fullFileName); 2247 } 2248 else if ((flag == HDF5Constants.H5F_ACC_RDONLY) && !canRead()) { 2249 log.debug("open(): Cannot read file {}", fullFileName); 2250 throw new HDF5Exception("Cannot read file -- " + fullFileName); 2251 } 2252 2253 try { 2254 fid = H5.H5Fopen(fullFileName, flag, plist); 2255 } 2256 catch (Exception ex) { 2257 try { 2258 log.debug("open(): open failed, attempting to open file read-only"); 2259 fid = H5.H5Fopen(fullFileName, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); 2260 isReadOnly = true; 2261 } 2262 catch (Exception ex2) { 2263 // Attempt to open the file as a split file or family file 2264 try { 2265 File tmpf = new File(fullFileName); 2266 String tmpname = tmpf.getName(); 2267 int idx = tmpname.lastIndexOf('.'); 2268 2269 if (tmpname.contains("-m")) { 2270 log.debug("open(): open read-only failed, attempting to open split file"); 2271 2272 while (idx > 0) { 2273 char c = tmpname.charAt(idx - 1); 2274 if (c != '-') 2275 idx--; 2276 else 2277 break; 2278 } 2279 2280 if (idx > 0) { 2281 tmpname = tmpname.substring(0, idx - 1); 2282 log.trace("open(): attempting to open split file with name {}", tmpname); 2283 long pid = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS); 2284 H5.H5Pset_fapl_split(pid, "-m.h5", HDF5Constants.H5P_DEFAULT, "-r.h5", HDF5Constants.H5P_DEFAULT); 2285 fid = H5.H5Fopen(tmpf.getParent() + File.separator + tmpname, flag, pid); 2286 H5.H5Pclose(pid); 2287 } 2288 } 2289 else { 2290 log.debug("open(): open read-only failed, checking for file family"); 2291 // try to see if it is a file family, always open a family file 2292 // from the first one since other files will not be recognized 2293 // as an HDF5 file 2294 int cnt = idx; 2295 while (idx > 0) { 2296 char c = tmpname.charAt(idx - 1); 2297 if (Character.isDigit(c)) 2298 idx--; 2299 else 2300 break; 2301 } 2302 2303 if (idx > 0) { 2304 cnt -= idx; 2305 tmpname = tmpname.substring(0, idx) + "%0" + cnt + "d" + tmpname.substring(tmpname.lastIndexOf('.')); 2306 log.trace("open(): attempting to open file family with name {}", tmpname); 2307 long pid = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS); 2308 H5.H5Pset_fapl_family(pid, 0, HDF5Constants.H5P_DEFAULT); 2309 fid = H5.H5Fopen(tmpf.getParent() + File.separator + tmpname, flag, pid); 2310 H5.H5Pclose(pid); 2311 } 2312 } 2313 } 2314 catch (Exception ex3) { 2315
log.debug("open(): open failed: ", ex3); 2316 } 2317 } 2318 } 2319 2320 if ((fid >= 0) && loadFullHierarchy) { 2321 // load the hierarchy of the file 2322 loadIntoMemory(); 2323 } 2324 2325 log.trace("open(loadFullHeirarchy = {}, plist = {}): finish", loadFullHierarchy, plist); 2326 return fid; 2327 } 2328 2329 /** 2330 * Loads the file structure into memory. 2331 */ 2332 private void loadIntoMemory() { 2333 if (fid < 0) { 2334 log.debug("loadIntoMemory(): Invalid FID"); 2335 return; 2336 } 2337 2338 /* 2339 * TODO: Root group's name should be changed to 'this.getName()' and all 2340 * previous accesses of this field should now use getPath() instead of getName() 2341 * to get the root group. The root group actually does have a path of "/". The 2342 * depth_first method will have to be changed to setup other object paths 2343 * appropriately, as it currently assumes the root path to be null. 2344 */ 2345 rootObject = new H5Group(this, "/", null, null); 2346 log.trace("loadIntoMemory(): depth_first on root"); 2347 depth_first(rootObject, 0); 2348 } 2349 2350 /** 2351 * Retrieves the file structure by depth-first order, recursively. The current implementation retrieves groups and 2352 * datasets only. It does not include named datatypes and soft links. 2353 * <p> 2354 * It also detects and stops loops. A loop is detected if there exists an object with the same object ID by tracing 2355 * a path back up to the root. 2356 * 2357 * @param parentObject 2358 * the parent object. 2359 */ 2360 @SuppressWarnings("deprecation") 2361 private int depth_first(HObject parentObject, int nTotal) { 2362 log.trace("depth_first({}): start", parentObject); 2363 2364 int nelems; 2365 String fullPath = null; 2366 String ppath = null; 2367 long gid = -1; 2368 2369 H5Group pgroup = (H5Group) parentObject; 2370 ppath = pgroup.getPath(); 2371 2372 if (ppath == null) { 2373 fullPath = HObject.SEPARATOR; 2374 } 2375 else { 2376 fullPath = ppath + pgroup.getName() + HObject.SEPARATOR; 2377 } 2378 2379 nelems = 0; 2380 try { 2381 gid = pgroup.open(); 2382 H5G_info_t info = H5.H5Gget_info(gid); 2383 nelems = (int) info.nlinks; 2384 } 2385 catch (HDF5Exception ex) { 2386 nelems = -1; 2387 log.debug("depth_first({}): H5Gget_info(gid {}) failure: ", parentObject, gid, ex); 2388 } 2389 2390 if (nelems <= 0) { 2391 pgroup.close(gid); 2392 log.debug("depth_first({}): nelems <= 0", parentObject); 2393 return nTotal; 2394 } 2395 2396 // since each call of H5.H5Gget_objname_by_idx() takes about one second. 2397 // 1,000,000 calls take 12 days. 
Instead of calling it in a loop, 2398 // we use only one call to get all the information, which takes about 2399 // two seconds 2400 int[] objTypes = new int[nelems]; 2401 long[] fNos = new long[nelems]; 2402 long[] objRefs = new long[nelems]; 2403 String[] objNames = new String[nelems]; 2404 2405 try { 2406 H5.H5Gget_obj_info_full(fid, fullPath, objNames, objTypes, null, fNos, objRefs, indexType, indexOrder); 2407 } 2408 catch (HDF5Exception ex) { 2409 log.debug("depth_first({}): failure: ", parentObject, ex); 2410 ex.printStackTrace(); 2411 return nTotal; 2412 } 2413 2414 int nStart = getStartMembers(); 2415 int nMax = getMaxMembers(); 2416 2417 String obj_name; 2418 int obj_type; 2419 2420 // Iterate through the file to see members of the group 2421 for (int i = 0; i < nelems; i++) { 2422 obj_name = objNames[i]; 2423 obj_type = objTypes[i]; 2424 log.trace("depth_first({}): obj_name={}, obj_type={}", parentObject, obj_name, obj_type); 2425 long oid[] = { objRefs[i], fNos[i] }; 2426 2427 if (obj_name == null) { 2428 log.trace("depth_first({}): continue after null obj_name", parentObject); 2429 continue; 2430 } 2431 2432 nTotal++; 2433 2434 if (nMax > 0) { 2435 if ((nTotal - nStart) >= nMax) 2436 break; // loaded enough objects 2437 } 2438 2439 boolean skipLoad = false; 2440 if ((nTotal > 0) && (nTotal < nStart)) 2441 skipLoad = true; 2442 2443 // create a new group 2444 if (obj_type == HDF5Constants.H5O_TYPE_GROUP) { 2445 H5Group g = new H5Group(this, obj_name, fullPath, pgroup); 2446 2447 pgroup.addToMemberList(g); 2448 2449 // detect and stop loops 2450 // a loop is detected if there exists object with the same 2451 // object ID by tracing path back up to the root. 2452 boolean hasLoop = false; 2453 H5Group tmpObj = (H5Group) parentObject; 2454 2455 while (tmpObj != null) { 2456 if (tmpObj.equalsOID(oid) && (tmpObj.getPath() != null)) { 2457 hasLoop = true; 2458 break; 2459 } 2460 else { 2461 tmpObj = (H5Group) tmpObj.getParent(); 2462 } 2463 } 2464 2465 // recursively go through the next group 2466 // stops if it has loop. 
2467 if (!hasLoop) { 2468 nTotal = depth_first(g, nTotal); 2469 } 2470 } 2471 else if (skipLoad) { 2472 continue; 2473 } 2474 else if (obj_type == HDF5Constants.H5O_TYPE_DATASET) { 2475 long did = -1; 2476 long tid = -1; 2477 int tclass = -1; 2478 try { 2479 did = H5.H5Dopen(fid, fullPath + obj_name, HDF5Constants.H5P_DEFAULT); 2480 if (did >= 0) { 2481 tid = H5.H5Dget_type(did); 2482 2483 tclass = H5.H5Tget_class(tid); 2484 if ((tclass == HDF5Constants.H5T_ARRAY) || (tclass == HDF5Constants.H5T_VLEN)) { 2485 // for ARRAY, the type is determined by the base type 2486 long btid = H5.H5Tget_super(tid); 2487 2488 tclass = H5.H5Tget_class(btid); 2489 2490 try { 2491 H5.H5Tclose(btid); 2492 } 2493 catch (Exception ex) { 2494 log.debug("depth_first({})[{}] dataset {} H5Tclose(btid {}) failure: ", parentObject, i, obj_name, btid, ex); 2495 } 2496 } 2497 } 2498 else { 2499 log.debug("depth_first({})[{}] {} dataset open failure", parentObject, i, obj_name); 2500 } 2501 } 2502 catch (Exception ex) { 2503 log.debug("depth_first({})[{}] {} dataset access failure: ", parentObject, i, obj_name, ex); 2504 } 2505 finally { 2506 try { 2507 H5.H5Tclose(tid); 2508 } 2509 catch (Exception ex) { 2510 log.debug("depth_first({})[{}] daatset {} H5Tclose(tid {}) failure: ", parentObject, i, obj_name, tid, ex); 2511 } 2512 try { 2513 H5.H5Dclose(did); 2514 } 2515 catch (Exception ex) { 2516 log.debug("depth_first({})[{}] dataset {} H5Dclose(did {}) failure: ", parentObject, i, obj_name, did, ex); 2517 } 2518 } 2519 Dataset d = null; 2520 if (tclass == HDF5Constants.H5T_COMPOUND) { 2521 // create a new compound dataset 2522 d = new H5CompoundDS(this, obj_name, fullPath, oid); // deprecated! 2523 } 2524 else { 2525 // create a new scalar dataset 2526 d = new H5ScalarDS(this, obj_name, fullPath, oid); // deprecated! 2527 } 2528 2529 pgroup.addToMemberList(d); 2530 } 2531 else if (obj_type == HDF5Constants.H5O_TYPE_NAMED_DATATYPE) { 2532 Datatype t = new H5Datatype(parentObject.getFileFormat(), obj_name, fullPath, oid); // deprecated! 2533 log.trace("depth_first({}): H5O_TYPE_NAMED_DATATYPE name={}", parentObject, t.getFullName()); 2534 2535 pgroup.addToMemberList(t); 2536 } 2537 else if (obj_type == HDF5Constants.H5O_TYPE_UNKNOWN) { 2538 H5Link link = new H5Link(this, obj_name, fullPath, oid); 2539 2540 pgroup.addToMemberList(link); 2541 continue; // do the next one, if the object is not identified. 2542 } 2543 } // ( i = 0; i < nelems; i++) 2544 2545 pgroup.close(gid); 2546 2547 return nTotal; 2548 } // private depth_first() 2549 2550 /** 2551 * Returns a list of all the members of this H5File in a 2552 * breadth-first ordering that are rooted at the specified 2553 * object. 
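     *
     * @param obj
     *            the object (typically a group) at which the breadth-first traversal starts.
     *
     * @return the list of members in breadth-first order, including <code>obj</code> itself.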
2554 */ 2555 private static List<HObject> getMembersBreadthFirst(HObject obj) { 2556 List<HObject> allMembers = new Vector<>(); 2557 Queue<HObject> queue = new LinkedList<>(); 2558 HObject currentObject = obj; 2559 2560 queue.add(currentObject); 2561 2562 while(!queue.isEmpty()) { 2563 currentObject = queue.remove(); 2564 allMembers.add(currentObject); 2565 2566 if(currentObject instanceof Group) { 2567 queue.addAll(((Group) currentObject).getMemberList()); 2568 } 2569 } 2570 2571 return allMembers; 2572 } 2573 2574 private HObject copyDataset(Dataset srcDataset, H5Group pgroup, String dstName) throws Exception { 2575 Dataset dataset = null; 2576 long srcdid = -1, dstdid = -1; 2577 long ocp_plist_id = -1; 2578 String dname = null, path = null; 2579 2580 if (pgroup.isRoot()) { 2581 path = HObject.SEPARATOR; 2582 } 2583 else { 2584 path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR; 2585 } 2586 2587 if ((dstName == null) || dstName.equals(HObject.SEPARATOR) || (dstName.length() < 1)) { 2588 dstName = srcDataset.getName(); 2589 } 2590 dname = path + dstName; 2591 2592 try { 2593 srcdid = srcDataset.open(); 2594 dstdid = pgroup.open(); 2595 2596 try { 2597 ocp_plist_id = H5.H5Pcreate(HDF5Constants.H5P_OBJECT_COPY); 2598 H5.H5Pset_copy_object(ocp_plist_id, HDF5Constants.H5O_COPY_EXPAND_REFERENCE_FLAG); 2599 H5.H5Ocopy(srcdid, ".", dstdid, dstName, ocp_plist_id, HDF5Constants.H5P_DEFAULT); 2600 } 2601 catch (Exception ex) { 2602 log.debug("copyDataset(): {} failure: ", dname, ex); 2603 } 2604 finally { 2605 try { 2606 H5.H5Pclose(ocp_plist_id); 2607 } 2608 catch (Exception ex) { 2609 log.debug("copyDataset(): {} H5Pclose(ocp_plist_id {}) failure: ", dname, ocp_plist_id, ex); 2610 } 2611 } 2612 2613 if (srcDataset instanceof H5ScalarDS) { 2614 dataset = new H5ScalarDS(pgroup.getFileFormat(), dstName, path); 2615 } 2616 else { 2617 dataset = new H5CompoundDS(pgroup.getFileFormat(), dstName, path); 2618 } 2619 2620 pgroup.addToMemberList(dataset); 2621 } 2622 finally { 2623 try { 2624 srcDataset.close(srcdid); 2625 } 2626 catch (Exception ex) { 2627 log.debug("copyDataset(): {} srcDataset.close(srcdid {}) failure: ", dname, srcdid, ex); 2628 } 2629 try { 2630 pgroup.close(dstdid); 2631 } 2632 catch (Exception ex) { 2633 log.debug("copyDataset(): {} pgroup.close(dstdid {}) failure: ", dname, dstdid, ex); 2634 } 2635 } 2636 2637 return dataset; 2638 } 2639 2640 /** 2641 * Constructs a dataset for specified dataset identifier. 2642 * 2643 * @param did 2644 * the dataset identifier 2645 * @param name 2646 * the name of the dataset 2647 * @param path 2648 * the path of the dataset 2649 * 2650 * @return the dataset if successful; otherwise return null. 2651 * 2652 * @throws HDF5Exception 2653 * If there is an error at the HDF5 library level. 
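     * <p>
     * The dataset identifier <code>did</code> is only used to determine the datatype class;
     * it is not closed by this method, so the caller remains responsible for closing it.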
2654 */ 2655 private Dataset getDataset(long did, String name, String path) throws HDF5Exception { 2656 Dataset dataset = null; 2657 if (did >= 0) { 2658 long tid = -1; 2659 int tclass = -1; 2660 try { 2661 tid = H5.H5Dget_type(did); 2662 tclass = H5.H5Tget_class(tid); 2663 if (tclass == HDF5Constants.H5T_ARRAY) { 2664 // for ARRAY, the type is determined by the base type 2665 long btid = H5.H5Tget_super(tid); 2666 tclass = H5.H5Tget_class(btid); 2667 try { 2668 H5.H5Tclose(btid); 2669 } 2670 catch (Exception ex) { 2671 log.debug("getDataset(): {} H5Tclose(btid {}) failure: ", name, btid, ex); 2672 } 2673 } 2674 } 2675 finally { 2676 try { 2677 H5.H5Tclose(tid); 2678 } 2679 catch (Exception ex) { 2680 log.debug("getDataset(): {} H5Tclose(tid {}) failure: ", name, tid, ex); 2681 } 2682 } 2683 2684 if (tclass == HDF5Constants.H5T_COMPOUND) { 2685 dataset = new H5CompoundDS(this, name, path); 2686 } 2687 else { 2688 dataset = new H5ScalarDS(this, name, path); 2689 } 2690 } 2691 else { 2692 log.debug("getDataset(): id failure"); 2693 } 2694 2695 return dataset; 2696 } 2697 2698 /** 2699 * Copies a named datatype to another location. 2700 * 2701 * @param srcType 2702 * the source datatype 2703 * @param pgroup 2704 * the group which the new datatype is copied to 2705 * @param dstName 2706 * the name of the new dataype 2707 * 2708 * @throws Exception 2709 * If there is a failure. 2710 */ 2711 private HObject copyDatatype(Datatype srcType, H5Group pgroup, String dstName) throws Exception { 2712 Datatype datatype = null; 2713 long tid_src = -1; 2714 long gid_dst = -1; 2715 String path = null; 2716 2717 if (pgroup.isRoot()) { 2718 path = HObject.SEPARATOR; 2719 } 2720 else { 2721 path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR; 2722 } 2723 2724 if ((dstName == null) || dstName.equals(HObject.SEPARATOR) || (dstName.length() < 1)) { 2725 dstName = srcType.getName(); 2726 } 2727 2728 try { 2729 tid_src = srcType.open(); 2730 gid_dst = pgroup.open(); 2731 2732 try { 2733 H5.H5Ocopy(tid_src, ".", gid_dst, dstName, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 2734 } 2735 catch (Exception ex) { 2736 log.debug("copyDatatype(): {} H5Ocopy(tid_src {}) failure: ", dstName, tid_src, ex); 2737 } 2738 datatype = new H5Datatype(pgroup.getFileFormat(), dstName, path); 2739 2740 pgroup.addToMemberList(datatype); 2741 } 2742 finally { 2743 try { 2744 srcType.close(tid_src); 2745 } 2746 catch (Exception ex) { 2747 log.debug("copyDatatype(): {} srcType.close(tid_src {}) failure: ", dstName, tid_src, ex); 2748 } 2749 try { 2750 pgroup.close(gid_dst); 2751 } 2752 catch (Exception ex) { 2753 log.debug("copyDatatype(): {} pgroup.close(gid_dst {}) failure: ", dstName, gid_dst, ex); 2754 } 2755 } 2756 2757 return datatype; 2758 } 2759 2760 /** 2761 * Copies a group and its members to a new location. 2762 * 2763 * @param srcGroup 2764 * the source group 2765 * @param dstGroup 2766 * the location where the new group is located 2767 * @param dstName 2768 * the name of the new group 2769 * 2770 * @throws Exception 2771 * If there is a failure. 
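     *
     * @return the newly created group.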
2772 */ 2773 private HObject copyGroup(H5Group srcGroup, H5Group dstGroup, String dstName) throws Exception { 2774 H5Group group = null; 2775 long srcgid = -1, dstgid = -1; 2776 String path = null; 2777 2778 if (dstGroup.isRoot()) { 2779 path = HObject.SEPARATOR; 2780 } 2781 else { 2782 path = dstGroup.getPath() + dstGroup.getName() + HObject.SEPARATOR; 2783 } 2784 2785 if ((dstName == null) || dstName.equals(HObject.SEPARATOR) || (dstName.length() < 1)) { 2786 dstName = srcGroup.getName(); 2787 } 2788 2789 try { 2790 srcgid = srcGroup.open(); 2791 dstgid = dstGroup.open(); 2792 try { 2793 H5.H5Ocopy(srcgid, ".", dstgid, dstName, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 2794 } 2795 catch (Exception ex) { 2796 log.debug("copyGroup(): {} H5Ocopy(srcgid {}) failure: ", dstName, srcgid, ex); 2797 } 2798 2799 group = new H5Group(dstGroup.getFileFormat(), dstName, path, dstGroup); 2800 depth_first(group, Integer.MIN_VALUE); // reload all 2801 dstGroup.addToMemberList(group); 2802 } 2803 2804 finally { 2805 try { 2806 srcGroup.close(srcgid); 2807 } 2808 catch (Exception ex) { 2809 log.debug("copyGroup(): {} srcGroup.close(srcgid {}) failure: ", dstName, srcgid, ex); 2810 } 2811 try { 2812 dstGroup.close(dstgid); 2813 } 2814 catch (Exception ex) { 2815 log.debug("copyGroup(): {} dstGroup.close(dstgid {}) failure: ", dstName, dstgid, ex); 2816 } 2817 } 2818 2819 return group; 2820 } 2821 2822 /** 2823 * Constructs a group for the specified group identifier and retrieves its members. 2824 * 2825 * @param gid 2826 * The group identifier. 2827 * @param name 2828 * The group name. 2829 * @param pGroup 2830 * The parent group, or null for the root group. 2831 * 2832 * @return The group if successful; otherwise returns null. 2833 * 2834 * @throws HDF5Exception 2835 * If there is an error at the HDF5 library level.
2836 */ 2837 private H5Group getGroup(long gid, String name, Group pGroup) throws HDF5Exception { 2838 String parentPath = null; 2839 String thisFullName = null; 2840 String memberFullName = null; 2841 2842 if (pGroup == null) { 2843 thisFullName = name = "/"; 2844 } 2845 else { 2846 parentPath = pGroup.getFullName(); 2847 if ((parentPath == null) || parentPath.equals("/")) { 2848 thisFullName = "/" + name; 2849 } 2850 else { 2851 thisFullName = parentPath + "/" + name; 2852 } 2853 } 2854 2855 // get rid of any extra "/" 2856 if (parentPath != null) { 2857 parentPath = parentPath.replaceAll("//", "/"); 2858 } 2859 if (thisFullName != null) { 2860 thisFullName = thisFullName.replaceAll("//", "/"); 2861 } 2862 2863 log.trace("getGroup(): fullName={}", thisFullName); 2864 2865 H5Group group = new H5Group(this, name, parentPath, pGroup); 2866 2867 H5G_info_t group_info = null; 2868 H5O_info_t obj_info = null; 2869 long oid = -1; 2870 String link_name = null; 2871 try { 2872 group_info = H5.H5Gget_info(gid); 2873 } 2874 catch (Exception ex) { 2875 log.debug("getGroup(): {} H5Gget_info(gid {}) failure: ", name, gid, ex); 2876 } 2877 try { 2878 oid = H5.H5Oopen(gid, thisFullName, HDF5Constants.H5P_DEFAULT); 2879 } 2880 catch (Exception ex) { 2881 log.debug("getGroup(): {} H5Oopen(gid {}) failure: ", name, gid, ex); 2882 } 2883 2884 // retrieve only the immediate members of the group, do not follow 2885 // subgroups 2886 for (int i = 0; i < group_info.nlinks; i++) { 2887 try { 2888 link_name = H5.H5Lget_name_by_idx(gid, thisFullName, indexType, indexOrder, i, HDF5Constants.H5P_DEFAULT); 2889 obj_info = H5.H5Oget_info_by_idx(oid, thisFullName, indexType, indexOrder, i, HDF5Constants.H5P_DEFAULT); 2890 } 2891 catch (HDF5Exception ex) { 2892 log.debug("getGroup()[{}]: {} name,info failure: ", i, name, ex); 2893 // do not stop if accessing one member fails 2894 continue; 2895 } 2896 // create a new group 2897 if (obj_info.type == HDF5Constants.H5O_TYPE_GROUP) { 2898 H5Group g = new H5Group(this, link_name, thisFullName, group); 2899 group.addToMemberList(g); 2900 } 2901 else if (obj_info.type == HDF5Constants.H5O_TYPE_DATASET) { 2902 long did = -1; 2903 Dataset d = null; 2904 2905 if ((thisFullName == null) || thisFullName.equals("/")) { 2906 memberFullName = "/" + link_name; 2907 } 2908 else { 2909 memberFullName = thisFullName + "/" + link_name; 2910 } 2911 2912 try { 2913 did = H5.H5Dopen(fid, memberFullName, HDF5Constants.H5P_DEFAULT); 2914 d = getDataset(did, link_name, thisFullName); 2915 } 2916 finally { 2917 try { 2918 H5.H5Dclose(did); 2919 } 2920 catch (Exception ex) { 2921 log.debug("getGroup()[{}]: {} H5Dclose(did {}) failure: ", i, name, did, ex); 2922 } 2923 } 2924 group.addToMemberList(d); 2925 } 2926 else if (obj_info.type == HDF5Constants.H5O_TYPE_NAMED_DATATYPE) { 2927 Datatype t = new H5Datatype(group.getFileFormat(), link_name, thisFullName); 2928 group.addToMemberList(t); 2929 } 2930 } // End of for loop. 2931 try { 2932 if (oid >= 0) 2933 H5.H5Oclose(oid); 2934 } 2935 catch (Exception ex) { 2936 log.debug("getGroup(): {} H5Oclose(oid {}) failure: ", name, oid, ex); 2937 } 2938 2939 return group; 2940 } 2941 2942 /** 2943 * Retrieves the name of the target object that is being linked to. 2944 * 2945 * @param obj 2946 * The current link object. 2947 * 2948 * @return The name of the target object. 2949 * 2950 * @throws Exception 2951 * If there is an error at the HDF5 library level. 
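     * <p>
     * For example, a sketch (placeholder path; assumes "/slink" is a soft or external link in an
     * open file <code>h5file</code>):
     *
     * <pre>
     * HObject linkObj = h5file.get("/slink");
     * String target = H5File.getLinkTargetName(linkObj);
     * </pre>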
2952 */ 2953 public static String getLinkTargetName(HObject obj) throws Exception { 2954 String[] link_value = { null, null }; 2955 String targetObjName = null; 2956 2957 if (obj == null) { 2958 log.debug("getLinkTargetName(): object is null"); 2959 return null; 2960 } 2961 2962 if (obj.getFullName().equals("/")) { 2963 log.debug("getLinkTargetName(): object is root group, links not allowed"); 2964 return null; 2965 } 2966 2967 H5L_info_t link_info = null; 2968 try { 2969 link_info = H5.H5Lget_info(obj.getFID(), obj.getFullName(), HDF5Constants.H5P_DEFAULT); 2970 } 2971 catch (Exception err) { 2972 log.debug("getLinkTargetName(): H5Lget_info {} failure: ", obj.getFullName(), err); 2973 } 2974 if (link_info != null) { 2975 if ((link_info.type == HDF5Constants.H5L_TYPE_SOFT) || (link_info.type == HDF5Constants.H5L_TYPE_EXTERNAL)) { 2976 try { 2977 H5.H5Lget_value(obj.getFID(), obj.getFullName(), link_value, HDF5Constants.H5P_DEFAULT); 2978 } 2979 catch (Exception ex) { 2980 log.debug("getLinkTargetName(): H5Lget_value {} failure: ", obj.getFullName(), ex); 2981 } 2982 if (link_info.type == HDF5Constants.H5L_TYPE_SOFT) 2983 targetObjName = link_value[0]; 2984 else if (link_info.type == HDF5Constants.H5L_TYPE_EXTERNAL) { 2985 targetObjName = link_value[1] + FileFormat.FILE_OBJ_SEP + link_value[0]; 2986 } 2987 } 2988 } 2989 2990 return targetObjName; 2991 } 2992 2993 /** 2994 * Export dataset. 2995 * 2996 * @param file_export_name 2997 * The file name to export data into. 2998 * @param file_name 2999 * The name of the HDF5 file containing the dataset. 3000 * @param object_path 3001 * The full path of the dataset to be exported. 3002 * 3003 * @throws Exception 3004 * If there is a failure. 3005 */ 3006 @Override 3007 public void exportDataset(String file_export_name, String file_name, String object_path, int binary_order) 3008 throws Exception { 3009 H5.H5export_dataset(file_export_name, file_name, object_path, binary_order); 3010 } 3011 3012 /** 3013 * Renames an attribute. 3014 * 3015 * @param obj 3016 * The object whose attribute is to be renamed. 3017 * @param oldAttrName 3018 * The current name of the attribute. 3019 * @param newAttrName 3020 * The new name of the attribute. 3021 * 3022 * @throws Exception 3023 * If there is an error at the HDF5 library level. 3024 */ 3025 @Override 3026 public void renameAttribute(HObject obj, String oldAttrName, String newAttrName) throws Exception { 3027 log.trace("renameAttribute(): rename {} to {}", oldAttrName, newAttrName); 3028 H5.H5Arename_by_name(obj.getFID(), obj.getFullName(), oldAttrName, newAttrName, HDF5Constants.H5P_DEFAULT); 3029 } 3030 3031 /** 3032 * Rename the given object 3033 * 3034 * @param obj 3035 * the object to be renamed. 3036 * @param newName 3037 * the new name of the object. 3038 * 3039 * @throws Exception 3040 * If there is a failure. 3041 */ 3042 public static void renameObject(HObject obj, String newName) throws Exception { 3043 renameObject(obj, obj.getPath(), newName); 3044 } 3045 3046 /** 3047 * Rename the given object 3048 * 3049 * @param obj 3050 * the object to be renamed. 3051 * @param newPath 3052 * the new path of the object. 3053 * @param newName 3054 * the new name of the object. 3055 * 3056 * @throws Exception 3057 * If there is a failure. 
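     * <p>
     * For example, a sketch with placeholder paths (the destination group is assumed to exist and
     * the file to be open with write access):
     *
     * <pre>
     * HObject dset = h5file.get("/dset");
     * // move the dataset into "/archive" and rename it to "dset_old"
     * H5File.renameObject(dset, "/archive/", "dset_old");
     * </pre>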
3058 */ 3059 public static void renameObject(HObject obj, String newPath, String newName) throws Exception { 3060 String currentFullPath = obj.getFullName(); 3061 String newFullPath = obj.createFullname(newPath, newName); 3062 3063 log.trace("renameObject(): currentFullPath={} newFullPath={}", currentFullPath, newFullPath); 3064 if ((currentFullPath != null) && (newFullPath != null)) { 3065 currentFullPath = currentFullPath.replaceAll("//", "/"); 3066 newFullPath = newFullPath.replaceAll("//", "/"); 3067 3068 if (currentFullPath.equals("/") && obj instanceof Group) { 3069 throw new HDF5Exception("Can't rename the root group."); 3070 } 3071 3072 if (currentFullPath.equals(newFullPath)) { 3073 throw new HDF5Exception("The new name is the same as the current name."); 3074 } 3075 3076 if (obj.getName() != null) 3077 // Call the library to move things in the file if object exists 3078 H5.H5Lmove(obj.getFID(), currentFullPath, obj.getFID(), newFullPath, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 3079 } 3080 } 3081 3082 public static int getIndexTypeValue(String strtype) { 3083 if (strtype.compareTo("H5_INDEX_NAME") == 0) 3084 return HDF5Constants.H5_INDEX_NAME; 3085 if (strtype.compareTo("H5_INDEX_CRT_ORDER") == 0) 3086 return HDF5Constants.H5_INDEX_CRT_ORDER; 3087 if (strtype.compareTo("H5_INDEX_N") == 0) 3088 return HDF5Constants.H5_INDEX_N; 3089 return HDF5Constants.H5_INDEX_UNKNOWN; 3090 } 3091 3092 public static int getIndexOrderValue(String strorder) { 3093 if (strorder.compareTo("H5_ITER_INC") == 0) 3094 return HDF5Constants.H5_ITER_INC; 3095 if (strorder.compareTo("H5_ITER_DEC") == 0) 3096 return HDF5Constants.H5_ITER_DEC; 3097 if (strorder.compareTo("H5_ITER_NATIVE") == 0) 3098 return HDF5Constants.H5_ITER_NATIVE; 3099 if (strorder.compareTo("H5_ITER_N") == 0) 3100 return HDF5Constants.H5_ITER_N; 3101 return HDF5Constants.H5_ITER_UNKNOWN; 3102 } 3103 3104 @Override 3105 public int getIndexType(String strtype) { 3106 if (strtype != null) { 3107 if (strtype.compareTo("H5_INDEX_NAME") == 0) 3108 return HDF5Constants.H5_INDEX_NAME; 3109 if (strtype.compareTo("H5_INDEX_CRT_ORDER") == 0) 3110 return HDF5Constants.H5_INDEX_CRT_ORDER; 3111 return HDF5Constants.H5_INDEX_UNKNOWN; 3112 } 3113 return getIndexType(); 3114 } 3115 3116 public int getIndexType() { 3117 return indexType; 3118 } 3119 3120 @Override 3121 public void setIndexType(int indexType) { 3122 this.indexType = indexType; 3123 } 3124 3125 @Override 3126 public int getIndexOrder(String strorder) { 3127 if (strorder != null) { 3128 if (strorder.compareTo("H5_ITER_INC") == 0) 3129 return HDF5Constants.H5_ITER_INC; 3130 if (strorder.compareTo("H5_ITER_DEC") == 0) 3131 return HDF5Constants.H5_ITER_DEC; 3132 if (strorder.compareTo("H5_ITER_NATIVE") == 0) 3133 return HDF5Constants.H5_ITER_NATIVE; 3134 if (strorder.compareTo("H5_ITER_N") == 0) 3135 return HDF5Constants.H5_ITER_N; 3136 return HDF5Constants.H5_ITER_UNKNOWN; 3137 } 3138 return getIndexOrder(); 3139 } 3140 3141 public int getIndexOrder() { 3142 return indexOrder; 3143 } 3144 3145 @Override 3146 public void setIndexOrder(int indexOrder) { 3147 this.indexOrder = indexOrder; 3148 } 3149}