001/***************************************************************************** 002 * Copyright by The HDF Group. * 003 * Copyright by the Board of Trustees of the University of Illinois. * 004 * All rights reserved. * 005 * * 006 * This file is part of the HDF Java Products distribution. * 007 * The full copyright notice, including terms governing use, modification, * 008 * and redistribution, is contained in the files COPYING and Copyright.html. * 009 * COPYING can be found at the root of the source code distribution tree. * 010 * Or, see https://support.hdfgroup.org/products/licenses.html * 011 * If you do not have access to either file, you may request a copy from * 012 * help@hdfgroup.org. * 013 ****************************************************************************/ 014 015package hdf.object.h5; 016 017import java.io.File; 018import java.lang.reflect.Array; 019import java.util.Hashtable; 020import java.util.Iterator; 021import java.util.LinkedList; 022import java.util.List; 023import java.util.Queue; 024import java.util.Vector; 025 026import hdf.hdf5lib.H5; 027import hdf.hdf5lib.HDF5Constants; 028import hdf.hdf5lib.HDFNativeData; 029import hdf.hdf5lib.exceptions.HDF5Exception; 030import hdf.hdf5lib.structs.H5G_info_t; 031import hdf.hdf5lib.structs.H5L_info_t; 032import hdf.hdf5lib.structs.H5O_info_t; 033import hdf.object.Attribute; 034import hdf.object.Dataset; 035import hdf.object.Datatype; 036import hdf.object.FileFormat; 037import hdf.object.Group; 038import hdf.object.HObject; 039import hdf.object.ScalarDS; 040 041 042/** 043 * H5File is an implementation of the FileFormat class for HDF5 files. 044 * <p> 045 * The HDF5 file structure is made up of HObjects stored in a tree-like fashion. Each tree node represents an 046 * HDF5 object: a Group, Dataset, or Named Datatype. Starting from the root of the tree, <i>rootObject</i>, the 047 * tree can be traversed to find a specific object. 048 * <p> 049 * The following example shows the implementation of finding an object for a given path in FileFormat. User applications 050 * can directly call the static method FileFormat.findObject(file, objPath) to get the object. 051 * 052 * <pre> 053 * HObject findObject(FileFormat file, String path) { 054 * if (file == null || path == null) 055 * return null; 056 * if (!path.endsWith("/")) 057 * path = path + "/"; 058 * HObject theRoot = file.getRootObject(); 059 * if (theRoot == null) 060 * return null; 061 * else if (path.equals("/")) 062 * return theRoot; 063 * 064 * Iterator local_it = ((Group) theRoot) 065 * .breadthFirstMemberList().iterator(); 066 * HObject theObj = null; 067 * while (local_it.hasNext()) { 068 * theObj = local_it.next(); 069 * String fullPath = theObj.getFullName() + "/"; 070 * if (path.equals(fullPath) && theObj.getPath() != null) 071 * break; 072 * } 073 * return theObj; 074 * } 075 * </pre> 076 * 077 * @author Peter X. Cao 078 * @version 2.4 9/4/2007 079 */ 080public class H5File extends FileFormat { 081 private static final long serialVersionUID = 6247335559471526045L; 082 083 private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5File.class); 084 085 /** 086 * The file access flag. Valid values are HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5F_ACC_RDWR and 087 * HDF5Constants.H5F_ACC_CREAT. 088 */ 089 private int flag; 090 091 /** 092 * The index type. Valid values are HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_INDEX_CRT_ORDER. 093 */ 094 private int indexType = HDF5Constants.H5_INDEX_NAME; 095 096 /** 097 * The index order.
Valid values are HDF5Constants.H5_ITER_INC, HDF5Constants.H5_ITER_DEC. 098 */ 099 private int indexOrder = HDF5Constants.H5_ITER_INC; 100 101 /** 102 * The root object of the file hierarchy. 103 */ 104 private HObject rootObject; 105 106 /** 107 * How many characters maximum in an attribute name? 108 */ 109 private static final int attrNameLen = 256; 110 111 /** 112 * The library version bounds 113 */ 114 private int[] libver; 115 public static final int LIBVER_LATEST = HDF5Constants.H5F_LIBVER_LATEST; 116 public static final int LIBVER_EARLIEST = HDF5Constants.H5F_LIBVER_EARLIEST; 117 public static final int LIBVER_V18 = HDF5Constants.H5F_LIBVER_V18; 118 public static final int LIBVER_V110 = HDF5Constants.H5F_LIBVER_V110; 119 120 /*************************************************************************** 121 * Constructor 122 **************************************************************************/ 123 /** 124 * Constructs an H5File instance with an empty file name and read-only access. 125 */ 126 public H5File() { 127 this("", READ); 128 } 129 130 /** 131 * Constructs an H5File instance with specified file name and read/write access. 132 * <p> 133 * This constructor does not open the file for access, nor does it confirm that the file can be opened read/write. 134 * 135 * @param fileName 136 * A valid file name, with a relative or absolute path. 137 * 138 * @throws NullPointerException 139 * If the <code>fileName</code> argument is <code>null</code>. 140 */ 141 public H5File(String fileName) { 142 this(fileName, WRITE); 143 } 144 145 /** 146 * Constructs an H5File instance with specified file name and access. 147 * <p> 148 * The access parameter values and corresponding behaviors: 149 * <ul> 150 * <li>READ: Read-only access; open() will fail file doesn't exist.</li> 151 * <li>WRITE: Read/Write access; open() will fail if file doesn't exist or if file can't be opened with read/write 152 * access.</li> 153 * <li>CREATE: Read/Write access; create a new file or truncate an existing one; open() will fail if file can't be 154 * created or if file exists but can't be opened read/write.</li> 155 * </ul> 156 * <p> 157 * This constructor does not open the file for access, nor does it confirm that the file can later be opened 158 * read/write or created. 159 * <p> 160 * The flag returned by {@link #isReadOnly()} is set to true if the access parameter value is READ, even though the 161 * file isn't yet open. 162 * 163 * @param fileName 164 * A valid file name, with a relative or absolute path. 165 * @param access 166 * The file access flag, which determines behavior when file is opened. Acceptable values are 167 * <code> READ, WRITE, </code> and <code>CREATE</code>. 168 * 169 * @throws NullPointerException 170 * If the <code>fileName</code> argument is <code>null</code>. 171 */ 172 public H5File(String fileName, int access) { 173 // Call FileFormat ctor to set absolute path name 174 super(fileName); 175 libver = new int[2]; 176 177 if ((access & FILE_CREATE_OPEN) == FILE_CREATE_OPEN) { 178 File f = new File(fileName); 179 if (f.exists()) { 180 access = WRITE; 181 } 182 else { 183 access = CREATE; 184 } 185 } 186 187 // set metadata for the instance 188 rootObject = null; 189 this.fid = -1; 190 isReadOnly = (access == READ); 191 192 // At this point we just set up the flags for what happens later. 193 // We just pass unexpected access values on... subclasses may have 194 // their own values. 
195 if (access == READ) { 196 flag = HDF5Constants.H5F_ACC_RDONLY; 197 } 198 else if (access == WRITE) { 199 flag = HDF5Constants.H5F_ACC_RDWR; 200 } 201 else if (access == CREATE) { 202 flag = HDF5Constants.H5F_ACC_CREAT; 203 } 204 else { 205 flag = access; 206 } 207 } 208 209 /*************************************************************************** 210 * Class methods 211 **************************************************************************/ 212 213 /** 214 * Copies the attributes of one object to another object. 215 * <p> 216 * This method copies all the attributes from one object (source object) to another (destination object). If an 217 * attribute already exists in the destination object, the attribute will not be copied. Attribute names exceeding 218 * 256 characters will be truncated in the destination object. 219 * <p> 220 * The object can be an H5Group, an H5Dataset, or a named H5Datatype. This method is in the H5File class because 221 * there is no H5Object class and it is specific to HDF5 objects. 222 * <p> 223 * The copy can fail for a number of reasons, including an invalid source or destination object, but no exceptions 224 * are thrown. The actual copy is carried out by the method: {@link #copyAttributes(long, long)} 225 * 226 * @param src 227 * The source object. 228 * @param dst 229 * The destination object. 230 * 231 * @see #copyAttributes(long, long) 232 */ 233 public static final void copyAttributes(HObject src, HObject dst) { 234 if ((src != null) && (dst != null)) { 235 long srcID = src.open(); 236 long dstID = dst.open(); 237 238 if ((srcID >= 0) && (dstID >= 0)) { 239 copyAttributes(srcID, dstID); 240 } 241 242 if (srcID >= 0) { 243 src.close(srcID); 244 } 245 246 if (dstID >= 0) { 247 dst.close(dstID); 248 } 249 } 250 } 251 252 /** 253 * Copies the attributes of one object to another object. 254 * <p> 255 * This method copies all the attributes from one object (source object) to another (destination object). If an 256 * attribute already exists in the destination object, the attribute will not be copied. Attribute names exceeding 257 * 256 characters will be truncated in the destination object. 258 * <p> 259 * The object can be an H5Group, an H5Dataset, or a named H5Datatype. This method is in the H5File class because 260 * there is no H5Object class and it is specific to HDF5 objects. 261 * <p> 262 * The copy can fail for a number of reasons, including an invalid source or destination object identifier, but no 263 * exceptions are thrown. 264 * 265 * @param src_id 266 * The identifier of the source object. 267 * @param dst_id 268 * The identifier of the destination object. 
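     *
     * <p>
     * For illustration, a minimal usage sketch (here src and dst are assumed to be HObjects in this file;
     * the caller opens and closes their identifiers, just as the HObject overload above does):
     *
     * <pre>
     * long srcID = src.open();
     * long dstID = dst.open();
     * if ((srcID >= 0) && (dstID >= 0)) {
     *     H5File.copyAttributes(srcID, dstID);
     * }
     * if (srcID >= 0) src.close(srcID);
     * if (dstID >= 0) dst.close(dstID);
     * </pre>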
269 */ 270 public static final void copyAttributes(long src_id, long dst_id) { 271 log.trace("copyAttributes(): start: src_id={} dst_id={}", src_id, dst_id); 272 long aid_src = -1; 273 long aid_dst = -1; 274 long asid = -1; 275 long atid = -1; 276 String aName = null; 277 H5O_info_t obj_info = null; 278 279 try { 280 obj_info = H5.H5Oget_info(src_id); 281 } 282 catch (Exception ex) { 283 obj_info.num_attrs = -1; 284 } 285 286 if (obj_info.num_attrs < 0) { 287 log.debug("copyAttributes(): no attributes"); 288 return; 289 } 290 291 for (int i = 0; i < obj_info.num_attrs; i++) { 292 try { 293 aid_src = H5.H5Aopen_by_idx(src_id, ".", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 294 i, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 295 aName = H5.H5Aget_name(aid_src); 296 atid = H5.H5Aget_type(aid_src); 297 asid = H5.H5Aget_space(aid_src); 298 299 aid_dst = H5.H5Acreate(dst_id, aName, atid, asid, HDF5Constants.H5P_DEFAULT, 300 HDF5Constants.H5P_DEFAULT); 301 302 // use native data copy 303 H5.H5Acopy(aid_src, aid_dst); 304 305 } 306 catch (Exception ex) { 307 log.debug("copyAttributes(): Attribute[{}] failure: ", i, ex); 308 } 309 310 try { 311 H5.H5Sclose(asid); 312 } 313 catch (Exception ex) { 314 log.debug("copyAttributes(): Attribute[{}] H5Sclose(asid {}) failure: ", i, asid, ex); 315 } 316 try { 317 H5.H5Tclose(atid); 318 } 319 catch (Exception ex) { 320 log.debug("copyAttributes(): Attribute[{}] H5Tclose(atid {}) failure: ", i, atid, ex); 321 } 322 try { 323 H5.H5Aclose(aid_src); 324 } 325 catch (Exception ex) { 326 log.debug("copyAttributes(): Attribute[{}] H5Aclose(aid_src {}) failure: ", i, aid_src, ex); 327 } 328 try { 329 H5.H5Aclose(aid_dst); 330 } 331 catch (Exception ex) { 332 log.debug("copyAttributes(): Attribute[{}] H5Aclose(aid_dst {}) failure: ", i, aid_dst, ex); 333 } 334 335 } // (int i=0; i<num_attr; i++) 336 } 337 338 /** 339 * Returns a list of attributes for the specified object. 340 * <p> 341 * This method returns a list containing the attributes associated with the 342 * identified object. If there are no associated attributes, an empty list will 343 * be returned. 344 * <p> 345 * Attribute names exceeding 256 characters will be truncated in the returned 346 * list. 347 * 348 * @param obj 349 * The HObject whose attributes are to be returned. 350 * 351 * @return The list of the object's attributes. 352 * 353 * @throws HDF5Exception 354 * If an underlying HDF library routine is unable to perform a step 355 * necessary to retrieve the attributes. A variety of failures throw 356 * this exception. 357 * 358 * @see #getAttribute(HObject,int,int) 359 */ 360 public static final List<Attribute> getAttribute(HObject obj) throws HDF5Exception { 361 return H5File.getAttribute(obj, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC); 362 } 363 364 /** 365 * Returns a list of attributes for the specified object, in creation or 366 * alphabetical order. 367 * <p> 368 * This method returns a list containing the attributes associated with the 369 * identified object. If there are no associated attributes, an empty list will 370 * be returned. The list of attributes returned can be in increasing or 371 * decreasing, creation or alphabetical order. 372 * <p> 373 * Attribute names exceeding 256 characters will be truncated in the returned 374 * list. 375 * 376 * @param obj 377 * The HObject whose attributes are to be returned. 378 * @param idx_type 379 * The type of index. 
Valid values are: 380 * <ul> 381 * <li>H5_INDEX_NAME: An alpha-numeric index by attribute name 382 * <li>H5_INDEX_CRT_ORDER: An index by creation order 383 * </ul> 384 * @param order 385 * The index traversal order. Valid values are: 386 * <ul> 387 * <li>H5_ITER_INC: A top-down iteration incrementing the index 388 * position at each step. 389 * <li>H5_ITER_DEC: A bottom-up iteration decrementing the index 390 * position at each step. 391 * </ul> 392 * 393 * @return The list of the object's attributes. 394 * 395 * @throws HDF5Exception 396 * If an underlying HDF library routine is unable to perform a step 397 * necessary to retrieve the attributes. A variety of failures throw 398 * this exception. 399 */ 400 401 public static final List<Attribute> getAttribute(HObject obj, int idx_type, int order) throws HDF5Exception { 402 log.trace("getAttribute(): start: obj={} idx_type={} order={}", obj, idx_type, order); 403 List<Attribute> attributeList = null; 404 long objID = -1; 405 long aid = -1; 406 long sid = -1; 407 long tid = -1; 408 H5O_info_t obj_info = null; 409 410 objID = obj.open(); 411 if (objID >= 0) { 412 try { 413 try { 414 log.trace("getAttribute(): get obj_info"); 415 obj_info = H5.H5Oget_info(objID); 416 } 417 catch (Exception ex) { 418 log.debug("getAttribute(): H5Oget_info(objID {}) failure: ", objID, ex); 419 } 420 if (obj_info.num_attrs <= 0) { 421 log.trace("getAttribute(): no attributes"); 422 return (attributeList = new Vector<>()); 423 } 424 425 int n = (int) obj_info.num_attrs; 426 attributeList = new Vector<>(n); 427 log.trace("getAttribute(): num_attrs={}", n); 428 429 for (int i = 0; i < n; i++) { 430 long lsize = 1; 431 log.trace("getAttribute(): attribute[{}]", i); 432 433 try { 434 aid = H5.H5Aopen_by_idx(objID, ".", idx_type, order, i, HDF5Constants.H5P_DEFAULT, 435 HDF5Constants.H5P_DEFAULT); 436 sid = H5.H5Aget_space(aid); 437 log.trace("getAttribute(): Attribute[{}] aid={} sid={}", i, aid, sid); 438 439 long dims[] = null; 440 int rank = H5.H5Sget_simple_extent_ndims(sid); 441 442 log.trace("getAttribute(): Attribute[{}] isScalar={}", i, (rank == 0)); 443 444 if (rank > 0) { 445 dims = new long[rank]; 446 H5.H5Sget_simple_extent_dims(sid, dims, null); 447 log.trace("getAttribute(): Attribute[{}] rank={}, dims={}", i, rank, dims); 448 for (int j = 0; j < dims.length; j++) { 449 lsize *= dims[j]; 450 } 451 } 452 453 String nameA = H5.H5Aget_name(aid); 454 log.trace("getAttribute(): Attribute[{}] is {}", i, nameA); 455 456 long tmptid = -1; 457 try { 458 tmptid = H5.H5Aget_type(aid); 459 tid = H5.H5Tget_native_type(tmptid); 460 log.trace("getAttribute(): Attribute[{}] tid={} native tmptid={} from aid={}", i, tid, 461 tmptid, aid); 462 } 463 finally { 464 try { 465 H5.H5Tclose(tmptid); 466 } 467 catch (Exception ex) { 468 log.debug("getAttribute(): Attribute[{}] H5Tclose(tmptid {}) failure: ", i, tmptid, ex); 469 } 470 } 471 472 H5Datatype attrType = null; 473 try { 474 attrType = new H5Datatype(obj.getFileFormat(), tid); 475 476 log.trace("getAttribute(): Attribute[{}] Datatype={}", i, attrType.getDescription()); 477 log.trace("getAttribute(): Attribute[{}] has size={} isCompound={} is_variable_str={} isVLEN={}", 478 i, lsize, attrType.isCompound(), attrType.isVarStr(), attrType.isVLEN()); 479 } 480 catch (Exception ex) { 481 log.debug("getAttribute(): failed to create datatype for Attribute[{}]: ", i, ex); 482 attrType = null; 483 } 484 485 Attribute attr = new Attribute(obj, nameA, attrType, dims); 486 attributeList.add(attr); 487 488 // retrieve the attribute 
value 489 if (lsize <= 0) { 490 log.debug("getAttribute(): Attribute[{}] lsize <= 0", i); 491 continue; 492 } 493 494 if (lsize < Integer.MIN_VALUE || lsize > Integer.MAX_VALUE) { 495 log.debug("getAttribute(): Attribute[{}] lsize outside valid Java int range; unsafe cast", i); 496 continue; 497 } 498 499 Object value = null; 500 try { 501 if (attr.getDatatype().isVarStr()) { 502 String[] strs = new String[(int) lsize]; 503 for (int j = 0; j < lsize; j++) { 504 strs[j] = ""; 505 } 506 try { 507 log.trace("getAttribute(): Attribute[{}] H5AreadVL", i); 508 H5.H5AreadVL(aid, tid, strs); 509 } 510 catch (Exception ex) { 511 log.debug("getAttribute(): Attribute[{}] H5AreadVL failure: ", i, ex); 512 ex.printStackTrace(); 513 } 514 value = strs; 515 } 516 else if (attr.getDatatype().isCompound()) { 517 String[] strs = new String[(int) lsize]; 518 for (int j = 0; j < lsize; j++) { 519 strs[j] = ""; 520 } 521 try { 522 log.trace("getAttribute: attribute[{}] H5AreadComplex", i); 523 H5.H5AreadComplex(aid, tid, strs); 524 } 525 catch (Exception ex) { 526 ex.printStackTrace(); 527 } 528 value = strs; 529 } 530 else if (attr.getDatatype().isVLEN()) { 531 String[] strs = new String[(int) lsize]; 532 for (int j = 0; j < lsize; j++) { 533 strs[j] = ""; 534 } 535 try { 536 log.trace("getAttribute(): Attribute[{}] H5AreadVL", i); 537 H5.H5AreadVL(aid, tid, strs); 538 } 539 catch (Exception ex) { 540 log.debug("getAttribute(): Attribute[{}] H5AreadVL failure: ", i, ex); 541 ex.printStackTrace(); 542 } 543 value = strs; 544 } 545 else { 546 try { 547 value = H5Datatype.allocateArray(((H5Datatype) attr.getDatatype()), (int) lsize); 548 } 549 catch (OutOfMemoryError e) { 550 log.debug("getAttribute(): Attribute[{}] out of memory", i, e); 551 value = null; 552 } 553 if (value == null) { 554 log.debug("getAttribute(): Attribute[{}] allocateArray returned null", i); 555 continue; 556 } 557 558 if (attr.getDatatype().isArray()) { 559 try { 560 log.trace("getAttribute(): Attribute[{}] H5Aread ARRAY tid={}", i, tid); 561 H5.H5Aread(aid, tid, value); 562 } 563 catch (Exception ex) { 564 log.debug("getAttribute(): Attribute[{}] H5Aread failure: ", i, ex); 565 ex.printStackTrace(); 566 } 567 } 568 else { 569 log.trace("getAttribute(): Attribute[{}] H5Aread", i); 570 H5.H5Aread(aid, tid, value); 571 } 572 573 if (attr.getDatatype().isText() && value instanceof byte[]) { 574 log.trace("getAttribute(): isText: converting byte array to string array"); 575 value = Attribute.byteToString((byte[]) value, (int) attr.getDatatype().getDatatypeSize()); 576 } 577 else if (attr.getDatatype().isFloat() && attr.getDatatype().getDatatypeSize() == 16) { 578 log.trace("scalarDatasetCommonIO(): isFloat: converting byte array to BigDecimal array"); 579 value = ((H5Datatype)attr.getDatatype()).byteToBigDecimal(0, (int) lsize, (byte[]) value); 580 } 581 else if (((H5Datatype)attr.getDatatype()).isRefObj()) { 582 log.trace("getAttribute(): Attribute[{}] isREF: converting byte array to long array", i); 583 value = HDFNativeData.byteToLong((byte[]) value); 584 } 585 } 586 } 587 catch (Exception ex) { 588 log.debug("getAttribute(): Attribute[{}] read failure: ", i, ex); 589 continue; 590 } 591 592 log.trace("getAttribute(): Attribute[{}] data: {}", i, value); 593 attr.setData(value); 594 } 595 catch (HDF5Exception ex) { 596 log.debug("getAttribute(): Attribute[{}] inspection failure: ", i, ex); 597 } 598 finally { 599 try { 600 H5.H5Tclose(tid); 601 } 602 catch (Exception ex) { 603 log.debug("getAttribute(): Attribute[{}] H5Tclose(tid {}) 
failure: ", i, tid, ex); 604 } 605 try { 606 H5.H5Sclose(sid); 607 } 608 catch (Exception ex) { 609 log.debug("getAttribute(): Attribute[{}] H5Sclose(sid {}) failure: ", i, sid, ex); 610 } 611 try { 612 H5.H5Aclose(aid); 613 } 614 catch (Exception ex) { 615 log.debug("getAttribute(): Attribute[{}] H5Aclose(aid {}) failure: ", i, aid, ex); 616 } 617 } 618 } // (int i=0; i<obj_info.num_attrs; i++) 619 } 620 finally { 621 obj.close(objID); 622 } 623 } 624 625 return attributeList; 626 } 627 628 /** 629 * Creates attributes for an HDF5 image dataset. 630 * <p> 631 * This method creates attributes for two common types of HDF5 images. It provides a way of adding multiple 632 * attributes to an HDF5 image dataset with a single call. The {@link #writeAttribute(HObject, Attribute, boolean)} 633 * method may be used to write image attributes that are not handled by this method. 634 * <p> 635 * For more information about HDF5 image attributes, see the 636 * <a href="https://support.hdfgroup.org/HDF5/doc/ADGuide/ImageSpec.html"> HDF5 Image and Palette Specification</a>. 637 * <p> 638 * This method can be called to create attributes for 24-bit true color and indexed images. The 639 * <code>selectionFlag</code> parameter controls whether this will be an indexed or true color image. If 640 * <code>selectionFlag</code> is <code>-1</code>, this will be an indexed image. If the value is 641 * <code>ScalarDS.INTERLACE_PIXEL</code> or <code>ScalarDS.INTERLACE_PLANE</code>, it will be a 24-bit true color 642 * image with the indicated interlace mode. 643 * <p> 644 * <ul> 645 * The created attribute descriptions, names, and values are: 646 * <li>The image identifier: name="CLASS", value="IMAGE" 647 * <li>The version of image: name="IMAGE_VERSION", value="1.2" 648 * <li>The range of data values: name="IMAGE_MINMAXRANGE", value=[0, 255] 649 * <li>The type of the image: name="IMAGE_SUBCLASS", value="IMAGE_TRUECOLOR" or "IMAGE_INDEXED" 650 * <li>For IMAGE_TRUECOLOR, the interlace mode: name="INTERLACE_MODE", value="INTERLACE_PIXEL" or "INTERLACE_PLANE" 651 * <li>For IMAGE_INDEXED, the palettes to use in viewing the image: name="PALETTE", value= 1-d array of references 652 * to the palette datasets, with initial value of {-1} 653 * </ul> 654 * <p> 655 * This method is in the H5File class rather than H5ScalarDS because images are typically thought of at the File 656 * Format implementation level. 657 * 658 * @param dataset 659 * The image dataset the attributes are added to. 660 * @param selectionFlag 661 * Selects the image type and, for 24-bit true color images, the interlace mode. Valid values are: 662 * <ul> 663 * <li>-1: Indexed Image. <li>ScalarDS.INTERLACE_PIXEL: True Color Image. The component values for a 664 * pixel are stored contiguously. <li>ScalarDS.INTERLACE_PLANE: True Color Image. Each component is 665 * stored in a separate plane. 666 * </ul> 667 * 668 * @throws Exception 669 * If there is a problem creating the attributes, or if the selectionFlag is invalid. 
670 */ 671 private static final void createImageAttributes(Dataset dataset, int selectionFlag) throws Exception { 672 log.trace("createImageAttributes(): start: dataset={}", dataset.toString()); 673 String subclass = null; 674 String interlaceMode = null; 675 676 if (selectionFlag == ScalarDS.INTERLACE_PIXEL) { 677 log.trace("createImageAttributes(): subclass IMAGE_TRUECOLOR selectionFlag INTERLACE_PIXEL"); 678 subclass = "IMAGE_TRUECOLOR"; 679 interlaceMode = "INTERLACE_PIXEL"; 680 } 681 else if (selectionFlag == ScalarDS.INTERLACE_PLANE) { 682 log.trace("createImageAttributes(): subclass IMAGE_TRUECOLOR selectionFlag INTERLACE_PLANE"); 683 subclass = "IMAGE_TRUECOLOR"; 684 interlaceMode = "INTERLACE_PLANE"; 685 } 686 else if (selectionFlag == -1) { 687 log.trace("createImageAttributes(): subclass IMAGE_INDEXED"); 688 subclass = "IMAGE_INDEXED"; 689 } 690 else { 691 log.debug("createImageAttributes(): invalid selectionFlag"); 692 throw new HDF5Exception("The selectionFlag is invalid."); 693 } 694 695 String attrName = "CLASS"; 696 String[] classValue = { "IMAGE" }; 697 Datatype attrType = new H5Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE); 698 Attribute attr = new Attribute(dataset, attrName, attrType, null); 699 attr.write(classValue); 700 701 attrName = "IMAGE_VERSION"; 702 String[] versionValue = { "1.2" }; 703 attrType = new H5Datatype(Datatype.CLASS_STRING, versionValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE); 704 attr = new Attribute(dataset, attrName, attrType, null); 705 attr.write(versionValue); 706 707 long[] attrDims = { 2 }; 708 attrName = "IMAGE_MINMAXRANGE"; 709 byte[] attrValueInt = { 0, (byte) 255 }; 710 attrType = new H5Datatype(Datatype.CLASS_CHAR, 1, Datatype.NATIVE, Datatype.SIGN_NONE); 711 attr = new Attribute(dataset, attrName, attrType, attrDims); 712 attr.write(attrValueInt); 713 714 attrName = "IMAGE_SUBCLASS"; 715 String[] subclassValue = { subclass }; 716 attrType = new H5Datatype(Datatype.CLASS_STRING, subclassValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE); 717 attr = new Attribute(dataset, attrName, attrType, null); 718 attr.write(subclassValue); 719 720 if ((selectionFlag == ScalarDS.INTERLACE_PIXEL) || (selectionFlag == ScalarDS.INTERLACE_PLANE)) { 721 attrName = "INTERLACE_MODE"; 722 String[] interlaceValue = { interlaceMode }; 723 attrType = new H5Datatype(Datatype.CLASS_STRING, interlaceValue[0].length() + 1, Datatype.NATIVE, Datatype.NATIVE); 724 attr = new Attribute(dataset, attrName, attrType, null); 725 attr.write(interlaceValue); 726 } 727 else { 728 attrName = "PALETTE"; 729 long[] palRef = { 0 }; // set ref to null 730 attrType = new H5Datatype(Datatype.CLASS_REFERENCE, 1, Datatype.NATIVE, Datatype.SIGN_NONE); 731 attr = new Attribute(dataset, attrName, attrType, null); 732 attr.write(palRef); 733 } 734 } 735 736 /** 737 * Updates values of scalar dataset object references in copied file. 738 * <p> 739 * This method has very specific functionality as documented below, and the user is advised to pay close attention 740 * when dealing with files that contain references. 741 * <p> 742 * When a copy is made from one HDF file to another, object references and dataset region references are copied, but 743 * the references in the destination file are not updated by the copy and are therefore invalid. 
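     * <p>
     * A sketch of the intended call pattern (file names are illustrative; the destination file is assumed
     * to already contain a full copy of the source file's objects):
     *
     * <pre>
     * H5File srcFile = new H5File("source.h5", FileFormat.READ);
     * H5File dstFile = new H5File("copy_of_source.h5", FileFormat.WRITE);
     * // ... copy the entire file structure from srcFile into dstFile ...
     * H5File.updateReferenceDataset(srcFile, dstFile);
     * </pre>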
744 * <p> 745 * When an entire file is copied, this method updates the values of the object references and dataset region 746 * references that are in scalar datasets in the destination file so that they point to the correct object(s) in the 747 * destination file. The method does not update references that occur in objects other than scalar datasets. 748 * <p> 749 * In the current release, the updating of object references is not handled completely as it was not required by the 750 * projects that funded development. There is no support for updates when the copy does not include the entire file. 751 * Nor is there support for updating objects other than scalar datasets in full-file copies. This functionality will 752 * be extended as funding becomes available or, possibly, when the underlying HDF library supports the reference 753 * updates itself. 754 * 755 * @param srcFile 756 * The file that was copied. 757 * @param dstFile 758 * The destination file where the object references will be updated. 759 * 760 * @throws Exception 761 * If there is a problem in the update process. 762 */ 763 public static final void updateReferenceDataset(H5File srcFile, H5File dstFile) throws Exception { 764 if ((srcFile == null) || (dstFile == null)) { 765 log.debug("updateReferenceDataset(): srcFile or dstFile is null"); 766 return; 767 } 768 769 HObject srcRoot = srcFile.getRootObject(); 770 HObject newRoot = dstFile.getRootObject(); 771 772 Iterator<HObject> srcIt = getMembersBreadthFirst(srcRoot).iterator(); 773 Iterator<HObject> newIt = getMembersBreadthFirst(newRoot).iterator(); 774 775 long did = -1; 776 // build one-to-one table of between objects in 777 // the source file and new file 778 long tid = -1; 779 HObject srcObj, newObj; 780 Hashtable<String, long[]> oidMap = new Hashtable<>(); 781 List<ScalarDS> refDatasets = new Vector<>(); 782 while (newIt.hasNext() && srcIt.hasNext()) { 783 srcObj = srcIt.next(); 784 newObj = newIt.next(); 785 oidMap.put(String.valueOf((srcObj.getOID())[0]), newObj.getOID()); 786 did = -1; 787 tid = -1; 788 789 // for Scalar DataSets in destination, if there is an object 790 // reference in the dataset, add it to the refDatasets list for 791 // later updating. 792 if (newObj instanceof ScalarDS) { 793 ScalarDS sd = (ScalarDS) newObj; 794 did = sd.open(); 795 if (did >= 0) { 796 try { 797 tid = H5.H5Dget_type(did); 798 if (H5.H5Tequal(tid, HDF5Constants.H5T_STD_REF_OBJ)) { 799 refDatasets.add(sd); 800 } 801 } 802 catch (Exception ex) { 803 log.debug("updateReferenceDataset(): ScalarDS reference failure: ", ex); 804 } 805 finally { 806 try { 807 H5.H5Tclose(tid); 808 } 809 catch (Exception ex) { 810 log.debug("updateReferenceDataset(): ScalarDS reference H5Tclose(tid {}) failure: ", tid, ex); 811 } 812 } 813 } 814 sd.close(did); 815 } // (newObj instanceof ScalarDS) 816 } 817 818 // Update the references in the scalar datasets in the dest file. 
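        // For each dataset collected above: read its raw reference values, map each old
        // object reference to the corresponding object in the destination file via oidMap,
        // and write the updated values back to the dataset.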
819 H5ScalarDS d = null; 820 long sid = -1; 821 int size = 0; 822 int rank = 0; 823 int n = refDatasets.size(); 824 for (int i = 0; i < n; i++) { 825 log.trace("updateReferenceDataset(): Update the references in the scalar datasets in the dest file"); 826 d = (H5ScalarDS) refDatasets.get(i); 827 byte[] buf = null; 828 long[] refs = null; 829 830 try { 831 did = d.open(); 832 if (did >= 0) { 833 tid = H5.H5Dget_type(did); 834 sid = H5.H5Dget_space(did); 835 rank = H5.H5Sget_simple_extent_ndims(sid); 836 size = 1; 837 if (rank > 0) { 838 long[] dims = new long[rank]; 839 H5.H5Sget_simple_extent_dims(sid, dims, null); 840 log.trace("updateReferenceDataset(): rank={}, dims={}", rank, dims); 841 for (int j = 0; j < rank; j++) { 842 size *= (int) dims[j]; 843 } 844 dims = null; 845 } 846 847 buf = new byte[size * 8]; 848 H5.H5Dread(did, tid, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, buf); 849 850 // update the ref values 851 refs = HDFNativeData.byteToLong(buf); 852 size = refs.length; 853 for (int j = 0; j < size; j++) { 854 long[] theOID = oidMap.get(String.valueOf(refs[j])); 855 if (theOID != null) { 856 refs[j] = theOID[0]; 857 } 858 } 859 860 // write back to file 861 H5.H5Dwrite(did, tid, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, refs); 862 } 863 else { 864 log.debug("updateReferenceDataset(): dest file dataset failed to open"); 865 } 866 } 867 catch (Exception ex) { 868 log.debug("updateReferenceDataset(): Reference[{}] failure: ", i, ex); 869 continue; 870 } 871 finally { 872 try { 873 H5.H5Tclose(tid); 874 } 875 catch (Exception ex) { 876 log.debug("updateReferenceDataset(): H5ScalarDS reference[{}] H5Tclose(tid {}) failure: ", i, tid, ex); 877 } 878 try { 879 H5.H5Sclose(sid); 880 } 881 catch (Exception ex) { 882 log.debug("updateReferenceDataset(): H5ScalarDS reference[{}] H5Sclose(sid {}) failure: ", i, sid, ex); 883 } 884 try { 885 H5.H5Dclose(did); 886 } 887 catch (Exception ex) { 888 log.debug("updateReferenceDataset(): H5ScalarDS reference[{}] H5Dclose(did {}) failure: ", i, did, ex); 889 } 890 } 891 892 refs = null; 893 buf = null; 894 } // (int i=0; i<n; i++) 895 } 896 897 /*************************************************************************** 898 * Implementation Class methods. These methods are related to the implementing H5File class, but not to a particular 899 * instance of the class. Since we can't override class methods (they can only be shadowed in Java), these are 900 * instance methods. 901 **************************************************************************/ 902 903 /** 904 * Returns the version of the HDF5 library. 905 * 906 * @see hdf.object.FileFormat#getLibversion() 907 */ 908 @Override 909 public String getLibversion() { 910 int[] vers = new int[3]; 911 String ver = "HDF5 "; 912 913 try { 914 H5.H5get_libversion(vers); 915 } 916 catch (Exception ex) { 917 ex.printStackTrace(); 918 } 919 920 ver += vers[0] + "." + vers[1] + "." + vers[2]; 921 log.debug("getLibversion(): libversion is {}", ver); 922 923 return ver; 924 } 925 926 /** 927 * Checks if the specified FileFormat instance has the HDF5 format. 928 * 929 * @see hdf.object.FileFormat#isThisType(hdf.object.FileFormat) 930 */ 931 @Override 932 public boolean isThisType(FileFormat theFile) { 933 return (theFile instanceof H5File); 934 } 935 936 /** 937 * Checks if the specified file has the HDF5 format. 
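     * <p>
     * For example (a sketch; the file name is illustrative):
     *
     * <pre>
     * boolean isH5 = new H5File().isThisType("test_hdf5.h5");
     * </pre>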
938 * 939 * @see hdf.object.FileFormat#isThisType(java.lang.String) 940 */ 941 @Override 942 public boolean isThisType(String filename) { 943 boolean isH5 = false; 944 945 try { 946 isH5 = H5.H5Fis_hdf5(filename); 947 } 948 catch (HDF5Exception ex) { 949 isH5 = false; 950 } 951 952 return isH5; 953 } 954 955 /** 956 * Creates an HDF5 file with the specified name and returns a new H5File instance associated with the file. 957 * 958 * @throws Exception 959 * If the file cannot be created or if createFlag has unexpected value. 960 * 961 * @see hdf.object.FileFormat#createFile(java.lang.String, int) 962 * @see #H5File(String, int) 963 */ 964 @Override 965 public FileFormat createFile(String filename, int createFlag) throws Exception { 966 log.trace("createFile(): start: filename={} createFlag={}", filename, createFlag); 967 // Flag if we need to create or truncate the file. 968 Boolean doCreateFile = true; 969 970 // Won't create or truncate if CREATE_OPEN specified and file exists 971 if ((createFlag & FILE_CREATE_OPEN) == FILE_CREATE_OPEN) { 972 File f = new File(filename); 973 if (f.exists()) { 974 doCreateFile = false; 975 } 976 } 977 log.trace("createFile(): doCreateFile={}", doCreateFile); 978 979 if (doCreateFile) { 980 long fapl = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS); 981 982 if ((createFlag & FILE_CREATE_EARLY_LIB) == FILE_CREATE_EARLY_LIB) { 983 int[] newlibver = getLibBounds(); 984 H5.H5Pset_libver_bounds(fapl, newlibver[0], newlibver[1]); 985 } 986 987 long fileid = H5.H5Fcreate(filename, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, fapl); 988 try { 989 H5.H5Pclose(fapl); 990 H5.H5Fclose(fileid); 991 } 992 catch (HDF5Exception ex) { 993 log.debug("H5 file, {} failure: ", filename, ex); 994 } 995 } 996 997 return new H5File(filename, WRITE); 998 } 999 1000 /** 1001 * Creates an H5File instance with specified file name and access. 1002 * 1003 * @see hdf.object.FileFormat#createInstance(java.lang.String, int) 1004 * @see #H5File(String, int) 1005 * 1006 * @throws Exception 1007 * If there is a failure. 1008 */ 1009 @Override 1010 public FileFormat createInstance(String filename, int access) throws Exception { 1011 log.trace("createInstance() for {}", filename); 1012 return new H5File(filename, access); 1013 } 1014 1015 /*************************************************************************** 1016 * Instance Methods 1017 * 1018 * These methods are related to the H5File class and to particular instances of objects with this class type. 1019 **************************************************************************/ 1020 1021 /** 1022 * Opens file and returns a file identifier. 1023 * 1024 * @see hdf.object.FileFormat#open() 1025 */ 1026 @Override 1027 public long open() throws Exception { 1028 return open(true); 1029 } 1030 1031 /** 1032 * Opens file and returns a file identifier. 1033 * 1034 * @see hdf.object.FileFormat#open(int...) 1035 */ 1036 @Override 1037 public long open(int... indexList) throws Exception { 1038 setIndexType(indexList[0]); 1039 setIndexOrder(indexList[1]); 1040 return open(true); 1041 } 1042 1043 /** 1044 * Sets the bounds of new library versions. 1045 * 1046 * @param lowStr 1047 * The earliest version of the library. 1048 * @param highStr 1049 * The latest version of the library. 1050 * 1051 * @throws Exception 1052 * If there is an error at the HDF5 library level. 
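     *
     * <p>
     * A usage sketch (the accepted strings, as handled below, include "Earliest", "V18", "V110" and
     * "Latest"; null or unrecognized values fall back to the defaults):
     *
     * <pre>
     * file.setNewLibBounds("V18", "Latest");
     * </pre>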
1053 */ 1054 @Override 1055 public void setNewLibBounds(String lowStr, String highStr) throws Exception { 1056 int low = -1; 1057 int high = -1; 1058 1059 if (lowStr == null) { 1060 low = HDF5Constants.H5F_LIBVER_EARLIEST; 1061 } 1062 else if(lowStr.equals("Earliest")) { 1063 low = HDF5Constants.H5F_LIBVER_EARLIEST; 1064 } 1065 else if(lowStr.equals("V18")) { 1066 low = HDF5Constants.H5F_LIBVER_V18; 1067 } 1068 else if(lowStr.equals("V110")) { 1069 low = HDF5Constants.H5F_LIBVER_V110; 1070 } 1071 else if(lowStr.equals("Latest")) { 1072 low = HDF5Constants.H5F_LIBVER_LATEST; 1073 } 1074 else { 1075 low = HDF5Constants.H5F_LIBVER_EARLIEST; 1076 } 1077 1078 if (highStr == null) { 1079 high = HDF5Constants.H5F_LIBVER_LATEST; 1080 } 1081 else if(highStr.equals("V18")) { 1082 high = HDF5Constants.H5F_LIBVER_V18; 1083 } 1084 else if(highStr.equals("V110")) { 1085 high = HDF5Constants.H5F_LIBVER_V110; 1086 } 1087 else if(highStr.equals("Latest")) { 1088 high = HDF5Constants.H5F_LIBVER_LATEST; 1089 } 1090 else { 1091 high = HDF5Constants.H5F_LIBVER_LATEST; 1092 } 1093 libver[0] = low; 1094 libver[1] = high; 1095 } 1096 1097 /** 1098 * Sets the bounds of library versions. 1099 * 1100 * @param lowStr 1101 * The earliest version of the library. 1102 * @param highStr 1103 * The latest version of the library. 1104 * 1105 * @throws Exception 1106 * If there is an error at the HDF5 library level. 1107 */ 1108 @Override 1109 public void setLibBounds(String lowStr, String highStr) throws Exception { 1110 long fapl = HDF5Constants.H5P_DEFAULT; 1111 1112 if (fid < 0) 1113 return; 1114 1115 fapl = H5.H5Fget_access_plist(fid); 1116 1117 try { 1118 int low = -1; 1119 int high = -1; 1120 1121 if (lowStr == null) { 1122 low = HDF5Constants.H5F_LIBVER_EARLIEST; 1123 } 1124 else if(lowStr.equals("Earliest")) { 1125 low = HDF5Constants.H5F_LIBVER_EARLIEST; 1126 } 1127 else if(lowStr.equals("V18")) { 1128 low = HDF5Constants.H5F_LIBVER_V18; 1129 } 1130 else if(lowStr.equals("V110")) { 1131 low = HDF5Constants.H5F_LIBVER_V110; 1132 } 1133 else if(lowStr.equals("Latest")) { 1134 low = HDF5Constants.H5F_LIBVER_LATEST; 1135 } 1136 else { 1137 low = HDF5Constants.H5F_LIBVER_EARLIEST; 1138 } 1139 1140 if (highStr == null) { 1141 high = HDF5Constants.H5F_LIBVER_LATEST; 1142 } 1143 else if(highStr.equals("V18")) { 1144 high = HDF5Constants.H5F_LIBVER_V18; 1145 } 1146 else if(highStr.equals("V110")) { 1147 high = HDF5Constants.H5F_LIBVER_V110; 1148 } 1149 else if(highStr.equals("Latest")) { 1150 high = HDF5Constants.H5F_LIBVER_LATEST; 1151 } 1152 else { 1153 high = HDF5Constants.H5F_LIBVER_LATEST; 1154 } 1155 1156 H5.H5Pset_libver_bounds(fapl, low, high); 1157 H5.H5Pget_libver_bounds(fapl, libver); 1158 } 1159 finally { 1160 try { 1161 H5.H5Pclose(fapl); 1162 } 1163 catch (Exception e) { 1164 log.debug("setLibBounds(): libver bounds H5Pclose(fapl {}) failure: ", fapl, e); 1165 } 1166 } 1167 } 1168 1169 /** 1170 * Gets the bounds of library versions. 1171 * 1172 * @return libver The earliest and latest version of the library. 1173 * 1174 * @throws Exception 1175 * If there is an error at the HDF5 library level. 1176 */ 1177 @Override 1178 public int[] getLibBounds() throws Exception { 1179 if (libver.length == 0) 1180 initLibBounds(); 1181 return libver; 1182 } 1183 1184 /** 1185 * Initialize the bounds of library versions 1186 * 1187 * @throws Exception 1188 * The exceptions thrown vary depending on the implementing class. 
1189 */ 1190 @Override 1191 public void initLibBounds() throws Exception { 1192 if (fid >= 0) { 1193 /* Get the file's file access property list */ 1194 long fapl = H5.H5Fget_access_plist(fid); 1195 /* Get library format */ 1196 H5.H5Pget_libver_bounds(fapl, libver); 1197 /* Close FAPL */ 1198 H5.H5Pclose(fapl); 1199 } 1200 } 1201 1202 1203 /** 1204 * Gets the bounds of library versions as text. 1205 * 1206 * @return libversion The earliest and latest version of the library. 1207 */ 1208 @Override 1209 public String getLibBoundsDescription() { 1210 String libversion = ""; 1211 1212 if (libver[0] == HDF5Constants.H5F_LIBVER_EARLIEST) { 1213 libversion = "Earliest and "; 1214 } 1215 else if (libver[0] == HDF5Constants.H5F_LIBVER_V18) { 1216 libversion = "V18 and "; 1217 } 1218 else if (libver[0] == HDF5Constants.H5F_LIBVER_V110) { 1219 libversion = "V110 and "; 1220 } 1221 else if (libver[0] == HDF5Constants.H5F_LIBVER_LATEST) { 1222 libversion = "Latest and "; 1223 } 1224 if (libver[1] == HDF5Constants.H5F_LIBVER_EARLIEST) { 1225 libversion += "Earliest"; 1226 } 1227 else if (libver[1] == HDF5Constants.H5F_LIBVER_V18) { 1228 libversion += "V18"; 1229 } 1230 else if (libver[1] == HDF5Constants.H5F_LIBVER_V110) { 1231 libversion += "V110"; 1232 } 1233 else if (libver[1] == HDF5Constants.H5F_LIBVER_LATEST) { 1234 libversion += "Latest"; 1235 } 1236 return libversion; 1237 } 1238 1239 /** 1240 * Closes file associated with this H5File instance. 1241 * 1242 * @see hdf.object.FileFormat#close() 1243 * 1244 * @throws HDF5Exception 1245 * If there is an error at the HDF5 library level. 1246 */ 1247 @Override 1248 public void close() throws HDF5Exception { 1249 if (fid < 0) { 1250 log.debug("close(): file {} is not open", fullFileName); 1251 return; 1252 } 1253 // The current working directory may be changed at Dataset.read() 1254 // by System.setProperty("user.dir", newdir) to make it work for external 1255 // datasets. We need to set it back to the original current working 1256 // directory (when hdf-java application started) before the file 1257 // is closed/opened. Otherwise, relative path, e.g. "./test.h5" may 1258 // not work 1259 String rootPath = System.getProperty("hdfview.workdir"); 1260 if (rootPath == null) { 1261 rootPath = System.getProperty("user.dir"); 1262 } 1263 System.setProperty("user.dir", rootPath);//H5.H5Dchdir_ext(rootPath); 1264 1265 // clean up unused objects 1266 if (rootObject != null) { 1267 HObject theObj = null; 1268 Iterator<HObject> it = getMembersBreadthFirst(rootObject).iterator(); 1269 while (it.hasNext()) { 1270 theObj = it.next(); 1271 1272 if (theObj instanceof Dataset) { 1273 log.trace("close(): clear Dataset {}", ((Dataset) theObj).toString()); 1274 ((Dataset) theObj).clear(); 1275 } 1276 else if (theObj instanceof Group) { 1277 log.trace("close(): clear Group {}", ((Group) theObj).toString()); 1278 ((Group) theObj).clear(); 1279 } 1280 } 1281 } 1282 1283 // Close all open objects associated with this file. 
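        // Query how many identifiers are still open against this file, retrieve them, and
        // close each one according to its identifier type (dataset, group, named datatype
        // or attribute) before the file itself is flushed and closed.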
1284 try { 1285 int type = -1; 1286 long[] oids; 1287 long n = H5.H5Fget_obj_count(fid, HDF5Constants.H5F_OBJ_ALL); 1288 log.trace("close(): open objects={}", n); 1289 1290 if (n > 0) { 1291 if (n < Integer.MIN_VALUE || n > Integer.MAX_VALUE) throw new Exception("Invalid int size"); 1292 1293 oids = new long[(int)n]; 1294 H5.H5Fget_obj_ids(fid, HDF5Constants.H5F_OBJ_ALL, n, oids); 1295 1296 for (int i = 0; i < (int)n; i++) { 1297 log.trace("close(): object[{}] id={}", i, oids[i]); 1298 type = H5.H5Iget_type(oids[i]); 1299 1300 if (HDF5Constants.H5I_DATASET == type) { 1301 try { 1302 H5.H5Dclose(oids[i]); 1303 } 1304 catch (Exception ex2) { 1305 log.debug("close(): Object[{}] H5Dclose(oids[{}] {}) failure: ", i, i, oids[i], ex2); 1306 } 1307 } 1308 else if (HDF5Constants.H5I_GROUP == type) { 1309 try { 1310 H5.H5Gclose(oids[i]); 1311 } 1312 catch (Exception ex2) { 1313 log.debug("close(): Object[{}] H5Gclose(oids[{}] {}) failure: ", i, i, oids[i], ex2); 1314 } 1315 } 1316 else if (HDF5Constants.H5I_DATATYPE == type) { 1317 try { 1318 H5.H5Tclose(oids[i]); 1319 } 1320 catch (Exception ex2) { 1321 log.debug("close(): Object[{}] H5Tclose(oids[{}] {}) failure: ", i, i, oids[i], ex2); 1322 } 1323 } 1324 else if (HDF5Constants.H5I_ATTR == type) { 1325 try { 1326 H5.H5Aclose(oids[i]); 1327 } 1328 catch (Exception ex2) { 1329 log.debug("close(): Object[{}] H5Aclose(oids[{}] {}) failure: ", i, i, oids[i], ex2); 1330 } 1331 } 1332 } // (int i=0; i<n; i++) 1333 } // ( n>0) 1334 } 1335 catch (Exception ex) { 1336 log.debug("close(): failure: ", ex); 1337 } 1338 1339 try { 1340 H5.H5Fflush(fid, HDF5Constants.H5F_SCOPE_GLOBAL); 1341 } 1342 catch (Exception ex) { 1343 log.debug("close(): H5Fflush(fid {}) failure: ", fid, ex); 1344 } 1345 1346 try { 1347 H5.H5Fclose(fid); 1348 } 1349 catch (Exception ex) { 1350 log.debug("close(): H5Fclose(fid {}) failure: ", fid, ex); 1351 } 1352 1353 // Set fid to -1 but don't reset rootObject 1354 fid = -1; 1355 } 1356 1357 /** 1358 * Returns the root object of the open HDF5 File. 1359 * 1360 * @see hdf.object.FileFormat#getRootObject() 1361 */ 1362 @Override 1363 public HObject getRootObject() { 1364 return rootObject; 1365 } 1366 1367 /* 1368 * (non-Javadoc) 1369 * 1370 * @see hdf.object.FileFormat#get(java.lang.String) 1371 */ 1372 @Override 1373 public HObject get(String path) throws Exception { 1374 log.trace("get({}): start", path); 1375 HObject obj = null; 1376 1377 if ((path == null) || (path.length() <= 0)) { 1378 log.debug("get(): path is null or invalid path length"); 1379 System.err.println("(path == null) || (path.length() <= 0)"); 1380 return null; 1381 } 1382 1383 // replace the wrong slash and get rid of "//" 1384 path = path.replace('\\', '/'); 1385 path = "/" + path; 1386 path = path.replaceAll("//", "/"); 1387 1388 // the whole file tree is loaded. 
find the object in the tree 1389 if (rootObject != null) { 1390 obj = findObject(this, path); 1391 } 1392 1393 // found object in memory 1394 if (obj != null) { 1395 log.trace("get(): Found object in memory"); 1396 return obj; 1397 } 1398 1399 // open only the requested object 1400 String name = null; 1401 String pPath = null; 1402 if (path.equals("/")) { 1403 name = "/"; // the root 1404 } 1405 else { 1406 // separate the parent path and the object name 1407 if (path.endsWith("/")) { 1408 path = path.substring(0, path.length() - 1); 1409 } 1410 1411 int idx = path.lastIndexOf('/'); 1412 name = path.substring(idx + 1); 1413 if (idx == 0) { 1414 pPath = "/"; 1415 } 1416 else { 1417 pPath = path.substring(0, idx); 1418 } 1419 } 1420 1421 // do not open the full tree structure, only the file handler 1422 long fid_before_open = fid; 1423 fid = open(false); 1424 if (fid < 0) { 1425 log.debug("get(): Invalid FID"); 1426 System.err.println("Could not open file handler"); 1427 return null; 1428 } 1429 1430 try { 1431 H5O_info_t info; 1432 int objType; 1433 long oid = H5.H5Oopen(fid, path, HDF5Constants.H5P_DEFAULT); 1434 1435 if (oid >= 0) { 1436 info = H5.H5Oget_info(oid); 1437 objType = info.type; 1438 if (objType == HDF5Constants.H5O_TYPE_DATASET) { 1439 long did = -1; 1440 try { 1441 did = H5.H5Dopen(fid, path, HDF5Constants.H5P_DEFAULT); 1442 obj = getDataset(did, name, pPath); 1443 } 1444 finally { 1445 try { 1446 H5.H5Dclose(did); 1447 } 1448 catch (Exception ex) { 1449 log.debug("get(): {} H5Dclose(did {}) failure: ", path, did, ex); 1450 } 1451 } 1452 } 1453 else if (objType == HDF5Constants.H5O_TYPE_GROUP) { 1454 long gid = -1; 1455 try { 1456 gid = H5.H5Gopen(fid, path, HDF5Constants.H5P_DEFAULT); 1457 H5Group pGroup = null; 1458 if (pPath != null) { 1459 pGroup = new H5Group(this, null, pPath, null); 1460 obj = getGroup(gid, name, pGroup); 1461 pGroup.addToMemberList(obj); 1462 } 1463 else { 1464 obj = getGroup(gid, name, pGroup); 1465 } 1466 } 1467 finally { 1468 try { 1469 H5.H5Gclose(gid); 1470 } 1471 catch (Exception ex) { 1472 log.debug("get(): {} H5Gclose(gid {}) failure: ", path, gid, ex); 1473 } 1474 } 1475 } 1476 else if (objType == HDF5Constants.H5O_TYPE_NAMED_DATATYPE) { 1477 obj = new H5Datatype(this, name, pPath); 1478 } 1479 } 1480 try { 1481 H5.H5Oclose(oid); 1482 } 1483 catch (Exception ex) { 1484 log.debug("get(): H5Oclose(oid {}) failure: ", oid, ex); 1485 ex.printStackTrace(); 1486 } 1487 } 1488 catch (Exception ex) { 1489 log.debug("get(): Exception finding obj {}", path, ex); 1490 obj = null; 1491 } 1492 finally { 1493 if ((fid_before_open <= 0) && (obj == null)) { 1494 // close the fid that is not attached to any object 1495 try { 1496 H5.H5Fclose(fid); 1497 } 1498 catch (Exception ex) { 1499 log.debug("get(): {} H5Fclose(fid {}) failure: ", path, fid, ex); 1500 } 1501 fid = fid_before_open; 1502 } 1503 } 1504 1505 return obj; 1506 } 1507 1508 1509 /** 1510 * Creates a named datatype in a file. 1511 * <p> 1512 * The following code creates a named datatype in a file. 
1513 * 1514 * <pre> 1515 * H5File file = (H5File) h5file.createInstance("test_hdf5.h5", FileFormat.WRITE); 1516 * Datatype dtype = file.createDatatype( 1517 * Datatype.CLASS_INTEGER, 1518 * 4, 1519 * Datatype.NATIVE, 1520 * Datatype.NATIVE, 1521 * basetype); 1522 * H5Datatype h5dtype = file.createNamedDatatype( 1523 * dtype, 1524 * null, 1525 * "Native Integer"); 1526 * </pre> 1527 * 1528 * @param tnative 1529 * native datatype previously created 1530 * @param name 1531 * name of the datatype to create, e.g. "Native Integer". 1532 * @return The new datatype if successful; otherwise returns null. 1533 * @throws Exception 1534 * The exceptions thrown vary depending on the implementing class. 1535 */ 1536 public Datatype createNamedDatatype(Datatype tnative, String name) throws Exception { 1537 log.trace("createNamedDatatype(): start: name={}", name); 1538 1539 H5Datatype dtype = null; 1540 1541 if (name != null ) { 1542 long tid = -1; 1543 log.trace("createNamedDatatype(): name={}", name); 1544 try { 1545 tnative.setFullname(name, null); 1546 } 1547 catch (Exception ex) { 1548 log.debug("createNamedDatatype():setName(): {} failure: {}", name, ex.getMessage()); 1549 } 1550 try { 1551 if ((tid = tnative.createNative()) < 0) { 1552 log.debug("createNamedDatatype(): createNative() failure"); 1553 throw new Exception("createNative() failed"); 1554 } 1555 log.trace("createNamedDatatype(): createNative gets id={}", tid); 1556 1557 H5.H5Tcommit(fid, name, tid, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 1558 1559 byte[] ref_buf = H5.H5Rcreate(fid, name, HDF5Constants.H5R_OBJECT, -1); 1560 long l = HDFNativeData.byteToLong(ref_buf, 0); 1561 1562 long[] oid = new long[1]; 1563 oid[0] = l; // save the object ID 1564 1565 dtype = new H5Datatype(this, name, null, oid); 1566 } 1567 finally { 1568 H5.H5Tclose(tid); 1569 } 1570 } 1571 else { 1572 dtype = (H5Datatype) tnative; 1573 } 1574 1575 return dtype; 1576 } 1577 1578 /*************************************************************************** 1579 * Methods related to Datatypes and HObjects in HDF5 Files. Strictly speaking, these methods aren't related to 1580 * H5File and the actions could be carried out through the H5Group, H5Datatype and H5*DS classes. But, in some cases 1581 * they allow a null input and expect the generated object to be of HDF5 type. So, we put them in the H5File class 1582 * so that we create the proper type of HObject... H5Group for example. 1583 * 1584 * Here again, if there could be Implementation Class methods we'd use those. But, since we can't override class 1585 * methods (they can only be shadowed in Java), these are instance methods. 
1586 * 1587 **************************************************************************/ 1588 1589 /* 1590 * (non-Javadoc) 1591 * 1592 * @see hdf.object.FileFormat#createDatatype(int, int, int, int) 1593 */ 1594 @Override 1595 public Datatype createDatatype(int tclass, int tsize, int torder, int tsign) throws Exception { 1596 return new H5Datatype(tclass, tsize, torder, tsign); 1597 } 1598 1599 /* 1600 * (non-Javadoc) 1601 * 1602 * @see hdf.object.FileFormat#createDatatype(int, int, int, int, Datatype) 1603 */ 1604 @Override 1605 public Datatype createDatatype(int tclass, int tsize, int torder, int tsign, Datatype tbase) throws Exception { 1606 return new H5Datatype(tclass, tsize, torder, tsign, tbase); 1607 } 1608 1609 /* 1610 * (non-Javadoc) 1611 * 1612 * @see hdf.object.FileFormat#createScalarDS(java.lang.String, hdf.object.Group, hdf.object.Datatype, 1613 * long[], long[], long[], int, java.lang.Object) 1614 */ 1615 @Override 1616 public Dataset createScalarDS(String name, Group pgroup, Datatype type, 1617 long[] dims, long[] maxdims, long[] chunks, 1618 int gzip, Object fillValue, Object data) throws Exception 1619 { 1620 log.trace("createScalarDS(): name={}", name); 1621 if (pgroup == null) { 1622 // create new dataset at the root group by default 1623 pgroup = (Group) get("/"); 1624 } 1625 1626 return H5ScalarDS.create(name, pgroup, type, dims, maxdims, chunks, gzip, fillValue, data); 1627 } 1628 1629 /* 1630 * (non-Javadoc) 1631 * 1632 * @see hdf.object.FileFormat#createCompoundDS(java.lang.String, hdf.object.Group, long[], long[], long[], 1633 * int, java.lang.String[], hdf.object.Datatype[], int[], java.lang.Object) 1634 */ 1635 @Override 1636 public Dataset createCompoundDS(String name, Group pgroup, 1637 long[] dims, long[] maxdims, long[] chunks, int gzip, 1638 String[] memberNames, Datatype[] memberDatatypes, int[] memberSizes, Object data) throws Exception 1639 { 1640 log.trace("createCompoundDS(): start: name={}", name); 1641 int nMembers = memberNames.length; 1642 int memberRanks[] = new int[nMembers]; 1643 long memberDims[][] = new long[nMembers][1]; 1644 Dataset ds = null; 1645 1646 for (int i = 0; i < nMembers; i++) { 1647 memberRanks[i] = 1; 1648 if (memberSizes == null) { 1649 memberDims[i][0] = 1; 1650 } 1651 else { 1652 memberDims[i][0] = memberSizes[i]; 1653 } 1654 } 1655 1656 if (pgroup == null) { 1657 // create new dataset at the root group by default 1658 pgroup = (Group) get("/"); 1659 } 1660 ds = H5CompoundDS.create(name, pgroup, dims, maxdims, chunks, gzip, 1661 memberNames, memberDatatypes, memberRanks, memberDims, data); 1662 1663 return ds; 1664 } 1665 1666 /* 1667 * (non-Javadoc) 1668 * 1669 * @see hdf.object.FileFormat#createImage(java.lang.String, hdf.object.Group, hdf.object.Datatype, 1670 * long[], long[], long[], int, int, int, java.lang.Object) 1671 */ 1672 @Override 1673 public Dataset createImage(String name, Group pgroup, Datatype type, 1674 long[] dims, long[] maxdims, long[] chunks, 1675 int gzip, int ncomp, int interlace, Object data) throws Exception 1676 { 1677 log.trace("createImage(): start: name={}", name); 1678 if (pgroup == null) { // create at the root group by default 1679 pgroup = (Group) get("/"); 1680 } 1681 1682 H5ScalarDS dataset = (H5ScalarDS)H5ScalarDS.create(name, pgroup, type, dims, maxdims, chunks, gzip, data); 1683 1684 try { 1685 H5File.createImageAttributes(dataset, interlace); 1686 dataset.setIsImage(true); 1687 } 1688 catch (Exception ex) { 1689 log.debug("createImage(): {} createImageAttributtes failure: ", name, ex); 
1690 } 1691 1692 return dataset; 1693 } 1694 1695 /*** 1696 * Creates a new group with specified name in existing group. 1697 * 1698 * @see hdf.object.FileFormat#createGroup(java.lang.String, hdf.object.Group) 1699 */ 1700 @Override 1701 public Group createGroup(String name, Group pgroup) throws Exception { 1702 return this.createGroup(name, pgroup, HDF5Constants.H5P_DEFAULT); 1703 } 1704 1705 /*** 1706 * Creates a new group with specified name in existing group and with the group creation properties list, gplist. 1707 * 1708 * @see hdf.object.h5.H5Group#create(java.lang.String, hdf.object.Group, long...) 1709 * 1710 */ 1711 @Override 1712 public Group createGroup(String name, Group pgroup, long... gplist) throws Exception { 1713 // create new group at the root 1714 if (pgroup == null) { 1715 pgroup = (Group) this.get("/"); 1716 } 1717 1718 return H5Group.create(name, pgroup, gplist); 1719 } 1720 1721 /*** 1722 * Creates the group creation property list identifier, gcpl. This identifier is used when creating Groups. 1723 * 1724 * @see hdf.object.FileFormat#createGcpl(int, int, int) 1725 * 1726 */ 1727 @Override 1728 public long createGcpl(int creationorder, int maxcompact, int mindense) throws Exception { 1729 long gcpl = -1; 1730 try { 1731 gcpl = H5.H5Pcreate(HDF5Constants.H5P_GROUP_CREATE); 1732 if (gcpl >= 0) { 1733 // Set link creation order. 1734 if (creationorder == Group.CRT_ORDER_TRACKED) { 1735 log.trace("createGcpl(): creation order ORDER_TRACKED"); 1736 H5.H5Pset_link_creation_order(gcpl, HDF5Constants.H5P_CRT_ORDER_TRACKED); 1737 } 1738 else if (creationorder == Group.CRT_ORDER_INDEXED) { 1739 log.trace("createGcpl(): creation order ORDER_INDEXED"); 1740 H5.H5Pset_link_creation_order(gcpl, HDF5Constants.H5P_CRT_ORDER_TRACKED + HDF5Constants.H5P_CRT_ORDER_INDEXED); 1741 } 1742 // Set link storage. 1743 H5.H5Pset_link_phase_change(gcpl, maxcompact, mindense); 1744 } 1745 } 1746 catch (Exception ex) { 1747 log.debug("createGcpl(): failure: ", ex); 1748 ex.printStackTrace(); 1749 } 1750 1751 return gcpl; 1752 } 1753 1754 /* 1755 * (non-Javadoc) 1756 * 1757 * @see hdf.object.FileFormat#createLink(hdf.object.Group, java.lang.String, hdf.object.HObject) 1758 */ 1759 @Override 1760 public HObject createLink(Group parentGroup, String name, Object currentObj) throws Exception { 1761 if (currentObj instanceof HObject) 1762 return this.createLink(parentGroup, name, (HObject) currentObj, Group.LINK_TYPE_HARD); 1763 else if (currentObj instanceof String) 1764 return this.createLink(parentGroup, name, (String) currentObj, Group.LINK_TYPE_HARD); 1765 1766 return null; 1767 } 1768 1769 /** 1770 * Creates a link to an object in the open file. 1771 * <p> 1772 * If parentGroup is null, the new link is created in the root group. 1773 * 1774 * @param parentGroup 1775 * The group where the link is created. 1776 * @param name 1777 * The name of the link. 1778 * @param currentObj 1779 * The existing object the new link will reference. 1780 * @param lType 1781 * The type of link to be created. It can be a hard link, a soft link or an external link. 1782 * 1783 * @return The object pointed to by the new link if successful; otherwise returns null. 1784 * 1785 * @throws Exception 1786 * The exceptions thrown vary depending on the implementing class. 
1787 */ 1788 @Override 1789 public HObject createLink(Group parentGroup, String name, HObject currentObj, int lType) throws Exception { 1790 log.trace("createLink(): start: name={}", name); 1791 HObject obj = null; 1792 int type = 0; 1793 String current_full_name = null; 1794 String new_full_name = null; 1795 String parent_path = null; 1796 1797 if (currentObj == null) { 1798 log.debug("createLink(): Link target is null"); 1799 throw new HDF5Exception("The object pointed to by the link cannot be null."); 1800 } 1801 if ((parentGroup == null) || parentGroup.isRoot()) { 1802 parent_path = HObject.SEPARATOR; 1803 } 1804 else { 1805 parent_path = parentGroup.getPath() + HObject.SEPARATOR + parentGroup.getName() + HObject.SEPARATOR; 1806 } 1807 1808 new_full_name = parent_path + name; 1809 1810 if (lType == Group.LINK_TYPE_HARD) { 1811 type = HDF5Constants.H5L_TYPE_HARD; 1812 log.trace("createLink(): type H5L_TYPE_HARD"); 1813 } 1814 else if (lType == Group.LINK_TYPE_SOFT) { 1815 type = HDF5Constants.H5L_TYPE_SOFT; 1816 log.trace("createLink(): type H5L_TYPE_SOFT"); 1817 } 1818 else if (lType == Group.LINK_TYPE_EXTERNAL) { 1819 type = HDF5Constants.H5L_TYPE_EXTERNAL; 1820 log.trace("createLink(): type H5L_TYPE_EXTERNAL"); 1821 } 1822 1823 if (H5.H5Lexists(fid, new_full_name, HDF5Constants.H5P_DEFAULT)) { 1824 H5.H5Ldelete(fid, new_full_name, HDF5Constants.H5P_DEFAULT); 1825 } 1826 1827 if (type == HDF5Constants.H5L_TYPE_HARD) { 1828 if ((currentObj instanceof Group) && ((Group) currentObj).isRoot()) { 1829 log.debug("createLink(): cannot create link to root group"); 1830 throw new HDF5Exception("Cannot make a link to the root group."); 1831 } 1832 current_full_name = currentObj.getPath() + HObject.SEPARATOR + currentObj.getName(); 1833 1834 H5.H5Lcreate_hard(fid, current_full_name, fid, new_full_name, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 1835 } 1836 1837 else if (type == HDF5Constants.H5L_TYPE_SOFT) { 1838 H5.H5Lcreate_soft(currentObj.getFullName(), fid, new_full_name, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 1839 } 1840 1841 else if (type == HDF5Constants.H5L_TYPE_EXTERNAL) { 1842 H5.H5Lcreate_external(currentObj.getFile(), currentObj.getFullName(), fid, new_full_name, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 1843 } 1844 1845 if (currentObj instanceof Group) { 1846 log.trace("createLink(): Link target is type H5Group"); 1847 obj = new H5Group(this, name, parent_path, parentGroup); 1848 } 1849 else if (currentObj instanceof H5Datatype) { 1850 log.trace("createLink(): Link target is type H5Datatype"); 1851 obj = new H5Datatype(this, name, parent_path); 1852 } 1853 else if (currentObj instanceof H5CompoundDS) { 1854 log.trace("createLink(): Link target is type H5CompoundDS"); 1855 obj = new H5CompoundDS(this, name, parent_path); 1856 } 1857 else if (currentObj instanceof H5ScalarDS) { 1858 log.trace("createLink(): Link target is type H5ScalarDS"); 1859 obj = new H5ScalarDS(this, name, parent_path); 1860 } 1861 1862 return obj; 1863 } 1864 1865 /** 1866 * Creates a soft or external link to object in a file that does not exist at the time the link is created. 1867 * 1868 * @param parentGroup 1869 * The group where the link is created. 1870 * @param name 1871 * The name of the link. 1872 * @param currentObj 1873 * The name of the object the new link will reference. The object doesn't have to exist. 1874 * @param lType 1875 * The type of link to be created. 
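     *            For a dangling external link this is typically {@link Group#LINK_TYPE_EXTERNAL}, with
     *            <code>currentObj</code> given in the form (the file and object names below are hypothetical)
     *            <code>"missing_file.h5" + FileFormat.FILE_OBJ_SEP + "/dset1"</code>.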
1876 * 1877 * @return The H5Link object pointed to by the new link if successful; otherwise returns null. 1878 * 1879 * @throws Exception 1880 * The exceptions thrown vary depending on the implementing class. 1881 */ 1882 @Override 1883 public HObject createLink(Group parentGroup, String name, String currentObj, int lType) throws Exception { 1884 log.trace("createLink(): start: name={}", name); 1885 HObject obj = null; 1886 int type = 0; 1887 String new_full_name = null; 1888 String parent_path = null; 1889 1890 if (currentObj == null) { 1891 log.debug("createLink(): Link target is null"); 1892 throw new HDF5Exception("The object pointed to by the link cannot be null."); 1893 } 1894 if ((parentGroup == null) || parentGroup.isRoot()) { 1895 parent_path = HObject.SEPARATOR; 1896 } 1897 else { 1898 parent_path = parentGroup.getPath() + HObject.SEPARATOR + parentGroup.getName() + HObject.SEPARATOR; 1899 } 1900 1901 new_full_name = parent_path + name; 1902 1903 if (lType == Group.LINK_TYPE_HARD) { 1904 type = HDF5Constants.H5L_TYPE_HARD; 1905 log.trace("createLink(): type H5L_TYPE_HARD"); 1906 } 1907 else if (lType == Group.LINK_TYPE_SOFT) { 1908 type = HDF5Constants.H5L_TYPE_SOFT; 1909 log.trace("createLink(): type H5L_TYPE_SOFT"); 1910 } 1911 else if (lType == Group.LINK_TYPE_EXTERNAL) { 1912 type = HDF5Constants.H5L_TYPE_EXTERNAL; 1913 log.trace("createLink(): type H5L_TYPE_EXTERNAL"); 1914 } 1915 1916 if (H5.H5Lexists(fid, new_full_name, HDF5Constants.H5P_DEFAULT)) { 1917 H5.H5Ldelete(fid, new_full_name, HDF5Constants.H5P_DEFAULT); 1918 } 1919 1920 if (type == HDF5Constants.H5L_TYPE_SOFT) { 1921 H5.H5Lcreate_soft(currentObj, fid, new_full_name, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 1922 } 1923 1924 else if (type == HDF5Constants.H5L_TYPE_EXTERNAL) { 1925 String fileName = null; 1926 String objectName = null; 1927 1928 // separate the object name and the file name 1929 fileName = currentObj.substring(0, currentObj.lastIndexOf(FileFormat.FILE_OBJ_SEP)); 1930 objectName = currentObj.substring(currentObj.indexOf(FileFormat.FILE_OBJ_SEP)); 1931 objectName = objectName.substring(3); 1932 1933 H5.H5Lcreate_external(fileName, objectName, fid, new_full_name, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 1934 } 1935 1936 if (name.startsWith(HObject.SEPARATOR)) { 1937 name = name.substring(1); 1938 } 1939 obj = new H5Link(this, name, parent_path); 1940 1941 return obj; 1942 } 1943 1944 /** 1945 * reload the sub-tree structure from file. 1946 * <p> 1947 * reloadTree(Group g) is useful when the structure of the group in file is changed while the group structure in 1948 * memory is not changed. 1949 * 1950 * @param g 1951 * the group where the structure is to be reloaded in memory 1952 */ 1953 public void reloadTree(Group g) { 1954 if (fid < 0 || rootObject == null || g == null) { 1955 log.debug("reloadTree(): Invalid fid or null object"); 1956 return; 1957 } 1958 1959 depth_first(g, Integer.MIN_VALUE); 1960 } 1961 1962 /* 1963 * (non-Javadoc) NOTE: Object references are copied but not updated by this method. 
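     *
     * A minimal usage sketch (hypothetical objects; copies /data/dset1 into /backup under a new name):
     *   HObject src = h5file.get("/data/dset1");
     *   Group dst = (Group) h5file.get("/backup");
     *   HObject copied = h5file.copy(src, dst, "dset1_copy");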
1964 * 1965 * @see hdf.object.FileFormat#copy(hdf.object.HObject, hdf.object.Group, java.lang.String) 1966 */ 1967 @Override 1968 public HObject copy(HObject srcObj, Group dstGroup, String dstName) throws Exception { 1969 log.trace("copy(): start: srcObj={} dstGroup={} dstName={}", srcObj, dstGroup, dstName); 1970 if ((srcObj == null) || (dstGroup == null)) { 1971 log.debug("copy(): srcObj or dstGroup is null"); 1972 return null; 1973 } 1974 1975 if (dstName == null) { 1976 dstName = srcObj.getName(); 1977 } 1978 1979 List<HObject> members = dstGroup.getMemberList(); 1980 int n = members.size(); 1981 for (int i = 0; i < n; i++) { 1982 HObject obj = members.get(i); 1983 String name = obj.getName(); 1984 while (name.equals(dstName)) 1985 dstName += "~copy"; 1986 } 1987 1988 HObject newObj = null; 1989 if (srcObj instanceof Dataset) { 1990 log.trace("copy(): srcObj instanceof Dataset"); 1991 newObj = copyDataset((Dataset) srcObj, (H5Group) dstGroup, dstName); 1992 } 1993 else if (srcObj instanceof H5Group) { 1994 log.trace("copy(): srcObj instanceof H5Group"); 1995 newObj = copyGroup((H5Group) srcObj, (H5Group) dstGroup, dstName); 1996 } 1997 else if (srcObj instanceof H5Datatype) { 1998 log.trace("copy(): srcObj instanceof H5Datatype"); 1999 newObj = copyDatatype((H5Datatype) srcObj, (H5Group) dstGroup, dstName); 2000 } 2001 2002 return newObj; 2003 } 2004 2005 /* 2006 * (non-Javadoc) 2007 * 2008 * @see hdf.object.FileFormat#delete(hdf.object.HObject) 2009 */ 2010 @Override 2011 public void delete(HObject obj) throws Exception { 2012 if ((obj == null) || (fid < 0)) { 2013 log.debug("delete(): Invalid FID or object is null"); 2014 return; 2015 } 2016 2017 String name = obj.getPath() + obj.getName(); 2018 2019 H5.H5Ldelete(fid, name, HDF5Constants.H5P_DEFAULT); 2020 } 2021 2022 /* 2023 * (non-Javadoc) 2024 * 2025 * @see hdf.object.FileFormat#writeAttribute(hdf.object.HObject, hdf.object.Attribute, boolean) 2026 */ 2027 @Override 2028 public void writeAttribute(HObject obj, Attribute attr, boolean attrExisted) throws HDF5Exception { 2029 String obj_name = obj.getFullName(); 2030 String name = attr.getName(); 2031 long tid = -1; 2032 long sid = -1; 2033 long aid = -1; 2034 log.trace("writeAttribute(): name is {}", name); 2035 2036 long objID = obj.open(); 2037 if (objID < 0) { 2038 log.debug("writeAttribute(): Invalid Object ID"); 2039 return; 2040 } 2041 2042 if ((tid = attr.getDatatype().createNative()) >= 0) { 2043 log.trace("writeAttribute(): tid {} from toNative :{}", tid, attr.getDatatype().getDescription()); 2044 try { 2045 if (attr.isScalar()) 2046 sid = H5.H5Screate(HDF5Constants.H5S_SCALAR); 2047 else 2048 sid = H5.H5Screate_simple(attr.getRank(), attr.getDims(), null); 2049 2050 if (attrExisted) { 2051 aid = H5.H5Aopen_by_name(objID, obj_name, name, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 2052 } 2053 else { 2054 aid = H5.H5Acreate(objID, name, tid, sid, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 2055 } 2056 log.trace("writeAttribute(): aid {} opened/created", aid); 2057 2058 // update value of the attribute 2059 Object attrValue; 2060 try { 2061 attrValue = attr.getData(); 2062 } 2063 catch (Exception ex) { 2064 attrValue = null; 2065 log.trace("writeAttribute(): getData() failure:", ex); 2066 } 2067 2068 log.trace("writeAttribute(): getValue"); 2069 if (attrValue != null) { 2070 if (attr.getDatatype().isVLEN()) { 2071 log.trace("writeAttribute(): isVLEN"); 2072 try { 2073 /* 2074 * must use native type to write attribute data to file (see bug 1069) 2075 */ 
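                            // Convert the file datatype to its native in-memory equivalent and close the
                            // original (temporary) identifier before writing the variable-length data.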
2076 long tmptid = tid; 2077 tid = H5.H5Tget_native_type(tmptid); 2078 try { 2079 H5.H5Tclose(tmptid); 2080 } 2081 catch (Exception ex) { 2082 log.debug("writeAttribute(): H5Tclose(tmptid {}) failure: ", tmptid, ex); 2083 } 2084 log.trace("writeAttribute(): H5.H5AwriteVL, {} : {}", name, attr.getDatatype().getDescription()); 2085 if ((attrValue instanceof String) || (attr.getDims().length == 1)) { 2086 H5.H5AwriteVL(aid, tid, (String[]) attrValue); 2087 } 2088 else { 2089 log.info("writeAttribute(): Datatype is not a string, unable to write {} data", name); 2090 } 2091 } 2092 catch (Exception ex) { 2093 log.debug("writeAttribute(): native type failure: ", name, ex); 2094 } 2095 } 2096 else { 2097 if (attr.getDatatype().isRef() && attrValue instanceof String) { 2098 // reference is a path+name to the object 2099 attrValue = H5.H5Rcreate(getFID(), (String) attrValue, HDF5Constants.H5R_OBJECT, -1); 2100 log.trace("writeAttribute(): Attribute class is CLASS_REFERENCE"); 2101 } 2102 else if (Array.get(attrValue, 0) instanceof String) { 2103 long size = attr.getDatatype().getDatatypeSize(); 2104 int len = ((String[]) attrValue).length; 2105 byte[] bval = Dataset.stringToByte((String[]) attrValue, (int)size); 2106 if (bval != null && bval.length == size * len) { 2107 bval[bval.length - 1] = 0; 2108 attrValue = bval; 2109 } 2110 log.trace("writeAttribute(): String={}: {}", attrValue, name); 2111 } 2112 2113 try { 2114 /* 2115 * must use native type to write attribute data to file (see bug 1069) 2116 */ 2117 long tmptid = tid; 2118 tid = H5.H5Tget_native_type(tmptid); 2119 try { 2120 H5.H5Tclose(tmptid); 2121 } 2122 catch (Exception ex) { 2123 log.debug("writeAttribute(): H5Tclose(tmptid {}) failure: ", tmptid, ex); 2124 } 2125 log.trace("writeAttribute(): H5.H5Awrite, {} :{}", name, attr.getDatatype().getDescription()); 2126 H5.H5Awrite(aid, tid, attrValue); 2127 } 2128 catch (Exception ex) { 2129 log.debug("writeAttribute(): native type failure: ", ex); 2130 } 2131 } 2132 } // (attrValue != null) 2133 } 2134 finally { 2135 try { 2136 H5.H5Tclose(tid); 2137 } 2138 catch (Exception ex) { 2139 log.debug("writeAttribute(): H5Tclose(tid {}) failure: ", tid, ex); 2140 } 2141 try { 2142 H5.H5Sclose(sid); 2143 } 2144 catch (Exception ex) { 2145 log.debug("writeAttribute(): H5Sclose(sid {}) failure: ", sid, ex); 2146 } 2147 try { 2148 H5.H5Aclose(aid); 2149 } 2150 catch (Exception ex) { 2151 log.debug("writeAttribute(): H5Aclose(aid {}) failure: ", aid, ex); 2152 } 2153 } 2154 } 2155 else { 2156 log.debug("writeAttribute(): toNative failure"); 2157 } 2158 2159 obj.close(objID); 2160 } 2161 2162 /*************************************************************************** 2163 * Implementations for methods specific to H5File 2164 **************************************************************************/ 2165 2166 /** 2167 * Opens a file with specific file access property list. 2168 * <p> 2169 * This function does the same as "long open()" except the you can also pass an HDF5 file access property to file 2170 * open. For example, 2171 * 2172 * <pre> 2173 * // All open objects remaining in the file are closed then file is closed 2174 * long plist = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS); 2175 * H5.H5Pset_fclose_degree(plist, HDF5Constants.H5F_CLOSE_STRONG); 2176 * long fid = open(plist); 2177 * </pre> 2178 * 2179 * @param plist 2180 * a file access property list identifier. 2181 * 2182 * @return the file identifier if successful; otherwise returns negative value. 
2183     *
2184     * @throws Exception
2185     *             If there is a failure.
2186     */
2187    public long open(long plist) throws Exception {
2188        return open(true, plist);
2189    }
2190
2191    /***************************************************************************
2192     * Private methods.
2193     **************************************************************************/
2194
2195    /**
2196     * Opens access to this file.
2197     *
2198     * @param loadFullHierarchy
2199     *            if true, load the full hierarchy into memory; otherwise just open the file identifier.
2200     *
2201     * @return the file identifier if successful; otherwise returns a negative value.
2202     *
2203     * @throws Exception
2204     *             If there is a failure.
2205     */
2206    private long open(boolean loadFullHierarchy) throws Exception {
2207        long the_fid = -1;
2208
2209        long plist = HDF5Constants.H5P_DEFAULT;
2210
2211        // BUG: HDF5Constants.H5F_CLOSE_STRONG does not flush cache
2212        /**
2213         * try { // All open objects remaining in the file are closed, then the file is closed
2214         *     plist = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
2215         *     H5.H5Pset_fclose_degree(plist, HDF5Constants.H5F_CLOSE_STRONG); } catch (Exception ex) {}
2216         * the_fid = open(loadFullHierarchy, plist); try { H5.H5Pclose(plist); } catch (Exception ex) {}
2217         */
2218
2219        log.trace("open(): loadFull={}", loadFullHierarchy);
2220        the_fid = open(loadFullHierarchy, plist);
2221
2222        return the_fid;
2223    }
2224
2225    /**
2226     * Opens access to this file.
2227     *
2228     * @param loadFullHierarchy
2229     *            if true, load the full hierarchy into memory; otherwise just open the file identifier.
2230     *
2231     * @return the file identifier if successful; otherwise returns a negative value.
2232     *
2233     * @throws Exception
2234     *             If there is a failure.
2235     */
2236    private long open(boolean loadFullHierarchy, long plist) throws Exception {
2237        log.trace("open(loadFullHierarchy = {}, plist = {}): start", loadFullHierarchy, plist);
2238        if (fid > 0) {
2239            log.trace("open(): FID already opened");
2240            return fid; // file is opened already
2241        }
2242
2243        // The cwd may be changed at Dataset.read() by System.setProperty("user.dir", newdir)
2244        // to make it work for external datasets. We need to set it back
2245        // before the file is closed/opened.
2246 String rootPath = System.getProperty("hdfview.workdir"); 2247 if (rootPath == null) { 2248 rootPath = System.getProperty("user.dir"); 2249 } 2250 System.setProperty("user.dir", rootPath); 2251 2252 // check for valid file access permission 2253 if (flag < 0) { 2254 log.debug("open(): Invalid access identifier -- " + flag); 2255 throw new HDF5Exception("Invalid access identifer -- " + flag); 2256 } 2257 else if (HDF5Constants.H5F_ACC_CREAT == flag) { 2258 // create a new file 2259 log.trace("open(): create file"); 2260 fid = H5.H5Fcreate(fullFileName, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 2261 H5.H5Fflush(fid, HDF5Constants.H5F_SCOPE_LOCAL); 2262 H5.H5Fclose(fid); 2263 flag = HDF5Constants.H5F_ACC_RDWR; 2264 } 2265 else if (!exists()) { 2266 log.debug("open(): File {} does not exist", fullFileName); 2267 throw new HDF5Exception("File does not exist -- " + fullFileName); 2268 } 2269 else if (((flag == HDF5Constants.H5F_ACC_RDWR) || (flag == HDF5Constants.H5F_ACC_CREAT)) && !canWrite()) { 2270 log.debug("open(): Cannot write file {}", fullFileName); 2271 throw new HDF5Exception("Cannot write file, try opening as read-only -- " + fullFileName); 2272 } 2273 else if ((flag == HDF5Constants.H5F_ACC_RDONLY) && !canRead()) { 2274 log.debug("open(): Cannot read file {}", fullFileName); 2275 throw new HDF5Exception("Cannot read file -- " + fullFileName); 2276 } 2277 2278 try { 2279 fid = H5.H5Fopen(fullFileName, flag, plist); 2280 } 2281 catch (Exception ex) { 2282 try { 2283 log.debug("open(): open failed, attempting to open file read-only"); 2284 fid = H5.H5Fopen(fullFileName, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); 2285 isReadOnly = true; 2286 } 2287 catch (Exception ex2) { 2288 // Attempt to open the file as a split file or family file 2289 try { 2290 File tmpf = new File(fullFileName); 2291 String tmpname = tmpf.getName(); 2292 int idx = tmpname.lastIndexOf('.'); 2293 2294 if (tmpname.contains("-m")) { 2295 log.debug("open(): open read-only failed, attempting to open split file"); 2296 2297 while (idx > 0) { 2298 char c = tmpname.charAt(idx - 1); 2299 if (c != '-') 2300 idx--; 2301 else 2302 break; 2303 } 2304 2305 if (idx > 0) { 2306 tmpname = tmpname.substring(0, idx - 1); 2307 log.trace("open(): attempting to open split file with name {}", tmpname); 2308 long pid = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS); 2309 H5.H5Pset_fapl_split(pid, "-m.h5", HDF5Constants.H5P_DEFAULT, "-r.h5", HDF5Constants.H5P_DEFAULT); 2310 fid = H5.H5Fopen(tmpf.getParent() + File.separator + tmpname, flag, pid); 2311 H5.H5Pclose(pid); 2312 } 2313 } 2314 else { 2315 log.debug("open(): open read-only failed, checking for file family"); 2316 // try to see if it is a file family, always open a family file 2317 // from the first one since other files will not be recognized 2318 // as an HDF5 file 2319 int cnt = idx; 2320 while (idx > 0) { 2321 char c = tmpname.charAt(idx - 1); 2322 if (Character.isDigit(c)) 2323 idx--; 2324 else 2325 break; 2326 } 2327 2328 if (idx > 0) { 2329 cnt -= idx; 2330 tmpname = tmpname.substring(0, idx) + "%0" + cnt + "d" + tmpname.substring(tmpname.lastIndexOf('.')); 2331 log.trace("open(): attempting to open file family with name {}", tmpname); 2332 long pid = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS); 2333 H5.H5Pset_fapl_family(pid, 0, HDF5Constants.H5P_DEFAULT); 2334 fid = H5.H5Fopen(tmpf.getParent() + File.separator + tmpname, flag, pid); 2335 H5.H5Pclose(pid); 2336 } 2337 } 2338 } 2339 catch (Exception ex3) { 2340 
log.debug("open(): open failed: ", ex3); 2341 } 2342 } 2343 } 2344 2345 initLibBounds(); 2346 2347 if ((fid >= 0) && loadFullHierarchy) { 2348 // load the hierarchy of the file 2349 loadIntoMemory(); 2350 } 2351 2352 log.trace("open(loadFullHeirarchy = {}, plist = {}): finish", loadFullHierarchy, plist); 2353 return fid; 2354 } 2355 2356 /** 2357 * Loads the file structure into memory. 2358 */ 2359 private void loadIntoMemory() { 2360 if (fid < 0) { 2361 log.debug("loadIntoMemory(): Invalid FID"); 2362 return; 2363 } 2364 2365 /* 2366 * TODO: Root group's name should be changed to 'this.getName()' and all 2367 * previous accesses of this field should now use getPath() instead of getName() 2368 * to get the root group. The root group actually does have a path of "/". The 2369 * depth_first method will have to be changed to setup other object paths 2370 * appropriately, as it currently assumes the root path to be null. 2371 */ 2372 rootObject = new H5Group(this, "/", null, null); 2373 log.trace("loadIntoMemory(): depth_first on root"); 2374 depth_first(rootObject, 0); 2375 } 2376 2377 /** 2378 * Retrieves the file structure by depth-first order, recursively. The current implementation retrieves groups and 2379 * datasets only. It does not include named datatypes and soft links. 2380 * <p> 2381 * It also detects and stops loops. A loop is detected if there exists an object with the same object ID by tracing 2382 * a path back up to the root. 2383 * 2384 * @param parentObject 2385 * the parent object. 2386 */ 2387 @SuppressWarnings("deprecation") 2388 private int depth_first(HObject parentObject, int nTotal) { 2389 log.trace("depth_first({}): start", parentObject); 2390 2391 int nelems; 2392 String fullPath = null; 2393 String ppath = null; 2394 long gid = -1; 2395 2396 H5Group pgroup = (H5Group) parentObject; 2397 ppath = pgroup.getPath(); 2398 2399 if (ppath == null) { 2400 fullPath = HObject.SEPARATOR; 2401 } 2402 else { 2403 fullPath = ppath + pgroup.getName() + HObject.SEPARATOR; 2404 } 2405 2406 nelems = 0; 2407 try { 2408 gid = pgroup.open(); 2409 H5G_info_t info = H5.H5Gget_info(gid); 2410 nelems = (int) info.nlinks; 2411 } 2412 catch (HDF5Exception ex) { 2413 nelems = -1; 2414 log.debug("depth_first({}): H5Gget_info(gid {}) failure: ", parentObject, gid, ex); 2415 } 2416 2417 if (nelems <= 0) { 2418 pgroup.close(gid); 2419 log.debug("depth_first({}): nelems <= 0", parentObject); 2420 return nTotal; 2421 } 2422 2423 // since each call of H5.H5Gget_objname_by_idx() takes about one second. 2424 // 1,000,000 calls take 12 days. 
Instead of calling it in a loop, 2425 // we use only one call to get all the information, which takes about 2426 // two seconds 2427 int[] objTypes = new int[nelems]; 2428 long[] fNos = new long[nelems]; 2429 long[] objRefs = new long[nelems]; 2430 String[] objNames = new String[nelems]; 2431 2432 try { 2433 H5.H5Gget_obj_info_full(fid, fullPath, objNames, objTypes, null, fNos, objRefs, indexType, indexOrder); 2434 } 2435 catch (HDF5Exception ex) { 2436 log.debug("depth_first({}): failure: ", parentObject, ex); 2437 ex.printStackTrace(); 2438 return nTotal; 2439 } 2440 2441 int nStart = getStartMembers(); 2442 int nMax = getMaxMembers(); 2443 2444 String obj_name; 2445 int obj_type; 2446 2447 // Iterate through the file to see members of the group 2448 for (int i = 0; i < nelems; i++) { 2449 obj_name = objNames[i]; 2450 obj_type = objTypes[i]; 2451 log.trace("depth_first({}): obj_name={}, obj_type={}", parentObject, obj_name, obj_type); 2452 long oid[] = { objRefs[i], fNos[i] }; 2453 2454 if (obj_name == null) { 2455 log.trace("depth_first({}): continue after null obj_name", parentObject); 2456 continue; 2457 } 2458 2459 nTotal++; 2460 2461 if (nMax > 0) { 2462 if ((nTotal - nStart) >= nMax) 2463 break; // loaded enough objects 2464 } 2465 2466 boolean skipLoad = false; 2467 if ((nTotal > 0) && (nTotal < nStart)) 2468 skipLoad = true; 2469 2470 // create a new group 2471 if (obj_type == HDF5Constants.H5O_TYPE_GROUP) { 2472 H5Group g = new H5Group(this, obj_name, fullPath, pgroup); 2473 2474 pgroup.addToMemberList(g); 2475 2476 // detect and stop loops 2477 // a loop is detected if there exists object with the same 2478 // object ID by tracing path back up to the root. 2479 boolean hasLoop = false; 2480 H5Group tmpObj = (H5Group) parentObject; 2481 2482 while (tmpObj != null) { 2483 if (tmpObj.equalsOID(oid) && (tmpObj.getPath() != null)) { 2484 hasLoop = true; 2485 break; 2486 } 2487 else { 2488 tmpObj = (H5Group) tmpObj.getParent(); 2489 } 2490 } 2491 2492 // recursively go through the next group 2493 // stops if it has loop. 
2494 if (!hasLoop) { 2495 nTotal = depth_first(g, nTotal); 2496 } 2497 } 2498 else if (skipLoad) { 2499 continue; 2500 } 2501 else if (obj_type == HDF5Constants.H5O_TYPE_DATASET) { 2502 long did = -1; 2503 long tid = -1; 2504 int tclass = -1; 2505 try { 2506 did = H5.H5Dopen(fid, fullPath + obj_name, HDF5Constants.H5P_DEFAULT); 2507 if (did >= 0) { 2508 tid = H5.H5Dget_type(did); 2509 2510 tclass = H5.H5Tget_class(tid); 2511 if ((tclass == HDF5Constants.H5T_ARRAY) || (tclass == HDF5Constants.H5T_VLEN)) { 2512 // for ARRAY, the type is determined by the base type 2513 long btid = H5.H5Tget_super(tid); 2514 2515 tclass = H5.H5Tget_class(btid); 2516 2517 try { 2518 H5.H5Tclose(btid); 2519 } 2520 catch (Exception ex) { 2521 log.debug("depth_first({})[{}] dataset {} H5Tclose(btid {}) failure: ", parentObject, i, obj_name, btid, ex); 2522 } 2523 } 2524 } 2525 else { 2526 log.debug("depth_first({})[{}] {} dataset open failure", parentObject, i, obj_name); 2527 } 2528 } 2529 catch (Exception ex) { 2530 log.debug("depth_first({})[{}] {} dataset access failure: ", parentObject, i, obj_name, ex); 2531 } 2532 finally { 2533 try { 2534 H5.H5Tclose(tid); 2535 } 2536 catch (Exception ex) { 2537 log.debug("depth_first({})[{}] daatset {} H5Tclose(tid {}) failure: ", parentObject, i, obj_name, tid, ex); 2538 } 2539 try { 2540 H5.H5Dclose(did); 2541 } 2542 catch (Exception ex) { 2543 log.debug("depth_first({})[{}] dataset {} H5Dclose(did {}) failure: ", parentObject, i, obj_name, did, ex); 2544 } 2545 } 2546 Dataset d = null; 2547 if (tclass == HDF5Constants.H5T_COMPOUND) { 2548 // create a new compound dataset 2549 d = new H5CompoundDS(this, obj_name, fullPath, oid); // deprecated! 2550 } 2551 else { 2552 // create a new scalar dataset 2553 d = new H5ScalarDS(this, obj_name, fullPath, oid); // deprecated! 2554 } 2555 2556 pgroup.addToMemberList(d); 2557 } 2558 else if (obj_type == HDF5Constants.H5O_TYPE_NAMED_DATATYPE) { 2559 Datatype t = new H5Datatype(parentObject.getFileFormat(), obj_name, fullPath, oid); // deprecated! 2560 log.trace("depth_first({}): H5O_TYPE_NAMED_DATATYPE name={}", parentObject, t.getFullName()); 2561 2562 pgroup.addToMemberList(t); 2563 } 2564 else if (obj_type == HDF5Constants.H5O_TYPE_UNKNOWN) { 2565 H5Link link = new H5Link(this, obj_name, fullPath, oid); 2566 2567 pgroup.addToMemberList(link); 2568 continue; // do the next one, if the object is not identified. 2569 } 2570 } // ( i = 0; i < nelems; i++) 2571 2572 pgroup.close(gid); 2573 2574 return nTotal; 2575 } // private depth_first() 2576 2577 /** 2578 * Returns a list of all the members of this H5File in a 2579 * breadth-first ordering that are rooted at the specified 2580 * object. 
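     *
     * @param obj
     *            the object at which the breadth-first traversal starts (typically a group).
     *
     * @return the list of members, beginning with <code>obj</code> itself.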
2581 */ 2582 private static List<HObject> getMembersBreadthFirst(HObject obj) { 2583 List<HObject> allMembers = new Vector<>(); 2584 Queue<HObject> queue = new LinkedList<>(); 2585 HObject currentObject = obj; 2586 2587 queue.add(currentObject); 2588 2589 while(!queue.isEmpty()) { 2590 currentObject = queue.remove(); 2591 allMembers.add(currentObject); 2592 2593 if(currentObject instanceof Group) { 2594 queue.addAll(((Group) currentObject).getMemberList()); 2595 } 2596 } 2597 2598 return allMembers; 2599 } 2600 2601 private HObject copyDataset(Dataset srcDataset, H5Group pgroup, String dstName) throws Exception { 2602 Dataset dataset = null; 2603 long srcdid = -1, dstdid = -1; 2604 long ocp_plist_id = -1; 2605 String dname = null, path = null; 2606 2607 if (pgroup.isRoot()) { 2608 path = HObject.SEPARATOR; 2609 } 2610 else { 2611 path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR; 2612 } 2613 2614 if ((dstName == null) || dstName.equals(HObject.SEPARATOR) || (dstName.length() < 1)) { 2615 dstName = srcDataset.getName(); 2616 } 2617 dname = path + dstName; 2618 2619 try { 2620 srcdid = srcDataset.open(); 2621 dstdid = pgroup.open(); 2622 2623 try { 2624 ocp_plist_id = H5.H5Pcreate(HDF5Constants.H5P_OBJECT_COPY); 2625 H5.H5Pset_copy_object(ocp_plist_id, HDF5Constants.H5O_COPY_EXPAND_REFERENCE_FLAG); 2626 H5.H5Ocopy(srcdid, ".", dstdid, dstName, ocp_plist_id, HDF5Constants.H5P_DEFAULT); 2627 } 2628 catch (Exception ex) { 2629 log.debug("copyDataset(): {} failure: ", dname, ex); 2630 } 2631 finally { 2632 try { 2633 H5.H5Pclose(ocp_plist_id); 2634 } 2635 catch (Exception ex) { 2636 log.debug("copyDataset(): {} H5Pclose(ocp_plist_id {}) failure: ", dname, ocp_plist_id, ex); 2637 } 2638 } 2639 2640 if (srcDataset instanceof H5ScalarDS) { 2641 dataset = new H5ScalarDS(pgroup.getFileFormat(), dstName, path); 2642 } 2643 else { 2644 dataset = new H5CompoundDS(pgroup.getFileFormat(), dstName, path); 2645 } 2646 2647 pgroup.addToMemberList(dataset); 2648 } 2649 finally { 2650 try { 2651 srcDataset.close(srcdid); 2652 } 2653 catch (Exception ex) { 2654 log.debug("copyDataset(): {} srcDataset.close(srcdid {}) failure: ", dname, srcdid, ex); 2655 } 2656 try { 2657 pgroup.close(dstdid); 2658 } 2659 catch (Exception ex) { 2660 log.debug("copyDataset(): {} pgroup.close(dstdid {}) failure: ", dname, dstdid, ex); 2661 } 2662 } 2663 2664 return dataset; 2665 } 2666 2667 /** 2668 * Constructs a dataset for specified dataset identifier. 2669 * 2670 * @param did 2671 * the dataset identifier 2672 * @param name 2673 * the name of the dataset 2674 * @param path 2675 * the path of the dataset 2676 * 2677 * @return the dataset if successful; otherwise return null. 2678 * 2679 * @throws HDF5Exception 2680 * If there is an error at the HDF5 library level. 
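     * <p>
     * Note that this method does not close <code>did</code>; releasing the dataset identifier remains
     * the caller's responsibility.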
2681 */ 2682 private Dataset getDataset(long did, String name, String path) throws HDF5Exception { 2683 Dataset dataset = null; 2684 if (did >= 0) { 2685 long tid = -1; 2686 int tclass = -1; 2687 try { 2688 tid = H5.H5Dget_type(did); 2689 tclass = H5.H5Tget_class(tid); 2690 if (tclass == HDF5Constants.H5T_ARRAY) { 2691 // for ARRAY, the type is determined by the base type 2692 long btid = H5.H5Tget_super(tid); 2693 tclass = H5.H5Tget_class(btid); 2694 try { 2695 H5.H5Tclose(btid); 2696 } 2697 catch (Exception ex) { 2698 log.debug("getDataset(): {} H5Tclose(btid {}) failure: ", name, btid, ex); 2699 } 2700 } 2701 } 2702 finally { 2703 try { 2704 H5.H5Tclose(tid); 2705 } 2706 catch (Exception ex) { 2707 log.debug("getDataset(): {} H5Tclose(tid {}) failure: ", name, tid, ex); 2708 } 2709 } 2710 2711 if (tclass == HDF5Constants.H5T_COMPOUND) { 2712 dataset = new H5CompoundDS(this, name, path); 2713 } 2714 else { 2715 dataset = new H5ScalarDS(this, name, path); 2716 } 2717 } 2718 else { 2719 log.debug("getDataset(): id failure"); 2720 } 2721 2722 return dataset; 2723 } 2724 2725 /** 2726 * Copies a named datatype to another location. 2727 * 2728 * @param srcType 2729 * the source datatype 2730 * @param pgroup 2731 * the group which the new datatype is copied to 2732 * @param dstName 2733 * the name of the new dataype 2734 * 2735 * @throws Exception 2736 * If there is a failure. 2737 */ 2738 private HObject copyDatatype(Datatype srcType, H5Group pgroup, String dstName) throws Exception { 2739 Datatype datatype = null; 2740 long tid_src = -1; 2741 long gid_dst = -1; 2742 String path = null; 2743 2744 if (pgroup.isRoot()) { 2745 path = HObject.SEPARATOR; 2746 } 2747 else { 2748 path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR; 2749 } 2750 2751 if ((dstName == null) || dstName.equals(HObject.SEPARATOR) || (dstName.length() < 1)) { 2752 dstName = srcType.getName(); 2753 } 2754 2755 try { 2756 tid_src = srcType.open(); 2757 gid_dst = pgroup.open(); 2758 2759 try { 2760 H5.H5Ocopy(tid_src, ".", gid_dst, dstName, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 2761 } 2762 catch (Exception ex) { 2763 log.debug("copyDatatype(): {} H5Ocopy(tid_src {}) failure: ", dstName, tid_src, ex); 2764 } 2765 datatype = new H5Datatype(pgroup.getFileFormat(), dstName, path); 2766 2767 pgroup.addToMemberList(datatype); 2768 } 2769 finally { 2770 try { 2771 srcType.close(tid_src); 2772 } 2773 catch (Exception ex) { 2774 log.debug("copyDatatype(): {} srcType.close(tid_src {}) failure: ", dstName, tid_src, ex); 2775 } 2776 try { 2777 pgroup.close(gid_dst); 2778 } 2779 catch (Exception ex) { 2780 log.debug("copyDatatype(): {} pgroup.close(gid_dst {}) failure: ", dstName, gid_dst, ex); 2781 } 2782 } 2783 2784 return datatype; 2785 } 2786 2787 /** 2788 * Copies a group and its members to a new location. 2789 * 2790 * @param srcGroup 2791 * the source group 2792 * @param dstGroup 2793 * the location where the new group is located 2794 * @param dstName 2795 * the name of the new group 2796 * 2797 * @throws Exception 2798 * If there is a failure. 
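     * <p>
     * H5Ocopy copies the group and, by default, everything below it in the file; the in-memory structure
     * of the new group is then rebuilt with a fresh depth-first traversal.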
2799     */
2800    private HObject copyGroup(H5Group srcGroup, H5Group dstGroup, String dstName) throws Exception {
2801        H5Group group = null;
2802        long srcgid = -1, dstgid = -1;
2803        String path = null;
2804
2805        if (dstGroup.isRoot()) {
2806            path = HObject.SEPARATOR;
2807        }
2808        else {
2809            path = dstGroup.getPath() + dstGroup.getName() + HObject.SEPARATOR;
2810        }
2811
2812        if ((dstName == null) || dstName.equals(HObject.SEPARATOR) || (dstName.length() < 1)) {
2813            dstName = srcGroup.getName();
2814        }
2815
2816        try {
2817            srcgid = srcGroup.open();
2818            dstgid = dstGroup.open();
2819            try {
2820                H5.H5Ocopy(srcgid, ".", dstgid, dstName, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
2821            }
2822            catch (Exception ex) {
2823                log.debug("copyGroup(): {} H5Ocopy(srcgid {}) failure: ", dstName, srcgid, ex);
2824            }
2825
2826            group = new H5Group(dstGroup.getFileFormat(), dstName, path, dstGroup);
2827            depth_first(group, Integer.MIN_VALUE); // reload all
2828            dstGroup.addToMemberList(group);
2829        }
2830
2831        finally {
2832            try {
2833                srcGroup.close(srcgid);
2834            }
2835            catch (Exception ex) {
2836                log.debug("copyGroup(): {} srcGroup.close(srcgid {}) failure: ", dstName, srcgid, ex);
2837            }
2838            try {
2839                dstGroup.close(dstgid);
2840            }
2841            catch (Exception ex) {
2842                log.debug("copyGroup(): {} dstGroup.close(dstgid {}) failure: ", dstName, dstgid, ex);
2843            }
2844        }
2845
2846        return group;
2847    }
2848
2849    /**
2850     * Constructs a group for the specified group identifier and retrieves its members.
2851     *
2852     * @param gid
2853     *            The group identifier.
2854     * @param name
2855     *            The group name.
2856     * @param pGroup
2857     *            The parent group, or null for the root group.
2858     *
2859     * @return The group if successful; otherwise returns null.
2860     *
2861     * @throws HDF5Exception
2862     *             If there is an error at the HDF5 library level.
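     * <p>
     * Only the immediate members of the group are retrieved; subgroups are not traversed here.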
2863 */ 2864 private H5Group getGroup(long gid, String name, Group pGroup) throws HDF5Exception { 2865 String parentPath = null; 2866 String thisFullName = null; 2867 String memberFullName = null; 2868 2869 if (pGroup == null) { 2870 thisFullName = name = "/"; 2871 } 2872 else { 2873 parentPath = pGroup.getFullName(); 2874 if ((parentPath == null) || parentPath.equals("/")) { 2875 thisFullName = "/" + name; 2876 } 2877 else { 2878 thisFullName = parentPath + "/" + name; 2879 } 2880 } 2881 2882 // get rid of any extra "/" 2883 if (parentPath != null) { 2884 parentPath = parentPath.replaceAll("//", "/"); 2885 } 2886 if (thisFullName != null) { 2887 thisFullName = thisFullName.replaceAll("//", "/"); 2888 } 2889 2890 log.trace("getGroup(): fullName={}", thisFullName); 2891 2892 H5Group group = new H5Group(this, name, parentPath, pGroup); 2893 2894 H5G_info_t group_info = null; 2895 H5O_info_t obj_info = null; 2896 long oid = -1; 2897 String link_name = null; 2898 try { 2899 group_info = H5.H5Gget_info(gid); 2900 } 2901 catch (Exception ex) { 2902 log.debug("getGroup(): {} H5Gget_info(gid {}) failure: ", name, gid, ex); 2903 } 2904 try { 2905 oid = H5.H5Oopen(gid, thisFullName, HDF5Constants.H5P_DEFAULT); 2906 } 2907 catch (Exception ex) { 2908 log.debug("getGroup(): {} H5Oopen(gid {}) failure: ", name, gid, ex); 2909 } 2910 2911 // retrieve only the immediate members of the group, do not follow 2912 // subgroups 2913 for (int i = 0; i < group_info.nlinks; i++) { 2914 try { 2915 link_name = H5.H5Lget_name_by_idx(gid, thisFullName, indexType, indexOrder, i, HDF5Constants.H5P_DEFAULT); 2916 obj_info = H5.H5Oget_info_by_idx(oid, thisFullName, indexType, indexOrder, i, HDF5Constants.H5P_DEFAULT); 2917 } 2918 catch (HDF5Exception ex) { 2919 log.debug("getGroup()[{}]: {} name,info failure: ", i, name, ex); 2920 // do not stop if accessing one member fails 2921 continue; 2922 } 2923 // create a new group 2924 if (obj_info.type == HDF5Constants.H5O_TYPE_GROUP) { 2925 H5Group g = new H5Group(this, link_name, thisFullName, group); 2926 group.addToMemberList(g); 2927 } 2928 else if (obj_info.type == HDF5Constants.H5O_TYPE_DATASET) { 2929 long did = -1; 2930 Dataset d = null; 2931 2932 if ((thisFullName == null) || thisFullName.equals("/")) { 2933 memberFullName = "/" + link_name; 2934 } 2935 else { 2936 memberFullName = thisFullName + "/" + link_name; 2937 } 2938 2939 try { 2940 did = H5.H5Dopen(fid, memberFullName, HDF5Constants.H5P_DEFAULT); 2941 d = getDataset(did, link_name, thisFullName); 2942 } 2943 finally { 2944 try { 2945 H5.H5Dclose(did); 2946 } 2947 catch (Exception ex) { 2948 log.debug("getGroup()[{}]: {} H5Dclose(did {}) failure: ", i, name, did, ex); 2949 } 2950 } 2951 group.addToMemberList(d); 2952 } 2953 else if (obj_info.type == HDF5Constants.H5O_TYPE_NAMED_DATATYPE) { 2954 Datatype t = new H5Datatype(group.getFileFormat(), link_name, thisFullName); 2955 group.addToMemberList(t); 2956 } 2957 } // End of for loop. 2958 try { 2959 if (oid >= 0) 2960 H5.H5Oclose(oid); 2961 } 2962 catch (Exception ex) { 2963 log.debug("getGroup(): {} H5Oclose(oid {}) failure: ", name, oid, ex); 2964 } 2965 2966 return group; 2967 } 2968 2969 /** 2970 * Retrieves the name of the target object that is being linked to. 2971 * 2972 * @param obj 2973 * The current link object. 2974 * 2975 * @return The name of the target object. 2976 * 2977 * @throws Exception 2978 * If there is an error at the HDF5 library level. 
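     * <p>
     * For a soft link the returned string is the target path within this file; for an external link it
     * has the form <code>targetFile + FileFormat.FILE_OBJ_SEP + targetPath</code>.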
2979 */ 2980 public static String getLinkTargetName(HObject obj) throws Exception { 2981 String[] link_value = { null, null }; 2982 String targetObjName = null; 2983 2984 if (obj == null) { 2985 log.debug("getLinkTargetName(): object is null"); 2986 return null; 2987 } 2988 2989 if (obj.getFullName().equals("/")) { 2990 log.debug("getLinkTargetName(): object is root group, links not allowed"); 2991 return null; 2992 } 2993 2994 H5L_info_t link_info = null; 2995 try { 2996 link_info = H5.H5Lget_info(obj.getFID(), obj.getFullName(), HDF5Constants.H5P_DEFAULT); 2997 } 2998 catch (Exception err) { 2999 log.debug("getLinkTargetName(): H5Lget_info {} failure: ", obj.getFullName(), err); 3000 } 3001 if (link_info != null) { 3002 if ((link_info.type == HDF5Constants.H5L_TYPE_SOFT) || (link_info.type == HDF5Constants.H5L_TYPE_EXTERNAL)) { 3003 try { 3004 H5.H5Lget_value(obj.getFID(), obj.getFullName(), link_value, HDF5Constants.H5P_DEFAULT); 3005 } 3006 catch (Exception ex) { 3007 log.debug("getLinkTargetName(): H5Lget_value {} failure: ", obj.getFullName(), ex); 3008 } 3009 if (link_info.type == HDF5Constants.H5L_TYPE_SOFT) 3010 targetObjName = link_value[0]; 3011 else if (link_info.type == HDF5Constants.H5L_TYPE_EXTERNAL) { 3012 targetObjName = link_value[1] + FileFormat.FILE_OBJ_SEP + link_value[0]; 3013 } 3014 } 3015 } 3016 3017 return targetObjName; 3018 } 3019 3020 /** 3021 * Export dataset. 3022 * 3023 * @param file_export_name 3024 * The file name to export data into. 3025 * @param object 3026 * The HDF5 dataset object. 3027 * @param binary_order 3028 * The data byte order 3029 * 3030 * @throws Exception 3031 * If there is a failure. 3032 */ 3033 public void exportDataset(String file_export_name, Dataset object, int binary_order) 3034 throws Exception { 3035 H5.H5export_dataset(file_export_name, object.getFile(), object.getFullName(), binary_order); 3036 } 3037 3038 /** 3039 * Renames an attribute. 3040 * 3041 * @param obj 3042 * The object whose attribute is to be renamed. 3043 * @param oldAttrName 3044 * The current name of the attribute. 3045 * @param newAttrName 3046 * The new name of the attribute. 3047 * 3048 * @throws Exception 3049 * If there is an error at the HDF5 library level. 3050 */ 3051 @Override 3052 public void renameAttribute(HObject obj, String oldAttrName, String newAttrName) throws Exception { 3053 log.trace("renameAttribute(): rename {} to {}", oldAttrName, newAttrName); 3054 H5.H5Arename_by_name(obj.getFID(), obj.getFullName(), oldAttrName, newAttrName, HDF5Constants.H5P_DEFAULT); 3055 } 3056 3057 /** 3058 * Rename the given object 3059 * 3060 * @param obj 3061 * the object to be renamed. 3062 * @param newName 3063 * the new name of the object. 3064 * 3065 * @throws Exception 3066 * If there is a failure. 3067 */ 3068 public static void renameObject(HObject obj, String newName) throws Exception { 3069 renameObject(obj, obj.getPath(), newName); 3070 } 3071 3072 /** 3073 * Rename the given object 3074 * 3075 * @param obj 3076 * the object to be renamed. 3077 * @param newPath 3078 * the new path of the object. 3079 * @param newName 3080 * the new name of the object. 3081 * 3082 * @throws Exception 3083 * If there is a failure. 
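     * <p>
     * A minimal usage sketch (hypothetical object; the new path and name are illustrative only):
     *
     * <pre>
     * HObject dset = h5file.get("/data/dset1");
     * H5File.renameObject(dset, "/archive/", "dset1_old");
     * </pre>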
3084 */ 3085 public static void renameObject(HObject obj, String newPath, String newName) throws Exception { 3086 String currentFullPath = obj.getFullName(); 3087 String newFullPath = obj.createFullname(newPath, newName); 3088 3089 log.trace("renameObject(): currentFullPath={} newFullPath={}", currentFullPath, newFullPath); 3090 if ((currentFullPath != null) && (newFullPath != null)) { 3091 currentFullPath = currentFullPath.replaceAll("//", "/"); 3092 newFullPath = newFullPath.replaceAll("//", "/"); 3093 3094 if (currentFullPath.equals("/") && obj instanceof Group) { 3095 throw new HDF5Exception("Can't rename the root group."); 3096 } 3097 3098 if (currentFullPath.equals(newFullPath)) { 3099 throw new HDF5Exception("The new name is the same as the current name."); 3100 } 3101 3102 if (obj.getName() != null) 3103 // Call the library to move things in the file if object exists 3104 H5.H5Lmove(obj.getFID(), currentFullPath, obj.getFID(), newFullPath, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 3105 } 3106 } 3107 3108 public static int getIndexTypeValue(String strtype) { 3109 if (strtype.compareTo("H5_INDEX_NAME") == 0) 3110 return HDF5Constants.H5_INDEX_NAME; 3111 if (strtype.compareTo("H5_INDEX_CRT_ORDER") == 0) 3112 return HDF5Constants.H5_INDEX_CRT_ORDER; 3113 if (strtype.compareTo("H5_INDEX_N") == 0) 3114 return HDF5Constants.H5_INDEX_N; 3115 return HDF5Constants.H5_INDEX_UNKNOWN; 3116 } 3117 3118 public static int getIndexOrderValue(String strorder) { 3119 if (strorder.compareTo("H5_ITER_INC") == 0) 3120 return HDF5Constants.H5_ITER_INC; 3121 if (strorder.compareTo("H5_ITER_DEC") == 0) 3122 return HDF5Constants.H5_ITER_DEC; 3123 if (strorder.compareTo("H5_ITER_NATIVE") == 0) 3124 return HDF5Constants.H5_ITER_NATIVE; 3125 if (strorder.compareTo("H5_ITER_N") == 0) 3126 return HDF5Constants.H5_ITER_N; 3127 return HDF5Constants.H5_ITER_UNKNOWN; 3128 } 3129 3130 @Override 3131 public int getIndexType(String strtype) { 3132 if (strtype != null) { 3133 if (strtype.compareTo("H5_INDEX_NAME") == 0) 3134 return HDF5Constants.H5_INDEX_NAME; 3135 if (strtype.compareTo("H5_INDEX_CRT_ORDER") == 0) 3136 return HDF5Constants.H5_INDEX_CRT_ORDER; 3137 return HDF5Constants.H5_INDEX_UNKNOWN; 3138 } 3139 return getIndexType(); 3140 } 3141 3142 public int getIndexType() { 3143 return indexType; 3144 } 3145 3146 @Override 3147 public void setIndexType(int indexType) { 3148 this.indexType = indexType; 3149 } 3150 3151 @Override 3152 public int getIndexOrder(String strorder) { 3153 if (strorder != null) { 3154 if (strorder.compareTo("H5_ITER_INC") == 0) 3155 return HDF5Constants.H5_ITER_INC; 3156 if (strorder.compareTo("H5_ITER_DEC") == 0) 3157 return HDF5Constants.H5_ITER_DEC; 3158 if (strorder.compareTo("H5_ITER_NATIVE") == 0) 3159 return HDF5Constants.H5_ITER_NATIVE; 3160 if (strorder.compareTo("H5_ITER_N") == 0) 3161 return HDF5Constants.H5_ITER_N; 3162 return HDF5Constants.H5_ITER_UNKNOWN; 3163 } 3164 return getIndexOrder(); 3165 } 3166 3167 public int getIndexOrder() { 3168 return indexOrder; 3169 } 3170 3171 @Override 3172 public void setIndexOrder(int indexOrder) { 3173 this.indexOrder = indexOrder; 3174 } 3175}