001/***************************************************************************** 002 * Copyright by The HDF Group. * 003 * Copyright by the Board of Trustees of the University of Illinois. * 004 * All rights reserved. * 005 * * 006 * This file is part of the HDF Java Products distribution. * 007 * The full copyright notice, including terms governing use, modification, * 008 * and redistribution, is contained in the files COPYING and Copyright.html. * 009 * COPYING can be found at the root of the source code distribution tree. * 010 * Or, see https://support.hdfgroup.org/products/licenses.html * 011 * If you do not have access to either file, you may request a copy from * 012 * help@hdfgroup.org. * 013 ****************************************************************************/ 014 015package hdf.object.h5; 016 017import java.io.File; 018import java.lang.reflect.Array; 019import java.util.Hashtable; 020import java.util.Iterator; 021import java.util.LinkedList; 022import java.util.List; 023import java.util.Queue; 024import java.util.Vector; 025 026import hdf.hdf5lib.H5; 027import hdf.hdf5lib.HDF5Constants; 028import hdf.hdf5lib.HDFNativeData; 029import hdf.hdf5lib.exceptions.HDF5Exception; 030import hdf.hdf5lib.structs.H5G_info_t; 031import hdf.hdf5lib.structs.H5L_info_t; 032import hdf.hdf5lib.structs.H5O_info_t; 033import hdf.object.Attribute; 034import hdf.object.Dataset; 035import hdf.object.Datatype; 036import hdf.object.FileFormat; 037import hdf.object.Group; 038import hdf.object.HObject; 039import hdf.object.ScalarDS; 040 041 042/** 043 * H5File is an implementation of the FileFormat class for HDF5 files. 044 * <p> 045 * The HDF5 file structure is made up of HObjects stored in a tree-like fashion. Each tree node represents an 046 * HDF5 object: a Group, Dataset, or Named Datatype. Starting from the root of the tree, <i>rootObject</i>, the 047 * tree can be traversed to find a specific object. 
048 * <p> 049 * The following example shows the implementation of finding an object for a given path in FileFormat. User applications 050 * can directly call the static method FileFormat.findObject(file, objPath) to get the object. 051 * 052 * <pre> 053 * HObject findObject(FileFormat file, String path) { 054 * if (file == null || path == null) 055 * return null; 056 * if (!path.endsWith("/")) 057 * path = path + "/"; 058 * HObject theRoot = file.getRootObject(); 059 * if (theRoot == null) 060 * return null; 061 * else if (path.equals("/")) 062 * return theRoot; 063 * 064 * Iterator local_it = ((Group) theRoot) 065 * .breadthFirstMemberList().iterator(); 066 * HObject theObj = null; 067 * while (local_it.hasNext()) { 068 * theObj = local_it.next(); 069 * String fullPath = theObj.getFullName() + "/"; 070 * if (path.equals(fullPath) && theObj.getPath() != null ) { 071 * break; 072 * } 073 * return theObj; 074 * } 075 * </pre> 076 * 077 * @author Peter X. Cao 078 * @version 2.4 9/4/2007 079 */ 080public class H5File extends FileFormat { 081 private static final long serialVersionUID = 6247335559471526045L; 082 083 private final static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5File.class); 084 085 /** 086 * the file access flag. Valid values are HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5F_ACC_RDWR and 087 * HDF5Constants.H5F_ACC_CREAT. 088 */ 089 private int flag; 090 091 /** 092 * The index type. Valid values are HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_INDEX_CRT_ORDER. 093 */ 094 private int indexType = HDF5Constants.H5_INDEX_NAME; 095 096 /** 097 * The index order. Valid values are HDF5Constants.H5_ITER_INC, HDF5Constants.H5_ITER_DEC. 098 */ 099 private int indexOrder = HDF5Constants.H5_ITER_INC; 100 101 /** 102 * The root object of the file hierarchy. 103 */ 104 private HObject rootObject; 105 106 /** 107 * How many characters maximum in an attribute name? 
108 */ 109 private static final int attrNameLen = 256; 110 111 /** 112 * The library version bounds 113 */ 114 private int[] libver; 115 public static final int LIBVER_LATEST = HDF5Constants.H5F_LIBVER_LATEST; 116 public static final int LIBVER_EARLIEST = HDF5Constants.H5F_LIBVER_EARLIEST; 117 public static final int LIBVER_V18 = HDF5Constants.H5F_LIBVER_V18; 118 public static final int LIBVER_V110 = HDF5Constants.H5F_LIBVER_V110; 119 120 /*************************************************************************** 121 * Constructor 122 **************************************************************************/ 123 /** 124 * Constructs an H5File instance with an empty file name and read-only access. 125 */ 126 public H5File() { 127 this("", READ); 128 } 129 130 /** 131 * Constructs an H5File instance with specified file name and read/write access. 132 * <p> 133 * This constructor does not open the file for access, nor does it confirm that the file can be opened read/write. 134 * 135 * @param fileName 136 * A valid file name, with a relative or absolute path. 137 * 138 * @throws NullPointerException 139 * If the <code>fileName</code> argument is <code>null</code>. 140 */ 141 public H5File(String fileName) { 142 this(fileName, WRITE); 143 } 144 145 /** 146 * Constructs an H5File instance with specified file name and access. 147 * <p> 148 * The access parameter values and corresponding behaviors: 149 * <ul> 150 * <li>READ: Read-only access; open() will fail file doesn't exist.</li> 151 * <li>WRITE: Read/Write access; open() will fail if file doesn't exist or if file can't be opened with read/write 152 * access.</li> 153 * <li>CREATE: Read/Write access; create a new file or truncate an existing one; open() will fail if file can't be 154 * created or if file exists but can't be opened read/write.</li> 155 * </ul> 156 * <p> 157 * This constructor does not open the file for access, nor does it confirm that the file can later be opened 158 * read/write or created. 
159 * <p> 160 * The flag returned by {@link #isReadOnly()} is set to true if the access parameter value is READ, even though the 161 * file isn't yet open. 162 * 163 * @param fileName 164 * A valid file name, with a relative or absolute path. 165 * @param access 166 * The file access flag, which determines behavior when file is opened. Acceptable values are 167 * <code> READ, WRITE, </code> and <code>CREATE</code>. 168 * 169 * @throws NullPointerException 170 * If the <code>fileName</code> argument is <code>null</code>. 171 */ 172 public H5File(String fileName, int access) { 173 // Call FileFormat ctor to set absolute path name 174 super(fileName); 175 libver = new int[2]; 176 177 if ((access & FILE_CREATE_OPEN) == FILE_CREATE_OPEN) { 178 File f = new File(fileName); 179 if (f.exists()) { 180 access = WRITE; 181 } 182 else { 183 access = CREATE; 184 } 185 } 186 187 // set metadata for the instance 188 rootObject = null; 189 this.fid = -1; 190 isReadOnly = (access == READ); 191 192 // At this point we just set up the flags for what happens later. 193 // We just pass unexpected access values on... subclasses may have 194 // their own values. 195 if (access == READ) { 196 flag = HDF5Constants.H5F_ACC_RDONLY; 197 } 198 else if (access == WRITE) { 199 flag = HDF5Constants.H5F_ACC_RDWR; 200 } 201 else if (access == CREATE) { 202 flag = HDF5Constants.H5F_ACC_CREAT; 203 } 204 else { 205 flag = access; 206 } 207 } 208 209 /*************************************************************************** 210 * Class methods 211 **************************************************************************/ 212 213 /** 214 * Copies the attributes of one object to another object. 215 * <p> 216 * This method copies all the attributes from one object (source object) to another (destination object). If an 217 * attribute already exists in the destination object, the attribute will not be copied. Attribute names exceeding 218 * 256 characters will be truncated in the destination object. 
219 * <p> 220 * The object can be an H5Group, an H5Dataset, or a named H5Datatype. This method is in the H5File class because 221 * there is no H5Object class and it is specific to HDF5 objects. 222 * <p> 223 * The copy can fail for a number of reasons, including an invalid source or destination object, but no exceptions 224 * are thrown. The actual copy is carried out by the method: {@link #copyAttributes(long, long)} 225 * 226 * @param src 227 * The source object. 228 * @param dst 229 * The destination object. 230 * 231 * @see #copyAttributes(long, long) 232 */ 233 public static final void copyAttributes(HObject src, HObject dst) { 234 if ((src != null) && (dst != null)) { 235 long srcID = src.open(); 236 long dstID = dst.open(); 237 238 if ((srcID >= 0) && (dstID >= 0)) { 239 copyAttributes(srcID, dstID); 240 } 241 242 if (srcID >= 0) { 243 src.close(srcID); 244 } 245 246 if (dstID >= 0) { 247 dst.close(dstID); 248 } 249 } 250 } 251 252 /** 253 * Copies the attributes of one object to another object. 254 * <p> 255 * This method copies all the attributes from one object (source object) to another (destination object). If an 256 * attribute already exists in the destination object, the attribute will not be copied. Attribute names exceeding 257 * 256 characters will be truncated in the destination object. 258 * <p> 259 * The object can be an H5Group, an H5Dataset, or a named H5Datatype. This method is in the H5File class because 260 * there is no H5Object class and it is specific to HDF5 objects. 261 * <p> 262 * The copy can fail for a number of reasons, including an invalid source or destination object identifier, but no 263 * exceptions are thrown. 264 * 265 * @param src_id 266 * The identifier of the source object. 267 * @param dst_id 268 * The identifier of the destination object. 
269 */ 270 public static final void copyAttributes(long src_id, long dst_id) { 271 log.trace("copyAttributes(): start: src_id={} dst_id={}", src_id, dst_id); 272 long aid_src = -1; 273 long aid_dst = -1; 274 long asid = -1; 275 long atid = -1; 276 String aName = null; 277 H5O_info_t obj_info = null; 278 279 try { 280 obj_info = H5.H5Oget_info(src_id); 281 } 282 catch (Exception ex) { 283 obj_info.num_attrs = -1; 284 } 285 286 if (obj_info.num_attrs < 0) { 287 log.debug("copyAttributes(): no attributes"); 288 log.trace("copyAttributes(): finish"); 289 return; 290 } 291 292 for (int i = 0; i < obj_info.num_attrs; i++) { 293 try { 294 aid_src = H5.H5Aopen_by_idx(src_id, ".", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 295 i, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 296 aName = H5.H5Aget_name(aid_src); 297 atid = H5.H5Aget_type(aid_src); 298 asid = H5.H5Aget_space(aid_src); 299 300 aid_dst = H5.H5Acreate(dst_id, aName, atid, asid, HDF5Constants.H5P_DEFAULT, 301 HDF5Constants.H5P_DEFAULT); 302 303 // use native data copy 304 H5.H5Acopy(aid_src, aid_dst); 305 306 } 307 catch (Exception ex) { 308 log.debug("copyAttributes(): Attribute[{}] failure: ", i, ex); 309 } 310 311 try { 312 H5.H5Sclose(asid); 313 } 314 catch (Exception ex) { 315 log.debug("copyAttributes(): Attribute[{}] H5Sclose(asid {}) failure: ", i, asid, ex); 316 } 317 try { 318 H5.H5Tclose(atid); 319 } 320 catch (Exception ex) { 321 log.debug("copyAttributes(): Attribute[{}] H5Tclose(atid {}) failure: ", i, atid, ex); 322 } 323 try { 324 H5.H5Aclose(aid_src); 325 } 326 catch (Exception ex) { 327 log.debug("copyAttributes(): Attribute[{}] H5Aclose(aid_src {}) failure: ", i, aid_src, ex); 328 } 329 try { 330 H5.H5Aclose(aid_dst); 331 } 332 catch (Exception ex) { 333 log.debug("copyAttributes(): Attribute[{}] H5Aclose(aid_dst {}) failure: ", i, aid_dst, ex); 334 } 335 336 } // for (int i=0; i<num_attr; i++) 337 } 338 339 /** 340 * Returns a list of attributes for the specified 
object. 341 * <p> 342 * This method returns a list containing the attributes associated with the 343 * identified object. If there are no associated attributes, an empty list will 344 * be returned. 345 * <p> 346 * Attribute names exceeding 256 characters will be truncated in the returned 347 * list. 348 * 349 * @param obj 350 * The HObject whose attributes are to be returned. 351 * 352 * @return The list of the object's attributes. 353 * 354 * @throws HDF5Exception 355 * If an underlying HDF library routine is unable to perform a step 356 * necessary to retrieve the attributes. A variety of failures throw 357 * this exception. 358 * 359 * @see #getAttribute(HObject,int,int) 360 */ 361 public static final List<Attribute> getAttribute(HObject obj) throws HDF5Exception { 362 return H5File.getAttribute(obj, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC); 363 } 364 365 /** 366 * Returns a list of attributes for the specified object, in creation or 367 * alphabetical order. 368 * <p> 369 * This method returns a list containing the attributes associated with the 370 * identified object. If there are no associated attributes, an empty list will 371 * be returned. The list of attributes returned can be in increasing or 372 * decreasing, creation or alphabetical order. 373 * <p> 374 * Attribute names exceeding 256 characters will be truncated in the returned 375 * list. 376 * 377 * @param obj 378 * The HObject whose attributes are to be returned. 379 * @param idx_type 380 * The type of index. Valid values are: 381 * <ul> 382 * <li>H5_INDEX_NAME: An alpha-numeric index by attribute name 383 * <li>H5_INDEX_CRT_ORDER: An index by creation order 384 * </ul> 385 * @param order 386 * The index traversal order. Valid values are: 387 * <ul> 388 * <li>H5_ITER_INC: A top-down iteration incrementing the index 389 * position at each step. 390 * <li>H5_ITER_DEC: A bottom-up iteration decrementing the index 391 * position at each step. 
392 * </ul> 393 * 394 * @return The list of the object's attributes. 395 * 396 * @throws HDF5Exception 397 * If an underlying HDF library routine is unable to perform a step 398 * necessary to retrieve the attributes. A variety of failures throw 399 * this exception. 400 */ 401 402 public static final List<Attribute> getAttribute(HObject obj, int idx_type, int order) throws HDF5Exception { 403 log.trace("getAttribute(): start: obj={} idx_type={} order={}", obj, idx_type, order); 404 List<Attribute> attributeList = null; 405 long objID = -1; 406 long aid = -1; 407 long sid = -1; 408 long tid = -1; 409 H5O_info_t obj_info = null; 410 411 objID = obj.open(); 412 if (objID >= 0) { 413 try { 414 try { 415 log.trace("getAttribute(): get obj_info"); 416 obj_info = H5.H5Oget_info(objID); 417 } 418 catch (Exception ex) { 419 log.debug("getAttribute(): H5Oget_info(objID {}) failure: ", objID, ex); 420 } 421 if (obj_info.num_attrs <= 0) { 422 log.trace("getAttribute(): no attributes"); 423 log.trace("getAttribute(): finish"); 424 return (attributeList = new Vector<>()); 425 } 426 427 int n = (int) obj_info.num_attrs; 428 attributeList = new Vector<>(n); 429 log.trace("getAttribute(): num_attrs={}", n); 430 431 for (int i = 0; i < n; i++) { 432 long lsize = 1; 433 log.trace("getAttribute(): attribute[{}]", i); 434 435 try { 436 aid = H5.H5Aopen_by_idx(objID, ".", idx_type, order, i, HDF5Constants.H5P_DEFAULT, 437 HDF5Constants.H5P_DEFAULT); 438 sid = H5.H5Aget_space(aid); 439 log.trace("getAttribute(): Attribute[{}] aid={} sid={}", i, aid, sid); 440 441 long dims[] = null; 442 int rank = H5.H5Sget_simple_extent_ndims(sid); 443 444 if (rank > 0) { 445 dims = new long[rank]; 446 H5.H5Sget_simple_extent_dims(sid, dims, null); 447 log.trace("getAttribute(): Attribute[{}] rank={}, dims={}", i, rank, dims); 448 for (int j = 0; j < dims.length; j++) { 449 lsize *= dims[j]; 450 } 451 } 452 String nameA = H5.H5Aget_name(aid); 453 log.trace("getAttribute(): Attribute[{}] is {}", i, 
nameA); 454 455 long tmptid = -1; 456 try { 457 tmptid = H5.H5Aget_type(aid); 458 tid = H5.H5Tget_native_type(tmptid); 459 log.trace("getAttribute(): Attribute[{}] tid={} native tmptid={} from aid={}", i, tid, 460 tmptid, aid); 461 } 462 finally { 463 try { 464 H5.H5Tclose(tmptid); 465 } 466 catch (Exception ex) { 467 log.debug("getAttribute(): Attribute[{}] H5Tclose(tmptid {}) failure: ", i, tmptid, ex); 468 } 469 } 470 H5Datatype attrType = new H5Datatype(tid); 471 Attribute attr = new Attribute(obj, nameA, attrType, dims); 472 attributeList.add(attr); 473 log.debug("getAttribute(): Attribute[{}] Datatype={}", i, attr.getDatatype().getDescription()); 474 log.trace( 475 "getAttribute(): Attribute[{}] has size={} isCompound={} isScalar={} is_variable_str={} isVLEN={}", 476 i, lsize, attr.getDatatype().isCompound(), attr.isScalar(), attr.getDatatype().isVarStr(), attr.getDatatype().isVLEN()); 477 478 // retrieve the attribute value 479 if (lsize <= 0) { 480 log.debug("getAttribute(): Attribute[{}] lsize <= 0", i); 481 log.trace("getAttribute(): Attribute[{}] continue", i); 482 continue; 483 } 484 485 if (lsize < Integer.MIN_VALUE || lsize > Integer.MAX_VALUE) { 486 log.debug("getAttribute(): Attribute[{}] lsize outside valid Java int range; unsafe cast", 487 i); 488 log.trace("getAttribute(): Attribute[{}] continue", i); 489 continue; 490 } 491 492 Object value = null; 493 if (attr.getDatatype().isVarStr()) { 494 String[] strs = new String[(int) lsize]; 495 for (int j = 0; j < lsize; j++) { 496 strs[j] = ""; 497 } 498 try { 499 log.trace("getAttribute(): Attribute[{}] H5AreadVL", i); 500 H5.H5AreadVL(aid, tid, strs); 501 } 502 catch (Exception ex) { 503 log.debug("getAttribute(): Attribute[{}] H5AreadVL failure: ", i, ex); 504 ex.printStackTrace(); 505 } 506 value = strs; 507 } 508 else if (attr.getDatatype().isCompound()) { 509 String[] strs = new String[(int) lsize]; 510 for (int j = 0; j < lsize; j++) { 511 strs[j] = ""; 512 } 513 try { 514 
log.trace("getAttribute: attribute[{}] H5AreadComplex", i); 515 H5.H5AreadComplex(aid, tid, strs); 516 } 517 catch (Exception ex) { 518 ex.printStackTrace(); 519 } 520 value = strs; 521 } 522 else if (attr.getDatatype().isVLEN()) { 523 String[] strs = new String[(int) lsize]; 524 for (int j = 0; j < lsize; j++) { 525 strs[j] = ""; 526 } 527 try { 528 log.trace("getAttribute(): Attribute[{}] H5AreadVL", i); 529 H5.H5AreadVL(aid, tid, strs); 530 } 531 catch (Exception ex) { 532 log.debug("getAttribute(): Attribute[{}] H5AreadVL failure: ", i, ex); 533 ex.printStackTrace(); 534 } 535 value = strs; 536 } 537 else { 538 try { 539 value = ((H5Datatype) attr.getDatatype()).allocateArray((int) lsize); 540 } 541 catch (OutOfMemoryError e) { 542 log.debug("getAttribute(): Attribute[{}] out of memory", i, e); 543 value = null; 544 } 545 if (value == null) { 546 log.debug("getAttribute(): Attribute[{}] allocateArray returned null", i); 547 log.trace("getAttribute(): Attribute[{}] continue", i); 548 continue; 549 } 550 551 if (attr.getDatatype().isArray()) { 552 try { 553 log.trace("getAttribute(): Attribute[{}] H5Aread ARRAY tid={}", i, tid); 554 H5.H5Aread(aid, tid, value); 555 } 556 catch (Exception ex) { 557 log.debug("getAttribute(): Attribute[{}] H5Aread failure: ", i, ex); 558 ex.printStackTrace(); 559 } 560 } 561 else { 562 log.trace("getAttribute(): Attribute[{}] H5Aread", i); 563 H5.H5Aread(aid, tid, value); 564 } 565 566 if (attr.getDatatype().isString()) { 567 log.trace("getAttribute(): Attribute[{}] byteToString", i); 568 value = Dataset.byteToString((byte[]) value, (int) H5.H5Tget_size(tid)); 569 } 570 else if (attr.getDatatype().isRef()) { 571 log.trace("getAttribute(): Attribute[{}] byteToLong", i); 572 value = HDFNativeData.byteToLong((byte[]) value); 573 } 574 } 575 576 log.debug("getAttribute(): Attribute[{}] data: {}", i, value); 577 attr.setData(value); 578 } 579 catch (HDF5Exception ex) { 580 log.debug("getAttribute(): Attribute[{}] inspection failure: ", 
i, ex); 581 } 582 finally { 583 try { 584 H5.H5Tclose(tid); 585 } 586 catch (Exception ex) { 587 log.debug("getAttribute(): Attribute[{}] H5Tclose(tid {}) failure: ", i, tid, ex); 588 } 589 try { 590 H5.H5Sclose(sid); 591 } 592 catch (Exception ex) { 593 log.debug("getAttribute(): Attribute[{}] H5Sclose(sid {}) failure: ", i, sid, ex); 594 } 595 try { 596 H5.H5Aclose(aid); 597 } 598 catch (Exception ex) { 599 log.debug("getAttribute(): Attribute[{}] H5Aclose(aid {}) failure: ", i, aid, ex); 600 } 601 } 602 } // for (int i=0; i<obj_info.num_attrs; i++) 603 } 604 finally { 605 obj.close(objID); 606 } 607 } 608 609 log.trace("getAttribute(): finish"); 610 return attributeList; 611 } 612 613 /** 614 * Creates attributes for an HDF5 image dataset. 615 * <p> 616 * This method creates attributes for two common types of HDF5 images. It provides a way of adding multiple 617 * attributes to an HDF5 image dataset with a single call. The {@link #writeAttribute(HObject, Attribute, boolean)} 618 * method may be used to write image attributes that are not handled by this method. 619 * <p> 620 * For more information about HDF5 image attributes, see the 621 * <a href="https://support.hdfgroup.org/HDF5/doc/ADGuide/ImageSpec.html"> HDF5 Image and Palette Specification</a>. 622 * <p> 623 * This method can be called to create attributes for 24-bit true color and indexed images. The 624 * <code>selectionFlag</code> parameter controls whether this will be an indexed or true color image. If 625 * <code>selectionFlag</code> is <code>-1</code>, this will be an indexed image. If the value is 626 * <code>ScalarDS.INTERLACE_PIXEL</code> or <code>ScalarDS.INTERLACE_PLANE</code>, it will be a 24-bit true color 627 * image with the indicated interlace mode. 
     * <p>
     * The created attribute descriptions, names, and values are:
     * <ul>
     * <li>The image identifier: name="CLASS", value="IMAGE"
     * <li>The version of image: name="IMAGE_VERSION", value="1.2"
     * <li>The range of data values: name="IMAGE_MINMAXRANGE", value=[0, 255]
     * <li>The type of the image: name="IMAGE_SUBCLASS", value="IMAGE_TRUECOLOR" or "IMAGE_INDEXED"
     * <li>For IMAGE_TRUECOLOR, the interlace mode: name="INTERLACE_MODE", value="INTERLACE_PIXEL" or "INTERLACE_PLANE"
     * <li>For IMAGE_INDEXED, the palettes to use in viewing the image: name="PALETTE", value= 1-d array of references
     * to the palette datasets, with initial value of {0}
     * </ul>
     * <p>
     * This method is in the H5File class rather than H5ScalarDS because images are typically thought of at the File
     * Format implementation level.
     *
     * @param dataset
     *            The image dataset the attributes are added to.
     * @param selectionFlag
     *            Selects the image type and, for 24-bit true color images, the interlace mode. Valid values are:
     *            <ul>
     *            <li>-1: Indexed Image.
     *            <li>ScalarDS.INTERLACE_PIXEL: True Color Image. The component values for a pixel are stored
     *            contiguously.
     *            <li>ScalarDS.INTERLACE_PLANE: True Color Image. Each component is stored in a separate plane.
     *            </ul>
     *
     * @throws Exception
     *             If there is a problem creating the attributes, or if the selectionFlag is invalid.
655 */ 656 private static final void createImageAttributes(Dataset dataset, int selectionFlag) throws Exception { 657 log.trace("createImageAttributes(): start: dataset={}", dataset.toString()); 658 String subclass = null; 659 String interlaceMode = null; 660 661 if (selectionFlag == ScalarDS.INTERLACE_PIXEL) { 662 log.trace("createImageAttributes(): subclass IMAGE_TRUECOLOR selectionFlag INTERLACE_PIXEL"); 663 subclass = "IMAGE_TRUECOLOR"; 664 interlaceMode = "INTERLACE_PIXEL"; 665 } 666 else if (selectionFlag == ScalarDS.INTERLACE_PLANE) { 667 log.trace("createImageAttributes(): subclass IMAGE_TRUECOLOR selectionFlag INTERLACE_PLANE"); 668 subclass = "IMAGE_TRUECOLOR"; 669 interlaceMode = "INTERLACE_PLANE"; 670 } 671 else if (selectionFlag == -1) { 672 log.trace("createImageAttributes(): subclass IMAGE_INDEXED"); 673 subclass = "IMAGE_INDEXED"; 674 } 675 else { 676 log.debug("createImageAttributes(): invalid selectionFlag"); 677 log.trace("createImageAttributes(): finish"); 678 throw new HDF5Exception("The selectionFlag is invalid."); 679 } 680 681 String attrName = "CLASS"; 682 String[] classValue = { "IMAGE" }; 683 Datatype attrType = new H5Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, -1, -1); 684 Attribute attr = new Attribute(dataset, attrName, attrType, null); 685 attr.write(classValue); 686 687 attrName = "IMAGE_VERSION"; 688 String[] versionValue = { "1.2" }; 689 attrType = new H5Datatype(Datatype.CLASS_STRING, versionValue[0].length() + 1, -1, -1); 690 attr = new Attribute(dataset, attrName, attrType, null); 691 attr.write(versionValue); 692 693 long[] attrDims = { 2 }; 694 attrName = "IMAGE_MINMAXRANGE"; 695 byte[] attrValueInt = { 0, (byte) 255 }; 696 attrType = new H5Datatype(Datatype.CLASS_CHAR, 1, Datatype.NATIVE, Datatype.SIGN_NONE); 697 attr = new Attribute(dataset, attrName, attrType, attrDims); 698 attr.write(attrValueInt); 699 700 attrName = "IMAGE_SUBCLASS"; 701 String[] subclassValue = { subclass }; 702 attrType = new 
H5Datatype(Datatype.CLASS_STRING, subclassValue[0].length() + 1, -1, -1); 703 attr = new Attribute(dataset, attrName, attrType, null); 704 attr.write(subclassValue); 705 706 if ((selectionFlag == ScalarDS.INTERLACE_PIXEL) || (selectionFlag == ScalarDS.INTERLACE_PLANE)) { 707 attrName = "INTERLACE_MODE"; 708 String[] interlaceValue = { interlaceMode }; 709 attrType = new H5Datatype(Datatype.CLASS_STRING, interlaceValue[0].length() + 1, -1, -1); 710 attr = new Attribute(dataset, attrName, attrType, null); 711 attr.write(interlaceValue); 712 } 713 else { 714 attrName = "PALETTE"; 715 long[] palRef = { 0 }; // set ref to null 716 attrType = new H5Datatype(Datatype.CLASS_REFERENCE, 1, Datatype.NATIVE, Datatype.SIGN_NONE); 717 attr = new Attribute(dataset, attrName, attrType, null); 718 attr.write(palRef); 719 } 720 log.trace("createImageAttributes(): finish"); 721 } 722 723 /** 724 * Updates values of scalar dataset object references in copied file. 725 * <p> 726 * This method has very specific functionality as documented below, and the user is advised to pay close attention 727 * when dealing with files that contain references. 728 * <p> 729 * When a copy is made from one HDF file to another, object references and dataset region references are copied, but 730 * the references in the destination file are not updated by the copy and are therefore invalid. 731 * <p> 732 * When an entire file is copied, this method updates the values of the object references and dataset region 733 * references that are in scalar datasets in the destination file so that they point to the correct object(s) in the 734 * destination file. The method does not update references that occur in objects other than scalar datasets. 735 * <p> 736 * In the current release, the updating of object references is not handled completely as it was not required by the 737 * projects that funded development. There is no support for updates when the copy does not include the entire file. 
738 * Nor is there support for updating objects other than scalar datasets in full-file copies. This functionality will 739 * be extended as funding becomes available or, possibly, when the underlying HDF library supports the reference 740 * updates itself. 741 * 742 * @param srcFile 743 * The file that was copied. 744 * @param dstFile 745 * The destination file where the object references will be updated. 746 * 747 * @throws Exception 748 * If there is a problem in the update process. 749 */ 750 public static final void updateReferenceDataset(H5File srcFile, H5File dstFile) throws Exception { 751 log.trace("updateReferenceDataset(): start"); 752 if ((srcFile == null) || (dstFile == null)) { 753 log.debug("updateReferenceDataset(): srcFile or dstFile is null"); 754 log.trace("updateReferenceDataset(): finish"); 755 return; 756 } 757 758 HObject srcRoot = srcFile.getRootObject(); 759 HObject newRoot = dstFile.getRootObject(); 760 761 Iterator<HObject> srcIt = getMembersBreadthFirst(srcRoot).iterator(); 762 Iterator<HObject> newIt = getMembersBreadthFirst(newRoot).iterator(); 763 764 long did = -1; 765 // build one-to-one table of between objects in 766 // the source file and new file 767 long tid = -1; 768 HObject srcObj, newObj; 769 Hashtable<String, long[]> oidMap = new Hashtable<>(); 770 List<ScalarDS> refDatasets = new Vector<>(); 771 while (newIt.hasNext() && srcIt.hasNext()) { 772 srcObj = srcIt.next(); 773 newObj = newIt.next(); 774 oidMap.put(String.valueOf((srcObj.getOID())[0]), newObj.getOID()); 775 did = -1; 776 tid = -1; 777 778 // for Scalar DataSets in destination, if there is an object 779 // reference in the dataset, add it to the refDatasets list for 780 // later updating. 
781 if (newObj instanceof ScalarDS) { 782 ScalarDS sd = (ScalarDS) newObj; 783 did = sd.open(); 784 if (did >= 0) { 785 try { 786 tid = H5.H5Dget_type(did); 787 if (H5.H5Tequal(tid, HDF5Constants.H5T_STD_REF_OBJ)) { 788 refDatasets.add(sd); 789 } 790 } 791 catch (Exception ex) { 792 log.debug("updateReferenceDataset(): ScalarDS reference failure: ", ex); 793 } 794 finally { 795 try { 796 H5.H5Tclose(tid); 797 } 798 catch (Exception ex) { 799 log.debug("updateReferenceDataset(): ScalarDS reference H5Tclose(tid {}) failure: ", tid, ex); 800 } 801 } 802 } 803 sd.close(did); 804 } // if (newObj instanceof ScalarDS) 805 } 806 807 // Update the references in the scalar datasets in the dest file. 808 H5ScalarDS d = null; 809 long sid = -1; 810 int size = 0; 811 int rank = 0; 812 int n = refDatasets.size(); 813 for (int i = 0; i < n; i++) { 814 log.trace("updateReferenceDataset(): Update the references in the scalar datasets in the dest file"); 815 d = (H5ScalarDS) refDatasets.get(i); 816 byte[] buf = null; 817 long[] refs = null; 818 819 try { 820 did = d.open(); 821 if (did >= 0) { 822 tid = H5.H5Dget_type(did); 823 sid = H5.H5Dget_space(did); 824 rank = H5.H5Sget_simple_extent_ndims(sid); 825 size = 1; 826 if (rank > 0) { 827 long[] dims = new long[rank]; 828 H5.H5Sget_simple_extent_dims(sid, dims, null); 829 log.trace("updateReferenceDataset(): rank={}, dims={}", rank, dims); 830 for (int j = 0; j < rank; j++) { 831 size *= (int) dims[j]; 832 } 833 dims = null; 834 } 835 836 buf = new byte[size * 8]; 837 H5.H5Dread(did, tid, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, buf); 838 839 // update the ref values 840 refs = HDFNativeData.byteToLong(buf); 841 size = refs.length; 842 for (int j = 0; j < size; j++) { 843 long[] theOID = oidMap.get(String.valueOf(refs[j])); 844 if (theOID != null) { 845 refs[j] = theOID[0]; 846 } 847 } 848 849 // write back to file 850 H5.H5Dwrite(did, tid, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, 
HDF5Constants.H5P_DEFAULT, refs); 851 } 852 else { 853 log.debug("updateReferenceDataset(): dest file dataset failed to open"); 854 } 855 } 856 catch (Exception ex) { 857 log.debug("updateReferenceDataset(): Reference[{}] failure: ", i, ex); 858 log.trace("updateReferenceDataset(): Reference[{}] continue", i); 859 continue; 860 } 861 finally { 862 try { 863 H5.H5Tclose(tid); 864 } 865 catch (Exception ex) { 866 log.debug("updateReferenceDataset(): H5ScalarDS reference[{}] H5Tclose(tid {}) failure: ", i, tid, ex); 867 } 868 try { 869 H5.H5Sclose(sid); 870 } 871 catch (Exception ex) { 872 log.debug("updateReferenceDataset(): H5ScalarDS reference[{}] H5Sclose(sid {}) failure: ", i, sid, ex); 873 } 874 try { 875 H5.H5Dclose(did); 876 } 877 catch (Exception ex) { 878 log.debug("updateReferenceDataset(): H5ScalarDS reference[{}] H5Dclose(did {}) failure: ", i, did, ex); 879 } 880 } 881 882 refs = null; 883 buf = null; 884 } // for (int i=0; i<n; i++) 885 } 886 887 /*************************************************************************** 888 * Implementation Class methods. These methods are related to the implementing H5File class, but not to a particular 889 * instance of the class. Since we can't override class methods (they can only be shadowed in Java), these are 890 * instance methods. 891 **************************************************************************/ 892 893 /** 894 * Returns the version of the HDF5 library. 895 * 896 * @see hdf.object.FileFormat#getLibversion() 897 */ 898 @Override 899 public String getLibversion() { 900 int[] vers = new int[3]; 901 String ver = "HDF5 "; 902 903 try { 904 H5.H5get_libversion(vers); 905 } 906 catch (Throwable ex) { 907 ex.printStackTrace(); 908 } 909 910 ver += vers[0] + "." + vers[1] + "." + vers[2]; 911 log.debug("getLibversion(): libversion is {}", ver); 912 913 return ver; 914 } 915 916 /** 917 * Checks if the specified FileFormat instance has the HDF5 format. 
918 * 919 * @see hdf.object.FileFormat#isThisType(hdf.object.FileFormat) 920 */ 921 @Override 922 public boolean isThisType(FileFormat theFile) { 923 return (theFile instanceof H5File); 924 } 925 926 /** 927 * Checks if the specified file has the HDF5 format. 928 * 929 * @see hdf.object.FileFormat#isThisType(java.lang.String) 930 */ 931 @Override 932 public boolean isThisType(String filename) { 933 boolean isH5 = false; 934 935 try { 936 isH5 = H5.H5Fis_hdf5(filename); 937 } 938 catch (HDF5Exception ex) { 939 isH5 = false; 940 } 941 942 return isH5; 943 } 944 945 /** 946 * Creates an HDF5 file with the specified name and returns a new H5File instance associated with the file. 947 * 948 * @throws Exception 949 * If the file cannot be created or if createFlag has unexpected value. 950 * 951 * @see hdf.object.FileFormat#createFile(java.lang.String, int) 952 * @see #H5File(String, int) 953 */ 954 @Override 955 public FileFormat createFile(String filename, int createFlag) throws Exception { 956 log.trace("createFile(): start: filename={} createFlag={}", filename, createFlag); 957 // Flag if we need to create or truncate the file. 
958 Boolean doCreateFile = true; 959 960 // Won't create or truncate if CREATE_OPEN specified and file exists 961 if ((createFlag & FILE_CREATE_OPEN) == FILE_CREATE_OPEN) { 962 File f = new File(filename); 963 if (f.exists()) { 964 doCreateFile = false; 965 } 966 } 967 log.trace("createFile(): doCreateFile={}", doCreateFile); 968 969 if (doCreateFile) { 970 long fapl = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS); 971 972 if ((createFlag & FILE_CREATE_EARLY_LIB) == FILE_CREATE_EARLY_LIB) { 973 int[] newlibver = getLibBounds(); 974 H5.H5Pset_libver_bounds(fapl, newlibver[0], newlibver[1]); 975 } 976 977 long fileid = H5.H5Fcreate(filename, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, fapl); 978 try { 979 H5.H5Pclose(fapl); 980 H5.H5Fclose(fileid); 981 } 982 catch (HDF5Exception ex) { 983 log.debug("H5 file, {} failure: ", filename, ex); 984 } 985 } 986 987 log.trace("createFile(): finish"); 988 return new H5File(filename, WRITE); 989 } 990 991 /** 992 * Creates an H5File instance with specified file name and access. 993 * 994 * @see hdf.object.FileFormat#createInstance(java.lang.String, int) 995 * @see #H5File(String, int) 996 * 997 * @throws Exception 998 * If there is a failure. 999 */ 1000 @Override 1001 public FileFormat createInstance(String filename, int access) throws Exception { 1002 log.trace("createInstance() for {}", filename); 1003 return new H5File(filename, access); 1004 } 1005 1006 /*************************************************************************** 1007 * Instance Methods 1008 * 1009 * These methods are related to the H5File class and to particular instances of objects with this class type. 1010 **************************************************************************/ 1011 1012 /** 1013 * Opens file and returns a file identifier. 
1014 * 1015 * @see hdf.object.FileFormat#open() 1016 */ 1017 @Override 1018 public long open() throws Exception { 1019 log.trace("open()"); 1020 return open(true); 1021 } 1022 1023 /** 1024 * Opens file and returns a file identifier. 1025 * 1026 * @see hdf.object.FileFormat#open(int...) 1027 */ 1028 @Override 1029 public long open(int... indexList) throws Exception { 1030 setIndexType(indexList[0]); 1031 setIndexOrder(indexList[1]); 1032 log.trace("open() with proplist"); 1033 return open(true); 1034 } 1035 1036 /** 1037 * Sets the bounds of new library versions. 1038 * 1039 * @param lowStr 1040 * The earliest version of the library. 1041 * @param highStr 1042 * The latest version of the library. 1043 * 1044 * @throws HDF5Exception 1045 * If there is an error at the HDF5 library level. 1046 */ 1047 @Override 1048 public void setNewLibBounds(String lowStr, String highStr) throws Exception { 1049 int low = -1, high = -1; 1050 1051 if (lowStr == null) { 1052 low = HDF5Constants.H5F_LIBVER_EARLIEST; 1053 } 1054 else if(lowStr.equals("Earliest")) { 1055 low = HDF5Constants.H5F_LIBVER_EARLIEST; 1056 } 1057 else if(lowStr.equals("V18")) { 1058 low = HDF5Constants.H5F_LIBVER_V18; 1059 } 1060 else if(lowStr.equals("V110")) { 1061 low = HDF5Constants.H5F_LIBVER_V110; 1062 } 1063 else if(lowStr.equals("Latest")) { 1064 low = HDF5Constants.H5F_LIBVER_LATEST; 1065 } 1066 else { 1067 low = HDF5Constants.H5F_LIBVER_EARLIEST; 1068 } 1069 1070 if (highStr == null) { 1071 high = HDF5Constants.H5F_LIBVER_LATEST; 1072 } 1073 else if(highStr.equals("V18")) { 1074 high = HDF5Constants.H5F_LIBVER_V18; 1075 } 1076 else if(highStr.equals("V110")) { 1077 high = HDF5Constants.H5F_LIBVER_V110; 1078 } 1079 else if(highStr.equals("Latest")) { 1080 high = HDF5Constants.H5F_LIBVER_LATEST; 1081 } 1082 else { 1083 high = HDF5Constants.H5F_LIBVER_LATEST; 1084 } 1085 libver[0] = low; 1086 libver[1] = high; 1087 } 1088 1089 /** 1090 * Sets the bounds of library versions. 
1091 * 1092 * @param lowStr 1093 * The earliest version of the library. 1094 * @param highStr 1095 * The latest version of the library. 1096 * 1097 * @throws HDF5Exception 1098 * If there is an error at the HDF5 library level. 1099 */ 1100 @Override 1101 public void setLibBounds(String lowStr, String highStr) throws Exception { 1102 long fapl = HDF5Constants.H5P_DEFAULT; 1103 1104 if (fid < 0) 1105 return; 1106 1107 fapl = H5.H5Fget_access_plist(fid); 1108 1109 try { 1110 int low = -1, high = -1; 1111 1112 if (lowStr == null) { 1113 low = HDF5Constants.H5F_LIBVER_EARLIEST; 1114 } 1115 else if(lowStr.equals("Earliest")) { 1116 low = HDF5Constants.H5F_LIBVER_EARLIEST; 1117 } 1118 else if(lowStr.equals("V18")) { 1119 low = HDF5Constants.H5F_LIBVER_V18; 1120 } 1121 else if(lowStr.equals("V110")) { 1122 low = HDF5Constants.H5F_LIBVER_V110; 1123 } 1124 else if(lowStr.equals("Latest")) { 1125 low = HDF5Constants.H5F_LIBVER_LATEST; 1126 } 1127 else { 1128 low = HDF5Constants.H5F_LIBVER_EARLIEST; 1129 } 1130 1131 if (highStr == null) { 1132 high = HDF5Constants.H5F_LIBVER_LATEST; 1133 } 1134 else if(highStr.equals("V18")) { 1135 high = HDF5Constants.H5F_LIBVER_V18; 1136 } 1137 else if(highStr.equals("V110")) { 1138 high = HDF5Constants.H5F_LIBVER_V110; 1139 } 1140 else if(highStr.equals("Latest")) { 1141 high = HDF5Constants.H5F_LIBVER_LATEST; 1142 } 1143 else { 1144 high = HDF5Constants.H5F_LIBVER_LATEST; 1145 } 1146 1147 H5.H5Pset_libver_bounds(fapl, low, high); 1148 H5.H5Pget_libver_bounds(fapl, libver); 1149 } 1150 finally { 1151 try { 1152 H5.H5Pclose(fapl); 1153 } 1154 catch (Exception e) { 1155 log.debug("setLibBounds(): libver bounds H5Pclose(fapl {}) failure: ", fapl, e); 1156 } 1157 } 1158 } 1159 1160 /** 1161 * Gets the bounds of library versions. 1162 * 1163 * @return libver The earliest and latest version of the library. 1164 * 1165 * @throws HDF5Exception 1166 * If there is an error at the HDF5 library level. 
     */
    @Override
    public int[] getLibBounds() throws Exception {
        // Returns the cached [low, high] bounds; updated by setLibBounds()/setNewLibBounds().
        return libver;
    }

    /**
     * Gets the bounds of library versions as text.
     *
     * @return libversion The earliest and latest version of the library.
     */
    @Override
    public String getLibBoundsDescription() {
        String libversion = "";

        // Describe the lower bound first ("X and "), then append the upper bound.
        if (libver[0] == HDF5Constants.H5F_LIBVER_EARLIEST) {
            libversion = "Earliest and ";
        }
        else if (libver[0] == HDF5Constants.H5F_LIBVER_V18) {
            libversion = "V18 and ";
        }
        else if (libver[0] == HDF5Constants.H5F_LIBVER_V110) {
            libversion = "V110 and ";
        }
        else if (libver[0] == HDF5Constants.H5F_LIBVER_LATEST) {
            libversion = "Latest and ";
        }
        if (libver[1] == HDF5Constants.H5F_LIBVER_EARLIEST) {
            libversion += "Earliest";
        }
        else if (libver[1] == HDF5Constants.H5F_LIBVER_V18) {
            libversion += "V18";
        }
        else if (libver[1] == HDF5Constants.H5F_LIBVER_V110) {
            libversion += "V110";
        }
        else if (libver[1] == HDF5Constants.H5F_LIBVER_LATEST) {
            libversion += "Latest";
        }
        return libversion;
    }

    /**
     * Closes file associated with this H5File instance.
     *
     * @see hdf.object.FileFormat#close()
     *
     * @throws HDF5Exception
     *             If there is an error at the HDF5 library level.
     */
    @Override
    public void close() throws HDF5Exception {
        log.trace("close(): start");
        if (fid < 0) {
            log.debug("close(): file {} is not open", fullFileName);
            log.trace("close(): finish");
            return;
        }
        // The current working directory may be changed at Dataset.read()
        // by System.setProperty("user.dir", newdir) to make it work for external
        // datasets. We need to set it back to the original current working
        // directory (when hdf-java application started) before the file
        // is closed/opened. Otherwise, relative paths such as "./test.h5"
        // may not resolve correctly.
        String rootPath = System.getProperty("hdfview.workdir");
        if (rootPath == null) {
            rootPath = System.getProperty("user.dir");
        }
        System.setProperty("user.dir", rootPath);//H5.H5Dchdir_ext(rootPath);

        // clean up unused objects: release cached data held by every member
        // of the tree before the underlying HDF5 handles are closed.
        if (rootObject != null) {
            HObject theObj = null;
            Iterator<HObject> it = getMembersBreadthFirst(rootObject).iterator();
            while (it.hasNext()) {
                theObj = it.next();

                if (theObj instanceof Dataset) {
                    log.trace("close(): clear Dataset {}", ((Dataset) theObj).toString());
                    ((Dataset) theObj).clear();
                }
                else if (theObj instanceof Group) {
                    log.trace("close(): clear Group {}", ((Group) theObj).toString());
                    ((Group) theObj).clear();
                }
            }
        }

        // Close all open objects associated with this file.
        try {
            int type = -1;
            long[] oids;
            long n = H5.H5Fget_obj_count(fid, HDF5Constants.H5F_OBJ_ALL);
            log.trace("close(): open objects={}", n);

            if (n > 0) {
                // Guard the narrowing cast below. NOTE(review): the first clause
                // is vacuous here since n > 0 already holds.
                if (n < Integer.MIN_VALUE || n > Integer.MAX_VALUE) throw new Exception("Invalid int size");

                oids = new long[(int)n];
                H5.H5Fget_obj_ids(fid, HDF5Constants.H5F_OBJ_ALL, n, oids);

                // Close each dangling object with the close call matching its type.
                for (int i = 0; i < (int)n; i++) {
                    log.trace("close(): object[{}] id={}", i, oids[i]);
                    type = H5.H5Iget_type(oids[i]);

                    if (HDF5Constants.H5I_DATASET == type) {
                        try {
                            H5.H5Dclose(oids[i]);
                        }
                        catch (Exception ex2) {
                            log.debug("close(): Object[{}] H5Dclose(oids[{}] {}) failure: ", i, i, oids[i], ex2);
                        }
                    }
                    else if (HDF5Constants.H5I_GROUP == type) {
                        try {
                            H5.H5Gclose(oids[i]);
                        }
                        catch (Exception ex2) {
                            log.debug("close(): Object[{}] H5Gclose(oids[{}] {}) failure: ", i, i, oids[i], ex2);
                        }
                    }
                    else if (HDF5Constants.H5I_DATATYPE == type) {
                        try {
                            H5.H5Tclose(oids[i]);
                        }
                        catch (Exception ex2) {
                            log.debug("close(): Object[{}] H5Tclose(oids[{}] {}) failure: ", i, i, oids[i], ex2);
                        }
                    }
                    else if (HDF5Constants.H5I_ATTR == type) {
                        try {
                            H5.H5Aclose(oids[i]);
                        }
                        catch (Exception ex2) {
                            log.debug("close(): Object[{}] H5Aclose(oids[{}] {}) failure: ", i, i, oids[i], ex2);
                        }
                    }
                } // for (int i=0; i<n; i++)
            } // if ( n>0)
        }
        catch (Exception ex) {
            log.debug("close(): failure: ", ex);
        }

        try {
            H5.H5Fflush(fid, HDF5Constants.H5F_SCOPE_GLOBAL);
        }
        catch (Exception ex) {
            log.debug("close(): H5Fflush(fid {}) failure: ", fid, ex);
        }

        try {
            H5.H5Fclose(fid);
        }
        catch (Exception ex) {
            log.debug("close(): H5Fclose(fid {}) failure: ", fid, ex);
        }

        // Set fid to -1 but don't reset rootObject
        fid = -1;
        log.trace("close(): finish");
    }

    /**
     * Returns the root object of the open HDF5 File.
     *
     * @see hdf.object.FileFormat#getRootObject()
     */
    @Override
    public HObject getRootObject() {
        return rootObject;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.FileFormat#get(java.lang.String)
     */
    @Override
    public HObject get(String path) throws Exception {
        log.trace("get({}): start", path);
        HObject obj = null;

        if ((path == null) || (path.length() <= 0)) {
            log.debug("get(): path is null or invalid path length");
            System.err.println("(path == null) || (path.length() <= 0)");
            log.trace("get(): finish");
            return null;
        }

        // replace the wrong slash and get rid of "//"
        path = path.replace('\\', '/');
        path = "/" + path;
        path = path.replaceAll("//", "/");

        // the whole file tree is loaded: find the object in the in-memory tree first.
        if (rootObject != null) {
            obj = findObject(this, path);
        }

        // found object in memory
        if (obj != null) {
            log.trace("get(): Found object in memory");
            log.trace("get(): finish");
            return obj;
        }

        // open only the requested object: split the path into parent path and name.
        String name = null, pPath = null;
        if (path.equals("/")) {
            name = "/"; // the root
        }
        else {
            // separate the parent path and the object name
            if (path.endsWith("/")) {
                path = path.substring(0, path.length() - 1);
            }

            int idx = path.lastIndexOf('/');
            name = path.substring(idx + 1);
            if (idx == 0) {
                pPath = "/";
            }
            else {
                pPath = path.substring(0, idx);
            }
        }

        // do not open the full tree structure, only the file handler
        long fid_before_open = fid;
        fid = open(false);
        if (fid < 0) {
            log.debug("get(): Invalid FID");
            log.trace("get(): finish");
            System.err.println("Could not open file handler");
            return null;
        }

        try {
            H5O_info_t info;
            int objType;
            long oid = H5.H5Oopen(fid, path, HDF5Constants.H5P_DEFAULT);

            if (oid >= 0) {
                info = H5.H5Oget_info(oid);
                objType = info.type;
                if (objType == HDF5Constants.H5O_TYPE_DATASET) {
                    long did = -1;
                    try {
                        did = H5.H5Dopen(fid, path, HDF5Constants.H5P_DEFAULT);
                        obj = getDataset(did, name, pPath);
                    }
                    finally {
                        try {
                            H5.H5Dclose(did);
                        }
                        catch (Exception ex) {
                            log.debug("get(): {} H5Dclose(did {}) failure: ", path, did, ex);
                        }
                    }
                }
                else if (objType == HDF5Constants.H5O_TYPE_GROUP) {
                    long gid = -1;
                    try {
                        gid = H5.H5Gopen(fid, path, HDF5Constants.H5P_DEFAULT);
                        H5Group pGroup = null;
                        if (pPath != null) {
                            pGroup = new H5Group(this, null, pPath, null);
                            obj = getGroup(gid, name, pGroup);
                            pGroup.addToMemberList(obj);
                        }
                        else {
                            obj =
getGroup(gid, name, pGroup); 1440 } 1441 } 1442 finally { 1443 try { 1444 H5.H5Gclose(gid); 1445 } 1446 catch (Exception ex) { 1447 log.debug("get(): {} H5Gclose(gid {}) failure: ", path, gid, ex); 1448 } 1449 } 1450 } 1451 else if (objType == HDF5Constants.H5O_TYPE_NAMED_DATATYPE) { 1452 obj = new H5Datatype(this, name, pPath); 1453 } 1454 } 1455 try { 1456 H5.H5Oclose(oid); 1457 } 1458 catch (Exception ex) { 1459 log.debug("get(): H5Oclose(oid {}) failure: ", oid, ex); 1460 ex.printStackTrace(); 1461 } 1462 } 1463 catch (Exception ex) { 1464 log.debug("get(): Exception finding obj {}", path, ex); 1465 obj = null; 1466 } 1467 finally { 1468 if ((fid_before_open <= 0) && (obj == null)) { 1469 // close the fid that is not attached to any object 1470 try { 1471 H5.H5Fclose(fid); 1472 } 1473 catch (Exception ex) { 1474 log.debug("get(): {} H5Fclose(fid {}) failure: ", path, fid, ex); 1475 } 1476 fid = fid_before_open; 1477 } 1478 } 1479 1480 return obj; 1481 } 1482 1483 /* 1484 * (non-Javadoc) 1485 * 1486 * @see hdf.object.FileFormat#createDatatype(int, int, int, int, java.lang.String) 1487 */ 1488 @Override 1489 public Datatype createDatatype(int tclass, int tsize, int torder, int tsign, String name) throws Exception { 1490 return createDatatype(tclass, tsize, torder, tsign, null, name); 1491 } 1492 1493 /* 1494 * (non-Javadoc) 1495 * 1496 * @see hdf.object.FileFormat#createDatatype(int, int, int, int, Datatype, java.lang.String) 1497 */ 1498 @Override 1499 public Datatype createDatatype(int tclass, int tsize, int torder, int tsign, Datatype tbase, String name) 1500 throws Exception { 1501 log.trace("createDatatype(): start: name={} class={} size={} order={} sign={}", name, tclass, tsize, torder, tsign); 1502 if (tbase != null) 1503 log.trace("createDatatype(): baseType is {}", tbase.getDescription()); 1504 1505 long tid = -1; 1506 H5Datatype dtype = null; 1507 1508 try { 1509 H5Datatype t = (H5Datatype) createDatatype(tclass, tsize, torder, tsign, tbase); 1510 if 
((tid = t.createNative()) < 0) { 1511 log.debug("createDatatype(): createNative() failure"); 1512 log.trace("createDatatype(): finish"); 1513 throw new Exception("createNative() failed"); 1514 } 1515 1516 H5.H5Tcommit(fid, name, tid, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, 1517 HDF5Constants.H5P_DEFAULT); 1518 1519 byte[] ref_buf = H5.H5Rcreate(fid, name, HDF5Constants.H5R_OBJECT, -1); 1520 long l = HDFNativeData.byteToLong(ref_buf, 0); 1521 1522 long[] oid = new long[1]; 1523 oid[0] = l; // save the object ID 1524 1525 dtype = new H5Datatype(this, null, name); 1526 } 1527 finally { 1528 H5.H5Tclose(tid); 1529 } 1530 1531 log.trace("createDatatype(): finish"); 1532 return dtype; 1533 } 1534 1535 /*************************************************************************** 1536 * Methods related to Datatypes and HObjects in HDF5 Files. Strictly speaking, these methods aren't related to 1537 * H5File and the actions could be carried out through the H5Group, H5Datatype and H5*DS classes. But, in some cases 1538 * they allow a null input and expect the generated object to be of HDF5 type. So, we put them in the H5File class 1539 * so that we create the proper type of HObject... H5Group for example. 1540 * 1541 * Here again, if there could be Implementation Class methods we'd use those. But, since we can't override class 1542 * methods (they can only be shadowed in Java), these are instance methods. 
1543 * 1544 **************************************************************************/ 1545 1546 /* 1547 * (non-Javadoc) 1548 * 1549 * @see hdf.object.FileFormat#createDatatype(int, int, int, int) 1550 */ 1551 @Override 1552 public Datatype createDatatype(int tclass, int tsize, int torder, int tsign) throws Exception { 1553 log.trace("create datatype"); 1554 return new H5Datatype(tclass, tsize, torder, tsign); 1555 } 1556 1557 /* 1558 * (non-Javadoc) 1559 * 1560 * @see hdf.object.FileFormat#createDatatype(int, int, int, int, Datatype) 1561 */ 1562 @Override 1563 public Datatype createDatatype(int tclass, int tsize, int torder, int tsign, Datatype tbase) throws Exception { 1564 log.trace("create datatype with base"); 1565 return new H5Datatype(tclass, tsize, torder, tsign, tbase); 1566 } 1567 1568 /* 1569 * (non-Javadoc) 1570 * 1571 * @see hdf.object.FileFormat#createScalarDS(java.lang.String, hdf.object.Group, hdf.object.Datatype, 1572 * long[], long[], long[], int, java.lang.Object) 1573 */ 1574 @Override 1575 public Dataset createScalarDS(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims, long[] chunks, 1576 int gzip, Object fillValue, Object data) throws Exception { 1577 log.trace("createScalarDS(): name={}", name); 1578 if (pgroup == null) { 1579 // create new dataset at the root group by default 1580 pgroup = (Group) get("/"); 1581 } 1582 1583 return H5ScalarDS.create(name, pgroup, type, dims, maxdims, chunks, gzip, fillValue, data); 1584 } 1585 1586 /* 1587 * (non-Javadoc) 1588 * 1589 * @see hdf.object.FileFormat#createCompoundDS(java.lang.String, hdf.object.Group, long[], long[], long[], 1590 * int, java.lang.String[], hdf.object.Datatype[], int[], java.lang.Object) 1591 */ 1592 @Override 1593 public Dataset createCompoundDS(String name, Group pgroup, long[] dims, long[] maxdims, long[] chunks, int gzip, 1594 String[] memberNames, Datatype[] memberDatatypes, int[] memberSizes, Object data) throws Exception { 1595 
log.trace("createCompoundDS(): start: name={}", name); 1596 int nMembers = memberNames.length; 1597 int memberRanks[] = new int[nMembers]; 1598 long memberDims[][] = new long[nMembers][1]; 1599 Dataset ds = null; 1600 1601 for (int i = 0; i < nMembers; i++) { 1602 memberRanks[i] = 1; 1603 if (memberSizes == null) { 1604 memberDims[i][0] = 1; 1605 } 1606 else { 1607 memberDims[i][0] = memberSizes[i]; 1608 } 1609 } 1610 1611 if (pgroup == null) { 1612 // create new dataset at the root group by default 1613 pgroup = (Group) get("/"); 1614 } 1615 ds = H5CompoundDS.create(name, pgroup, dims, maxdims, chunks, gzip, memberNames, memberDatatypes, memberRanks, 1616 memberDims, data); 1617 1618 log.trace("createCompoundDS(): finish"); 1619 return ds; 1620 } 1621 1622 /* 1623 * (non-Javadoc) 1624 * 1625 * @see hdf.object.FileFormat#createImage(java.lang.String, hdf.object.Group, hdf.object.Datatype, 1626 * long[], long[], long[], int, int, int, java.lang.Object) 1627 */ 1628 @Override 1629 public Dataset createImage(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims, long[] chunks, 1630 int gzip, int ncomp, int interlace, Object data) throws Exception { 1631 log.trace("createImage(): start: name={}", name); 1632 if (pgroup == null) { // create at the root group by default 1633 pgroup = (Group) get("/"); 1634 } 1635 1636 H5ScalarDS dataset = (H5ScalarDS)H5ScalarDS.create(name, pgroup, type, dims, maxdims, chunks, gzip, data); 1637 1638 try { 1639 H5File.createImageAttributes(dataset, interlace); 1640 dataset.setIsImage(true); 1641 } 1642 catch (Exception ex) { 1643 log.debug("createImage(): {} createImageAttributtes failure: ", name, ex); 1644 } 1645 1646 log.trace("createImage(): finish"); 1647 return dataset; 1648 } 1649 1650 /*** 1651 * Creates a new group with specified name in existing group. 
1652 * 1653 * @see hdf.object.FileFormat#createGroup(java.lang.String, hdf.object.Group) 1654 */ 1655 @Override 1656 public Group createGroup(String name, Group pgroup) throws Exception { 1657 return this.createGroup(name, pgroup, HDF5Constants.H5P_DEFAULT); 1658 } 1659 1660 /*** 1661 * Creates a new group with specified name in existing group and with the group creation properties list, gplist. 1662 * 1663 * @see hdf.object.h5.H5Group#create(java.lang.String, hdf.object.Group, long...) 1664 * 1665 */ 1666 @Override 1667 public Group createGroup(String name, Group pgroup, long... gplist) throws Exception { 1668 // create new group at the root 1669 if (pgroup == null) { 1670 pgroup = (Group) this.get("/"); 1671 } 1672 1673 return H5Group.create(name, pgroup, gplist); 1674 } 1675 1676 /*** 1677 * Creates the group creation property list identifier, gcpl. This identifier is used when creating Groups. 1678 * 1679 * @see hdf.object.FileFormat#createGcpl(int, int, int) 1680 * 1681 */ 1682 @Override 1683 public long createGcpl(int creationorder, int maxcompact, int mindense) throws Exception { 1684 log.trace("createGcpl(): start"); 1685 long gcpl = -1; 1686 try { 1687 gcpl = H5.H5Pcreate(HDF5Constants.H5P_GROUP_CREATE); 1688 if (gcpl >= 0) { 1689 // Set link creation order. 1690 if (creationorder == Group.CRT_ORDER_TRACKED) { 1691 log.trace("createGcpl(): creation order ORDER_TRACKED"); 1692 H5.H5Pset_link_creation_order(gcpl, HDF5Constants.H5P_CRT_ORDER_TRACKED); 1693 } 1694 else if (creationorder == Group.CRT_ORDER_INDEXED) { 1695 log.trace("createGcpl(): creation order ORDER_INDEXED"); 1696 H5.H5Pset_link_creation_order(gcpl, HDF5Constants.H5P_CRT_ORDER_TRACKED 1697 + HDF5Constants.H5P_CRT_ORDER_INDEXED); 1698 } 1699 // Set link storage. 
1700 H5.H5Pset_link_phase_change(gcpl, maxcompact, mindense); 1701 } 1702 } 1703 catch (Exception ex) { 1704 log.debug("createGcpl(): failure: ", ex); 1705 ex.printStackTrace(); 1706 } 1707 1708 log.trace("createGcpl(): finish"); 1709 return gcpl; 1710 } 1711 1712 /* 1713 * (non-Javadoc) 1714 * 1715 * @see hdf.object.FileFormat#createLink(hdf.object.Group, java.lang.String, hdf.object.HObject) 1716 */ 1717 @Override 1718 public HObject createLink(Group parentGroup, String name, Object currentObj) throws Exception { 1719 if (currentObj instanceof HObject) 1720 return this.createLink(parentGroup, name, (HObject) currentObj, Group.LINK_TYPE_HARD); 1721 else if (currentObj instanceof String) 1722 return this.createLink(parentGroup, name, (String) currentObj, Group.LINK_TYPE_HARD); 1723 1724 return null; 1725 } 1726 1727 /** 1728 * Creates a link to an object in the open file. 1729 * <p> 1730 * If parentGroup is null, the new link is created in the root group. 1731 * 1732 * @param parentGroup 1733 * The group where the link is created. 1734 * @param name 1735 * The name of the link. 1736 * @param currentObj 1737 * The existing object the new link will reference. 1738 * @param lType 1739 * The type of link to be created. It can be a hard link, a soft link or an external link. 1740 * 1741 * @return The object pointed to by the new link if successful; otherwise returns null. 1742 * 1743 * @throws Exception 1744 * The exceptions thrown vary depending on the implementing class. 
1745 */ 1746 @Override 1747 public HObject createLink(Group parentGroup, String name, HObject currentObj, int lType) throws Exception { 1748 log.trace("createLink(): start: name={}", name); 1749 HObject obj = null; 1750 int type = 0; 1751 String current_full_name = null, new_full_name = null, parent_path = null; 1752 1753 if (currentObj == null) { 1754 log.debug("createLink(): Link target is null"); 1755 log.trace("createLink(): finish"); 1756 throw new HDF5Exception("The object pointed to by the link cannot be null."); 1757 } 1758 if ((parentGroup == null) || parentGroup.isRoot()) { 1759 parent_path = HObject.separator; 1760 } 1761 else { 1762 parent_path = parentGroup.getPath() + HObject.separator + parentGroup.getName() + HObject.separator; 1763 } 1764 1765 new_full_name = parent_path + name; 1766 1767 if (lType == Group.LINK_TYPE_HARD) { 1768 type = HDF5Constants.H5L_TYPE_HARD; 1769 log.trace("createLink(): type H5L_TYPE_HARD"); 1770 } 1771 else if (lType == Group.LINK_TYPE_SOFT) { 1772 type = HDF5Constants.H5L_TYPE_SOFT; 1773 log.trace("createLink(): type H5L_TYPE_SOFT"); 1774 } 1775 else if (lType == Group.LINK_TYPE_EXTERNAL) { 1776 type = HDF5Constants.H5L_TYPE_EXTERNAL; 1777 log.trace("createLink(): type H5L_TYPE_EXTERNAL"); 1778 } 1779 1780 if (H5.H5Lexists(fid, new_full_name, HDF5Constants.H5P_DEFAULT)) { 1781 H5.H5Ldelete(fid, new_full_name, HDF5Constants.H5P_DEFAULT); 1782 } 1783 1784 if (type == HDF5Constants.H5L_TYPE_HARD) { 1785 if ((currentObj instanceof Group) && ((Group) currentObj).isRoot()) { 1786 log.debug("createLink(): cannot create link to root group"); 1787 log.trace("createLink(): finish"); 1788 throw new HDF5Exception("Cannot make a link to the root group."); 1789 } 1790 current_full_name = currentObj.getPath() + HObject.separator + currentObj.getName(); 1791 1792 H5.H5Lcreate_hard(fid, current_full_name, fid, new_full_name, HDF5Constants.H5P_DEFAULT, 1793 HDF5Constants.H5P_DEFAULT); 1794 } 1795 1796 else if (type == 
HDF5Constants.H5L_TYPE_SOFT) { 1797 H5.H5Lcreate_soft(currentObj.getFullName(), fid, new_full_name, HDF5Constants.H5P_DEFAULT, 1798 HDF5Constants.H5P_DEFAULT); 1799 } 1800 1801 else if (type == HDF5Constants.H5L_TYPE_EXTERNAL) { 1802 H5.H5Lcreate_external(currentObj.getFile(), currentObj.getFullName(), fid, new_full_name, 1803 HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 1804 } 1805 1806 if (currentObj instanceof Group) { 1807 log.trace("createLink(): Link target is type H5Group"); 1808 obj = new H5Group(this, name, parent_path, parentGroup); 1809 } 1810 else if (currentObj instanceof H5Datatype) { 1811 log.trace("createLink(): Link target is type H5Datatype"); 1812 obj = new H5Datatype(this, name, parent_path); 1813 } 1814 else if (currentObj instanceof H5CompoundDS) { 1815 log.trace("createLink(): Link target is type H5CompoundDS"); 1816 obj = new H5CompoundDS(this, name, parent_path); 1817 } 1818 else if (currentObj instanceof H5ScalarDS) { 1819 log.trace("createLink(): Link target is type H5ScalarDS"); 1820 obj = new H5ScalarDS(this, name, parent_path); 1821 } 1822 1823 log.trace("createLink(): finish"); 1824 return obj; 1825 } 1826 1827 /** 1828 * Creates a soft or external link to object in a file that does not exist at the time the link is created. 1829 * 1830 * @param parentGroup 1831 * The group where the link is created. 1832 * @param name 1833 * The name of the link. 1834 * @param currentObj 1835 * The name of the object the new link will reference. The object doesn't have to exist. 1836 * @param lType 1837 * The type of link to be created. 1838 * 1839 * @return The H5Link object pointed to by the new link if successful; otherwise returns null. 1840 * 1841 * @throws Exception 1842 * The exceptions thrown vary depending on the implementing class. 
1843 */ 1844 @Override 1845 public HObject createLink(Group parentGroup, String name, String currentObj, int lType) throws Exception { 1846 log.trace("createLink(): start: name={}", name); 1847 HObject obj = null; 1848 int type = 0; 1849 String new_full_name = null, parent_path = null; 1850 1851 if (currentObj == null) { 1852 log.debug("createLink(): Link target is null"); 1853 log.trace("createLink(): finish"); 1854 throw new HDF5Exception("The object pointed to by the link cannot be null."); 1855 } 1856 if ((parentGroup == null) || parentGroup.isRoot()) { 1857 parent_path = HObject.separator; 1858 } 1859 else { 1860 parent_path = parentGroup.getPath() + HObject.separator + parentGroup.getName() + HObject.separator; 1861 } 1862 1863 new_full_name = parent_path + name; 1864 1865 if (lType == Group.LINK_TYPE_HARD) { 1866 type = HDF5Constants.H5L_TYPE_HARD; 1867 log.trace("createLink(): type H5L_TYPE_HARD"); 1868 } 1869 else if (lType == Group.LINK_TYPE_SOFT) { 1870 type = HDF5Constants.H5L_TYPE_SOFT; 1871 log.trace("createLink(): type H5L_TYPE_SOFT"); 1872 } 1873 else if (lType == Group.LINK_TYPE_EXTERNAL) { 1874 type = HDF5Constants.H5L_TYPE_EXTERNAL; 1875 log.trace("createLink(): type H5L_TYPE_EXTERNAL"); 1876 } 1877 1878 if (H5.H5Lexists(fid, new_full_name, HDF5Constants.H5P_DEFAULT)) { 1879 H5.H5Ldelete(fid, new_full_name, HDF5Constants.H5P_DEFAULT); 1880 } 1881 1882 if (type == HDF5Constants.H5L_TYPE_SOFT) { 1883 H5.H5Lcreate_soft(currentObj, fid, new_full_name, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 1884 } 1885 1886 else if (type == HDF5Constants.H5L_TYPE_EXTERNAL) { 1887 String fileName = null; 1888 String objectName = null; 1889 1890 // separate the object name and the file name 1891 fileName = currentObj.substring(0, currentObj.lastIndexOf(FileFormat.FILE_OBJ_SEP)); 1892 objectName = currentObj.substring(currentObj.indexOf(FileFormat.FILE_OBJ_SEP)); 1893 objectName = objectName.substring(3); 1894 1895 H5.H5Lcreate_external(fileName, 
objectName, fid, new_full_name, HDF5Constants.H5P_DEFAULT, 1896 HDF5Constants.H5P_DEFAULT); 1897 } 1898 1899 if (name.startsWith(HObject.separator)) { 1900 name = name.substring(1); 1901 } 1902 obj = new H5Link(this, name, parent_path); 1903 1904 log.trace("createLink(): finish"); 1905 return obj; 1906 } 1907 1908 /** 1909 * reload the sub-tree structure from file. 1910 * <p> 1911 * reloadTree(Group g) is useful when the structure of the group in file is changed while the group structure in 1912 * memory is not changed. 1913 * 1914 * @param g 1915 * the group where the structure is to be reloaded in memory 1916 */ 1917 public void reloadTree(Group g) { 1918 if (fid < 0 || rootObject == null || g == null) { 1919 log.debug("reloadTree(): Invalid fid or null object"); 1920 return; 1921 } 1922 1923 depth_first(g, Integer.MIN_VALUE); 1924 } 1925 1926 /* 1927 * (non-Javadoc) NOTE: Object references are copied but not updated by this method. 1928 * 1929 * @see hdf.object.FileFormat#copy(hdf.object.HObject, hdf.object.Group, java.lang.String) 1930 */ 1931 @Override 1932 public HObject copy(HObject srcObj, Group dstGroup, String dstName) throws Exception { 1933 log.trace("copy(): start: srcObj={} dstGroup={} dstName={}", srcObj, dstGroup, dstName); 1934 if ((srcObj == null) || (dstGroup == null)) { 1935 log.debug("copy(): srcObj or dstGroup is null"); 1936 log.trace("copy(): finish"); 1937 return null; 1938 } 1939 1940 if (dstName == null) { 1941 dstName = srcObj.getName(); 1942 } 1943 1944 List<HObject> members = dstGroup.getMemberList(); 1945 int n = members.size(); 1946 for (int i = 0; i < n; i++) { 1947 HObject obj = members.get(i); 1948 String name = obj.getName(); 1949 while (name.equals(dstName)) 1950 dstName += "~copy"; 1951 } 1952 1953 HObject newObj = null; 1954 if (srcObj instanceof Dataset) { 1955 log.trace("copy(): srcObj instanceof Dataset"); 1956 newObj = copyDataset((Dataset) srcObj, (H5Group) dstGroup, dstName); 1957 } 1958 else if (srcObj instanceof 
H5Group) { 1959 log.trace("copy(): srcObj instanceof H5Group"); 1960 newObj = copyGroup((H5Group) srcObj, (H5Group) dstGroup, dstName); 1961 } 1962 else if (srcObj instanceof H5Datatype) { 1963 log.trace("copy(): srcObj instanceof H5Datatype"); 1964 newObj = copyDatatype((H5Datatype) srcObj, (H5Group) dstGroup, dstName); 1965 } 1966 1967 log.trace("copy(): finish"); 1968 return newObj; 1969 } 1970 1971 /* 1972 * (non-Javadoc) 1973 * 1974 * @see hdf.object.FileFormat#delete(hdf.object.HObject) 1975 */ 1976 @Override 1977 public void delete(HObject obj) throws Exception { 1978 if ((obj == null) || (fid < 0)) { 1979 log.debug("delete(): Invalid FID or object is null"); 1980 return; 1981 } 1982 1983 String name = obj.getPath() + obj.getName(); 1984 1985 H5.H5Ldelete(fid, name, HDF5Constants.H5P_DEFAULT); 1986 } 1987 1988 /* 1989 * (non-Javadoc) 1990 * 1991 * @see hdf.object.FileFormat#writeAttribute(hdf.object.HObject, hdf.object.Attribute, boolean) 1992 */ 1993 @Override 1994 public void writeAttribute(HObject obj, Attribute attr, boolean attrExisted) throws HDF5Exception { 1995 log.trace("writeAttribute(): start"); 1996 1997 String obj_name = obj.getFullName(); 1998 String name = attr.getName(); 1999 long tid = -1; 2000 long sid = -1; 2001 long aid = -1; 2002 log.trace("writeAttribute(): name is {}", name); 2003 2004 long objID = obj.open(); 2005 if (objID < 0) { 2006 log.debug("writeAttribute(): Invalid Object ID"); 2007 log.trace("writeAttribute(): finish"); 2008 return; 2009 } 2010 2011 if ((tid = attr.getDatatype().createNative()) >= 0) { 2012 log.trace("writeAttribute(): tid {} from toNative :{}", tid, attr.getDatatype().getDescription()); 2013 try { 2014 if (attr.isScalar()) 2015 sid = H5.H5Screate(HDF5Constants.H5S_SCALAR); 2016 else 2017 sid = H5.H5Screate_simple(attr.getRank(), attr.getDims(), null); 2018 2019 if (attrExisted) { 2020 aid = H5.H5Aopen_by_name(objID, obj_name, name, HDF5Constants.H5P_DEFAULT, 2021 HDF5Constants.H5P_DEFAULT); 2022 } 2023 else { 
2024 aid = H5.H5Acreate(objID, name, tid, sid, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 2025 } 2026 log.trace("writeAttribute(): aid {} opened/created", aid); 2027 2028 // update value of the attribute 2029 Object attrValue; 2030 try { 2031 attrValue = attr.getData(); 2032 } 2033 catch (Exception ex) { 2034 attrValue = null; 2035 log.trace("writeAttribute(): getData() failure:", ex); 2036 } 2037 2038 log.trace("writeAttribute(): getValue"); 2039 if (attrValue != null) { 2040 if (attr.getDatatype().isVLEN()) { 2041 log.trace("writeAttribute(): isVLEN"); 2042 try { 2043 /* 2044 * must use native type to write attribute data to file (see bug 1069) 2045 */ 2046 long tmptid = tid; 2047 tid = H5.H5Tget_native_type(tmptid); 2048 try { 2049 H5.H5Tclose(tmptid); 2050 } 2051 catch (Exception ex) { 2052 log.debug("writeAttribute(): H5Tclose(tmptid {}) failure: ", tmptid, ex); 2053 } 2054 log.trace("writeAttribute(): H5.H5AwriteVL, {} : {}", name, attr.getDatatype().getDescription()); 2055 if ((attrValue instanceof String) || (attr.getDims().length == 1)) { 2056 H5.H5AwriteVL(aid, tid, (String[]) attrValue); 2057 } 2058 else { 2059 log.info("writeAttribute(): Datatype is not a string, unable to write {} data", name); 2060 } 2061 } 2062 catch (Exception ex) { 2063 log.debug("writeAttribute(): native type failure: ", name, ex); 2064 } 2065 } 2066 else { 2067 if (attr.getDatatype().isRef() && attrValue instanceof String) { 2068 // reference is a path+name to the object 2069 attrValue = H5.H5Rcreate(getFID(), (String) attrValue, HDF5Constants.H5R_OBJECT, -1); 2070 log.trace("writeAttribute(): Attribute class is CLASS_REFERENCE"); 2071 } 2072 else if (Array.get(attrValue, 0) instanceof String) { 2073 long size = attr.getDatatype().getDatatypeSize(); 2074 int len = ((String[]) attrValue).length; 2075 byte[] bval = Dataset.stringToByte((String[]) attrValue, (int)size); 2076 if (bval != null && bval.length == size * len) { 2077 bval[bval.length - 1] = 0; 2078 attrValue = 
bval; 2079 } 2080 log.trace("writeAttribute(): String={}: {}", attrValue, name); 2081 } 2082 2083 try { 2084 /* 2085 * must use native type to write attribute data to file (see bug 1069) 2086 */ 2087 long tmptid = tid; 2088 tid = H5.H5Tget_native_type(tmptid); 2089 try { 2090 H5.H5Tclose(tmptid); 2091 } 2092 catch (Exception ex) { 2093 log.debug("writeAttribute(): H5Tclose(tmptid {}) failure: ", tmptid, ex); 2094 } 2095 log.trace("writeAttribute(): H5.H5Awrite, {} :{}", name, attr.getDatatype().getDescription()); 2096 H5.H5Awrite(aid, tid, attrValue); 2097 } 2098 catch (Exception ex) { 2099 log.debug("writeAttribute(): native type failure: ", ex); 2100 } 2101 } 2102 } // if (attrValue != null) { 2103 } 2104 finally { 2105 try { 2106 H5.H5Tclose(tid); 2107 } 2108 catch (Exception ex) { 2109 log.debug("writeAttribute(): H5Tclose(tid {}) failure: ", tid, ex); 2110 } 2111 try { 2112 H5.H5Sclose(sid); 2113 } 2114 catch (Exception ex) { 2115 log.debug("writeAttribute(): H5Sclose(sid {}) failure: ", sid, ex); 2116 } 2117 try { 2118 H5.H5Aclose(aid); 2119 } 2120 catch (Exception ex) { 2121 log.debug("writeAttribute(): H5Aclose(aid {}) failure: ", aid, ex); 2122 } 2123 } 2124 } 2125 else { 2126 log.debug("writeAttribute(): toNative failure"); 2127 } 2128 2129 obj.close(objID); 2130 log.trace("writeAttribute(): finish"); 2131 } 2132 2133 /*************************************************************************** 2134 * Implementations for methods specific to H5File 2135 **************************************************************************/ 2136 2137 /** 2138 * Opens a file with specific file access property list. 2139 * <p> 2140 * This function does the same as "long open()" except the you can also pass an HDF5 file access property to file 2141 * open. 
     * For example,
     *
     * <pre>
     * // All open objects remaining in the file are closed then file is closed
     * long plist = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
     * H5.H5Pset_fclose_degree(plist, HDF5Constants.H5F_CLOSE_STRONG);
     * long fid = open(plist);
     * </pre>
     *
     * @param plist
     *            a file access property list identifier.
     *
     * @return the file identifier if successful; otherwise returns negative value.
     *
     * @throws Exception
     *             If there is a failure.
     */
    public long open(long plist) throws Exception {
        // Delegates to the private open(); 'true' loads the full hierarchy.
        return open(true, plist);
    }

    /***************************************************************************
     * Private methods.
     **************************************************************************/

    /**
     * Opens access to this file.
     *
     * @param loadFullHierarchy
     *            if true, load the full hierarchy into memory; otherwise just opens the file identifier.
     *
     * @return the file identifier if successful; otherwise returns negative value.
     *
     * @throws Exception
     *             If there is a failure.
     */
    private long open(boolean loadFullHierarchy) throws Exception {
        long the_fid = -1;

        // Default property list; see the commented-out block below for why
        // H5F_CLOSE_STRONG is deliberately not used here.
        long plist = HDF5Constants.H5P_DEFAULT;

        /*
         * // BUG: HDF5Constants.H5F_CLOSE_STRONG does not flush cache try { //All open objects remaining in the file
         * are closed // then file is closed plist = H5.H5Pcreate (HDF5Constants.H5P_FILE_ACCESS);
         * H5.H5Pset_fclose_degree ( plist, HDF5Constants.H5F_CLOSE_STRONG); } catch (Exception ex) {;} the_fid =
         * open(loadFullHierarchy, plist); try { H5.H5Pclose(plist); } catch (Exception ex) {}
         */

        log.trace("open(): loadFull={}", loadFullHierarchy);
        the_fid = open(loadFullHierarchy, plist);

        return the_fid;
    }

    /**
     * Opens access to this file.
     *
     * @param loadFullHierarchy
     *            if true, load the full hierarchy into memory; otherwise just opens the file identifier.
     *
     * @return the file identifier if successful; otherwise returns negative value.
     *
     * @throws Exception
     *             If there is a failure.
     */
    private long open(boolean loadFullHierarchy, long plist) throws Exception {
        log.trace("open(loadFullHierarchy = {}, plist = {}): start", loadFullHierarchy, plist);
        if (fid > 0) {
            log.trace("open(): FID already opened");
            log.trace("open(): finish");
            return fid; // file is opened already
        }

        // The cwd may be changed at Dataset.read() by System.setProperty("user.dir", newdir)
        // to make it work for external datasets. We need to set it back
        // before the file is closed/opened.
        String rootPath = System.getProperty("hdfview.workdir");
        if (rootPath == null) {
            rootPath = System.getProperty("user.dir");
        }
        System.setProperty("user.dir", rootPath);//H5.H5Dchdir_ext(rootPath);

        // check for valid file access permission
        if (flag < 0) {
            log.debug("open(): Invalid access identifier -- " + flag);
            log.trace("open(): finish");
            // NOTE(review): "identifer" is a typo in this user-visible message;
            // left untouched here since callers/tests may match on the text.
            throw new HDF5Exception("Invalid access identifer -- " + flag);
        }
        else if (HDF5Constants.H5F_ACC_CREAT == flag) {
            // create a new file
            // The file is created, flushed and closed here; it is re-opened
            // read-write by the H5Fopen call further below.
            log.trace("open(): create file");
            fid = H5.H5Fcreate(fullFileName, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
                    HDF5Constants.H5P_DEFAULT);
            H5.H5Fflush(fid, HDF5Constants.H5F_SCOPE_LOCAL);
            H5.H5Fclose(fid);
            flag = HDF5Constants.H5F_ACC_RDWR;
        }
        else if (!exists()) {
            log.debug("open(): File {} does not exist", fullFileName);
            log.trace("open(): finish");
            throw new HDF5Exception("File does not exist -- " + fullFileName);
        }
        else if (((flag == HDF5Constants.H5F_ACC_RDWR) || (flag == HDF5Constants.H5F_ACC_CREAT)) && !canWrite()) {
            log.debug("open(): Cannot write file {}", fullFileName);
            log.trace("open(): finish");
            throw new HDF5Exception("Cannot write file, try opening as read-only -- " + fullFileName);
        }
        else if ((flag == HDF5Constants.H5F_ACC_RDONLY) && !canRead()) {
            log.debug("open(): Cannot read file {}", fullFileName);
            log.trace("open(): finish");
            throw new HDF5Exception("Cannot read file -- " + fullFileName);
        }

        try {
            log.trace("open(): open file");
            fid = H5.H5Fopen(fullFileName, flag, plist);
        }
        catch (Exception ex) {
            // First fallback: retry the open read-only.
            try {
                log.debug("open(): open failed, attempting to open file read-only");
                fid = H5.H5Fopen(fullFileName, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
                isReadOnly = true;
            }
            catch (Exception ex2) {
                // Attempt to open the file as a split file or family file
                try {
                    File tmpf = new File(fullFileName);
                    String tmpname = tmpf.getName();
                    int idx = tmpname.lastIndexOf(".");

                    if (tmpname.contains("-m")) {
                        log.debug("open(): open read-only failed, attempting to open split file");

                        // Walk back from the extension to the '-' that marks
                        // the "-m"/"-r" split-file suffix.
                        while (idx > 0) {
                            char c = tmpname.charAt(idx - 1);
                            if (!(c == '-'))
                                idx--;
                            else
                                break;
                        }

                        if (idx > 0) {
                            tmpname = tmpname.substring(0, idx - 1);
                            log.trace("open(): attempting to open split file with name {}", tmpname);
                            long pid = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
                            H5.H5Pset_fapl_split(pid, "-m.h5", HDF5Constants.H5P_DEFAULT, "-r.h5", HDF5Constants.H5P_DEFAULT);
                            fid = H5.H5Fopen(tmpf.getParent() + File.separator + tmpname, flag, pid);
                            H5.H5Pclose(pid);
                        }
                    }
                    else {
                        log.debug("open(): open read-only failed, checking for file family");
                        // try to see if it is a file family, always open a family file
                        // from the first one since other files will not be recognized
                        // as an HDF5 file
                        // Count the trailing digits before the extension; they become
                        // the width of the printf-style "%0<cnt>d" member pattern.
                        int cnt = idx;
                        while (idx > 0) {
                            char c = tmpname.charAt(idx - 1);
                            if (Character.isDigit(c))
                                idx--;
                            else
                                break;
                        }

                        if (idx > 0) {
                            cnt -= idx;
                            tmpname = tmpname.substring(0, idx) + "%0" + cnt + "d" + tmpname.substring(tmpname.lastIndexOf("."));
                            log.trace("open(): attempting to open file family with name {}", tmpname);
                            long pid = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
                            H5.H5Pset_fapl_family(pid, 0, HDF5Constants.H5P_DEFAULT);
                            fid = H5.H5Fopen(tmpf.getParent() + File.separator + tmpname, flag, pid);
                            H5.H5Pclose(pid);
                        }
                    }
                } catch (Exception ex3) {
                    log.debug("open(): open failed: ", ex3);
                }
            } /* catch (Exception ex) { */
        }

        if ((fid >= 0) && loadFullHierarchy) {
            // load the hierarchy of the file
            log.trace("open(loadFullHeirarchy): load the hierarchy");
            loadIntoMemory();
        }

        log.trace("open(loadFullHeirarchy = {}, plist = {}): finish", loadFullHierarchy, plist);
        return fid;
    }

    /**
     * Loads the file structure into memory.
     */
    private void loadIntoMemory() {
        log.trace("loadIntoMemory(): start");
        if (fid < 0) {
            log.debug("loadIntoMemory(): Invalid FID");
            return;
        }

        /*
         * TODO: Root group's name should be changed to 'this.getName()' and all
         * previous accesses of this field should now use getPath() instead of getName()
         * to get the root group. The root group actually does have a path of "/". The
         * depth_first method will have to be changed to setup other object paths
         * appropriately, as it currently assumes the root path to be null.
         */
        rootObject = new H5Group(this, "/", null, null);
        log.trace("loadIntoMemory(): depth_first on root");
        // Start the member count at 0; depth_first honours the configured
        // start/max member paging limits when counting from here.
        depth_first(rootObject, 0);
        log.trace("loadIntoMemory(): finish");
    }

    /**
     * Retrieves the file structure by depth-first order, recursively. The current implementation retrieves groups and
     * datasets only.
     * It does not include named datatypes and soft links.
     * <p>
     * It also detects and stops loops. A loop is detected if there exists an object with the same object ID by tracing
     * a path back up to the root.
     *
     * @param parentObject
     *            the parent object.
     */
    @SuppressWarnings("deprecation")
    private int depth_first(HObject parentObject, int nTotal) {
        log.trace("depth_first({}): start", parentObject);

        int nelems;
        String fullPath = null;
        String ppath = null;
        long gid = -1;

        H5Group pgroup = (H5Group) parentObject;
        ppath = pgroup.getPath();

        // A null path marks the root group (see loadIntoMemory's TODO).
        if (ppath == null) {
            fullPath = HObject.separator;
        }
        else {
            fullPath = ppath + pgroup.getName() + HObject.separator;
        }

        nelems = 0;
        try {
            gid = pgroup.open();
            H5G_info_t info = H5.H5Gget_info(gid);
            nelems = (int) info.nlinks;
        }
        catch (HDF5Exception ex) {
            nelems = -1;
            log.debug("depth_first({}): H5Gget_info(gid {}) failure: ", parentObject, gid, ex);
        }

        if (nelems <= 0) {
            pgroup.close(gid);
            log.debug("depth_first({}): nelems <= 0", parentObject);
            log.trace("depth_first({}): finish", parentObject);
            return nTotal;
        }

        // since each call of H5.H5Gget_objname_by_idx() takes about one second.
        // 1,000,000 calls take 12 days. Instead of calling it in a loop,
        // we use only one call to get all the information, which takes about
        // two seconds
        int[] objTypes = new int[nelems];
        long[] fNos = new long[nelems];
        long[] objRefs = new long[nelems];
        String[] objNames = new String[nelems];

        try {
            H5.H5Gget_obj_info_full(fid, fullPath, objNames, objTypes, null, fNos, objRefs, indexType, indexOrder);
        }
        catch (HDF5Exception ex) {
            log.debug("depth_first({}): failure: ", parentObject, ex);
            log.trace("depth_first({}): finish", parentObject);
            ex.printStackTrace();
            return nTotal;
        }

        // Paging limits: presumably members [nStart, nStart + nMax) are loaded
        // and the rest skipped -- confirm against FileFormat's member paging.
        int nStart = getStartMembers();
        int nMax = getMaxMembers();

        String obj_name;
        int obj_type;

        // Iterate through the file to see members of the group
        for (int i = 0; i < nelems; i++) {
            obj_name = objNames[i];
            obj_type = objTypes[i];
            log.trace("depth_first({}): obj_name={}, obj_type={}", parentObject, obj_name, obj_type);
            // Object id = (object reference, file number) pair, used for loop detection.
            long oid[] = { objRefs[i], fNos[i] };

            if (obj_name == null) {
                log.trace("depth_first({}): continue after null obj_name", parentObject);
                continue;
            }

            nTotal++;

            if (nMax > 0) {
                if ((nTotal - nStart) >= nMax)
                    break; // loaded enough objects
            }

            boolean skipLoad = false;
            if ((nTotal > 0) && (nTotal < nStart))
                skipLoad = true;

            // create a new group
            // NOTE: groups are always added (even when skipLoad is true) so the
            // tree skeleton stays connected; only leaf loading is skipped.
            if (obj_type == HDF5Constants.H5O_TYPE_GROUP) {
                //H5Group g = new H5Group(this, obj_name, fullPath, pgroup, oid);
                H5Group g = new H5Group(this, obj_name, fullPath, pgroup);

                pgroup.addToMemberList(g);

                // detect and stop loops
                // a loop is detected if there exists object with the same
                // object ID by tracing path back up to the root.
                boolean hasLoop = false;
                H5Group tmpObj = (H5Group) parentObject;

                while (tmpObj != null) {
                    if (tmpObj.equalsOID(oid) && !(tmpObj.getPath() == null)) {
                        hasLoop = true;
                        break;
                    }
                    else {
                        tmpObj = (H5Group) tmpObj.getParent();
                    }
                }

                // recursively go through the next group
                // stops if it has loop.
                if (!hasLoop) {
                    nTotal = depth_first(g, nTotal);
                }
            }
            else if (skipLoad) {
                continue;
            }
            else if (obj_type == HDF5Constants.H5O_TYPE_DATASET) {
                long did = -1;
                long tid = -1;
                int tclass = -1;
                try {
                    did = H5.H5Dopen(fid, fullPath + obj_name, HDF5Constants.H5P_DEFAULT);
                    if (did >= 0) {
                        tid = H5.H5Dget_type(did);

                        tclass = H5.H5Tget_class(tid);
                        if ((tclass == HDF5Constants.H5T_ARRAY) || (tclass == HDF5Constants.H5T_VLEN)) {
                            // for ARRAY, the type is determined by the base type
                            long btid = H5.H5Tget_super(tid);

                            tclass = H5.H5Tget_class(btid);

                            try {
                                H5.H5Tclose(btid);
                            }
                            catch (Exception ex) {
                                log.debug("depth_first({})[{}] dataset {} H5Tclose(btid {}) failure: ", parentObject, i, obj_name, btid, ex);
                            }
                        }
                    }
                    else {
                        log.debug("depth_first({})[{}] {} dataset open failure", parentObject, i, obj_name);
                    }
                }
                catch (Exception ex) {
                    log.debug("depth_first({})[{}] {} dataset access failure: ", parentObject, i, obj_name, ex);
                }
                finally {
                    try {
                        H5.H5Tclose(tid);
                    }
                    catch (Exception ex) {
                        log.debug("depth_first({})[{}] daatset {} H5Tclose(tid {}) failure: ", parentObject, i, obj_name, tid, ex);
                    }
                    try {
                        H5.H5Dclose(did);
                    }
                    catch (Exception ex) {
                        log.debug("depth_first({})[{}] dataset {} H5Dclose(did {}) failure: ", parentObject, i, obj_name, did, ex);
                    }
                }
                Dataset d = null;
                if (tclass == HDF5Constants.H5T_COMPOUND) {
                    // create a new compound dataset
                    d = new H5CompoundDS(this, obj_name, fullPath, oid); // deprecated!
                }
                else {
                    // create a new scalar dataset
                    d = new H5ScalarDS(this, obj_name, fullPath, oid); // deprecated!
                }

                pgroup.addToMemberList(d);
            }
            else if (obj_type == HDF5Constants.H5O_TYPE_NAMED_DATATYPE) {
                Datatype t = new H5Datatype(this, obj_name, fullPath, oid); // deprecated!

                pgroup.addToMemberList(t);
            }
            else if (obj_type == HDF5Constants.H5O_TYPE_UNKNOWN) {
                // Unidentified objects (e.g. dangling links) are kept as H5Link
                // placeholders so they remain visible in the tree.
                H5Link link = new H5Link(this, obj_name, fullPath, oid);

                pgroup.addToMemberList(link);
                continue; // do the next one, if the object is not identified.
            }
        } // for ( i = 0; i < nelems; i++)

        pgroup.close(gid);

        log.trace("depth_first({}): finish", parentObject);
        return nTotal;
    } // private depth_first()

    /**
     * Returns a list of all the members of this H5File in a
     * breadth-first ordering that are rooted at the specified
     * object.
     */
    private static List<HObject> getMembersBreadthFirst(HObject obj) {
        List<HObject> allMembers = new Vector<>();
        Queue<HObject> queue = new LinkedList<>();
        HObject currentObject = obj;

        queue.add(currentObject);

        // Standard BFS: visit each object, then enqueue its children (only
        // groups have members).
        while(!queue.isEmpty()) {
            currentObject = queue.remove();
            allMembers.add(currentObject);

            if(currentObject instanceof Group) {
                queue.addAll(((Group) currentObject).getMemberList());
            }
            else {
                continue;
            }
        }

        return allMembers;
    }

    /**
     * Copies a dataset (data, attributes and referenced objects) into a
     * destination group via H5Ocopy, then registers a matching in-memory
     * object on the destination group's member list.
     */
    private HObject copyDataset(Dataset srcDataset, H5Group pgroup, String dstName) throws Exception {
        log.trace("copyDataset(): start");
        Dataset dataset = null;
        long srcdid = -1, dstdid = -1;
        long ocp_plist_id = -1;
        String dname = null, path = null;

        if (pgroup.isRoot()) {
            path = HObject.separator;
        }
        else {
            path = pgroup.getPath() + pgroup.getName() + HObject.separator;
        }

        // Fall back to the source name when no (usable) destination name is given.
        if ((dstName == null) || dstName.equals(HObject.separator) || (dstName.length() < 1)) {
            dstName = srcDataset.getName();
        }
        dname = path + dstName;

        try {
            srcdid = srcDataset.open();
            dstdid = pgroup.open();

            try {
                // Expand object references during the copy so they stay valid
                // in the destination file.
                ocp_plist_id = H5.H5Pcreate(HDF5Constants.H5P_OBJECT_COPY);
                H5.H5Pset_copy_object(ocp_plist_id, HDF5Constants.H5O_COPY_EXPAND_REFERENCE_FLAG);
                H5.H5Ocopy(srcdid, ".", dstdid, dstName, ocp_plist_id, HDF5Constants.H5P_DEFAULT);
            }
            catch (Exception ex) {
                log.debug("copyDataset(): {} failure: ", dname, ex);
            }
            finally {
                try {
                    H5.H5Pclose(ocp_plist_id);
                }
                catch (Exception ex) {
                    log.debug("copyDataset(): {} H5Pclose(ocp_plist_id {}) failure: ", dname, ocp_plist_id, ex);
                }
            }

            // Mirror the copy with an in-memory object of the matching kind.
            if (srcDataset instanceof H5ScalarDS) {
                dataset = new H5ScalarDS(pgroup.getFileFormat(), dstName, path);
            }
            else {
                dataset = new H5CompoundDS(pgroup.getFileFormat(), dstName, path);
            }

            pgroup.addToMemberList(dataset);
        }
        finally {
            try {
                srcDataset.close(srcdid);
            }
            catch (Exception ex) {
                log.debug("copyDataset(): {} srcDataset.close(srcdid {}) failure: ", dname, srcdid, ex);
            }
            try {
                pgroup.close(dstdid);
            }
            catch (Exception ex) {
                log.debug("copyDataset(): {} pgroup.close(dstdid {}) failure: ", dname, dstdid, ex);
            }
        }

        log.trace("copyDataset(): finish");
        return dataset;
    }

    /**
     * Constructs a dataset for specified dataset identifier.
     *
     * @param did
     *            the dataset identifier
     * @param name
     *            the name of the dataset
     * @param path
     *            the path of the dataset
     *
     * @return the dataset if successful; otherwise return null.
     *
     * @throws HDF5Exception
     *             If there is an error at the HDF5 library level.
2666 */ 2667 private Dataset getDataset(long did, String name, String path) throws HDF5Exception { 2668 log.trace("getDataset(): start"); 2669 Dataset dataset = null; 2670 if (did >= 0) { 2671 long tid = -1; 2672 int tclass = -1; 2673 try { 2674 tid = H5.H5Dget_type(did); 2675 tclass = H5.H5Tget_class(tid); 2676 if (tclass == HDF5Constants.H5T_ARRAY) { 2677 // for ARRAY, the type is determined by the base type 2678 long btid = H5.H5Tget_super(tid); 2679 tclass = H5.H5Tget_class(btid); 2680 try { 2681 H5.H5Tclose(btid); 2682 } 2683 catch (Exception ex) { 2684 log.debug("getDataset(): {} H5Tclose(btid {}) failure: ", name, btid, ex); 2685 } 2686 } 2687 } 2688 finally { 2689 try { 2690 H5.H5Tclose(tid); 2691 } 2692 catch (Exception ex) { 2693 log.debug("getDataset(): {} H5Tclose(tid {}) failure: ", name, tid, ex); 2694 } 2695 } 2696 2697 if (tclass == HDF5Constants.H5T_COMPOUND) { 2698 dataset = new H5CompoundDS(this, name, path); 2699 } 2700 else { 2701 dataset = new H5ScalarDS(this, name, path); 2702 } 2703 } 2704 else { 2705 log.debug("getDataset(): id failure"); 2706 } 2707 2708 log.trace("getDataset(): finish"); 2709 return dataset; 2710 } 2711 2712 /** 2713 * Copies a named datatype to another location. 2714 * 2715 * @param srcType 2716 * the source datatype 2717 * @param pgroup 2718 * the group which the new datatype is copied to 2719 * @param dstName 2720 * the name of the new dataype 2721 * 2722 * @throws Exception 2723 * If there is a failure. 
     */
    private HObject copyDatatype(Datatype srcType, H5Group pgroup, String dstName) throws Exception {
        log.trace("copyDatatype(): start");
        Datatype datatype = null;
        long tid_src = -1;
        long gid_dst = -1;
        String path = null;

        if (pgroup.isRoot()) {
            path = HObject.separator;
        }
        else {
            path = pgroup.getPath() + pgroup.getName() + HObject.separator;
        }

        // Fall back to the source name when no (usable) destination name is given.
        if ((dstName == null) || dstName.equals(HObject.separator) || (dstName.length() < 1)) {
            dstName = srcType.getName();
        }

        try {
            tid_src = srcType.open();
            gid_dst = pgroup.open();

            try {
                // Copy the committed datatype object itself into the destination group.
                H5.H5Ocopy(tid_src, ".", gid_dst, dstName, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
            }
            catch (Exception ex) {
                log.debug("copyDatatype(): {} H5Ocopy(tid_src {}) failure: ", dstName, tid_src, ex);
            }
            // NOTE(review): the in-memory object is created even when H5Ocopy
            // failed above -- confirm this "best effort" behavior is intended.
            datatype = new H5Datatype(pgroup.getFileFormat(), dstName, path);

            pgroup.addToMemberList(datatype);
        }
        finally {
            // Always release the native handles, even on failure.
            try {
                srcType.close(tid_src);
            }
            catch (Exception ex) {
                log.debug("copyDatatype(): {} srcType.close(tid_src {}) failure: ", dstName, tid_src, ex);
            }
            try {
                pgroup.close(gid_dst);
            }
            catch (Exception ex) {
                log.debug("copyDatatype(): {} pgroup.close(gid_dst {}) failure: ", dstName, gid_dst, ex);
            }
        }

        log.trace("copyDatatype(): finish");
        return datatype;
    }

    /**
     * Copies a group and its members to a new location.
     *
     * @param srcGroup
     *            the source group
     * @param dstGroup
     *            the location where the new group is located
     * @param dstName
     *            the name of the new group
     *
     * @throws Exception
     *             If there is a failure.
     */
    private HObject copyGroup(H5Group srcGroup, H5Group dstGroup, String dstName) throws Exception {
        log.trace("copyGroup(): start");
        H5Group group = null;
        long srcgid = -1, dstgid = -1;
        String path = null;

        if (dstGroup.isRoot()) {
            path = HObject.separator;
        }
        else {
            path = dstGroup.getPath() + dstGroup.getName() + HObject.separator;
        }

        // Fall back to the source name when no (usable) destination name is given.
        if ((dstName == null) || dstName.equals(HObject.separator) || (dstName.length() < 1)) {
            dstName = srcGroup.getName();
        }

        try {
            srcgid = srcGroup.open();
            dstgid = dstGroup.open();
            try {
                // Recursively copies the group and everything below it.
                H5.H5Ocopy(srcgid, ".", dstgid, dstName, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
            }
            catch (Exception ex) {
                log.debug("copyGroup(): {} H5Ocopy(srcgid {}) failure: ", dstName, srcgid, ex);
            }

            group = new H5Group(dstGroup.getFileFormat(), dstName, path, dstGroup);
            depth_first(group, Integer.MIN_VALUE); // reload all
            dstGroup.addToMemberList(group);
        }

        finally {
            // Always release the native handles, even on failure.
            try {
                srcGroup.close(srcgid);
            }
            catch (Exception ex) {
                log.debug("copyGroup(): {} srcGroup.close(srcgid {}) failure: ", dstName, srcgid, ex);
            }
            try {
                dstGroup.close(dstgid);
            }
            catch (Exception ex) {
                log.debug("copyGroup(): {} pgroup.close(dstgid {}) failure: ", dstName, dstgid, ex);
            }
        }

        log.trace("copyGroup(): finish");
        return group;
    }

    /**
     * Constructs a group for specified group identifier and retrieves members.
     *
     * @param gid
     *            The group identifier.
     * @param name
     *            The group name.
     * @param pGroup
     *            The parent group, or null for the root group.
     *
     * @return The group if successful; otherwise returns false.
     *
     * @throws HDF5Exception
     *             If there is an error at the HDF5 library level.
     */
    private H5Group getGroup(long gid, String name, Group pGroup) throws HDF5Exception {
        log.trace("getGroup(): start");
        String parentPath = null;
        String thisFullName = null;
        String memberFullName = null;

        // A null parent means this is the root group ("/").
        if (pGroup == null) {
            thisFullName = name = "/";
        }
        else {
            parentPath = pGroup.getFullName();
            if ((parentPath == null) || parentPath.equals("/")) {
                thisFullName = "/" + name;
            }
            else {
                thisFullName = parentPath + "/" + name;
            }
        }

        // get rid of any extra "/"
        if (parentPath != null) {
            parentPath = parentPath.replaceAll("//", "/");
        }
        if (thisFullName != null) {
            thisFullName = thisFullName.replaceAll("//", "/");
        }

        log.trace("getGroup(): fullName={}", thisFullName);

        H5Group group = new H5Group(this, name, parentPath, pGroup);

        H5G_info_t group_info = null;
        H5O_info_t obj_info = null;
        long oid = -1;
        String link_name = null;
        try {
            group_info = H5.H5Gget_info(gid);
        }
        catch (Exception ex) {
            log.debug("getGroup(): {} H5Gget_info(gid {}) failure: ", name, gid, ex);
        }
        try {
            oid = H5.H5Oopen(gid, thisFullName, HDF5Constants.H5P_DEFAULT);
        }
        catch (Exception ex) {
            log.debug("getGroup(): {} H5Oopen(gid {}) failure: ", name, gid, ex);
        }

        // retrieve only the immediate members of the group, do not follow
        // subgroups
        // NOTE(review): if H5Gget_info failed above, group_info is still null
        // here and group_info.nlinks throws NullPointerException -- confirm
        // whether a null guard should be added.
        for (int i = 0; i < group_info.nlinks; i++) {
            try {
                link_name = H5.H5Lget_name_by_idx(gid, thisFullName, indexType, indexOrder, i,
                        HDF5Constants.H5P_DEFAULT);
                obj_info = H5
                        .H5Oget_info_by_idx(oid, thisFullName, indexType, indexOrder, i, HDF5Constants.H5P_DEFAULT);
            }
            catch (HDF5Exception ex) {
                log.debug("getGroup()[{}]: {} name,info failure: ", i, name, ex);
                log.trace("getGroup()[{}]: continue", i);
                // do not stop if accessing one member fails
                continue;
            }
            // create a new group
            if (obj_info.type == HDF5Constants.H5O_TYPE_GROUP) {
                H5Group g = new H5Group(this, link_name, thisFullName, group);
                group.addToMemberList(g);
            }
            else if (obj_info.type == HDF5Constants.H5O_TYPE_DATASET) {
                long did = -1;
                Dataset d = null;

                if ((thisFullName == null) || thisFullName.equals("/")) {
                    memberFullName = "/" + link_name;
                }
                else {
                    memberFullName = thisFullName + "/" + link_name;
                }

                try {
                    did = H5.H5Dopen(fid, memberFullName, HDF5Constants.H5P_DEFAULT);
                    d = getDataset(did, link_name, thisFullName);
                }
                finally {
                    try {
                        H5.H5Dclose(did);
                    }
                    catch (Exception ex) {
                        log.debug("getGroup()[{}]: {} H5Dclose(did {}) failure: ", i, name, did, ex);
                    }
                }
                // NOTE(review): d is null when H5Dopen failed; a null entry is
                // then added to the member list -- confirm callers tolerate this.
                group.addToMemberList(d);
            }
            else if (obj_info.type == HDF5Constants.H5O_TYPE_NAMED_DATATYPE) {
                Datatype t = new H5Datatype(this, link_name, thisFullName);
                group.addToMemberList(t);
            }
        } // End of for loop.
        try {
            if (oid >= 0)
                H5.H5Oclose(oid);
        }
        catch (Exception ex) {
            log.debug("getGroup(): {} H5Oclose(oid {}) failure: ", name, oid, ex);
        }
        log.trace("getGroup(): finish");
        return group;
    }

    /**
     * Retrieves the name of the target object that is being linked to.
     *
     * @param obj
     *            The current link object.
     *
     * @return The name of the target object.
     *
     * @throws HDF5Exception
     *             If there is an error at the HDF5 library level.
     */
    public static String getLinkTargetName(HObject obj) throws Exception {
        log.trace("getLinkTargetName(): start");
        // link_value[0] receives the target object name; link_value[1] the
        // external file name (external links only).
        String[] link_value = { null, null };
        String targetObjName = null;

        if (obj == null) {
            log.debug("getLinkTargetName(): object is null");
            log.trace("getLinkTargetName(): finish");
            return null;
        }

        if (obj.getFullName().equals("/")) {
            log.debug("getLinkTargetName(): object is root group, links not allowed");
            log.trace("getLinkTargetName(): finish");
            return null;
        }

        H5L_info_t link_info = null;
        try {
            link_info = H5.H5Lget_info(obj.getFID(), obj.getFullName(), HDF5Constants.H5P_DEFAULT);
        }
        catch (Throwable err) {
            log.debug("getLinkTargetName(): H5Lget_info {} failure: ", obj.getFullName(), err);
        }
        if (link_info != null) {
            if ((link_info.type == HDF5Constants.H5L_TYPE_SOFT) || (link_info.type == HDF5Constants.H5L_TYPE_EXTERNAL)) {
                try {
                    H5.H5Lget_value(obj.getFID(), obj.getFullName(), link_value, HDF5Constants.H5P_DEFAULT);
                }
                catch (Exception ex) {
                    log.debug("getLinkTargetName(): H5Lget_value {} failure: ", obj.getFullName(), ex);
                }
                if (link_info.type == HDF5Constants.H5L_TYPE_SOFT)
                    targetObjName = link_value[0];
                else if (link_info.type == HDF5Constants.H5L_TYPE_EXTERNAL) {
                    // External target is rendered as "<file><FILE_OBJ_SEP><object>".
                    targetObjName = link_value[1] + FileFormat.FILE_OBJ_SEP + link_value[0];
                }
            }
        }
        log.trace("getLinkTargetName(): finish");
        return targetObjName;
    }

    /**
     * Export dataset.
     *
     * @param file_export_name
     *            The file name to export data into.
     * @param file_name
     *            The name of the HDF5 file containing the dataset.
     * @param object_path
     *            The full path of the dataset to be exported.
     *
     * @throws Exception
     *             If there is a failure.
 */
    @Override
    public void exportDataset(String file_export_name, String file_name, String object_path, int binary_order)
            throws Exception {
        // Thin wrapper: the native HDF5 library routine performs the actual export.
        H5.H5export_dataset(file_export_name, file_name, object_path, binary_order);
    }

    /**
     * Renames an attribute.
     *
     * @param obj
     *            The object whose attribute is to be renamed.
     * @param oldAttrName
     *            The current name of the attribute.
     * @param newAttrName
     *            The new name of the attribute.
     *
     * @throws HDF5Exception
     *             If there is an error at the HDF5 library level.
     */
    @Override
    public void renameAttribute(HObject obj, String oldAttrName, String newAttrName) throws Exception {
        log.trace("renameAttribute(): rename {} to {}", oldAttrName, newAttrName);
        // Rename in place on the object located by its full path within the open file.
        H5.H5Arename_by_name(obj.getFID(), obj.getFullName(), oldAttrName, newAttrName, HDF5Constants.H5P_DEFAULT);
    }

    /**
     * Rename the given object
     *
     * @param obj
     *            the object to be renamed.
     * @param newName
     *            the new name of the object.
     *
     * @throws Exception
     *             If there is a failure.
3066 */ 3067 public static void renameObject(HObject obj, String newName) throws Exception { 3068 String currentFullPath = obj.getPath() + obj.getName(); 3069 String newFullPath = obj.getPath() + newName; 3070 3071 currentFullPath = currentFullPath.replaceAll("//", "/"); 3072 newFullPath = newFullPath.replaceAll("//", "/"); 3073 3074 if (currentFullPath.equals("/")) { 3075 throw new HDF5Exception("Can't rename the root group."); 3076 } 3077 3078 if (currentFullPath.equals(newFullPath)) { 3079 throw new HDF5Exception("The new name is the same as the current name."); 3080 } 3081 3082 // Call the library to move things in the file 3083 H5.H5Lmove(obj.getFID(), currentFullPath, obj.getFID(), newFullPath, HDF5Constants.H5P_DEFAULT, 3084 HDF5Constants.H5P_DEFAULT); 3085 } 3086 3087 public static int getIndexTypeValue(String strtype) { 3088 if (strtype.compareTo("H5_INDEX_NAME") == 0) 3089 return HDF5Constants.H5_INDEX_NAME; 3090 if (strtype.compareTo("H5_INDEX_CRT_ORDER") == 0) 3091 return HDF5Constants.H5_INDEX_CRT_ORDER; 3092 if (strtype.compareTo("H5_INDEX_N") == 0) 3093 return HDF5Constants.H5_INDEX_N; 3094 return HDF5Constants.H5_INDEX_UNKNOWN; 3095 } 3096 3097 public static int getIndexOrderValue(String strorder) { 3098 if (strorder.compareTo("H5_ITER_INC") == 0) 3099 return HDF5Constants.H5_ITER_INC; 3100 if (strorder.compareTo("H5_ITER_DEC") == 0) 3101 return HDF5Constants.H5_ITER_DEC; 3102 if (strorder.compareTo("H5_ITER_NATIVE") == 0) 3103 return HDF5Constants.H5_ITER_NATIVE; 3104 if (strorder.compareTo("H5_ITER_N") == 0) 3105 return HDF5Constants.H5_ITER_N; 3106 return HDF5Constants.H5_ITER_UNKNOWN; 3107 } 3108 3109 @Override 3110 public int getIndexType(String strtype) { 3111 if (strtype != null) { 3112 if (strtype.compareTo("H5_INDEX_NAME") == 0) 3113 return HDF5Constants.H5_INDEX_NAME; 3114 if (strtype.compareTo("H5_INDEX_CRT_ORDER") == 0) 3115 return HDF5Constants.H5_INDEX_CRT_ORDER; 3116 return HDF5Constants.H5_INDEX_UNKNOWN; 3117 } 3118 return 
getIndexType(); 3119 } 3120 3121 public int getIndexType() { 3122 return indexType; 3123 } 3124 3125 @Override 3126 public void setIndexType(int indexType) { 3127 this.indexType = indexType; 3128 } 3129 3130 @Override 3131 public int getIndexOrder(String strorder) { 3132 if (strorder != null) { 3133 if (strorder.compareTo("H5_ITER_INC") == 0) 3134 return HDF5Constants.H5_ITER_INC; 3135 if (strorder.compareTo("H5_ITER_DEC") == 0) 3136 return HDF5Constants.H5_ITER_DEC; 3137 if (strorder.compareTo("H5_ITER_NATIVE") == 0) 3138 return HDF5Constants.H5_ITER_NATIVE; 3139 if (strorder.compareTo("H5_ITER_N") == 0) 3140 return HDF5Constants.H5_ITER_N; 3141 return HDF5Constants.H5_ITER_UNKNOWN; 3142 } 3143 return getIndexOrder(); 3144 } 3145 3146 public int getIndexOrder() { 3147 return indexOrder; 3148 } 3149 3150 @Override 3151 public void setIndexOrder(int indexOrder) { 3152 this.indexOrder = indexOrder; 3153 } 3154}