/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the file COPYING.                     *
 * COPYING can be found at the root of the source code distribution tree.    *
 * If you do not have access to this file, you may request a copy from       *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.h4;

import java.util.List;
import java.util.Vector;

import hdf.hdflib.HDFChunkInfo;
import hdf.hdflib.HDFCompInfo;
import hdf.hdflib.HDFConstants;
import hdf.hdflib.HDFDeflateCompInfo;
import hdf.hdflib.HDFException;
import hdf.hdflib.HDFLibrary;
import hdf.object.Attribute;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.ScalarDS;

/**
 * H4GRImage describes an HDF4 general raster (GR) image and the operations
 * performed on it. An HDF4 raster image is a two-dimensional array of pixel values.
 * <p>
 * Every GR data set must contain the following components: image array, name,
 * pixel type, and dimensions. The name, dimensions, and pixel type must be
 * supplied by the user at the time the GR data set is defined.
 * <p>
 * An image array is a two-dimensional array of pixels. Each element in an image
 * array corresponds to one pixel, and each pixel can consist of a number of
 * color component values or pixel components, e.g., Red-Green-Blue (RGB) or
 * Cyan-Magenta-Yellow-Black (CMYK). Pixel components can be represented by
 * different methods (8-bit lookup table or 24-bit direct representation) and
 * may have different data types. The data type of the pixel components and the
 * number of components in each pixel are collectively known as the pixel type.
 * <p>
 * <b>How to Select a Subset</b>
 * <p>
 * Dataset defines APIs for reading, writing and subsetting a dataset. No function
 * is defined for selecting a subset of a data array; the selection is done
 * implicitly. Function calls for dimension information, such as getSelectedDims(),
 * return a reference to the corresponding array inside the dataset object, so
 * changes made to that array outside the dataset object directly change its
 * values inside the dataset object, much like pointers in C.
 * <p>
 * The following is an example of how to make a subset. In the example, the
 * dataset is a 4-dimensional array of size [200][100][50][10], i.e.,
 * dims[0]=200; dims[1]=100; dims[2]=50; dims[3]=10; <br>
 * We want to select every other data point in dims[1] and dims[2].
 * <pre>
     int rank = dataset.getRank();   // number of dimensions of the dataset
     long[] dims = dataset.getDims(); // the dimension sizes of the dataset
     long[] selected = dataset.getSelectedDims(); // the selected size of the dataset
     long[] start = dataset.getStartDims(); // the offset of the selection
     long[] stride = dataset.getStride(); // the stride of the dataset
     int[] selectedIndex = dataset.getSelectedIndex(); // the selected dimensions for display

     // select dim1 and dim2 as 2D data for display, and slice through dim0
     selectedIndex[0] = 1;
     selectedIndex[1] = 2;
     selectedIndex[2] = 0;

     // reset the selection arrays
     for (int i=0; i<rank; i++) {
         start[i] = 0;
         selected[i] = 1;
         stride[i] = 1;
     }

     // set stride to 2 on dim1 and dim2 so that every other data point is selected.
     stride[1] = 2;
     stride[2] = 2;

     // set the selection size of dim1 and dim2
     selected[1] = dims[1]/stride[1];
     selected[2] = dims[2]/stride[2];

     // when dataset.read() is called, the selection above will be used since
     // the dimension arrays are passed by reference. Changes of these arrays
     // outside the dataset object directly change the values of these arrays
     // in the dataset object.
 * </pre>
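 * <p>
 * For illustration, the following is a minimal sketch of reading a whole GR
 * image through this class. The file name and image path are assumptions for
 * the example; the element type of the returned array depends on the pixel type.
 * <pre>
     // open an existing HDF4 file (hypothetical name and path)
     H4File file = new H4File("example.hdf", FileFormat.READ);
     file.open();
     H4GRImage image = (H4GRImage)file.get("/Image0");
     image.init();               // set up rank, dims and the default selection
     Object data = image.read(); // read the selected subset into memory
     file.close();
 * </pre>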
 *
 * @version 1.1 9/4/2007
 * @author Peter X. Cao
 */
public class H4GRImage extends ScalarDS
{
    private static final long serialVersionUID = 1029672744963360976L;

    private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H4GRImage.class);

    /**
     * The list of attributes of this data object. Members of the list are
     * instances of Attribute.
     */
    @SuppressWarnings("rawtypes")
    private List attributeList;

    /**
     * The GR interface identifier obtained from GRstart(fid).
     */
    private int grid;

    /**
     * The number of components in the raster image.
     */
    private int ncomp;

    /** the datatype identifier */
    private int datatypeID = -1;

    private int nAttributes = -1;

    /**
     * Creates an H4GRImage object with a specific name and path.
     *
     * @param theFile the HDF file.
     * @param name the name of this H4GRImage.
     * @param path the full path of this H4GRImage.
     */
    public H4GRImage(FileFormat theFile, String name, String path)
    {
        this(theFile, name, path, null);
    }

    /**
     * Creates an H4GRImage object with a specific name, path, and object ID.
     *
     * @param theFile the HDF file.
     * @param name the name of this H4GRImage.
     * @param path the full path of this H4GRImage.
     * @param oid the unique identifier of this data object.
     */
    public H4GRImage(
        FileFormat theFile,
        String name,
        String path,
        long[] oid)
    {
        super(theFile, name, path, oid);
        palette = null;
        isImage = isImageDisplay = true;
        unsignedConverted = false;
        grid = ((H4File)getFileFormat()).getGRAccessID();
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.DataFormat#hasAttribute()
     */
    public boolean hasAttribute()
    {
        if (nAttributes < 0) {
            grid = ((H4File)getFileFormat()).getGRAccessID();

            int id = open();

            if (id >= 0) {
                String[] objName = {""};
                int[] grInfo = new int[4]; // ncomp, data_type, interlace, and num_attrs
                int[] idims = new int[2];
                try {
                    HDFLibrary.GRgetiminfo(id, objName, grInfo, idims);
                    nAttributes = grInfo[3];
                }
                catch (Exception ex) {
                    log.trace("hasAttribute() failure: ", ex);
                    nAttributes = 0;
                }

                log.trace("hasAttribute(): nAttributes={}", nAttributes);

                close(id);
            }
        }

        return (nAttributes > 0);
    }
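    /**
     * Copies this GR image to a new image in the given parent group. The
     * image data, the first palette, and all attributes are duplicated.
     *
     * @param pgroup the parent group of the new image.
     * @param dname the name of the new image.
     * @param dims the dimension sizes of the new image; if null, the
     *             dimensions of the source image are used.
     * @param buff the data to write to the new image; if null, the data is
     *             read from the source image.
     *
     * @return the new image if successful; otherwise returns null.
     *
     * @throws Exception if the image cannot be copied
     */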
    @Override
    public Dataset copy(Group pgroup, String dname, long[] dims, Object buff) throws Exception
    {
        log.trace("copy(): start: parentGroup={} datasetName={}", pgroup, dname);

        Dataset dataset = null;
        int srcdid=-1, dstdid=-1;
        String path=null;
        int[] count=null;

        if (pgroup == null) {
            log.debug("copy(): Parent group is null");
            log.trace("copy(): finish");
            return null;
        }

        if (pgroup.isRoot()) {
            path = HObject.separator;
        }
        else {
            path = pgroup.getPath()+pgroup.getName()+HObject.separator;
        }

        srcdid = open();
        if (srcdid < 0) {
            log.debug("copy(): Invalid source dataset ID");
            log.trace("copy(): finish");
            return null;
        }

        if (dims != null) {
            count = new int[2];
            count[0] = (int)dims[0];
            count[1] = (int)dims[1];
        }

        int[] grInfo = new int[4]; // ncomp, data_type, interlace and num_attrs
        try {
            String[] tmpName = {""};
            int[] tmpDims = new int[2];
            HDFLibrary.GRgetiminfo(srcdid, tmpName, grInfo, tmpDims);
            if (count == null) {
                count = tmpDims;
            }
        }
        catch (HDFException ex) {
            log.debug("copy(): GRgetiminfo failure: ", ex);
        }

        int ncomp = grInfo[0];
        int tid = grInfo[1];
        int interlace = grInfo[2];
        int numberOfAttributes = grInfo[3];
        dstdid = HDFLibrary.GRcreate(
                ((H4File)pgroup.getFileFormat()).getGRAccessID(),
                dname, ncomp, tid, interlace, count);
        if (dstdid < 0) {
            log.debug("copy(): Invalid dest dataset ID");
            log.trace("copy(): finish");
            return null;
        }

        int ref = HDFLibrary.GRidtoref(dstdid);
        if (!pgroup.isRoot()) {
            int vgid = pgroup.open();
            HDFLibrary.Vaddtagref(vgid, HDFConstants.DFTAG_RIG, ref);
            pgroup.close(vgid);
        }

        // read data from the source dataset
        int[] start = {0, 0};
        if (buff == null) {
            buff = new byte[count[0]*count[1] * HDFLibrary.DFKNTsize(tid)];
            HDFLibrary.GRreadimage(srcdid, start, null, count, buff);
        }

        // write the data into the destination dataset
        HDFLibrary.GRwriteimage(dstdid, start, null, count, buff);

        // copy palette
        int pid = HDFLibrary.GRgetlutid(srcdid, 0);
        int[] palInfo = new int[4];

        HDFLibrary.GRgetlutinfo(pid, palInfo);
        palInfo[1] = HDFConstants.DFNT_UINT8; // only unsigned byte palettes are supported; other types do not work
        int palSize = palInfo[0]*palInfo[3];
        byte[] palBuff = new byte[palSize];
        HDFLibrary.GRreadlut(pid, palBuff);
        pid = HDFLibrary.GRgetlutid(dstdid, 0);
        HDFLibrary.GRwritelut(pid, palInfo[0], palInfo[1], palInfo[2], palInfo[3], palBuff);

        // copy attributes from one object to the new object
        log.trace("copy(): copyAttributes: numAttributes={}", numberOfAttributes);
        copyAttribute(srcdid, dstdid, numberOfAttributes);

        long[] oid = {HDFConstants.DFTAG_RIG, ref};
        dataset = new H4GRImage(pgroup.getFileFormat(), dname, path, oid);

        pgroup.addToMemberList(dataset);

        close(srcdid);

        try {
            HDFLibrary.GRendaccess(dstdid);
        }
        catch (HDFException ex) {
            log.debug("copy(): GRendaccess failure: ", ex);
        }

        log.trace("copy(): finish");
        return dataset;
    }

    // ***** need to implement from ScalarDS *****
    @Override
    public byte[][] readPalette(int idx) { return null; }

    // ***** need to implement from ScalarDS *****
    @Override
    public byte[] getPaletteRefs() { return null; }

    // implementing ScalarDS
    @Override
    public Datatype getDatatype()
    {
        if (datatype == null) {
            datatype = new H4Datatype(datatypeID);
        }

        return datatype;
    }
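    /**
     * Reads the selected subset of this image from file and returns it as a
     * raw byte array, honoring the image's interlace mode and the current
     * selection (start, stride and selected sizes).
     *
     * @return the bytes read if successful; otherwise returns null.
     *
     * @throws HDFException if the image cannot be read
     */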
    @Override
    public byte[] readBytes() throws HDFException
    {
        log.trace("readBytes(): start");

        byte[] theData = null;

        if (rank <= 0) {
            init();
        }

        int id = open();
        if (id < 0) {
            log.debug("readBytes(): Invalid ID");
            log.trace("readBytes(): finish");
            return null;
        }

        try {
            // set the interlacing scheme for reading image data
            HDFLibrary.GRreqimageil(id, interlace);
            int datasize = (int)(getWidth()*getHeight()*ncomp);
            int size = HDFLibrary.DFKNTsize(datatypeID)*datasize;
            theData = new byte[size];
            int[] start = {(int)startDims[0], (int)startDims[1]};
            int[] select = {(int)selectedDims[0], (int)selectedDims[1]};

            int[] stride = null;
            if (selectedStride != null) {
                stride = new int[rank];
                for (int i=0; i<rank; i++) {
                    stride[i] = (int)selectedStride[i];
                }
            }

            HDFLibrary.GRreadimage(id, start, stride, select, theData);
        }
        catch (Exception ex) {
            log.debug("readBytes(): failure: ", ex);
        }
        finally {
            close(id);
        }

        log.trace("readBytes(): finish");
        return theData;
    }
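    /**
     * Reads the selected subset of this image from file into memory. The
     * returned array has the Java type that matches the pixel type of the
     * image. External data files, if any, are assumed to be located in the
     * same directory as the main file.
     *
     * @return the data read if successful; otherwise returns null.
     *
     * @throws HDFException if the image cannot be read
     */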
    @Override
    public Object read() throws HDFException
    {
        log.trace("read(): start");

        Object theData = null;

        if (rank <= 0) {
            init();
        }

        int id = open();
        if (id < 0) {
            log.debug("read(): Invalid ID");
            log.trace("read(): finish");
            return null;
        }

        try {
            // set the interlacing scheme for reading image data
            HDFLibrary.GRreqimageil(id, interlace);
            int datasize = (int)(getWidth()*getHeight()*ncomp);

            theData = H4Datatype.allocateArray(datatypeID, datasize);

            if (theData != null) {
                // assume external data files are located in the same directory as the main file.
                HDFLibrary.HXsetdir(getFileFormat().getParent());

                int[] start = {(int)startDims[0], (int)startDims[1]};
                int[] select = {(int)selectedDims[0], (int)selectedDims[1]};

                int[] stride = null;
                if (selectedStride != null) {
                    stride = new int[rank];
                    for (int i=0; i<rank; i++) {
                        stride[i] = (int)selectedStride[i];
                    }
                }

                HDFLibrary.GRreadimage(id, start, stride, select, theData);
            }
        }
        catch (Exception ex) {
            log.debug("read(): failure: ", ex);
        }
        finally {
            close(id);
        }

        if ((rank > 1) && (selectedIndex[1] > selectedIndex[0]))
            isDefaultImageOrder = false;
        else
            isDefaultImageOrder = true;

        log.trace("read(): isDefaultImageOrder={}", isDefaultImageOrder);
        log.trace("read(): finish");
        return theData;
    }
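    /**
     * Writes the given data buffer to the selected subset of this image in
     * file. If the data is unsigned and was converted for display, it is
     * converted back to its unsigned C representation before writing.
     * External data files, if any, are assumed to be located in the same
     * directory as the main file.
     *
     * @param buf the data to write; nothing is written if it is null.
     *
     * @throws HDFException if the image cannot be written
     */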
    @Override
    public void write(Object buf) throws HDFException
    {
        log.trace("write(): start");

        if (buf == null) {
            log.debug("write(): buf is null");
            log.trace("write(): finish");
            return;
        }

        int id = open();
        if (id < 0) {
            log.debug("write(): Invalid ID");
            log.trace("write(): finish");
            return;
        }

        int[] select = new int[rank];
        int[] start = new int[rank];
        for (int i=0; i<rank; i++) {
            select[i] = (int)selectedDims[i];
            start[i] = (int)startDims[i];
        }

        int[] stride = null;
        if (selectedStride != null) {
            stride = new int[rank];
            for (int i=0; i<rank; i++) {
                stride[i] = (int)selectedStride[i];
            }
        }

        Object tmpData = buf;
        try {
            if (isUnsigned && unsignedConverted) {
                tmpData = convertToUnsignedC(buf);
            }
            // assume external data files are located in the same directory as the main file.
            HDFLibrary.HXsetdir(getFileFormat().getParent());

            HDFLibrary.GRwriteimage(id, start, stride, select, tmpData);
        }
        catch (Exception ex) {
            log.debug("write(): failure: ", ex);
        }
        finally {
            tmpData = null;
            close(id);
        }

        log.trace("write(): finish");
    }
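    /**
     * Retrieves the attributes of this image from file. The attributes are
     * read only once and cached in a list; subsequent calls return the
     * cached list.
     *
     * @return the list of Attribute objects, or null if the image has no
     *         attributes.
     *
     * @throws HDFException if the attributes cannot be read
     */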
    @SuppressWarnings({"rawtypes", "unchecked"})
    public List getMetadata() throws HDFException
    {
        log.trace("getMetadata(): start");

        if (attributeList != null) {
            log.trace("getMetadata(): attributeList != null");
            log.trace("getMetadata(): finish");
            return attributeList;
        }

        int id = open();
        String[] objName = {""};
        int[] grInfo = new int[4]; // ncomp, data_type, interlace, and num_attrs
        int[] idims = new int[2];
        try {
            HDFLibrary.GRgetiminfo(id, objName, grInfo, idims);
            // mask off the litend bit
            grInfo[1] = grInfo[1] & (~HDFConstants.DFNT_LITEND);
            int n = grInfo[3];

            if ((attributeList == null) && (n > 0)) {
                attributeList = new Vector(n, 5);
            }

            boolean b = false;
            String[] attrName = new String[1];
            int[] attrInfo = {0, 0}; // data_type, length
            for (int i=0; i<n; i++) {
                attrName[0] = "";
                try {
                    b = HDFLibrary.GRattrinfo(id, i, attrName, attrInfo);
                    // mask off the litend bit
                    attrInfo[0] = attrInfo[0] & (~HDFConstants.DFNT_LITEND);
                }
                catch (HDFException ex) {
                    log.debug("getMetadata(): GRattrinfo failure: ", ex);
                    b = false;
                }

                if (!b) {
                    continue;
                }

                long[] attrDims = {attrInfo[1]};
                Attribute attr = new Attribute(attrName[0], new H4Datatype(attrInfo[0]), attrDims);
                attributeList.add(attr);

                Object buf = H4Datatype.allocateArray(attrInfo[0], attrInfo[1]);
                try {
                    HDFLibrary.GRgetattr(id, i, buf);
                }
                catch (HDFException ex) {
                    log.debug("getMetadata(): GRgetattr failure: ", ex);
                    buf = null;
                }

                if (buf != null) {
                    if ((attrInfo[0] == HDFConstants.DFNT_CHAR) ||
                        (attrInfo[0] == HDFConstants.DFNT_UCHAR8)) {
                        buf = Dataset.byteToString((byte[])buf, attrInfo[1]);
                    }

                    attr.setValue(buf);
                }
            } // for (int i=0; i<n; i++)
        }
        catch (Exception ex) {
            log.debug("getMetadata(): failure: ", ex);
        }
        finally {
            close(id);
        }

        log.trace("getMetadata(): finish");
        return attributeList;
    }

    /**
     * Writes the given attribute to file and adds it to the cached attribute
     * list. Only attribute metadata is supported; other objects are ignored.
     *
     * @param info the Attribute to write.
     *
     * @throws Exception if the attribute cannot be written
     */
    @SuppressWarnings({"rawtypes", "unchecked"})
    public void writeMetadata(Object info) throws Exception
    {
        log.trace("writeMetadata(): start");

        // only attribute metadata is supported.
        if (!(info instanceof Attribute)) {
            log.debug("writeMetadata(): Object not an Attribute");
            log.trace("writeMetadata(): finish");
            return;
        }

        try {
            getFileFormat().writeAttribute(this, (Attribute)info, true);

            if (attributeList == null) {
                attributeList = new Vector();
            }

            attributeList.add(info);
            nAttributes = attributeList.size();
        }
        catch (Exception ex) {
            log.debug("writeMetadata(): failure: ", ex);
        }

        log.trace("writeMetadata(): finish");
    }

    // ***** need to implement from DataFormat *****
    public void removeMetadata(Object info) throws HDFException {
        log.trace("removeMetadata(): disabled");
    }

    // implementing DataFormat
    public void updateMetadata(Object info) throws Exception {
        log.trace("updateMetadata(): disabled");
    }
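    /**
     * Opens access to this image through the GR interface. The identifier
     * returned by this call must be released with close(int).
     *
     * @return the GR identifier of the image if successful; otherwise
     *         returns -1.
     */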
    @Override
    public int open()
    {
        log.trace("open(): start: for file={} with ref={}", getFID(), (short) oid[1]);

        int id = -1;
        try {
            int index = HDFLibrary.GRreftoindex(grid, (short)oid[1]);
            id = HDFLibrary.GRselect(grid, index);
        }
        catch (HDFException ex) {
            log.debug("open(): failure: ", ex);
            id = -1;
        }

        log.trace("open(): finish");
        return id;
    }

    // Implementing HObject.
    @Override
    public void close(int grid)
    {
        try { HDFLibrary.GRendaccess(grid); }
        catch (HDFException ex) { log.debug("close(): failure: ", ex); }
    }

    // Implementing Dataset.
    @Override
    public void init()
    {
        log.trace("init(): start");

        if (rank > 0) {
            log.trace("init(): Already initialized");
            log.trace("init(): finish");
            return; // already called. Initialize only once.
        }

        int id = open();
        String[] objName = {""};
        int[] grInfo = new int[4]; // ncomp, data_type, interlace and num_attrs
        int[] idims = new int[2];
        try {
            HDFLibrary.GRgetiminfo(id, objName, grInfo, idims);
            // mask off the litend bit
            grInfo[1] = grInfo[1] & (~HDFConstants.DFNT_LITEND);
            datatypeID = grInfo[1];

            // get compression information
            try {
                HDFCompInfo compInfo = new HDFCompInfo();
                HDFLibrary.GRgetcompinfo(id, compInfo);
                if (compInfo.ctype == HDFConstants.COMP_CODE_DEFLATE) {
                    compression = "GZIP";
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_SZIP) {
                    compression = "SZIP";
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_JPEG) {
                    compression = "JPEG";
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_SKPHUFF) {
                    compression = "SKPHUFF";
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_RLE) {
                    compression = "RLE";
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_NBIT) {
                    compression = "NBIT";
                }
            }
            catch (Exception ex) {
                log.debug("init(): get compression information failure: ", ex);
            }

            // get chunk information
            try {
                HDFChunkInfo chunkInfo = new HDFChunkInfo();
                int[] cflag = {HDFConstants.HDF_NONE};
                HDFLibrary.GRgetchunkinfo(id, chunkInfo, cflag);
                if (cflag[0] == HDFConstants.HDF_NONE) {
                    chunkSize = null;
                    storage_layout = "NONE";
                }
                else {
                    // GR images are always two-dimensional; the rank field has
                    // not been set yet at this point, so use 2 directly.
                    chunkSize = new long[2];
                    for (int i=0; i<2; i++) {
                        chunkSize[i] = chunkInfo.chunk_lengths[i];
                    }
                    storage_layout = "CHUNKED: " + chunkSize[0] + " X " + chunkSize[1];
                }
            }
            catch (Exception ex) {
                log.debug("init(): get chunk information failure: ", ex);
            }
        }
        catch (HDFException ex) {
            log.debug("init(): failure: ", ex);
        }
        finally {
            close(id);
        }

        isUnsigned = H4Datatype.isUnsigned(datatypeID);

        if (idims == null) {
            log.debug("init(): idims is null");
            log.trace("init(): finish");
            return;
        }

        ncomp = grInfo[0];
        isTrueColor = (ncomp >= 3);
        interlace = grInfo[2];
        rank = 2; // support only two-dimensional raster images

        // data in an HDF4 GR image is arranged as dim[0]=width, dim[1]=height;
        // other image data is arranged as dim[0]=height, dim[1]=width.
        selectedIndex[0] = 1;
        selectedIndex[1] = 0;

        dims = new long[rank];
        startDims = new long[rank];
        selectedDims = new long[rank];
        for (int i=0; i<rank; i++) {
            startDims[i] = 0;
            selectedDims[i] = idims[i];
            dims[i] = idims[i];
        }

        log.trace("init(): finish");
    }

    // ***** need to implement from ScalarDS *****
    @Override
    public byte[][] getPalette()
    {
        log.trace("getPalette(): start");

        if (palette != null) {
            log.trace("getPalette(): palette != null");
            log.trace("getPalette(): finish");
            return palette;
        }

        int id = open();
        if (id < 0) {
            log.debug("getPalette(): Invalid ID");
            log.trace("getPalette(): finish");
            return null;
        }

        // get palette info.
        int lutid = -1;
        int[] lutInfo = new int[4]; // ncomp, datatype, interlace, num_entries
        try {
            // find the first palette.
            // TODO: get all the palettes
            lutid = HDFLibrary.GRgetlutid(id, 0);
            HDFLibrary.GRgetlutinfo(lutid, lutInfo);
        }
        catch (HDFException ex) {
            log.debug("getPalette(): failure: ", ex);
            close(id);
            log.trace("getPalette(): finish");
            return null;
        }

        // check that there is palette data. HDFLibrary.GRgetlutinfo() sometimes
        // returns true even if there is no palette data; also check that the
        // palette is RGB with 256 colors.
        if ((lutInfo[0] != 3) || (lutInfo[2] < 0) || (lutInfo[3] != 256)) {
            close(id);
            log.debug("getPalette(): no palette data");
            log.trace("getPalette(): finish");
            return null;
        }

        // read palette data
        boolean b = false;
        byte[] pal = new byte[3*256];
        try {
            HDFLibrary.GRreqlutil(id, lutInfo[2]);
            b = HDFLibrary.GRreadlut(lutid, pal);
        }
        catch (HDFException ex) {
            log.debug("getPalette(): failure: ", ex);
            b = false;
        }

        if (!b) {
            close(id);
            log.debug("getPalette(): no palette data");
            log.trace("getPalette(): finish");
            return null;
        }

        palette = new byte[3][256];
        if (lutInfo[2] == HDFConstants.MFGR_INTERLACE_PIXEL) {
            // color components are arranged as RGB, RGB, RGB, ...
            for (int i=0; i<256; i++) {
                palette[0][i] = pal[i*3];
                palette[1][i] = pal[i*3+1];
                palette[2][i] = pal[i*3+2];
            }
        }
        else {
            for (int i=0; i<256; i++) {
                palette[0][i] = pal[i];
                palette[1][i] = pal[256+i];
                palette[2][i] = pal[512+i];
            }
        }

        close(id);

        log.trace("getPalette(): finish");
        return palette;
    }

    /**
     * Returns the number of components of this image data.
     *
     * @return the number of components
     */
    public int getComponentCount()
    {
        return ncomp;
    }

    /**
     * Creates a new image.
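     * <p>
     * For example, the following is a minimal sketch of creating an 8-bit,
     * 3-component 64x64 image. Here <code>file</code>, <code>root</code> and
     * <code>data</code> are assumed to be an open H4File, one of its groups,
     * and a pixel buffer of matching size; they are illustrative, not part of
     * this API.
     * <pre>
     long[] dims = {64, 64};
     Datatype dtype = file.createDatatype(
         Datatype.CLASS_CHAR, 1, Datatype.NATIVE, Datatype.SIGN_NONE);
     H4GRImage image = H4GRImage.create("Image0", root, dtype,
         dims, null, null, 0, 3, ScalarDS.INTERLACE_PIXEL, data);
     * </pre>
     *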
     * @param name the name of the dataset to create.
     * @param pgroup the parent group of the new dataset.
     * @param type the datatype of the dataset.
     * @param dims the dimension sizes of the dataset.
     * @param maxdims the max dimension sizes of the dataset.
     * @param chunks the chunk sizes of the dataset.
     * @param gzip the level of the gzip compression.
     * @param ncomp the number of components of the image data.
     * @param interlace the interlace mode.
     * @param data the array of data values.
     *
     * @return the new image if successful. Otherwise returns null.
     *
     * @throws Exception if the image cannot be created
     */
    public static H4GRImage create(
        String name,
        Group pgroup,
        Datatype type,
        long[] dims,
        long[] maxdims,
        long[] chunks,
        int gzip,
        int ncomp,
        int interlace,
        Object data) throws Exception
    {
        log.trace("create(): start: name={} parentGroup={} type={} gzip={} ncomp={} interlace={}", name, pgroup, type, gzip, ncomp, interlace);

        H4GRImage dataset = null;
        if ((name == null) ||
            (pgroup == null) ||
            (dims == null) ||
            ((gzip > 0) && (chunks == null))) {
            log.debug("create(): one or more parameters are null");
            log.trace("create(): finish");
            return null;
        }

        H4File file = (H4File)pgroup.getFileFormat();
        if (file == null) {
            log.debug("create(): Parent group FileFormat is null");
            log.trace("create(): finish");
            return null;
        }

        String path = HObject.separator;
        if (!pgroup.isRoot()) {
            path = pgroup.getPath()+pgroup.getName()+HObject.separator;
        }
        if (interlace == ScalarDS.INTERLACE_PLANE) {
            interlace = HDFConstants.MFGR_INTERLACE_COMPONENT;
        }
        else {
            interlace = HDFConstants.MFGR_INTERLACE_PIXEL;
        }

        int rank = 2;
        int idims[] = new int[rank];
        int imaxdims[] = new int[rank];
        int start[] = new int[rank];
        for (int i=0; i<rank; i++) {
            idims[i] = (int)dims[i];
            if (maxdims != null) {
                imaxdims[i] = (int)maxdims[i];
            }
            else {
                imaxdims[i] = idims[i];
            }
            start[i] = 0;
        }

        int ichunks[] = null;
        if (chunks != null) {
            ichunks = new int[rank];
            for (int i=0; i<rank; i++) {
                ichunks[i] = (int)chunks[i];
            }
        }

        int grid = -1;
        int vgid = -1;
        int gid = file.getGRAccessID();
        int tid = type.toNative();

        if (tid >= 0) {
            try {
                grid = HDFLibrary.GRcreate(gid, name, ncomp, tid, interlace, idims);
            }
            catch (Exception ex) {
                log.debug("create(): failure: ", ex);
                log.trace("create(): finish");
                throw (ex);
            }
        }

        if (grid < 0) {
            log.debug("create(): Invalid GR ID");
            log.trace("create(): finish");
            throw (new HDFException("Unable to create the new dataset."));
        }

        if ((grid > 0) && (data != null)) {
            HDFLibrary.GRwriteimage(grid, start, null, idims, data);
        }

        if (chunks != null) {
            // set chunk
            HDFChunkInfo chunkInfo = new HDFChunkInfo(ichunks);
            HDFLibrary.GRsetchunk(grid, chunkInfo, HDFConstants.HDF_CHUNK);
        }

        if (gzip > 0) {
            // set compression
            int compType = HDFConstants.COMP_CODE_DEFLATE;
            HDFDeflateCompInfo compInfo = new HDFDeflateCompInfo();
            compInfo.level = gzip;
            HDFLibrary.GRsetcompress(grid, compType, compInfo);
        }

        int ref = HDFLibrary.GRidtoref(grid);

        if (!pgroup.isRoot()) {
            // add the dataset to the parent group
            vgid = pgroup.open();
            if (vgid < 0) {
                if (grid > 0) {
                    HDFLibrary.GRendaccess(grid);
                }
                log.debug("create(): Invalid VG ID");
                log.trace("create(): finish");
                throw (new HDFException("Unable to open the parent group."));
parent group.")); 978 } 979 980 HDFLibrary.Vaddtagref(vgid, HDFConstants.DFTAG_RI, ref); 981 982 pgroup.close(vgid); 983 } 984 985 try { 986 if (grid > 0) { 987 HDFLibrary.GRendaccess(grid); 988 } 989 } 990 catch (Exception ex) { 991 log.debug("create(): GRendaccess failure: ", ex); 992 } 993 994 long[] oid = {HDFConstants.DFTAG_NDG, ref}; 995 dataset = new H4GRImage(file, name, path, oid); 996 997 if (dataset != null) { 998 pgroup.addToMemberList(dataset); 999 } 1000 1001 log.trace("create(): finish"); 1002 return dataset; 1003 } 1004 1005 /** 1006 * copy attributes from one GR image to another GR image 1007 */ 1008 private void copyAttribute(int srcdid, int dstdid, int numberOfAttributes) 1009 { 1010 log.trace("copyAttribute(): start: srcdid={} dstdid={} numAttributes={}", srcdid, dstdid, numberOfAttributes); 1011 1012 if (numberOfAttributes <= 0) { 1013 log.debug("copyAttribute(): numberOfAttributes={}", numberOfAttributes); 1014 log.trace("copyAttribute(): finish"); 1015 return; 1016 } 1017 1018 try { 1019 boolean b = false; 1020 String[] attrName = new String[1]; 1021 int[] attrInfo = {0, 0}; 1022 for (int i=0; i<numberOfAttributes; i++) { 1023 attrName[0] = ""; 1024 try { 1025 b = HDFLibrary.GRattrinfo(srcdid, i, attrName, attrInfo); 1026 } 1027 catch (HDFException ex) { 1028 log.trace("copyAttribute(): attribute[{}] GRattrinfo failure: ", i, ex); 1029 b = false; 1030 } 1031 1032 if (!b) { 1033 continue; 1034 } 1035 1036 // read attribute data from source dataset 1037 byte[] attrBuff = new byte[attrInfo[1] * HDFLibrary.DFKNTsize(attrInfo[0])]; 1038 try { 1039 HDFLibrary.GRgetattr(srcdid, i, attrBuff); 1040 } 1041 catch (Exception ex) { 1042 log.trace("copyAttribute(): attribute[{}] GRgetattr failure: ", i, ex); 1043 attrBuff = null; 1044 } 1045 1046 if (attrBuff == null) { 1047 log.debug("copyAttribute(): attrBuff[{}] is null", i); 1048 log.trace("copyAttribute(): continue"); 1049 continue; 1050 } 1051 1052 // attach attribute to the destination dataset 1053 HDFLibrary.GRsetattr(dstdid, attrName[0], attrInfo[0], attrInfo[1], attrBuff); 1054 } // for (int i=0; i<numberOfAttributes; i++) 1055 } 1056 catch (Exception ex) { 1057 log.debug("copyAttribute(): failure: ", ex); 1058 } 1059 } 1060 1061 //Implementing DataFormat 1062 @SuppressWarnings("rawtypes") 1063 public List getMetadata(int... attrPropList) throws Exception { 1064 throw new UnsupportedOperationException("getMetadata(int... attrPropList) is not supported"); 1065 } 1066}