001/***************************************************************************** 002 * Copyright by The HDF Group. * 003 * Copyright by the Board of Trustees of the University of Illinois. * 004 * All rights reserved. * 005 * * 006 * This file is part of the HDF Java Products distribution. * 007 * The full copyright notice, including terms governing use, modification, * 008 * and redistribution, is contained in the files COPYING and Copyright.html. * 009 * COPYING can be found at the root of the source code distribution tree. * 010 * Or, see http://hdfgroup.org/products/hdf-java/doc/Copyright.html. * 011 * If you do not have access to either file, you may request a copy from * 012 * help@hdfgroup.org. * 013 ****************************************************************************/ 014 015package hdf.object.h5; 016 017import java.io.File; 018import java.lang.reflect.Array; 019import java.util.Enumeration; 020import java.util.Hashtable; 021import java.util.List; 022import java.util.Vector; 023 024import javax.swing.tree.DefaultMutableTreeNode; 025import javax.swing.tree.MutableTreeNode; 026import javax.swing.tree.TreeNode; 027 028import hdf.hdf5lib.H5; 029import hdf.hdf5lib.HDF5Constants; 030import hdf.hdf5lib.HDFNativeData; 031import hdf.hdf5lib.exceptions.HDF5Exception; 032import hdf.hdf5lib.structs.H5G_info_t; 033import hdf.hdf5lib.structs.H5L_info_t; 034import hdf.hdf5lib.structs.H5O_info_t; 035import hdf.object.Attribute; 036import hdf.object.Dataset; 037import hdf.object.Datatype; 038import hdf.object.FileFormat; 039import hdf.object.Group; 040import hdf.object.HObject; 041import hdf.object.ScalarDS; 042 043/** 044 * H5File is an implementation of the FileFormat class for HDF5 files. 045 * <p> 046 * The HDF5 file structure is stored in a tree that is made up of Java TreeNode objects. Each tree node represents an 047 * HDF5 object: a Group, Dataset, or Named Datatype. 
Starting from the root of the tree, <i>rootNode</i>, the tree can
 * be traversed to find a specific object.
 * <p>
 * The following example shows the implementation of finding an object for a given path in FileFormat. User applications
 * can directly call the static method FileFormat.findObject(file, objPath) to get the object.
 *
 * <pre>
 * HObject findObject(FileFormat file, String path) {
 *     if (file == null || path == null)
 *         return null;
 *     if (!path.endsWith("/"))
 *         path = path + "/";
 *     DefaultMutableTreeNode theRoot = (DefaultMutableTreeNode) file.getRootNode();
 *     if (theRoot == null)
 *         return null;
 *     else if (path.equals("/"))
 *         return (HObject) theRoot.getUserObject();
 *
 *     Enumeration local_enum = ((DefaultMutableTreeNode) theRoot).breadthFirstEnumeration();
 *     DefaultMutableTreeNode theNode = null;
 *     HObject theObj = null;
 *     while (local_enum.hasMoreElements()) {
 *         theNode = (DefaultMutableTreeNode) local_enum.nextElement();
 *         theObj = (HObject) theNode.getUserObject();
 *         String fullPath = theObj.getFullName() + "/";
 *         // Stop at the first node whose full path matches the requested path.
 *         if (path.equals(fullPath) && theObj.getPath() != null) {
 *             break;
 *         }
 *     }
 *
 *     return theObj;
 * }
 * </pre>
 *
 * @author Peter X. Cao
 * @version 2.4 9/4/2007
 */
public class H5File extends FileFormat {
    private static final long serialVersionUID = 6247335559471526045L;

    private final static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5File.class);

    /**
     * The file access flag. Valid values are HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5F_ACC_RDWR and
     * HDF5Constants.H5F_ACC_CREAT. Set once in the constructor from the FileFormat-level access value.
     */
    private int flag;

    /**
     * The index type. Valid values are HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_INDEX_CRT_ORDER.
     */
    private int indexType = HDF5Constants.H5_INDEX_NAME;

    /**
     * The index order. Valid values are HDF5Constants.H5_ITER_INC, HDF5Constants.H5_ITER_DEC.
     */
    private int indexOrder = HDF5Constants.H5_ITER_INC;

    /**
     * The root node of the file hierarchy.
     */
    private DefaultMutableTreeNode rootNode;

    /**
     * The maximum number of characters in an attribute name; longer names are truncated when read.
     */
    private static final int attrNameLen = 256;

    /**
     * The library version bounds: [low, high], filled in by setLibBounds().
     */
    private int[] libver;

    // NOTE(review): presumably tracks whether attributes have been loaded for this
    // file — confirm against the uses of this flag later in the class.
    private boolean attrFlag;

    /***************************************************************************
     * Constructor
     **************************************************************************/
    /**
     * Constructs an H5File instance with an empty file name and read-only access.
     */
    public H5File() {
        this("", READ);
    }

    /**
     * Constructs an H5File instance with specified file name and read/write access.
     * <p>
     * This constructor does not open the file for access, nor does it confirm that the file can be opened read/write.
     *
     * @param fileName
     *            A valid file name, with a relative or absolute path.
     *
     * @throws NullPointerException
     *             If the <code>fileName</code> argument is <code>null</code>.
     */
    public H5File(String fileName) {
        this(fileName, WRITE);
    }

    /**
     * Constructs an H5File instance with specified file name and access.
     * <p>
     * The access parameter values and corresponding behaviors:
     * <ul>
     * <li>READ: Read-only access; open() will fail if file doesn't exist.</li>
     * <li>WRITE: Read/Write access; open() will fail if file doesn't exist or if file can't be opened with read/write
     * access.</li>
     * <li>CREATE: Read/Write access; create a new file or truncate an existing one; open() will fail if file can't be
     * created or if file exists but can't be opened read/write.</li>
     * </ul>
     * <p>
     * This constructor does not open the file for access, nor does it confirm that the file can later be opened
     * read/write or created.
     * <p>
     * The flag returned by {@link #isReadOnly()} is set to true if the access parameter value is READ, even though the
     * file isn't yet open.
     *
     * @param fileName
     *            A valid file name, with a relative or absolute path.
     * @param access
     *            The file access flag, which determines behavior when file is opened. Acceptable values are
     *            <code> READ, WRITE, </code> and <code>CREATE</code>.
     *
     * @throws NullPointerException
     *             If the <code>fileName</code> argument is <code>null</code>.
     */
    public H5File(String fileName, int access) {
        // Call FileFormat ctor to set absolute path name
        super(fileName);
        libver = new int[2];
        attrFlag = false;

        // set metadata for the instance
        rootNode = null;
        this.fid = -1;
        isReadOnly = (access == READ);

        // At this point we just set up the flags for what happens later.
        // We just pass unexpected access values on... subclasses may have
        // their own values.
        if (access == READ) {
            flag = HDF5Constants.H5F_ACC_RDONLY;
        }
        else if (access == WRITE) {
            flag = HDF5Constants.H5F_ACC_RDWR;
        }
        else if (access == CREATE) {
            flag = HDF5Constants.H5F_ACC_CREAT;
        }
        else {
            flag = access;
        }
    }

    /***************************************************************************
     * Class methods
     **************************************************************************/

    /**
     * Copies the attributes of one object to another object.
     * <p>
     * This method copies all the attributes from one object (source object) to another (destination object). If an
     * attribute already exists in the destination object, the attribute will not be copied. Attribute names exceeding
     * 256 characters will be truncated in the destination object.
     * <p>
     * The object can be an H5Group, an H5Dataset, or a named H5Datatype. This method is in the H5File class because
     * there is no H5Object class and it is specific to HDF5 objects.
     * <p>
     * The copy can fail for a number of reasons, including an invalid source or destination object, but no exceptions
     * are thrown. The actual copy is carried out by the method: {@link #copyAttributes(int, int)}
     *
     * @param src
     *            The source object.
     * @param dst
     *            The destination object.
     *
     * @see #copyAttributes(int, int)
     */
    public static final void copyAttributes(HObject src, HObject dst) {
        if ((src != null) && (dst != null)) {
            int srcID = src.open();
            int dstID = dst.open();

            if ((srcID >= 0) && (dstID >= 0)) {
                copyAttributes(srcID, dstID);
            }

            if (srcID >= 0) {
                src.close(srcID);
            }

            if (dstID >= 0) {
                dst.close(dstID);
            }
        }
    }

    /**
     * Copies the attributes of one object to another object.
     * <p>
     * This method copies all the attributes from one object (source object) to another (destination object).
If an 249 * attribute already exists in the destination object, the attribute will not be copied. Attribute names exceeding 250 * 256 characters will be truncated in the destination object. 251 * <p> 252 * The object can be an H5Group, an H5Dataset, or a named H5Datatype. This method is in the H5File class because 253 * there is no H5Object class and it is specific to HDF5 objects. 254 * <p> 255 * The copy can fail for a number of reasons, including an invalid source or destination object identifier, but no 256 * exceptions are thrown. 257 * 258 * @param src_id 259 * The identifier of the source object. 260 * @param dst_id 261 * The identifier of the destination object. 262 */ 263 public static final void copyAttributes(int src_id, int dst_id) { 264 int aid_src = -1, aid_dst = -1, atid = -1, asid = -1; 265 String[] aName = { "" }; 266 H5O_info_t obj_info = null; 267 268 try { 269 obj_info = H5.H5Oget_info(src_id); 270 } 271 catch (Exception ex) { 272 obj_info.num_attrs = -1; 273 } 274 275 if (obj_info.num_attrs < 0) { 276 return; 277 } 278 279 for (int i = 0; i < obj_info.num_attrs; i++) { 280 aName[0] = new String(""); 281 282 try { 283 aid_src = H5.H5Aopen_by_idx(src_id, ".", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 284 i, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 285 H5.H5Aget_name(aid_src, H5File.attrNameLen, aName); 286 atid = H5.H5Aget_type(aid_src); 287 asid = H5.H5Aget_space(aid_src); 288 289 aid_dst = H5.H5Acreate(dst_id, aName[0], atid, asid, HDF5Constants.H5P_DEFAULT, 290 HDF5Constants.H5P_DEFAULT); 291 292 // use native data copy 293 H5.H5Acopy(aid_src, aid_dst); 294 295 } 296 catch (Exception ex) { 297 log.debug("Attribute[{}] failure: ", i, ex); 298 } 299 300 try { 301 H5.H5Sclose(asid); 302 } 303 catch (Exception ex) { 304 log.debug("H5Sclose failure: ", ex); 305 } 306 try { 307 H5.H5Tclose(atid); 308 } 309 catch (Exception ex) { 310 log.debug("H5Tclose failure: ", ex); 311 } 312 try { 313 H5.H5Aclose(aid_src); 314 } 
315 catch (Exception ex) { 316 log.debug("src H5Aclose failure: ", ex); 317 } 318 try { 319 H5.H5Aclose(aid_dst); 320 } 321 catch (Exception ex) { 322 log.debug("dst H5Aclose failure: ", ex); 323 } 324 325 } // for (int i=0; i<num_attr; i++) 326 } 327 328 /** 329 * Returns a list of attributes for the specified object. 330 * <p> 331 * This method returns a list containing the attributes associated with the identified object. If there are no 332 * associated attributes, an empty list will be returned. 333 * <p> 334 * Attribute names exceeding 256 characters will be truncated in the returned list. 335 * 336 * @param objID 337 * The identifier for the object whose attributes are to be returned. 338 * 339 * @return The list of the object's attributes. 340 * 341 * @throws HDF5Exception 342 * If an underlying HDF library routine is unable to perform a step necessary to retrieve the 343 * attributes. A variety of failures throw this exception. 344 * 345 * @see #getAttribute(int,int,int) 346 */ 347 public static final List<Attribute> getAttribute(int objID) throws HDF5Exception { 348 return H5File.getAttribute(objID, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC); 349 } 350 351 /** 352 * Returns a list of attributes for the specified object, in creation or alphabetical order. 353 * <p> 354 * This method returns a list containing the attributes associated with the identified object. If there are no 355 * associated attributes, an empty list will be returned. The list of attributes returned can be in increasing or 356 * decreasing, creation or alphabetical order. 357 * <p> 358 * Attribute names exceeding 256 characters will be truncated in the returned list. 359 * 360 * @param objID 361 * The identifier for the object whose attributes are to be returned. 362 * @param idx_type 363 * The type of index. 
Valid values are: 364 * <ul> 365 * <li>H5_INDEX_NAME: An alpha-numeric index by attribute name <li>H5_INDEX_CRT_ORDER: An index by 366 * creation order 367 * </ul> 368 * @param order 369 * The index traversal order. Valid values are: 370 * <ul> 371 * <li>H5_ITER_INC: A top-down iteration incrementing the index position at each step. <li>H5_ITER_DEC: A 372 * bottom-up iteration decrementing the index position at each step. 373 * </ul> 374 * 375 * @return The list of the object's attributes. 376 * 377 * @throws HDF5Exception 378 * If an underlying HDF library routine is unable to perform a step necessary to retrieve the 379 * attributes. A variety of failures throw this exception. 380 */ 381 382 public static final List<Attribute> getAttribute(int objID, int idx_type, int order) throws HDF5Exception { 383 List<Attribute> attributeList = null; 384 int aid = -1, sid = -1, tid = -1; 385 H5O_info_t obj_info = null; 386 log.trace("getAttribute: start"); 387 388 try { 389 obj_info = H5.H5Oget_info(objID); 390 } 391 catch (Exception ex) { 392 log.debug("H5Oget_info failure: ", ex); 393 } 394 if (obj_info.num_attrs <= 0) { 395 return (attributeList = new Vector<Attribute>()); 396 } 397 398 int n = (int) obj_info.num_attrs; 399 attributeList = new Vector<Attribute>(n); 400 log.trace("getAttribute: num_attrs={}", n); 401 402 for (int i = 0; i < n; i++) { 403 long lsize = 1; 404 log.trace("getAttribute: attribute[{}]", i); 405 406 try { 407 aid = H5.H5Aopen_by_idx(objID, ".", idx_type, order, i, HDF5Constants.H5P_DEFAULT, 408 HDF5Constants.H5P_DEFAULT); 409 sid = H5.H5Aget_space(aid); 410 411 long dims[] = null; 412 int rank = H5.H5Sget_simple_extent_ndims(sid); 413 414 if (rank > 0) { 415 dims = new long[rank]; 416 H5.H5Sget_simple_extent_dims(sid, dims, null); 417 log.trace("getAttribute() rank={}, dims={}", rank, dims); 418 for (int j = 0; j < dims.length; j++) { 419 lsize *= dims[j]; 420 } 421 } 422 String[] nameA = { "" }; 423 H5.H5Aget_name(aid, H5File.attrNameLen, 
nameA); 424 log.trace("getAttribute: attribute[{}] is {}", i, nameA); 425 426 int tmptid = -1; 427 try { 428 tmptid = H5.H5Aget_type(aid); 429 tid = H5.H5Tget_native_type(tmptid); 430 log.trace("getAttribute: attribute[{}] tid={} native tmptid={} from aid={}", i, tid, tmptid, aid); 431 } 432 finally { 433 try { 434 H5.H5Tclose(tmptid); 435 } 436 catch (Exception ex) { 437 log.debug("H5Tclose failure: ", ex); 438 } 439 } 440 Datatype attrType = new H5Datatype(tid); 441 Attribute attr = new Attribute(nameA[0], attrType, dims); 442 attributeList.add(attr); 443 log.trace("getAttribute: attribute[{}] Datatype={}", i, attrType.getDatatypeDescription()); 444 445 boolean is_variable_str = false; 446 boolean isVLEN = false; 447 boolean isCompound = false; 448 boolean isScalar = false; 449 int tclass = H5.H5Tget_class(tid); 450 451 if (dims == null) 452 isScalar = true; 453 try { 454 is_variable_str = H5.H5Tis_variable_str(tid); 455 } 456 catch (Exception ex) { 457 log.debug("H5Tis_variable_str failure: ", ex); 458 } 459 isVLEN = (tclass == HDF5Constants.H5T_VLEN); 460 isCompound = (tclass == HDF5Constants.H5T_COMPOUND); 461 log.trace( 462 "getAttribute: attribute[{}] has size={} isCompound={} isScalar={} is_variable_str={} isVLEN={}", 463 i, lsize, isCompound, isScalar, is_variable_str, isVLEN); 464 465 // retrieve the attribute value 466 if (lsize <= 0) { 467 continue; 468 } 469 470 Object value = null; 471 if (is_variable_str) { 472 String[] strs = new String[(int) lsize]; 473 for (int j = 0; j < lsize; j++) { 474 strs[j] = ""; 475 } 476 try { 477 log.trace("getAttribute: attribute[{}] H5AreadVL", i); 478 H5.H5AreadVL(aid, tid, strs); 479 } 480 catch (Exception ex) { 481 ex.printStackTrace(); 482 } 483 value = strs; 484 } 485 else if (isCompound || (isScalar && tclass == HDF5Constants.H5T_ARRAY)) { 486 String[] strs = new String[(int) lsize]; 487 for (int j = 0; j < lsize; j++) { 488 strs[j] = ""; 489 } 490 try { 491 log.trace("getAttribute: attribute[{}] H5AreadComplex", 
i); 492 H5.H5AreadComplex(aid, tid, strs); 493 } 494 catch (Exception ex) { 495 ex.printStackTrace(); 496 } 497 value = strs; 498 } 499 else if (isVLEN) { 500 String[] strs = new String[(int) lsize]; 501 for (int j = 0; j < lsize; j++) { 502 strs[j] = ""; 503 } 504 try { 505 log.trace("getAttribute: attribute[{}] H5AreadVL", i); 506 H5.H5AreadComplex(aid, tid, strs); 507 } 508 catch (Exception ex) { 509 ex.printStackTrace(); 510 } 511 value = strs; 512 } 513 else { 514 value = H5Datatype.allocateArray(tid, (int) lsize); 515 if (value == null) { 516 continue; 517 } 518 519 if (tclass == HDF5Constants.H5T_ARRAY) { 520 int tmptid1 = -1, tmptid2 = -1; 521 try { 522 log.trace("getAttribute: attribute[{}] H5Aread ARRAY tid={}", i, tid); 523 H5.H5Aread(aid, tid, value); 524 } 525 catch (Exception ex) { 526 ex.printStackTrace(); 527 } 528 finally { 529 try { 530 H5.H5Tclose(tmptid1); 531 } 532 catch (Exception ex) { 533 log.debug("tid1 H5Tclose failure: ", ex); 534 } 535 try { 536 H5.H5Tclose(tmptid2); 537 } 538 catch (Exception ex) { 539 log.debug("tid2 H5Tclose failure: ", ex); 540 } 541 } 542 } 543 else { 544 log.trace("getAttribute: attribute[{}] H5Aread", i); 545 H5.H5Aread(aid, tid, value); 546 } 547 548 if (tclass == HDF5Constants.H5T_STRING) { 549 log.trace("getAttribute: attribute[{}] byteToString", i); 550 value = Dataset.byteToString((byte[]) value, H5.H5Tget_size(tid)); 551 } 552 else if (tclass == HDF5Constants.H5T_REFERENCE) { 553 log.trace("getAttribute: attribute[{}] byteToLong", i); 554 value = HDFNativeData.byteToLong((byte[]) value); 555 } 556 } 557 558 attr.setValue(value); 559 560 } 561 catch (HDF5Exception ex) { 562 log.debug("Attribute[{}] inspection failure: ", i, ex); 563 } 564 finally { 565 try { 566 H5.H5Tclose(tid); 567 } 568 catch (Exception ex) { 569 log.debug("H5Tclose[{}] failure: ", i, ex); 570 } 571 try { 572 H5.H5Sclose(sid); 573 } 574 catch (Exception ex) { 575 log.debug("H5Sclose[{}] failure: ", i, ex); 576 } 577 try { 578 
H5.H5Aclose(aid); 579 } 580 catch (Exception ex) { 581 log.debug("H5Aclose[{}] failure: ", i, ex); 582 } 583 } 584 } // for (int i=0; i<obj_info.num_attrs; i++) 585 586 log.trace("getAttribute: finish"); 587 return attributeList; 588 } 589 590 /** 591 * Creates attributes for an HDF5 image dataset. 592 * <p> 593 * This method creates attributes for two common types of HDF5 images. It provides a way of adding multiple 594 * attributes to an HDF5 image dataset with a single call. The {@link #writeAttribute(HObject, Attribute, boolean)} 595 * method may be used to write image attributes that are not handled by this method. 596 * <p> 597 * For more information about HDF5 image attributes, see the <a 598 * href="http://hdfgroup.org/HDF5/doc/ADGuide/ImageSpec.html"> HDF5 Image and Palette Specification</a>. 599 * <p> 600 * This method can be called to create attributes for 24-bit true color and indexed images. The 601 * <code>selectionFlag</code> parameter controls whether this will be an indexed or true color image. If 602 * <code>selectionFlag</code> is <code>-1</code>, this will be an indexed image. If the value is 603 * <code>ScalarDS.INTERLACE_PIXEL</code> or <code>ScalarDS.INTERLACE_PLANE</code>, it will be a 24-bit true color 604 * image with the indicated interlace mode. 
605 * <p> 606 * <ul> 607 * The created attribute descriptions, names, and values are: 608 * <li>The image identifier: name="CLASS", value="IMAGE" 609 * <li>The version of image: name="IMAGE_VERSION", value="1.2" 610 * <li>The range of data values: name="IMAGE_MINMAXRANGE", value=[0, 255] 611 * <li>The type of the image: name="IMAGE_SUBCLASS", value="IMAGE_TRUECOLOR" or "IMAGE_INDEXED" 612 * <li>For IMAGE_TRUECOLOR, the interlace mode: name="INTERLACE_MODE", value="INTERLACE_PIXEL" or "INTERLACE_PLANE" 613 * <li>For IMAGE_INDEXED, the palettes to use in viewing the image: name="PALETTE", value= 1-d array of references 614 * to the palette datasets, with initial value of {-1} 615 * </ul> 616 * <p> 617 * This method is in the H5File class rather than H5ScalarDS because images are typically thought of at the File 618 * Format implementation level. 619 * 620 * @param dataset 621 * The image dataset the attributes are added to. 622 * @param selectionFlag 623 * Selects the image type and, for 24-bit true color images, the interlace mode. Valid values are: 624 * <ul> 625 * <li>-1: Indexed Image. <li>ScalarDS.INTERLACE_PIXEL: True Color Image. The component values for a 626 * pixel are stored contiguously. <li>ScalarDS.INTERLACE_PLANE: True Color Image. Each component is 627 * stored in a separate plane. 628 * </ul> 629 * 630 * @throws Exception 631 * If there is a problem creating the attributes, or if the selectionFlag is invalid. 
632 */ 633 private static final void createImageAttributes(Dataset dataset, int selectionFlag) throws Exception { 634 String subclass = null; 635 String interlaceMode = null; 636 637 if (selectionFlag == ScalarDS.INTERLACE_PIXEL) { 638 subclass = "IMAGE_TRUECOLOR"; 639 interlaceMode = "INTERLACE_PIXEL"; 640 } 641 else if (selectionFlag == ScalarDS.INTERLACE_PLANE) { 642 subclass = "IMAGE_TRUECOLOR"; 643 interlaceMode = "INTERLACE_PLANE"; 644 } 645 else if (selectionFlag == -1) { 646 subclass = "IMAGE_INDEXED"; 647 } 648 else { 649 throw new HDF5Exception("The selectionFlag is invalid."); 650 } 651 652 String attrName = "CLASS"; 653 String[] classValue = { "IMAGE" }; 654 Datatype attrType = new H5Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, -1, -1); 655 Attribute attr = new Attribute(attrName, attrType, null); 656 attr.setValue(classValue); 657 dataset.writeMetadata(attr); 658 659 attrName = "IMAGE_VERSION"; 660 String[] versionValue = { "1.2" }; 661 attrType = new H5Datatype(Datatype.CLASS_STRING, versionValue[0].length() + 1, -1, -1); 662 attr = new Attribute(attrName, attrType, null); 663 attr.setValue(versionValue); 664 dataset.writeMetadata(attr); 665 666 long[] attrDims = { 2 }; 667 attrName = "IMAGE_MINMAXRANGE"; 668 byte[] attrValueInt = { 0, (byte) 255 }; 669 attrType = new H5Datatype(Datatype.CLASS_CHAR, 1, Datatype.NATIVE, Datatype.SIGN_NONE); 670 attr = new Attribute(attrName, attrType, attrDims); 671 attr.setValue(attrValueInt); 672 dataset.writeMetadata(attr); 673 674 attrName = "IMAGE_SUBCLASS"; 675 String[] subclassValue = { subclass }; 676 attrType = new H5Datatype(Datatype.CLASS_STRING, subclassValue[0].length() + 1, -1, -1); 677 attr = new Attribute(attrName, attrType, null); 678 attr.setValue(subclassValue); 679 dataset.writeMetadata(attr); 680 681 if ((selectionFlag == ScalarDS.INTERLACE_PIXEL) || (selectionFlag == ScalarDS.INTERLACE_PLANE)) { 682 attrName = "INTERLACE_MODE"; 683 String[] interlaceValue = { interlaceMode }; 684 
attrType = new H5Datatype(Datatype.CLASS_STRING, interlaceValue[0].length() + 1, -1, -1); 685 attr = new Attribute(attrName, attrType, null); 686 attr.setValue(interlaceValue); 687 dataset.writeMetadata(attr); 688 } 689 else { 690 attrName = "PALETTE"; 691 long[] palRef = { 0 }; // set ref to null 692 attrType = new H5Datatype(Datatype.CLASS_REFERENCE, 1, Datatype.NATIVE, Datatype.SIGN_NONE); 693 attr = new Attribute(attrName, attrType, null); 694 attr.setValue(palRef); 695 dataset.writeMetadata(attr); 696 } 697 } 698 699 /** 700 * Updates values of scalar dataset object references in copied file. 701 * <p> 702 * This method has very specific functionality as documented below, and the user is advised to pay close attention 703 * when dealing with files that contain references. 704 * <p> 705 * When a copy is made from one HDF file to another, object references and dataset region references are copied, but 706 * the references in the destination file are not updated by the copy and are therefore invalid. 707 * <p> 708 * When an entire file is copied, this method updates the values of the object references and dataset region 709 * references that are in scalar datasets in the destination file so that they point to the correct object(s) in the 710 * destination file. The method does not update references that occur in objects other than scalar datasets. 711 * <p> 712 * In the current release, the updating of object references is not handled completely as it was not required by the 713 * projects that funded development. There is no support for updates when the copy does not include the entire file. 714 * Nor is there support for updating objects other than scalar datasets in full-file copies. This functionality will 715 * be extended as funding becomes available or, possibly, when the underlying HDF library supports the reference 716 * updates itself. 717 * 718 * @param srcFile 719 * The file that was copied. 
720 * @param dstFile 721 * The destination file where the object references will be updated. 722 * 723 * @throws Exception 724 * If there is a problem in the update process. 725 */ 726 public static final void updateReferenceDataset(H5File srcFile, H5File dstFile) throws Exception { 727 if ((srcFile == null) || (dstFile == null)) { 728 return; 729 } 730 731 DefaultMutableTreeNode srcRoot = (DefaultMutableTreeNode) srcFile.getRootNode(); 732 DefaultMutableTreeNode newRoot = (DefaultMutableTreeNode) dstFile.getRootNode(); 733 734 Enumeration<?> srcEnum = srcRoot.breadthFirstEnumeration(); 735 Enumeration<?> newEnum = newRoot.breadthFirstEnumeration(); 736 737 // build one-to-one table between objects in 738 // the source file and new file 739 int did = -1, tid = -1; 740 HObject srcObj, newObj; 741 Hashtable<String, long[]> oidMap = new Hashtable<String, long[]>(); 742 List<ScalarDS> refDatasets = new Vector<ScalarDS>(); 743 while (newEnum.hasMoreElements() && srcEnum.hasMoreElements()) { 744 srcObj = (HObject) ((DefaultMutableTreeNode) srcEnum.nextElement()).getUserObject(); 745 newObj = (HObject) ((DefaultMutableTreeNode) newEnum.nextElement()).getUserObject(); 746 oidMap.put(String.valueOf((srcObj.getOID())[0]), newObj.getOID()); 747 did = -1; 748 tid = -1; 749 750 // for Scalar DataSets in destination, if there is an object 751 // reference in the dataset, add it to the refDatasets list for 752 // later updating. 
753 if (newObj instanceof ScalarDS) { 754 ScalarDS sd = (ScalarDS) newObj; 755 did = sd.open(); 756 if (did >= 0) { 757 try { 758 tid = H5.H5Dget_type(did); 759 if (H5.H5Tequal(tid, HDF5Constants.H5T_STD_REF_OBJ)) { 760 refDatasets.add(sd); 761 } 762 } 763 catch (Exception ex) { 764 log.debug("ScalarDS reference failure: ", ex); 765 } 766 finally { 767 try { 768 H5.H5Tclose(tid); 769 } 770 catch (Exception ex) { 771 log.debug("ScalarDS reference H5Tclose failure: ", ex); 772 } 773 } 774 } 775 sd.close(did); 776 } // if (newObj instanceof ScalarDS) 777 } 778 779 // Update the references in the scalar datasets in the dest file. 780 H5ScalarDS d = null; 781 int sid = -1, size = 0, rank = 0; 782 int n = refDatasets.size(); 783 for (int i = 0; i < n; i++) { 784 log.trace("Update the references in the scalar datasets in the dest file"); 785 d = (H5ScalarDS) refDatasets.get(i); 786 byte[] buf = null; 787 long[] refs = null; 788 789 try { 790 did = d.open(); 791 if (did >= 0) { 792 tid = H5.H5Dget_type(did); 793 sid = H5.H5Dget_space(did); 794 rank = H5.H5Sget_simple_extent_ndims(sid); 795 size = 1; 796 if (rank > 0) { 797 long[] dims = new long[rank]; 798 H5.H5Sget_simple_extent_dims(sid, dims, null); 799 log.trace("updateReferenceDataset() rank={}, dims={}", rank, dims); 800 for (int j = 0; j < rank; j++) { 801 size *= (int) dims[j]; 802 } 803 dims = null; 804 } 805 806 buf = new byte[size * 8]; 807 H5.H5Dread(did, tid, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, buf); 808 809 // update the ref values 810 refs = HDFNativeData.byteToLong(buf); 811 size = refs.length; 812 for (int j = 0; j < size; j++) { 813 long[] theOID = oidMap.get(String.valueOf(refs[j])); 814 if (theOID != null) { 815 refs[j] = theOID[0]; 816 } 817 } 818 819 // write back to file 820 H5.H5Dwrite(did, tid, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, refs); 821 } 822 else { 823 log.debug("updateReferenceDataset() dest file dataset failed to 
open"); 824 } 825 } 826 catch (Exception ex) { 827 continue; 828 } 829 finally { 830 try { 831 H5.H5Tclose(tid); 832 } 833 catch (Exception ex) { 834 log.debug("H5ScalarDS reference[{}] H5Tclose failure: ", i, ex); 835 } 836 try { 837 H5.H5Sclose(sid); 838 } 839 catch (Exception ex) { 840 log.debug("H5ScalarDS reference[{}] H5Sclose failure: ", i, ex); 841 } 842 try { 843 H5.H5Dclose(did); 844 } 845 catch (Exception ex) { 846 log.debug("H5ScalarDS reference[{}] H5Dclose failure: ", i, ex); 847 } 848 } 849 850 refs = null; 851 buf = null; 852 } // for (int i=0; i<n; i++) 853 } 854 855 /*************************************************************************** 856 * Implementation Class methods. These methods are related to the implementing H5File class, but not to a particular 857 * instance of the class. Since we can't override class methods (they can only be shadowed in Java), these are 858 * instance methods. 859 **************************************************************************/ 860 861 /** 862 * Returns the version of the HDF5 library. 863 * 864 * @see hdf.object.FileFormat#getLibversion() 865 */ 866 @Override 867 public String getLibversion() { 868 int[] vers = new int[3]; 869 String ver = "HDF5 "; 870 871 try { 872 H5.H5get_libversion(vers); 873 } 874 catch (Throwable ex) { 875 ex.printStackTrace(); 876 } 877 878 ver += vers[0] + "." + vers[1] + "." + vers[2]; 879 log.debug("libversion is {}", ver); 880 881 return ver; 882 } 883 884 /** 885 * Checks if the specified FileFormat instance has the HDF5 format. 886 * 887 * @see hdf.object.FileFormat#isThisType(hdf.object.FileFormat) 888 */ 889 @Override 890 public boolean isThisType(FileFormat theFile) { 891 return (theFile instanceof H5File); 892 } 893 894 /** 895 * Checks if the specified file has the HDF5 format. 
896 * 897 * @see hdf.object.FileFormat#isThisType(java.lang.String) 898 */ 899 @Override 900 public boolean isThisType(String filename) { 901 boolean isH5 = false; 902 903 try { 904 isH5 = H5.H5Fis_hdf5(filename); 905 } 906 catch (HDF5Exception ex) { 907 isH5 = false; 908 } 909 910 return isH5; 911 } 912 913 /** 914 * Creates an HDF5 file with the specified name and returns a new H5File instance associated with the file. 915 * 916 * @throws Exception 917 * If the file cannot be created or if createFlag has unexpected value. 918 * 919 * @see hdf.object.FileFormat#createFile(java.lang.String, int) 920 * @see #H5File(String, int) 921 */ 922 @Override 923 public FileFormat createFile(String filename, int createFlag) throws Exception { 924 // Flag if we need to create or truncate the file. 925 Boolean doCreateFile = true; 926 log.trace("createFile start"); 927 928 // Won't create or truncate if CREATE_OPEN specified and file exists 929 if ((createFlag & FILE_CREATE_OPEN) == FILE_CREATE_OPEN) { 930 File f = new File(filename); 931 if (f.exists()) { 932 doCreateFile = false; 933 } 934 } 935 936 if (doCreateFile) { 937 938 int fapl = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS); 939 940 if ((createFlag & FILE_CREATE_EARLY_LIB) != FILE_CREATE_EARLY_LIB) { 941 H5.H5Pset_libver_bounds(fapl, HDF5Constants.H5F_LIBVER_LATEST, HDF5Constants.H5F_LIBVER_LATEST); 942 } 943 944 int fileid = H5.H5Fcreate(filename, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, fapl); 945 try { 946 H5.H5Pclose(fapl); 947 H5.H5Fclose(fileid); 948 } 949 catch (HDF5Exception ex) { 950 log.debug("H5 file, {} failure: ", filename, ex); 951 } 952 } 953 log.trace("createFile finish"); 954 955 return new H5File(filename, WRITE); 956 } 957 958 /** 959 * Creates an H5File instance with specified file name and access. 960 * 961 * @see hdf.object.FileFormat#createInstance(java.lang.String, int) 962 * @see #H5File(String, int) 963 * 964 * @throws Exception 965 * If there is a failure. 
     */
    @Override
    public FileFormat createInstance(String filename, int access) throws Exception {
        log.trace("createInstance start");
        return new H5File(filename, access);
    }

    /***************************************************************************
     * Instance Methods
     *
     * These methods are related to the H5File class and to particular instances of objects with this class type.
     **************************************************************************/

    /**
     * Opens file and returns a file identifier.
     *
     * @see hdf.object.FileFormat#open()
     */
    @Override
    public int open() throws Exception {
        // Open with the full hierarchy loaded into memory.
        return open(true);
    }

    /**
     * Opens file and returns a file identifier.
     *
     * @see hdf.object.FileFormat#open(int...)
     */
    @Override
    public int open(int... indexList) throws Exception {
        // NOTE(review): assumes indexList carries at least two elements
        // (index type, index order) — a shorter array throws
        // ArrayIndexOutOfBoundsException. TODO confirm against callers.
        setIndexType(indexList[0]);
        setIndexOrder(indexList[1]);
        return open(true);
    }

    /**
     * Sets the bounds of library versions.
     *
     * @param low
     *            The earliest version of the library.
     * @param high
     *            The latest version of the library.
     *
     * @throws HDF5Exception
     *             If there is an error at the HDF5 library level.
     */
    public void setLibBounds(int low, int high) throws Exception {
        int fapl = HDF5Constants.H5P_DEFAULT;

        // No-op when the file is not open.
        if (fid < 0)
            return;

        fapl = H5.H5Fget_access_plist(fid);

        try {
            // Negative bounds select the library's widest defaults.
            if (low < 0)
                low = HDF5Constants.H5F_LIBVER_EARLIEST;

            if (high < 0)
                high = HDF5Constants.H5F_LIBVER_LATEST;

            H5.H5Pset_libver_bounds(fapl, low, high);
            // Read the effective bounds back into the cached libver array.
            H5.H5Pget_libver_bounds(fapl, libver);
        }
        finally {
            // Always release the access property list obtained above.
            try {
                H5.H5Pclose(fapl);
            }
            catch (Exception e) {
                log.debug("libver bounds H5Pclose failure: ", e);
            }
        }
    }

    /**
     * Gets the bounds of library versions.
     *
     * @return libver The earliest and latest version of the library.
     *
     * @throws HDF5Exception
     *             If there is an error at the HDF5 library level.
     */
    public int[] getLibBounds() throws Exception {
        // Returns the cached pair refreshed by setLibBounds().
        return libver;
    }

    /**
     * Closes file associated with this H5File instance.
     *
     * @see hdf.object.FileFormat#close()
     *
     * @throws HDF5Exception
     *             If there is an error at the HDF5 library level.
     */
    @Override
    public void close() throws HDF5Exception {
        if (fid < 0) {
            log.debug("file {} is not open", fullFileName);
            return;
        }
        log.trace("H5File:close start");
        // The current working directory may be changed at Dataset.read()
        // by System.setProperty("user.dir", newdir) to make it work for external
        // datasets. We need to set it back to the original current working
        // directory (when hdf-java application started) before the file
        // is closed/opened. Otherwise, relative path, e.g. "./test.h5" may
        // not work
        String rootPath = System.getProperty("hdfview.workdir");
        if (rootPath == null) {
            rootPath = System.getProperty("user.dir");
        }
        System.setProperty("user.dir", rootPath);//H5.H5Dchdir_ext(rootPath);

        // clean up unused objects: drop cached data held by every dataset and
        // group in the in-memory tree so it can be garbage collected.
        if (rootNode != null) {
            DefaultMutableTreeNode theNode = null;
            HObject theObj = null;
            Enumeration<?> local_enum = (rootNode).breadthFirstEnumeration();
            while (local_enum.hasMoreElements()) {
                theNode = (DefaultMutableTreeNode) local_enum.nextElement();
                theObj = (HObject) theNode.getUserObject();

                if (theObj instanceof Dataset) {
                    ((Dataset) theObj).clear();
                }
                else if (theObj instanceof Group) {
                    ((Group) theObj).clear();
                }
            }
        }

        // Close all open objects associated with this file.
        try {
            int n = 0, type = -1, oids[];
            n = H5.H5Fget_obj_count(fid, HDF5Constants.H5F_OBJ_ALL);
            log.trace("H5File:close open objects={}", n);

            if (n > 0) {
                oids = new int[n];
                H5.H5Fget_obj_ids(fid, HDF5Constants.H5F_OBJ_ALL, n, oids);

                for (int i = 0; i < n; i++) {
                    log.trace("H5File:close object[{}] id={}", i, oids[i]);
                    // Each id is closed with the call matching its identifier type;
                    // failures are logged and do not stop the remaining closes.
                    type = H5.H5Iget_type(oids[i]);

                    if (HDF5Constants.H5I_DATASET == type) {
                        try {
                            H5.H5Dclose(oids[i]);
                        }
                        catch (Exception ex2) {
                            log.debug("Object[{}] H5Dclose failure: ", i, ex2);
                        }
                    }
                    else if (HDF5Constants.H5I_GROUP == type) {
                        try {
                            H5.H5Gclose(oids[i]);
                        }
                        catch (Exception ex2) {
                            log.debug("Object[{}] H5Gclose failure: ", i, ex2);
                        }
                    }
                    else if (HDF5Constants.H5I_DATATYPE == type) {
                        try {
                            H5.H5Tclose(oids[i]);
                        }
                        catch (Exception ex2) {
                            log.debug("Object[{}] H5Tclose failure: ", i, ex2);
                        }
                    }
                    else if (HDF5Constants.H5I_ATTR == type) {
                        try {
                            H5.H5Aclose(oids[i]);
                        }
                        catch (Exception ex2) {
                            log.debug("Object[{}] H5Aclose failure: ", i, ex2);
                        }
                    }
                } // for (int i=0; i<n; i++)
            } // if ( n>0)
        }
        catch (Exception ex) {
            log.debug("close open objects failure: ", ex);
        }

        // Flush pending writes before closing the file handle.
        try {
            H5.H5Fflush(fid, HDF5Constants.H5F_SCOPE_GLOBAL);
        }
        catch (Exception ex) {
            log.debug("H5Fflush failure: ", ex);
        }

        try {
            H5.H5Fclose(fid);
        }
        catch (Exception ex) {
            log.debug("H5Fclose failure: ", ex);
        }

        // Set fid to -1 but don't reset rootObject
        fid = -1;
        log.trace("H5File:close finish");
    }

    /**
     * Returns the root node of the open HDF5 File.
1171 * 1172 * @see hdf.object.FileFormat#getRootNode() 1173 */ 1174 @Override 1175 public TreeNode getRootNode() { 1176 return rootNode; 1177 } 1178 1179 /* 1180 * (non-Javadoc) 1181 * 1182 * @see hdf.object.FileFormat#get(java.lang.String) 1183 */ 1184 @Override 1185 public HObject get(String path) throws Exception { 1186 HObject obj = null; 1187 log.trace("H5File:get start"); 1188 1189 if ((path == null) || (path.length() <= 0)) { 1190 System.err.println("(path == null) || (path.length() <= 0)"); 1191 return null; 1192 } 1193 1194 // replace the wrong slash and get rid of "//" 1195 path = path.replace('\\', '/'); 1196 path = "/" + path; 1197 path = path.replaceAll("//", "/"); 1198 log.trace("H5File:get path:{}", path); 1199 1200 // the whole file tree is loaded. find the object in the tree 1201 if (rootNode != null) { 1202 obj = findObject(this, path); 1203 } 1204 1205 // found object in memory 1206 if (obj != null) { 1207 return obj; 1208 } 1209 1210 // open only the requested object 1211 String name = null, pPath = null; 1212 if (path.equals("/")) { 1213 name = "/"; // the root 1214 } 1215 else { 1216 // separate the parent path and the object name 1217 if (path.endsWith("/")) { 1218 path = path.substring(0, path.length() - 1); 1219 } 1220 1221 int idx = path.lastIndexOf('/'); 1222 name = path.substring(idx + 1); 1223 if (idx == 0) { 1224 pPath = "/"; 1225 } 1226 else { 1227 pPath = path.substring(0, idx); 1228 } 1229 } 1230 1231 // do not open the full tree structure, only the file handler 1232 int fid_before_open = fid; 1233 log.trace("H5File:get fid_before_open with:{}", fid); 1234 fid = open(false); 1235 if (fid < 0) { 1236 System.err.println("Could not open file handler"); 1237 return null; 1238 } 1239 1240 try { 1241 H5O_info_t info; 1242 int objType; 1243 int oid = H5.H5Oopen(fid, path, HDF5Constants.H5P_DEFAULT); 1244 log.trace("H5File:get H5Oopen:{}", oid); 1245 1246 if (oid >= 0) { 1247 info = H5.H5Oget_info(oid); 1248 log.trace("H5File:get 
H5Oget_info-type:{}", info.type); 1249 objType = info.type; 1250 if (objType == HDF5Constants.H5O_TYPE_DATASET) { 1251 int did = -1; 1252 try { 1253 log.trace("H5File:get H5O_TYPE_DATASET:{}-{}", name, pPath); 1254 did = H5.H5Dopen(fid, path, HDF5Constants.H5P_DEFAULT); 1255 obj = getDataset(did, name, pPath); 1256 } 1257 finally { 1258 try { 1259 H5.H5Dclose(did); 1260 } 1261 catch (Exception ex) { 1262 log.debug("{} H5Dclose failure: ", path, ex); 1263 } 1264 } 1265 } 1266 else if (objType == HDF5Constants.H5O_TYPE_GROUP) { 1267 int gid = -1; 1268 try { 1269 log.trace("H5File:get H5O_TYPE_GROUP:{}-{}", name, pPath); 1270 gid = H5.H5Gopen(fid, path, HDF5Constants.H5P_DEFAULT); 1271 H5Group pGroup = null; 1272 if (pPath != null) { 1273 pGroup = new H5Group(this, null, pPath, null); 1274 obj = getGroup(gid, name, pGroup); 1275 pGroup.addToMemberList(obj); 1276 } 1277 else { 1278 obj = getGroup(gid, name, pGroup); 1279 } 1280 } 1281 finally { 1282 try { 1283 H5.H5Gclose(gid); 1284 } 1285 catch (Exception ex) { 1286 log.debug("{} H5Gclose failure: ", path, ex); 1287 } 1288 } 1289 } 1290 else if (objType == HDF5Constants.H5O_TYPE_NAMED_DATATYPE) { 1291 obj = new H5Datatype(this, name, pPath); 1292 } 1293 } 1294 try { 1295 H5.H5Oclose(oid); 1296 } 1297 catch (Exception ex) { 1298 ex.printStackTrace(); 1299 } 1300 } 1301 catch (Exception ex) { 1302 log.debug("Exception finding obj {}", path); 1303 obj = null; 1304 } 1305 finally { 1306 if ((fid_before_open <= 0) && (obj == null)) { 1307 // close the fid that is not attached to any object 1308 try { 1309 H5.H5Fclose(fid); 1310 } 1311 catch (Exception ex) { 1312 log.debug("[] H5Fclose failure: ", path, ex); 1313 } 1314 fid = fid_before_open; 1315 } 1316 } 1317 log.trace("H5File:get finish"); 1318 1319 return obj; 1320 } 1321 1322 /* 1323 * (non-Javadoc) 1324 * 1325 * @see hdf.object.FileFormat#createDatatype(int, int, int, int, java.lang.String) 1326 */ 1327 @Override 1328 public Datatype createDatatype(int tclass, int 
tsize, int torder, int tsign, String name) throws Exception { 1329 return createDatatype(tclass, tsize, torder, tsign, null, name); 1330 } 1331 1332 /* 1333 * (non-Javadoc) 1334 * 1335 * @see hdf.object.FileFormat#createDatatype(int, int, int, int, Datatype, java.lang.String) 1336 */ 1337 @Override 1338 public Datatype createDatatype(int tclass, int tsize, int torder, int tsign, Datatype tbase, String name) 1339 throws Exception { 1340 int tid = -1; 1341 H5Datatype dtype = null; 1342 1343 log.trace("createDatatype with name={} start", name); 1344 try { 1345 H5Datatype t = (H5Datatype) createDatatype(tclass, tsize, torder, tsign, tbase); 1346 if ((tid = t.toNative()) < 0) 1347 throw new Exception("toNative failed"); 1348 1349 H5.H5Tcommit(fid, name, tid, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, 1350 HDF5Constants.H5P_DEFAULT); 1351 1352 byte[] ref_buf = H5.H5Rcreate(fid, name, HDF5Constants.H5R_OBJECT, -1); 1353 long l = HDFNativeData.byteToLong(ref_buf, 0); 1354 1355 long[] oid = new long[1]; 1356 oid[0] = l; // save the object ID 1357 1358 dtype = new H5Datatype(this, null, name); 1359 1360 } 1361 finally { 1362 H5.H5Tclose(tid); 1363 } 1364 1365 log.trace("createDatatype with name={} finish", name); 1366 return dtype; 1367 } 1368 1369 /*************************************************************************** 1370 * Methods related to Datatypes and HObjects in HDF5 Files. Strictly speaking, these methods aren't related to 1371 * H5File and the actions could be carried out through the H5Group, H5Datatype and H5*DS classes. But, in some cases 1372 * they allow a null input and expect the generated object to be of HDF5 type. So, we put them in the H5File class 1373 * so that we create the proper type of HObject... H5Group for example. 1374 * 1375 * Here again, if there could be Implementation Class methods we'd use those. But, since we can't override class 1376 * methods (they can only be shadowed in Java), these are instance methods. 
     *
     **************************************************************************/

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.FileFormat#createDatatype(int, int, int, int)
     */
    @Override
    public Datatype createDatatype(int tclass, int tsize, int torder, int tsign) throws Exception {
        log.trace("createDatatype");
        // In-memory (uncommitted) datatype.
        return new H5Datatype(tclass, tsize, torder, tsign);
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.FileFormat#createDatatype(int, int, int, int, Datatype)
     */
    @Override
    public Datatype createDatatype(int tclass, int tsize, int torder, int tsign, Datatype tbase) throws Exception {
        log.trace("createDatatype with base");
        // In-memory (uncommitted) datatype built on a base datatype.
        return new H5Datatype(tclass, tsize, torder, tsign, tbase);
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.FileFormat#createScalarDS(java.lang.String, hdf.object.Group, hdf.object.Datatype,
     * long[], long[], long[], int, java.lang.Object)
     */
    @Override
    public Dataset createScalarDS(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims, long[] chunks,
            int gzip, Object fillValue, Object data) throws Exception {
        if (pgroup == null) {
            // create new dataset at the root group by default
            pgroup = (Group) get("/");
        }

        log.trace("createScalarDS name={}", name);
        return H5ScalarDS.create(name, pgroup, type, dims, maxdims, chunks, gzip, fillValue, data);
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.FileFormat#createCompoundDS(java.lang.String, hdf.object.Group, long[], long[], long[],
     * int, java.lang.String[], hdf.object.Datatype[], int[], java.lang.Object)
     */
    @Override
    public Dataset createCompoundDS(String name, Group pgroup, long[] dims, long[] maxdims, long[] chunks, int gzip,
            String[] memberNames, Datatype[] memberDatatypes, int[] memberSizes, Object data) throws Exception {
        // NOTE(review): memberNames is dereferenced without a null check; a null
        // argument fails with NullPointerException here — TODO confirm intended.
        int nMembers = memberNames.length;
        int memberRanks[] = new int[nMembers];
        long memberDims[][] = new long[nMembers][1];
        Dataset ds = null;

        // Every member is treated as rank-1; its length comes from memberSizes,
        // defaulting to 1 when no sizes are given.
        for (int i = 0; i < nMembers; i++) {
            memberRanks[i] = 1;
            if (memberSizes == null) {
                memberDims[i][0] = 1;
            }
            else {
                memberDims[i][0] = memberSizes[i];
            }
        }

        if (pgroup == null) {
            // create new dataset at the root group by default
            pgroup = (Group) get("/");
        }
        log.trace("createCompoundDS name={}", name);
        ds = H5CompoundDS.create(name, pgroup, dims, maxdims, chunks, gzip, memberNames, memberDatatypes, memberRanks,
                memberDims, data);

        return ds;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.FileFormat#createImage(java.lang.String, hdf.object.Group, hdf.object.Datatype,
     * long[], long[], long[], int, int, int, java.lang.Object)
     */
    @Override
    public Dataset createImage(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims, long[] chunks,
            int gzip, int ncomp, int interlace, Object data) throws Exception {
        if (pgroup == null) { // create at the root group by default
            pgroup = (Group) get("/");
        }

        H5ScalarDS dataset = (H5ScalarDS)H5ScalarDS.create(name, pgroup, type, dims, maxdims, chunks, gzip, data);

        // Tag the new dataset with the standard HDF5 image attributes; a failure
        // here leaves a plain dataset rather than aborting the creation.
        try {
            H5File.createImageAttributes(dataset, interlace);
            dataset.setIsImage(true);
        }
        catch (Exception ex) {
            log.debug("{} createImageAttributtes failure: ", name, ex);
        }

        return dataset;
    }

    /***
     * Creates a new group with specified name in existing group.
1483 * 1484 * @see hdf.object.FileFormat#createGroup(java.lang.String, hdf.object.Group) 1485 */ 1486 @Override 1487 public Group createGroup(String name, Group pgroup) throws Exception { 1488 return this.createGroup(name, pgroup, HDF5Constants.H5P_DEFAULT); 1489 1490 } 1491 1492 /*** 1493 * Creates a new group with specified name in existing group and with the group creation properties list, gplist. 1494 * 1495 * @see hdf.object.h5.H5Group#create(java.lang.String, hdf.object.Group, int...) 1496 * 1497 */ 1498 public Group createGroup(String name, Group pgroup, int... gplist) throws Exception { 1499 // create new group at the root 1500 if (pgroup == null) { 1501 pgroup = (Group) this.get("/"); 1502 } 1503 1504 return H5Group.create(name, pgroup, gplist); 1505 } 1506 1507 /*** 1508 * Creates the group creation property list identifier, gcpl. This identifier is used when creating Groups. 1509 * 1510 * @see hdf.object.FileFormat#createGcpl(int, int, int) 1511 * 1512 */ 1513 public int createGcpl(int creationorder, int maxcompact, int mindense) throws Exception { 1514 int gcpl = -1; 1515 try { 1516 gcpl = H5.H5Pcreate(HDF5Constants.H5P_GROUP_CREATE); 1517 if (gcpl >= 0) { 1518 // Set link creation order. 1519 if (creationorder == Group.CRT_ORDER_TRACKED) { 1520 H5.H5Pset_link_creation_order(gcpl, HDF5Constants.H5P_CRT_ORDER_TRACKED); 1521 } 1522 else if (creationorder == Group.CRT_ORDER_INDEXED) { 1523 H5.H5Pset_link_creation_order(gcpl, HDF5Constants.H5P_CRT_ORDER_TRACKED 1524 + HDF5Constants.H5P_CRT_ORDER_INDEXED); 1525 } 1526 // Set link storage. 
1527 H5.H5Pset_link_phase_change(gcpl, maxcompact, mindense); 1528 } 1529 } 1530 catch (Exception ex) { 1531 ex.printStackTrace(); 1532 } 1533 1534 return gcpl; 1535 } 1536 1537 /* 1538 * (non-Javadoc) 1539 * 1540 * @see hdf.object.FileFormat#createLink(hdf.object.Group, java.lang.String, hdf.object.HObject) 1541 */ 1542 @Override 1543 public HObject createLink(Group parentGroup, String name, Object currentObj) throws Exception { 1544 if (currentObj instanceof HObject) 1545 return this.createLink(parentGroup, name, (HObject) currentObj, Group.LINK_TYPE_HARD); 1546 else if (currentObj instanceof String) 1547 return this.createLink(parentGroup, name, (String) currentObj, Group.LINK_TYPE_HARD); 1548 1549 return null; 1550 } 1551 1552 /** 1553 * Creates a link to an object in the open file. 1554 * <p> 1555 * If parentGroup is null, the new link is created in the root group. 1556 * 1557 * @param parentGroup 1558 * The group where the link is created. 1559 * @param name 1560 * The name of the link. 1561 * @param currentObj 1562 * The existing object the new link will reference. 1563 * @param lType 1564 * The type of link to be created. It can be a hard link, a soft link or an external link. 1565 * 1566 * @return The object pointed to by the new link if successful; otherwise returns null. 1567 * 1568 * @throws Exception 1569 * The exceptions thrown vary depending on the implementing class. 
1570 */ 1571 public HObject createLink(Group parentGroup, String name, HObject currentObj, int lType) throws Exception { 1572 HObject obj = null; 1573 int type = 0; 1574 String current_full_name = null, new_full_name = null, parent_path = null; 1575 1576 if (currentObj == null) { 1577 throw new HDF5Exception("The object pointed by the link cannot be null."); 1578 } 1579 if ((parentGroup == null) || parentGroup.isRoot()) { 1580 parent_path = HObject.separator; 1581 } 1582 else { 1583 parent_path = parentGroup.getPath() + HObject.separator + parentGroup.getName() + HObject.separator; 1584 } 1585 1586 new_full_name = parent_path + name; 1587 1588 if (lType == Group.LINK_TYPE_HARD) 1589 type = HDF5Constants.H5L_TYPE_HARD; 1590 1591 else if (lType == Group.LINK_TYPE_SOFT) 1592 type = HDF5Constants.H5L_TYPE_SOFT; 1593 1594 else if (lType == Group.LINK_TYPE_EXTERNAL) 1595 type = HDF5Constants.H5L_TYPE_EXTERNAL; 1596 1597 if (H5.H5Lexists(fid, new_full_name, HDF5Constants.H5P_DEFAULT)) { 1598 H5.H5Ldelete(fid, new_full_name, HDF5Constants.H5P_DEFAULT); 1599 } 1600 1601 if (type == HDF5Constants.H5L_TYPE_HARD) { 1602 if ((currentObj instanceof Group) && ((Group) currentObj).isRoot()) { 1603 throw new HDF5Exception("Cannot make a link to the root group."); 1604 } 1605 current_full_name = currentObj.getPath() + HObject.separator + currentObj.getName(); 1606 1607 H5.H5Lcreate_hard(fid, current_full_name, fid, new_full_name, HDF5Constants.H5P_DEFAULT, 1608 HDF5Constants.H5P_DEFAULT); 1609 } 1610 1611 else if (type == HDF5Constants.H5L_TYPE_SOFT) { 1612 H5.H5Lcreate_soft(currentObj.getFullName(), fid, new_full_name, HDF5Constants.H5P_DEFAULT, 1613 HDF5Constants.H5P_DEFAULT); 1614 } 1615 1616 else if (type == HDF5Constants.H5L_TYPE_EXTERNAL) { 1617 H5.H5Lcreate_external(currentObj.getFile(), currentObj.getFullName(), fid, new_full_name, 1618 HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 1619 } 1620 1621 if (currentObj instanceof Group) { 1622 obj = new H5Group(this, 
name, parent_path, parentGroup); 1623 } 1624 else if (currentObj instanceof H5Datatype) { 1625 obj = new H5Datatype(this, name, parent_path); 1626 } 1627 else if (currentObj instanceof H5CompoundDS) { 1628 obj = new H5CompoundDS(this, name, parent_path); 1629 } 1630 else if (currentObj instanceof H5ScalarDS) { 1631 obj = new H5ScalarDS(this, name, parent_path); 1632 } 1633 return obj; 1634 } 1635 1636 /** 1637 * Creates a soft or external links to objects in a file that do not exist at the time the link is created. 1638 * 1639 * @param parentGroup 1640 * The group where the link is created. 1641 * @param name 1642 * The name of the link. 1643 * @param currentObj 1644 * The name of the object the new link will reference. The object doesn't have to exist. 1645 * @param lType 1646 * The type of link to be created. 1647 * 1648 * @return The H5Link object pointed to by the new link if successful; otherwise returns null. 1649 * 1650 * @throws Exception 1651 * The exceptions thrown vary depending on the implementing class. 
1652 */ 1653 public HObject createLink(Group parentGroup, String name, String currentObj, int lType) throws Exception { 1654 HObject obj = null; 1655 int type = 0; 1656 String new_full_name = null, parent_path = null; 1657 1658 if (currentObj == null) { 1659 throw new HDF5Exception("The object pointed by the link cannot be null."); 1660 } 1661 if ((parentGroup == null) || parentGroup.isRoot()) { 1662 parent_path = HObject.separator; 1663 } 1664 else { 1665 parent_path = parentGroup.getPath() + HObject.separator + parentGroup.getName() + HObject.separator; 1666 } 1667 1668 new_full_name = parent_path + name; 1669 1670 if (lType == Group.LINK_TYPE_HARD) 1671 type = HDF5Constants.H5L_TYPE_HARD; 1672 1673 else if (lType == Group.LINK_TYPE_SOFT) 1674 type = HDF5Constants.H5L_TYPE_SOFT; 1675 1676 else if (lType == Group.LINK_TYPE_EXTERNAL) 1677 type = HDF5Constants.H5L_TYPE_EXTERNAL; 1678 1679 if (H5.H5Lexists(fid, new_full_name, HDF5Constants.H5P_DEFAULT)) { 1680 H5.H5Ldelete(fid, new_full_name, HDF5Constants.H5P_DEFAULT); 1681 } 1682 1683 if (type == HDF5Constants.H5L_TYPE_SOFT) { 1684 H5.H5Lcreate_soft(currentObj, fid, new_full_name, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 1685 } 1686 1687 else if (type == HDF5Constants.H5L_TYPE_EXTERNAL) { 1688 String fileName = null; 1689 String objectName = null; 1690 1691 // separate the object name and the file name 1692 fileName = currentObj.substring(0, currentObj.lastIndexOf(FileFormat.FILE_OBJ_SEP)); 1693 objectName = currentObj.substring(currentObj.indexOf(FileFormat.FILE_OBJ_SEP)); 1694 objectName = objectName.substring(3); 1695 1696 H5.H5Lcreate_external(fileName, objectName, fid, new_full_name, HDF5Constants.H5P_DEFAULT, 1697 HDF5Constants.H5P_DEFAULT); 1698 } 1699 1700 if (name.startsWith(HObject.separator)) { 1701 name = name.substring(1); 1702 } 1703 obj = new H5Link(this, name, parent_path); 1704 1705 return obj; 1706 } 1707 1708 /** 1709 * reload the sub-tree structure from file. 
1710 * <p> 1711 * reloadTree(Group g) is useful when the structure of the group in file is changed while the group structure in 1712 * memory is not changed. 1713 * 1714 * @param g 1715 * the group where the structure is to be reloaded in memory 1716 */ 1717 public void reloadTree(Group g) { 1718 if (fid < 0 || rootNode == null || g == null) 1719 return; 1720 1721 HObject theObj = null; 1722 DefaultMutableTreeNode theNode = null; 1723 1724 if (g.equals(rootNode.getUserObject())) 1725 theNode = rootNode; 1726 else { 1727 Enumeration<?> local_enum = rootNode.breadthFirstEnumeration(); 1728 while (local_enum.hasMoreElements()) { 1729 theNode = (DefaultMutableTreeNode) local_enum.nextElement(); 1730 theObj = (HObject) theNode.getUserObject(); 1731 if (g.equals(theObj)) 1732 break; 1733 } 1734 } 1735 1736 theNode.removeAllChildren(); 1737 depth_first(theNode, Integer.MIN_VALUE); 1738 } 1739 1740 /* 1741 * (non-Javadoc) NOTE: Object references are copied but not updated by this method. 1742 * 1743 * @see hdf.object.FileFormat#copy(hdf.object.HObject, hdf.object.Group, java.lang.String) 1744 */ 1745 @Override 1746 public TreeNode copy(HObject srcObj, Group dstGroup, String dstName) throws Exception { 1747 TreeNode newNode = null; 1748 1749 if ((srcObj == null) || (dstGroup == null)) { 1750 return null; 1751 } 1752 1753 if (dstName == null) { 1754 dstName = srcObj.getName(); 1755 } 1756 1757 List<HObject> members = dstGroup.getMemberList(); 1758 int n = members.size(); 1759 for (int i = 0; i < n; i++) { 1760 HObject obj = (HObject) members.get(i); 1761 String name = obj.getName(); 1762 while (name.equals(dstName)) 1763 dstName += "~copy"; 1764 } 1765 1766 if (srcObj instanceof Dataset) { 1767 newNode = copyDataset((Dataset) srcObj, (H5Group) dstGroup, dstName); 1768 } 1769 else if (srcObj instanceof H5Group) { 1770 newNode = copyGroup((H5Group) srcObj, (H5Group) dstGroup, dstName); 1771 } 1772 else if (srcObj instanceof H5Datatype) { 1773 newNode = 
copyDatatype((H5Datatype) srcObj, (H5Group) dstGroup, dstName); 1774 } 1775 1776 return newNode; 1777 } 1778 1779 /* 1780 * (non-Javadoc) 1781 * 1782 * @see hdf.object.FileFormat#delete(hdf.object.HObject) 1783 */ 1784 @Override 1785 public void delete(HObject obj) throws Exception { 1786 if ((obj == null) || (fid < 0)) { 1787 return; 1788 } 1789 1790 String name = obj.getPath() + obj.getName(); 1791 1792 H5.H5Ldelete(fid, name, HDF5Constants.H5P_DEFAULT); 1793 } 1794 1795 /* 1796 * (non-Javadoc) 1797 * 1798 * @see hdf.object.FileFormat#writeAttribute(hdf.object.HObject, hdf.object.Attribute, boolean) 1799 */ 1800 @Override 1801 public void writeAttribute(HObject obj, Attribute attr, boolean attrExisted) throws HDF5Exception { 1802 String obj_name = obj.getFullName(); 1803 String name = attr.getName(); 1804 int tid = -1, sid = -1, aid = -1; 1805 log.trace("{} writeAttribute start", name); 1806 1807 int objID = obj.open(); 1808 if (objID < 0) { 1809 return; 1810 } 1811 1812 if ((tid = attr.getType().toNative()) >= 0) { 1813 log.trace("{} writeAttribute tid from native", name); 1814 try { 1815 if (attr.isScalar()) 1816 sid = H5.H5Screate(HDF5Constants.H5S_SCALAR); 1817 else 1818 sid = H5.H5Screate_simple(attr.getRank(), attr.getDataDims(), null); 1819 1820 if (attrExisted) { 1821 aid = H5.H5Aopen_by_name(objID, obj_name, name, HDF5Constants.H5P_DEFAULT, 1822 HDF5Constants.H5P_DEFAULT); 1823 } 1824 else { 1825 aid = H5.H5Acreate(objID, name, tid, sid, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 1826 } 1827 log.trace("{} writeAttribute aid opened/created", name); 1828 1829 // update value of the attribute 1830 Object attrValue = attr.getValue(); 1831 log.trace("{} writeAttribute getvalue", name); 1832 if (attrValue != null) { 1833 boolean isVlen = (H5.H5Tget_class(tid) == HDF5Constants.H5T_VLEN || H5.H5Tis_variable_str(tid)); 1834 if (isVlen) { 1835 log.trace("{} writeAttribute isvlen", name); 1836 try { 1837 /* 1838 * must use native type to write attribute 
data to file (see bug 1069) 1839 */ 1840 int tmptid = tid; 1841 tid = H5.H5Tget_native_type(tmptid); 1842 try { 1843 H5.H5Tclose(tmptid); 1844 } 1845 catch (Exception ex) { 1846 log.debug("{} writeAttribute H5Tclose failure: ", name, ex); 1847 } 1848 log.trace("{} writeAttribute H5.H5AwriteVL", name); 1849 if ((attrValue instanceof String) || (attr.getDataDims().length == 1)) { 1850 H5.H5AwriteVL(aid, tid, (String[]) attrValue); 1851 } 1852 else { 1853 log.info("Datatype is not a string, unable to write {} data", name); 1854 } 1855 } 1856 catch (Exception ex) { 1857 log.debug("{} writeAttribute native type failure: ", name, ex); 1858 } 1859 } 1860 else { 1861 if (attr.getType().getDatatypeClass() == Datatype.CLASS_REFERENCE && attrValue instanceof String) { 1862 // reference is a path+name to the object 1863 attrValue = H5.H5Rcreate(getFID(), (String) attrValue, HDF5Constants.H5R_OBJECT, -1); 1864 log.trace("{} writeAttribute CLASS_REFERENCE", name); 1865 } 1866 else if (Array.get(attrValue, 0) instanceof String) { 1867 int size = H5.H5Tget_size(tid); 1868 int len = ((String[]) attrValue).length; 1869 byte[] bval = Dataset.stringToByte((String[]) attrValue, size); 1870 if (bval != null && bval.length == size * len) { 1871 bval[bval.length - 1] = 0; 1872 attrValue = bval; 1873 } 1874 log.trace("{} writeAttribute Array", name); 1875 } 1876 1877 try { 1878 /* 1879 * must use native type to write attribute data to file (see bug 1069) 1880 */ 1881 int tmptid = tid; 1882 tid = H5.H5Tget_native_type(tmptid); 1883 try { 1884 H5.H5Tclose(tmptid); 1885 } 1886 catch (Exception ex) { 1887 log.debug("{} writeAttribute H5Tclose failure: ", name, ex); 1888 } 1889 log.trace("{} writeAttribute H5.H5Awrite", name); 1890 H5.H5Awrite(aid, tid, attrValue); 1891 } 1892 catch (Exception ex) { 1893 log.debug("{} writeAttribute native type failure: ", name, ex); 1894 } 1895 } 1896 } // if (attrValue != null) { 1897 } 1898 finally { 1899 try { 1900 H5.H5Tclose(tid); 1901 } 1902 catch 
(Exception ex) { 1903 log.debug("{} writeAttribute H5Tclose failure: ", name, ex); 1904 } 1905 try { 1906 H5.H5Sclose(sid); 1907 } 1908 catch (Exception ex) { 1909 log.debug("{} writeAttribute H5Sclose failure: ", name, ex); 1910 } 1911 try { 1912 H5.H5Aclose(aid); 1913 } 1914 catch (Exception ex) { 1915 log.debug("{} writeAttribute H5Aclose failure: ", name, ex); 1916 } 1917 } 1918 } 1919 else { 1920 log.debug("{} writeAttribute toNative failure: ", name); 1921 } 1922 1923 obj.close(objID); 1924 log.trace("{} writeAttribute finish", name); 1925 } 1926 1927 /*************************************************************************** 1928 * Implementations for methods specific to H5File 1929 **************************************************************************/ 1930 1931 /** 1932 * Opens a file with specific file access property list. 1933 * <p> 1934 * This function does the same as "int open()" except the you can also pass an HDF5 file access property to file 1935 * open. For example, 1936 * 1937 * <pre> 1938 * // All open objects remaining in the file are closed then file is closed 1939 * int plist = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS); 1940 * H5.H5Pset_fclose_degree(plist, HDF5Constants.H5F_CLOSE_STRONG); 1941 * int fid = open(plist); 1942 * </pre> 1943 * 1944 * @param plist 1945 * a file access property list identifier. 1946 * 1947 * @return the file identifier if successful; otherwise returns negative value. 1948 * 1949 * @throws Exception 1950 * If there is a failure. 1951 */ 1952 public int open(int plist) throws Exception { 1953 return open(true, plist); 1954 } 1955 1956 /*************************************************************************** 1957 * Private methods. 1958 **************************************************************************/ 1959 1960 /** 1961 * Opens access to this file. 1962 * 1963 * @param loadFullHierarchy 1964 * if true, load the full hierarchy into memory; otherwise just opens the file identifier. 
1965 * 1966 * @return the file identifier if successful; otherwise returns negative value. 1967 * 1968 * @throws Exception 1969 * If there is a failure. 1970 */ 1971 private int open(boolean loadFullHierarchy) throws Exception { 1972 int the_fid = -1; 1973 1974 int plist = HDF5Constants.H5P_DEFAULT; 1975 1976 /* 1977 * // BUG: HDF5Constants.H5F_CLOSE_STRONG does not flush cache try { //All open objects remaining in the file 1978 * are closed // then file is closed plist = H5.H5Pcreate (HDF5Constants.H5P_FILE_ACCESS); 1979 * H5.H5Pset_fclose_degree ( plist, HDF5Constants.H5F_CLOSE_STRONG); } catch (Exception ex) {;} the_fid = 1980 * open(loadFullHierarchy, plist); try { H5.H5Pclose(plist); } catch (Exception ex) {} 1981 */ 1982 1983 log.trace("H5File:open loadFull={}", loadFullHierarchy); 1984 the_fid = open(loadFullHierarchy, plist); 1985 1986 return the_fid; 1987 } 1988 1989 /** 1990 * Opens access to this file. 1991 * 1992 * @param loadFullHierarchy 1993 * if true, load the full hierarchy into memory; otherwise just opens the file identifier. 1994 * 1995 * @return the file identifier if successful; otherwise returns negative value. 1996 * 1997 * @throws Exception 1998 * If there is a failure. 1999 */ 2000 private int open(boolean loadFullHierarchy, int plist) throws Exception { 2001 if (fid > 0) { 2002 return fid; // file is opened already 2003 } 2004 log.trace("open(loadFullHierarchy = {}, plist = {}) start", loadFullHierarchy, plist); 2005 2006 // The cwd may be changed at Dataset.read() by System.setProperty("user.dir", newdir) 2007 // to make it work for external datasets. We need to set it back 2008 // before the file is closed/opened. 
2009 String rootPath = System.getProperty("hdfview.workdir"); 2010 if (rootPath == null) { 2011 rootPath = System.getProperty("user.dir"); 2012 } 2013 System.setProperty("user.dir", rootPath);//H5.H5Dchdir_ext(rootPath); 2014 2015 // check for valid file access permission 2016 if (flag < 0) { 2017 throw new HDF5Exception("Invalid access identifer -- " + flag); 2018 } 2019 else if (HDF5Constants.H5F_ACC_CREAT == flag) { 2020 // create a new file 2021 log.trace("open: create file"); 2022 fid = H5.H5Fcreate(fullFileName, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, 2023 HDF5Constants.H5P_DEFAULT); 2024 H5.H5Fflush(fid, HDF5Constants.H5F_SCOPE_LOCAL); 2025 H5.H5Fclose(fid); 2026 flag = HDF5Constants.H5F_ACC_RDWR; 2027 } 2028 else if (!exists()) { 2029 throw new HDF5Exception("File does not exist -- " + fullFileName); 2030 } 2031 else if (((flag == HDF5Constants.H5F_ACC_RDWR) || (flag == HDF5Constants.H5F_ACC_CREAT)) && !canWrite()) { 2032 throw new HDF5Exception("Cannot write file, try open as read-only -- " + fullFileName); 2033 } 2034 else if ((flag == HDF5Constants.H5F_ACC_RDONLY) && !canRead()) { 2035 throw new HDF5Exception("Cannot read file -- " + fullFileName); 2036 } 2037 2038 try { 2039 log.trace("open: open file"); 2040 fid = H5.H5Fopen(fullFileName, flag, plist); 2041 } 2042 catch (Exception ex) { 2043 log.trace("open: open file exception", ex); 2044 try { 2045 log.trace("open: open file read only"); 2046 fid = H5.H5Fopen(fullFileName, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); 2047 isReadOnly = true; 2048 } 2049 catch (Exception ex2) { 2050 log.trace("open: open file read only exception", ex2); 2051 // try to see if it is a file family, always open a family file 2052 // from the first one since other files will not be recognized 2053 // as an HDF5 file 2054 File tmpf = new File(fullFileName); 2055 String tmpname = tmpf.getName(); 2056 int idx = tmpname.lastIndexOf("."); 2057 while (idx > 0) { 2058 char c = tmpname.charAt(idx); 2059 
if (c >= '0') 2060 idx--; 2061 else 2062 break; 2063 } 2064 2065 if (idx > 0) { 2066 tmpname = tmpname.substring(0, idx - 1) + "%d" + tmpname.substring(tmpname.lastIndexOf(".")); 2067 int pid = -1; 2068 try { 2069 log.trace("open: open file family"); 2070 pid = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS); 2071 H5.H5Pset_fapl_family(pid, 0, HDF5Constants.H5P_DEFAULT); 2072 fid = H5.H5Fopen(tmpf.getParent() + File.separator + tmpname, flag, pid); 2073 } 2074 finally { 2075 H5.H5Pclose(pid); 2076 } 2077 } 2078 } /* catch (Exception ex) { */ 2079 } 2080 2081 if ((fid >= 0) && loadFullHierarchy) { 2082 // load the hierarchy of the file 2083 rootNode = loadTree(); 2084 } 2085 2086 log.trace("open: finish"); 2087 return fid; 2088 } 2089 2090 /** 2091 * Reads the file structure into memory (tree node) 2092 * 2093 * @return the root node of the file structure. 2094 */ 2095 private DefaultMutableTreeNode loadTree() { 2096 if (fid < 0) { 2097 return null; 2098 } 2099 2100 DefaultMutableTreeNode root = null; 2101 2102 long[] rootOID = { 0 }; 2103 H5Group rootGroup = new H5Group(this, "/", null, // root node does not 2104 // have a parent path 2105 null); // root node does not have a parent node 2106 2107 root = new DefaultMutableTreeNode(rootGroup) { 2108 private static final long serialVersionUID = 991382067363411723L; 2109 2110 @Override 2111 public boolean isLeaf() { 2112 return false; 2113 } 2114 }; 2115 2116 depth_first(root, 0); // reload all 2117 2118 return root; 2119 } 2120 2121 /** 2122 * Retrieves the file structure by depth-first order, recursively. The current implementation retrieves groups and 2123 * datasets only. It does not include named datatypes and soft links. 2124 * <p> 2125 * It also detects and stops loops. A loop is detected if there exists an object with the same object ID by tracing 2126 * a path back up to the root. 2127 * 2128 * @param parentNode 2129 * the parent node. 
2130 */ 2131 private int depth_first(MutableTreeNode parentNode, int nTotal) { 2132 int nelems; 2133 MutableTreeNode node = null; 2134 String fullPath = null; 2135 String ppath = null; 2136 DefaultMutableTreeNode pnode = (DefaultMutableTreeNode) parentNode; 2137 int gid = -1; 2138 log.trace("depth_first: start"); 2139 2140 H5Group pgroup = (H5Group) (pnode.getUserObject()); 2141 ppath = pgroup.getPath(); 2142 2143 if (ppath == null) { 2144 fullPath = HObject.separator; 2145 } 2146 else { 2147 fullPath = ppath + pgroup.getName() + HObject.separator; 2148 } 2149 2150 nelems = 0; 2151 try { 2152 gid = pgroup.open(); 2153 H5G_info_t info = H5.H5Gget_info(gid); 2154 nelems = (int) info.nlinks; 2155 } 2156 catch (HDF5Exception ex) { 2157 nelems = -1; 2158 log.debug("H5Gget_info: ", ex); 2159 } 2160 2161 if (nelems <= 0) { 2162 pgroup.close(gid); 2163 return nTotal; 2164 } 2165 2166 // since each call of H5.H5Gget_objname_by_idx() takes about one second. 2167 // 1,000,000 calls take 12 days. Instead of calling it in a loop, 2168 // we use only one call to get all the information, which takes about 2169 // two seconds 2170 int[] objTypes = new int[nelems]; 2171 long[] fNos = new long[nelems]; 2172 long[] objRefs = new long[nelems]; 2173 String[] objNames = new String[nelems]; 2174 2175 try { 2176 H5.H5Gget_obj_info_full(fid, fullPath, objNames, objTypes, null, fNos, objRefs, indexType, indexOrder); 2177 } 2178 catch (HDF5Exception ex) { 2179 ex.printStackTrace(); 2180 return nTotal; 2181 } 2182 2183 int nStart = getStartMembers(); 2184 int nMax = getMaxMembers(); 2185 2186 String obj_name; 2187 int obj_type; 2188 2189 // Iterate through the file to see members of the group 2190 for (int i = 0; i < nelems; i++) { 2191 obj_name = objNames[i]; 2192 obj_type = objTypes[i]; 2193 log.trace("depth_first: obj_name={}, obj_type={}", obj_name, obj_type); 2194 long oid[] = { objRefs[i], fNos[i] }; 2195 2196 if (obj_name == null) { 2197 continue; 2198 } 2199 2200 nTotal++; 2201 2202 
if (nMax > 0) { 2203 if ((nTotal - nStart) >= nMax) 2204 break; // loaded enough objects 2205 } 2206 2207 boolean skipLoad = false; 2208 if ((nTotal > 0) && (nTotal < nStart)) 2209 skipLoad = true; 2210 2211 // create a new group 2212 if (obj_type == HDF5Constants.H5O_TYPE_GROUP) { 2213 H5Group g = new H5Group(this, obj_name, fullPath, pgroup, oid); // deprecated! 2214 node = new DefaultMutableTreeNode(g) { 2215 private static final long serialVersionUID = 5139629211215794015L; 2216 2217 @Override 2218 public boolean isLeaf() { 2219 return false; 2220 } 2221 }; 2222 pnode.add(node); 2223 pgroup.addToMemberList(g); 2224 2225 // detect and stop loops 2226 // a loop is detected if there exists object with the same 2227 // object ID by tracing path back up to the root. 2228 boolean hasLoop = false; 2229 HObject tmpObj = null; 2230 DefaultMutableTreeNode tmpNode = pnode; 2231 2232 while (tmpNode != null) { 2233 tmpObj = (HObject) tmpNode.getUserObject(); 2234 2235 if (tmpObj.equalsOID(oid) && !(tmpObj.getPath() == null)) { 2236 hasLoop = true; 2237 break; 2238 } 2239 else { 2240 tmpNode = (DefaultMutableTreeNode) tmpNode.getParent(); 2241 } 2242 } 2243 2244 // recursively go through the next group 2245 // stops if it has loop. 
2246 if (!hasLoop) { 2247 nTotal = depth_first(node, nTotal); 2248 } 2249 } 2250 else if (skipLoad) { 2251 continue; 2252 } 2253 else if (obj_type == HDF5Constants.H5O_TYPE_DATASET) { 2254 int did = -1, tid = -1, tclass = -1; 2255 try { 2256 did = H5.H5Dopen(fid, fullPath + obj_name, HDF5Constants.H5P_DEFAULT); 2257 if (did >= 0) { 2258 tid = H5.H5Dget_type(did); 2259 2260 tclass = H5.H5Tget_class(tid); 2261 if ((tclass == HDF5Constants.H5T_ARRAY) || (tclass == HDF5Constants.H5T_VLEN)) { 2262 // for ARRAY, the type is determined by the base type 2263 int btid = H5.H5Tget_super(tid); 2264 int tmpclass = H5.H5Tget_class(btid); 2265 2266 tclass = H5.H5Tget_class(btid); 2267 2268 try { 2269 H5.H5Tclose(btid); 2270 } 2271 catch (Exception ex) { 2272 log.debug("depth_first[{}] {} dataset access H5Tclose failure: ", i, obj_name, ex); 2273 } 2274 } 2275 } 2276 else { 2277 log.debug("depth_first[{}] {} dataset open failure", i, obj_name); 2278 } 2279 } 2280 catch (Exception ex) { 2281 log.debug("depth_first[{}] {} dataset access failure: ", i, obj_name, ex); 2282 } 2283 finally { 2284 try { 2285 H5.H5Tclose(tid); 2286 } 2287 catch (Exception ex) { 2288 log.debug("depth_first[{}] {} dataset access H5Tclose failure: ", i, obj_name, ex); 2289 } 2290 try { 2291 H5.H5Dclose(did); 2292 } 2293 catch (Exception ex) { 2294 log.debug("depth_first[{}] {} dataset access H5Dclose failure: ", i, obj_name, ex); 2295 } 2296 } 2297 Dataset d = null; 2298 if (tclass == HDF5Constants.H5T_COMPOUND) { 2299 // create a new compound dataset 2300 d = new H5CompoundDS(this, obj_name, fullPath, oid); // deprecated! 2301 } 2302 else { 2303 // create a new scalar dataset 2304 d = new H5ScalarDS(this, obj_name, fullPath, oid); // deprecated! 2305 } 2306 2307 node = new DefaultMutableTreeNode(d); 2308 pnode.add(node); 2309 pgroup.addToMemberList(d); 2310 } 2311 else if (obj_type == HDF5Constants.H5O_TYPE_NAMED_DATATYPE) { 2312 Datatype t = new H5Datatype(this, obj_name, fullPath, oid); // deprecated! 
2313 2314 node = new DefaultMutableTreeNode(t); 2315 pnode.add(node); 2316 pgroup.addToMemberList(t); 2317 } 2318 else if (obj_type == HDF5Constants.H5O_TYPE_UNKNOWN) { 2319 H5Link link = new H5Link(this, obj_name, fullPath, oid); 2320 2321 node = new DefaultMutableTreeNode(link); 2322 pnode.add(node); 2323 pgroup.addToMemberList(link); 2324 continue; // do the next one, if the object is not identified. 2325 } 2326 } // for ( i = 0; i < nelems; i++) 2327 2328 pgroup.close(gid); 2329 2330 log.trace("depth_first: finish"); 2331 return nTotal; 2332 } // private depth_first() 2333 2334 private void depth_first_old(MutableTreeNode parentNode) { 2335 int nelems; 2336 MutableTreeNode node = null; 2337 String fullPath = null; 2338 String ppath = null; 2339 DefaultMutableTreeNode pnode = (DefaultMutableTreeNode) parentNode; 2340 int gid = -1; 2341 log.trace("depth_first_old: start"); 2342 2343 H5Group pgroup = (H5Group) (pnode.getUserObject()); 2344 ppath = pgroup.getPath(); 2345 2346 if (ppath == null) { 2347 fullPath = HObject.separator; 2348 } 2349 else { 2350 fullPath = ppath + pgroup.getName() + HObject.separator; 2351 } 2352 2353 nelems = 0; 2354 try { 2355 gid = pgroup.open(); 2356 H5G_info_t info = H5.H5Gget_info(gid); 2357 nelems = (int) info.nlinks; 2358 } 2359 catch (HDF5Exception ex) { 2360 nelems = -1; 2361 } 2362 2363 if (nelems <= 0) { 2364 pgroup.close(gid); 2365 return; 2366 } 2367 2368 // since each call of H5.H5Gget_objname_by_idx() takes about one second. 2369 // 1,000,000 calls take 12 days. 
Instead of calling it in a loop, 2370 // we use only one call to get all the information, which takes about 2371 // two seconds 2372 int[] objTypes = new int[nelems]; 2373 long[] fNos = new long[nelems]; 2374 long[] objRefs = new long[nelems]; 2375 String[] objNames = new String[nelems]; 2376 2377 try { 2378 H5.H5Gget_obj_info_full(fid, fullPath, objNames, objTypes, null, fNos, objRefs, indexType, indexOrder); 2379 } 2380 catch (HDF5Exception ex) { 2381 ex.printStackTrace(); 2382 return; 2383 } 2384 2385 int startIndex = Math.max(0, getStartMembers()); 2386 int endIndex = getMaxMembers(); 2387 if (endIndex >= nelems) { 2388 endIndex = nelems; 2389 startIndex = 0; // load all members 2390 } 2391 endIndex += startIndex; 2392 endIndex = Math.min(endIndex, nelems); 2393 2394 String obj_name; 2395 int obj_type; 2396 // int lnk_type; 2397 2398 // Iterate through the file to see members of the group 2399 for (int i = startIndex; i < endIndex; i++) { 2400 obj_name = objNames[i]; 2401 obj_type = objTypes[i]; 2402 log.trace("depth_first_old: obj_name={}, obj_type={}", obj_name, obj_type); 2403 long oid[] = { objRefs[i], fNos[i] }; 2404 2405 if (obj_name == null) { 2406 continue; 2407 } 2408 2409 // we need to use the OID for this release. we will rewrite this so 2410 // that we do not use the deprecated constructor 2411 if (obj_type == HDF5Constants.H5O_TYPE_UNKNOWN) { 2412 H5Link link = new H5Link(this, obj_name, fullPath, oid); 2413 2414 node = new DefaultMutableTreeNode(link); 2415 pnode.add(node); 2416 pgroup.addToMemberList(link); 2417 continue; // do the next one, if the object is not identified. 2418 } 2419 2420 // create a new group 2421 if (obj_type == HDF5Constants.H5O_TYPE_GROUP) { 2422 H5Group g = new H5Group(this, obj_name, fullPath, pgroup, oid); // deprecated! 
2423 node = new DefaultMutableTreeNode(g) { 2424 private static final long serialVersionUID = 5139629211215794015L; 2425 2426 @Override 2427 public boolean isLeaf() { 2428 return false; 2429 } 2430 }; 2431 pnode.add(node); 2432 pgroup.addToMemberList(g); 2433 2434 // detect and stop loops 2435 // a loop is detected if there exists object with the same 2436 // object ID by tracing path back up to the root. 2437 boolean hasLoop = false; 2438 HObject tmpObj = null; 2439 DefaultMutableTreeNode tmpNode = pnode; 2440 2441 while (tmpNode != null) { 2442 tmpObj = (HObject) tmpNode.getUserObject(); 2443 2444 if (tmpObj.equalsOID(oid)) { 2445 hasLoop = true; 2446 break; 2447 } 2448 else { 2449 tmpNode = (DefaultMutableTreeNode) tmpNode.getParent(); 2450 } 2451 } 2452 2453 // recursively go through the next group 2454 // stops if it has loop. 2455 if (!hasLoop) { 2456 depth_first_old(node); 2457 } 2458 } 2459 else if (obj_type == HDF5Constants.H5O_TYPE_DATASET) { 2460 int did = -1, tid = -1, tclass = -1; 2461 try { 2462 did = H5.H5Dopen(fid, fullPath + obj_name, HDF5Constants.H5P_DEFAULT); 2463 if (did >= 0) { 2464 tid = H5.H5Dget_type(did); 2465 2466 tclass = H5.H5Tget_class(tid); 2467 if ((tclass == HDF5Constants.H5T_ARRAY) || (tclass == HDF5Constants.H5T_VLEN)) { 2468 // for ARRAY, the type is determined by the base type 2469 int btid = H5.H5Tget_super(tid); 2470 int tmpclass = H5.H5Tget_class(btid); 2471 2472 // cannot deal with ARRAY of COMPOUND in compound table 2473 // viewer 2474 if (tmpclass != HDF5Constants.H5T_COMPOUND) 2475 tclass = H5.H5Tget_class(btid); 2476 2477 try { 2478 H5.H5Tclose(btid); 2479 } 2480 catch (Exception ex) { 2481 log.debug("depth_first_old[{}] {} dataset access H5Tclose failure: ", i, obj_name, ex); 2482 } 2483 } 2484 } 2485 else { 2486 log.debug("depth_first_old[{}] {} dataset open failure", i, obj_name); 2487 } 2488 } 2489 catch (HDF5Exception ex) { 2490 log.debug("depth_first_old[{}] {} dataset access failure: ", i, obj_name, ex); 2491 } 
2492 finally { 2493 try { 2494 H5.H5Tclose(tid); 2495 } 2496 catch (Exception ex) { 2497 log.debug("depth_first_old[{}] {} dataset access H5Tclose failure: ", i, obj_name, ex); 2498 } 2499 try { 2500 H5.H5Dclose(did); 2501 } 2502 catch (Exception ex) { 2503 log.debug("depth_first_old[{}] {} dataset access H5Tclose failure: ", i, obj_name, ex); 2504 } 2505 } 2506 Dataset d = null; 2507 if (tclass == HDF5Constants.H5T_COMPOUND) { 2508 // create a new compound dataset 2509 d = new H5CompoundDS(this, obj_name, fullPath, oid); // deprecated! 2510 } 2511 else { 2512 // create a new scalar dataset 2513 d = new H5ScalarDS(this, obj_name, fullPath, oid); // deprecated! 2514 } 2515 2516 node = new DefaultMutableTreeNode(d); 2517 pnode.add(node); 2518 pgroup.addToMemberList(d); 2519 } 2520 else if (obj_type == HDF5Constants.H5O_TYPE_NAMED_DATATYPE) { 2521 Datatype t = new H5Datatype(this, obj_name, fullPath, oid); // deprecated! 2522 2523 node = new DefaultMutableTreeNode(t); 2524 pnode.add(node); 2525 pgroup.addToMemberList(t); 2526 } 2527 } // for ( i = 0; i < nelems; i++) 2528 2529 pgroup.close(gid); 2530 log.trace("depth_first_old: finish"); 2531 } // private depth_first() 2532 2533 private TreeNode copyDataset(Dataset srcDataset, H5Group pgroup, String dstName) throws Exception { 2534 Dataset dataset = null; 2535 TreeNode newNode; 2536 int srcdid = -1, dstdid = -1; 2537 int ocp_plist_id = -1; 2538 String dname = null, path = null; 2539 2540 if (pgroup.isRoot()) { 2541 path = HObject.separator; 2542 } 2543 else { 2544 path = pgroup.getPath() + pgroup.getName() + HObject.separator; 2545 } 2546 2547 if ((dstName == null) || dstName.equals(HObject.separator) || (dstName.length() < 1)) { 2548 dstName = srcDataset.getName(); 2549 } 2550 dname = path + dstName; 2551 2552 try { 2553 srcdid = srcDataset.open(); 2554 dstdid = pgroup.open(); 2555 2556 try { 2557 ocp_plist_id = H5.H5Pcreate(HDF5Constants.H5P_OBJECT_COPY); 2558 H5.H5Pset_copy_object(ocp_plist_id, 
HDF5Constants.H5O_COPY_EXPAND_REFERENCE_FLAG);
                H5.H5Ocopy(srcdid, ".", dstdid, dstName, ocp_plist_id, HDF5Constants.H5P_DEFAULT);
            }
            catch (Exception ex) {
                log.debug("copyDataset {} failure: ", dname, ex);
            }
            finally {
                try {
                    H5.H5Pclose(ocp_plist_id);
                }
                catch (Exception ex) {
                    log.debug("copyDataset {} H5Pclose failure: ", dname, ex);
                }
            }

            // wrap the copied object in the matching Dataset subclass
            if (srcDataset instanceof H5ScalarDS) {
                dataset = new H5ScalarDS(pgroup.getFileFormat(), dstName, path);
            }
            else {
                dataset = new H5CompoundDS(pgroup.getFileFormat(), dstName, path);
            }

            pgroup.addToMemberList(dataset);
            newNode = new DefaultMutableTreeNode(dataset);
        }
        finally {
            // best-effort close of both ids; failures are only logged
            try {
                srcDataset.close(srcdid);
            }
            catch (Exception ex) {
                log.debug("copyDataset {} srcDataset.close failure: ", dname, ex);
            }
            try {
                pgroup.close(dstdid);
            }
            catch (Exception ex) {
                log.debug("copyDataset {} pgroup.close failure: ", dname, ex);
            }
        }

        return newNode;
    }

    /**
     * Constructs a dataset for specified dataset identifier.
     *
     * @param did
     *            the dataset identifier
     * @param name
     *            the name of the dataset
     * @param path
     *            the path of the dataset
     *
     * @return the dataset if successful; otherwise return null.
     *
     * @throws HDF5Exception
     *             If there is an error at the HDF5 library level.
     */
    private Dataset getDataset(int did, String name, String path) throws HDF5Exception {
        Dataset dataset = null;
        if (did >= 0) {
            int tid = -1, tclass = -1;
            try {
                tid = H5.H5Dget_type(did);
                tclass = H5.H5Tget_class(tid);
                if (tclass == HDF5Constants.H5T_ARRAY) {
                    // for ARRAY, the type is determined by the base type
                    int btid = H5.H5Tget_super(tid);
                    tclass = H5.H5Tget_class(btid);
                    try {
                        H5.H5Tclose(btid);
                    }
                    catch (Exception ex) {
                        log.debug("getDataset {} H5Tclose failure: ", name, ex);
                    }
                }
            }
            finally {
                try {
                    H5.H5Tclose(tid);
                }
                catch (Exception ex) {
                    log.debug("getDataset {} H5Tclose failure: ", name, ex);
                }
            }

            // compound datatypes get the compound wrapper; everything else is scalar
            if (tclass == HDF5Constants.H5T_COMPOUND) {
                dataset = new H5CompoundDS(this, name, path);
            }
            else {
                dataset = new H5ScalarDS(this, name, path);
            }
        }
        else {
            log.debug("getDataset id failure");
        }

        return dataset;
    }

    /**
     * Copies a named datatype to another location.
     *
     * @param srcType
     *            the source datatype
     * @param pgroup
     *            the group which the new datatype is copied to
     * @param dstName
     *            the name of the new datatype
     *
     * @throws Exception
     *             If there is a failure.
     */
    private TreeNode copyDatatype(Datatype srcType, H5Group pgroup, String dstName) throws Exception {
        Datatype datatype = null;
        int tid_src = -1, gid_dst = -1;
        String path = null;
        DefaultMutableTreeNode newNode = null;

        if (pgroup.isRoot()) {
            path = HObject.separator;
        }
        else {
            path = pgroup.getPath() + pgroup.getName() + HObject.separator;
        }

        // default the destination name to the source name
        if ((dstName == null) || dstName.equals(HObject.separator) || (dstName.length() < 1)) {
            dstName = srcType.getName();
        }

        try {
            tid_src = srcType.open();
            gid_dst = pgroup.open();

            try {
                H5.H5Ocopy(tid_src, ".", gid_dst, dstName, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
            }
            catch (Exception ex) {
                log.debug("copyDatatype {} H5Ocopy failure: ", dstName, ex);
            }
            datatype = new H5Datatype(pgroup.getFileFormat(), dstName, path);

            pgroup.addToMemberList(datatype);
            newNode = new DefaultMutableTreeNode(datatype);
        }
        finally {
            // best-effort close of both ids; failures are only logged
            try {
                srcType.close(tid_src);
            }
            catch (Exception ex) {
                log.debug("copyDatatype {} srcType.close failure: ", dstName, ex);
            }
            try {
                pgroup.close(gid_dst);
            }
            catch (Exception ex) {
                log.debug("copyDatatype {} pgroup.close failure: ", dstName, ex);
            }
        }

        return newNode;
    }

    /**
     * Copies a group and its members to a new location.
     *
     * @param srcGroup
     *            the source group
     * @param dstGroup
     *            the location where the new group is located
     * @param dstName
     *            the name of the new group
     *
     * @throws Exception
     *             If there is a failure.
     */
    private TreeNode copyGroup(H5Group srcGroup, H5Group dstGroup, String dstName) throws Exception {
        H5Group group = null;
        DefaultMutableTreeNode newNode = null;
        int srcgid = -1, dstgid = -1;
        String gname = null, path = null;

        if (dstGroup.isRoot()) {
            path = HObject.separator;
        }
        else {
            path = dstGroup.getPath() + dstGroup.getName() + HObject.separator;
        }

        // default the destination name to the source name
        if ((dstName == null) || dstName.equals(HObject.separator) || (dstName.length() < 1)) {
            dstName = srcGroup.getName();
        }

        try {
            srcgid = srcGroup.open();
            dstgid = dstGroup.open();
            try {
                H5.H5Ocopy(srcgid, ".", dstgid, dstName, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
            }
            catch (Exception ex) {
                log.debug("copyGroup {} H5Ocopy failure: ", dstName, ex);
            }

            group = new H5Group(dstGroup.getFileFormat(), dstName, path, dstGroup);
            // the copied group is never a leaf node in the tree view
            newNode = new DefaultMutableTreeNode(group) {
                private static final long serialVersionUID = -4981107816640372359L;

                @Override
                public boolean isLeaf() {
                    return false;
                }
            };
            depth_first(newNode, Integer.MIN_VALUE); // reload all
            dstGroup.addToMemberList(group);
        }

        finally {
            // best-effort close of both ids; failures are only logged
            try {
                srcGroup.close(srcgid);
            }
            catch (Exception ex) {
                log.debug("copyGroup {} srcGroup.close failure: ", dstName, ex);
            }
            try {
                dstGroup.close(dstgid);
            }
            catch (Exception ex) {
                log.debug("copyGroup {} pgroup.close failure: ", dstName, ex);
            }
        }

        return newNode;
    }

    /**
     * Constructs a group for specified group identifier and retrieves members.
     *
     * @param gid
     *            The group identifier.
     * @param name
     *            The group name.
     * @param pGroup
     *            The parent group, or null for the root group.
     *
     * @return The group if successful; otherwise returns null.
2803 * 2804 * @throws HDF5Exception 2805 * If there is an error at the HDF5 library level. 2806 */ 2807 private H5Group getGroup(int gid, String name, Group pGroup) throws HDF5Exception { 2808 String parentPath = null; 2809 String thisFullName = null; 2810 String memberFullName = null; 2811 2812 if (pGroup == null) { 2813 thisFullName = name = "/"; 2814 } 2815 else { 2816 parentPath = pGroup.getFullName(); 2817 if ((parentPath == null) || parentPath.equals("/")) { 2818 thisFullName = "/" + name; 2819 } 2820 else { 2821 thisFullName = parentPath + "/" + name; 2822 } 2823 } 2824 2825 // get rid of any extra "/" 2826 if (parentPath != null) { 2827 parentPath = parentPath.replaceAll("//", "/"); 2828 } 2829 if (thisFullName != null) { 2830 thisFullName = thisFullName.replaceAll("//", "/"); 2831 } 2832 2833 H5Group group = new H5Group(this, name, parentPath, pGroup); 2834 2835 H5G_info_t group_info = null; 2836 H5O_info_t obj_info = null; 2837 int oid = -1; 2838 String link_name = null; 2839 try { 2840 group_info = H5.H5Gget_info(gid); 2841 } 2842 catch (Exception ex) { 2843 log.debug("getGroup {} H5Gget_info failure: ", name, ex); 2844 } 2845 try { 2846 oid = H5.H5Oopen(gid, thisFullName, HDF5Constants.H5P_DEFAULT); 2847 } 2848 catch (Exception ex) { 2849 log.debug("getGroup {} H5Oopen failure: ", name, ex); 2850 } 2851 2852 // retrieve only the immediate members of the group, do not follow 2853 // subgroups 2854 for (int i = 0; i < group_info.nlinks; i++) { 2855 try { 2856 link_name = H5.H5Lget_name_by_idx(gid, thisFullName, indexType, indexOrder, i, 2857 HDF5Constants.H5P_DEFAULT); 2858 obj_info = H5 2859 .H5Oget_info_by_idx(oid, thisFullName, indexType, indexOrder, i, HDF5Constants.H5P_DEFAULT); 2860 } 2861 catch (HDF5Exception ex) { 2862 log.debug("getGroup[{}] {} name,info failure: ", i, name, ex); 2863 // do not stop if accessing one member fails 2864 continue; 2865 } 2866 // create a new group 2867 if (obj_info.type == HDF5Constants.H5O_TYPE_GROUP) { 2868 H5Group 
g = new H5Group(this, link_name, thisFullName, group); 2869 group.addToMemberList(g); 2870 } 2871 else if (obj_info.type == HDF5Constants.H5O_TYPE_DATASET) { 2872 int did = -1; 2873 Dataset d = null; 2874 2875 if ((thisFullName == null) || thisFullName.equals("/")) { 2876 memberFullName = "/" + link_name; 2877 } 2878 else { 2879 memberFullName = thisFullName + "/" + link_name; 2880 } 2881 2882 try { 2883 did = H5.H5Dopen(fid, memberFullName, HDF5Constants.H5P_DEFAULT); 2884 d = getDataset(did, link_name, thisFullName); 2885 } 2886 finally { 2887 try { 2888 H5.H5Dclose(did); 2889 } 2890 catch (Exception ex) { 2891 log.debug("getGroup[{}] {} H5Dclose failure: ", i, name, ex); 2892 } 2893 } 2894 group.addToMemberList(d); 2895 } 2896 else if (obj_info.type == HDF5Constants.H5O_TYPE_NAMED_DATATYPE) { 2897 Datatype t = new H5Datatype(this, link_name, thisFullName); 2898 group.addToMemberList(t); 2899 } 2900 } // End of for loop. 2901 try { 2902 if (oid >= 0) 2903 H5.H5Oclose(oid); 2904 } 2905 catch (Exception ex) { 2906 log.debug("getGroup {} H5Oclose failure: ", name, ex); 2907 } 2908 return group; 2909 } 2910 2911 /** 2912 * Retrieves the name of the target object that is being linked to. 2913 * 2914 * @param obj 2915 * The current link object. 2916 * 2917 * @return The name of the target object. 2918 * 2919 * @throws HDF5Exception 2920 * If there is an error at the HDF5 library level. 
     */
    public static String getLinkTargetName(HObject obj) throws Exception {
        String[] link_value = { null, null };
        String targetObjName = null;

        if (obj == null) {
            return null;
        }

        // the root group is never a link
        if (obj.getFullName().equals("/")) {
            return null;
        }

        H5L_info_t link_info = null;
        try {
            link_info = H5.H5Lget_info(obj.getFID(), obj.getFullName(), HDF5Constants.H5P_DEFAULT);
        }
        catch (Throwable err) {
            log.debug("H5Lget_info {} failure: ", obj.getFullName());
            log.trace("H5Lget_info {} failure: ", obj.getFullName(), err);
        }
        if (link_info != null) {
            if ((link_info.type == HDF5Constants.H5L_TYPE_SOFT) || (link_info.type == HDF5Constants.H5L_TYPE_EXTERNAL)) {
                try {
                    H5.H5Lget_val(obj.getFID(), obj.getFullName(), link_value, HDF5Constants.H5P_DEFAULT);
                }
                catch (Exception ex) {
                    log.debug("H5Lget_val {} failure: ", obj.getFullName(), ex);
                }
                // link_value[0] holds the target path; for external links,
                // link_value[1] holds the target file name
                if (link_info.type == HDF5Constants.H5L_TYPE_SOFT)
                    targetObjName = link_value[0];
                else if (link_info.type == HDF5Constants.H5L_TYPE_EXTERNAL) {
                    targetObjName = link_value[1] + FileFormat.FILE_OBJ_SEP + link_value[0];
                }
            }
        }
        return targetObjName;
    }

    /**
     * Export dataset.
     *
     * @param file_export_name
     *            The file name to export data into.
     * @param file_name
     *            The name of the HDF5 file containing the dataset.
     * @param object_path
     *            The full path of the dataset to be exported.
     * @param binary_order
     *            Passed straight through to H5.H5export_dataset (presumably the
     *            binary byte-order flag -- confirm against the HDF5 docs).
     *
     * @throws Exception
     *             If there is a failure.
     */
    public void exportDataset(String file_export_name, String file_name, String object_path, int binary_order)
            throws Exception {
        H5.H5export_dataset(file_export_name, file_name, object_path, binary_order);
    }

    /**
     * Renames an attribute.
     *
     * @param obj
     *            The object whose attribute is to be renamed.
     * @param oldAttrName
     *            The current name of the attribute.
     * @param newAttrName
     *            The new name of the attribute.
     *
     * @throws HDF5Exception
     *             If there is an error at the HDF5 library level.
     */
    public void renameAttribute(HObject obj, String oldAttrName, String newAttrName) throws Exception {
        log.trace("renameAttribute {} to {}", oldAttrName, newAttrName);
        // NOTE(review): the rename runs only while attrFlag is false, and attrFlag
        // is set here without ever being reset in this method -- presumably a guard
        // against a repeated rename elsewhere; confirm the intended attrFlag lifecycle.
        if (!attrFlag) {
            attrFlag = true;
            H5.H5Arename_by_name(obj.getFID(), obj.getName(), oldAttrName, newAttrName, HDF5Constants.H5P_DEFAULT);
        }
    }

    /**
     * Rename the given object
     *
     * @param obj
     *            the object to be renamed.
     * @param newName
     *            the new name of the object.
     *
     * @throws Exception
     *             If there is a failure.
     */
    public static void renameObject(HObject obj, String newName) throws Exception {
        String currentFullPath = obj.getPath() + obj.getName();
        String newFullPath = obj.getPath() + newName;

        // collapse doubled separators before comparing the paths
        currentFullPath = currentFullPath.replaceAll("//", "/");
        newFullPath = newFullPath.replaceAll("//", "/");

        if (currentFullPath.equals("/")) {
            throw new HDF5Exception("Can't rename the root group.");
        }

        if (currentFullPath.equals(newFullPath)) {
            throw new HDF5Exception("The new name is the same as the current name.");
        }

        // Call the library to move things in the file
        H5.H5Lmove(obj.getFID(), currentFullPath, obj.getFID(), newFullPath, HDF5Constants.H5P_DEFAULT,
                HDF5Constants.H5P_DEFAULT);
    }

    /**
     * Returns the HDF5Constants value for the named index type.
     *
     * @param strtype
     *            "H5_INDEX_NAME", "H5_INDEX_CRT_ORDER" or "H5_INDEX_N".
     *
     * @return the matching constant; H5_INDEX_UNKNOWN for any other name.
     */
    public static int getIndexTypeValue(String strtype) {
        if (strtype.compareTo("H5_INDEX_NAME") == 0)
            return HDF5Constants.H5_INDEX_NAME;
        if (strtype.compareTo("H5_INDEX_CRT_ORDER") == 0)
            return HDF5Constants.H5_INDEX_CRT_ORDER;
        if (strtype.compareTo("H5_INDEX_N") == 0)
            return HDF5Constants.H5_INDEX_N;
        return HDF5Constants.H5_INDEX_UNKNOWN;
    }

    /**
     * Returns the HDF5Constants value for the named iteration order.
     *
     * @param strorder
     *            "H5_ITER_INC", "H5_ITER_DEC", "H5_ITER_NATIVE" or "H5_ITER_N".
     *
     * @return the matching constant; H5_ITER_UNKNOWN for any other name.
     */
    public static int
getIndexOrderValue(String strorder) { 3041 if (strorder.compareTo("H5_ITER_INC") == 0) 3042 return HDF5Constants.H5_ITER_INC; 3043 if (strorder.compareTo("H5_ITER_DEC") == 0) 3044 return HDF5Constants.H5_ITER_DEC; 3045 if (strorder.compareTo("H5_ITER_NATIVE") == 0) 3046 return HDF5Constants.H5_ITER_NATIVE; 3047 if (strorder.compareTo("H5_ITER_N") == 0) 3048 return HDF5Constants.H5_ITER_N; 3049 return HDF5Constants.H5_ITER_UNKNOWN; 3050 } 3051 3052 public int getIndexType(String strtype) { 3053 if (strtype != null) { 3054 if (strtype.compareTo("H5_INDEX_NAME") == 0) 3055 return HDF5Constants.H5_INDEX_NAME; 3056 if (strtype.compareTo("H5_INDEX_CRT_ORDER") == 0) 3057 return HDF5Constants.H5_INDEX_CRT_ORDER; 3058 return HDF5Constants.H5_INDEX_UNKNOWN; 3059 } 3060 return getIndexType(); 3061 } 3062 3063 public int getIndexType() { 3064 return indexType; 3065 } 3066 3067 public void setIndexType(int indexType) { 3068 this.indexType = indexType; 3069 } 3070 3071 public int getIndexOrder(String strorder) { 3072 if (strorder != null) { 3073 if (strorder.compareTo("H5_ITER_INC") == 0) 3074 return HDF5Constants.H5_ITER_INC; 3075 if (strorder.compareTo("H5_ITER_DEC") == 0) 3076 return HDF5Constants.H5_ITER_DEC; 3077 if (strorder.compareTo("H5_ITER_NATIVE") == 0) 3078 return HDF5Constants.H5_ITER_NATIVE; 3079 if (strorder.compareTo("H5_ITER_N") == 0) 3080 return HDF5Constants.H5_ITER_N; 3081 return HDF5Constants.H5_ITER_UNKNOWN; 3082 } 3083 return getIndexOrder(); 3084 } 3085 3086 public int getIndexOrder() { 3087 return indexOrder; 3088 } 3089 3090 public void setIndexOrder(int indexOrder) { 3091 this.indexOrder = indexOrder; 3092 } 3093}