001/***************************************************************************** 002 * Copyright by The HDF Group. * 003 * Copyright by the Board of Trustees of the University of Illinois. * 004 * All rights reserved. * 005 * * 006 * This file is part of the HDF Java Products distribution. * 007 * The full copyright notice, including terms governing use, modification, * 008 * and redistribution, is contained in the files COPYING and Copyright.html. * 009 * COPYING can be found at the root of the source code distribution tree. * 010 * Or, see http://hdfgroup.org/products/hdf-java/doc/Copyright.html. * 011 * If you do not have access to either file, you may request a copy from * 012 * help@hdfgroup.org. * 013 ****************************************************************************/ 014 015package hdf.object.h5; 016 017import java.io.File; 018import java.lang.reflect.Array; 019import java.util.Enumeration; 020import java.util.Hashtable; 021import java.util.List; 022import java.util.Vector; 023 024import javax.swing.tree.DefaultMutableTreeNode; 025import javax.swing.tree.MutableTreeNode; 026import javax.swing.tree.TreeNode; 027 028import hdf.hdf5lib.H5; 029import hdf.hdf5lib.HDF5Constants; 030import hdf.hdf5lib.HDFNativeData; 031import hdf.hdf5lib.exceptions.HDF5Exception; 032import hdf.hdf5lib.structs.H5G_info_t; 033import hdf.hdf5lib.structs.H5L_info_t; 034import hdf.hdf5lib.structs.H5O_info_t; 035import hdf.object.Attribute; 036import hdf.object.Dataset; 037import hdf.object.Datatype; 038import hdf.object.FileFormat; 039import hdf.object.Group; 040import hdf.object.HObject; 041import hdf.object.ScalarDS; 042 043/** 044 * H5File is an implementation of the FileFormat class for HDF5 files. 045 * <p> 046 * The HDF5 file structure is stored in a tree that is made up of Java TreeNode objects. Each tree node represents an 047 * HDF5 object: a Group, Dataset, or Named Datatype. 
 * Starting from the root of the tree, <i>rootNode</i>, the tree can
 * be traversed to find a specific object.
 * <p>
 * The following example shows the implementation of finding an object for a given path in FileFormat. User applications
 * can directly call the static method FileFormat.findObject(file, objPath) to get the object.
 *
 * <pre>
 * HObject findObject(FileFormat file, String path) {
 *     if (file == null || path == null)
 *         return null;
 *     if (!path.endsWith("/"))
 *         path = path + "/";
 *     DefaultMutableTreeNode theRoot = (DefaultMutableTreeNode) file
 *             .getRootNode();
 *     if (theRoot == null)
 *         return null;
 *     else if (path.equals("/"))
 *         return (HObject) theRoot.getUserObject();
 *
 *     Enumeration local_enum = ((DefaultMutableTreeNode) theRoot)
 *             .breadthFirstEnumeration();
 *     DefaultMutableTreeNode theNode = null;
 *     HObject theObj = null;
 *     while (local_enum.hasMoreElements()) {
 *         theNode = (DefaultMutableTreeNode) local_enum.nextElement();
 *         theObj = (HObject) theNode.getUserObject();
 *         String fullPath = theObj.getFullName() + "/";
 *         if (path.equals(fullPath) && theObj.getPath() != null) {
 *             return theObj; // found the object for the given path
 *         }
 *     }
 *     return null; // no object matches the given path
 * }
 * </pre>
 *
 * @author Peter X. Cao
 * @version 2.4 9/4/2007
 */
public class H5File extends FileFormat {
    private static final long serialVersionUID = 6247335559471526045L;

    private final static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5File.class);

    /**
     * The file access flag. Valid values are HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5F_ACC_RDWR and
     * HDF5Constants.H5F_ACC_CREAT.
     */
    private int flag;

    /**
     * The index type. Valid values are HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_INDEX_CRT_ORDER.
     */
    private int indexType = HDF5Constants.H5_INDEX_NAME;

    /**
     * The index order. Valid values are HDF5Constants.H5_ITER_INC, HDF5Constants.H5_ITER_DEC.
     */
    private int indexOrder = HDF5Constants.H5_ITER_INC;

    /**
     * The root node of the file hierarchy.
     */
    private DefaultMutableTreeNode rootNode;

    /**
     * The maximum number of characters retrieved for an attribute name; longer names are truncated.
     */
    private static final int attrNameLen = 256;

    /**
     * The library version bounds
     */
    private int[] libver;

    // NOTE(review): purpose not evident from this chunk; it is only initialized to
    // false in the constructor here — presumably tracks whether attributes have
    // been loaded. Confirm against the rest of the class.
    private boolean attrFlag;

    /***************************************************************************
     * Constructor
     **************************************************************************/
    /**
     * Constructs an H5File instance with an empty file name and read-only access.
     */
    public H5File() {
        this("", READ);
    }

    /**
     * Constructs an H5File instance with specified file name and read/write access.
     * <p>
     * This constructor does not open the file for access, nor does it confirm that the file can be opened read/write.
     *
     * @param fileName
     *            A valid file name, with a relative or absolute path.
     * @throws NullPointerException
     *             If the <code>fileName</code> argument is <code>null</code>.
     */
    public H5File(String fileName) {
        this(fileName, WRITE);
    }

    /**
     * Constructs an H5File instance with specified file name and access.
     * <p>
     * The access parameter values and corresponding behaviors:
     * <ul>
     * <li>READ: Read-only access; open() will fail if file doesn't exist.
     * <li>WRITE: Read/Write access; open() will fail if file doesn't exist or if file can't be opened with read/write
     * access.
     * <li>CREATE: Read/Write access; create a new file or truncate an existing one; open() will fail if file can't be
     * created or if file exists but can't be opened read/write.
156 * </ul> 157 * <p> 158 * This constructor does not open the file for access, nor does it confirm that the file can later be opened 159 * read/write or created. 160 * <p> 161 * The flag returned by {@link #isReadOnly()} is set to true if the access parameter value is READ, even though the 162 * file isn't yet open. 163 * 164 * @param fileName 165 * A valid file name, with a relative or absolute path. 166 * @param access 167 * The file access flag, which determines behavior when file is opened. Acceptable values are 168 * <code> READ, WRITE, </code> and <code>CREATE</code>. 169 * @throws NullPointerException 170 * If the <code>fileName</code> argument is <code>null</code>. 171 */ 172 public H5File(String fileName, int access) { 173 // Call FileFormat ctor to set absolute path name 174 super(fileName); 175 libver = new int[2]; 176 attrFlag = false; 177 178 // set metadata for the instance 179 rootNode = null; 180 this.fid = -1; 181 isReadOnly = (access == READ); 182 183 // At this point we just set up the flags for what happens later. 184 // We just pass unexpected access values on... subclasses may have 185 // their own values. 186 if (access == READ) { 187 flag = HDF5Constants.H5F_ACC_RDONLY; 188 } 189 else if (access == WRITE) { 190 flag = HDF5Constants.H5F_ACC_RDWR; 191 } 192 else if (access == CREATE) { 193 flag = HDF5Constants.H5F_ACC_CREAT; 194 } 195 else { 196 flag = access; 197 } 198 } 199 200 /*************************************************************************** 201 * Class methods 202 **************************************************************************/ 203 204 /** 205 * Copies the attributes of one object to another object. 206 * <p> 207 * This method copies all the attributes from one object (source object) to another (destination object). If an 208 * attribute already exists in the destination object, the attribute will not be copied. Attribute names exceeding 209 * 256 characters will be truncated in the destination object. 
210 * <p> 211 * The object can be an H5Group, an H5Dataset, or a named H5Datatype. This method is in the H5File class because 212 * there is no H5Object class and it is specific to HDF5 objects. 213 * <p> 214 * The copy can fail for a number of reasons, including an invalid source or destination object, but no exceptions 215 * are thrown. The actual copy is carried out by the method: {@link #copyAttributes(int, int)} 216 * 217 * @param src 218 * The source object. 219 * @param dst 220 * The destination object. 221 * @see #copyAttributes(int, int) 222 */ 223 public static final void copyAttributes(HObject src, HObject dst) { 224 if ((src != null) && (dst != null)) { 225 int srcID = src.open(); 226 int dstID = dst.open(); 227 228 if ((srcID >= 0) && (dstID >= 0)) { 229 copyAttributes(srcID, dstID); 230 } 231 232 if (srcID >= 0) { 233 src.close(srcID); 234 } 235 236 if (dstID >= 0) { 237 dst.close(dstID); 238 } 239 } 240 } 241 242 /** 243 * Copies the attributes of one object to another object. 244 * <p> 245 * This method copies all the attributes from one object (source object) to another (destination object). If an 246 * attribute already exists in the destination object, the attribute will not be copied. Attribute names exceeding 247 * 256 characters will be truncated in the destination object. 248 * <p> 249 * The object can be an H5Group, an H5Dataset, or a named H5Datatype. This method is in the H5File class because 250 * there is no H5Object class and it is specific to HDF5 objects. 251 * <p> 252 * The copy can fail for a number of reasons, including an invalid source or destination object identifier, but no 253 * exceptions are thrown. 254 * 255 * @param src_id 256 * The identifier of the source object. 257 * @param dst_id 258 * The identifier of the destination object. 
259 */ 260 public static final void copyAttributes(int src_id, int dst_id) { 261 int aid_src = -1, aid_dst = -1, atid = -1, asid = -1; 262 String[] aName = { "" }; 263 H5O_info_t obj_info = null; 264 265 try { 266 obj_info = H5.H5Oget_info(src_id); 267 } 268 catch (Exception ex) { 269 obj_info.num_attrs = -1; 270 } 271 272 if (obj_info.num_attrs < 0) { 273 return; 274 } 275 276 for (int i = 0; i < obj_info.num_attrs; i++) { 277 aName[0] = new String(""); 278 279 try { 280 aid_src = H5.H5Aopen_by_idx(src_id, ".", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 281 i, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 282 H5.H5Aget_name(aid_src, H5File.attrNameLen, aName); 283 atid = H5.H5Aget_type(aid_src); 284 asid = H5.H5Aget_space(aid_src); 285 286 aid_dst = H5.H5Acreate(dst_id, aName[0], atid, asid, HDF5Constants.H5P_DEFAULT, 287 HDF5Constants.H5P_DEFAULT); 288 289 // use native data copy 290 H5.H5Acopy(aid_src, aid_dst); 291 292 } 293 catch (Exception ex) { 294 log.debug("Attribute[{}] failure: ", i, ex); 295 } 296 297 try { 298 H5.H5Sclose(asid); 299 } 300 catch (Exception ex) { 301 log.debug("H5Sclose failure: ", ex); 302 } 303 try { 304 H5.H5Tclose(atid); 305 } 306 catch (Exception ex) { 307 log.debug("H5Tclose failure: ", ex); 308 } 309 try { 310 H5.H5Aclose(aid_src); 311 } 312 catch (Exception ex) { 313 log.debug("src H5Aclose failure: ", ex); 314 } 315 try { 316 H5.H5Aclose(aid_dst); 317 } 318 catch (Exception ex) { 319 log.debug("dst H5Aclose failure: ", ex); 320 } 321 322 } // for (int i=0; i<num_attr; i++) 323 } 324 325 /** 326 * Returns a list of attributes for the specified object. 327 * <p> 328 * This method returns a list containing the attributes associated with the identified object. If there are no 329 * associated attributes, an empty list will be returned. 330 * <p> 331 * Attribute names exceeding 256 characters will be truncated in the returned list. 
332 * 333 * @param objID 334 * The identifier for the object whose attributes are to be returned. 335 * @return The list of the object's attributes. 336 * @throws HDF5Exception 337 * If an underlying HDF library routine is unable to perform a step necessary to retrieve the 338 * attributes. A variety of failures throw this exception. 339 * @see #getAttribute(int,int,int) 340 */ 341 public static final List<Attribute> getAttribute(int objID) throws HDF5Exception { 342 return H5File.getAttribute(objID, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC); 343 } 344 345 /** 346 * Returns a list of attributes for the specified object, in creation or alphabetical order. 347 * <p> 348 * This method returns a list containing the attributes associated with the identified object. If there are no 349 * associated attributes, an empty list will be returned. The list of attributes returned can be in increasing or 350 * decreasing, creation or alphabetical order. 351 * <p> 352 * Attribute names exceeding 256 characters will be truncated in the returned list. 353 * 354 * @param objID 355 * The identifier for the object whose attributes are to be returned. 356 * @param idx_type 357 * The type of index. Valid values are: 358 * <ul> 359 * <li>H5_INDEX_NAME: An alpha-numeric index by attribute name <li>H5_INDEX_CRT_ORDER: An index by 360 * creation order 361 * </ul> 362 * @param order 363 * The index traversal order. Valid values are: 364 * <ul> 365 * <li>H5_ITER_INC: A top-down iteration incrementing the index position at each step. <li>H5_ITER_DEC: A 366 * bottom-up iteration decrementing the index position at each step. 367 * </ul> 368 * @return The list of the object's attributes. 369 * @throws HDF5Exception 370 * If an underlying HDF library routine is unable to perform a step necessary to retrieve the 371 * attributes. A variety of failures throw this exception. 
372 */ 373 374 public static final List<Attribute> getAttribute(int objID, int idx_type, int order) throws HDF5Exception { 375 List<Attribute> attributeList = null; 376 int aid = -1, sid = -1, tid = -1; 377 H5O_info_t obj_info = null; 378 log.trace("getAttribute: start"); 379 380 try { 381 obj_info = H5.H5Oget_info(objID); 382 } 383 catch (Exception ex) { 384 log.debug("H5Oget_info failure: ", ex); 385 } 386 if (obj_info.num_attrs <= 0) { 387 return (attributeList = new Vector<Attribute>()); 388 } 389 390 int n = (int) obj_info.num_attrs; 391 attributeList = new Vector<Attribute>(n); 392 log.trace("getAttribute: num_attrs={}", n); 393 394 for (int i = 0; i < n; i++) { 395 long lsize = 1; 396 log.trace("getAttribute: attribute[{}]", i); 397 398 try { 399 aid = H5.H5Aopen_by_idx(objID, ".", idx_type, order, i, HDF5Constants.H5P_DEFAULT, 400 HDF5Constants.H5P_DEFAULT); 401 sid = H5.H5Aget_space(aid); 402 403 long dims[] = null; 404 int rank = H5.H5Sget_simple_extent_ndims(sid); 405 406 if (rank > 0) { 407 dims = new long[rank]; 408 H5.H5Sget_simple_extent_dims(sid, dims, null); 409 for (int j = 0; j < dims.length; j++) { 410 lsize *= dims[j]; 411 } 412 } 413 String[] nameA = { "" }; 414 H5.H5Aget_name(aid, H5File.attrNameLen, nameA); 415 log.trace("getAttribute: attribute[{}] is {}", i, nameA); 416 417 int tmptid = -1; 418 try { 419 tmptid = H5.H5Aget_type(aid); 420 tid = H5.H5Tget_native_type(tmptid); 421 log.trace("getAttribute: attribute[{}] tid={} native tmptid={} from aid={}", i, tid, tmptid, aid); 422 } 423 finally { 424 try { 425 H5.H5Tclose(tmptid); 426 } 427 catch (Exception ex) { 428 log.debug("H5Tclose failure: ", ex); 429 } 430 } 431 Datatype attrType = new H5Datatype(tid); 432 Attribute attr = new Attribute(nameA[0], attrType, dims); 433 attributeList.add(attr); 434 log.trace("getAttribute: attribute[{}] Datatype={}", i, attrType.getDatatypeDescription()); 435 436 boolean is_variable_str = false; 437 boolean isVLEN = false; 438 boolean isCompound = false; 
439 boolean isScalar = false; 440 int tclass = H5.H5Tget_class(tid); 441 442 if (dims == null) 443 isScalar = true; 444 try { 445 is_variable_str = H5.H5Tis_variable_str(tid); 446 } 447 catch (Exception ex) { 448 log.debug("H5Tis_variable_str failure: ", ex); 449 } 450 isVLEN = (tclass == HDF5Constants.H5T_VLEN); 451 isCompound = (tclass == HDF5Constants.H5T_COMPOUND); 452 log.trace( 453 "getAttribute: attribute[{}] has size={} isCompound={} isScalar={} is_variable_str={} isVLEN={}", 454 i, lsize, isCompound, isScalar, is_variable_str, isVLEN); 455 456 // retrieve the attribute value 457 if (lsize <= 0) { 458 continue; 459 } 460 461 Object value = null; 462 if (isVLEN || is_variable_str || isCompound || (isScalar && tclass == HDF5Constants.H5T_ARRAY)) { 463 String[] strs = new String[(int) lsize]; 464 for (int j = 0; j < lsize; j++) { 465 strs[j] = ""; 466 } 467 try { 468 log.trace("getAttribute: attribute[{}] H5AreadVL", i); 469 H5.H5AreadVL(aid, tid, strs); 470 } 471 catch (Exception ex) { 472 ex.printStackTrace(); 473 } 474 value = strs; 475 } 476 else { 477 value = H5Datatype.allocateArray(tid, (int) lsize); 478 if (value == null) { 479 continue; 480 } 481 482 if (tclass == HDF5Constants.H5T_ARRAY) { 483 int tmptid1 = -1, tmptid2 = -1; 484 try { 485 log.trace("getAttribute: attribute[{}] H5Aread ARRAY tid={}", i, tid); 486 H5.H5Aread(aid, tid, value); 487 } 488 catch (Exception ex) { 489 ex.printStackTrace(); 490 } 491 finally { 492 try { 493 H5.H5Tclose(tmptid1); 494 } 495 catch (Exception ex) { 496 log.debug("tid1 H5Tclose failure: ", ex); 497 } 498 try { 499 H5.H5Tclose(tmptid2); 500 } 501 catch (Exception ex) { 502 log.debug("tid2 H5Tclose failure: ", ex); 503 } 504 } 505 } 506 else { 507 log.trace("getAttribute: attribute[{}] H5Aread", i); 508 H5.H5Aread(aid, tid, value); 509 } 510 511 if (tclass == HDF5Constants.H5T_STRING) { 512 log.trace("getAttribute: attribute[{}] byteToString", i); 513 value = Dataset.byteToString((byte[]) value, 
H5.H5Tget_size(tid)); 514 } 515 else if (tclass == HDF5Constants.H5T_REFERENCE) { 516 log.trace("getAttribute: attribute[{}] byteToLong", i); 517 value = HDFNativeData.byteToLong((byte[]) value); 518 } 519 } 520 521 attr.setValue(value); 522 523 } 524 catch (HDF5Exception ex) { 525 log.debug("Attribute[{}] inspection failure: ", i, ex); 526 } 527 finally { 528 try { 529 H5.H5Tclose(tid); 530 } 531 catch (Exception ex) { 532 log.debug("H5Tclose[{}] failure: ", i, ex); 533 } 534 try { 535 H5.H5Sclose(sid); 536 } 537 catch (Exception ex) { 538 log.debug("H5Sclose[{}] failure: ", i, ex); 539 } 540 try { 541 H5.H5Aclose(aid); 542 } 543 catch (Exception ex) { 544 log.debug("H5Aclose[{}] failure: ", i, ex); 545 } 546 } 547 } // for (int i=0; i<obj_info.num_attrs; i++) 548 549 log.trace("getAttribute: finish"); 550 return attributeList; 551 } 552 553 /** 554 * Creates attributes for an HDF5 image dataset. 555 * <p> 556 * This method creates attributes for two common types of HDF5 images. It provides a way of adding multiple 557 * attributes to an HDF5 image dataset with a single call. The {@link #writeAttribute(HObject, Attribute, boolean)} 558 * method may be used to write image attributes that are not handled by this method. 559 * <p> 560 * For more information about HDF5 image attributes, see the <a 561 * href="http://hdfgroup.org/HDF5/doc/ADGuide/ImageSpec.html"> HDF5 Image and Palette Specification</a>. 562 * <p> 563 * This method can be called to create attributes for 24-bit true color and indexed images. The 564 * <code>selectionFlag</code> parameter controls whether this will be an indexed or true color image. If 565 * <code>selectionFlag</code> is <code>-1</code>, this will be an indexed image. If the value is 566 * <code>ScalarDS.INTERLACE_PIXEL</code> or <code>ScalarDS.INTERLACE_PLANE</code>, it will be a 24-bit true color 567 * image with the indicated interlace mode. 
     * <p>
     * The created attribute descriptions, names, and values are:
     * <ul>
     * <li>The image identifier: name="CLASS", value="IMAGE"
     * <li>The version of image: name="IMAGE_VERSION", value="1.2"
     * <li>The range of data values: name="IMAGE_MINMAXRANGE", value=[0, 255]
     * <li>The type of the image: name="IMAGE_SUBCLASS", value="IMAGE_TRUECOLOR" or "IMAGE_INDEXED"
     * <li>For IMAGE_TRUECOLOR, the interlace mode: name="INTERLACE_MODE", value="INTERLACE_PIXEL" or "INTERLACE_PLANE"
     * <li>For IMAGE_INDEXED, the palettes to use in viewing the image: name="PALETTE", value= 1-d array of references
     * to the palette datasets, with initial value of {0} (a null reference, to be patched later)
     * </ul>
     * <p>
     * This method is in the H5File class rather than H5ScalarDS because images are typically thought of at the File
     * Format implementation level.
     *
     * @param dataset
     *            The image dataset the attributes are added to.
     * @param selectionFlag
     *            Selects the image type and, for 24-bit true color images, the interlace mode. Valid values are:
     *            <ul>
     *            <li>-1: Indexed Image. <li>ScalarDS.INTERLACE_PIXEL: True Color Image. The component values for a
     *            pixel are stored contiguously. <li>ScalarDS.INTERLACE_PLANE: True Color Image. Each component is
     *            stored in a separate plane.
     *            </ul>
     * @throws Exception
     *             If there is a problem creating the attributes, or if the selectionFlag is invalid.
     */
    private static final void createImageAttributes(Dataset dataset, int selectionFlag) throws Exception {
        String subclass = null;
        String interlaceMode = null;

        // Decide the subclass/interlace strings from the selection flag;
        // any other value is rejected up front.
        if (selectionFlag == ScalarDS.INTERLACE_PIXEL) {
            subclass = "IMAGE_TRUECOLOR";
            interlaceMode = "INTERLACE_PIXEL";
        }
        else if (selectionFlag == ScalarDS.INTERLACE_PLANE) {
            subclass = "IMAGE_TRUECOLOR";
            interlaceMode = "INTERLACE_PLANE";
        }
        else if (selectionFlag == -1) {
            subclass = "IMAGE_INDEXED";
        }
        else {
            throw new HDF5Exception("The selectionFlag is invalid.");
        }

        // CLASS = "IMAGE": marks the dataset as an image.
        String attrName = "CLASS";
        String[] classValue = { "IMAGE" };
        Datatype attrType = new H5Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, -1, -1);
        Attribute attr = new Attribute(attrName, attrType, null);
        attr.setValue(classValue);
        dataset.writeMetadata(attr);

        // IMAGE_VERSION = "1.2": version of the image specification followed.
        attrName = "IMAGE_VERSION";
        String[] versionValue = { "1.2" };
        attrType = new H5Datatype(Datatype.CLASS_STRING, versionValue[0].length() + 1, -1, -1);
        attr = new Attribute(attrName, attrType, null);
        attr.setValue(versionValue);
        dataset.writeMetadata(attr);

        // IMAGE_MINMAXRANGE = [0, 255]: unsigned byte range of pixel values.
        long[] attrDims = { 2 };
        attrName = "IMAGE_MINMAXRANGE";
        byte[] attrValueInt = { 0, (byte) 255 };
        attrType = new H5Datatype(Datatype.CLASS_CHAR, 1, Datatype.NATIVE, Datatype.SIGN_NONE);
        attr = new Attribute(attrName, attrType, attrDims);
        attr.setValue(attrValueInt);
        dataset.writeMetadata(attr);

        // IMAGE_SUBCLASS = "IMAGE_TRUECOLOR" or "IMAGE_INDEXED".
        attrName = "IMAGE_SUBCLASS";
        String[] subclassValue = { subclass };
        attrType = new H5Datatype(Datatype.CLASS_STRING, subclassValue[0].length() + 1, -1, -1);
        attr = new Attribute(attrName, attrType, null);
        attr.setValue(subclassValue);
        dataset.writeMetadata(attr);

        if ((selectionFlag == ScalarDS.INTERLACE_PIXEL) || (selectionFlag == ScalarDS.INTERLACE_PLANE)) {
            // True color images additionally record their interlace mode.
            attrName = "INTERLACE_MODE";
            String[] interlaceValue = { interlaceMode };
            attrType = new H5Datatype(Datatype.CLASS_STRING, interlaceValue[0].length() + 1, -1, -1);
            attr = new Attribute(attrName, attrType, null);
            attr.setValue(interlaceValue);
            dataset.writeMetadata(attr);
        }
        else {
            // Indexed images get a placeholder PALETTE reference attribute.
            attrName = "PALETTE";
            long[] palRef = { 0 }; // set ref to null
            attrType = new H5Datatype(Datatype.CLASS_REFERENCE, 1, Datatype.NATIVE, Datatype.SIGN_NONE);
            attr = new Attribute(attrName, attrType, null);
            attr.setValue(palRef);
            dataset.writeMetadata(attr);
        }
    }

    /**
     * Updates values of scalar dataset object references in copied file.
     * <p>
     * This method has very specific functionality as documented below, and the user is advised to pay close attention
     * when dealing with files that contain references.
     * <p>
     * When a copy is made from one HDF file to another, object references and dataset region references are copied, but
     * the references in the destination file are not updated by the copy and are therefore invalid.
     * <p>
     * When an entire file is copied, this method updates the values of the object references and dataset region
     * references that are in scalar datasets in the destination file so that they point to the correct object(s) in the
     * destination file. The method does not update references that occur in objects other than scalar datasets.
     * <p>
     * In the current release, the updating of object references is not handled completely as it was not required by the
     * projects that funded development. There is no support for updates when the copy does not include the entire file.
     * Nor is there support for updating objects other than scalar datasets in full-file copies. This functionality will
     * be extended as funding becomes available or, possibly, when the underlying HDF library supports the reference
     * updates itself.
     *
     * @param srcFile
     *            The file that was copied.
     * @param dstFile
     *            The destination file where the object references will be updated.
     * @throws Exception
     *             If there is a problem in the update process.
     */
    public static final void updateReferenceDataset(H5File srcFile, H5File dstFile) throws Exception {
        if ((srcFile == null) || (dstFile == null)) {
            return;
        }

        DefaultMutableTreeNode srcRoot = (DefaultMutableTreeNode) srcFile.getRootNode();
        DefaultMutableTreeNode newRoot = (DefaultMutableTreeNode) dstFile.getRootNode();

        Enumeration<?> srcEnum = srcRoot.breadthFirstEnumeration();
        Enumeration<?> newEnum = newRoot.breadthFirstEnumeration();

        // build one-to-one table of between objects in
        // the source file and new file
        int did = -1, tid = -1;
        HObject srcObj, newObj;
        Hashtable<String, long[]> oidMap = new Hashtable<String, long[]>();
        List<ScalarDS> refDatasets = new Vector<ScalarDS>();
        // Both trees are walked breadth-first in lock step; this relies on the
        // copy having preserved the structure so that the i-th node of each
        // enumeration corresponds to the same logical object.
        while (newEnum.hasMoreElements() && srcEnum.hasMoreElements()) {
            srcObj = (HObject) ((DefaultMutableTreeNode) srcEnum.nextElement()).getUserObject();
            newObj = (HObject) ((DefaultMutableTreeNode) newEnum.nextElement()).getUserObject();
            // Record source OID -> destination OID so reference values can be
            // rewritten in the second pass below.
            oidMap.put(String.valueOf((srcObj.getOID())[0]), newObj.getOID());
            did = -1;
            tid = -1;

            // for Scalar DataSets in destination, if there is an object
            // reference in the dataset, add it to the refDatasets list for
            // later updating.
            if (newObj instanceof ScalarDS) {
                ScalarDS sd = (ScalarDS) newObj;
                did = sd.open();
                if (did >= 0) {
                    try {
                        tid = H5.H5Dget_type(did);
                        if (H5.H5Tequal(tid, HDF5Constants.H5T_STD_REF_OBJ)) {
                            refDatasets.add(sd);
                        }
                    }
                    catch (Exception ex) {
                        log.debug("ScalarDS reference failure: ", ex);
                    }
                    finally {
                        try {
                            H5.H5Tclose(tid);
                        }
                        catch (Exception ex) {
                            log.debug("ScalarDS reference H5Tclose failure: ", ex);
                        }
                    }
                }
                // NOTE(review): close is invoked even when open failed (did < 0);
                // confirm ScalarDS.close tolerates an invalid identifier.
                sd.close(did);
            } // if (newObj instanceof ScalarDS)
        }

        // Update the references in the scalar datasets in the dest file.
        H5ScalarDS d = null;
        int sid = -1, size = 0, rank = 0;
        int n = refDatasets.size();
        for (int i = 0; i < n; i++) {
            log.trace("Update the references in the scalar datasets in the dest file");
            d = (H5ScalarDS) refDatasets.get(i);
            byte[] buf = null;
            long[] refs = null;

            try {
                did = d.open();
                if (did >= 0) {
                    tid = H5.H5Dget_type(did);
                    sid = H5.H5Dget_space(did);
                    rank = H5.H5Sget_simple_extent_ndims(sid);
                    size = 1;
                    if (rank > 0) {
                        long[] dims = new long[rank];
                        H5.H5Sget_simple_extent_dims(sid, dims, null);
                        // element count = product of the dimension sizes
                        for (int j = 0; j < rank; j++) {
                            size *= (int) dims[j];
                        }
                        dims = null;
                    }

                    // 8 bytes per element — assumes object references are 64-bit,
                    // matching the byteToLong conversion below.
                    buf = new byte[size * 8];
                    H5.H5Dread(did, tid, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, buf);

                    // update the ref values
                    refs = HDFNativeData.byteToLong(buf);
                    size = refs.length;
                    for (int j = 0; j < size; j++) {
                        // References with no mapping (objects outside the copy)
                        // are left unchanged.
                        long[] theOID = oidMap.get(String.valueOf(refs[j]));
                        if (theOID != null) {
                            refs[j] = theOID[0];
                        }
                    }

                    // write back to file
                    H5.H5Dwrite(did, tid, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, refs);
                }
                else {
                    log.debug("dest file dataset failed to open");
                }
            }
            catch (Exception ex) {
                // NOTE(review): best-effort — a failure on one dataset is silently
                // skipped and the remaining datasets are still processed.
                continue;
            }
            finally {
                // Release handles for this dataset regardless of outcome.
                try {
                    H5.H5Tclose(tid);
                }
                catch (Exception ex) {
                    log.debug("H5ScalarDS reference[{}] H5Tclose failure: ", i, ex);
                }
                try {
                    H5.H5Sclose(sid);
                }
                catch (Exception ex) {
                    log.debug("H5ScalarDS reference[{}] H5Sclose failure: ", i, ex);
                }
                try {
                    H5.H5Dclose(did);
                }
                catch (Exception ex) {
                    log.debug("H5ScalarDS reference[{}] H5Dclose failure: ", i, ex);
                }
            }

            refs = null;
            buf = null;
        } // for (int i=0; i<n; i++)
    }

    /***************************************************************************
     * Implementation Class methods. These methods are related to the implementing H5File class, but not to a particular
     * instance of the class. Since we can't override class methods (they can only be shadowed in Java), these are
     * instance methods.
     **************************************************************************/

    /**
     * Returns the version of the HDF5 library.
     *
     * @see hdf.object.FileFormat#getLibversion()
     */
    @Override
    public String getLibversion() {
        int[] vers = new int[3];
        String ver = "HDF5 ";

        try {
            H5.H5get_libversion(vers);
        }
        catch (Throwable ex) {
            // NOTE(review): prints to stderr instead of using the class logger;
            // on failure the version reported is "HDF5 0.0.0".
            ex.printStackTrace();
        }

        ver += vers[0] + "." + vers[1] + "." + vers[2];
        log.debug("libversion is {}", ver);

        return ver;
    }

    /**
     * Checks if the specified FileFormat instance has the HDF5 format.
     *
     * @see hdf.object.FileFormat#isThisType(hdf.object.FileFormat)
     */
    @Override
    public boolean isThisType(FileFormat theFile) {
        return (theFile instanceof H5File);
    }

    /**
     * Checks if the specified file has the HDF5 format.
856 * 857 * @see hdf.object.FileFormat#isThisType(java.lang.String) 858 */ 859 @Override 860 public boolean isThisType(String filename) { 861 boolean isH5 = false; 862 863 try { 864 isH5 = H5.H5Fis_hdf5(filename); 865 } 866 catch (HDF5Exception ex) { 867 isH5 = false; 868 } 869 870 return isH5; 871 } 872 873 /** 874 * Creates an HDF5 file with the specified name and returns a new H5File instance associated with the file. 875 * 876 * @throws HDF5Exception 877 * If the file cannot be created or if createFlag has unexpected value. 878 * @see hdf.object.FileFormat#createFile(java.lang.String, int) 879 * @see #H5File(String, int) 880 */ 881 @Override 882 public FileFormat createFile(String filename, int createFlag) throws Exception { 883 // Flag if we need to create or truncate the file. 884 Boolean doCreateFile = true; 885 886 // Won't create or truncate if CREATE_OPEN specified and file exists 887 if ((createFlag & FILE_CREATE_OPEN) == FILE_CREATE_OPEN) { 888 File f = new File(filename); 889 if (f.exists()) { 890 doCreateFile = false; 891 } 892 } 893 894 if (doCreateFile) { 895 896 int fapl = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS); 897 ; 898 899 if ((createFlag & FILE_CREATE_EARLY_LIB) != FILE_CREATE_EARLY_LIB) { 900 H5.H5Pset_libver_bounds(fapl, HDF5Constants.H5F_LIBVER_LATEST, HDF5Constants.H5F_LIBVER_LATEST); 901 } 902 903 int fileid = H5.H5Fcreate(filename, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, fapl); 904 try { 905 H5.H5Pclose(fapl); 906 H5.H5Fclose(fileid); 907 } 908 catch (HDF5Exception ex) { 909 log.debug("H5 file, {} failure: ", filename, ex); 910 } 911 } 912 913 return new H5File(filename, WRITE); 914 } 915 916 /** 917 * Creates an H5File instance with specified file name and access. 
918 * <p> 919 * 920 * @see hdf.object.FileFormat#createInstance(java.lang.String, int) 921 * @see #H5File(String, int) 922 */ 923 @Override 924 public FileFormat createInstance(String filename, int access) throws Exception { 925 return new H5File(filename, access); 926 } 927 928 /*************************************************************************** 929 * Instance Methods 930 * 931 * These methods are related to the H5File class and to particular instances of objects with this class type. 932 **************************************************************************/ 933 934 /** 935 * Opens file and returns a file identifier. 936 * 937 * @see hdf.object.FileFormat#open() 938 */ 939 @Override 940 public int open() throws Exception { 941 return open(true); 942 } 943 944 /** 945 * Opens file and returns a file identifier. 946 * 947 * @see hdf.object.FileFormat#open(int...) 948 */ 949 @Override 950 public int open(int... propList) throws Exception { 951 setIndexType(propList[0]); 952 return open(true); 953 } 954 955 /** 956 * Sets the bounds of library versions. 957 * 958 * @param low 959 * The earliest version of the library. 960 * @param high 961 * The latest version of the library. 962 * @throws HDF5Exception 963 */ 964 public void setLibBounds(int low, int high) throws Exception { 965 int fapl = HDF5Constants.H5P_DEFAULT; 966 967 if (fid < 0) 968 return; 969 970 fapl = H5.H5Fget_access_plist(fid); 971 972 try { 973 if (low < 0) 974 low = HDF5Constants.H5F_LIBVER_EARLIEST; 975 976 if (high < 0) 977 high = HDF5Constants.H5F_LIBVER_LATEST; 978 979 H5.H5Pset_libver_bounds(fapl, low, high); 980 H5.H5Pget_libver_bounds(fapl, libver); 981 } 982 finally { 983 try { 984 H5.H5Pclose(fapl); 985 } 986 catch (Exception e) { 987 log.debug("libver bounds H5Pclose failure: ", e); 988 } 989 } 990 } 991 992 /** 993 * Gets the bounds of library versions. 994 * 995 * @return libver The earliest and latest version of the library. 
     * @throws HDF5Exception
     */
    public int[] getLibBounds() throws Exception {
        return libver;
    }

    /**
     * Closes file associated with this H5File instance.
     *
     * @see hdf.object.FileFormat#close()
     * @throws HDF5Exception
     */
    @Override
    public void close() throws HDF5Exception {
        if (fid < 0) {
            log.debug("file {} is not open", fullFileName);
            return;
        }
        // The current working directory may be changed at Dataset.read()
        // by H5Dchdir_ext()by this file to make it work for external
        // datasets. We need to set it back to the original current working
        // directory (when hdf-java application started) before the file
        // is closed/opened. Otherwise, relative path, e.g. "./test.h5" may
        // not work
        String rootPath = System.getProperty("hdfview.workdir");
        if (rootPath == null) {
            rootPath = System.getProperty("user.dir");
        }
        H5.H5Dchdir_ext(rootPath);

        // clean up unused objects
        // Walk the in-memory tree and release cached data held by each node.
        if (rootNode != null) {
            DefaultMutableTreeNode theNode = null;
            HObject theObj = null;
            Enumeration<?> local_enum = (rootNode).breadthFirstEnumeration();
            while (local_enum.hasMoreElements()) {
                theNode = (DefaultMutableTreeNode) local_enum.nextElement();
                theObj = (HObject) theNode.getUserObject();

                if (theObj instanceof Dataset) {
                    ((Dataset) theObj).clear();
                }
                else if (theObj instanceof Group) {
                    ((Group) theObj).clear();
                }
            }
        }

        // Close all open objects associated with this file.
        // Each identifier is closed individually by its H5I type; failures are
        // logged and skipped so one stuck object cannot prevent the file close.
        try {
            int n = 0, type = -1, oids[];
            n = H5.H5Fget_obj_count(fid, HDF5Constants.H5F_OBJ_ALL);

            if (n > 0) {
                oids = new int[n];
                H5.H5Fget_obj_ids(fid, HDF5Constants.H5F_OBJ_ALL, n, oids);

                for (int i = 0; i < n; i++) {
                    type = H5.H5Iget_type(oids[i]);

                    if (HDF5Constants.H5I_DATASET == type) {
                        try {
                            H5.H5Dclose(oids[i]);
                        }
                        catch (Exception ex2) {
                            log.debug("Object[{}] H5Dclose failure: ", i, ex2);
                        }
                    }
                    else if (HDF5Constants.H5I_GROUP == type) {
                        try {
                            H5.H5Gclose(oids[i]);
                        }
                        catch (Exception ex2) {
                            log.debug("Object[{}] H5Gclose failure: ", i, ex2);
                        }
                    }
                    else if (HDF5Constants.H5I_DATATYPE == type) {
                        try {
                            H5.H5Tclose(oids[i]);
                        }
                        catch (Exception ex2) {
                            log.debug("Object[{}] H5Tclose failure: ", i, ex2);
                        }
                    }
                    else if (HDF5Constants.H5I_ATTR == type) {
                        try {
                            H5.H5Aclose(oids[i]);
                        }
                        catch (Exception ex2) {
                            log.debug("Object[{}] H5Aclose failure: ", i, ex2);
                        }
                    }
                } // for (int i=0; i<n; i++)
            } // if ( n>0)
        }
        catch (Exception ex) {
            log.debug("close open objects failure: ", ex);
        }

        // Flush everything to disk before closing; a flush failure is logged,
        // not propagated, so the close below is still attempted.
        try {
            H5.H5Fflush(fid, HDF5Constants.H5F_SCOPE_GLOBAL);
        }
        catch (Exception ex) {
            log.debug("H5Fflush failure: ", ex);
        }

        try {
            H5.H5Fclose(fid);
        }
        catch (Exception ex) {
            log.debug("H5Fclose failure: ", ex);
        }

        // Set fid to -1 but don't reset rootNode
        fid = -1;
    }

    /**
     * Returns the root node of the open HDF5 File.
1115 * 1116 * @see hdf.object.FileFormat#getRootNode() 1117 */ 1118 @Override 1119 public TreeNode getRootNode() { 1120 return rootNode; 1121 } 1122 1123 /* 1124 * (non-Javadoc) 1125 * 1126 * @see hdf.object.FileFormat#get(java.lang.String) 1127 */ 1128 @Override 1129 public HObject get(String path) throws Exception { 1130 HObject obj = null; 1131 1132 if ((path == null) || (path.length() <= 0)) { 1133 System.err.println("(path == null) || (path.length() <= 0)"); 1134 return null; 1135 } 1136 1137 // replace the wrong slash and get rid of "//" 1138 path = path.replace('\\', '/'); 1139 path = "/" + path; 1140 path = path.replaceAll("//", "/"); 1141 1142 // the whole file tree is loaded. find the object in the tree 1143 if (rootNode != null) { 1144 obj = findObject(this, path); 1145 } 1146 1147 // found object in memory 1148 if (obj != null) { 1149 return obj; 1150 } 1151 1152 // open only the requested object 1153 String name = null, pPath = null; 1154 if (path.equals("/")) { 1155 name = "/"; // the root 1156 } 1157 else { 1158 // separate the parent path and the object name 1159 if (path.endsWith("/")) { 1160 path = path.substring(0, path.length() - 1); 1161 } 1162 1163 int idx = path.lastIndexOf('/'); 1164 name = path.substring(idx + 1); 1165 if (idx == 0) { 1166 pPath = "/"; 1167 } 1168 else { 1169 pPath = path.substring(0, idx); 1170 } 1171 } 1172 1173 // do not open the full tree structure, only the file handler 1174 int fid_before_open = fid; 1175 fid = open(false); 1176 if (fid < 0) { 1177 System.err.println("Could not open file handler"); 1178 return null; 1179 } 1180 1181 try { 1182 H5O_info_t info; 1183 int objType; 1184 int oid = H5.H5Oopen(fid, path, HDF5Constants.H5P_DEFAULT); 1185 1186 if (oid >= 0) { 1187 info = H5.H5Oget_info(oid); 1188 objType = info.type; 1189 if (objType == HDF5Constants.H5O_TYPE_DATASET) { 1190 int did = -1; 1191 try { 1192 did = H5.H5Dopen(fid, path, HDF5Constants.H5P_DEFAULT); 1193 obj = getDataset(did, name, pPath); 1194 } 
1195 finally { 1196 try { 1197 H5.H5Dclose(did); 1198 } 1199 catch (Exception ex) { 1200 log.debug("{} H5Dclose failure: ", path, ex); 1201 } 1202 } 1203 } 1204 else if (objType == HDF5Constants.H5O_TYPE_GROUP) { 1205 int gid = -1; 1206 try { 1207 gid = H5.H5Gopen(fid, path, HDF5Constants.H5P_DEFAULT); 1208 H5Group pGroup = null; 1209 if (pPath != null) { 1210 pGroup = new H5Group(this, null, pPath, null); 1211 obj = getGroup(gid, name, pGroup); 1212 pGroup.addToMemberList(obj); 1213 } 1214 else { 1215 obj = getGroup(gid, name, pGroup); 1216 } 1217 } 1218 finally { 1219 try { 1220 H5.H5Gclose(gid); 1221 } 1222 catch (Exception ex) { 1223 log.debug("{} H5Gclose failure: ", path, ex); 1224 } 1225 } 1226 } 1227 else if (objType == HDF5Constants.H5O_TYPE_NAMED_DATATYPE) { 1228 obj = new H5Datatype(this, name, pPath); 1229 } 1230 } 1231 try { 1232 H5.H5Oclose(oid); 1233 } 1234 catch (Exception ex) { 1235 ex.printStackTrace(); 1236 } 1237 } 1238 catch (Exception ex) { 1239 log.debug("Exception finding obj {}", path); 1240 obj = null; 1241 } 1242 finally { 1243 if ((fid_before_open <= 0) && (obj == null)) { 1244 // close the fid that is not attached to any object 1245 try { 1246 H5.H5Fclose(fid); 1247 } 1248 catch (Exception ex) { 1249 log.debug("[] H5Fclose failure: ", path, ex); 1250 } 1251 fid = fid_before_open; 1252 } 1253 } 1254 1255 return obj; 1256 } 1257 1258 /* 1259 * (non-Javadoc) 1260 * 1261 * @see hdf.object.FileFormat#createDatatype(int, int, int, int, java.lang.String) 1262 */ 1263 @Override 1264 public Datatype createDatatype(int tclass, int tsize, int torder, int tsign, String name) throws Exception { 1265 return createDatatype(tclass, tsize, torder, tsign, null, name); 1266 } 1267 1268 /* 1269 * (non-Javadoc) 1270 * 1271 * @see hdf.object.FileFormat#createDatatype(int, int, int, int, Datatype, java.lang.String) 1272 */ 1273 @Override 1274 public Datatype createDatatype(int tclass, int tsize, int torder, int tsign, Datatype tbase, String name) 1275 throws 
Exception { 1276 int tid = -1; 1277 H5Datatype dtype = null; 1278 1279 log.trace("createDatatype with name={} start", name); 1280 try { 1281 H5Datatype t = (H5Datatype) createDatatype(tclass, tsize, torder, tsign, tbase); 1282 if ((tid = t.toNative()) < 0) 1283 throw new Exception("toNative failed"); 1284 1285 H5.H5Tcommit(fid, name, tid, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, 1286 HDF5Constants.H5P_DEFAULT); 1287 1288 byte[] ref_buf = H5.H5Rcreate(fid, name, HDF5Constants.H5R_OBJECT, -1); 1289 long l = HDFNativeData.byteToLong(ref_buf, 0); 1290 1291 long[] oid = new long[1]; 1292 oid[0] = l; // save the object ID 1293 1294 dtype = new H5Datatype(this, null, name); 1295 1296 } 1297 finally { 1298 H5.H5Tclose(tid); 1299 } 1300 1301 log.trace("createDatatype with name={} finish", name); 1302 return dtype; 1303 } 1304 1305 /*************************************************************************** 1306 * Methods related to Datatypes and HObjects in HDF5 Files. Strictly speaking, these methods aren't related to 1307 * H5File and the actions could be carried out through the H5Group, H5Datatype and H5*DS classes. But, in some cases 1308 * they allow a null input and expect the generated object to be of HDF5 type. So, we put them in the H5File class 1309 * so that we create the proper type of HObject... H5Group for example. 1310 * 1311 * Here again, if there could be Implementation Class methods we'd use those. But, since we can't override class 1312 * methods (they can only be shadowed in Java), these are instance methods. 
1313 * 1314 **************************************************************************/ 1315 1316 /* 1317 * (non-Javadoc) 1318 * 1319 * @see hdf.object.FileFormat#createDatatype(int, int, int, int) 1320 */ 1321 @Override 1322 public Datatype createDatatype(int tclass, int tsize, int torder, int tsign) throws Exception { 1323 log.trace("createDatatype"); 1324 return new H5Datatype(tclass, tsize, torder, tsign); 1325 } 1326 1327 /* 1328 * (non-Javadoc) 1329 * 1330 * @see hdf.object.FileFormat#createDatatype(int, int, int, int, Datatype) 1331 */ 1332 @Override 1333 public Datatype createDatatype(int tclass, int tsize, int torder, int tsign, Datatype tbase) throws Exception { 1334 log.trace("createDatatype with base"); 1335 return new H5Datatype(tclass, tsize, torder, tsign, tbase); 1336 } 1337 1338 /* 1339 * (non-Javadoc) 1340 * 1341 * @see hdf.object.FileFormat#createScalarDS(java.lang.String, hdf.object.Group, hdf.object.Datatype, 1342 * long[], long[], long[], int, java.lang.Object) 1343 */ 1344 @Override 1345 public Dataset createScalarDS(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims, long[] chunks, 1346 int gzip, Object fillValue, Object data) throws Exception { 1347 if (pgroup == null) { 1348 // create new dataset at the root group by default 1349 pgroup = (Group) get("/"); 1350 } 1351 1352 log.trace("createScalarDS name={}", name); 1353 return H5ScalarDS.create(name, pgroup, type, dims, maxdims, chunks, gzip, fillValue, data); 1354 } 1355 1356 /* 1357 * (non-Javadoc) 1358 * 1359 * @see hdf.object.FileFormat#createCompoundDS(java.lang.String, hdf.object.Group, long[], long[], long[], 1360 * int, java.lang.String[], hdf.object.Datatype[], int[], java.lang.Object) 1361 */ 1362 @Override 1363 public Dataset createCompoundDS(String name, Group pgroup, long[] dims, long[] maxdims, long[] chunks, int gzip, 1364 String[] memberNames, Datatype[] memberDatatypes, int[] memberSizes, Object data) throws Exception { 1365 int nMembers = 
memberNames.length; 1366 int memberRanks[] = new int[nMembers]; 1367 long memberDims[][] = new long[nMembers][1]; 1368 Dataset ds = null; 1369 1370 for (int i = 0; i < nMembers; i++) { 1371 memberRanks[i] = 1; 1372 if (memberSizes == null) { 1373 memberDims[i][0] = 1; 1374 } 1375 else { 1376 memberDims[i][0] = memberSizes[i]; 1377 } 1378 } 1379 1380 if (pgroup == null) { 1381 // create new dataset at the root group by default 1382 pgroup = (Group) get("/"); 1383 } 1384 log.trace("createCompoundDS name={}", name); 1385 ds = H5CompoundDS.create(name, pgroup, dims, maxdims, chunks, gzip, memberNames, memberDatatypes, memberRanks, 1386 memberDims, data); 1387 1388 return ds; 1389 } 1390 1391 /* 1392 * (non-Javadoc) 1393 * 1394 * @see hdf.object.FileFormat#createImage(java.lang.String, hdf.object.Group, hdf.object.Datatype, 1395 * long[], long[], long[], int, int, int, java.lang.Object) 1396 */ 1397 @Override 1398 public Dataset createImage(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims, long[] chunks, 1399 int gzip, int ncomp, int interlace, Object data) throws Exception { 1400 if (pgroup == null) { // create at the root group by default 1401 pgroup = (Group) get("/"); 1402 } 1403 1404 H5ScalarDS dataset = (H5ScalarDS)H5ScalarDS.create(name, pgroup, type, dims, maxdims, chunks, gzip, data); 1405 1406 try { 1407 H5File.createImageAttributes(dataset, interlace); 1408 dataset.setIsImage(true); 1409 } 1410 catch (Exception ex) { 1411 log.debug("{} createImageAttributtes failure: ", name, ex); 1412 } 1413 1414 return dataset; 1415 } 1416 1417 /*** 1418 * Creates a new group with specified name in existing group. 
1419 * 1420 * @see hdf.object.FileFormat#createGroup(java.lang.String, hdf.object.Group) 1421 */ 1422 @Override 1423 public Group createGroup(String name, Group pgroup) throws Exception { 1424 return this.createGroup(name, pgroup, HDF5Constants.H5P_DEFAULT); 1425 1426 } 1427 1428 /*** 1429 * Creates a new group with specified name in existing group and with the group creation properties list, gplist. 1430 * 1431 * @see hdf.object.h5.H5Group#create(java.lang.String, hdf.object.Group, int...) 1432 * 1433 */ 1434 public Group createGroup(String name, Group pgroup, int... gplist) throws Exception { 1435 // create new group at the root 1436 if (pgroup == null) { 1437 pgroup = (Group) this.get("/"); 1438 } 1439 1440 return H5Group.create(name, pgroup, gplist); 1441 } 1442 1443 /*** 1444 * Creates the group creation property list identifier, gcpl. This identifier is used when creating Groups. 1445 * 1446 * @see hdf.object.FileFormat#createGcpl(int, int, int) 1447 * 1448 */ 1449 public int createGcpl(int creationorder, int maxcompact, int mindense) throws Exception { 1450 int gcpl = -1; 1451 try { 1452 gcpl = H5.H5Pcreate(HDF5Constants.H5P_GROUP_CREATE); 1453 if (gcpl >= 0) { 1454 // Set link creation order. 1455 if (creationorder == Group.CRT_ORDER_TRACKED) { 1456 H5.H5Pset_link_creation_order(gcpl, HDF5Constants.H5P_CRT_ORDER_TRACKED); 1457 } 1458 else if (creationorder == Group.CRT_ORDER_INDEXED) { 1459 H5.H5Pset_link_creation_order(gcpl, HDF5Constants.H5P_CRT_ORDER_TRACKED 1460 + HDF5Constants.H5P_CRT_ORDER_INDEXED); 1461 } 1462 // Set link storage. 
1463 H5.H5Pset_link_phase_change(gcpl, maxcompact, mindense); 1464 } 1465 } 1466 catch (Exception ex) { 1467 ex.printStackTrace(); 1468 } 1469 1470 return gcpl; 1471 } 1472 1473 /* 1474 * (non-Javadoc) 1475 * 1476 * @see hdf.object.FileFormat#createLink(hdf.object.Group, java.lang.String, hdf.object.HObject) 1477 */ 1478 @Override 1479 public HObject createLink(Group parentGroup, String name, Object currentObj) throws Exception { 1480 if (currentObj instanceof HObject) 1481 return this.createLink(parentGroup, name, (HObject) currentObj, Group.LINK_TYPE_HARD); 1482 else if (currentObj instanceof String) 1483 return this.createLink(parentGroup, name, (String) currentObj, Group.LINK_TYPE_HARD); 1484 1485 return null; 1486 } 1487 1488 /** 1489 * Creates a link to an object in the open file. 1490 * <p> 1491 * If parentGroup is null, the new link is created in the root group. 1492 * 1493 * @param parentGroup 1494 * The group where the link is created. 1495 * @param name 1496 * The name of the link. 1497 * @param currentObj 1498 * The existing object the new link will reference. 1499 * @param lType 1500 * The type of link to be created. It can be a hard link, a soft link or an external link. 1501 * @return The object pointed to by the new link if successful; otherwise returns null. 1502 * @throws Exception 1503 * The exceptions thrown vary depending on the implementing class. 
     */
    public HObject createLink(Group parentGroup, String name, HObject currentObj, int lType) throws Exception {
        HObject obj = null;
        int type = 0;
        String current_full_name = null, new_full_name = null, parent_path = null;

        if (currentObj == null) {
            throw new HDF5Exception("The object pointed by the link cannot be null.");
        }
        // Resolve the parent path: root when no parent group is given.
        if ((parentGroup == null) || parentGroup.isRoot()) {
            parent_path = HObject.separator;
        }
        else {
            parent_path = parentGroup.getPath() + HObject.separator + parentGroup.getName() + HObject.separator;
        }

        new_full_name = parent_path + name;

        // Map the FileFormat-level link type to the native H5L constant.
        if (lType == Group.LINK_TYPE_HARD)
            type = HDF5Constants.H5L_TYPE_HARD;

        else if (lType == Group.LINK_TYPE_SOFT)
            type = HDF5Constants.H5L_TYPE_SOFT;

        else if (lType == Group.LINK_TYPE_EXTERNAL)
            type = HDF5Constants.H5L_TYPE_EXTERNAL;

        // An existing link with the same full name is replaced.
        if (H5.H5Lexists(fid, new_full_name, HDF5Constants.H5P_DEFAULT)) {
            H5.H5Ldelete(fid, new_full_name, HDF5Constants.H5P_DEFAULT);
        }

        if (type == HDF5Constants.H5L_TYPE_HARD) {
            if ((currentObj instanceof Group) && ((Group) currentObj).isRoot()) {
                throw new HDF5Exception("Cannot make a link to the root group.");
            }
            current_full_name = currentObj.getPath() + HObject.separator + currentObj.getName();

            H5.H5Lcreate_hard(fid, current_full_name, fid, new_full_name, HDF5Constants.H5P_DEFAULT,
                    HDF5Constants.H5P_DEFAULT);
        }

        else if (type == HDF5Constants.H5L_TYPE_SOFT) {
            H5.H5Lcreate_soft(currentObj.getFullName(), fid, new_full_name, HDF5Constants.H5P_DEFAULT,
                    HDF5Constants.H5P_DEFAULT);
        }

        else if (type == HDF5Constants.H5L_TYPE_EXTERNAL) {
            // External link: target lives in another file, identified by file + object name.
            H5.H5Lcreate_external(currentObj.getFile(), currentObj.getFullName(), fid, new_full_name,
                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
        }

        // Wrap the new link in an HObject of the same concrete kind as the target.
        if (currentObj instanceof Group) {
            obj = new H5Group(this, name, parent_path, parentGroup);
        }
        else if (currentObj instanceof H5Datatype) {
            obj = new H5Datatype(this, name, parent_path);
        }
        else if (currentObj instanceof H5CompoundDS) {
            obj = new H5CompoundDS(this, name, parent_path);
        }
        else if (currentObj instanceof H5ScalarDS) {
            obj = new H5ScalarDS(this, name, parent_path);
        }
        return obj;
    }

    /**
     * Creates a soft or external links to objects in a file that do not exist at the time the link is created.
     *
     * @param parentGroup
     *            The group where the link is created.
     * @param name
     *            The name of the link.
     * @param currentObj
     *            The name of the object the new link will reference. The object doesn't have to exist.
     * @param lType
     *            The type of link to be created.
     * @return The H5Link object pointed to by the new link if successful; otherwise returns null.
     * @throws Exception
     *             The exceptions thrown vary depending on the implementing class.
1584 */ 1585 public HObject createLink(Group parentGroup, String name, String currentObj, int lType) throws Exception { 1586 HObject obj = null; 1587 int type = 0; 1588 String new_full_name = null, parent_path = null; 1589 1590 if (currentObj == null) { 1591 throw new HDF5Exception("The object pointed by the link cannot be null."); 1592 } 1593 if ((parentGroup == null) || parentGroup.isRoot()) { 1594 parent_path = HObject.separator; 1595 } 1596 else { 1597 parent_path = parentGroup.getPath() + HObject.separator + parentGroup.getName() + HObject.separator; 1598 } 1599 1600 new_full_name = parent_path + name; 1601 1602 if (lType == Group.LINK_TYPE_HARD) 1603 type = HDF5Constants.H5L_TYPE_HARD; 1604 1605 else if (lType == Group.LINK_TYPE_SOFT) 1606 type = HDF5Constants.H5L_TYPE_SOFT; 1607 1608 else if (lType == Group.LINK_TYPE_EXTERNAL) 1609 type = HDF5Constants.H5L_TYPE_EXTERNAL; 1610 1611 if (H5.H5Lexists(fid, new_full_name, HDF5Constants.H5P_DEFAULT)) { 1612 H5.H5Ldelete(fid, new_full_name, HDF5Constants.H5P_DEFAULT); 1613 } 1614 1615 if (type == HDF5Constants.H5L_TYPE_SOFT) { 1616 H5.H5Lcreate_soft(currentObj, fid, new_full_name, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 1617 } 1618 1619 else if (type == HDF5Constants.H5L_TYPE_EXTERNAL) { 1620 String fileName = null; 1621 String objectName = null; 1622 1623 // separate the object name and the file name 1624 fileName = currentObj.substring(0, currentObj.lastIndexOf(FileFormat.FILE_OBJ_SEP)); 1625 objectName = currentObj.substring(currentObj.indexOf(FileFormat.FILE_OBJ_SEP)); 1626 objectName = objectName.substring(3); 1627 1628 H5.H5Lcreate_external(fileName, objectName, fid, new_full_name, HDF5Constants.H5P_DEFAULT, 1629 HDF5Constants.H5P_DEFAULT); 1630 } 1631 1632 if (name.startsWith(HObject.separator)) { 1633 name = name.substring(1); 1634 } 1635 obj = new H5Link(this, name, parent_path); 1636 1637 return obj; 1638 } 1639 1640 /** 1641 * reload the sub-tree structure from file. 
1642 * <p> 1643 * reloadTree(Group g) is useful when the structure of the group in file is changed while the group structure in 1644 * memory is not changed. 1645 * 1646 * @param g 1647 * the group where the structure is to be reloaded in memory 1648 */ 1649 public void reloadTree(Group g) { 1650 if (fid < 0 || rootNode == null || g == null) 1651 return; 1652 1653 HObject theObj = null; 1654 DefaultMutableTreeNode theNode = null; 1655 1656 if (g.equals(rootNode.getUserObject())) 1657 theNode = rootNode; 1658 else { 1659 Enumeration<?> local_enum = rootNode.breadthFirstEnumeration(); 1660 while (local_enum.hasMoreElements()) { 1661 theNode = (DefaultMutableTreeNode) local_enum.nextElement(); 1662 theObj = (HObject) theNode.getUserObject(); 1663 if (g.equals(theObj)) 1664 break; 1665 } 1666 } 1667 1668 theNode.removeAllChildren(); 1669 depth_first(theNode, Integer.MIN_VALUE); 1670 } 1671 1672 /* 1673 * (non-Javadoc) NOTE: Object references are copied but not updated by this method. 1674 * 1675 * @see hdf.object.FileFormat#copy(hdf.object.HObject, hdf.object.Group, java.lang.String) 1676 */ 1677 @Override 1678 public TreeNode copy(HObject srcObj, Group dstGroup, String dstName) throws Exception { 1679 TreeNode newNode = null; 1680 1681 if ((srcObj == null) || (dstGroup == null)) { 1682 return null; 1683 } 1684 1685 if (dstName == null) { 1686 dstName = srcObj.getName(); 1687 } 1688 1689 List<HObject> members = dstGroup.getMemberList(); 1690 int n = members.size(); 1691 for (int i = 0; i < n; i++) { 1692 HObject obj = (HObject) members.get(i); 1693 String name = obj.getName(); 1694 while (name.equals(dstName)) 1695 dstName += "~copy"; 1696 } 1697 1698 if (srcObj instanceof Dataset) { 1699 newNode = copyDataset((Dataset) srcObj, (H5Group) dstGroup, dstName); 1700 } 1701 else if (srcObj instanceof H5Group) { 1702 newNode = copyGroup((H5Group) srcObj, (H5Group) dstGroup, dstName); 1703 } 1704 else if (srcObj instanceof H5Datatype) { 1705 newNode = 
copyDatatype((H5Datatype) srcObj, (H5Group) dstGroup, dstName); 1706 } 1707 1708 return newNode; 1709 } 1710 1711 /* 1712 * (non-Javadoc) 1713 * 1714 * @see hdf.object.FileFormat#delete(hdf.object.HObject) 1715 */ 1716 @Override 1717 public void delete(HObject obj) throws Exception { 1718 if ((obj == null) || (fid < 0)) { 1719 return; 1720 } 1721 1722 String name = obj.getPath() + obj.getName(); 1723 1724 H5.H5Ldelete(fid, name, HDF5Constants.H5P_DEFAULT); 1725 } 1726 1727 /* 1728 * (non-Javadoc) 1729 * 1730 * @see hdf.object.FileFormat#writeAttribute(hdf.object.HObject, hdf.object.Attribute, boolean) 1731 */ 1732 @Override 1733 public void writeAttribute(HObject obj, Attribute attr, boolean attrExisted) throws HDF5Exception { 1734 String obj_name = obj.getFullName(); 1735 String name = attr.getName(); 1736 int tid = -1, sid = -1, aid = -1; 1737 log.trace("{} writeAttribute start", name); 1738 1739 int objID = obj.open(); 1740 if (objID < 0) { 1741 return; 1742 } 1743 1744 if ((tid = attr.getType().toNative()) >= 0) { 1745 log.trace("{} writeAttribute tid from native", name); 1746 try { 1747 if (attr.isScalar()) 1748 sid = H5.H5Screate(HDF5Constants.H5S_SCALAR); 1749 else 1750 sid = H5.H5Screate_simple(attr.getRank(), attr.getDataDims(), null); 1751 1752 if (attrExisted) { 1753 aid = H5.H5Aopen_by_name(objID, obj_name, name, HDF5Constants.H5P_DEFAULT, 1754 HDF5Constants.H5P_DEFAULT); 1755 } 1756 else { 1757 aid = H5.H5Acreate(objID, name, tid, sid, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); 1758 } 1759 log.trace("{} writeAttribute aid opened/created", name); 1760 1761 // update value of the attribute 1762 Object attrValue = attr.getValue(); 1763 log.trace("{} writeAttribute getvalue", name); 1764 if (attrValue != null) { 1765 boolean isVlen = (H5.H5Tget_class(tid) == HDF5Constants.H5T_VLEN || H5.H5Tis_variable_str(tid)); 1766 if (isVlen) { 1767 log.trace("{} writeAttribute isvlen", name); 1768 try { 1769 /* 1770 * must use native type to write attribute 
data to file (see bug 1069) 1771 */ 1772 int tmptid = tid; 1773 tid = H5.H5Tget_native_type(tmptid); 1774 try { 1775 H5.H5Tclose(tmptid); 1776 } 1777 catch (Exception ex) { 1778 log.debug("{} writeAttribute H5Tclose failure: ", name, ex); 1779 } 1780 log.trace("{} writeAttribute H5.H5AwriteVL", name); 1781 if ((attrValue instanceof String) || (attr.getDataDims().length == 1)) { 1782 H5.H5AwriteVL(aid, tid, (String[]) attrValue); 1783 } 1784 else { 1785 log.info("Datatype is not a string, unable to write {} data", name); 1786 } 1787 } 1788 catch (Exception ex) { 1789 log.debug("{} writeAttribute native type failure: ", name, ex); 1790 } 1791 } 1792 else { 1793 if (attr.getType().getDatatypeClass() == Datatype.CLASS_REFERENCE && attrValue instanceof String) { 1794 // reference is a path+name to the object 1795 attrValue = H5.H5Rcreate(getFID(), (String) attrValue, HDF5Constants.H5R_OBJECT, -1); 1796 log.trace("{} writeAttribute CLASS_REFERENCE", name); 1797 } 1798 else if (Array.get(attrValue, 0) instanceof String) { 1799 int size = H5.H5Tget_size(tid); 1800 int len = ((String[]) attrValue).length; 1801 byte[] bval = Dataset.stringToByte((String[]) attrValue, size); 1802 if (bval != null && bval.length == size * len) { 1803 bval[bval.length - 1] = 0; 1804 attrValue = bval; 1805 } 1806 log.trace("{} writeAttribute Array", name); 1807 } 1808 1809 try { 1810 /* 1811 * must use native type to write attribute data to file (see bug 1069) 1812 */ 1813 int tmptid = tid; 1814 tid = H5.H5Tget_native_type(tmptid); 1815 try { 1816 H5.H5Tclose(tmptid); 1817 } 1818 catch (Exception ex) { 1819 log.debug("{} writeAttribute H5Tclose failure: ", name, ex); 1820 } 1821 log.trace("{} writeAttribute H5.H5Awrite", name); 1822 H5.H5Awrite(aid, tid, attrValue); 1823 } 1824 catch (Exception ex) { 1825 log.debug("{} writeAttribute native type failure: ", name, ex); 1826 } 1827 } 1828 } // if (attrValue != null) { 1829 } 1830 finally { 1831 try { 1832 H5.H5Tclose(tid); 1833 } 1834 catch 
(Exception ex) { 1835 log.debug("{} writeAttribute H5Tclose failure: ", name, ex); 1836 } 1837 try { 1838 H5.H5Sclose(sid); 1839 } 1840 catch (Exception ex) { 1841 log.debug("{} writeAttribute H5Sclose failure: ", name, ex); 1842 } 1843 try { 1844 H5.H5Aclose(aid); 1845 } 1846 catch (Exception ex) { 1847 log.debug("{} writeAttribute H5Aclose failure: ", name, ex); 1848 } 1849 } 1850 } 1851 else { 1852 log.debug("{} writeAttribute toNative failure: ", name); 1853 } 1854 1855 obj.close(objID); 1856 log.trace("{} writeAttribute finish", name); 1857 } 1858 1859 /*************************************************************************** 1860 * Implementations for methods specific to H5File 1861 **************************************************************************/ 1862 1863 /** 1864 * Opens a file with specific file access property list. 1865 * <p> 1866 * This function does the same as "int open()" except the you can also pass an HDF5 file access property to file 1867 * open. For example, 1868 * 1869 * <pre> 1870 * // All open objects remaining in the file are closed then file is closed 1871 * int plist = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS); 1872 * H5.H5Pset_fclose_degree(plist, HDF5Constants.H5F_CLOSE_STRONG); 1873 * int fid = open(plist); 1874 * </pre> 1875 * 1876 * @param plist 1877 * a file access property list identifier. 1878 * @return the file identifier if successful; otherwise returns negative value. 1879 */ 1880 public int open(int plist) throws Exception { 1881 return open(true, plist); 1882 } 1883 1884 /*************************************************************************** 1885 * Private methods. 1886 **************************************************************************/ 1887 1888 /** 1889 * Opens access to this file. 1890 * 1891 * @param loadFullHierarchy 1892 * if true, load the full hierarchy into memory; otherwise just opens the file idenfitier. 1893 * @return the file identifier if successful; otherwise returns negative value. 
     */
    private int open(boolean loadFullHierarchy) throws Exception {
        // Delegates to open(boolean, int) using the default file access property list.
        int the_fid = -1;

        int plist = HDF5Constants.H5P_DEFAULT;

        /*
         * // BUG: HDF5Constants.H5F_CLOSE_STRONG does not flush cache try { //All open objects remaining in the file
         * are closed // then file is closed plist = H5.H5Pcreate (HDF5Constants.H5P_FILE_ACCESS);
         * H5.H5Pset_fclose_degree ( plist, HDF5Constants.H5F_CLOSE_STRONG); } catch (Exception ex) {;} the_fid =
         * open(loadFullHierarchy, plist); try { H5.H5Pclose(plist); } catch (Exception ex) {}
         */

        the_fid = open(loadFullHierarchy, plist);

        return the_fid;
    }

    /**
     * Opens access to this file.
     * <p>
     * Opening proceeds as: restore the working directory, validate the access flag
     * (creating the file first if H5F_ACC_CREAT), then H5Fopen with the requested
     * flag. On failure it retries read-only, and finally tries to open the name as
     * the first member of an HDF5 family file.
     *
     * @param loadFullHierarchy
     *            if true, load the full hierarchy into memory; otherwise just opens the file identifier.
     * @param plist
     *            a file access property list identifier.
     * @return the file identifier if successful; otherwise returns negative value.
     */
    private int open(boolean loadFullHierarchy, int plist) throws Exception {
        if (fid > 0) {
            return fid; // file is opened already
        }
        log.trace("open: loadFullHierarchy={} start", loadFullHierarchy);

        // The cwd may be changed at Dataset.read() by H5Dchdir_ext()
        // to make it work for external datasets. We need to set it back
        // before the file is closed/opened.
        String rootPath = System.getProperty("hdfview.workdir");
        if (rootPath == null) {
            rootPath = System.getProperty("user.dir");
        }
        H5.H5Dchdir_ext(rootPath);

        // check for valid file access permission
        if (flag < 0) {
            throw new HDF5Exception("Invalid access identifer -- " + flag);
        }
        else if (HDF5Constants.H5F_ACC_CREAT == flag) {
            // create a new file
            log.trace("open: create file");
            fid = H5.H5Fcreate(fullFileName, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
                    HDF5Constants.H5P_DEFAULT);
            H5.H5Fflush(fid, HDF5Constants.H5F_SCOPE_LOCAL);
            H5.H5Fclose(fid);
            // Fall through to the regular open below with read-write access.
            flag = HDF5Constants.H5F_ACC_RDWR;
        }
        else if (!exists()) {
            throw new HDF5Exception("File does not exist -- " + fullFileName);
        }
        else if (((flag == HDF5Constants.H5F_ACC_RDWR) || (flag == HDF5Constants.H5F_ACC_CREAT)) && !canWrite()) {
            throw new HDF5Exception("Cannot write file, try open as read-only -- " + fullFileName);
        }
        else if ((flag == HDF5Constants.H5F_ACC_RDONLY) && !canRead()) {
            throw new HDF5Exception("Cannot read file -- " + fullFileName);
        }

        try {
            log.trace("open: open file");
            fid = H5.H5Fopen(fullFileName, flag, plist);
        }
        catch (Exception ex) {
            // First fallback: retry read-only.
            try {
                fid = H5.H5Fopen(fullFileName, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
                isReadOnly = true;
            }
            catch (Exception ex2) {
                // try to see if it is a file family, always open a family file
                // from the first one since other files will not be recongized
                // as
                // an HDF5 file
                // Strip the trailing member-number from the name to build the
                // "name%d.ext" pattern required by the family driver.
                File tmpf = new File(fullFileName);
                String tmpname = tmpf.getName();
                int idx = tmpname.lastIndexOf(".");
                while (idx > 0) {
                    char c = tmpname.charAt(idx);
                    // NOTE(review): c >= '0' also accepts letters and most punctuation
                    // above '0' in ASCII; presumably this was meant to match digits
                    // only ('0'..'9') -- confirm before changing.
                    if (c >= '0')
                        idx--;
                    else
                        break;
                }

                if (idx > 0) {
                    tmpname = tmpname.substring(0, idx - 1) + "%d" + tmpname.substring(tmpname.lastIndexOf("."));
                    int pid = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
                    H5.H5Pset_fapl_family(pid, 0, HDF5Constants.H5P_DEFAULT);
                    // NOTE(review): pid is not closed if H5Fopen throws here --
                    // potential property-list leak on the failure path.
                    fid = H5.H5Fopen(tmpf.getParent() + File.separator + tmpname, flag, pid);
                    H5.H5Pclose(pid);
                }
            } /* catch (Exception ex) { */
        }

        if ((fid >= 0) && loadFullHierarchy) {
            // load the hierearchy of the file
            rootNode = loadTree();
        }

        log.trace("open: finish");
        return fid;
    }

    /**
     * Reads the file structure into memory (tree node)
     *
     * @return the root node of the file structure.
     */
    private DefaultMutableTreeNode loadTree() {
        if (fid < 0) {
            return null;
        }

        DefaultMutableTreeNode root = null;

        long[] rootOID = { 0 };
        H5Group rootGroup = new H5Group(this, "/", null, // root node does not
                // have a parent path
                null); // root node does not have a parent node

        // Anonymous subclass: the root is always displayed as expandable.
        root = new DefaultMutableTreeNode(rootGroup) {
            private static final long serialVersionUID = 991382067363411723L;

            @Override
            public boolean isLeaf() {
                return false;
            }
        };

        depth_first(root, 0); // reload all

        return root;
    }

    /**
     * Retrieves the file structure by depth-first order, recursively. The current implementation retrieves group and
     * dataset only. It does not include named datatype and soft links.
     * <p>
     * It also detects and stops loops. A loop is detected if there exists object with the same object ID by tracing
     * path back up to the root.
     * <p>
     *
     * @param parentNode
     *            the parent node.
     * @param nTotal
     *            running count of objects loaded so far (used with start/max member limits).
     * @return the updated running count of loaded objects.
     */
    private int depth_first(MutableTreeNode parentNode, int nTotal) {
        int nelems;
        MutableTreeNode node = null;
        String fullPath = null;
        String ppath = null;
        DefaultMutableTreeNode pnode = (DefaultMutableTreeNode) parentNode;
        int gid = -1;
        log.trace("depth_first: start");

        H5Group pgroup = (H5Group) (pnode.getUserObject());
        ppath = pgroup.getPath();

        // a null parent path means this is the root group
        if (ppath == null) {
            fullPath = HObject.separator;
        }
        else {
            fullPath = ppath + pgroup.getName() + HObject.separator;
        }

        nelems = 0;
        try {
            gid = pgroup.open();
            H5G_info_t info = H5.H5Gget_info(gid);
            nelems = (int) info.nlinks;
        }
        catch (HDF5Exception ex) {
            nelems = -1;
            log.debug("H5Gget_info: ", ex);
        }

        if (nelems <= 0) {
            // empty group (or info failure): nothing to traverse
            pgroup.close(gid);
            return nTotal;
        }

        // since each call of H5.H5Gget_objname_by_idx() takes about one second.
        // 1,000,000 calls take 12 days. Instead of calling it in a loop,
        // we use only one call to get all the information, which takes about
        // two seconds
        int[] objTypes = new int[nelems];
        long[] fNos = new long[nelems];
        long[] objRefs = new long[nelems];
        String[] objNames = new String[nelems];

        try {
            H5.H5Gget_obj_info_full(fid, fullPath, objNames, objTypes, null, fNos, objRefs, indexType, indexOrder);
        }
        catch (HDF5Exception ex) {
            // NOTE(review): printStackTrace instead of the logger — inconsistent
            // with the rest of this class; left unchanged in this pass.
            ex.printStackTrace();
            return nTotal;
        }

        int nStart = getStartMembers();
        int nMax = getMaxMembers();

        String obj_name;
        int obj_type;

        // Iterate through the file to see members of the group
        for (int i = 0; i < nelems; i++) {
            obj_name = objNames[i];
            obj_type = objTypes[i];
            log.trace("depth_first: obj_name={}, obj_type={}", obj_name, obj_type);
            // oid = { object reference, file number } — used for loop detection
            long oid[] = { objRefs[i], fNos[i] };

            if (obj_name == null) {
                continue;
            }

            nTotal++;

            if (nMax > 0) {
                if ((nTotal - nStart) >= nMax)
                    break; // loaded enough objects
            }

            // objects before the start index are counted but not loaded
            boolean skipLoad = false;
            if ((nTotal > 0) && (nTotal < nStart))
                skipLoad = true;

            // create a new group
            if (obj_type == HDF5Constants.H5O_TYPE_GROUP) {
                H5Group g = new H5Group(this, obj_name, fullPath, pgroup, oid); // deprecated!
                node = new DefaultMutableTreeNode(g) {
                    private static final long serialVersionUID = 5139629211215794015L;

                    @Override
                    public boolean isLeaf() {
                        return false;
                    }
                };
                pnode.add(node);
                pgroup.addToMemberList(g);

                // detect and stop loops
                // a loop is detected if there exists object with the same
                // object ID by tracing path back up to the root.
                boolean hasLoop = false;
                HObject tmpObj = null;
                DefaultMutableTreeNode tmpNode = pnode;

                while (tmpNode != null) {
                    tmpObj = (HObject) tmpNode.getUserObject();

                    // the getPath() null check excludes the root group from matching
                    if (tmpObj.equalsOID(oid) && !(tmpObj.getPath() == null)) {
                        hasLoop = true;
                        break;
                    }
                    else {
                        tmpNode = (DefaultMutableTreeNode) tmpNode.getParent();
                    }
                }

                // recursively go through the next group
                // stops if it has loop.
                if (!hasLoop) {
                    nTotal = depth_first(node, nTotal);
                }
            }
            else if (skipLoad) {
                continue;
            }
            else if (obj_type == HDF5Constants.H5O_TYPE_DATASET) {
                int did = -1, tid = -1, tclass = -1;
                try {
                    did = H5.H5Dopen(fid, fullPath + obj_name, HDF5Constants.H5P_DEFAULT);
                    if (did >= 0) {
                        tid = H5.H5Dget_type(did);

                        tclass = H5.H5Tget_class(tid);
                        if ((tclass == HDF5Constants.H5T_ARRAY) || (tclass == HDF5Constants.H5T_VLEN)) {
                            // for ARRAY, the type is determined by the base type
                            int btid = H5.H5Tget_super(tid);
                            int tmpclass = H5.H5Tget_class(btid);

                            // cannot deal with ARRAY of COMPOUND in compound table
                            // viewer
                            // NOTE(review): this re-calls H5Tget_class(btid) although
                            // tmpclass already holds that value.
                            if (tmpclass != HDF5Constants.H5T_COMPOUND)
                                tclass = H5.H5Tget_class(btid);

                            try {
                                H5.H5Tclose(btid);
                            }
                            catch (Exception ex) {
                                log.debug("depth_first[{}] {} dataset access H5Tclose failure: ", i, obj_name, ex);
                            }
                        }
                    }
                    else {
                        log.debug("depth_first[{}] {} dataset open failure", i, obj_name);
                    }
                }
                catch (Exception ex) {
                    log.debug("depth_first[{}] {} dataset access failure: ", i, obj_name, ex);
                }
                finally {
                    // best-effort cleanup; close failures are logged, not propagated
                    try {
                        H5.H5Tclose(tid);
                    }
                    catch (Exception ex) {
                        log.debug("depth_first[{}] {} dataset access H5Tclose failure: ", i, obj_name, ex);
                    }
                    try {
                        H5.H5Dclose(did);
                    }
                    catch (Exception ex) {
                        log.debug("depth_first[{}] {} dataset access H5Dclose failure: ", i, obj_name, ex);
                    }
                }
                Dataset d = null;
                if (tclass == HDF5Constants.H5T_COMPOUND) {
                    // create a new compound dataset
                    d = new H5CompoundDS(this, obj_name, fullPath, oid); // deprecated!
                }
                else {
                    // create a new scalar dataset
                    d = new H5ScalarDS(this, obj_name, fullPath, oid); // deprecated!
                }

                node = new DefaultMutableTreeNode(d);
                pnode.add(node);
                pgroup.addToMemberList(d);
            }
            else if (obj_type == HDF5Constants.H5O_TYPE_NAMED_DATATYPE) {
                Datatype t = new H5Datatype(this, obj_name, fullPath, oid); // deprecated!

                node = new DefaultMutableTreeNode(t);
                pnode.add(node);
                pgroup.addToMemberList(t);
            }
            else if (obj_type == HDF5Constants.H5O_TYPE_UNKNOWN) {
                // unidentified objects are shown as links
                H5Link link = new H5Link(this, obj_name, fullPath, oid);

                node = new DefaultMutableTreeNode(link);
                pnode.add(node);
                pgroup.addToMemberList(link);
                continue; // do the next one, if the object is not identified.
            }
        } // for ( i = 0; i < nelems; i++)

        pgroup.close(gid);

        log.trace("depth_first: finish");
        return nTotal;
    } // private depth_first()

    /**
     * Legacy depth-first traversal kept alongside {@link #depth_first}.
     * Differences from the current version: it applies the start/max member
     * window via the loop bounds rather than per-object counters, and its loop
     * detection does not exclude the root group (no getPath() null check).
     * NOTE(review): appears superseded by depth_first(); confirm callers before
     * removing.
     *
     * @param parentNode
     *            the parent node whose group members are loaded.
     */
    private void depth_first_old(MutableTreeNode parentNode) {
        int nelems;
        MutableTreeNode node = null;
        String fullPath = null;
        String ppath = null;
        DefaultMutableTreeNode pnode = (DefaultMutableTreeNode) parentNode;
        int gid = -1;
        log.trace("depth_first_old: start");

        H5Group pgroup = (H5Group) (pnode.getUserObject());
        ppath = pgroup.getPath();

        if (ppath == null) {
            fullPath = HObject.separator;
        }
        else {
            fullPath = ppath + pgroup.getName() + HObject.separator;
        }

        nelems = 0;
        try {
            gid = pgroup.open();
            H5G_info_t info = H5.H5Gget_info(gid);
            nelems = (int) info.nlinks;
        }
        catch (HDF5Exception ex) {
            // NOTE(review): unlike depth_first(), this failure is not logged
            nelems = -1;
        }

        if (nelems <= 0) {
            pgroup.close(gid);
            return;
        }

        // since each call of H5.H5Gget_objname_by_idx() takes about one second.
        // 1,000,000 calls take 12 days. Instead of calling it in a loop,
        // we use only one call to get all the information, which takes about
        // two seconds
        int[] objTypes = new int[nelems];
        long[] fNos = new long[nelems];
        long[] objRefs = new long[nelems];
        String[] objNames = new String[nelems];

        try {
            H5.H5Gget_obj_info_full(fid, fullPath, objNames, objTypes, null, fNos, objRefs, indexType, indexOrder);
        }
        catch (HDF5Exception ex) {
            ex.printStackTrace();
            return;
        }

        // window [startIndex, endIndex) of members to load; a max-member limit
        // >= nelems means "load everything"
        int startIndex = Math.max(0, getStartMembers());
        int endIndex = getMaxMembers();
        if (endIndex >= nelems) {
            endIndex = nelems;
            startIndex = 0; // load all members
        }
        endIndex += startIndex;
        endIndex = Math.min(endIndex, nelems);

        String obj_name;
        int obj_type;
        // int lnk_type;

        // Iterate through the file to see members of the group
        for (int i = startIndex; i < endIndex; i++) {
            obj_name = objNames[i];
            obj_type = objTypes[i];
            log.trace("depth_first_old: obj_name={}, obj_type={}", obj_name, obj_type);
            long oid[] = { objRefs[i], fNos[i] };

            if (obj_name == null) {
                continue;
            }

            // we need to use the OID for this release. we will rewrite this so
            // that we do not use the deprecated constructor
            if (obj_type == HDF5Constants.H5O_TYPE_UNKNOWN) {
                H5Link link = new H5Link(this, obj_name, fullPath, oid);

                node = new DefaultMutableTreeNode(link);
                pnode.add(node);
                pgroup.addToMemberList(link);
                continue; // do the next one, if the object is not identified.
            }

            // create a new group
            if (obj_type == HDF5Constants.H5O_TYPE_GROUP) {
                H5Group g = new H5Group(this, obj_name, fullPath, pgroup, oid); // deprecated!
                node = new DefaultMutableTreeNode(g) {
                    private static final long serialVersionUID = 5139629211215794015L;

                    @Override
                    public boolean isLeaf() {
                        return false;
                    }
                };
                pnode.add(node);
                pgroup.addToMemberList(g);

                // detect and stop loops
                // a loop is detected if there exists object with the same
                // object ID by tracing path back up to the root.
                boolean hasLoop = false;
                HObject tmpObj = null;
                DefaultMutableTreeNode tmpNode = pnode;

                while (tmpNode != null) {
                    tmpObj = (HObject) tmpNode.getUserObject();

                    if (tmpObj.equalsOID(oid)) {
                        hasLoop = true;
                        break;
                    }
                    else {
                        tmpNode = (DefaultMutableTreeNode) tmpNode.getParent();
                    }
                }

                // recursively go through the next group
                // stops if it has loop.
                if (!hasLoop) {
                    depth_first_old(node);
                }
            }
            else if (obj_type == HDF5Constants.H5O_TYPE_DATASET) {
                int did = -1, tid = -1, tclass = -1;
                try {
                    did = H5.H5Dopen(fid, fullPath + obj_name, HDF5Constants.H5P_DEFAULT);
                    if (did >= 0) {
                        tid = H5.H5Dget_type(did);

                        tclass = H5.H5Tget_class(tid);
                        if ((tclass == HDF5Constants.H5T_ARRAY) || (tclass == HDF5Constants.H5T_VLEN)) {
                            // for ARRAY, the type is determined by the base type
                            int btid = H5.H5Tget_super(tid);
                            int tmpclass = H5.H5Tget_class(btid);

                            // cannot deal with ARRAY of COMPOUND in compound table
                            // viewer
                            if (tmpclass != HDF5Constants.H5T_COMPOUND)
                                tclass = H5.H5Tget_class(btid);

                            try {
                                H5.H5Tclose(btid);
                            }
                            catch (Exception ex) {
                                log.debug("depth_first_old[{}] {} dataset access H5Tclose failure: ", i, obj_name, ex);
                            }
                        }
                    }
                    else {
                        log.debug("depth_first_old[{}] {} dataset open failure", i, obj_name);
                    }
                }
                catch (HDF5Exception ex) {
                    log.debug("depth_first_old[{}] {} dataset access failure: ", i, obj_name, ex);
                }
2407 finally { 2408 try { 2409 H5.H5Tclose(tid); 2410 } 2411 catch (Exception ex) { 2412 log.debug("depth_first_old[{}] {} dataset access H5Tclose failure: ", i, obj_name, ex); 2413 } 2414 try { 2415 H5.H5Dclose(did); 2416 } 2417 catch (Exception ex) { 2418 log.debug("depth_first_old[{}] {} dataset access H5Tclose failure: ", i, obj_name, ex); 2419 } 2420 } 2421 Dataset d = null; 2422 if (tclass == HDF5Constants.H5T_COMPOUND) { 2423 // create a new compound dataset 2424 d = new H5CompoundDS(this, obj_name, fullPath, oid); // deprecated! 2425 } 2426 else { 2427 // create a new scalar dataset 2428 d = new H5ScalarDS(this, obj_name, fullPath, oid); // deprecated! 2429 } 2430 2431 node = new DefaultMutableTreeNode(d); 2432 pnode.add(node); 2433 pgroup.addToMemberList(d); 2434 } 2435 else if (obj_type == HDF5Constants.H5O_TYPE_NAMED_DATATYPE) { 2436 Datatype t = new H5Datatype(this, obj_name, fullPath, oid); // deprecated! 2437 2438 node = new DefaultMutableTreeNode(t); 2439 pnode.add(node); 2440 pgroup.addToMemberList(t); 2441 } 2442 } // for ( i = 0; i < nelems; i++) 2443 2444 pgroup.close(gid); 2445 log.trace("depth_first_old: finish"); 2446 } // private depth_first() 2447 2448 private TreeNode copyDataset(Dataset srcDataset, H5Group pgroup, String dstName) throws Exception { 2449 Dataset dataset = null; 2450 TreeNode newNode; 2451 int srcdid = -1, dstdid = -1; 2452 int ocp_plist_id = -1; 2453 String dname = null, path = null; 2454 2455 if (pgroup.isRoot()) { 2456 path = HObject.separator; 2457 } 2458 else { 2459 path = pgroup.getPath() + pgroup.getName() + HObject.separator; 2460 } 2461 2462 if ((dstName == null) || dstName.equals(HObject.separator) || (dstName.length() < 1)) { 2463 dstName = srcDataset.getName(); 2464 } 2465 dname = path + dstName; 2466 2467 try { 2468 srcdid = srcDataset.open(); 2469 dstdid = pgroup.open(); 2470 2471 try { 2472 ocp_plist_id = H5.H5Pcreate(HDF5Constants.H5P_OBJECT_COPY); 2473 H5.H5Pset_copy_object(ocp_plist_id, 
HDF5Constants.H5O_COPY_EXPAND_REFERENCE_FLAG); 2474 H5.H5Ocopy(srcdid, ".", dstdid, dstName, ocp_plist_id, HDF5Constants.H5P_DEFAULT); 2475 } 2476 catch (Exception ex) { 2477 log.debug("copyDataset {} failure: ", dname, ex); 2478 } 2479 finally { 2480 try { 2481 H5.H5Pclose(ocp_plist_id); 2482 } 2483 catch (Exception ex) { 2484 log.debug("copyDataset {} H5Pclose failure: ", dname, ex); 2485 } 2486 } 2487 2488 if (srcDataset instanceof H5ScalarDS) { 2489 dataset = new H5ScalarDS(pgroup.getFileFormat(), dstName, path); 2490 } 2491 else { 2492 dataset = new H5CompoundDS(pgroup.getFileFormat(), dstName, path); 2493 } 2494 2495 pgroup.addToMemberList(dataset); 2496 newNode = new DefaultMutableTreeNode(dataset); 2497 } 2498 finally { 2499 try { 2500 srcDataset.close(srcdid); 2501 } 2502 catch (Exception ex) { 2503 log.debug("copyDataset {} srcDataset.close failure: ", dname, ex); 2504 } 2505 try { 2506 pgroup.close(dstdid); 2507 } 2508 catch (Exception ex) { 2509 log.debug("copyDataset {} pgroup.close failure: ", dname, ex); 2510 } 2511 } 2512 2513 return newNode; 2514 } 2515 2516 /** 2517 * Constructs a dataset for specified dataset identifier. 2518 * 2519 * @param did 2520 * the dataset identifier 2521 * @param name 2522 * the name of the dataset 2523 * @param path 2524 * the path of the dataset 2525 * @return the dataset if successful; otherwise return null. 
2526 * @throws HDF5Exception 2527 */ 2528 private Dataset getDataset(int did, String name, String path) throws HDF5Exception { 2529 Dataset dataset = null; 2530 if (did >= 0) { 2531 int tid = -1, tclass = -1; 2532 try { 2533 tid = H5.H5Dget_type(did); 2534 tclass = H5.H5Tget_class(tid); 2535 if (tclass == HDF5Constants.H5T_ARRAY) { 2536 // for ARRAY, the type is determined by the base type 2537 int btid = H5.H5Tget_super(tid); 2538 tclass = H5.H5Tget_class(btid); 2539 try { 2540 H5.H5Tclose(btid); 2541 } 2542 catch (Exception ex) { 2543 log.debug("getDataset {} H5Tclose failure: ", name, ex); 2544 } 2545 } 2546 } 2547 finally { 2548 try { 2549 H5.H5Tclose(tid); 2550 } 2551 catch (Exception ex) { 2552 log.debug("getDataset {} H5Tclose failure: ", name, ex); 2553 } 2554 } 2555 2556 if (tclass == HDF5Constants.H5T_COMPOUND) { 2557 dataset = new H5CompoundDS(this, name, path); 2558 } 2559 else { 2560 dataset = new H5ScalarDS(this, name, path); 2561 } 2562 } 2563 else { 2564 log.debug("getDataset id failure"); 2565 } 2566 2567 return dataset; 2568 } 2569 2570 /** 2571 * Copies a named datatype to another location 2572 * 2573 * @param srcType 2574 * the source datatype 2575 * @param pgroup 2576 * the group which the new datatype is copied to 2577 * @param dstName 2578 * the name of the new dataype 2579 * @return the tree node containing the new datatype. 
     * @throws Exception
     */
    private TreeNode copyDatatype(Datatype srcType, H5Group pgroup, String dstName) throws Exception {
        Datatype datatype = null;
        int tid_src = -1, gid_dst = -1;
        String path = null;
        DefaultMutableTreeNode newNode = null;

        if (pgroup.isRoot()) {
            path = HObject.separator;
        }
        else {
            path = pgroup.getPath() + pgroup.getName() + HObject.separator;
        }

        // null/empty/"/" destination name falls back to the source name
        if ((dstName == null) || dstName.equals(HObject.separator) || (dstName.length() < 1)) {
            dstName = srcType.getName();
        }

        try {
            tid_src = srcType.open();
            gid_dst = pgroup.open();

            try {
                H5.H5Ocopy(tid_src, ".", gid_dst, dstName, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
            }
            catch (Exception ex) {
                // NOTE(review): copy failure only logged; wrapper still created below
                log.debug("copyDatatype {} H5Ocopy failure: ", dstName, ex);
            }
            datatype = new H5Datatype(pgroup.getFileFormat(), dstName, path);

            pgroup.addToMemberList(datatype);
            newNode = new DefaultMutableTreeNode(datatype);
        }
        finally {
            // best-effort cleanup; close failures are logged, not propagated
            try {
                srcType.close(tid_src);
            }
            catch (Exception ex) {
                log.debug("copyDatatype {} srcType.close failure: ", dstName, ex);
            }
            try {
                pgroup.close(gid_dst);
            }
            catch (Exception ex) {
                log.debug("copyDatatype {} pgroup.close failure: ", dstName, ex);
            }
        }

        return newNode;
    }

    /**
     * Copies a group and its members to a new location
     *
     * @param srcGroup
     *            the source group
     * @param pgroup
     *            the location which the new group is located
     * @param dstName
     *            the name of the new group
     * @return the tree node containing the new group;
     * @throws Exception
     *             if the source group or destination group cannot be opened.
     */
    private TreeNode copyGroup(H5Group srcGroup, H5Group pgroup, String dstName) throws Exception {
        H5Group group = null;
        DefaultMutableTreeNode newNode = null;
        int srcgid = -1, dstgid = -1;
        // NOTE(review): gname is assigned but never used below.
        String gname = null, path = null;

        if (pgroup.isRoot()) {
            path = HObject.separator;
        }
        else {
            path = pgroup.getPath() + pgroup.getName() + HObject.separator;
        }

        if ((dstName == null) || dstName.equals(HObject.separator) || (dstName.length() < 1)) {
            dstName = srcGroup.getName();
        }

        gname = path + dstName;

        try {
            srcgid = srcGroup.open();
            dstgid = pgroup.open();
            try {
                H5.H5Ocopy(srcgid, ".", dstgid, dstName, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
            }
            catch (Exception ex) {
                log.debug("copyGroup {} H5Ocopy failure: ", dstName, ex);
            }

            group = new H5Group(pgroup.getFileFormat(), dstName, path, pgroup);
            // anonymous subclass so the copied group always renders as expandable
            newNode = new DefaultMutableTreeNode(group) {
                private static final long serialVersionUID = -4981107816640372359L;

                @Override
                public boolean isLeaf() {
                    return false;
                }
            };
            // NOTE(review): Integer.MIN_VALUE disables the start/max member
            // window in depth_first so the whole subtree is reloaded.
            depth_first(newNode, Integer.MIN_VALUE); // reload all
            pgroup.addToMemberList(group);
        }

        finally {
            try {
                srcGroup.close(srcgid);
            }
            catch (Exception ex) {
                log.debug("copyGroup {} srcGroup.close failure: ", dstName, ex);
            }
            try {
                pgroup.close(dstgid);
            }
            catch (Exception ex) {
                log.debug("copyGroup {} pgroup.close failure: ", dstName, ex);
            }
        }

        return newNode;
    }

    /**
     * Constructs a group for specified group identifier and retrieves members.
     *
     * @param gid
     *            The group identifier.
     * @param name
     *            The group name.
     * @param pGroup
     *            The parent group, or null for the root group.
     * @return The group if successful; otherwise returns false.
2713 * @throws HDF5Exception 2714 */ 2715 private H5Group getGroup(int gid, String name, Group pGroup) throws HDF5Exception { 2716 String parentPath = null; 2717 String thisFullName = null; 2718 String memberFullName = null; 2719 2720 if (pGroup == null) { 2721 thisFullName = name = "/"; 2722 } 2723 else { 2724 parentPath = pGroup.getFullName(); 2725 if ((parentPath == null) || parentPath.equals("/")) { 2726 thisFullName = "/" + name; 2727 } 2728 else { 2729 thisFullName = parentPath + "/" + name; 2730 } 2731 } 2732 2733 // get rid of any extra "/" 2734 if (parentPath != null) { 2735 parentPath = parentPath.replaceAll("//", "/"); 2736 } 2737 if (thisFullName != null) { 2738 thisFullName = thisFullName.replaceAll("//", "/"); 2739 } 2740 2741 H5Group group = new H5Group(this, name, parentPath, pGroup); 2742 2743 H5G_info_t group_info = null; 2744 H5O_info_t obj_info = null; 2745 int oid = -1; 2746 String link_name = null; 2747 try { 2748 group_info = H5.H5Gget_info(gid); 2749 } 2750 catch (Exception ex) { 2751 log.debug("getGroup {} H5Gget_info failure: ", name, ex); 2752 } 2753 try { 2754 oid = H5.H5Oopen(gid, thisFullName, HDF5Constants.H5P_DEFAULT); 2755 } 2756 catch (Exception ex) { 2757 log.debug("getGroup {} H5Oopen failure: ", name, ex); 2758 } 2759 2760 // retrieve only the immediate members of the group, do not follow 2761 // subgroups 2762 for (int i = 0; i < group_info.nlinks; i++) { 2763 try { 2764 link_name = H5.H5Lget_name_by_idx(gid, thisFullName, indexType, indexOrder, i, 2765 HDF5Constants.H5P_DEFAULT); 2766 obj_info = H5 2767 .H5Oget_info_by_idx(oid, thisFullName, indexType, indexOrder, i, HDF5Constants.H5P_DEFAULT); 2768 } 2769 catch (HDF5Exception ex) { 2770 log.debug("getGroup[{}] {} name,info failure: ", i, name, ex); 2771 // do not stop if accessing one member fails 2772 continue; 2773 } 2774 // create a new group 2775 if (obj_info.type == HDF5Constants.H5O_TYPE_GROUP) { 2776 H5Group g = new H5Group(this, link_name, thisFullName, group); 2777 
group.addToMemberList(g); 2778 } 2779 else if (obj_info.type == HDF5Constants.H5O_TYPE_DATASET) { 2780 int did = -1; 2781 Dataset d = null; 2782 2783 if ((thisFullName == null) || thisFullName.equals("/")) { 2784 memberFullName = "/" + link_name; 2785 } 2786 else { 2787 memberFullName = thisFullName + "/" + link_name; 2788 } 2789 2790 try { 2791 did = H5.H5Dopen(fid, memberFullName, HDF5Constants.H5P_DEFAULT); 2792 d = getDataset(did, link_name, thisFullName); 2793 } 2794 finally { 2795 try { 2796 H5.H5Dclose(did); 2797 } 2798 catch (Exception ex) { 2799 log.debug("getGroup[{}] {} H5Dclose failure: ", i, name, ex); 2800 } 2801 } 2802 group.addToMemberList(d); 2803 } 2804 else if (obj_info.type == HDF5Constants.H5O_TYPE_NAMED_DATATYPE) { 2805 Datatype t = new H5Datatype(this, link_name, thisFullName); 2806 group.addToMemberList(t); 2807 } 2808 } // End of for loop. 2809 try { 2810 if (oid >= 0) 2811 H5.H5Oclose(oid); 2812 } 2813 catch (Exception ex) { 2814 log.debug("getGroup {} H5Oclose failure: ", name, ex); 2815 } 2816 return group; 2817 } 2818 2819 /** 2820 * Retrieves the name of the target object that is being linked to. 2821 * 2822 * @param obj 2823 * The current link object. 2824 * @return The name of the target object. 
     * @throws Exception
     *             if the link information cannot be retrieved.
     */
    public static String getLinkTargetName(HObject obj) throws Exception {
        // link_value[0] = target path; link_value[1] = external file name (if any)
        String[] link_value = { null, null };
        String targetObjName = null;

        if (obj == null) {
            return null;
        }

        // the root group is never a link
        if (obj.getFullName().equals("/")) {
            return null;
        }

        H5L_info_t link_info = null;
        try {
            link_info = H5.H5Lget_info(obj.getFID(), obj.getFullName(), HDF5Constants.H5P_DEFAULT);
        }
        catch (Throwable err) {
            log.debug("H5Lget_info {} failure: ", obj.getFullName());
            log.trace("H5Lget_info {} failure: ", obj.getFullName(), err);
        }
        // only soft and external links carry a target name; hard links return null
        if (link_info != null) {
            if ((link_info.type == HDF5Constants.H5L_TYPE_SOFT) || (link_info.type == HDF5Constants.H5L_TYPE_EXTERNAL)) {
                try {
                    H5.H5Lget_val(obj.getFID(), obj.getFullName(), link_value, HDF5Constants.H5P_DEFAULT);
                }
                catch (Exception ex) {
                    log.debug("H5Lget_val {} failure: ", obj.getFullName(), ex);
                }
                if (link_info.type == HDF5Constants.H5L_TYPE_SOFT)
                    targetObjName = link_value[0];
                else if (link_info.type == HDF5Constants.H5L_TYPE_EXTERNAL) {
                    // external link: "file-name FILE_OBJ_SEP object-path"
                    targetObjName = link_value[1] + FileFormat.FILE_OBJ_SEP + link_value[0];
                }
            }
        }
        return targetObjName;
    }

    /**
     * Export dataset.
     *
     * @param file_export_name
     *            The file name to export data into.
     * @param file_name
     *            The name of the HDF5 file containing the dataset.
     * @param object_path
     *            The full path of the dataset to be exported.
     * @param binary_order
     *            The byte order of the exported binary data.
     * @throws Exception
     *             if the export fails.
     */
    public void exportDataset(String file_export_name, String file_name, String object_path, int binary_order)
            throws Exception {
        H5.H5export_dataset(file_export_name, file_name, object_path, binary_order);
    }

    /**
     * Renames an attribute.
     *
     * @param obj
     *            The object whose attribute is to be renamed.
     * @param oldAttrName
     *            The current name of the attribute.
     * @param newAttrName
     *            The new name of the attribute.
     * @throws HDF5Exception
     */
    public void renameAttribute(HObject obj, String oldAttrName, String newAttrName) throws Exception {
        log.trace("renameAttribute {} to {}", oldAttrName, newAttrName);
        // NOTE(review): attrFlag gates this so the rename runs at most once per
        // flag cycle and is otherwise silently skipped — looks suspicious;
        // confirm the intended attrFlag semantics with its other uses.
        if (!attrFlag) {
            attrFlag = true;
            H5.H5Arename_by_name(obj.getFID(), obj.getName(), oldAttrName, newAttrName, HDF5Constants.H5P_DEFAULT);
        }
    }

    /**
     * Rename the given object
     *
     * @param obj
     *            the object to be renamed.
     * @param newName
     *            the new name of the object.
     * @throws Exception
     *             if the object is the root group, the name is unchanged, or
     *             the underlying H5Lmove fails.
     */
    public static void renameObject(HObject obj, String newName) throws Exception {
        String currentFullPath = obj.getPath() + obj.getName();
        String newFullPath = obj.getPath() + newName;

        // collapse accidental double separators before comparing/moving
        currentFullPath = currentFullPath.replaceAll("//", "/");
        newFullPath = newFullPath.replaceAll("//", "/");

        if (currentFullPath.equals("/")) {
            throw new HDF5Exception("Can't rename the root group.");
        }

        if (currentFullPath.equals(newFullPath)) {
            throw new HDF5Exception("The new name is the same as the current name.");
        }

        // Call the library to move things in the file
        H5.H5Lmove(obj.getFID(), currentFullPath, obj.getFID(), newFullPath, HDF5Constants.H5P_DEFAULT,
                HDF5Constants.H5P_DEFAULT);
    }

    /**
     * Maps an index-type name to its HDF5 constant.
     *
     * @param strtype
     *            one of "H5_INDEX_NAME", "H5_INDEX_CRT_ORDER" or "H5_INDEX_N".
     * @return the matching HDF5 constant, or H5_INDEX_UNKNOWN for any other value.
     */
    public static int getIndexTypeValue(String strtype) {
        if (strtype.compareTo("H5_INDEX_NAME") == 0)
            return HDF5Constants.H5_INDEX_NAME;
        if (strtype.compareTo("H5_INDEX_CRT_ORDER") == 0)
            return HDF5Constants.H5_INDEX_CRT_ORDER;
        if (strtype.compareTo("H5_INDEX_N") == 0)
            return HDF5Constants.H5_INDEX_N;
        return HDF5Constants.H5_INDEX_UNKNOWN;
    }

    /**
     * Maps an iteration-order name to its HDF5 constant.
     *
     * @param strorder
     *            one of "H5_ITER_INC", "H5_ITER_DEC", "H5_ITER_NATIVE" or "H5_ITER_N".
     * @return the matching HDF5 constant, or H5_ITER_UNKNOWN for any other value.
     */
    public static int getIndexOrderValue(String strorder) {
        if (strorder.compareTo("H5_ITER_INC") == 0)
            return HDF5Constants.H5_ITER_INC;
        if (strorder.compareTo("H5_ITER_DEC") == 0)
            return HDF5Constants.H5_ITER_DEC;
        if (strorder.compareTo("H5_ITER_NATIVE") == 0)
            return HDF5Constants.H5_ITER_NATIVE;
        if (strorder.compareTo("H5_ITER_N") == 0)
            return HDF5Constants.H5_ITER_N;
        return HDF5Constants.H5_ITER_UNKNOWN;
    }

    /**
     * Resolves an index-type name, falling back to this file's current setting.
     * NOTE(review): unlike getIndexTypeValue(), this overload does not accept
     * "H5_INDEX_N" — confirm whether that difference is intentional.
     *
     * @param strtype
     *            the index-type name, or null to use the current setting.
     * @return the matching HDF5 constant, H5_INDEX_UNKNOWN for an unrecognized
     *         name, or the current index type when strtype is null.
     */
    public int getIndexType(String strtype) {
        if (strtype != null) {
            if (strtype.compareTo("H5_INDEX_NAME") == 0)
                return HDF5Constants.H5_INDEX_NAME;
            if (strtype.compareTo("H5_INDEX_CRT_ORDER") == 0)
                return HDF5Constants.H5_INDEX_CRT_ORDER;
            return HDF5Constants.H5_INDEX_UNKNOWN;
        }
        return getIndexType();
    }

    /** @return the index type used when listing group members. */
    public int getIndexType() {
        return indexType;
    }

    /**
     * Sets the index type used when listing group members.
     *
     * @param indexType
     *            one of the HDF5Constants.H5_INDEX_* values.
     */
    public void setIndexType(int indexType) {
        this.indexType = indexType;
    }

    /**
     * Resolves an iteration-order name, falling back to this file's current setting.
     *
     * @param strorder
     *            the iteration-order name, or null to use the current setting.
     * @return the matching HDF5 constant, H5_ITER_UNKNOWN for an unrecognized
     *         name, or the current index order when strorder is null.
     */
    public int getIndexOrder(String strorder) {
        if (strorder != null) {
            if (strorder.compareTo("H5_ITER_INC") == 0)
                return HDF5Constants.H5_ITER_INC;
            if (strorder.compareTo("H5_ITER_DEC") == 0)
                return HDF5Constants.H5_ITER_DEC;
            if (strorder.compareTo("H5_ITER_NATIVE") == 0)
                return HDF5Constants.H5_ITER_NATIVE;
            if (strorder.compareTo("H5_ITER_N") == 0)
                return HDF5Constants.H5_ITER_N;
            return HDF5Constants.H5_ITER_UNKNOWN;
        }
        return getIndexOrder();
    }

    /** @return the iteration order used when listing group members. */
    public int getIndexOrder() {
        return indexOrder;
    }

    /**
     * Sets the iteration order used when listing group members.
     *
     * @param indexOrder
     *            one of the HDF5Constants.H5_ITER_* values.
     */
    public void setIndexOrder(int indexOrder) {
        this.indexOrder = indexOrder;
    }
}