/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the files COPYING and Copyright.html. *
 * COPYING can be found at the root of the source code distribution tree.    *
 * Or, see http://hdfgroup.org/products/hdf-java/doc/Copyright.html.         *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 *****************************************************************************/

package hdf.object.h4;

import java.util.List;
import java.util.Vector;

import hdf.hdflib.HDFConstants;
import hdf.hdflib.HDFException;
import hdf.hdflib.HDFLibrary;
import hdf.object.Attribute;
import hdf.object.CompoundDS;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;

/**
 * H4Vdata describes a multi-dimensional array of HDF4 vdata, inheriting CompoundDS.
 * <p>
 * A vdata is like a table that consists of a collection of records whose values
 * are stored in fixed-length fields. All records have the same structure and
 * all values in each field have the same data type. Vdatas are uniquely
 * identified by a name, a class, and a series of individual field names.
 * <p>
 * <b>How to Select a Subset</b>
 * <p>
 * Dataset defines APIs for reading, writing and subsetting a dataset. No function
 * is defined to select a subset of a data array. The selection is done in an
 * implicit way. Function calls to dimension information such as getSelectedDims()
 * return an array of dimension values, which is a reference to the array in the
 * dataset object. Changes to the array outside the dataset object directly change
 * the values of the array in the dataset object. It works like pointers in C.
 * <p>
 *
 * The following is an example of how to make a subset. In the example, the dataset
 * is a 4-dimensional array of size [200][100][50][10], i.e.,
 * dims[0]=200; dims[1]=100; dims[2]=50; dims[3]=10; <br>
 * We want to select every other data point in dims[1] and dims[2].
 * <pre>
     int rank = dataset.getRank();    // number of dimensions of the dataset
     long[] dims = dataset.getDims(); // the dimension sizes of the dataset
     long[] selected = dataset.getSelectedDims(); // the selected size of the dataset
     long[] start = dataset.getStartDims(); // the offset of the selection
     long[] stride = dataset.getStride(); // the stride of the dataset
     int[] selectedIndex = dataset.getSelectedIndex(); // the selected dimensions for display

     // select dim1 and dim2 as 2D data for display, and slice through dim0
     selectedIndex[0] = 1;
     selectedIndex[1] = 2;
     selectedIndex[2] = 0;

     // reset the selection arrays
     for (int i=0; i<rank; i++) {
         start[i] = 0;
         selected[i] = 1;
         stride[i] = 1;
     }

     // set stride to 2 on dim1 and dim2 so that every other data point is selected.
     stride[1] = 2;
     stride[2] = 2;

     // set the selection size of dim1 and dim2
     selected[1] = dims[1]/stride[1];
     selected[2] = dims[2]/stride[2];

     // when dataset.read() is called, the selection above will be used since
     // the dimension arrays are passed by reference. Changes to these arrays
     // outside the dataset object directly change the values of these arrays
     // in the dataset object.

 * </pre>
 *
 * <p>
 * @version 1.1 9/4/2007
 * @author Peter X. Cao
 */
public class H4Vdata extends CompoundDS
{
    /**
     *
     */
    private static final long serialVersionUID = -5978700886955419959L;

    private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H4Vdata.class);

    /**
     * The list of attributes of this data object. Members of the list are
     * instances of Attribute.
     */
    private List attributeList;

    /**
     * Number of records of this Vdata table.
     */
    private int numberOfRecords;

    /**
     * The data types of the members of the compound dataset.
     */
    private int[] memberTIDs;

    private int nAttributes = -1;


    /**
     * Creates an H4Vdata object with specific name and path.
     * <p>
     * @param theFile the HDF file.
     * @param name the name of this H4Vdata.
     * @param path the full path of this H4Vdata.
     */
    public H4Vdata(FileFormat theFile, String name, String path)
    {
        this(theFile, name, path, null);
    }

    /**
     * Creates an H4Vdata object with specific name and path.
     * <p>
     * @param theFile the HDF file.
     * @param name the name of this H4Vdata.
     * @param path the full path of this H4Vdata.
     * @param oid the unique identifier of this data object.
     */
    public H4Vdata(
        FileFormat theFile,
        String name,
        String path,
        long[] oid)
    {
        super (theFile, name, path, oid);
        numberOfRecords = 0;
        numberOfMembers = 0;
        memberOrders = null;
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.DataFormat#hasAttribute()
     */
    public boolean hasAttribute ()
    {
        if (nAttributes < 0) {
            int id = open();
            try {
                nAttributes = HDFLibrary.VSnattrs(id);
            }
            catch (Exception ex) {
                nAttributes = 0;
            }
            close(id);
        }

        return (nAttributes > 0);
    }

    // implementing Dataset
    @Override
    public Datatype getDatatype()
    {
        if (datatype == null) {
            datatype = new H4Datatype(-1);
        }

        return datatype;
    }

    // Implementing Dataset
    @Override
    public byte[] readBytes() throws HDFException
    {
        byte[] theData = null;

        if (rank <= 0) {
            init();
        }
        if (numberOfMembers <= 0) {
            return null; // this Vdata does not have any field
        }

        int id = open();
        if (id < 0) {
            return null;
        }

        // build a comma-separated list of all field names
        String allNames = memberNames[0];
        for (int i=1; i<numberOfMembers; i++) {
            allNames += ","+memberNames[i];
        }

        log.trace("readBytes(): start");
        try {
            // moves the access pointer to the start position
            HDFLibrary.VSseek(id, (int)startDims[0]);
            // Specify the fields to be accessed
            HDFLibrary.VSsetfields(id, allNames);
            int[] recordSize = {0};
            HDFLibrary.VSQueryvsize(id, recordSize);
            int size = recordSize[0] * (int)selectedDims[0];
            theData = new byte[size];
            int read_num = HDFLibrary.VSread(
                id,
                theData,
                (int)selectedDims[0],
                HDFConstants.FULL_INTERLACE);
        }
        finally {
            close(id);
        }

        log.trace("readBytes(): finish");
        return theData;
    }

    // Implementing DataFormat
    @Override
    public Object read() throws HDFException
    {
        List list = null;

        if (rank <= 0) {
            init();
        }
        if (numberOfMembers <= 0) {
            return null; // this Vdata does not have any field
        }

        int id = open();
        if (id < 0) {
            return null;
        }

        log.trace("read(): start");
        list = new Vector();

        // assume external data files are located in the same directory as the main file.
        HDFLibrary.HXsetdir(getFileFormat().getParent());

        Object member_data = null;
        for (int i=0; i<numberOfMembers; i++) {
            if (!isMemberSelected[i]) {
                continue;
            }

            try {
                // moves the access pointer to the start position
                HDFLibrary.VSseek(id, (int)startDims[0]);
                // Specify the fields to be accessed
                HDFLibrary.VSsetfields(id, memberNames[i]);
            }
            catch (HDFException ex) {
                isMemberSelected[i] = false;
                continue;
            }

            int n = memberOrders[i]*(int)selectedDims[0];
            member_data = H4Datatype.allocateArray(memberTIDs[i], n);

            log.trace("read(): index={} isMemberSelected[i]={} memberOrders[i]={} array size={}", i, isMemberSelected[i], memberOrders[i], n);
            if (member_data == null) {
                String[] nullValues = new String[n];
                for (int j=0; j<n; j++) {
                    nullValues[j] = "*error*";
                }
                list.add(nullValues);
                continue;
            }

            try {
                int read_num = HDFLibrary.VSread(
                    id,
                    member_data,
                    (int)selectedDims[0],
                    HDFConstants.FULL_INTERLACE);
                if ((memberTIDs[i] == HDFConstants.DFNT_CHAR) ||
                    (memberTIDs[i] == HDFConstants.DFNT_UCHAR8)) {
                    // convert characters to string
                    log.trace("read(): convert characters to string");
                    member_data = Dataset.byteToString((byte[])member_data, memberOrders[i]);
                    memberTypes[i] = new H4Datatype(Datatype.CLASS_STRING, memberOrders[i], -1, -1);
                    memberOrders[i] = 1; // one String
                }
                else if (H4Datatype.isUnsigned(memberTIDs[i])) {
                    // convert unsigned integer to appropriate Java integer
                    log.trace("read(): convert unsigned integer to appropriate Java integer");
                    member_data = Dataset.convertFromUnsignedC(member_data);
                }
            }
            catch (HDFException ex) {
                String[] nullValues = new String[n];
                for (int j=0; j<n; j++) {
                    nullValues[j] = "*error*";
                }
                list.add(nullValues);
                continue;
            }

            list.add(member_data);
        } // for (int i=0; i<numberOfMembers; i++)

        close(id);

        log.trace("read(): finish");
        return list;
    }

    // Implementing DataFormat
    @Override
    public void write(Object buf) throws HDFException
    {
        // For writing to a vdata, VSsetfields can only be called once to set
        // up the fields in a vdata. Once the vdata fields are set, they may
        // not be changed. Thus, to update some fields of a record after the
        // first write, the user must read all the fields to a buffer, update
        // the buffer, then write the entire record back to the vdata, as
        // illustrated in the sketch below.
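        // A minimal, untested sketch of that read-update-write-back workaround,
        // assuming a vdata access id "vsid" obtained from open() and a record
        // index "recIndex" (both assumptions for illustration). The byte-level
        // update of the record buffer is application specific and only indicated.
        /*
        String allFields = memberNames[0];                  // every field must be listed
        for (int k=1; k<numberOfMembers; k++)
            allFields += "," + memberNames[k];

        int[] recordSize = {0};
        HDFLibrary.VSQueryvsize(vsid, recordSize);           // bytes per full record
        byte[] record = new byte[recordSize[0]];

        HDFLibrary.VSsetfields(vsid, allFields);             // fields may only be set once
        HDFLibrary.VSseek(vsid, recIndex);                   // position at the record to update
        HDFLibrary.VSread(vsid, record, 1, HDFConstants.FULL_INTERLACE);

        // ... update the bytes of 'record' for the fields that changed ...

        HDFLibrary.VSseek(vsid, recIndex);                   // reposition before writing back
        HDFLibrary.VSwrite(vsid, record, 1, HDFConstants.FULL_INTERLACE);
        */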
        log.trace("write(): disabled");
/*
        if (buf == null || numberOfMembers <= 0 || !(buf instanceof List))
            return; // no data to write

        List list = (List)buf;
        Object member_data = null;
        String member_name = null;

        int vid = open();
        if (vid < 0) return;

        int idx = 0;
        for (int i=0; i<numberOfMembers; i++) {
            if (!isMemberSelected[i])
                continue;

            HDFLibrary.VSsetfields(vid, memberNames[i]);

            try {
                // Specify the fields to be accessed

                // moves the access pointer to the start position
                HDFLibrary.VSseek(vid, (int)startDims[0]);
            }
            catch (HDFException ex) {
                continue;
            }

            member_data = list.get(idx++);
            if (member_data == null)
                continue;

            if (memberTIDs[i] == HDFConstants.DFNT_CHAR ||
                memberTIDs[i] == HDFConstants.DFNT_UCHAR8) {
                member_data = Dataset.stringToByte((String[])member_data, memberOrders[i]);
            }
            else if (H4Datatype.isUnsigned(memberTIDs[i])) {
                // convert unsigned integer to appropriate Java integer
                member_data = Dataset.convertToUnsignedC(member_data);
            }

            int interlace = HDFConstants.NO_INTERLACE;
            try {
                int write_num = HDFLibrary.VSwrite(
                    vid, member_data, (int)selectedDims[0], interlace);
            }
            catch (HDFException ex) {
                log.debug("write():", ex);
            }
        } // for (int i=0; i<numberOfMembers; i++)

        close(vid);
*/
    }

    // Implementing DataFormat
    public List getMetadata() throws HDFException
    {
        if (attributeList != null) {
            return attributeList;
        }

        int id = open();

        if (id < 0) {
            return attributeList;
        }

        log.trace("getMetadata(): start");
        int n = 0;
        try {
            n = HDFLibrary.VSnattrs(id);

            if (n <= 0) {
                return attributeList;
            }

            attributeList = new Vector(n, 5);
            boolean b = false;
            String[] attrName = new String[1];
            int[] attrInfo = new int[5];

            // _HDF_VDATA (or -1) to specify the vdata attribute
            int nleft = n;
            for (int j=-1; j<numberOfMembers; j++) {
                for (int i=0; i<nleft; i++) {
                    attrName[0] = "";

                    try {
                        b = HDFLibrary.VSattrinfo(id, j, i, attrName, attrInfo);
                        // mask off the litend bit
                        attrInfo[0] = attrInfo[0] & (~HDFConstants.DFNT_LITEND);
                    }
                    catch (HDFException ex) {
                        b = false;
                        ex.printStackTrace();
                    }

                    if (!b || attrName[0].length() <= 0) {
                        continue;
                    }

                    long[] attrDims = {attrInfo[1]};
                    Attribute attr = new Attribute(attrName[0], new H4Datatype(attrInfo[0]), attrDims);
                    if (j >= 0)
                        attr.setProperty("field", memberNames[j]);
                    attributeList.add(attr);

                    Object buf = H4Datatype.allocateArray(attrInfo[0], attrInfo[1]);
                    try {
                        HDFLibrary.VSgetattr(id, j, i, buf);
                    }
                    catch (HDFException ex) {
                        buf = null;
                    }

                    if (buf != null) {
                        if ((attrInfo[0] == HDFConstants.DFNT_CHAR) ||
                            (attrInfo[0] == HDFConstants.DFNT_UCHAR8)) {
                            buf = Dataset.byteToString((byte[])buf, attrInfo[1]);
                        }

                        attr.setValue(buf);
                        nleft--;
                    }
                } // for (int i=0; i<nleft; i++)
            } // for (int j=-1; j<numberOfMembers; j++)

        }
        finally {
            close(id);
        }

        // TODO: we should also load attributes of fields

        log.trace("getMetadata(): finish");
        return attributeList;
    }

    // TODO: Implementing DataFormat
    public void writeMetadata(Object info) throws Exception
    {
        // only attribute metadata is supported.
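        // A minimal, untested sketch of how a caller might attach a vdata-level
        // attribute through this method; the instance name "vdata" and the
        // attribute name/value are assumptions for illustration only.
        //
        //   long[] attrDims = {1};
        //   Attribute attr = new Attribute("units", new H4Datatype(HDFConstants.DFNT_CHAR), attrDims);
        //   attr.setValue(new String[] {"meters"});
        //   vdata.writeMetadata(attr);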
        if (!(info instanceof Attribute)) {
            return;
        }
        log.trace("writeMetadata(): start");

        getFileFormat().writeAttribute(this, (Attribute)info, true);

        if (attributeList == null) {
            attributeList = new Vector();
        }

        attributeList.add(info);
        nAttributes = attributeList.size();
        log.trace("writeMetadata(): finish");
    }

    // TODO: Implementing DataFormat
    public void removeMetadata(Object info) throws HDFException
    {
        log.trace("removeMetadata(): disabled");
    }

    // implementing DataFormat
    public void updateMetadata(Object info) throws Exception {
        log.trace("updateMetadata(): disabled");
    }

    // Implementing DataFormat
    @Override
    public int open()
    {
        int vsid = -1;

        // try to open with write permission
        log.trace("open(): start");
        try {
            vsid = HDFLibrary.VSattach(getFID(), (int)oid[1], "w");
        }
        catch (HDFException ex) {
            vsid = -1;
        }

        // try to open with read-only permission
        if (vsid < 0) {
            try {
                vsid = HDFLibrary.VSattach(getFID(), (int)oid[1], "r");
            }
            catch (HDFException ex) {
                vsid = -1;
            }
        }

        log.trace("open(): finish");
        return vsid;
    }

    // Implementing DataFormat
    @Override
    public void close(int vsid)
    {
        try {
            HDFLibrary.VSdetach(vsid);
        }
        catch (Exception ex) {
            log.debug("close.VSdetach:", ex);
        }
    }

    /**
     * Initializes the H4Vdata, such as the dimension sizes of this dataset.
     */
    @Override
    public void init()
    {
        log.trace("init(): start");
        if (rank > 0) {
            return; // already called; initialize only once
        }

        int id = open();
        if (id < 0) {
            return;
        }

        try {
            numberOfMembers = HDFLibrary.VFnfields(id);
            numberOfRecords = HDFLibrary.VSelts(id);
        }
        catch (HDFException ex) {
            numberOfMembers = 0;
            numberOfRecords = 0;
        }

//      Still need to get information if there is no record, see bug 1738
//      if ((numberOfMembers <= 0) || (numberOfRecords <= 0)) {
//          // no table field is defined or no records
//          close(id);
//          return;
//      }

        // a Vdata table is a one-dimensional array of records;
        // each record has the same fields
        rank = 1;
        dims = new long[1];
        dims[0] = numberOfRecords;
        selectedDims = new long[1];
        selectedDims[0] = numberOfRecords;
        selectedIndex[0] = 0;
        startDims = new long[1];
        startDims[0] = 0;

        memberNames = new String[numberOfMembers];
        memberTIDs = new int[numberOfMembers];
        memberTypes = new Datatype[numberOfMembers];
        memberOrders = new int[numberOfMembers];
        isMemberSelected = new boolean[numberOfMembers];

        for (int i=0; i<numberOfMembers; i++) {
            isMemberSelected[i] = true;
            try {
                memberNames[i] = HDFLibrary.VFfieldname(id, i);
                memberTIDs[i] = HDFLibrary.VFfieldtype(id, i);
                memberTypes[i] = new H4Datatype(memberTIDs[i]);
                // mask off the litend bit
                memberTIDs[i] = memberTIDs[i] & (~HDFConstants.DFNT_LITEND);
                memberOrders[i] = HDFLibrary.VFfieldorder(id, i);
                log.trace("init():{}> isMemberSelected[i]={} memberNames[i]={} memberTIDs[i]={} memberOrders[i]={}", i, isMemberSelected[i], memberNames[i], memberTIDs[i], memberOrders[i]);
            }
            catch (HDFException ex) {
                continue;
            }
        } // for (int i=0; i<numberOfMembers; i++)

        close(id);
        log.trace("init(): finish");
    }

    /**
     * Returns the number of records.
     */
    public int getRecordCount()
    {
        return numberOfRecords;
    }

    /**
     * Returns the number of fields.
     */
    public int getFieldCount()
    {
        return numberOfMembers;
    }

    /**
     * Returns the orders of the fields.
     */
    public int[] getFieldOrders()
    {
        return memberOrders;
    }

    // Implementing DataFormat
    public List getMetadata(int... attrPropList) throws Exception {
        throw new UnsupportedOperationException("getMetadata(int... attrPropList) is not supported");
    }

    public Dataset copy(Group pgroup, String name, long[] dims, Object data)
            throws Exception {
        throw new UnsupportedOperationException(
                "Writing a vdata to a new dataset is not implemented.");
    }
}
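
/*
 * Illustrative usage (a minimal, untested sketch; not part of the class). It
 * assumes an H4Vdata instance "vdata" has already been retrieved from an opened
 * HDF4 file, and reads a contiguous block of records for all fields:
 *
 *   vdata.init();                           // discover fields, records and dims
 *   long[] start = vdata.getStartDims();    // selection offset, held by reference
 *   long[] count = vdata.getSelectedDims(); // selection size, held by reference
 *   start[0] = 10;                          // first record to read
 *   count[0] = Math.min(100, vdata.getRecordCount() - 10);
 *   java.util.List data = (java.util.List) vdata.read(); // one array per selected field
 */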