/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the files COPYING and Copyright.html. *
 * COPYING can be found at the root of the source code distribution tree.    *
 * Or, see http://hdfgroup.org/products/hdf-java/doc/Copyright.html.         *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.h4;

import java.util.List;
import java.util.Vector;

import hdf.hdflib.HDFConstants;
import hdf.hdflib.HDFException;
import hdf.hdflib.HDFLibrary;
import hdf.object.Attribute;
import hdf.object.CompoundDS;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;

/**
 * H4Vdata describes a multi-dimensional array of HDF4 vdata, inheriting CompoundDS.
 * <p>
 * A vdata is like a table that consists of a collection of records whose values
 * are stored in fixed-length fields. All records have the same structure and
 * all values in each field have the same data type. Vdatas are uniquely
 * identified by a name, a class, and a series of individual field names.
 * <p>
 * <b>How to Select a Subset</b>
 * <p>
 * Dataset defines APIs for reading, writing and subsetting a dataset. No function
 * is defined to select a subset of a data array. The selection is done in an
 * implicit way. Function calls to dimension information such as getSelectedDims()
 * return an array of dimension values, which is a reference to the array in the
 * dataset object. Changes made to the array outside the dataset object directly
 * change the values of the array in the dataset object. It is like pointers in C.
 * <p>
 *
 * The following is an example of how to make a subset. In the example, the dataset
 * is a 4-dimensional array of size [200][100][50][10], i.e.
 * dims[0]=200; dims[1]=100; dims[2]=50; dims[3]=10; <br>
 * We want to select every other data point in dims[1] and dims[2].
 * <pre>
     int rank = dataset.getRank();    // number of dimensions of the dataset
     long[] dims = dataset.getDims(); // the dimension sizes of the dataset
     long[] selected = dataset.getSelectedDims(); // the selected size of the dataset
     long[] start = dataset.getStartDims();       // the offset of the selection
     long[] stride = dataset.getStride();         // the stride of the dataset
     int[] selectedIndex = dataset.getSelectedIndex(); // the selected dimensions for display

     // select dim1 and dim2 as 2D data for display, and slice through dim0
     selectedIndex[0] = 1;
     selectedIndex[1] = 2;
     selectedIndex[2] = 0;

     // reset the selection arrays
     for (int i=0; i<rank; i++) {
         start[i] = 0;
         selected[i] = 1;
         stride[i] = 1;
     }

     // set stride to 2 on dim1 and dim2 so that every other data point is selected.
     stride[1] = 2;
     stride[2] = 2;

     // set the selection size of dim1 and dim2
     selected[1] = dims[1]/stride[1];
     selected[2] = dims[2]/stride[2];

     // when dataset.read() is called, the selection above will be used since
     // the dimension arrays are passed by reference. Changes made to these arrays
     // outside the dataset object directly change the values of the arrays
     // in the dataset object.

 * </pre>
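 *
 * For an H4Vdata, the same selection arrays apply, but the data is a one-dimensional
 * array of records: after init() is called, the rank is always 1 and dims[0] holds
 * the number of records. The following is a minimal sketch (illustrative only; the
 * variable names are assumptions, not part of this API) of selecting and reading a
 * range of records:
 * <pre>
     H4Vdata vdata = ...;         // an H4Vdata obtained from an HDF4 file
     vdata.init();                // rank=1, dims[0]=number of records

     long[] start = vdata.getStartDims();        // {0}
     long[] selected = vdata.getSelectedDims();  // {number of records}

     start[0] = 100;              // start reading at record 100
     selected[0] = 10;            // read 10 records

     // one data array per selected field
     List data = (List) vdata.read();
 * </pre>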
 *
 * @version 1.1 9/4/2007
 * @author Peter X. Cao
 */
public class H4Vdata extends CompoundDS
{
    /**
     *
     */
    private static final long serialVersionUID = -5978700886955419959L;

    private final static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H4Vdata.class);

    /**
     * The list of attributes of this data object. Members of the list are
     * instances of Attribute.
     */
    private List attributeList;

    /**
     * Number of records of this Vdata table.
     */
    private int numberOfRecords;

    /**
     * The data types of the members of the compound dataset.
     */
    private int[] memberTIDs;

    private int nAttributes = -1;

    /**
     * Creates an H4Vdata object with specific name and path.
     *
     * @param theFile the HDF file.
     * @param name the name of this H4Vdata.
     * @param path the full path of this H4Vdata.
     */
    public H4Vdata(FileFormat theFile, String name, String path)
    {
        this(theFile, name, path, null);
    }

    /**
     * Creates an H4Vdata object with specific name, path and object identifier.
     *
     * @param theFile the HDF file.
     * @param name the name of this H4Vdata.
     * @param path the full path of this H4Vdata.
     * @param oid the unique identifier of this data object.
     */
    public H4Vdata(
            FileFormat theFile,
            String name,
            String path,
            long[] oid)
    {
        super(theFile, name, path, oid);
        numberOfRecords = 0;
        numberOfMembers = 0;
        memberOrders = null;
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.DataFormat#hasAttribute()
     */
    public boolean hasAttribute()
    {
        if (nAttributes < 0) {
            int id = open();
            try {
                nAttributes = HDFLibrary.VSnattrs(id);
            }
            catch (Exception ex) {
                nAttributes = 0;
            }
            close(id);
        }

        return (nAttributes > 0);
    }

    // implementing Dataset
    @Override
    public Datatype getDatatype()
    {
        if (datatype == null) {
            datatype = new H4Datatype(-1);
        }

        return datatype;
    }

    // Implementing Dataset
    @Override
    public byte[] readBytes() throws HDFException
    {
        byte[] theData = null;

        if (rank <= 0) {
            init();
        }
        if (numberOfMembers <= 0) {
            return null; // this Vdata does not have any field
        }

        int id = open();
        if (id < 0) {
            return null;
        }

        // build the comma-separated list of all field names
        String allNames = memberNames[0];
        for (int i = 1; i < numberOfMembers; i++) {
            allNames += "," + memberNames[i];
        }

        log.trace("readBytes(): start");
        try {
            // moves the access pointer to the start position
            HDFLibrary.VSseek(id, (int) startDims[0]);
            // specify the fields to be accessed
            HDFLibrary.VSsetfields(id, allNames);
            int[] recordSize = {0};
            HDFLibrary.VSQueryvsize(id, recordSize);
            int size = recordSize[0] * (int) selectedDims[0];
            theData = new byte[size];
            int read_num = HDFLibrary.VSread(
                    id,
                    theData,
                    (int) selectedDims[0],
                    HDFConstants.FULL_INTERLACE);
        }
        finally {
            close(id);
        }

        log.trace("readBytes(): finish");
        return theData;
    }

    // Implementing DataFormat
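    /**
     * Reads the values of the selected fields of the selected records into memory.
     * <p>
     * The returned list contains one data array for each selected field. Character
     * fields are converted to arrays of String, and unsigned integer fields are
     * converted to the appropriate signed Java types.
     *
     * @return the list of data arrays of the selected fields, or null if this
     *         vdata has no fields or cannot be accessed.
     *
     * @throws HDFException if an error occurs in the HDF library.
     */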
    @Override
    public Object read() throws HDFException
    {
        List list = null;

        if (rank <= 0) {
            init();
        }
        if (numberOfMembers <= 0) {
            return null; // this Vdata does not have any field
        }

        int id = open();
        if (id < 0) {
            return null;
        }

        log.trace("read(): start");
        list = new Vector();

        // assume external data files are located in the same directory as the main file.
        HDFLibrary.HXsetdir(getFileFormat().getParent());

        Object member_data = null;
        for (int i = 0; i < numberOfMembers; i++) {
            if (!isMemberSelected[i]) {
                continue;
            }

            try {
                // moves the access pointer to the start position
                HDFLibrary.VSseek(id, (int) startDims[0]);
                // specify the fields to be accessed
                HDFLibrary.VSsetfields(id, memberNames[i]);
            }
            catch (HDFException ex) {
                isMemberSelected[i] = false;
                continue;
            }

            int n = memberOrders[i] * (int) selectedDims[0];
            member_data = H4Datatype.allocateArray(memberTIDs[i], n);

            log.trace("read(): index={} isMemberSelected[i]={} memberOrders[i]={} array size={}", i, isMemberSelected[i], memberOrders[i], n);
            if (member_data == null) {
                String[] nullValues = new String[n];
                for (int j = 0; j < n; j++) {
                    nullValues[j] = "*error*";
                }
                list.add(nullValues);
                continue;
            }

            try {
                int read_num = HDFLibrary.VSread(
                        id,
                        member_data,
                        (int) selectedDims[0],
                        HDFConstants.FULL_INTERLACE);
                if ((memberTIDs[i] == HDFConstants.DFNT_CHAR) ||
                    (memberTIDs[i] == HDFConstants.DFNT_UCHAR8)) {
                    // convert characters to string
                    log.trace("read(): convert characters to string");
                    member_data = Dataset.byteToString((byte[]) member_data, memberOrders[i]);
                    memberTypes[i] = new H4Datatype(Datatype.CLASS_STRING, memberOrders[i], -1, -1);
                    memberOrders[i] = 1; // one String
                }
                else if (H4Datatype.isUnsigned(memberTIDs[i])) {
                    // convert unsigned integer to appropriate Java integer
                    log.trace("read(): convert unsigned integer to appropriate Java integer");
                    member_data = Dataset.convertFromUnsignedC(member_data);
                }
            }
            catch (HDFException ex) {
                String[] nullValues = new String[n];
                for (int j = 0; j < n; j++) {
                    nullValues[j] = "*error*";
                }
                list.add(nullValues);
                continue;
            }

            list.add(member_data);
        } // for (int i=0; i<numberOfMembers; i++)

        close(id);

        log.trace("read(): finish");
        return list;
    }

    // Implementing DataFormat
    @Override
    public void write(Object buf) throws HDFException
    {
        // For writing to a vdata, VSsetfields can only be called once, to set
        // up the fields in a vdata. Once the vdata fields are set, they may
        // not be changed. Thus, to update some fields of a record after the
        // first write, the user must read all the fields to a buffer, update
        // the buffer, then write the entire record back to the vdata.
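        //
        // A minimal sketch (illustrative only, never executed here) of that
        // read-modify-write pattern; 'vid', 'recordIndex', 'allFields' and the
        // byte buffer 'record' (sized with VSQueryvsize) are assumed names:
        //
        //   HDFLibrary.VSsetfields(vid, allFields);  // field list can be set only once
        //   HDFLibrary.VSseek(vid, recordIndex);
        //   HDFLibrary.VSread(vid, record, 1, HDFConstants.FULL_INTERLACE);
        //   // ... update the bytes of the fields to be changed in 'record' ...
        //   HDFLibrary.VSseek(vid, recordIndex);
        //   HDFLibrary.VSwrite(vid, record, 1, HDFConstants.FULL_INTERLACE);
        //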
        log.trace("write(): disabled");
/*
        if (buf == null || numberOfMembers <= 0 || !(buf instanceof List))
            return; // no data to write

        List list = (List)buf;
        Object member_data = null;
        String member_name = null;

        int vid = open();
        if (vid < 0) return;

        int idx = 0;
        for (int i=0; i<numberOfMembers; i++) {
            if (!isMemberSelected[i])
                continue;

            HDFLibrary.VSsetfields(vid, memberNames[i]);

            try {
                // Specify the fields to be accessed

                // moves the access pointer to the start position
                HDFLibrary.VSseek(vid, (int)startDims[0]);
            }
            catch (HDFException ex) {
                continue;
            }

            member_data = list.get(idx++);
            if (member_data == null)
                continue;

            if (memberTIDs[i] == HDFConstants.DFNT_CHAR ||
                memberTIDs[i] == HDFConstants.DFNT_UCHAR8) {
                member_data = Dataset.stringToByte((String[])member_data, memberOrders[i]);
            }
            else if (H4Datatype.isUnsigned(memberTIDs[i])) {
                // convert unsigned integer to appropriate Java integer
                member_data = Dataset.convertToUnsignedC(member_data);
            }

            int interlace = HDFConstants.NO_INTERLACE;
            try {
                int write_num = HDFLibrary.VSwrite(
                        vid, member_data, (int)selectedDims[0], interlace);
            }
            catch (HDFException ex) {
                log.debug("write():", ex);
            }
        } // for (int i=0; i<numberOfMembers; i++)

        close(vid);
*/
    }

    // Implementing DataFormat
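    /**
     * Retrieves the attributes attached to this vdata and to its individual fields.
     * <p>
     * Attributes of a field carry a "field" property that holds the name of the
     * field they belong to; vdata-level attributes have no such property.
     *
     * @return the list of Attribute objects, or null if the vdata has no attributes.
     *
     * @throws HDFException if an error occurs in the HDF library.
     */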
    public List getMetadata() throws HDFException
    {
        if (attributeList != null) {
            return attributeList;
        }

        int id = open();

        if (id < 0) {
            return attributeList;
        }

        log.trace("getMetadata(): start");
        int n = 0;
        try {
            n = HDFLibrary.VSnattrs(id);

            if (n <= 0) {
                return attributeList;
            }

            attributeList = new Vector(n, 5);
            boolean b = false;
            String[] attrName = new String[1];
            int[] attrInfo = new int[5];

            // field index -1 (_HDF_VDATA) specifies attributes of the vdata itself
            int nleft = n;
            for (int j = -1; j < numberOfMembers; j++) {
                for (int i = 0; i < nleft; i++) {
                    attrName[0] = "";

                    try {
                        b = HDFLibrary.VSattrinfo(id, j, i, attrName, attrInfo);
                        // mask off the litend bit
                        attrInfo[0] = attrInfo[0] & (~HDFConstants.DFNT_LITEND);
                    }
                    catch (HDFException ex) {
                        b = false;
                        ex.printStackTrace();
                    }

                    if (!b || attrName[0].length() <= 0) {
                        continue;
                    }

                    long[] attrDims = {attrInfo[1]};
                    Attribute attr = new Attribute(attrName[0], new H4Datatype(attrInfo[0]), attrDims);
                    if (j >= 0)
                        attr.setProperty("field", memberNames[j]);
                    attributeList.add(attr);

                    Object buf = H4Datatype.allocateArray(attrInfo[0], attrInfo[1]);
                    try {
                        HDFLibrary.VSgetattr(id, j, i, buf);
                    }
                    catch (HDFException ex) {
                        buf = null;
                    }

                    if (buf != null) {
                        if ((attrInfo[0] == HDFConstants.DFNT_CHAR) ||
                            (attrInfo[0] == HDFConstants.DFNT_UCHAR8)) {
                            buf = Dataset.byteToString((byte[]) buf, attrInfo[1]);
                        }

                        attr.setValue(buf);
                        nleft--;
                    }
                } // for (int i=0; i<nleft; i++)
            } // for (int j=-1; j<numberOfMembers; j++)

        }
        finally {
            close(id);
        }

        // TODO: we shall also load attributes of fields

        log.trace("getMetadata(): finish");
        return attributeList;
    }

    // To do: Implementing DataFormat
    public void writeMetadata(Object info) throws Exception
    {
        // only attribute metadata is supported.
        if (!(info instanceof Attribute)) {
            return;
        }
        log.trace("writeMetadata(): start");

        getFileFormat().writeAttribute(this, (Attribute) info, true);

        if (attributeList == null) {
            attributeList = new Vector();
        }

        attributeList.add(info);
        nAttributes = attributeList.size();
        log.trace("writeMetadata(): finish");
    }

    // To do: Implementing DataFormat
    public void removeMetadata(Object info) throws HDFException
    {
        log.trace("removeMetadata(): disabled");
    }

    // implementing DataFormat
    public void updateMetadata(Object info) throws Exception {
        log.trace("updateMetadata(): disabled");
    }

    // Implementing DataFormat
    @Override
    public int open()
    {
        int vsid = -1;

        // try to open with write permission
        log.trace("open(): start");
        try {
            vsid = HDFLibrary.VSattach(getFID(), (int) oid[1], "w");
        }
        catch (HDFException ex) {
            vsid = -1;
        }

        // try to open with read-only permission
        if (vsid < 0) {
            try {
                vsid = HDFLibrary.VSattach(getFID(), (int) oid[1], "r");
            }
            catch (HDFException ex) {
                vsid = -1;
            }
        }

        log.trace("open(): finish");
        return vsid;
    }

    // Implementing DataFormat
    @Override
    public void close(int vsid)
    {
        try {
            HDFLibrary.VSdetach(vsid);
        }
        catch (Exception ex) {
            log.debug("close.VSdetach:", ex);
        }
    }

    /**
     * Initializes the H4Vdata, e.g. the dimension sizes and field information of this vdata.
     */
    @Override
    public void init()
    {
        log.trace("init(): start");
        if (rank > 0) {
            return; // already called. Initialize only once
        }

        int id = open();
        if (id < 0) {
            return;
        }

        try {
            numberOfMembers = HDFLibrary.VFnfields(id);
            numberOfRecords = HDFLibrary.VSelts(id);
        }
        catch (HDFException ex) {
            numberOfMembers = 0;
            numberOfRecords = 0;
        }

// Still need to get information if there is no record, see bug 1738
//        if ((numberOfMembers <= 0) || (numberOfRecords <= 0)) {
//            // no table field is defined or no records
//            close(id);
//            return;
//        }

        // a Vdata table is a one-dimensional array of records;
        // each record has the same fields
        rank = 1;
        dims = new long[1];
        dims[0] = numberOfRecords;
        selectedDims = new long[1];
        selectedDims[0] = numberOfRecords;
        selectedIndex[0] = 0;
        startDims = new long[1];
        startDims[0] = 0;

        memberNames = new String[numberOfMembers];
        memberTIDs = new int[numberOfMembers];
        memberTypes = new Datatype[numberOfMembers];
        memberOrders = new int[numberOfMembers];
        isMemberSelected = new boolean[numberOfMembers];

        for (int i = 0; i < numberOfMembers; i++) {
            isMemberSelected[i] = true;
            try {
                memberNames[i] = HDFLibrary.VFfieldname(id, i);
                memberTIDs[i] = HDFLibrary.VFfieldtype(id, i);
                memberTypes[i] = new H4Datatype(memberTIDs[i]);
                // mask off the litend bit
                memberTIDs[i] = memberTIDs[i] & (~HDFConstants.DFNT_LITEND);
                memberOrders[i] = HDFLibrary.VFfieldorder(id, i);
                log.trace("init():{}> isMemberSelected[i]={} memberNames[i]={} memberTIDs[i]={} memberOrders[i]={}", i, isMemberSelected[i], memberNames[i], memberTIDs[i], memberOrders[i]);
            }
            catch (HDFException ex) {
                continue;
            }
        } // for (int i=0; i<numberOfMembers; i++)

        close(id);
        log.trace("init(): finish");
    }

    /**
     * Returns the number of records.
606 * 607 * @return the number of records 608 */ 609 public int getRecordCount() 610 { 611 return numberOfRecords; 612 } 613 614 /** 615 * Returns the number of fields. 616 * 617 * @return the number of fields 618 */ 619 public int getFieldCount() 620 { 621 return numberOfMembers; 622 } 623 624 /** 625 * Returns the orders of fields 626 * 627 * @return the orders of fields 628 */ 629 public int[] getFieldOrders() 630 { 631 return memberOrders; 632 } 633 634 //Implementing DataFormat 635 public List getMetadata(int... attrPropList) throws Exception { 636 throw new UnsupportedOperationException("getMetadata(int... attrPropList) is not supported"); 637 } 638 639 public Dataset copy(Group pgroup, String name, long[] dims, Object data) 640 throws Exception { 641 throw new UnsupportedOperationException( 642 "Writing a vdata to a new dataset is not implemented."); 643 } 644}