001/*****************************************************************************
002 * Copyright by The HDF Group.                                               *
003 * Copyright by the Board of Trustees of the University of Illinois.         *
004 * All rights reserved.                                                      *
005 *                                                                           *
006 * This file is part of the HDF Java Products distribution.                  *
007 * The full copyright notice, including terms governing use, modification,   *
008 * and redistribution, is contained in the files COPYING and Copyright.html. *
009 * COPYING can be found at the root of the source code distribution tree.    *
010 * Or, see http://hdfgroup.org/products/hdf-java/doc/Copyright.html.         *
011 * If you do not have access to either file, you may request a copy from     *
012 * help@hdfgroup.org.                                                        *
013 ****************************************************************************/
014
015package hdf.object.h5;
016
017import java.io.ByteArrayOutputStream;
018import java.io.FileOutputStream;
019import java.io.OutputStream;
020import java.lang.reflect.Array;
021import java.text.DecimalFormat;
022import java.util.List;
023import java.util.Vector;
024
025import hdf.hdf5lib.H5;
026import hdf.hdf5lib.HDF5Constants;
027import hdf.hdf5lib.HDFNativeData;
028import hdf.hdf5lib.exceptions.HDF5DataFiltersException;
029import hdf.hdf5lib.exceptions.HDF5Exception;
030import hdf.hdf5lib.structs.H5O_info_t;
031import hdf.object.Attribute;
032import hdf.object.CompoundDS;
033import hdf.object.Dataset;
034import hdf.object.Datatype;
035import hdf.object.FileFormat;
036import hdf.object.Group;
037import hdf.object.HObject;
038
039/**
040 * The H5CompoundDS class defines an HDF5 dataset of compound datatypes.
041 * <p>
042 * An HDF5 dataset is an object composed of a collection of data elements, or raw data, and metadata that stores a
043 * description of the data elements, data layout, and all other information necessary to write, read, and interpret the
044 * stored data.
045 * <p>
 * An HDF5 compound datatype is similar to a struct in C or a common block in Fortran: it is a collection of one or more
047 * atomic types or small arrays of such types. Each member of a compound type has a name which is unique within that
048 * type, and a byte offset that determines the first byte (smallest byte address) of that member in a compound datum.
049 * <p>
050 * For more information on HDF5 datasets and datatypes, read the <a
051 * href="http://hdfgroup.org/HDF5/doc/UG/index.html">HDF5 User's Guide</a>.
052 * <p>
 * There are two basic types of compound datasets: simple compound data and nested compound data. Members of a simple
 * compound dataset have atomic datatypes. Members of a nested compound dataset are compound data or arrays of compound data.
055 * <p>
056 * Since Java does not understand C structures, we cannot directly read/write compound data values as in the following C
057 * example.
058 *
059 * <pre>
060 * typedef struct s1_t {
061 *         int    a;
062 *         float  b;
063 *         double c;
064 *         } s1_t;
065 *     s1_t       s1[LENGTH];
066 *     ...
067 *     H5Dwrite(..., s1);
068 *     H5Dread(..., s1);
069 * </pre>
070 *
 * Values of the compound data fields are stored in a java.util.Vector object. We read and write compound data field by
 * field instead of as a whole structure. For the example above, the java.util.Vector object has three elements:
 * int[LENGTH], float[LENGTH] and double[LENGTH]. Since Java understands the primitive datatypes int, float and double,
 * we are able to read/write the compound data by field.
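 * <p>
 * For example, the compound data of the dataset above can be read field by field as follows (a minimal sketch;
 * the opened H5File object &quot;file&quot; and the dataset path &quot;/s1&quot; are assumptions):
 *
 * <pre>
 * H5CompoundDS dset = (H5CompoundDS) file.get(&quot;/s1&quot;);
 * dset.init();
 * List data = (List) dset.read();
 * int[] a = (int[]) data.get(0);
 * float[] b = (float[]) data.get(1);
 * double[] c = (double[]) data.get(2);
 * </pre>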
077 *
078 * @version 1.1 9/4/2007
079 * @author Peter X. Cao
080 */
081public class H5CompoundDS extends CompoundDS {
082    /**
083     *
084     */
085    private static final long serialVersionUID = -5968625125574032736L;
086
087    private final static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5CompoundDS.class);
088
089    /**
     * The list of attributes of this data object. Members of the list are instances of Attribute.
091     */
092    private List<Attribute> attributeList;
093
094    private int nAttributes = -1;
095
096    private H5O_info_t obj_info;
097
098    /**
099     * A list of names of all fields including nested fields.
100     * <p>
     * The nested names are separated by CompoundDS.separator. For example, if compound dataset "A" has the following
102     * nested structure,
103     *
104     * <pre>
105     * A --> m01
106     * A --> m02
107     * A --> nest1 --> m11
108     * A --> nest1 --> m12
109     * A --> nest1 --> nest2 --> m21
110     * A --> nest1 --> nest2 --> m22
111     * i.e.
112     * A = { m01, m02, nest1{m11, m12, nest2{ m21, m22}}}
113     * </pre>
114     *
115     * The flatNameList of compound dataset "A" will be {m01, m02, nest1[m11, nest1[m12, nest1[nest2[m21,
116     * nest1[nest2[m22}
117     *
118     */
119    private List<String> flatNameList;
120
121    /**
122     * A list of datatypes of all fields including nested fields.
123     */
124    private List<Integer> flatTypeList;
125
    /** flag to indicate if the dataset is an external dataset */
127    private boolean isExternal = false;
128
129    /**
     * Constructs an instance of an HDF5 compound dataset with the given file, dataset name and path.
     * <p>
     * The dataset object represents an existing dataset in the file. For example, new H5CompoundDS(file, "dset1",
     * "/g0/") constructs a dataset object that corresponds to the dataset "dset1" at group "/g0/".
     * <p>
     * This object is usually constructed at FileFormat.open(), which loads the file structure and object information
     * into a tree structure (TreeNode). It is rarely used elsewhere.
137     * <p>
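     * A typical way to obtain the dataset object is shown below (a minimal sketch; the file name and dataset path
     * are assumptions):
     *
     * <pre>
     * H5File file = new H5File(&quot;test_hdf5.h5&quot;, FileFormat.READ);
     * file.open();
     * H5CompoundDS dset = (H5CompoundDS) file.get(&quot;/g0/dset1&quot;);
     * </pre>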
138     *
139     * @param theFile
140     *            the file that contains the data object.
141     * @param theName
142     *            the name of the data object, e.g. "dset".
143     * @param thePath
144     *            the full path of the data object, e.g. "/arrays/".
145     */
146    public H5CompoundDS(FileFormat theFile, String theName, String thePath) {
147        this(theFile, theName, thePath, null);
148    }
149
150    /**
     * @deprecated Not for public use in the future.<br>
     *             Use {@link #H5CompoundDS(FileFormat, String, String)} instead.
153     */
154    @Deprecated
155    public H5CompoundDS(FileFormat theFile, String theName, String thePath, long[] oid) {
156        super(theFile, theName, thePath, oid);
157        obj_info = new H5O_info_t(-1L, -1L, 0, 0, -1L, 0L, 0L, 0L, 0L, null, null, null);
158
159        if ((oid == null) && (theFile != null)) {
160            // retrieve the object ID
161            try {
162                byte[] ref_buf = H5.H5Rcreate(theFile.getFID(), this.getFullName(), HDF5Constants.H5R_OBJECT, -1);
163                this.oid = new long[1];
164                this.oid[0] = HDFNativeData.byteToLong(ref_buf, 0);
165            }
166            catch (Exception ex) {
167                log.debug("constructor ID {} for {} failed H5Rcreate", theFile.getFID(), this.getFullName());
168            }
169        }
170    }
171
172    /*
173     * (non-Javadoc)
174     *
175     * @see hdf.object.HObject#open()
176     */
177    @Override
178    public int open() {
179        int did = -1;
180
181        try {
182            did = H5.H5Dopen(getFID(), getPath() + getName(), HDF5Constants.H5P_DEFAULT);
183        }
184        catch (HDF5Exception ex) {
185            log.debug("Failed to open dataset {}", getPath() + getName());
186            did = -1;
187        }
188
189        return did;
190    }
191
192    /*
193     * (non-Javadoc)
194     *
195     * @see hdf.object.HObject#close(int)
196     */
197    @Override
198    public void close(int did) {
199        if (did >= 0) {
200            try {
201                H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_LOCAL);
202            }
203            catch (Exception ex) {
204                log.debug("close.H5Fflush:", ex);
205            }
206            try {
207                H5.H5Dclose(did);
208            }
209            catch (HDF5Exception ex) {
210                log.debug("close.H5Dclose:", ex);
211            }
212        }
213    }
214
215    /*
216     * (non-Javadoc)
217     *
218     * @see hdf.object.Dataset#init()
219     */
220    @Override
221    public void init() {
222        if (rank > 0) {
223            resetSelection();
224            return; // already called. Initialize only once
225        }
226        log.trace("init() start");
227
228        int did = -1, sid = -1, tid = -1, tclass = -1;
229        flatNameList = new Vector<String>();
230        flatTypeList = new Vector<Integer>();
231        int[] memberTIDs = null;
232
233        did = open();
234        if (did >= 0) {
235            // check if it is an external dataset
236            int pid = -1;
237            try {
238                pid = H5.H5Dget_create_plist(did);
239                int nfiles = H5.H5Pget_external_count(pid);
240                isExternal = (nfiles > 0);
241            }
242            catch (Exception ex) {
243                log.debug("check if it is an external dataset:", ex);
244            }
245            finally {
246                try {
247                    H5.H5Pclose(pid);
248                }
249                catch (Exception ex) {
250                    log.debug("finally close:", ex);
251                }
252            }
253
254            try {
255                sid = H5.H5Dget_space(did);
256                rank = H5.H5Sget_simple_extent_ndims(sid);
257                tid = H5.H5Dget_type(did);
258                tclass = H5.H5Tget_class(tid);
259
260                int tmptid = 0;
261                if (tclass == HDF5Constants.H5T_ARRAY) {
262                    // array of compound
263                    tmptid = tid;
264                    tid = H5.H5Tget_super(tmptid);
265                    try {H5.H5Tclose(tmptid);} catch (HDF5Exception ex) {}
266                }
267
268                if (rank == 0) {
269                    // a scalar data point
270                    rank = 1;
271                    dims = new long[1];
272                    dims[0] = 1;
273                }
274                else {
275                    dims = new long[rank];
276                    maxDims = new long[rank];
277                    H5.H5Sget_simple_extent_dims(sid, dims, maxDims);
278                }
279
280                startDims = new long[rank];
281                selectedDims = new long[rank];
282
283                // initialize member information
284                extractCompoundInfo(tid, "", flatNameList, flatTypeList);
285                numberOfMembers = flatNameList.size();
286
287                memberNames = new String[numberOfMembers];
288                memberTIDs = new int[numberOfMembers];
289                memberTypes = new Datatype[numberOfMembers];
290                memberOrders = new int[numberOfMembers];
291                isMemberSelected = new boolean[numberOfMembers];
292                memberDims = new Object[numberOfMembers];
293
294                for (int i = 0; i < numberOfMembers; i++) {
295                    isMemberSelected[i] = true;
296                    memberTIDs[i] = ((Integer) flatTypeList.get(i)).intValue();
297                    memberTypes[i] = new H5Datatype(memberTIDs[i]);
298                    memberNames[i] = (String) flatNameList.get(i);
299                    memberOrders[i] = 1;
300                    memberDims[i] = null;
301
302                    try {
303                        tclass = H5.H5Tget_class(memberTIDs[i]);
304                    }
305                    catch (HDF5Exception ex) {
306                        log.debug("memberTIDs[{}]:", i, ex);
307                    }
308
309                    if (tclass == HDF5Constants.H5T_ARRAY) {
310                        int n = H5.H5Tget_array_ndims(memberTIDs[i]);
311                        long mdim[] = new long[n];
312                        H5.H5Tget_array_dims(memberTIDs[i], mdim);
313                        int idim[] = new int[n];
314                        for (int j = 0; j < n; j++)
315                            idim[j] = (int) mdim[j];
316                        memberDims[i] = idim;
317                        tmptid = H5.H5Tget_super(memberTIDs[i]);
318                        memberOrders[i] = (H5.H5Tget_size(memberTIDs[i]) / H5.H5Tget_size(tmptid));
319                        try {
320                            H5.H5Tclose(tmptid);
321                        }
322                        catch (HDF5Exception ex) {
323                            log.debug("close temp of memberTIDs[{}]:", i, ex);
324                        }
325                    }
326                } // for (int i=0; i<numberOfMembers; i++)
327            }
328            catch (HDF5Exception ex) {
329                numberOfMembers = 0;
330                memberNames = null;
331                memberTypes = null;
332                memberOrders = null;
333                log.debug("init():", ex);
334            }
335            finally {
336                try {
337                    H5.H5Tclose(tid);
338                }
339                catch (HDF5Exception ex2) {
340                    log.debug("finally close:", ex2);
341                }
342                try {
343                    H5.H5Sclose(sid);
344                }
345                catch (HDF5Exception ex2) {
346                    log.debug("finally close:", ex2);
347                }
348
349                if (memberTIDs != null) {
350                    for (int i = 0; i < memberTIDs.length; i++) {
351                        try {
352                            H5.H5Tclose(memberTIDs[i]);
353                        }
354                        catch (Exception ex) {
355                            log.debug("finally close:", ex);
356                        }
357                    }
358                }
359            }
360
361            close(did);
362        }
363        else {
364            log.debug("init() failed to open dataset");
365        }
366
367        resetSelection();
368    }
369
370    /*
371     * (non-Javadoc)
372     *
373     * @see hdf.object.DataFormat#hasAttribute()
374     */
375    public boolean hasAttribute() {
376        obj_info.num_attrs = nAttributes;
377
378        if (obj_info.num_attrs < 0) {
379            int did = open();
380            if (did >= 0) {
381                try {
382                    obj_info = H5.H5Oget_info(did);
383                    nAttributes = (int) obj_info.num_attrs;
384                }
385                catch (Exception ex) {
386                    obj_info.num_attrs = 0;
387                    log.debug("hasAttribute: get object info:", ex);
388                }
389                close(did);
390            }
391            else {
392                log.debug("could not open dataset");
393            }
394        }
395
396        return (obj_info.num_attrs > 0);
397    }
398
399    /*
400     * (non-Javadoc)
401     *
402     * @see hdf.object.Dataset#getDatatype()
403     */
404    @Override
405    public Datatype getDatatype() {
406        if (datatype == null) {
407            log.trace("H5CompoundDS getDatatype: datatype == null");
408            datatype = new H5Datatype(Datatype.CLASS_COMPOUND, -1, -1, -1);
409        }
410
411        return datatype;
412    }
413
414    /*
415     * (non-Javadoc)
416     *
417     * @see hdf.object.Dataset#clear()
418     */
419    @Override
420    public void clear() {
421        super.clear();
422
423        if (attributeList != null) {
424            ((Vector<Attribute>) attributeList).setSize(0);
425        }
426    }
427
428    /*
429     * (non-Javadoc)
430     *
431     * @see hdf.object.Dataset#readBytes()
432     */
433    @Override
434    public byte[] readBytes() throws HDF5Exception {
435        byte[] theData = null;
436
437        log.trace("H5CompoundDS readBytes: start");
438        if (rank <= 0) {
439            init();
440        }
441
442        int did = open();
443        if (did >= 0) {
444            int fspace = -1, mspace = -1, tid = -1;
445
446            try {
447                long[] lsize = { 1 };
448                for (int j = 0; j < selectedDims.length; j++) {
449                    lsize[0] *= selectedDims[j];
450                }
451
452                fspace = H5.H5Dget_space(did);
453                mspace = H5.H5Screate_simple(rank, selectedDims, null);
454
                // set the rectangle selection
                // HDF5 bug: for a scalar dataset, H5Sselect_hyperslab gives a core dump
                if (rank * dims[0] > 1) {
                    // set block to 1
                    H5.H5Sselect_hyperslab(fspace, HDF5Constants.H5S_SELECT_SET, startDims, selectedStride,
                            selectedDims, null);
462                }
463
464                tid = H5.H5Dget_type(did);
465                int size = H5.H5Tget_size(tid) * (int) lsize[0];
466                log.trace("H5CompoundDS readBytes: size = {}", size);
467                theData = new byte[size];
468                H5.H5Dread(did, tid, mspace, fspace, HDF5Constants.H5P_DEFAULT, theData);
469            }
470            finally {
471                try {
472                    H5.H5Sclose(fspace);
473                }
474                catch (Exception ex2) {
475                    log.debug("finally close:", ex2);
476                }
477                try {
478                    H5.H5Sclose(mspace);
479                }
480                catch (Exception ex2) {
481                    log.debug("finally close:", ex2);
482                }
483                try {
484                    H5.H5Tclose(tid);
485                }
486                catch (HDF5Exception ex2) {
487                    log.debug("finally close:", ex2);
488                }
489                close(did);
490            }
491        }
492        log.trace("H5CompoundDS readBytes: finish");
493
494        return theData;
495    }
496
497    /*
498     * (non-Javadoc)
499     *
500     * @see hdf.object.Dataset#read()
501     */
502    @Override
503    public Object read() throws Exception {
504        List<Object> list = null;
505        Object member_data = null;
506        String member_name = null;
507        int member_class = -1;
508        int member_size = 0;
509        int atom_tid = -1;
510        int did = -1;
511        int tid = -1;
512        int spaceIDs[] = { -1, -1 }; // spaceIDs[0]=mspace, spaceIDs[1]=fspace
513
514        log.trace("H5CompoundDS read: start");
515        if (rank <= 0) {
516            init(); // read data information into memory
517        }
518
519        if (numberOfMembers <= 0) {
520            return null; // this compound dataset does not have any member
521        }
522
523        if (isExternal) {
524            String pdir = this.getFileFormat().getAbsoluteFile().getParent();
525
526            if (pdir == null) {
527                pdir = ".";
528            }
529            H5.H5Dchdir_ext(pdir);
530        }
531
532        long[] lsize = { 1 };
533        log.trace("H5CompoundDS read: open dataset");
534        did = open();
535        if (did >= 0) {
536            list = new Vector<Object>(flatNameList.size());
537            Vector<Integer> atomicList = new Vector<Integer>();
538            try {
539                lsize[0] = selectHyperslab(did, spaceIDs);
540                log.trace("H5CompoundDS read: opened dataset size {} for {}", lsize[0], nPoints);
541
542                if (lsize[0] == 0) {
543                    throw new HDF5Exception("No data to read.\nEither the dataset or the selected subset is empty.");
544                }
545
546                if (log.isDebugEnabled()) {
                    // check if storage space is allocated
548                    try {
549                        long ssize = H5.H5Dget_storage_size(did);
550                        log.trace("Storage space allocated = {}.", ssize);
551                    }
552                    catch (Exception ex) {
553                        log.debug("check if storage space is allocated:", ex);
554                    }
555                }
556
                // read each member's data into a byte array, then convert
                // it to its type, such as int, long, float, etc.
559                int n = flatNameList.size();
560                tid = H5.H5Dget_type(did);
561                log.trace("H5CompoundDS read: H5Tget_super");
562                int tclass = H5.H5Tget_class(tid);
563                if (tclass == HDF5Constants.H5T_ARRAY) {
564                    // array of compound
565                    int tmptid = -1;
566                    try {
567                        tmptid = tid;
568                        tid = H5.H5Tget_super(tmptid);
569                    }
570                    finally {
571                        try {H5.H5Tclose(tmptid);}
572                        catch (Exception ex2) {log.debug("finally close:", ex2);}
573                    }
574                }
575
576                extractCompoundInfo(tid, null, null, atomicList);
577
                log.trace("H5CompoundDS read: foreach nMembers={}", n);
579                for (int i = 0; i < n; i++) {
580                    boolean isVL = false;
581
582                    if (!isMemberSelected[i]) {
583                        log.debug("H5CompoundDS read: Member[{}] is not selected", i);
584                        continue; // the field is not selected
585                    }
586
587                    member_name = new String(memberNames[i]);
588
589                    atom_tid = ((Integer) atomicList.get(i)).intValue();
590                    try {
591                        member_class = H5.H5Tget_class(atom_tid);
592                        member_size = H5.H5Tget_size(atom_tid);
593                        member_data = H5Datatype.allocateArray(atom_tid, (int) lsize[0]);
594                    }
595                    catch (OutOfMemoryError err) {
596                        member_data = null;
597                        throw new HDF5Exception("Out Of Memory.");
598                    }
599                    catch (Exception ex) {
600                        member_data = null;
601                    }
602                    log.trace("H5CompoundDS read: {} Member[{}] is class {} of size={}", member_name, i, member_class, member_size);
603
604                    if (member_data == null || H5.H5Tequal(atom_tid, HDF5Constants.H5T_STD_REF_DSETREG)) {
605                        String[] nullValues = new String[(int) lsize[0]];
606                        String errorStr = "*unsupported*";
607                        for (int j = 0; j < lsize[0]; j++) {
608                            nullValues[j] = errorStr;
609                        }
610                        list.add(nullValues);
611                        continue;
612                    }
613
614                    if (member_data != null) {
615                        int comp_tid = -1;
616                        int compInfo[] = { member_class, member_size, 0 };
617                        try {
618                            comp_tid = createCompoundFieldType(atom_tid, member_name, compInfo);
619                        }
620                        catch (HDF5Exception ex) {
621                            String[] nullValues = new String[(int) lsize[0]];
622                            for (int j = 0; j < lsize[0]; j++) {
623                                nullValues[j] = "";
624                            }
625                            list.add(nullValues);
                            log.debug("H5CompoundDS read: {} Member[{}] createCompoundFieldType failure:", member_name, i, ex);
627                            continue;
628                        }
629                        try {
630                            // See BUG#951 isVL = H5.H5Tdetect_class(atom_tid,
631                            // HDF5Constants.H5T_VLEN);
632                            isVL = isVL || H5.H5Tis_variable_str(atom_tid);
633                            isVL = isVL || H5.H5Tdetect_class(atom_tid, HDF5Constants.H5T_VLEN);
634                        }
635                        catch (Exception ex) {
636                            log.debug("H5CompoundDS read: detection of varstr:", ex);
637                            isVL = false;
638                        }
639                        try {
640                            log.trace("H5CompoundDS read: H5Dread did={} spaceIDs[0]={} spaceIDs[1]={}", did, spaceIDs[0], spaceIDs[1]);
641                            if (isVL) {
642                                H5.H5DreadVL(did, comp_tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT,
643                                        (Object[]) member_data);
644                            }
645                            else {
646                                H5.H5Dread(did, comp_tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, member_data);
647                            }
648                        }
649                        catch (HDF5DataFiltersException exfltr) {
650                            log.debug("H5CompoundDS read: {} Member[{}] read failure:", member_name, i, exfltr);
651                            throw new Exception("Filter not available exception: " + exfltr.getMessage(), exfltr);
652                        }
653                        catch (HDF5Exception ex2) {
654                            String[] nullValues = new String[(int) lsize[0]];
655                            for (int j = 0; j < lsize[0]; j++) {
656                                nullValues[j] = "";
657                            }
658                            list.add(nullValues);
659                            log.debug("H5CompoundDS read: {} Member[{}] read failure:", member_name, i, ex2);
660                            continue;
661                        }
662                        finally {
663                            try {H5.H5Tclose(comp_tid);}
664                            catch (Exception ex3) {log.debug("H5CompoundDS read: finally close:", ex3);}
665                        }
666
667                        if (!isVL) {
668                            String cname = member_data.getClass().getName();
669                            char dname = cname.charAt(cname.lastIndexOf("[") + 1);
670                            log.trace("H5CompoundDS read(!isVL): {} Member[{}] is cname {} of dname={} convert={}", member_name, i, cname, dname, convertByteToString);
671
672                            if ((member_class == HDF5Constants.H5T_STRING) && convertByteToString) {
673                                if (dname == 'B') {
674                                    member_data = byteToString((byte[]) member_data, member_size / memberOrders[i]);
675                                    log.trace("H5CompoundDS read(!isVL)convertByteToString: {} Member[{}]", member_name, i);
676                                }
677                            }
678                            else if (member_class == HDF5Constants.H5T_REFERENCE) {
679                                if (dname == 'B') {
680                                    member_data = HDFNativeData.byteToLong((byte[]) member_data);
681                                    log.trace("H5CompoundDS read(!isVL)convertByteToLong: {} Member[{}]", member_name, i);
682                                }
683                            }
684                            else if (compInfo[2] != 0) {
685                                member_data = Dataset.convertFromUnsignedC(member_data, null);
686                                log.trace("H5CompoundDS read(!isVL)convertFromUnsignedC: {} Member[{}]", member_name, i);
687                            }
688                            else if (member_class == HDF5Constants.H5T_ENUM && enumConverted) {
689                                try {
690                                    String[] strs = H5Datatype.convertEnumValueToName(atom_tid, member_data, null);
691                                    if (strs != null) {
692                                        member_data = strs;
693                                        log.trace("H5CompoundDS read(!isVL)convertEnumValueToName: {} Member[{}]", member_name, i);
694                                    }
695                                }
696                                catch (Exception ex) {
697                                    log.debug("read: H5Datatype.convertEnumValueToName:", ex);
698                                }
699                            }
700                        }
701
702                        list.add(member_data);
703                    } // if (member_data != null)
704                } // end of for (int i=0; i<num_members; i++)
705
706            }
707            finally {
708                try {
709                    if (HDF5Constants.H5S_ALL != spaceIDs[0])
710                        H5.H5Sclose(spaceIDs[0]);
711                }
712                catch (Exception ex2) {
713                    log.debug("read: finally close:", ex2);
714                }
715                try {
716                    if (HDF5Constants.H5S_ALL != spaceIDs[1])
717                        H5.H5Sclose(spaceIDs[1]);
718                }
719                catch (Exception ex2) {
720                    log.debug("read: finally close:", ex2);
721                }
722
723                // close atomic types
724                int ntypes = atomicList.size();
725                for (int i = 0; i < ntypes; i++) {
726                    atom_tid = ((Integer) atomicList.get(i)).intValue();
727                    try {
728                        H5.H5Tclose(atom_tid);
729                    }
730                    catch (Exception ex2) {
731                        log.debug("finally close:", ex2);
732                    }
733                }
734                try {H5.H5Tclose(tid);}
735                catch (Exception ex2) {log.debug("finally close:", ex2);}
736
737                close(did);
738            }
739        }
740
741        log.trace("H5CompoundDS read: finish");
742        return list;
743    }
744
745    /**
746     * Writes the given data buffer into this dataset in a file.
747     * <p>
     * The data buffer is a vector that contains the data values of compound fields. The data is written to the file
     * field by field.
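     * <p>
     * A minimal sketch is shown below (assumptions: dset is an initialized H5CompoundDS whose first two selected
     * members are an int field and a float field, in that order):
     *
     * <pre>
     * Vector data = new Vector();
     * data.add(0, new int[] { 1, 2, 3 });
     * data.add(1, new float[] { 1f, 2f, 3f });
     * dset.write(data);
     * </pre>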
750     *
751     * @param buf
752     *            The vector that contains the data values of compound fields.
753     */
754    @Override
755    public void write(Object buf) throws HDF5Exception {
756        log.trace("H5CompoundDS write: start");
757        int did = -1;
758        int tid = -1;
759        int spaceIDs[] = { -1, -1 }; // spaceIDs[0]=mspace, spaceIDs[1]=fspace
760        Object member_data = null;
761        String member_name = null;
762        int atom_tid = -1, member_class = -1, member_size = 0;
763
        if ((buf == null) || (numberOfMembers <= 0) || !(buf instanceof List)) {
            return;
        }
        List<?> list = (List<?>) buf;
768
769        long[] lsize = { 1 };
770        did = open();
771        log.trace("H5CompoundDS write: dataset opened");
772        if (did >= 0) {
773            Vector<Integer> atomicList = new Vector<Integer>();
774            try {
775                lsize[0] = selectHyperslab(did, spaceIDs);
776                int tmptid = H5.H5Dget_type(did);
777
                // write the data of each selected member field by field,
                // converting it to the file form (unsigned, string, enum) as needed
780                int idx = 0;
781                int n = flatNameList.size();
782                boolean isEnum = false;
783
784                try {
785                    extractCompoundInfo(tmptid, null, null, atomicList);
786                }
787                finally {
788                    try {H5.H5Tclose(tmptid);}
789                    catch (Exception ex2) {log.debug("finally close:", ex2);}
790                }
791                for (int i = 0; i < n; i++) {
792                    log.trace("H5CompoundDS write: Member[{}] of {}", i, n);
793                    if (!isMemberSelected[i]) {
794                        log.debug("H5CompoundDS write: Member[{}] is not selected", i);
795                        continue; // the field is not selected
796                    }
797
798                    member_name = new String(memberNames[i]);
799                    atom_tid = ((Integer) atomicList.get(i)).intValue();
800                    member_data = list.get(idx++);
801
802                    if (member_data == null) {
803                        log.debug("H5CompoundDS write: Member[{}] data is null", i);
804                        continue;
805                    }
806
807                    boolean isVL = false;
808                    try {
809                        isVL = (H5.H5Tget_class(atom_tid) == HDF5Constants.H5T_VLEN || H5.H5Tis_variable_str(atom_tid));
810                        log.debug("H5CompoundDS write: Member[{}] is VL", i);
811                    }
812                    catch (Exception ex) {
813                        log.debug("isVL:", ex);
814                    }
815
816                    try {
817                        member_class = H5.H5Tget_class(atom_tid);
818                        member_size = H5.H5Tget_size(atom_tid);
819                        isEnum = (member_class == HDF5Constants.H5T_ENUM);
820                    }
821                    catch (Exception ex) {
822                        log.debug("H5CompoundDS write: member class - size:", ex);
823                    }
824                    log.trace("H5CompoundDS write: {} Member[{}] is class {} of size={}", member_name, i, member_class, member_size);
825
826                    Object tmpData = member_data;
827
828                    int compInfo[] = { member_class, member_size, 0 };
829                    try {
830                        tid = createCompoundFieldType(atom_tid, member_name, compInfo);
831                        log.debug("H5CompoundDS write: {} Member[{}] compInfo[class]={} compInfo[size]={} compInfo[unsigned]={}",
832                                member_name, i, compInfo[0], compInfo[1], compInfo[2]);
833                        if(isVL) {
834                            H5.H5DwriteString(did, tid,
835                                    spaceIDs[0], spaceIDs[1],
836                                    HDF5Constants.H5P_DEFAULT, (String[])tmpData);
837                        }
838                        else {
839                            if (compInfo[2] != 0) {
                                // check if the unsigned integer data needs to be converted
841                                int tsize = H5.H5Tget_size(tid);
842                                String cname = member_data.getClass().getName();
843                                char dname = cname.charAt(cname.lastIndexOf("[") + 1);
844                                boolean doConversion = (((tsize == 1) && (dname == 'S'))
845                                        || ((tsize == 2) && (dname == 'I')) || ((tsize == 4) && (dname == 'J')));
846
847                                tmpData = member_data;
848                                if (doConversion) {
849                                    tmpData = convertToUnsignedC(member_data, null);
850                                }
851                                log.trace("H5CompoundDS write: {} Member[{}] convertToUnsignedC", member_name, i);
852                            }
853                            else if ((member_class == HDF5Constants.H5T_STRING) && (Array.get(member_data, 0) instanceof String)) {
854                                tmpData = stringToByte((String[]) member_data, member_size);
855                                log.trace("H5CompoundDS write: {} Member[{}] stringToByte", member_name, i);
856                            }
857                            else if (isEnum && (Array.get(member_data, 0) instanceof String)) {
858                                tmpData = H5Datatype.convertEnumNameToValue(atom_tid, (String[]) member_data, null);
859                                log.trace("H5CompoundDS write: {} Member[{}] convertEnumNameToValue", member_name, i);
860                            }
861
862                            if (tmpData != null) {
                                // BUG!!! nested compound data is not written and no exception is thrown;
                                // need to check whether it is a Java error or a C library error
867                                log.debug("H5CompoundDS write: H5Dwrite warning - does not write nested compound data");
868                                H5.H5Dwrite(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, tmpData);
869                            }
870                        }
871                    }
872                    catch (Exception ex1) {
873                        log.debug("write: H5Dwrite process failure:", ex1);
874                    }
875                    finally {
876                        try {
877                            H5.H5Tclose(tid);
878                        }
879                        catch (Exception ex2) {
880                            log.debug("write: finally close:", ex2);
881                        }
882                    }
883                } // end of for (int i=0; i<num_members; i++)
884            }
885            finally {
886                try {
887                    if (HDF5Constants.H5S_ALL != spaceIDs[0])
888                        H5.H5Sclose(spaceIDs[0]);
889                }
890                catch (Exception ex2) {
891                    log.debug("write: finally close:", ex2);
892                }
893                try {
894                    if (HDF5Constants.H5S_ALL != spaceIDs[1])
895                        H5.H5Sclose(spaceIDs[1]);
896                }
897                catch (Exception ex2) {
898                    log.debug("write: finally close:", ex2);
899                }
900
901                // close atomic types
902                int ntypes = atomicList.size();
903                for (int i = 0; i < ntypes; i++) {
904                    atom_tid = ((Integer) atomicList.get(i)).intValue();
905                    try {
906                        H5.H5Tclose(atom_tid);
907                    }
908                    catch (Exception ex2) {
909                        log.debug("write: finally close:", ex2);
910                    }
911                }
912            }
913            close(did);
914        }
915        log.trace("H5CompoundDS write: finish");
916    }
917
918    /**
     * Sets up the hyperslab selection.
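     * <p>
     * Typical usage inside this class (as in read() and write()):
     *
     * <pre>
     * long[] lsize = { 1 };
     * int[] spaceIDs = { -1, -1 }; // spaceIDs[0]=mspace, spaceIDs[1]=fspace
     * lsize[0] = selectHyperslab(did, spaceIDs);
     * </pre>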
920     *
921     * @param did
922     *            IN dataset ID
923     * @param spaceIDs
924     *            IN/OUT memory and file space IDs -- spaceIDs[0]=mspace, spaceIDs[1]=fspace
     * @return total number of data points selected
926     */
927    private long selectHyperslab(int did, int[] spaceIDs) throws HDF5Exception {
928        long lsize = 1;
929
930        boolean isAllSelected = true;
931        for (int i = 0; i < rank; i++) {
932            lsize *= selectedDims[i];
933            if (selectedDims[i] < dims[i]) {
934                isAllSelected = false;
935            }
936        }
937
938        if (isAllSelected) {
939            spaceIDs[0] = HDF5Constants.H5S_ALL;
940            spaceIDs[1] = HDF5Constants.H5S_ALL;
941        }
942        else {
943            spaceIDs[1] = H5.H5Dget_space(did);
944
            // When a 1D dataspace is used with a chunked dataset, reading is very slow.
            // This is a known problem of the HDF5 library for chunked datasets.
            // mspace = H5.H5Screate_simple(1, lsize, null);
949            spaceIDs[0] = H5.H5Screate_simple(rank, selectedDims, null);
950            H5.H5Sselect_hyperslab(spaceIDs[1], HDF5Constants.H5S_SELECT_SET, startDims, selectedStride, selectedDims,
951                    null);
952        }
953
954        return lsize;
955    }
956
957    /*
958     * (non-Javadoc)
959     *
960     * @see hdf.object.DataFormat#getMetadata()
961     */
962    public List<Attribute> getMetadata() throws HDF5Exception {
963        return this.getMetadata(fileFormat.getIndexType(null), fileFormat.getIndexOrder(null));
964    }
965
966    /*
967     * (non-Javadoc)
968     *
969     * @see hdf.object.DataFormat#getMetadata(int...)
970     */
971    public List<Attribute> getMetadata(int... attrPropList) throws HDF5Exception {
972        if (rank <= 0) {
973            init();
974        }
975        log.trace("getMetadata: inited");
976
977        try {
978            this.linkTargetObjName = H5File.getLinkTargetName(this);
979        }
980        catch (Exception ex) {
981            log.debug("getLinkTargetName failed: ", ex);
982        }
983
984        if (attributeList != null) {
985            return attributeList;
986        }
987
988        // load attributes first
989        int did = -1, pid = -1;
990        int indxType = fileFormat.getIndexType(null);
991        int order = fileFormat.getIndexOrder(null);
992
993        if (attrPropList.length > 0) {
994            indxType = attrPropList[0];
995            if (attrPropList.length > 1) {
996                order = attrPropList[1];
997            }
998        }
999        log.trace("getMetadata: open dataset");
1000        did = open();
1001        if (did >= 0) {
1002            log.trace("getMetadata: dataset opened");
1003            try {
1004                compression = "";
1005                attributeList = H5File.getAttribute(did, indxType, order);
1006                log.trace("getMetadata: attributeList loaded");
1007
1008                // get the compression and chunk information
1009                pid = H5.H5Dget_create_plist(did);
1010                long storage_size = H5.H5Dget_storage_size(did);
1011                int nfilt = H5.H5Pget_nfilters(pid);
1012                if (H5.H5Pget_layout(pid) == HDF5Constants.H5D_CHUNKED) {
1013                    chunkSize = new long[rank];
1014                    H5.H5Pget_chunk(pid, rank, chunkSize);
1015                    if(nfilt > 0) {
1016                        long    nelmts = 1;
1017                        long    uncomp_size;
1018                        long    datum_size = getDatatype().getDatatypeSize();
1019                        if (datum_size < 0) {
1020                            int tmptid = -1;
1021                            try {
1022                                tmptid = H5.H5Dget_type(did);
1023                                datum_size = H5.H5Tget_size(tmptid);
1024                            }
1025                            finally {
1026                                try {H5.H5Tclose(tmptid);}
1027                                catch (Exception ex2) {log.debug("finally close:", ex2);}
1028                            }
1029                        }
1030
1031
1032                        for(int i = 0; i < rank; i++) {
1033                            nelmts *= dims[i];
1034                        }
1035                        uncomp_size = nelmts * datum_size;
1036
1037                        /* compression ratio = uncompressed size /  compressed size */
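                        /* e.g. a 4000-byte uncompressed dataset stored in 1000 bytes gives a ratio of 4.000:1 */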
1038
1039                        if(storage_size != 0) {
1040                            double ratio = (double) uncomp_size / (double) storage_size;
1041                            DecimalFormat df = new DecimalFormat();
1042                            df.setMinimumFractionDigits(3);
1043                            df.setMaximumFractionDigits(3);
1044                            compression +=  df.format(ratio) + ":1";
1045                        }
1046                    }
1047                }
1048                else {
1049                    chunkSize = null;
1050                }
1051
1052                int[] flags = { 0, 0 };
1053                long[] cd_nelmts = { 20 };
                int[] cd_values = new int[(int) cd_nelmts[0]];
1055                String[] cd_name = { "", "" };
1056                log.trace("getMetadata: {} filters in pipeline", nfilt);
1057                int filter = -1;
1058                int[] filter_config = { 1 };
1059                filters = "";
1060
1061                for (int i = 0, k = 0; i < nfilt; i++) {
1062                    log.trace("getMetadata: filter[{}]", i);
1063                    if (i > 0) {
1064                        filters += ", ";
1065                    }
1066                    if (k > 0) {
1067                        compression += ", ";
1068                    }
1069
1070                    try {
1071                        cd_nelmts[0] = 20;
                        cd_values = new int[(int) cd_nelmts[0]];
1074                        filter = H5.H5Pget_filter(pid, i, flags, cd_nelmts, cd_values, 120, cd_name, filter_config);
1075                        log.trace("getMetadata: filter[{}] is {} has {} elements ", i, cd_name[0], cd_nelmts[0]);
1076                        for (int j = 0; j < cd_nelmts[0]; j++) {
1077                            log.trace("getMetadata: filter[{}] element {} = {}", i, j, cd_values[j]);
1078                        }
1079                    }
1080                    catch (Throwable err) {
1081                        filters += "ERROR";
1082                        continue;
1083                    }
1084
1085                    if (filter == HDF5Constants.H5Z_FILTER_NONE) {
1086                        filters += "NONE";
1087                    }
1088                    else if (filter == HDF5Constants.H5Z_FILTER_DEFLATE) {
1089                        filters += "GZIP";
1090                        compression += compression_gzip_txt + cd_values[0];
1091                        k++;
1092                    }
1093                    else if (filter == HDF5Constants.H5Z_FILTER_FLETCHER32) {
1094                        filters += "Error detection filter";
1095                    }
1096                    else if (filter == HDF5Constants.H5Z_FILTER_SHUFFLE) {
1097                        filters += "SHUFFLE: Nbytes = " + cd_values[0];
1098                    }
1099                    else if (filter == HDF5Constants.H5Z_FILTER_NBIT) {
1100                        filters += "NBIT";
1101                    }
1102                    else if (filter == HDF5Constants.H5Z_FILTER_SCALEOFFSET) {
1103                        filters += "SCALEOFFSET: MIN BITS = " + cd_values[0];
1104                    }
1105                    else if (filter == HDF5Constants.H5Z_FILTER_SZIP) {
1106                        filters += "SZIP";
1107                        compression += "SZIP: Pixels per block = " + cd_values[1];
1108                        k++;
1109                        int flag = -1;
1110                        try {
1111                            flag = H5.H5Zget_filter_info(filter);
1112                        }
1113                        catch (Exception ex) {
1114                            flag = -1;
1115                        }
1116                        if (flag == HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) {
1117                            compression += ": H5Z_FILTER_CONFIG_DECODE_ENABLED";
1118                        }
1119                        else if ((flag == HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED)
1120                                || (flag >= (HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED + HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED))) {
1121                            compression += ": H5Z_FILTER_CONFIG_ENCODE_ENABLED";
1122                        }
1123                    }
1124                    else {
1125                        filters += "USERDEFINED " + cd_name[0] + "(" + filter + "): ";
1126                        for (int j = 0; j < cd_nelmts[0]; j++) {
1127                            if (j > 0)
1128                                filters += ", ";
1129                            filters += cd_values[j];
1130                        }
1131                        log.debug("getMetadata: filter[{}] is user defined compression", i);
1132                    }
1133                } // for (int i=0; i<nfilt; i++)
1134
1135                if (compression.length() == 0) {
1136                    compression = "NONE";
1137                }
1138                log.trace("getMetadata: filter compression={}", compression);
1139
1140                if (filters.length() == 0) {
1141                    filters = "NONE";
1142                }
1143                log.trace("getMetadata: filter information={}", filters);
1144
1145                storage = "" + storage_size;
1146                try {
1147                    int[] at = { 0 };
1148                    H5.H5Pget_alloc_time(pid, at);
1149                    storage += ", allocation time: ";
1150                    if (at[0] == HDF5Constants.H5D_ALLOC_TIME_EARLY) {
1151                        storage += "Early";
1152                    }
1153                    else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_INCR) {
1154                        storage += "Incremental";
1155                    }
1156                    else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_LATE) {
1157                        storage += "Late";
1158                    }
1159                }
1160                catch (Exception ex) {
1161                    log.debug("Storage allocation time:", ex);
1162                }
1163                if (storage.length() == 0) {
1164                    storage = "NONE";
1165                }
1166                log.trace("getMetadata: storage={}", storage);
1167            }
1168            finally {
1169                try {
1170                    H5.H5Pclose(pid);
1171                }
1172                catch (Exception ex) {
1173                    log.debug("finally close:", ex);
1174                }
1175                close(did);
1176            }
1177        }
1178
1179        log.trace("getMetadata: finish");
1180        return attributeList;
1181    }
1182
1183    /*
1184     * (non-Javadoc)
1185     *
1186     * @see hdf.object.DataFormat#writeMetadata(java.lang.Object)
1187     */
1188    public void writeMetadata(Object info) throws Exception {
1189        // only attribute metadata is supported.
1190        if (!(info instanceof Attribute)) {
1191            return;
1192        }
1193
1194        boolean attrExisted = false;
1195        Attribute attr = (Attribute) info;
1196        log.trace("writeMetadata: {}", attr.getName());
1197
1198        if (attributeList == null) {
1199            this.getMetadata();
1200        }
1201
1202        if (attributeList != null)
1203            attrExisted = attributeList.contains(attr);
1204
1205        getFileFormat().writeAttribute(this, attr, attrExisted);
1206        // add the new attribute into attribute list
1207        if (!attrExisted) {
1208            attributeList.add(attr);
1209            nAttributes = attributeList.size();
1210        }
1211    }
1212
1213    /*
1214     * (non-Javadoc)
1215     *
1216     * @see hdf.object.DataFormat#removeMetadata(java.lang.Object)
1217     */
1218    public void removeMetadata(Object info) throws HDF5Exception {
1219        // only attribute metadata is supported.
1220        if (!(info instanceof Attribute)) {
1221            return;
1222        }
1223
1224        Attribute attr = (Attribute) info;
1225        log.trace("removeMetadata: {}", attr.getName());
1226        int did = open();
1227        if (did >= 0) {
1228            try {
1229                H5.H5Adelete(did, attr.getName());
1230                List<Attribute> attrList = getMetadata();
1231                attrList.remove(attr);
1232                nAttributes = attrList.size();
1233            }
1234            finally {
1235                close(did);
1236            }
1237        }
1238    }
1239
1240    /*
1241     * (non-Javadoc)
1242     *
1243     * @see hdf.object.DataFormat#updateMetadata(java.lang.Object)
1244     */
1245    public void updateMetadata(Object info) throws HDF5Exception {
1246        // only attribute metadata is supported.
1247        if (!(info instanceof Attribute)) {
1248            return;
1249        }
1250        log.trace("updateMetadata");
1251
1252        nAttributes = -1;
1253    }
1254
1255    /*
1256     * (non-Javadoc)
1257     *
1258     * @see hdf.object.HObject#setName(java.lang.String)
1259     */
1260    @Override
1261    public void setName(String newName) throws Exception {
1262        H5File.renameObject(this, newName);
1263        super.setName(newName);
1264    }
1265
1266    /**
     * Resets the dataspace selection.
1268     */
1269    private void resetSelection() {
1270        log.trace("resetSelection: start");
1271
1272        for (int i = 0; i < rank; i++) {
1273            startDims[i] = 0;
1274            selectedDims[i] = 1;
1275            if (selectedStride != null) {
1276                selectedStride[i] = 1;
1277            }
1278        }
1279
1280        if (rank == 1) {
1281            selectedIndex[0] = 0;
1282            selectedDims[0] = dims[0];
1283        }
1284        else if (rank == 2) {
1285            selectedIndex[0] = 0;
1286            selectedIndex[1] = 1;
1287            selectedDims[0] = dims[0];
1288            selectedDims[1] = dims[1];
1289        }
1290        else if (rank > 2) {
1291            // selectedIndex[0] = rank - 2; // columns
1292            // selectedIndex[1] = rank - 1; // rows
1293            // selectedIndex[2] = rank - 3;
1294            selectedIndex[0] = 0; // width, the fastest dimension
1295            selectedIndex[1] = 1; // height
1296            selectedIndex[2] = 2; // frames
1297            // selectedDims[rank - 1] = dims[rank - 1];
1298            // selectedDims[rank - 2] = dims[rank - 2];
1299            selectedDims[selectedIndex[0]] = dims[selectedIndex[0]];
1300            selectedDims[selectedIndex[1]] = dims[selectedIndex[1]];
1301        }
1302
1303        isDataLoaded = false;
1304        setMemberSelection(true);
1305        log.trace("resetSelection: finish");
1306    }
1307
1308    /**
1309     * @deprecated Not for public use in the future. <br>
     *             Use
     *             {@link #create(String, Group, long[], long[], long[], int, String[], Datatype[], int[], long[][], Object)} instead.
1312     */
1313    @Deprecated
1314    public static Dataset create(String name, Group pgroup, long[] dims, String[] memberNames,
1315            Datatype[] memberDatatypes, int[] memberSizes, Object data) throws Exception {
1316        if ((pgroup == null) || (name == null) || (dims == null) || (memberNames == null) || (memberDatatypes == null)
1317                || (memberSizes == null)) {
1318            return null;
1319        }
1320
1321        int nMembers = memberNames.length;
1322        int memberRanks[] = new int[nMembers];
1323        long memberDims[][] = new long[nMembers][1];
1324        for (int i = 0; i < nMembers; i++) {
1325            memberRanks[i] = 1;
1326            memberDims[i][0] = memberSizes[i];
1327        }
1328
1329        return H5CompoundDS.create(name, pgroup, dims, memberNames, memberDatatypes, memberRanks, memberDims, data);
1330    }
1331
1332    /**
1333     * @deprecated Not for public use in the future. <br>
     *             Use
     *             {@link #create(String, Group, long[], long[], long[], int, String[], Datatype[], int[], long[][], Object)} instead.
1336     */
1337    @Deprecated
1338    public static Dataset create(String name, Group pgroup, long[] dims, String[] memberNames,
1339            Datatype[] memberDatatypes, int[] memberRanks, long[][] memberDims, Object data) throws Exception {
1340        return H5CompoundDS.create(name, pgroup, dims, null, null, -1, memberNames, memberDatatypes, memberRanks,
1341                memberDims, data);
1342    }
1343
1344    /**
1345     * Creates a simple compound dataset in a file with/without chunking and compression.
1346     * <p>
     * This function provides an easy way to create a simple compound dataset in a file by hiding the tedious details of
     * creating a compound dataset from users.
1349     * <p>
     * This function calls H5.H5Dcreate() to create a simple compound dataset in a file. Nested compound datasets are
     * not supported. The required information to create a compound dataset includes the name, the parent group and
     * data space of the dataset, and the names, datatypes and data spaces of the compound fields. Other information
     * such as chunks, compression and the data buffer is optional.
1354     * <p>
1355     * The following example shows how to use this function to create a compound dataset in file.
1356     *
1357     * <pre>
1358     * H5File file = null;
1359     * String message = &quot;&quot;;
1360     * Group pgroup = null;
1361     * int[] DATA_INT = new int[DIM_SIZE];
1362     * float[] DATA_FLOAT = new float[DIM_SIZE];
1363     * String[] DATA_STR = new String[DIM_SIZE];
1364     * long[] DIMs = { 50, 10 };
1365     * long[] CHUNKs = { 25, 5 };
1366     *
1367     * try {
1368     *     file = (H5File) H5FILE.open(fname, H5File.CREATE);
1369     *     file.open();
1370     *     pgroup = (Group) file.get(&quot;/&quot;);
1371     * }
1372     * catch (Exception ex) {
1373     * }
1374     *
1375     * Vector data = new Vector();
1376     * data.add(0, DATA_INT);
1377     * data.add(1, DATA_FLOAT);
1378     * data.add(2, DATA_STR);
1379     *
     * // create the member datatypes and names
1381     * Datatype[] mdtypes = new H5Datatype[3];
1382     * String[] mnames = { &quot;int&quot;, &quot;float&quot;, &quot;string&quot; };
1383     * Dataset dset = null;
1384     * try {
1385     *     mdtypes[0] = new H5Datatype(Datatype.CLASS_INTEGER, 4, -1, -1);
1386     *     mdtypes[1] = new H5Datatype(Datatype.CLASS_FLOAT, 4, -1, -1);
1387     *     mdtypes[2] = new H5Datatype(Datatype.CLASS_STRING, STR_LEN, -1, -1);
1388     *     dset = file.createCompoundDS(&quot;/CompoundDS&quot;, pgroup, DIMs, null, CHUNKs, 9, mnames, mdtypes, null, data);
1389     * }
1390     * catch (Exception ex) {
1391     *     failed(message, ex, file);
1392     *     return 1;
1393     * }
1394     * </pre>
1395     *
1396     * @param name
1397     *            the name of the dataset to create.
1398     * @param pgroup
1399     *            parent group where the new dataset is created.
1400     * @param dims
1401     *            the dimension size of the dataset.
     * @param maxdims
     *            the max dimension sizes of the dataset. maxdims is set to dims if maxdims is null.
     * @param chunks
     *            the chunk sizes of the dataset. No chunking if chunks is null.
     * @param gzip
     *            GZIP compression level (1 to 9); 0 or a negative value for no compression.
     * @param memberNames
     *            the names of the compound members.
     * @param memberDatatypes
     *            the datatypes of the compound members.
     * @param memberRanks
     *            the ranks of the compound members.
     * @param memberDims
     *            the dimension sizes of the compound members.
     * @param data
     *            the list of data arrays written to the new dataset, or null if no data is written.
     *
     * @return the new compound dataset if successful; otherwise returns null.
     *
     * @throws Exception
     *             if there is a failure.
     */
1421    public static Dataset create(String name, Group pgroup, long[] dims, long[] maxdims, long[] chunks, int gzip,
1422            String[] memberNames, Datatype[] memberDatatypes, int[] memberRanks, long[][] memberDims, Object data)
1423            throws Exception {
1424        H5CompoundDS dataset = null;
1425        String fullPath = null;
1426        int did = -1, sid = -1, tid = -1, plist = -1;
1427
1428        log.trace("H5CompoundDS create start");
1429        if ((pgroup == null) || (name == null) || (dims == null) || ((gzip > 0) && (chunks == null))
1430                || (memberNames == null) || (memberDatatypes == null)
1431                || (memberRanks == null) || (memberDims == null)) {
1432            return null;
1433        }
1434
1435        H5File file = (H5File) pgroup.getFileFormat();
1436        if (file == null) {
1437            return null;
1438        }
1439
1440        String path = HObject.separator;
1441        if (!pgroup.isRoot()) {
1442            path = pgroup.getPath() + pgroup.getName() + HObject.separator;
1443            if (name.endsWith("/")) {
1444                name = name.substring(0, name.length() - 1);
1445            }
1446            int idx = name.lastIndexOf("/");
1447            if (idx >= 0) {
1448                name = name.substring(idx + 1);
1449            }
1450        }
1451
1452        fullPath = path + name;
1453
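        // Convert each member datatype to its native HDF5 form (wrapping multi-element,
        // non-string members in an H5T_ARRAY type) and accumulate the total compound size.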
1454        int typeSize = 0;
1455        int nMembers = memberNames.length;
1456        int[] mTypes = new int[nMembers];
1457        int memberSize = 1;
1458        for (int i = 0; i < nMembers; i++) {
1459            memberSize = 1;
1460            for (int j = 0; j < memberRanks[i]; j++) {
1461                memberSize *= memberDims[i][j];
1462            }
1463
1464            mTypes[i] = -1;
1465            // the member is an array
1466            if ((memberSize > 1) && (memberDatatypes[i].getDatatypeClass() != Datatype.CLASS_STRING)) {
1467                int tmptid = -1;
1468                if ((tmptid = memberDatatypes[i].toNative()) >= 0) {
1469                    try {
1470                        mTypes[i] = H5.H5Tarray_create(tmptid, memberRanks[i], memberDims[i]);
1471                    }
1472                    finally {
1473                        try {H5.H5Tclose(tmptid);}
1474                        catch (Exception ex) {log.debug("compound array create finally close:", ex);}
1475                    }
1476                }
1477            }
1478            else {
1479                mTypes[i] = memberDatatypes[i].toNative();
1480            }
1481            try {
1482                typeSize += H5.H5Tget_size(mTypes[i]);
1483            }
1484            catch (Exception ex) {
1485                log.debug("array create H5Tget_size:", ex);
1486
                // close any member datatypes created so far (including index 0) before rethrowing
                while (i >= 0) {
1488                    try {H5.H5Tclose(mTypes[i]);}
1489                    catch (HDF5Exception ex2) {log.debug("compound create finally close:", ex2);}
1490                    i--;
1491                }
1492                throw ex;
1493            }
        } // for (int i = 0; i < nMembers; i++)
1495
1496        // setup chunking and compression
1497        boolean isExtentable = false;
1498        if (maxdims != null) {
1499            for (int i = 0; i < maxdims.length; i++) {
1500                if (maxdims[i] == 0) {
1501                    maxdims[i] = dims[i];
1502                }
1503                else if (maxdims[i] < 0) {
1504                    maxdims[i] = HDF5Constants.H5S_UNLIMITED;
1505                }
1506
1507                if (maxdims[i] != dims[i]) {
1508                    isExtentable = true;
1509                }
1510            }
1511        }
1512
        // HDF5 requires chunking in order to define extendible datasets. Chunking makes it
        // possible to extend datasets efficiently, without having to reorganize storage
        // excessively. If no chunk size is given, default to min(dim, 64) in each dimension,
        // which generally performs well.
1517        if ((chunks == null) && isExtentable) {
1518            chunks = new long[dims.length];
1519            for (int i = 0; i < dims.length; i++)
1520                chunks[i] = Math.min(dims[i], 64);
1521        }
1522
1523        // prepare the dataspace and datatype
1524        int rank = dims.length;
1525
1526        try {
1527            sid = H5.H5Screate_simple(rank, dims, maxdims);
1528
1529            // figure out creation properties
1530            plist = HDF5Constants.H5P_DEFAULT;
1531
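            // build the file compound datatype by inserting each member at its running byte offset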
1532            tid = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, typeSize);
1533            int offset = 0;
1534            for (int i = 0; i < nMembers; i++) {
1535                H5.H5Tinsert(tid, memberNames[i], offset, mTypes[i]);
1536                offset += H5.H5Tget_size(mTypes[i]);
1537            }
1538
1539            if (chunks != null) {
1540                plist = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
1541
1542                H5.H5Pset_layout(plist, HDF5Constants.H5D_CHUNKED);
1543                H5.H5Pset_chunk(plist, rank, chunks);
1544
1545                // compression requires chunking
1546                if (gzip > 0) {
1547                    H5.H5Pset_deflate(plist, gzip);
1548                }
1549            }
1550
1551            int fid = file.getFID();
1552
1553            log.trace("H5CompoundDS create dataset");
1554            did = H5.H5Dcreate(fid, fullPath, tid, sid, HDF5Constants.H5P_DEFAULT, plist, HDF5Constants.H5P_DEFAULT);
1555            log.trace("H5CompoundDS create H5CompoundDS");
1556            dataset = new H5CompoundDS(file, name, path);
1557        }
1558        finally {
1559            try {
1560                H5.H5Pclose(plist);
1561            }
1562            catch (HDF5Exception ex) {
1563                log.debug("create finally close:", ex);
1564            }
1565            try {
1566                H5.H5Sclose(sid);
1567            }
1568            catch (HDF5Exception ex) {
1569                log.debug("create finally close:", ex);
1570            }
1571            try {
1572                H5.H5Tclose(tid);
1573            }
1574            catch (HDF5Exception ex) {
1575                log.debug("create finally close:", ex);
1576            }
1577            try {
1578                H5.H5Dclose(did);
1579            }
1580            catch (HDF5Exception ex) {
1581                log.debug("create finally close:", ex);
1582            }
1583
1584            for (int i = 0; i < nMembers; i++) {
1585                try {
1586                    H5.H5Tclose(mTypes[i]);
1587                }
1588                catch (HDF5Exception ex) {
1589                    log.debug("compound create finally close:", ex);
1590                }
1591            }
1592        }
1593
1594        if (dataset != null) {
1595            pgroup.addToMemberList(dataset);
1596            if (data != null) {
1597                dataset.init();
1598                long selected[] = dataset.getSelectedDims();
1599                for (int i = 0; i < rank; i++) {
1600                    selected[i] = dims[i];
1601                }
1602                dataset.write(data);
1603            }
1604        }
1605        log.trace("H5CompoundDS create finish");
1606
1607        return dataset;
1608    }
1609
1610    /**
     * Extracts compound information into a flat structure.
     * <p>
     * For example, if compound datatype "nest" has members {nest1{a, b, c}, d, e}, then extractCompoundInfo() puts
     * the names of the nested compound fields into a flat list as
1615     *
1616     * <pre>
1617     * nest.nest1.a
1618     * nest.nest1.b
1619     * nest.nest1.c
1620     * nest.d
1621     * nest.e
1622     * </pre>
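     *
     * A minimal calling sketch (the identifiers shown are illustrative only):
     *
     * <pre>
     * List&lt;String&gt; flatNames = new Vector&lt;String&gt;();
     * List&lt;Integer&gt; flatTypes = new Vector&lt;Integer&gt;();
     * extractCompoundInfo(tid, &quot;&quot;, flatNames, flatTypes);
     * // flatNames now holds the fully qualified member names and flatTypes the
     * // corresponding native datatype identifiers, which should eventually be
     * // closed with H5.H5Tclose().
     * </pre>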
     *
     * @param tid
     *            the datatype identifier of the compound datatype to flatten.
     * @param name
     *            the name prefix (including a trailing separator for nested members) added to each member name.
     * @param names
     *            the list that receives the flattened member names; may be null if names are not needed.
     * @param types
     *            the list that receives the native datatype identifiers of the flattened members.
     */
1624    private void extractCompoundInfo(int tid, String name, List<String> names, List<Integer> types) {
1625        int nMembers = 0, mclass = -1, mtype = -1;
1626        String mname = null;
1627
1628        try {
1629            nMembers = H5.H5Tget_nmembers(tid);
1630        }
1631        catch (Exception ex) {
1632            nMembers = 0;
1633        }
1634
1635        if (nMembers <= 0) {
1636            return;
1637        }
1638
1639        int tmptid = -1;
1640        for (int i = 0; i < nMembers; i++) {
1641
1642            try {
1643                mtype = H5.H5Tget_member_type(tid, i);
1644            }
1645            catch (Exception ex) {
1646                log.debug("continue H5Tget_member_type[{}]:", i, ex);
1647                continue;
1648            }
1649
1650            try {
1651                tmptid = mtype;
1652                mtype = H5.H5Tget_native_type(tmptid);
1653            }
1654            catch (HDF5Exception ex) {
1655                log.debug("continue H5Tget_native_type[{}]:", i, ex);
1656                continue;
1657            }
1658            finally {
1659                try {
1660                    H5.H5Tclose(tmptid);
1661                }
1662                catch (HDF5Exception ex) {
1663                    log.debug("finally close:", ex);
1664                }
1665            }
1666
1667            try {
1668                mclass = H5.H5Tget_class(mtype);
1669            }
1670            catch (HDF5Exception ex) {
1671                log.debug("continue H5Tget_class[{}]:", i, ex);
1672                continue;
1673            }
1674
1675            if (names != null) {
1676                mname = name + H5.H5Tget_member_name(tid, i);
1677            }
1678
1679            if (mclass == HDF5Constants.H5T_COMPOUND) {
1680                log.debug("continue after recursive H5T_COMPOUND[{}]:", i);
1681                extractCompoundInfo(mtype, mname + CompoundDS.separator, names, types);
1682                continue;
1683            }
1684            else if (mclass == HDF5Constants.H5T_ARRAY) {
1685                try {
1686                    tmptid = H5.H5Tget_super(mtype);
1687                    int tmpclass = H5.H5Tget_class(tmptid);
1688
1689                    // cannot deal with ARRAY of COMPOUND or ARRAY of ARRAY
1690                    // support only ARRAY of atomic types
1691                    if ((tmpclass == HDF5Constants.H5T_COMPOUND) || (tmpclass == HDF5Constants.H5T_ARRAY)) {
1692                        log.debug("continue unsupported ARRAY of COMPOUND or ARRAY of ARRAY[{}]:", i);
1693                        continue;
1694                    }
1695                }
1696                catch (Exception ex) {
1697                    log.debug("continue H5T_ARRAY id or class failure[{}]:", i, ex);
1698                    continue;
1699                }
1700                finally {
1701                    try {
1702                        H5.H5Tclose(tmptid);
1703                    }
1704                    catch (Exception ex) {
1705                        log.debug("finally close[{}]:", i, ex);
1706                    }
1707                }
1708            }
1709
1710            if (names != null) {
1711                names.add(mname);
1712            }
            types.add(Integer.valueOf(mtype));
1714
1715        } // for (int i=0; i<nMembers; i++)
    } // extractCompoundInfo
1717
1718    /*
1719     * (non-Javadoc)
1720     *
1721     * @see hdf.object.Dataset#isString(int)
1722     */
1723    @Override
1724    public boolean isString(int tid) {
1725        boolean b = false;
1726        try {
1727            b = (HDF5Constants.H5T_STRING == H5.H5Tget_class(tid));
1728        }
1729        catch (Exception ex) {
1730            b = false;
1731        }
1732
1733        return b;
1734    }
1735
1736    /*
1737     * (non-Javadoc)
1738     *
1739     * @see hdf.object.Dataset#getSize(int)
1740     */
1741    @Override
1742    public int getSize(int tid) {
1743        int tsize = -1;
1744
1745        try {
1746            tsize = H5.H5Tget_size(tid);
1747        }
1748        catch (Exception ex) {
1749            tsize = -1;
1750        }
1751
1752        return tsize;
1753    }
1754
1755    /**
     * Creates a compound datatype containing a single field.
1757     * <p>
1758     * This function is needed to read/write data field by field.
1759     * <p>
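     * A minimal sketch of the intended use (did, mspace, fspace and buf are hypothetical identifiers and buffers
     * owned by the caller):
     *
     * <pre>
     * int[] compInfo = { H5.H5Tget_class(member_tid), H5.H5Tget_size(member_tid), 0 };
     * int readType = createCompoundFieldType(member_tid, &quot;nest.nest1.a&quot;, compInfo);
     * try {
     *     // read only this field of the compound dataset
     *     H5.H5Dread(did, readType, mspace, fspace, HDF5Constants.H5P_DEFAULT, buf);
     * }
     * finally {
     *     try { H5.H5Tclose(readType); } catch (HDF5Exception ex) {}
     * }
     * </pre>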
1760     *
     * @param member_tid
     *            the datatype identifier of the member field.
     * @param member_name
     *            the name of the member field; nested names are separated by CompoundDS.separator.
     * @param compInfo
     *            compInfo[0]--IN: class of the member datatype; compInfo[1]--IN: size of the member datatype;
     *            compInfo[2]--OUT: non-zero if the base type of the compound field is unsigned; zero otherwise.
     *
     * @return the identifier of the single-field compound datatype.
     *
     * @throws HDF5Exception
     *             if a failure occurred in the HDF5 library.
     */
1770    private final int createCompoundFieldType(int member_tid, String member_name, int[] compInfo) throws HDF5Exception {
1771        int nested_tid = -1;
1772
1773        int arrayType = -1;
1774        int baseType = -1;
1775        int tmp_tid1 = -1, tmp_tid4 = -1;
1776
1777        try {
1778            int member_class = compInfo[0];
1779            int member_size = compInfo[1];
1780
1781            log.trace("{} Member is class {} of size={} with baseType={}", member_name, member_class, member_size,
1782                    baseType);
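            // for an array member, rebuild the array datatype on top of the native form of its base type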
1783            if (member_class == HDF5Constants.H5T_ARRAY) {
1784                int mn = H5.H5Tget_array_ndims(member_tid);
1785                long[] marray = new long[mn];
1786                H5.H5Tget_array_dims(member_tid, marray);
1787                baseType = H5.H5Tget_super(member_tid);
1788                tmp_tid4 = H5.H5Tget_native_type(baseType);
1789                arrayType = H5.H5Tarray_create(tmp_tid4, mn, marray);
1790                log.trace("H5T_ARRAY {} Member is class {} of size={} with baseType={}", member_name, member_class,
1791                        member_size, baseType);
1792            }
1793
1794            try {
1795                if (baseType < 0) {
1796                    if (H5Datatype.isUnsigned(member_tid)) {
1797                        compInfo[2] = 1;
1798                    }
1799                }
1800                else {
1801                    if (H5Datatype.isUnsigned(baseType)) {
1802                        compInfo[2] = 1;
1803                    }
1804                }
1805            }
1806            catch (Exception ex2) {
1807                log.debug("baseType isUnsigned:", ex2);
1808            }
1809            try {
1810                H5.H5Tclose(baseType);
1811                baseType = -1;
1812            }
1813            catch (HDF5Exception ex4) {
1814                log.debug("finally close:", ex4);
1815            }
1816
1817            member_size = H5.H5Tget_size(member_tid);
1818
1819            // construct nested compound structure with a single field
1820            String theName = member_name;
1821            if (arrayType < 0) {
1822                tmp_tid1 = H5.H5Tcopy(member_tid);
1823            }
1824            else {
1825                tmp_tid1 = H5.H5Tcopy(arrayType);
1826            }
1827            try {
1828                H5.H5Tclose(arrayType);
1829                arrayType = -1;
1830            }
1831            catch (HDF5Exception ex4) {
1832                log.debug("finally close:", ex4);
1833            }
1834            int sep = member_name.lastIndexOf(CompoundDS.separator);
1835
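            // rebuild the nesting: wrap the field in one single-member compound per nesting level,
            // working from the innermost member name back toward the outermost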
1836            while (sep > 0) {
1837                theName = member_name.substring(sep + 1);
1838                nested_tid = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, member_size);
1839                H5.H5Tinsert(nested_tid, theName, 0, tmp_tid1);
1840                try {
1841                    H5.H5Tclose(tmp_tid1);
1842                }
1843                catch (Exception ex) {
1844                    log.debug("close nested temp {}:", sep, ex);
1845                }
1846                tmp_tid1 = nested_tid;
1847                member_name = member_name.substring(0, sep);
1848                sep = member_name.lastIndexOf(CompoundDS.separator);
1849            }
1850
1851            nested_tid = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, member_size);
1852
1853            H5.H5Tinsert(nested_tid, member_name, 0, tmp_tid1);
1854        }
1855        finally {
1856            try {
1857                H5.H5Tclose(tmp_tid1);
1858            }
1859            catch (HDF5Exception ex3) {
1860                log.debug("finally close:", ex3);
1861            }
1862            try {
1863                H5.H5Tclose(tmp_tid4);
1864            }
1865            catch (HDF5Exception ex3) {
1866                log.debug("finally close:", ex3);
1867            }
1868            try {
1869                H5.H5Tclose(baseType);
1870            }
1871            catch (HDF5Exception ex4) {
1872                log.debug("finally close:", ex4);
1873            }
1874            try {
1875                H5.H5Tclose(arrayType);
1876            }
1877            catch (HDF5Exception ex4) {
1878                log.debug("finally close:", ex4);
1879            }
1880        }
1881
1882        return nested_tid;
1883    }
1884
1885}