001/*****************************************************************************
002 * Copyright by The HDF Group.                                               *
003 * Copyright by the Board of Trustees of the University of Illinois.         *
004 * All rights reserved.                                                      *
005 *                                                                           *
006 * This file is part of the HDF Java Products distribution.                  *
007 * The full copyright notice, including terms governing use, modification,   *
008 * and redistribution, is contained in the files COPYING and Copyright.html. *
009 * COPYING can be found at the root of the source code distribution tree.    *
010 * Or, see http://hdfgroup.org/products/hdf-java/doc/Copyright.html.         *
011 * If you do not have access to either file, you may request a copy from     *
012 * help@hdfgroup.org.                                                        *
013 ****************************************************************************/
014
015package hdf.object.h5;
016
017import java.lang.reflect.Array;
018import java.text.DecimalFormat;
019import java.util.List;
020import java.util.Vector;
021
022import hdf.hdf5lib.H5;
023import hdf.hdf5lib.HDF5Constants;
024import hdf.hdf5lib.HDFNativeData;
025import hdf.hdf5lib.exceptions.HDF5DataFiltersException;
026import hdf.hdf5lib.exceptions.HDF5Exception;
027import hdf.hdf5lib.exceptions.HDF5LibraryException;
028import hdf.hdf5lib.structs.H5O_info_t;
029import hdf.object.Attribute;
030import hdf.object.Dataset;
031import hdf.object.Datatype;
032import hdf.object.FileFormat;
033import hdf.object.Group;
034import hdf.object.HObject;
035import hdf.object.ScalarDS;
036
/**
 * H5ScalarDS describes a multi-dimensional array of HDF5 scalar or atomic datatypes, such as byte, int, short, long,
 * float, double and string, and the operations performed on the scalar dataset.
 * <p>
 * The library predefines a modest number of datatypes. For details, read <a
 * href="http://hdfgroup.org/HDF5/doc/Datatypes.html">The Datatype Interface (H5T)</a>.
 * <p>
 * 
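 * A minimal sketch of reading an existing scalar dataset (the file and dataset names are hypothetical,
 * and exception handling is omitted for brevity):
 * 
 * <pre>
 * H5File file = new H5File(&quot;example.h5&quot;, H5File.READ);
 * file.open();
 * 
 * // retrieve a dataset (hypothetical path) and initialize its dataspace and selection
 * H5ScalarDS dset = (H5ScalarDS) file.get(&quot;/arrays/ints&quot;);
 * dset.init();
 * 
 * // read the currently selected data into memory
 * Object data = dset.read();
 * 
 * file.close();
 * </pre>
 * 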
045 * @version 1.1 9/4/2007
046 * @author Peter X. Cao
047 */
048public class H5ScalarDS extends ScalarDS {
    /**
     * Serial version UID for serialization.
     */
052    private static final long serialVersionUID = 2887517608230611642L;
053
054    private final static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5ScalarDS.class);
055
056    /**
     * The list of attributes of this data object. Members of the list are instances of Attribute.
058     */
059    private List<Attribute> attributeList;
060
061    private int nAttributes = -1;
062
063    private H5O_info_t obj_info;
064
065    /**
     * The byte array containing references of palettes. Each reference requires eight bytes of storage. Therefore,
     * the array length is 8*numberOfPalettes.
068     */
069    private byte[] paletteRefs;
070
    /** flag to indicate if the dataset has a variable-length datatype */
072    private boolean isVLEN = false;
073
    /** flag to indicate if the dataset has an enum datatype */
075    private boolean isEnum = false;
076
077    /** flag to indicate if the dataset is an external dataset */
078    private boolean isExternal = false;
079
080    private boolean isArrayOfCompound = false;
081
082    private boolean isArrayOfVLEN = false;
083    /**
     * flag to indicate if the datatype in the file is the same as the datatype in memory
085     */
086    private boolean isNativeDatatype = false;
087
    /** flag to indicate if the datatype is a region reference */
089    private boolean isRegRef = false;
090
091    /**
     * Constructs an instance of an H5 scalar dataset with the given file, dataset name and path.
     * <p>
     * For example, in H5ScalarDS(h5file, "dset", "/arrays/"), "dset" is the name of the dataset and "/arrays/" is the
     * group path of the dataset.
096     * 
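     * <p>
     * A minimal sketch (h5file is assumed to be an open H5File instance):
     * 
     * <pre>
     * H5ScalarDS dset = new H5ScalarDS(h5file, &quot;dset&quot;, &quot;/arrays/&quot;);
     * dset.init(); // initialize the dataspace, datatype and selection information
     * </pre>
     * 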
097     * @param theFile
098     *            the file that contains the data object.
099     * @param theName
100     *            the name of the data object, e.g. "dset".
101     * @param thePath
102     *            the full path of the data object, e.g. "/arrays/".
103     */
104    public H5ScalarDS(FileFormat theFile, String theName, String thePath) {
105        this(theFile, theName, thePath, null);
106    }
107
108    /**
109     * @deprecated Not for public use in the future.<br>
     *             Use {@link #H5ScalarDS(FileFormat, String, String)} instead.
111     */
112    @Deprecated
113    public H5ScalarDS(FileFormat theFile, String theName, String thePath, long[] oid) {
114        super(theFile, theName, thePath, oid);
115        unsignedConverted = false;
116        paletteRefs = null;
117        obj_info = new H5O_info_t(-1L, -1L, 0, 0, -1L, 0L, 0L, 0L, 0L, null, null, null);
118
119        if ((oid == null) && (theFile != null)) {
120            // retrieve the object ID
121            try {
122                byte[] ref_buf = H5.H5Rcreate(theFile.getFID(), this.getFullName(), HDF5Constants.H5R_OBJECT, -1);
123                this.oid = new long[1];
124                this.oid[0] = HDFNativeData.byteToLong(ref_buf, 0);
125            }
126            catch (Exception ex) {
127                log.debug("constructor ID {} for {} failed H5Rcreate", theFile.getFID(), this.getFullName());
128            }
129        }
130    }
131
132    /*
133     * (non-Javadoc)
134     * 
135     * @see hdf.object.HObject#open()
136     */
137    @Override
138    public int open() {
139        int did = -1;
140
141        try {
142            did = H5.H5Dopen(getFID(), getPath() + getName(), HDF5Constants.H5P_DEFAULT);
143        }
144        catch (HDF5Exception ex) {
145            log.debug("Failed to open dataset {}", getPath() + getName());
146            did = -1;
147        }
148
149        return did;
150    }
151
152    /*
153     * (non-Javadoc)
154     * 
155     * @see hdf.object.HObject#close(int)
156     */
157    @Override
158    public void close(int did) {
159        if (did >= 0) {
160            try {
161                H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_LOCAL);
162            }
163            catch (Exception ex) {
164                log.debug("close.H5Fflush:", ex);
165            }
166            try {
167                H5.H5Dclose(did);
168            }
169            catch (HDF5Exception ex) {
170                log.debug("close.H5Dclose:", ex);
171            }
172        }
173    }
174
175    /*
176     * (non-Javadoc)
177     * 
178     * @see hdf.object.Dataset#init()
179     */
180    @Override
181    public void init() {
182        if (rank > 0) {
183            resetSelection();
184            return; // already called. Initialize only once
185        }
186        log.trace("init() start");
187
188        int did = -1, sid = -1, tid = -1, tclass = -1;
189
190        did = open();
191        if (did >= 0) {
192            // check if it is an external dataset
193            int pid = -1;
194            try {
195                pid = H5.H5Dget_create_plist(did);
196                int nfiles = H5.H5Pget_external_count(pid);
197                isExternal = (nfiles > 0);
198            }
199            catch (Exception ex) {
200                log.debug("check if it is an external dataset:", ex);
201            }
202            finally {
203                try {
204                    H5.H5Pclose(pid);
205                }
206                catch (Exception ex) {
207                    log.debug("finally close:", ex);
208                }
209            }
210            
211            paletteRefs = getPaletteRefs(did);
212
213            try {
214                sid = H5.H5Dget_space(did);
215                rank = H5.H5Sget_simple_extent_ndims(sid);
216                tid = H5.H5Dget_type(did);
217                tclass = H5.H5Tget_class(tid);
218
219                int tmptid = 0;
220                if (tclass == HDF5Constants.H5T_ARRAY) {
221                    // use the base datatype to define the array
222                    int basetid = H5.H5Tget_super(tid);
223                    int baseclass = H5.H5Tget_class(basetid);
224                    isArrayOfCompound = (baseclass == HDF5Constants.H5T_COMPOUND);
225                    isArrayOfVLEN = (baseclass == HDF5Constants.H5T_VLEN);
226                }
227
228                isText = (tclass == HDF5Constants.H5T_STRING);
229                isVLEN = ((tclass == HDF5Constants.H5T_VLEN) || H5.H5Tis_variable_str(tid));
230                isEnum = (tclass == HDF5Constants.H5T_ENUM);
231                isUnsigned = H5Datatype.isUnsigned(tid);
232                isRegRef = H5.H5Tequal(tid, HDF5Constants.H5T_STD_REF_DSETREG);
233                log.debug(
234                        "init() tid={} is tclass={} has isText={} : isVLEN={} : isEnum={} : isUnsigned={} : isRegRef={}",
235                        tid, tclass, isText, isVLEN, isEnum, isUnsigned, isRegRef);
236
237                // check if datatype in file is native datatype
238                try {
239                    tmptid = H5.H5Tget_native_type(tid);
240                    isNativeDatatype = H5.H5Tequal(tid, tmptid);
241                    log.trace("init() isNativeDatatype={}", isNativeDatatype);
242
243                    /* see if fill value is defined */
244                    pid = H5.H5Dget_create_plist(did);
245                    int[] fillStatus = { 0 };
246                    if (H5.H5Pfill_value_defined(pid, fillStatus) >= 0) {
247                        if (fillStatus[0] == HDF5Constants.H5D_FILL_VALUE_USER_DEFINED) {
248                            fillValue = H5Datatype.allocateArray(tmptid, 1);
249                            log.trace("init() fillValue={}", fillValue);
250                            try {
251                                H5.H5Pget_fill_value(pid, tmptid, fillValue);
252                                log.trace("init() H5Pget_fill_value={}", fillValue);
253                                if (fillValue != null) {
254                                    if (isFillValueConverted)
255                                        fillValue = ScalarDS.convertToUnsignedC(fillValue, null);
256
257                                    int n = Array.getLength(fillValue);
258                                    for (int i = 0; i < n; i++)
259                                        addFilteredImageValue((Number) Array.get(fillValue, i));
260                                }
261                            }
262                            catch (Exception ex2) {
263                                log.debug("fill value was defined :", ex2);
264                                fillValue = null;
265                            }
266                        }
267                    }
268                }
269                catch (HDF5Exception ex) {
270                    log.debug("check if datatype in file is native datatype :", ex);
271                }
272                finally {
273                    try {
274                        H5.H5Tclose(tmptid);
275                    }
276                    catch (HDF5Exception ex) {
277                        log.debug("finally close:", ex);
278                    }
279                    try {
280                        H5.H5Pclose(pid);
281                    }
282                    catch (Exception ex) {
283                        log.debug("finally close:", ex);
284                    }
285                }
286
287                if (rank == 0) {
288                    // a scalar data point
289                    rank = 1;
290                    dims = new long[1];
291                    dims[0] = 1;
292                }
293                else {
294                    dims = new long[rank];
295                    maxDims = new long[rank];
296                    H5.H5Sget_simple_extent_dims(sid, dims, maxDims);
297                }
298            }
299            catch (HDF5Exception ex) {
300                log.debug("init():", ex);
301            }
302            finally {
303                try {
304                    H5.H5Tclose(tid);
305                }
306                catch (HDF5Exception ex2) {
307                    log.debug("finally close:", ex2);
308                }
309                try {
310                    H5.H5Sclose(sid);
311                }
312                catch (HDF5Exception ex2) {
313                    log.debug("finally close:", ex2);
314                }
315            }
316
            // check the image type and interlace mode.
            // The dataset is treated as a true color image when IMAGE_SUBCLASS = IMAGE_TRUECOLOR;
            // the interlace mode is then either INTERLACE_PIXEL (the default) or INTERLACE_PLANE.
322            if ((rank >= 3) && isImage) {
323                interlace = -1;
324                isTrueColor = isStringAttributeOf(did, "IMAGE_SUBCLASS", "IMAGE_TRUECOLOR");
325
326                if (isTrueColor) {
327                    interlace = INTERLACE_PIXEL;
328                    if (isStringAttributeOf(did, "INTERLACE_MODE", "INTERLACE_PLANE")) {
329                        interlace = INTERLACE_PLANE;
330                    }
331                }
332            }
333
334            close(did);
335        }
336        else {
337            log.debug("init() failed to open dataset");
338        }
339
340        startDims = new long[rank];
341        selectedDims = new long[rank];
342        log.trace("init() finish");
343        resetSelection();
344    }
345
346    /*
347     * (non-Javadoc)
348     * 
349     * @see hdf.object.DataFormat#hasAttribute()
350     */
351    public boolean hasAttribute() {
352        obj_info.num_attrs = nAttributes;
353
354        log.trace("hasAttribute start: nAttributes = {}", nAttributes);
355        if (obj_info.num_attrs < 0) {
356            int did = open();
357            if (did >= 0) {
358                int tid = -1;
359                obj_info.num_attrs = 0;
360
361                try {
362                    obj_info = H5.H5Oget_info(did);
363                    nAttributes = (int) obj_info.num_attrs;
364
365                    tid = H5.H5Dget_type(did);
366
367                    int tclass = H5.H5Tget_class(tid);
368                    isText = (tclass == HDF5Constants.H5T_STRING);
369                    isVLEN = ((tclass == HDF5Constants.H5T_VLEN) || H5.H5Tis_variable_str(tid));
370                    isEnum = (tclass == HDF5Constants.H5T_ENUM);
371                    log.trace("hasAttribute: obj_info.num_attrs={} with tclass type: isText={},isVLEN={},isEnum={}", nAttributes, isText, isVLEN, isEnum);
372                }
373                catch (Exception ex) {
374                    obj_info.num_attrs = 0;
375                    log.debug("hasAttribute: get object info:", ex);
376                }
377                finally {
                    try {H5.H5Tclose(tid);} catch (HDF5Exception ex) {log.debug("finally close:", ex);}
379                }
380
                // check if it is an image
383                Object avalue = getAttrValue(did, "CLASS");
384                if (avalue != null) {
385                    try {
386                        isImageDisplay = isImage = "IMAGE".equalsIgnoreCase(new String((byte[]) avalue).trim());
387                        log.trace("hasAttribute: isImageDisplay dataset: {} with value = {}", isImageDisplay, avalue);
388                    }
389                    catch (Throwable err) {
390                        log.debug("check image:", err);
391                    }
392                }
393
394                // retrieve the IMAGE_MINMAXRANGE
395                avalue = getAttrValue(did, "IMAGE_MINMAXRANGE");
396                if (avalue != null) {
397                    double x0 = 0, x1 = 0;
398                    try {
399                        x0 = Double.valueOf(java.lang.reflect.Array.get(avalue, 0).toString()).doubleValue();
400                        x1 = Double.valueOf(java.lang.reflect.Array.get(avalue, 1).toString()).doubleValue();
401                    }
402                    catch (Exception ex2) {
403                        x0 = x1 = 0;
404                    }
405                    if (x1 > x0) {
406                        imageDataRange = new double[2];
407                        imageDataRange[0] = x0;
408                        imageDataRange[1] = x1;
409                    }
410                }
411
412                try {
413                    checkCFconvention(did);
414                }
415                catch (Exception ex) {
416                    log.debug("checkCFconvention({}):", did, ex);
417                }
418
419                close(did);
420            }
421            else {
422                log.debug("could not open dataset");
423            }
424        }
425        log.trace("hasAttribute exit");
426
427        return (obj_info.num_attrs > 0);
428    }
429
430    /*
431     * (non-Javadoc)
432     * 
433     * @see hdf.object.Dataset#getDatatype()
434     */
435    @Override
436    public Datatype getDatatype() {
437        if (datatype == null) {
438            log.trace("H5ScalarDS getDatatype: datatype == null");
439            int did = -1, tid = -1;
440
441            did = open();
442            if (did >= 0) {
443                try {
444                    tid = H5.H5Dget_type(did);
445
                    log.trace("H5ScalarDS getDatatype: isNativeDatatype={}", isNativeDatatype);
447                    if (!isNativeDatatype) {
448                        int tmptid = -1;
449                        try {
450                            tmptid = tid;
451                            tid = H5.H5Tget_native_type(tmptid);
452                        }
453                        finally {
454                            try {
455                                H5.H5Tclose(tmptid);
456                            }
457                            catch (Exception ex2) {
458                                log.debug("finally close:", ex2);
459                            }
460                        }
461                    }
462                    datatype = new H5Datatype(tid);
463                }
464                catch (Exception ex) {
465                    log.debug("new H5Datatype:", ex);
466                }
467                finally {
468                    try {
469                        H5.H5Tclose(tid);
470                    }
471                    catch (HDF5Exception ex) {
472                        log.debug("finally close:", ex);
473                    }
474                    try {
475                        H5.H5Dclose(did);
476                    }
477                    catch (HDF5Exception ex) {
478                        log.debug("finally close:", ex);
479                    }
480                }
481            }
482        }
483
484        return datatype;
485    }
486
487    /*
488     * (non-Javadoc)
489     * 
490     * @see hdf.object.Dataset#clear()
491     */
492    @Override
493    public void clear() {
494        super.clear();
495
496        if (attributeList != null) {
497            ((Vector<Attribute>) attributeList).setSize(0);
498        }
499    }
500
501    /*
502     * (non-Javadoc)
503     * 
504     * @see hdf.object.Dataset#readBytes()
505     */
506    @Override
507    public byte[] readBytes() throws HDF5Exception {
508        byte[] theData = null;
509
510        log.trace("H5ScalarDS readBytes: start");
511        if (rank <= 0) {
512            init();
513        }
514
515        int did = open();
516        if (did >= 0) {
517            int fspace = -1, mspace = -1, tid = -1;
518
519            try {
520                long[] lsize = { 1 };
521                for (int j = 0; j < selectedDims.length; j++) {
522                    lsize[0] *= selectedDims[j];
523                }
524
525                fspace = H5.H5Dget_space(did);
526                mspace = H5.H5Screate_simple(rank, selectedDims, null);
527
                // set the rectangle selection
                // HDF5 bug: for a scalar dataset, H5Sselect_hyperslab gives a core dump
                if (rank * dims[0] > 1) {
                    // set the block size to 1
                    H5.H5Sselect_hyperslab(fspace, HDF5Constants.H5S_SELECT_SET, startDims, selectedStride,
                            selectedDims, null);
                }
536
537                tid = H5.H5Dget_type(did);
538                int size = H5.H5Tget_size(tid) * (int) lsize[0];
539                log.trace("H5ScalarDS readBytes: size = {}", size);
540                theData = new byte[size];
541                H5.H5Dread(did, tid, mspace, fspace, HDF5Constants.H5P_DEFAULT, theData);
542            }
543            finally {
544                try {
545                    H5.H5Sclose(fspace);
546                }
547                catch (Exception ex2) {
548                    log.debug("finally close:", ex2);
549                }
550                try {
551                    H5.H5Sclose(mspace);
552                }
553                catch (Exception ex2) {
554                    log.debug("finally close:", ex2);
555                }
556                try {
557                    H5.H5Tclose(tid);
558                }
559                catch (HDF5Exception ex2) {
560                    log.debug("finally close:", ex2);
561                }
562                close(did);
563            }
564        }
565        log.trace("H5ScalarDS readBytes: finish");
566
567        return theData;
568    }
569
570    /*
571     * (non-Javadoc)
572     * 
573     * @see hdf.object.Dataset#read()
574     */
575    @Override
576    public Object read() throws Exception {
577        Object theData = null;
578        int did = -1;
579        int tid = -1;
580        int spaceIDs[] = { -1, -1 }; // spaceIDs[0]=mspace, spaceIDs[1]=fspace
581
582        log.trace("H5ScalarDS read: start");
583        if (rank <= 0) {
584            init(); // read data information into memory
585        }
586
587        if (isArrayOfCompound)
588            throw new HDF5Exception("Cannot show data with datatype of ARRAY of COMPOUND.");
589        if (isArrayOfVLEN)
590            throw new HDF5Exception("Cannot show data with datatype of ARRAY of VL.");
591
592        if (isExternal) {
593            String pdir = this.getFileFormat().getAbsoluteFile().getParent();
594
595            if (pdir == null) {
596                pdir = ".";
597            }
598            H5.H5Dchdir_ext(pdir);
599        }
600
601        boolean isREF = false;
602        long[] lsize = { 1 };
603        log.trace("H5ScalarDS read: open dataset");
604        did = open();
605        if (did >= 0) {
606            try {
607                lsize[0] = selectHyperslab(did, spaceIDs);
608                log.trace("H5ScalarDS read: opened dataset size {} for {}", lsize[0], nPoints);
609
610                if (lsize[0] == 0) {
611                    throw new HDF5Exception("No data to read.\nEither the dataset or the selected subset is empty.");
612                }
613
614                if (log.isDebugEnabled()) {
                    // check if storage space is allocated
616                    try {
617                        long ssize = H5.H5Dget_storage_size(did);
618                        log.trace("Storage space allocated = {}.", ssize);
619                    }
620                    catch (Exception ex) {
621                        log.debug("check if storage space is allocated:", ex);
622                    }
623                }
624
625                tid = H5.H5Dget_type(did);
626                log.trace("H5ScalarDS read: H5Tget_native_type:");
627                log.trace("H5ScalarDS read: isNativeDatatype={}", isNativeDatatype);
628                if (!isNativeDatatype) {
629                    int tmptid = -1;
630                    try {
631                        tmptid = tid;
632                        tid = H5.H5Tget_native_type(tmptid);
633                    }
634                    finally {
635                        try {H5.H5Tclose(tmptid);}
636                        catch (Exception ex2) {log.debug("finally close:", ex2);}
637                    }
638                }
639
640                isREF = (H5.H5Tequal(tid, HDF5Constants.H5T_STD_REF_OBJ));
641
642                log.trace("H5ScalarDS read: originalBuf={} isText={} isREF={} lsize[0]={} nPoints={}", originalBuf, isText, isREF, lsize[0], nPoints);
643                if ((originalBuf == null) || isEnum || isText || isREF || ((originalBuf != null) && (lsize[0] != nPoints))) {
644                    try {
645                        theData = H5Datatype.allocateArray(tid, (int) lsize[0]);
646                    }
647                    catch (OutOfMemoryError err) {
648                        throw new HDF5Exception("Out Of Memory.");
649                    }
650                }
                else {
                    // reuse the buffer if the size is the same
                    theData = originalBuf;
                }
655
656                if (theData != null) {
657                    if (isVLEN) {
658                        log.trace("H5ScalarDS read: H5DreadVL");
659                        H5.H5DreadVL(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, (Object[]) theData);
660                    }
661                    else {
662                        log.trace("H5ScalarDS read: H5Dread did={} spaceIDs[0]={} spaceIDs[1]={}", did, spaceIDs[0], spaceIDs[1]);
663                        H5.H5Dread(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, theData);
664                    }
665                } // if (theData != null)
666            }
667            catch (HDF5DataFiltersException exfltr) {
668                log.debug("H5ScalarDS read: read failure:", exfltr);
669                throw new Exception("Filter not available exception: " + exfltr.getMessage(), exfltr);
670            }
671            catch (HDF5Exception h5ex) {
672                log.debug("H5ScalarDS read: read failure", h5ex);
673                throw new HDF5Exception(h5ex.toString());
674            }
675            finally {
676                try {
677                    if (HDF5Constants.H5S_ALL != spaceIDs[0])
678                        H5.H5Sclose(spaceIDs[0]);
679                }
680                catch (Exception ex2) {
681                    log.debug("read: finally close:", ex2);
682                }
683                try {
684                    if (HDF5Constants.H5S_ALL != spaceIDs[1])
685                        H5.H5Sclose(spaceIDs[1]);
686                }
687                catch (Exception ex2) {
688                    log.debug("read: finally close:", ex2);
689                }
690                try {
691                    if (isText && convertByteToString) {
692                        log.trace("H5ScalarDS read: H5Dread convertByteToString");
693                        theData = byteToString((byte[]) theData, H5.H5Tget_size(tid));
694                    }
695                    else if (isREF) {
696                        log.trace("H5ScalarDS read: H5Dread isREF");
697                        theData = HDFNativeData.byteToLong((byte[]) theData);
698                    }
699                    else if (isEnum && isEnumConverted()) {
700                        log.trace("H5ScalarDS read: H5Dread isEnum theData={}", theData);
701                        theData = H5Datatype.convertEnumValueToName(tid, theData, null);
702                    }
703                }
704                catch (Exception ex) {
705                    log.debug("H5ScalarDS read: convert data:", ex);
706                }
707                try {H5.H5Tclose(tid);}
708                catch (Exception ex2) {log.debug("finally close:", ex2);}
709
710                close(did);
711            }
712        }
713
714        log.trace("H5ScalarDS read: finish");
715        return theData;
716    }
717
718
719    /**
720     * Writes the given data buffer into this dataset in a file.
721     * 
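     * <p>
     * A minimal sketch of rewriting the currently selected data of an existing integer dataset
     * (assuming an open H5File named file; the dataset path and values are hypothetical):
     * 
     * <pre>
     * // hypothetical dataset path
     * H5ScalarDS dset = (H5ScalarDS) file.get(&quot;/arrays/ints&quot;);
     * dset.init();
     * 
     * // read the current selection, modify the values, and write them back
     * int[] values = (int[]) dset.read();
     * for (int i = 0; i &lt; values.length; i++)
     *     values[i] += 1;
     * dset.write(values);
     * </pre>
     * 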
722     * @param buf
723     *            The buffer that contains the data values.
724     */
725    @Override
726    public void write(Object buf) throws HDF5Exception {
727        log.trace("H5ScalarDS write: start");
728        int did = -1;
729        int tid = -1;
730        int spaceIDs[] = { -1, -1 }; // spaceIDs[0]=mspace, spaceIDs[1]=fspace
731        Object tmpData = null;
732
733        if (buf == null) {
734            return;
735        }
736
737        if (isVLEN && !isText) {
738            log.trace("H5ScalarDS write: VL data={}", buf);
739            throw (new HDF5Exception("Writing non-string variable-length data is not supported"));
740        }
741        else if (isRegRef) {
742            throw (new HDF5Exception("Writing region references data is not supported"));
743        }
744
745        long[] lsize = { 1 };
746        did = open();
747        log.trace("H5ScalarDS write: dataset opened");
748        if (did >= 0) {
749            try {
750                lsize[0] = selectHyperslab(did, spaceIDs);
751                tid = H5.H5Dget_type(did);
752
753                log.trace("H5ScalarDS write: isNativeDatatype={}", isNativeDatatype);
754                if (!isNativeDatatype) {
755                    int tmptid = -1;
756                    try {
757                        tmptid = tid;
758                        tid = H5.H5Tget_native_type(tmptid);
759                    }
760                    finally {
761                        try {H5.H5Tclose(tmptid);}
762                        catch (Exception ex2) {log.debug("finally close:", ex2);}
763                    }
764                }
765
766                isText = (H5.H5Tget_class(tid) == HDF5Constants.H5T_STRING);
767
768                // check if need to convert integer data
769                int tsize = H5.H5Tget_size(tid);
770                String cname = buf.getClass().getName();
771                char dname = cname.charAt(cname.lastIndexOf("[") + 1);
772                boolean doConversion = (((tsize == 1) && (dname == 'S')) || ((tsize == 2) && (dname == 'I'))
773                        || ((tsize == 4) && (dname == 'J')) || (isUnsigned && unsignedConverted));
774                log.trace("H5ScalarDS write: tsize={} cname={} dname={} doConversion={}", tsize, cname, dname,
775                        doConversion);
776
777                tmpData = buf;
778                if (doConversion) {
779                    tmpData = convertToUnsignedC(buf, null);
780                }
781                // do not convert v-len strings, regardless of conversion request
782                // type
783                else if (isText && convertByteToString && !H5.H5Tis_variable_str(tid)) {
784                    tmpData = stringToByte((String[]) buf, H5.H5Tget_size(tid));
785                }
786                else if (isEnum && (Array.get(buf, 0) instanceof String)) {
787                    tmpData = H5Datatype.convertEnumNameToValue(tid, (String[]) buf, null);
788                }
789
790                H5.H5Dwrite(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, tmpData);
791
792            }
793            finally {
794                tmpData = null;
795                try {
796                    if (HDF5Constants.H5S_ALL != spaceIDs[0])
797                        H5.H5Sclose(spaceIDs[0]);
798                }
799                catch (Exception ex2) {
800                    log.debug("write: finally close:", ex2);
801                }
802                try {
803                    if (HDF5Constants.H5S_ALL != spaceIDs[1])
804                        H5.H5Sclose(spaceIDs[1]);
805                }
806                catch (Exception ex2) {
807                    log.debug("write: finally close:", ex2);
808                }
809                try {
810                    H5.H5Tclose(tid);
811                }
812                catch (Exception ex2) {
813                    log.debug("write: finally close:", ex2);
814                }
815            }
816            close(did);
817        }
818        log.trace("H5ScalarDS write: finish");
819    }
820
821    /**
     * Sets up the hyperslab selection.
823     * 
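     * <p>
     * Typical use within this class (see {@link #read()} and {@link #write(Object)}):
     * 
     * <pre>
     * int spaceIDs[] = { -1, -1 }; // spaceIDs[0]=mspace, spaceIDs[1]=fspace
     * long[] lsize = { 1 };
     * lsize[0] = selectHyperslab(did, spaceIDs);
     * </pre>
     * 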
824     * @param did
825     *            IN dataset ID
826     * @param spaceIDs
827     *            IN/OUT memory and file space IDs -- spaceIDs[0]=mspace, spaceIDs[1]=fspace
     * @return the total number of data points selected
829     */
830    private long selectHyperslab(int did, int[] spaceIDs) throws HDF5Exception {
831        long lsize = 1;
832
833        boolean isAllSelected = true;
834        for (int i = 0; i < rank; i++) {
835            lsize *= selectedDims[i];
836            if (selectedDims[i] < dims[i]) {
837                isAllSelected = false;
838            }
839        }
840
841        if (isAllSelected) {
842            spaceIDs[0] = HDF5Constants.H5S_ALL;
843            spaceIDs[1] = HDF5Constants.H5S_ALL;
844        }
845        else {
846            spaceIDs[1] = H5.H5Dget_space(did);
847
            // When a 1D dataspace is used with a chunked dataset, reading is very slow.
            // This is a known problem in the HDF5 library for chunked datasets.
            // mspace = H5.H5Screate_simple(1, lsize, null);
852            spaceIDs[0] = H5.H5Screate_simple(rank, selectedDims, null);
853            H5.H5Sselect_hyperslab(spaceIDs[1], HDF5Constants.H5S_SELECT_SET, startDims, selectedStride, selectedDims,
854                    null);
855        }
856
857        if ((rank > 1) && (selectedIndex[0] > selectedIndex[1]))
858            isDefaultImageOrder = false;
859        else
860            isDefaultImageOrder = true;
861
862        return lsize;
863    }
864
865    /*
866     * (non-Javadoc)
867     * 
868     * @see hdf.object.DataFormat#getMetadata()
869     */
870    public List<Attribute> getMetadata() throws HDF5Exception {
871        return this.getMetadata(fileFormat.getIndexType(null), fileFormat.getIndexOrder(null));
872    }
873
874    /*
875     * (non-Javadoc)
876     * 
877     * @see hdf.object.DataFormat#getMetadata(int...)
878     */
879    public List<Attribute> getMetadata(int... attrPropList) throws HDF5Exception {
880        if (rank <= 0) {
881            init();
882        }
883        log.trace("getMetadata: inited");
884
885        try {
886            this.linkTargetObjName = H5File.getLinkTargetName(this);
887        }
888        catch (Exception ex) {
889            log.debug("getLinkTargetName failed: ", ex);
890        }
891
892        if (attributeList != null) {
893            log.trace("getMetadata: attributeList != null");
894            return attributeList;
895        }
896
897        // load attributes first
898        int did = -1, pid = -1;
899        int indxType = fileFormat.getIndexType(null);
900        int order = fileFormat.getIndexOrder(null);
901
902        if (attrPropList.length > 0) {
903            indxType = attrPropList[0];
904            if (attrPropList.length > 1) {
905                order = attrPropList[1];
906            }
907        }
908        log.trace("getMetadata: open dataset");
909        did = open();
910        if (did >= 0) {
911            log.trace("getMetadata: dataset opened");
912            try {
913                compression = "";
914                attributeList = H5File.getAttribute(did, indxType, order);
915                log.trace("getMetadata: attributeList loaded");
916
917                // get the compression and chunk information
918                pid = H5.H5Dget_create_plist(did);
919                long storage_size = H5.H5Dget_storage_size(did);
920                int nfilt = H5.H5Pget_nfilters(pid);
921                if (H5.H5Pget_layout(pid) == HDF5Constants.H5D_CHUNKED) {
922                    chunkSize = new long[rank];
923                    H5.H5Pget_chunk(pid, rank, chunkSize);
924                    if(nfilt > 0) {
925                        long    nelmts = 1;
926                        long    uncomp_size;
927                        long    datum_size = getDatatype().getDatatypeSize();
928                        if (datum_size < 0) {
929                            int tmptid = -1;
930                            try {
931                                tmptid = H5.H5Dget_type(did);
932                                datum_size = H5.H5Tget_size(tmptid);
933                            }
934                            finally {
935                                try {H5.H5Tclose(tmptid);}
936                                catch (Exception ex2) {log.debug("finally close:", ex2);}
937                            }
938                        }
939                        
940
941                        for(int i = 0; i < rank; i++) {
942                            nelmts *= dims[i];
943                        }
944                        uncomp_size = nelmts * datum_size;
945
946                        /* compression ratio = uncompressed size /  compressed size */
947
948                        if(storage_size != 0) {
949                            double ratio = (double) uncomp_size / (double) storage_size;
950                            DecimalFormat df = new DecimalFormat();
951                            df.setMinimumFractionDigits(3);
952                            df.setMaximumFractionDigits(3);
953                            compression +=  df.format(ratio) + ":1";
954                        }
955                    }
956                }
957                else {
958                    chunkSize = null;
959                }
960
961                int[] flags = { 0, 0 };
962                long[] cd_nelmts = { 20 };
                int[] cd_values = new int[(int) cd_nelmts[0]];
964                String[] cd_name = { "", "" };
965                log.trace("getMetadata: {} filters in pipeline", nfilt);
966                int filter = -1;
967                int[] filter_config = { 1 };
968                filters = "";
969
970                for (int i = 0, k = 0; i < nfilt; i++) {
971                    log.trace("getMetadata: filter[{}]", i);
972                    if (i > 0) {
973                        filters += ", ";
974                    }
975                    if (k > 0) {
976                        compression += ", ";
977                    }
978
979                    try {
980                        cd_nelmts[0] = 20;
                        cd_values = new int[(int) cd_nelmts[0]];
983                        filter = H5.H5Pget_filter(pid, i, flags, cd_nelmts, cd_values, 120, cd_name, filter_config);
984                        log.trace("getMetadata: filter[{}] is {} has {} elements ", i, cd_name[0], cd_nelmts[0]);
985                        for (int j = 0; j < cd_nelmts[0]; j++) {
986                            log.trace("getMetadata: filter[{}] element {} = {}", i, j, cd_values[j]);
987                        }
988                    }
989                    catch (Throwable err) {
990                        filters += "ERROR";
991                        continue;
992                    }
993
994                    if (filter == HDF5Constants.H5Z_FILTER_NONE) {
995                        filters += "NONE";
996                    }
997                    else if (filter == HDF5Constants.H5Z_FILTER_DEFLATE) {
998                        filters += "GZIP";
999                        compression += compression_gzip_txt + cd_values[0];
1000                        k++;
1001                    }
1002                    else if (filter == HDF5Constants.H5Z_FILTER_FLETCHER32) {
1003                        filters += "Error detection filter";
1004                    }
1005                    else if (filter == HDF5Constants.H5Z_FILTER_SHUFFLE) {
1006                        filters += "SHUFFLE: Nbytes = " + cd_values[0];
1007                    }
1008                    else if (filter == HDF5Constants.H5Z_FILTER_NBIT) {
1009                        filters += "NBIT";
1010                    }
1011                    else if (filter == HDF5Constants.H5Z_FILTER_SCALEOFFSET) {
1012                        filters += "SCALEOFFSET: MIN BITS = " + cd_values[0];
1013                    }
1014                    else if (filter == HDF5Constants.H5Z_FILTER_SZIP) {
1015                        filters += "SZIP";
1016                        compression += "SZIP: Pixels per block = " + cd_values[1];
1017                        k++;
1018                        int flag = -1;
1019                        try {
1020                            flag = H5.H5Zget_filter_info(filter);
1021                        }
1022                        catch (Exception ex) {
1023                            flag = -1;
1024                        }
1025                        if (flag == HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) {
1026                            compression += ": H5Z_FILTER_CONFIG_DECODE_ENABLED";
1027                        }
1028                        else if ((flag == HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED)
1029                                || (flag >= (HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED + HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED))) {
1030                            compression += ": H5Z_FILTER_CONFIG_ENCODE_ENABLED";
1031                        }
1032                    }
1033                    else {
1034                        filters += "USERDEFINED " + cd_name[0] + "(" + filter + "): ";
1035                        for (int j = 0; j < cd_nelmts[0]; j++) {
1036                            if (j > 0)
1037                                filters += ", ";
1038                            filters += cd_values[j];
1039                        }
1040                        log.debug("getMetadata: filter[{}] is user defined compression", i);
1041                    }
1042                } // for (int i=0; i<nfilt; i++)
1043
1044                if (compression.length() == 0) {
1045                    compression = "NONE";
1046                }
1047                log.trace("getMetadata: filter compression={}", compression);
1048
1049                if (filters.length() == 0) {
1050                    filters = "NONE";
1051                }
1052                log.trace("getMetadata: filter information={}", filters);
1053
1054                storage = "" + storage_size;
1055                try {
1056                    int[] at = { 0 };
1057                    H5.H5Pget_alloc_time(pid, at);
1058                    storage += ", allocation time: ";
1059                    if (at[0] == HDF5Constants.H5D_ALLOC_TIME_EARLY) {
1060                        storage += "Early";
1061                    }
1062                    else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_INCR) {
1063                        storage += "Incremental";
1064                    }
1065                    else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_LATE) {
1066                        storage += "Late";
1067                    }
1068                }
1069                catch (Exception ex) {
1070                    log.debug("Storage allocation time:", ex);
1071                }
1072                if (storage.length() == 0) {
1073                    storage = "NONE";
1074                }
1075                log.trace("getMetadata: storage={}", storage);
1076            }
1077            finally {
1078                try {
1079                    H5.H5Pclose(pid);
1080                }
1081                catch (Exception ex) {
1082                    log.debug("finally close:", ex);
1083                }
1084                close(did);
1085            }
1086        }
1087
1088        log.trace("getMetadata: finish");
1089        return attributeList;
1090    }
1091
1092    /*
1093     * (non-Javadoc)
1094     * 
1095     * @see hdf.object.DataFormat#writeMetadata(java.lang.Object)
1096     */
1097    public void writeMetadata(Object info) throws Exception {
1098        // only attribute metadata is supported.
1099        if (!(info instanceof Attribute)) {
1100            return;
1101        }
1102
1103        boolean attrExisted = false;
1104        Attribute attr = (Attribute) info;
1105        log.trace("writeMetadata: {}", attr.getName());
1106
1107        if (attributeList == null) {
1108            this.getMetadata();
1109        }
1110
1111        if (attributeList != null)
1112            attrExisted = attributeList.contains(attr);
1113
1114        getFileFormat().writeAttribute(this, attr, attrExisted);
1115        // add the new attribute into attribute list
1116        if (!attrExisted) {
1117            attributeList.add(attr);
1118            nAttributes = attributeList.size();
1119        }
1120    }
1121
1122    /*
1123     * (non-Javadoc)
1124     * 
1125     * @see hdf.object.DataFormat#removeMetadata(java.lang.Object)
1126     */
1127    public void removeMetadata(Object info) throws HDF5Exception {
1128        // only attribute metadata is supported.
1129        if (!(info instanceof Attribute)) {
1130            return;
1131        }
1132
1133        Attribute attr = (Attribute) info;
1134        log.trace("removeMetadata: {}", attr.getName());
1135        int did = open();
1136        if (did >= 0) {
1137            try {
1138                H5.H5Adelete(did, attr.getName());
1139                List<Attribute> attrList = getMetadata();
1140                attrList.remove(attr);
1141                nAttributes = attrList.size();
1142            }
1143            finally {
1144                close(did);
1145            }
1146        }
1147    }
1148
1149    /*
1150     * (non-Javadoc)
1151     * 
1152     * @see hdf.object.DataFormat#updateMetadata(java.lang.Object)
1153     */
1154    public void updateMetadata(Object info) throws HDF5Exception {
1155        // only attribute metadata is supported.
1156        if (!(info instanceof Attribute)) {
1157            return;
1158        }
1159        log.trace("updateMetadata");
1160
1161        Attribute attr = (Attribute) info;
1162        log.trace("updateMetadata: {}", attr.getName());
1163        nAttributes = -1;
1164    }
1165
1166    /*
1167     * (non-Javadoc)
1168     * 
1169     * @see hdf.object.HObject#setName(java.lang.String)
1170     */
1171    @Override
1172    public void setName(String newName) throws Exception {
1173        H5File.renameObject(this, newName);
1174        super.setName(newName);
1175    }
1176
1177    /**
1178     * Resets selection of dataspace
1179     */
1180    private void resetSelection() {
1181        log.trace("resetSelection: start");
1182
1183        for (int i = 0; i < rank; i++) {
1184            startDims[i] = 0;
1185            selectedDims[i] = 1;
1186            if (selectedStride != null) {
1187                selectedStride[i] = 1;
1188            }
1189        }
1190
1191        if (interlace == INTERLACE_PIXEL) {
1192            // 24-bit TRUE color image
1193            // [height][width][pixel components]
1194            selectedDims[2] = 3;
1195            selectedDims[0] = dims[0];
1196            selectedDims[1] = dims[1];
1197            selectedIndex[0] = 0; // index for height
1198            selectedIndex[1] = 1; // index for width
1199            selectedIndex[2] = 2; // index for depth
1200        }
1201        else if (interlace == INTERLACE_PLANE) {
1202            // 24-bit TRUE color image
1203            // [pixel components][height][width]
1204            selectedDims[0] = 3;
1205            selectedDims[1] = dims[1];
1206            selectedDims[2] = dims[2];
1207            selectedIndex[0] = 1; // index for height
1208            selectedIndex[1] = 2; // index for width
1209            selectedIndex[2] = 0; // index for depth
1210        }
1211        else if (rank == 1) {
1212            selectedIndex[0] = 0;
1213            selectedDims[0] = dims[0];
1214        }
1215        else if (rank == 2) {
1216            selectedIndex[0] = 0;
1217            selectedIndex[1] = 1;
1218            selectedDims[0] = dims[0];
1219            selectedDims[1] = dims[1];
1220        }
1221        else if (rank > 2) {
            // hdf-java 2.5 version: 3D datasets were arranged in the order of
            // [frame][height][width] by default:
            // selectedIndex[1] = rank-1; // width, the fastest dimension
            // selectedIndex[0] = rank-2; // height
            // selectedIndex[2] = rank-3; // frames
            //
            // (5/4/09) Modified the default dimension order. See bug #1379.
            // The default order was changed to the following because, in most situations,
            // users want to use the natural order of
            // selectedIndex[0] = 0
            // selectedIndex[1] = 1
            // selectedIndex[2] = 2
            // Most NPOESS data is in the order above.
1236
1237            if (isImage) {
1238                // 3D dataset is arranged in the order of [frame][height][width]
1239                selectedIndex[1] = rank - 1; // width, the fastest dimension
1240                selectedIndex[0] = rank - 2; // height
1241                selectedIndex[2] = rank - 3; // frames
1242            }
1243            else {
1244                selectedIndex[0] = 0; // width, the fastest dimension
1245                selectedIndex[1] = 1; // height
1246                selectedIndex[2] = 2; // frames
1247            }
1248
1249            selectedDims[selectedIndex[0]] = dims[selectedIndex[0]];
1250            selectedDims[selectedIndex[1]] = dims[selectedIndex[1]];
1251        }
1252
        // by default, only one dimension is selected for text data
1254        if ((rank > 1) && isText) {
1255            selectedIndex[0] = rank - 1;
1256            selectedIndex[1] = 0;
1257            selectedDims[0] = 1;
1258            selectedDims[selectedIndex[0]] = dims[selectedIndex[0]];
1259        }
1260
1261        isDataLoaded = false;
1262        isDefaultImageOrder = true;
1263        log.trace("resetSelection: finish");
1264    }
1265
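    /**
     * Creates a scalar dataset in a file with/without chunking and compression.
     * <p>
     * This variant uses no user-defined fill value. See
     * {@link #create(String, Group, Datatype, long[], long[], long[], int, Object, Object)} for details.
     */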
1266    public static Dataset create(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims,
1267            long[] chunks, int gzip, Object data) throws Exception {
1268        return create(name, pgroup, type, dims, maxdims, chunks, gzip, null, data);
1269    }
1270
1271    /**
1272     * Creates a scalar dataset in a file with/without chunking and compression.
1273     * <p>
1274     * The following example shows how to create a string dataset using this function.
1275     * 
1276     * <pre>
1277     * H5File file = new H5File(&quot;test.h5&quot;, H5File.CREATE);
1278     * int max_str_len = 120;
1279     * Datatype strType = new H5Datatype(Datatype.CLASS_STRING, max_str_len, -1, -1);
1280     * int size = 10000;
1281     * long dims[] = { size };
1282     * long chunks[] = { 1000 };
1283     * int gzip = 9;
1284     * String strs[] = new String[size];
1285     * 
1286     * for (int i = 0; i &lt; size; i++)
1287     *     strs[i] = String.valueOf(i);
1288     * 
1289     * file.open();
1290     * file.createScalarDS(&quot;/1D scalar strings&quot;, null, strType, dims, null, chunks, gzip, strs);
1291     * 
1292     * try {
1293     *     file.close();
1294     * }
1295     * catch (Exception ex) {
1296     * }
1297     * </pre>
1298     * 
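     * A similar sketch creating a chunked, GZIP-compressed 2D float dataset (assuming the same open
     * file as above; the dataset name and sizes are arbitrary):
     * 
     * <pre>
     * Datatype fltType = new H5Datatype(Datatype.CLASS_FLOAT, 4, -1, -1);
     * long dims2[] = { 100, 50 };
     * long chunks2[] = { 10, 50 };
     * float data2[] = new float[100 * 50];
     * 
     * file.createScalarDS(&quot;/2D float&quot;, null, fltType, dims2, null, chunks2, 9, data2);
     * </pre>
     * 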
1299     * @param name
1300     *            the name of the dataset to create.
1301     * @param pgroup
1302     *            parent group where the new dataset is created.
1303     * @param type
1304     *            the datatype of the dataset.
1305     * @param dims
1306     *            the dimension size of the dataset.
1307     * @param maxdims
1308     *            the max dimension size of the dataset. maxdims is set to dims if maxdims = null.
1309     * @param chunks
1310     *            the chunk size of the dataset. No chunking if chunk = null.
     * @param gzip
     *            GZIP compression level (1 to 9). No compression if gzip<=0.
     * @param fillValue
     *            the fill value of the dataset. No user-defined fill value is set if fillValue = null.
     * @param data
     *            the array of data values.
1315     * 
1316     * @return the new scalar dataset if successful; otherwise returns null.
1317     */
1318    public static Dataset create(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims,
1319            long[] chunks, int gzip, Object fillValue, Object data) throws Exception {
1320        H5ScalarDS dataset = null;
1321        String fullPath = null;
1322        int did = -1, sid = -1, tid = -1, plist = -1;
1323
1324        log.trace("H5ScalarDS create start");
1325        if ((pgroup == null) || (name == null) || (dims == null) || ((gzip > 0) && (chunks == null))) {
1326            return null;
1327        }
1328
1329        H5File file = (H5File) pgroup.getFileFormat();
1330        if (file == null) {
1331            return null;
1332        }
1333
1334        String path = HObject.separator;
1335        if (!pgroup.isRoot()) {
1336            path = pgroup.getPath() + pgroup.getName() + HObject.separator;
1337            if (name.endsWith("/")) {
1338                name = name.substring(0, name.length() - 1);
1339            }
1340            int idx = name.lastIndexOf("/");
1341            if (idx >= 0) {
1342                name = name.substring(idx + 1);
1343            }
1344        }
1345
1346        fullPath = path + name;
1347
1348        // setup chunking and compression
1349        boolean isExtentable = false;
1350        if (maxdims != null) {
1351            for (int i = 0; i < maxdims.length; i++) {
1352                if (maxdims[i] == 0) {
1353                    maxdims[i] = dims[i];
1354                }
1355                else if (maxdims[i] < 0) {
1356                    maxdims[i] = HDF5Constants.H5S_UNLIMITED;
1357                }
1358
1359                if (maxdims[i] != dims[i]) {
1360                    isExtentable = true;
1361                }
1362            }
1363        }
1364
        // HDF5 requires chunking in order to define extendible datasets.
        // Chunking makes it possible to extend datasets efficiently, without
        // having to reorganize storage excessively. A default chunk size of
        // 64 in each dimension (or the dimension size, if smaller) gives good performance.
1369        if ((chunks == null) && isExtentable) {
1370            chunks = new long[dims.length];
1371            for (int i = 0; i < dims.length; i++)
1372                chunks[i] = Math.min(dims[i], 64);
1373        }
1374
1375        // prepare the dataspace and datatype
1376        int rank = dims.length;
1377
1378        if ((tid = type.toNative()) >= 0) {
1379            try {
1380                sid = H5.H5Screate_simple(rank, dims, maxdims);
1381
1382                // figure out creation properties
1383                plist = HDF5Constants.H5P_DEFAULT;
1384
1385                byte[] val_fill = null;
1386                try {
1387                    val_fill = parseFillValue(type, fillValue);
1388                }
1389                catch (Exception ex) {
1390                    log.debug("fill value:", ex);
1391                }
1392
1393                if (chunks != null || val_fill != null) {
1394                    plist = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
1395
1396                    if (chunks != null) {
1397                        H5.H5Pset_layout(plist, HDF5Constants.H5D_CHUNKED);
1398                        H5.H5Pset_chunk(plist, rank, chunks);
1399
1400                        // compression requires chunking
1401                        if (gzip > 0) {
1402                            H5.H5Pset_deflate(plist, gzip);
1403                        }
1404                    }
1405
1406                    if (val_fill != null) {
1407                        H5.H5Pset_fill_value(plist, tid, val_fill);
1408                    }
1409                }
1410
1411                int fid = file.getFID();
1412
1413                log.trace("H5ScalarDS create dataset");
1414                did = H5.H5Dcreate(fid, fullPath, tid, sid, HDF5Constants.H5P_DEFAULT, plist, HDF5Constants.H5P_DEFAULT);
1415                log.trace("H5ScalarDS create H5ScalarDS");
1416                dataset = new H5ScalarDS(file, name, path);
1417            }
1418            finally {
1419                try {
1420                    H5.H5Pclose(plist);
1421                }
1422                catch (HDF5Exception ex) {
1423                    log.debug("create finally close:", ex);
1424                }
1425                try {
1426                    H5.H5Sclose(sid);
1427                }
1428                catch (HDF5Exception ex) {
1429                    log.debug("create finally close:", ex);
1430                }
1431                try {
1432                    H5.H5Tclose(tid);
1433                }
1434                catch (HDF5Exception ex) {
1435                    log.debug("create finally close:", ex);
1436                }
1437                try {
1438                    H5.H5Dclose(did);
1439                }
1440                catch (HDF5Exception ex) {
1441                    log.debug("create finally close:", ex);
1442                }
1443            }
1444        }
1445
1446        if (dataset != null) {
1447            pgroup.addToMemberList(dataset);
1448            if (data != null) {
1449                dataset.init();
1450                long selected[] = dataset.getSelectedDims();
1451                for (int i = 0; i < rank; i++) {
1452                    selected[i] = dims[i];
1453                }
1454                dataset.write(data);
1455            }
1456        }
1457        log.trace("H5ScalarDS create finish");
1458
1459        return dataset;
1460    }
1461
    // Check the CF convention attributes (_FillValue, valid_range, valid_min, valid_max)
    // and use them to set the filtered image values and the image data range.
1463    private void checkCFconvention(int oid) throws Exception {
1464        Object avalue = getAttrValue(oid, "_FillValue");
1465
1466        if (avalue != null) {
1467            int n = Array.getLength(avalue);
1468            for (int i = 0; i < n; i++)
1469                addFilteredImageValue((Number) Array.get(avalue, i));
1470        }
1471
1472        if (imageDataRange == null || imageDataRange[1] <= imageDataRange[0]) {
1473            double x0 = 0, x1 = 0;
1474            avalue = getAttrValue(oid, "valid_range");
1475            if (avalue != null) {
1476                try {
                    x0 = Double.parseDouble(Array.get(avalue, 0).toString());
                    x1 = Double.parseDouble(Array.get(avalue, 1).toString());
1479                    imageDataRange = new double[2];
1480                    imageDataRange[0] = x0;
1481                    imageDataRange[1] = x1;
1482                    return;
1483                }
1484                catch (Exception ex) {
1485                    log.debug("valid_range:", ex);
1486                }
1487            }
1488
1489            avalue = getAttrValue(oid, "valid_min");
1490            if (avalue != null) {
1491                try {
                    x0 = Double.parseDouble(Array.get(avalue, 0).toString());
1493                }
1494                catch (Exception ex) {
1495                    log.debug("valid_min:", ex);
1496                }
1497                avalue = getAttrValue(oid, "valid_max");
1498                if (avalue != null) {
1499                    try {
                        x1 = Double.parseDouble(Array.get(avalue, 0).toString());
1501                        imageDataRange = new double[2];
1502                        imageDataRange[0] = x0;
1503                        imageDataRange[1] = x1;
1504                    }
1505                    catch (Exception ex) {
1506                        log.debug("valid_max:", ex);
1507                    }
1508                }
1509            }
1510        } // if (imageDataRange==null || imageDataRange[1]<=imageDataRange[0])
1511    }
1512
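    /**
     * Reads the value of the named attribute attached to the given object.
     *
     * @param oid
     *            identifier of the object to read the attribute from.
     * @param aname
     *            name of the attribute.
     *
     * @return the attribute value as an array, or null if the attribute does not exist or cannot be read.
     */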
1513    private Object getAttrValue(int oid, String aname) {
1514        int aid = -1, atid = -1, asid = -1;
1515        Object avalue = null;
1516        log.trace("getAttrValue: start name={}", aname);
1517
1518        try {
1519            // try to find attribute name
1520            aid = H5.H5Aopen_by_name(oid, ".", aname, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
1521        }
1522        catch (HDF5LibraryException ex5) {
1523            log.debug("Failed to find attribute {} : Expected", aname);
1524        }
1525        catch (Exception ex) {
1526            log.debug("try to find attribute {}:", aname, ex);
1527        }
1528        if (aid > 0) {
1529            try {
1530                atid = H5.H5Aget_type(aid);
1531                int tmptid = atid;
1532                atid = H5.H5Tget_native_type(tmptid);
1533                try {
1534                    H5.H5Tclose(tmptid);
1535                }
1536                catch (Exception ex) {
1537                    log.debug("close H5Aget_type after getting H5Tget_native_type:", ex);
1538                }
1539
1540                asid = H5.H5Aget_space(aid);
1541                long adims[] = null;
1542
1543                int arank = H5.H5Sget_simple_extent_ndims(asid);
1544                if (arank > 0) {
1545                    adims = new long[arank];
1546                    H5.H5Sget_simple_extent_dims(asid, adims, null);
1547                }
1548                log.trace("getAttrValue: adims={}", adims);
1549
1550                // retrieve the attribute value
1551                long lsize = 1;
1552                if (adims != null) {
1553                    for (int j = 0; j < adims.length; j++) {
1554                        lsize *= adims[j];
1555                    }
1556                }
1557                log.trace("getAttrValue: lsize={}", lsize);
1558                avalue = H5Datatype.allocateArray(atid, (int) lsize);
1559
1560                if (avalue != null) {
1561                    log.trace("read attribute id {} of size={}", atid, lsize);
1562                    H5.H5Aread(aid, atid, avalue);
1563
1564                    if (H5Datatype.isUnsigned(atid)) {
1565                        log.trace("id {} is unsigned", atid);
1566                        avalue = convertFromUnsignedC(avalue, null);
1567                    }
1568                }
1569            }
1570            catch (Exception ex) {
1571                log.debug("try to get value for attribute {}:", aname, ex);
1572            }
1573            finally {
1574                try {
1575                    H5.H5Tclose(atid);
1576                }
1577                catch (HDF5Exception ex) {
1578                    log.debug("finally close:", ex);
1579                }
1580                try {
1581                    H5.H5Sclose(asid);
1582                }
1583                catch (HDF5Exception ex) {
1584                    log.debug("finally close:", ex);
1585                }
1586                try {
1587                    H5.H5Aclose(aid);
1588                }
1589                catch (HDF5Exception ex) {
1590                    log.debug("finally close:", ex);
1591                }
1592            }
1593        } // if (aid > 0)
1594
1595        log.trace("getAttrValue: finish");
1596        return avalue;
1597    }
1598
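    /**
     * Checks whether the named attribute of the given object exists and its string value equals, ignoring case, the
     * given value.
     *
     * @param objID
     *            identifier of the object that owns the attribute.
     * @param name
     *            name of the attribute.
     * @param value
     *            the string value to compare against.
     *
     * @return true if the attribute exists and matches the value; otherwise false.
     */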
1599    private boolean isStringAttributeOf(int objID, String name, String value) {
1600        boolean retValue = false;
1601        int aid = -1, atid = -1;
1602
1603        try {
            // open the named attribute and compare its string value with the given value
1605            aid = H5.H5Aopen_by_name(objID, ".", name, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
1606            atid = H5.H5Aget_type(aid);
1607            int size = H5.H5Tget_size(atid);
1608            byte[] attrValue = new byte[size];
1609            H5.H5Aread(aid, atid, attrValue);
1610            String strValue = new String(attrValue).trim();
1611            retValue = strValue.equalsIgnoreCase(value);
1612        }
1613        catch (Exception ex) {
            log.debug("check string attribute {}:", name, ex);
1615        }
1616        finally {
1617            try {
1618                H5.H5Tclose(atid);
1619            }
1620            catch (HDF5Exception ex) {
1621                log.debug("finally close:", ex);
1622            }
1623            try {
1624                H5.H5Aclose(aid);
1625            }
1626            catch (HDF5Exception ex) {
1627                log.debug("finally close:", ex);
1628            }
1629        }
1630
1631        return retValue;
1632    }
1633
1634    /*
1635     * (non-Javadoc)
1636     * 
1637     * @see hdf.object.Dataset#copy(hdf.object.Group, java.lang.String, long[], java.lang.Object)
1638     */
1639    @Override
1640    public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff) throws Exception {
1641        // must give a location to copy
1642        if (pgroup == null) {
1643            return null;
1644        }
1645
1646        Dataset dataset = null;
1647        int srcdid = -1, dstdid = -1, tid = -1, sid = -1, plist = -1;
1648        String dname = null, path = null;
1649
1650        if (pgroup.isRoot()) {
1651            path = HObject.separator;
1652        }
1653        else {
1654            path = pgroup.getPath() + pgroup.getName() + HObject.separator;
1655        }
1656        dname = path + dstName;
1657
1658        srcdid = open();
1659        if (srcdid >= 0) {
1660            try {
1661                tid = H5.H5Dget_type(srcdid);
1662                sid = H5.H5Screate_simple(dims.length, dims, null);
1663                plist = H5.H5Dget_create_plist(srcdid);
1664
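                // If a destination dimension is smaller than the source chunk size,
                // shrink the chunk size so chunked storage remains valid for the copy.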
1665                long[] chunks = new long[dims.length];
1666                boolean setChunkFlag = false;
1667                try {
1668                    H5.H5Pget_chunk(plist, dims.length, chunks);
1669                    for (int i = 0; i < dims.length; i++) {
1670                        if (dims[i] < chunks[i]) {
1671                            setChunkFlag = true;
1672                            if (dims[i] == 1)
1673                                chunks[i] = 1;
1674                            else
1675                                chunks[i] = dims[i] / 2;
1676                        }
1677                    }
1678                }
1679                catch (Exception ex) {
1680                    log.debug("copy chunk:", ex);
1681                }
1682
1683                if (setChunkFlag)
1684                    H5.H5Pset_chunk(plist, dims.length, chunks);
1685
1686                try {
1687                    dstdid = H5.H5Dcreate(pgroup.getFID(), dname, tid, sid, HDF5Constants.H5P_DEFAULT, plist,
1688                            HDF5Constants.H5P_DEFAULT);
1689                }
1690                catch (Exception e) {
1691                    log.debug("copy create:", e);
1692                }
1693                finally {
1694                    try {
1695                        H5.H5Dclose(dstdid);
1696                    }
1697                    catch (Exception ex2) {
1698                        log.debug("finally close:", ex2);
1699                    }
1700                }
1701
1702                dataset = new H5ScalarDS(pgroup.getFileFormat(), dstName, path);
1703                if (buff != null) {
1704                    dataset.init();
1705                    dataset.write(buff);
1706                }
1707
1708                dstdid = dataset.open();
1709                if (dstdid >= 0) {
1710                    try {
1711                        H5File.copyAttributes(srcdid, dstdid);
1712                    }
1713                    finally {
1714                        try {
1715                            H5.H5Dclose(dstdid);
1716                        }
1717                        catch (Exception ex) {
1718                            log.debug("finally close:", ex);
1719                        }
1720                    }
1721                }
1722            }
1723            finally {
1724                try {
1725                    H5.H5Pclose(plist);
1726                }
1727                catch (Exception ex) {
1728                    log.debug("finally close:", ex);
1729                }
1730                try {
1731                    H5.H5Sclose(sid);
1732                }
1733                catch (Exception ex) {
1734                    log.debug("finally close:", ex);
1735                }
1736                try {
1737                    H5.H5Tclose(tid);
1738                }
1739                catch (Exception ex) {
1740                    log.debug("finally close:", ex);
1741                }
1742                try {
1743                    H5.H5Dclose(srcdid);
1744                }
1745                catch (Exception ex) {
1746                    log.debug("finally close:", ex);
1747                }
1748            }
1749        }
1750
1751        pgroup.addToMemberList(dataset);
1752
1753        ((ScalarDS) dataset).setIsImage(isImage);
1754
1755        return dataset;
1756    }
1757
1758    /*
1759     * (non-Javadoc)
1760     * 
1761     * @see hdf.object.ScalarDS#getPalette()
1762     */
1763    @Override
1764    public byte[][] getPalette() {
1765        if (palette == null) {
1766            palette = readPalette(0);
1767        }
1768
1769        return palette;
1770    }
1771
1772    /*
1773     * (non-Javadoc)
1774     * 
1775     * @see hdf.object.ScalarDS#getPaletteName(int)
1776     */
    @Override
    public String getPaletteName(int idx) {
1778
1779        byte[] refs = getPaletteRefs();
1780        int did = -1, pal_id = -1;
1781        String[] paletteName = { "" };
1782        long size = 100L;
1783
1784        if (refs == null) {
1785            return null;
1786        }
1787
1788        byte[] ref_buf = new byte[8];
1789
1790        try {
1791            System.arraycopy(refs, idx * 8, ref_buf, 0, 8);
1792        }
1793        catch (Throwable err) {
1794            return null;
1795        }
1796
1797        did = open();
1798        if (did >= 0) {
1799            try {
1800                pal_id = H5.H5Rdereference(getFID(), HDF5Constants.H5R_OBJECT, ref_buf);
1801                H5.H5Iget_name(pal_id, paletteName, size);
1802            }
1803            catch (Exception ex) {
                log.debug("getPaletteName:", ex);
1805            }
1806            finally {
1807                close(pal_id);
1808                close(did);
1809            }
1810        }
1811
1812        return paletteName[0];
1813    }
1814
1815    /*
1816     * (non-Javadoc)
1817     * 
1818     * @see hdf.object.ScalarDS#readPalette(int)
1819     */
1820    @Override
1821    public byte[][] readPalette(int idx) {
1822        byte[][] thePalette = null;
1823        byte[] refs = getPaletteRefs();
1824        int did = -1, pal_id = -1, tid = -1;
1825
1826        if (refs == null) {
1827            return null;
1828        }
1829
1830        byte[] p = null;
1831        byte[] ref_buf = new byte[8];
1832
1833        try {
1834            System.arraycopy(refs, idx * 8, ref_buf, 0, 8);
1835        }
1836        catch (Throwable err) {
1837            return null;
1838        }
1839
1840        did = open();
1841        if (did >= 0) {
1842            try {
1843                pal_id = H5.H5Rdereference(getFID(), HDF5Constants.H5R_OBJECT, ref_buf);
1844                tid = H5.H5Dget_type(pal_id);
1845
1846                // support only 3*256 byte palette data
1847                if (H5.H5Dget_storage_size(pal_id) <= 768) {
1848                    p = new byte[3 * 256];
1849                    H5.H5Dread(pal_id, tid, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, p);
1850                }
1851            }
1852            catch (HDF5Exception ex) {
1853                p = null;
1854            }
1855            finally {
1856                try {
1857                    H5.H5Tclose(tid);
1858                }
                catch (HDF5Exception ex2) {
                    log.debug("finally close:", ex2);
                }
1861                close(pal_id);
1862                close(did);
1863            }
1864        }
1865
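        // The raw palette bytes are interleaved as RGBRGB...; split them into
        // three 256-entry component arrays (red, green, blue).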
1866        if (p != null) {
1867            thePalette = new byte[3][256];
1868            for (int i = 0; i < 256; i++) {
1869                thePalette[0][i] = p[i * 3];
1870                thePalette[1][i] = p[i * 3 + 1];
1871                thePalette[2][i] = p[i * 3 + 2];
1872            }
1873        }
1874
1875        return thePalette;
1876    }
1877
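    /**
     * Converts a fill value into the byte array expected by H5Pset_fill_value for the given datatype.
     *
     * @param type
     *            the datatype of the dataset being created.
     * @param fillValue
     *            the fill value, given as a String or as an array whose first element is used.
     *
     * @return the fill value encoded as bytes, or null if the value cannot be converted.
     */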
1878    private static byte[] parseFillValue(Datatype type, Object fillValue) throws Exception {
1879        byte[] data = null;
1880
1881        if (type == null || fillValue == null)
1882            return null;
1883
1884        int datatypeClass = type.getDatatypeClass();
1885        int datatypeSize = type.getDatatypeSize();
1886
1887        double val_dbl = 0;
1888        String val_str = null;
1889
1890        if (fillValue instanceof String) {
1891            val_str = (String) fillValue;
1892        }
1893        else if (fillValue.getClass().isArray()) {
1894            val_str = Array.get(fillValue, 0).toString();
1895        }
1896
        if (val_str == null) {
            log.debug("parseFillValue: fill value is neither a String nor an array");
            return null;
        }

        if (datatypeClass != Datatype.CLASS_STRING) {
            try {
                val_dbl = Double.parseDouble(val_str);
            }
            catch (NumberFormatException ex) {
                return null;
            }
        }
1905
1906        try {
1907            switch (datatypeClass) {
1908            case Datatype.CLASS_INTEGER:
1909            case Datatype.CLASS_ENUM:
1910            case Datatype.CLASS_CHAR:
1911                if (datatypeSize == 1) {
1912                    data = new byte[] { (byte) val_dbl };
1913                }
1914                else if (datatypeSize == 2) {
1915                    data = HDFNativeData.shortToByte((short) val_dbl);
1916                }
1917                else if (datatypeSize == 8) {
1918                    data = HDFNativeData.longToByte((long) val_dbl);
1919                }
1920                else {
1921                    data = HDFNativeData.intToByte((int) val_dbl);
1922                }
1923                break;
1924            case Datatype.CLASS_FLOAT:
1925                if (datatypeSize == 8) {
1926                    data = HDFNativeData.doubleToByte(val_dbl);
1927                }
1928                else {
1929                    data = HDFNativeData.floatToByte((float) val_dbl);
1931                }
1932                break;
1933            case Datatype.CLASS_STRING:
1934                data = val_str.getBytes();
1935                break;
1936            case Datatype.CLASS_REFERENCE:
1937                data = HDFNativeData.longToByte((long) val_dbl);
1938                break;
1939            default:
1940                log.debug("parseFillValue datatypeClass unknown");
1941                break;
1942            } // switch (tclass)
1943        }
1944        catch (Exception ex) {
1945            data = null;
1946        }
1947
1948        return data;
1949    }
1950
1951    /*
1952     * (non-Javadoc)
1953     * 
1954     * @see hdf.object.ScalarDS#getPaletteRefs()
1955     */
1956    @Override
1957    public byte[] getPaletteRefs() {
1958        if (rank <= 0) {
1959            init(); // init will be called to get refs
1960        }
1961
1962        return paletteRefs;
1963    }
1964
1965    /**
     * Reads the references of the palettes attached to this dataset into a byte array. Each reference requires eight
     * bytes of storage; therefore, the array length is 8*numberOfPalettes.
1968     */
1969    private byte[] getPaletteRefs(int did) {
1970        int aid = -1, sid = -1, size = 0, rank = 0, atype = -1;
1971        byte[] ref_buf = null;
1972
1973        try {
1974            aid = H5.H5Aopen_by_name(did, ".", "PALETTE", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
1975            sid = H5.H5Aget_space(aid);
1976            rank = H5.H5Sget_simple_extent_ndims(sid);
1977            size = 1;
1978            if (rank > 0) {
1979                long[] dims = new long[rank];
1980                H5.H5Sget_simple_extent_dims(sid, dims, null);
1981                for (int i = 0; i < rank; i++) {
1982                    size *= (int) dims[i];
1983                }
1984            }
1985
1986            ref_buf = new byte[size * 8];
1987            atype = H5.H5Aget_type(aid);
1988
1989            H5.H5Aread(aid, atype, ref_buf);
1990        }
1991        catch (HDF5Exception ex) {
1992            log.debug("Palette attribute search failed: Expected");
1993            ref_buf = null;
1994        }
1995        finally {
1996            try {
1997                H5.H5Tclose(atype);
1998            }
1999            catch (HDF5Exception ex2) {
2000                log.debug("finally close:", ex2);
2001            }
2002            try {
2003                H5.H5Sclose(sid);
2004            }
2005            catch (HDF5Exception ex2) {
2006                log.debug("finally close:", ex2);
2007            }
2008            try {
2009                H5.H5Aclose(aid);
2010            }
2011            catch (HDF5Exception ex2) {
2012                log.debug("finally close:", ex2);
2013            }
2014        }
2015
2016        return ref_buf;
2017    }
2018
    /**
     * Extends the dataset to the sizes given in newDims. H5Dset_extent verifies that the dataset is at least of size
     * newDims, extending it if necessary; the dimensionality of newDims must be the same as that of the dataspace of
     * the dataset being changed.
     * <p>
     * This method can be applied to the following datasets: 1) any dataset with unlimited dimensions, or 2) a dataset
     * with fixed dimensions if the current dimension sizes are less than the maximum sizes set with maxdims (see
     * H5Screate_simple).
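     * <p>
     * A minimal usage sketch (illustrative only; the open file, dataset path, and new sizes below are assumptions):
     *
     * <pre>
     * // "file" is an open, writable H5File; "/temp2D" is assumed to be a chunked
     * // dataset created with unlimited maximum dimensions
     * H5ScalarDS ds = (H5ScalarDS) file.get("/temp2D");
     * ds.init();
     * long[] curDims = ds.getDims();
     * ds.extend(new long[] { curDims[0] * 2, curDims[1] }); // double the first dimension
     * </pre>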
2026     */
2027    public void extend(long[] newDims) throws HDF5Exception {
2028        int did = -1, sid = -1;
2029
2030        did = open();
2031        if (did >= 0) {
2032            try {
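                // extend the dataset, flush the file, then re-read the dataspace
                // to verify that the new dimensions took effect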
2033                H5.H5Dset_extent(did, newDims);
2034                H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_GLOBAL);
2035                sid = H5.H5Dget_space(did);
2036                long[] checkDims = new long[rank];
2037                H5.H5Sget_simple_extent_dims(sid, checkDims, null);
2038                for (int i = 0; i < rank; i++) {
2039                    if (checkDims[i] != newDims[i]) {
2040                        throw new HDF5Exception("error extending dataset " + getName());
2041                    }
2042                }
2043                dims = checkDims;
2044            }
2045            catch (Exception e) {
2046                throw new HDF5Exception(e.getMessage());
2047            }
2048            finally {
2049                if (sid > 0)
2050                    H5.H5Sclose(sid);
2051
2052                close(did);
2053            }
2054        }
2055    }
2056
2057}