/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the files COPYING and Copyright.html. *
 * COPYING can be found at the root of the source code distribution tree.    *
 * Or, see http://hdfgroup.org/products/hdf-java/doc/Copyright.html.         *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.h4;

import java.util.List;
import java.util.Vector;

import hdf.hdflib.HDFChunkInfo;
import hdf.hdflib.HDFCompInfo;
import hdf.hdflib.HDFConstants;
import hdf.hdflib.HDFDeflateCompInfo;
import hdf.hdflib.HDFException;
import hdf.hdflib.HDFJPEGCompInfo;
import hdf.hdflib.HDFLibrary;
import hdf.hdflib.HDFNBITCompInfo;
import hdf.hdflib.HDFSKPHUFFCompInfo;
import hdf.hdflib.HDFSZIPCompInfo;
import hdf.object.Attribute;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.ScalarDS;

/**
 * H4SDS describes HDF4 Scientific Data Sets (SDS) and operations performed on
 * the SDS. An SDS is a group of data structures used to store and describe
 * multidimensional arrays of scientific data.
 * <p>
 * The data contained in an SDS array has a data type associated with it. The
 * standard data types supported by the SD interface include 32- and 64-bit
 * floating-point numbers, 8-, 16- and 32-bit signed integers, 8-, 16- and
 * 32-bit unsigned integers, and 8-bit characters.
 * <p>
 * <b>How to Select a Subset</b>
 * <p>
 * Dataset defines APIs for reading, writing and subsetting a dataset. No function
 * is defined to select a subset of a data array. The selection is done in an
 * implicit way. Function calls to dimension information such as getSelectedDims()
 * return an array of dimension values, which is a reference to the array in the
 * dataset object. Changes to the array outside the dataset object directly change
 * the values of the array in the dataset object. It is like pointers in C.
 * <p>
 *
 * The following is an example of how to make a subset. In the example, the dataset
 * is a 4-dimensional array of size [200][100][50][10], i.e.
 * dims[0]=200; dims[1]=100; dims[2]=50; dims[3]=10; <br>
 * We want to select every other data point in dims[1] and dims[2].
 * <pre>
     int rank = dataset.getRank();   // number of dimensions of the dataset
     long[] dims = dataset.getDims(); // the dimension sizes of the dataset
     long[] selected = dataset.getSelectedDims(); // the selected size of the dataset
     long[] start = dataset.getStartDims(); // the offset of the selection
     long[] stride = dataset.getStride(); // the stride of the dataset
     int[]  selectedIndex = dataset.getSelectedIndex(); // the selected dimensions for display

     // select dim1 and dim2 as 2D data for display, and slice through dim0
     selectedIndex[0] = 1;
     selectedIndex[1] = 2;
     selectedIndex[2] = 0;

     // reset the selection arrays
     for (int i=0; i<rank; i++) {
         start[i] = 0;
         selected[i] = 1;
         stride[i] = 1;
     }

    // set stride to 2 on dim1 and dim2 so that every other data point is selected.
    stride[1] = 2;
    stride[2] = 2;

    // set the selection size of dim1 and dim2
    selected[1] = dims[1]/stride[1];
    selected[2] = dims[2]/stride[2];

    // when dataset.read() is called, the selection above will be used since
    // the dimension arrays are passed by reference. Changes to these arrays
    // outside the dataset object directly change the values of these arrays
    // in the dataset object.

 * </pre>
 *
 * <p>
 * @version 1.1 9/4/2007
 * @author Peter X. Cao
 */
public class H4SDS extends ScalarDS
{
    /** Serialization version UID. */
    private static final long serialVersionUID = 2557157923292438696L;

    private final static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H4SDS.class);

    /** Tag for netCDF datasets.
     *  The HDF4 library supports netCDF version 2.3.2, and only through the SDS APIs.
     */
    // magic number for netCDF: "C(67) D(68) F(70) '\001'"
    public static final int DFTAG_NDG_NETCDF = 67687001;

    /**
     * The list of attributes of this data object. Members of the list are
     * instances of Attribute.
     */
    private List attributeList;

    /**
     * The SD interface identifier obtained from SDstart(filename, access).
     */
    private int sdid;

    /** the datatype identifier */
    private int datatypeID = -1;

    private int nAttributes = -1;

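    /**
     * Creates an H4SDS object with specific name and path, and a null OID.
     *
     * @param theFile the HDF file.
     * @param name the name of this H4SDS.
     * @param path the full path of this H4SDS.
     */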
    public H4SDS(FileFormat theFile, String name, String path)
    {
        this(theFile, name, path, null);
    }

    /**
     * Creates an H4SDS object with specific name and path.
     * <p>
     * @param theFile the HDF file.
     * @param name the name of this H4SDS.
     * @param path the full path of this H4SDS.
     * @param oid the unique identifier of this data object.
     */
    public H4SDS(
        FileFormat theFile,
        String name,
        String path,
        long[] oid)
    {
        super(theFile, name, path, oid);
        unsignedConverted = false;
        sdid = ((H4File)getFileFormat()).getSDAccessID();
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.DataFormat#hasAttribute()
     */
    public boolean hasAttribute ()
    {
        if (nAttributes < 0) {
            sdid = ((H4File)getFileFormat()).getSDAccessID();

            int id = open();
            try { // retrieve attributes of the dataset
                String[] objName = {""};
                int[] sdInfo = {0, 0, 0};
                int[] tmpDim = new int[HDFConstants.MAX_VAR_DIMS];
                HDFLibrary.SDgetinfo(id, objName, tmpDim, sdInfo);
                nAttributes = sdInfo[2];
            }
            catch (Exception ex) {
                nAttributes = 0;
            }
            close(id);
        }

        return (nAttributes > 0);
    }

    // ***** need to implement from ScalarDS *****
    @Override
    public byte[][] readPalette(int idx) { return null; }

    // ***** need to implement from ScalarDS *****
    @Override
    public byte[] getPaletteRefs() { return null; }

    // implementing Dataset
    @Override
    public Datatype getDatatype()
    {
        if (datatype == null) {
            datatype = new H4Datatype(datatypeID);
        }

        return datatype;
    }

    // To do: Implementing Dataset
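    /**
     * Copies this dataset (definition, attributes and data) into the given
     * parent group. Passing null for dims copies the full extent, and passing
     * null for buff reads the data from this dataset. A minimal sketch,
     * assuming dset is an H4SDS and group is a group in a writable H4File
     * (names are illustrative):
     * <pre>
     * Dataset theCopy = dset.copy(group, "copyOfDset", null, null);
     * </pre>
     */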
    @Override
    public Dataset copy(Group pgroup, String dname, long[] dims, Object buff)
    throws Exception
    {
        Dataset dataset = null;
        int srcdid=-1, dstdid=-1, tid=-1, size=1, theRank=2;
        String path=null;
        int[] count=null, start=null;

        if (pgroup == null) {
            return null;
        }

        if (dname == null) {
            dname = getName();
        }

        if (pgroup.isRoot()) {
            path = HObject.separator;
        }
        else {
            path = pgroup.getPath()+pgroup.getName()+HObject.separator;
        }
        log.trace("copy(): start with path={}", path);

        srcdid = open();
        if (srcdid < 0) {
            return null;
        }

        if (dims == null) {
            theRank = getRank();
            if (theRank <= 0) {
                init();
            }
            theRank = getRank();

            dims = getDims();
        }
        else {
            theRank = dims.length;
        }

        start = new int[theRank];
        count = new int[theRank];
        for (int i=0; i<theRank; i++) {
            start[i] = 0;
            count[i] = (int)dims[i];
            size *= count[i];
        }
        log.trace("copy(): theRank={} with size={}", theRank, size);

        // create the new dataset and attach it to the parent group
        tid = datatypeID;
        dstdid = HDFLibrary.SDcreate(
            ((H4File)pgroup.getFileFormat()).getSDAccessID(),
            dname, tid, theRank, count);
        if (dstdid < 0) {
            return null;
        }

        int ref = HDFLibrary.SDidtoref(dstdid);
        if (!pgroup.isRoot()) {
            int vgid = pgroup.open();
            HDFLibrary.Vaddtagref(vgid, HDFConstants.DFTAG_NDG, ref);
            pgroup.close(vgid);
        }

        // copy attributes from the source object to the new object
        copyAttribute(srcdid, dstdid);

        // read data from the source dataset
        log.trace("copy(): read data from the source dataset");
        if (buff == null) {
            buff = new byte[size * HDFLibrary.DFKNTsize(tid)];
            HDFLibrary.SDreaddata(srcdid, start, null, count, buff);
        }

        // write the data into the destination dataset
        log.trace("copy(): write the data into the destination dataset");
        HDFLibrary.SDwritedata(dstdid, start, null, count, buff);

        long[] oid = {HDFConstants.DFTAG_NDG, ref};
        dataset = new H4SDS(pgroup.getFileFormat(), dname, path, oid);

        pgroup.addToMemberList(dataset);

        close(srcdid);
        try {
            HDFLibrary.SDendaccess(dstdid);
        }
        catch (HDFException ex) {
            log.debug("copy.SDendaccess:", ex);
        }

        log.trace("copy(): finish");
        return dataset;
    }

    // Implementing Dataset
    @Override
    public byte[] readBytes() throws HDFException
    {
        byte[] theData = null;

        log.trace("readBytes(): start");
        if (rank <= 0) {
            init();
        }

        int id = open();
        if (id < 0) {
            return null;
        }

        int datasize = 1;
        int[] select = new int[rank];
        int[] start = new int[rank];
        for (int i=0; i<rank; i++) {
            datasize *= (int)selectedDims[i];
            select[i] = (int)selectedDims[i];
            start[i] = (int)startDims[i];
        }

        int[] stride = null;
        if (selectedStride != null) {
            stride = new int[rank];
            for (int i=0; i<rank; i++) {
                stride[i] = (int)selectedStride[i];
            }
        }

        try {
            int size = HDFLibrary.DFKNTsize(datatypeID)*datasize;
            theData = new byte[size];
            HDFLibrary.SDreaddata(id, start, stride, select, theData);
        }
        finally {
            close(id);
        }

        log.trace("readBytes(): finish");
        return theData;
    }

    // Implementing DataFormat
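    /**
     * Reads the data of the current selection from the file into memory.
     * A minimal sketch, assuming dset is an H4SDS; the selection arrays work
     * as described in the class comment, and names are illustrative:
     * <pre>
     * dset.init();                        // build the default selection
     * long[] selected = dset.getSelectedDims();
     * selected[0] = selected[0] / 2;      // select the first half of dim0
     * Object data = dset.read();          // read the selected subset
     * </pre>
     */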
    @Override
    public Object read() throws HDFException
    {
        Object theData = null;

        log.trace("read(): start");
        if (rank <= 0) {
            init();
        }

        int id = open();
        if (id < 0) {
            return null;
        }

        int datasize = 1;
        int[] select = new int[rank];
        int[] start = new int[rank];
        for (int i=0; i<rank; i++) {
            datasize *= (int)selectedDims[i];
            select[i] = (int)selectedDims[i];
            start[i] = (int)startDims[i];
        }

        int[] stride = null;
        if (selectedStride != null) {
            stride = new int[rank];
            for (int i=0; i<rank; i++) {
                stride[i] = (int)selectedStride[i];
            }
        }

        try {
            theData = H4Datatype.allocateArray(datatypeID, datasize);

            if (theData != null) {
                // assume external data files are located in the same directory as the main file.
                HDFLibrary.HXsetdir(getFileFormat().getParent());

                HDFLibrary.SDreaddata(id, start, stride, select, theData);

                if (isText) {
                    theData = byteToString((byte[])theData, select[0]);
                }
            }
        }
        finally {
            close(id);
        }

        if (fillValue==null && isImageDisplay) {
            try {
                getMetadata(); // getMetadata() sets fillValue for images
            }
            catch (Exception ex) {
                log.debug("read.getMetadata():", ex);
            }
        }

        if ((rank > 1) && (selectedIndex[0] > selectedIndex[1]))
            isDefaultImageOrder = false;
        else
            isDefaultImageOrder = true;

        log.trace("read(): finish");
        return theData;
    }

    // Implementing DataFormat
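    /**
     * Writes data from memory into the current selection of this dataset.
     * A minimal read-modify-write sketch, assuming dset is an H4SDS of
     * 32-bit integers in a writable file (names are illustrative):
     * <pre>
     * dset.init();
     * int[] values = (int[])dset.read();  // read the current selection
     * values[0] = 99;                     // modify the first element
     * dset.write(values);                 // write the buffer back
     * </pre>
     */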
    @Override
    public void write(Object buf) throws HDFException
    {
        if (buf == null) {
            return;
        }

        log.trace("write(): start");
        int id = open();
        if (id < 0) {
            return;
        }

        int[] select = new int[rank];
        int[] start = new int[rank];
        for (int i=0; i<rank; i++) {
            select[i] = (int)selectedDims[i];
            start[i] = (int)startDims[i];
        }

        int[] stride = null;
        if (selectedStride != null) {
            stride = new int[rank];
            for (int i=0; i<rank; i++) {
                stride[i] = (int)selectedStride[i];
            }
        }

        Object tmpData = buf;
        try {
            if (isUnsigned && unsignedConverted) {
                tmpData = convertToUnsignedC(buf);
            }
            // assume external data files are located in the same directory as the main file.
            HDFLibrary.HXsetdir(getFileFormat().getParent());

            HDFLibrary.SDwritedata(id, start, stride, select, tmpData);
        }
        finally {
            tmpData = null;
            close(id);
        }
        log.trace("write(): finish");
    }

    // Implementing DataFormat
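    /**
     * Retrieves the attributes of this dataset as a list of Attribute objects.
     * The list is read from the file once and cached. A minimal sketch of
     * iterating the result (names are illustrative):
     * <pre>
     * List attrs = dset.getMetadata();
     * for (int i = 0; attrs != null && i < attrs.size(); i++) {
     *     Attribute attr = (Attribute)attrs.get(i);
     *     System.out.println(attr.getName());
     * }
     * </pre>
     */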
    public List getMetadata() throws HDFException
    {
        if (attributeList != null) {
            return attributeList;
        }

        int id = open();
        String[] objName = {""};
        int[] sdInfo = {0, 0, 0};
        try {
            // retrieve attributes of the dataset
            int[] tmpDim = new int[HDFConstants.MAX_VAR_DIMS];
            HDFLibrary.SDgetinfo(id, objName, tmpDim, sdInfo);
            int n = sdInfo[2];

            if ((attributeList == null) && (n > 0)) {
                attributeList = new Vector(n, 5);
            }

            boolean b = false;
            String[] attrName = new String[1];
            int[] attrInfo = {0, 0};
            for (int i=0; i<n; i++) {
                attrName[0] = "";
                try {
                    b = HDFLibrary.SDattrinfo(id, i, attrName, attrInfo);
                    // mask off the litend bit
                    attrInfo[0] = attrInfo[0] & (~HDFConstants.DFNT_LITEND);
                }
                catch (HDFException ex) {
                    b = false;
                }

                if (!b) {
                    continue;
                }

                long[] attrDims = {attrInfo[1]};
                Attribute attr = new Attribute(attrName[0], new H4Datatype(attrInfo[0]), attrDims);
                attributeList.add(attr);

                Object buf = H4Datatype.allocateArray(attrInfo[0], attrInfo[1]);
                try {
                    HDFLibrary.SDreadattr(id, i, buf);
                }
                catch (HDFException ex) {
                    buf = null;
                }

                if (buf != null) {
                    if ((attrInfo[0] == HDFConstants.DFNT_CHAR) ||
                        (attrInfo[0] == HDFConstants.DFNT_UCHAR8)) {
                        buf = Dataset.byteToString((byte[])buf, attrInfo[1]);
                    }
                    else if (attrName[0].equalsIgnoreCase("fillValue") ||
                            attrName[0].equalsIgnoreCase("_fillValue")) {
                        fillValue = buf;
                    }

                    attr.setValue(buf);
                }
            } // for (int i=0; i<n; i++)

            // retrieve attributes of dimensions
            // BUG !! HDFLibrary.SDgetdimstrs(dimID, argv, 80) does not return anything
/*
            for (int i=0; i< rank; i++) {
                int dimID = HDFLibrary.SDgetdimid(id, i);
                String[] argv = {" ", " ", " "};
                HDFLibrary.SDgetdimstrs(dimID, argv, 80);
            }
*/
        }
        finally {
            close(id);
        }

        return attributeList;
    }

    // To do: implementing DataFormat
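    /**
     * Attaches an attribute to this dataset; objects that are not instances
     * of Attribute are ignored. A minimal sketch, assuming a writable file
     * (the attribute name and value are illustrative):
     * <pre>
     * long[] attrDims = {1};
     * Attribute attr = new Attribute("scale_factor",
     *     new H4Datatype(HDFConstants.DFNT_FLOAT32), attrDims);
     * attr.setValue(new float[] {2.0f});
     * dset.writeMetadata(attr);
     * </pre>
     */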
    public void writeMetadata(Object info) throws Exception
    {
        // only attribute metadata is supported.
        if (!(info instanceof Attribute)) {
            return;
        }

        getFileFormat().writeAttribute(this, (Attribute)info, true);

        if (attributeList == null) {
            attributeList = new Vector();
        }

        attributeList.add(info);
        nAttributes = attributeList.size();
    }

    // To do: implementing DataFormat
    public void removeMetadata(Object info) throws HDFException {}

    // implementing DataFormat
    public void updateMetadata(Object info) throws Exception {
        log.trace("updateMetadata(): disabled");
    }

    // Implementing HObject
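    /**
     * Opens access to this dataset and returns the SDS identifier, or a
     * negative value on failure. Every successful open() should be paired
     * with a close(id), as in this sketch:
     * <pre>
     * int id = dset.open();
     * if (id >= 0) {
     *     try {
     *         // ... use the SDS identifier ...
     *     }
     *     finally {
     *         dset.close(id);
     *     }
     * }
     * </pre>
     */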
    @Override
    public int open()
    {
        int id = -1;

        try {
            int index = 0;
            int tag = (int)oid[0];

            if (tag == H4SDS.DFTAG_NDG_NETCDF) {
                index = (int)oid[1]; // HDFLibrary.SDidtoref(id) fails for netCDF
            }
            else {
                index = HDFLibrary.SDreftoindex(sdid, (int)oid[1]);
            }

            id = HDFLibrary.SDselect(sdid, index);
        }
        catch (HDFException ex) {
            id = -1;
        }

        return id;
    }

    // Implementing HObject
    @Override
    public void close(int id)
    {
        try { HDFLibrary.SDendaccess(id); }
        catch (HDFException ex) { log.debug("close.SDendaccess:", ex); }
    }

    /**
     * Initializes the H4SDS, such as the dimension sizes of this dataset.
     */
    @Override
    public void init()
    {
        if (rank > 0) {
            return; // already called. Initialize only once.
        }

        log.trace("init(): start");
        int id = open();
        String[] objName = {""};
        String[] dimName = {""};
        int[] dimInfo = {0, 0, 0};
        int[] sdInfo = {0, 0, 0};
        boolean isUnlimited = false;

        int[] idims = new int[HDFConstants.MAX_VAR_DIMS];
        try {
            HDFLibrary.SDgetinfo(id, objName, idims, sdInfo);
            // mask off the litend bit
            sdInfo[1] = sdInfo[1] & (~HDFConstants.DFNT_LITEND);
            nAttributes = sdInfo[2];
            rank = sdInfo[0];

            if (rank <= 0) {
                rank = 1;
                idims[0] = 1;
            }

            isUnlimited = HDFLibrary.SDisrecord(id);

            datatypeID = sdInfo[1];
            isText = ((datatypeID == HDFConstants.DFNT_CHAR) || (datatypeID == HDFConstants.DFNT_UCHAR8));

            // get the dimension names
            try {
                dimNames = new String[rank];
                for (int i=0; i<rank; i++) {
                    int dimid = HDFLibrary.SDgetdimid(id, i);
                    HDFLibrary.SDdiminfo(dimid, dimName, dimInfo);
                    dimNames[i] = dimName[0];
                }
            }
            catch (Exception ex) {
                log.debug("get the dimension names:", ex);
            }

            // get compression information
            try {
                HDFCompInfo compInfo = new HDFCompInfo();
                HDFLibrary.SDgetcompress(id, compInfo);

                if (compInfo.ctype == HDFConstants.COMP_CODE_DEFLATE) {
                    HDFDeflateCompInfo comp = new HDFDeflateCompInfo();
                    HDFLibrary.SDgetcompress(id, comp);
                    compression = "GZIP(level="+comp.level+")";
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_SZIP) {
                    HDFSZIPCompInfo comp = new HDFSZIPCompInfo();
                    HDFLibrary.SDgetcompress(id, comp);
                    compression = "SZIP(bits_per_pixel="+comp.bits_per_pixel+",options_mask="+comp.options_mask+
                                  ",pixels="+comp.pixels+",pixels_per_block="+comp.pixels_per_block+
                                  ",pixels_per_scanline="+comp.pixels_per_scanline+")";
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_JPEG) {
                    HDFJPEGCompInfo comp = new HDFJPEGCompInfo();
                    HDFLibrary.SDgetcompress(id, comp);
                    compression = "JPEG(quality="+comp.quality+
                                  ",force_baseline="+comp.force_baseline+")";
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_SKPHUFF) {
                    HDFSKPHUFFCompInfo comp = new HDFSKPHUFFCompInfo();
                    HDFLibrary.SDgetcompress(id, comp);
                    compression = "SKPHUFF(skp_size="+comp.skp_size+")";
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_RLE) {
                    compression = "RLE";
                }
                else if (compInfo.ctype == HDFConstants.COMP_CODE_NBIT) {
                    HDFNBITCompInfo comp = new HDFNBITCompInfo();
                    HDFLibrary.SDgetcompress(id, comp);
                    compression = "NBIT(nt="+comp.nt+",bit_len="+comp.bit_len+",ctype="+comp.ctype+
                                  ",fill_one="+comp.fill_one+",sign_ext="+comp.sign_ext+
                                  ",start_bit="+comp.start_bit+")";
                }
            }
            catch (Exception ex) {
                log.debug("get compression information:", ex);
            }

            // get chunk information
            try {
                HDFChunkInfo chunkInfo = new HDFChunkInfo();
                int[] cflag = {HDFConstants.HDF_NONE};

                try {
                    HDFLibrary.SDgetchunkinfo(id, chunkInfo, cflag);
                }
                catch (Throwable ex) {
                    log.debug("SDgetchunkinfo:", ex);
                }

                if (cflag[0] == HDFConstants.HDF_NONE) {
                    chunkSize = null;
                }
                else {
                    chunkSize = new long[rank];
                    for (int i=0; i<rank; i++) {
                        chunkSize[i] = chunkInfo.chunk_lengths[i];
                    }
                }
            }
            catch (Exception ex) {
                log.debug("get chunk information:", ex);
            }
        }
        catch (HDFException ex) {
            log.debug("init():", ex);
        }
        finally {
            close(id);
        }
        isUnsigned = H4Datatype.isUnsigned(datatypeID);

        if (idims == null) {
            return;
        }

        dims = new long[rank];
        maxDims = new long[rank];
        startDims = new long[rank];
        selectedDims = new long[rank];

        for (int i=0; i<rank; i++) {
            startDims[i] = 0;
            selectedDims[i] = 1;
            dims[i] = maxDims[i] = idims[i];
        }

        if (isUnlimited)
            maxDims[0] = -1;

        selectedIndex[0] = 0;
        selectedIndex[1] = 1;
        selectedIndex[2] = 2;

        // select only two dimensions at a time
        if (rank == 1) {
            selectedDims[0] = dims[0];
        }

        if (rank > 1) {
            selectedDims[0] = dims[0];
            if (isText) {
                selectedDims[1] = 1;
            }
            else {
                selectedDims[1] = dims[1];
            }
        }
        log.trace("init(): finish");
    }

    // Implementing ScalarDS
    @Override
    public byte[][] getPalette()
    {
        return palette;
    }

    /**
     * Creates a new dataset.
     * @param name the name of the dataset to create.
     * @param pgroup the parent group of the new dataset.
     * @param type the datatype of the dataset.
     * @param dims the dimension sizes of the dataset.
     * @param maxdims the max dimension sizes of the dataset.
     * @param chunks the chunk sizes of the dataset.
     * @param gzip the level of the gzip compression.
     * @param fillValue the fill value of the dataset (currently unused; the fill value is set to zero).
     * @param data the array of data values.
     * @return the new dataset if successful. Otherwise returns null.
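     * <p>
     * A minimal usage sketch, assuming pgroup is a group in a writable H4File
     * (the dataset name and sizes are illustrative):
     * <pre>
     * long[] dims = {100, 50};
     * Datatype dtype = new H4Datatype(HDFConstants.DFNT_FLOAT32);
     * float[] data = new float[100 * 50];
     * H4SDS sds = H4SDS.create("mySDS", pgroup, dtype, dims, null, null, 0, null, data);
     * </pre>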
     */
    public static H4SDS create(
        String name,
        Group pgroup,
        Datatype type,
        long[] dims,
        long[] maxdims,
        long[] chunks,
        int gzip,
        Object fillValue,
        Object data) throws Exception
    {
        H4SDS dataset = null;
        if ((pgroup == null) ||
            (name == null) ||
            (dims == null)) {
            return null;
        }
        log.trace("create(): start");

        H4File file = (H4File)pgroup.getFileFormat();

        if (file == null) {
            return null;
        }

        String path = HObject.separator;
        if (!pgroup.isRoot()) {
            path = pgroup.getPath()+pgroup.getName()+HObject.separator;
        }
        // prepare the dataspace
        int tsize = 1;
        int rank = dims.length;
        int idims[] = new int[rank];
        int start[] = new int[rank];
        for (int i=0; i<rank; i++) {
            idims[i] = (int)dims[i];
            start[i] = 0;
            tsize *= idims[i];
        }

        // only the first element of the SDcreate parameter dim_sizes (i.e.,
        // the dimension of the lowest rank or the slowest-changing dimension)
        // can be assigned the value SD_UNLIMITED (or 0) to make the first
        // dimension unlimited.
        if ((maxdims != null) && (maxdims[0] <= 0)) {
            idims[0] = 0; // set to unlimited dimension.
        }

        int ichunks[] = null;
        if (chunks != null) {
            ichunks = new int[rank];
            for (int i=0; i<rank; i++) {
                ichunks[i] = (int)chunks[i];
            }
        }

        // unlimited cannot be used with chunking or compression for HDF 4.2.6 or earlier.
        if (idims[0] == 0 && (ichunks != null || gzip > 0)) {
            throw new HDFException("Unlimited cannot be used with chunking or compression");
        }

        int sdid = file.getSDAccessID();
        int sdsid = -1;
        int vgid = -1;
        // datatype
        int tid = type.toNative();

        if (tid >= 0) {
            try {
                sdsid = HDFLibrary.SDcreate(sdid, name, tid, rank, idims);
                // set fill value to zero.
                int vsize = HDFLibrary.DFKNTsize(tid);
                byte[] fill = new byte[vsize];
                for (int i=0; i<vsize; i++) {
                    fill[i] = 0;
                }
                HDFLibrary.SDsetfillvalue(sdsid, fill);

                // When we create a new dataset with an unlimited dimension,
                // we would have to write some data into the dataset, otherwise
                // the current dataset has zero dimension size.

                // The following lines are commented out because SDwritedata fails
                // when writing into a zero-dimension array. 05/25/05
                /*
                if (idims[0] == 0 && data == null)
                {
                    idims[0] = (int)dims[0];
                    data = new byte[tsize*vsize];
                }
                */
            }
            catch (Exception ex) {
                throw (ex);
            }
        }

        if (sdsid < 0) {
            throw (new HDFException("Unable to create the new dataset."));
        }

        HDFDeflateCompInfo compInfo = null;
        if (gzip > 0) {
            // set compression
            compInfo = new HDFDeflateCompInfo();
            compInfo.level = gzip;
            if (chunks == null)
                HDFLibrary.SDsetcompress(sdsid, HDFConstants.COMP_CODE_DEFLATE, compInfo);
        }

        if (chunks != null) {
            // set chunk
            HDFChunkInfo chunkInfo = new HDFChunkInfo(ichunks);
            int flag = HDFConstants.HDF_CHUNK;

            if (gzip > 0) {
                flag = HDFConstants.HDF_CHUNK | HDFConstants.HDF_COMP;
                chunkInfo = new HDFChunkInfo(ichunks, HDFConstants.COMP_CODE_DEFLATE, compInfo);
            }

            try {
                HDFLibrary.SDsetchunk(sdsid, chunkInfo, flag);
            }
            catch (Throwable err) {
                log.debug("create.SDsetchunk:", err);
                throw new HDFException("SDsetchunk failed.");
            }
        }

        if ((sdsid > 0) && (data != null)) {
            HDFLibrary.SDwritedata(sdsid, start, null, idims, data);
        }

        int ref = HDFLibrary.SDidtoref(sdsid);

        if (!pgroup.isRoot()) {
            // add the dataset to the parent group
            vgid = pgroup.open();
            if (vgid < 0) {
                if (sdsid > 0) {
                    HDFLibrary.SDendaccess(sdsid);
                }
                throw (new HDFException("Unable to open the parent group."));
            }

            HDFLibrary.Vaddtagref(vgid, HDFConstants.DFTAG_NDG, ref);

            pgroup.close(vgid);
        }

        try {
            if (sdsid > 0) {
                HDFLibrary.SDendaccess(sdsid);
            }
        }
        catch (Exception ex) {
            log.debug("create.SDendaccess:", ex);
        }

        long[] oid = {HDFConstants.DFTAG_NDG, ref};
        dataset = new H4SDS(file, name, path, oid);
        pgroup.addToMemberList(dataset);

        log.trace("create(): finish");
        return dataset;
    }

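    /**
     * Creates a new dataset without specifying a fill value.
     *
     * @see #create(String, Group, Datatype, long[], long[], long[], int, Object, Object)
     */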
    public static H4SDS create(
            String name,
            Group pgroup,
            Datatype type,
            long[] dims,
            long[] maxdims,
            long[] chunks,
            int gzip,
            Object data) throws Exception
    {
        return create(name, pgroup, type, dims, maxdims, chunks, gzip, null, data);
    }

    /**
     * Copies attributes from one SDS to another SDS.
     */
    private void copyAttribute(int srcdid, int dstdid)
    {
        try {
            String[] objName = {""};
            int[] sdInfo = {0, 0, 0};
            int[] tmpDim = new int[HDFConstants.MAX_VAR_DIMS];
            HDFLibrary.SDgetinfo(srcdid, objName, tmpDim, sdInfo);
            int numberOfAttributes = sdInfo[2];
            log.trace("copyAttribute(): numberOfAttributes={}", numberOfAttributes);

            boolean b = false;
            String[] attrName = new String[1];
            int[] attrInfo = {0, 0};
            for (int i=0; i<numberOfAttributes; i++) {
                attrName[0] = "";
                try {
                    b = HDFLibrary.SDattrinfo(srcdid, i, attrName, attrInfo);
                }
                catch (HDFException ex) {
                    b = false;
                }

                if (!b) {
                    continue;
                }

                // read attribute data from the source dataset
                byte[] attrBuff = new byte[attrInfo[1] * HDFLibrary.DFKNTsize(attrInfo[0])];
                try {
                    HDFLibrary.SDreadattr(srcdid, i, attrBuff);
                }
                catch (HDFException ex) {
                    attrBuff = null;
                }

                if (attrBuff == null) {
                    continue;
                }

                // attach the attribute to the destination dataset
                HDFLibrary.SDsetattr(dstdid, attrName[0], attrInfo[0], attrInfo[1], attrBuff);
            } // for (int i=0; i<numberOfAttributes; i++)
        }
        catch (Exception ex) {
            log.debug("copyAttribute:", ex);
        }
        log.trace("copyAttribute(): finish");
    }

    // Implementing DataFormat
    public List getMetadata(int... attrPropList) throws Exception {
        throw new UnsupportedOperationException("getMetadata(int... attrPropList) is not supported");
    }

}