001/*****************************************************************************
002 * Copyright by The HDF Group.                                               *
003 * Copyright by the Board of Trustees of the University of Illinois.         *
004 * All rights reserved.                                                      *
005 *                                                                           *
006 * This file is part of the HDF Java Products distribution.                  *
007 * The full copyright notice, including terms governing use, modification,   *
008 * and redistribution, is contained in the files COPYING and Copyright.html. *
009 * COPYING can be found at the root of the source code distribution tree.    *
010 * Or, see http://hdfgroup.org/products/hdf-java/doc/Copyright.html.         *
011 * If you do not have access to either file, you may request a copy from     *
012 * help@hdfgroup.org.                                                        *
013 ****************************************************************************/
014
015package hdf.object.h4;
016
017import java.util.List;
018import java.util.Vector;
019
020import hdf.hdflib.HDFChunkInfo;
021import hdf.hdflib.HDFCompInfo;
022import hdf.hdflib.HDFConstants;
023import hdf.hdflib.HDFDeflateCompInfo;
024import hdf.hdflib.HDFException;
025import hdf.hdflib.HDFLibrary;
026import hdf.object.Attribute;
027import hdf.object.Dataset;
028import hdf.object.Datatype;
029import hdf.object.FileFormat;
030import hdf.object.Group;
031import hdf.object.HObject;
032import hdf.object.ScalarDS;
033
034/**
035 * H4GRImage describes HDF4 general raster(GR) image and operations performed on
036 * the GR image. An HDF4 raster image is a two-dimension array of pixel values.
037 * <p>
038 * Every GR data set must contain the following components: image array, name,
039 * pixel type, and dimensions. The name, dimensions, and pixel type must be
040 * supplied by the user at the time the GR data set is defined.
041 * <p>
042 * An image array is a two-dimensional array of pixels. Each element in an image
043 * array corresponds to one pixel and each pixel can consist of a number of
044 * color component values or pixel components, e.g., Red-Green-Blue or RGB,
045 * Cyan-Magenta-Yellow-Black or CMYK, etc. Pixel components can be represented
046 * by different methods (8-bit lookup table or 24-bit direct representation) and
047 * may have different data types. The data type of pixel components and the number
048 * of components in each pixel are collectively known as the pixel type.
049 * <p>
050 * <b>How to Select a Subset</b>
051 * <p>
 * Dataset defines APIs for reading, writing and subsetting a dataset. No function is defined
053 * to select a subset of a data array. The selection is done in an implicit way.
054 * Function calls to dimension information such as getSelectedDims() return an array
055 * of dimension values, which is a reference to the array in the dataset object.
056 * Changes of the array outside the dataset object directly change the values of
057 * the array in the dataset object. It is like pointers in C.
058 * <p>
059 *
060 * The following is an example of how to make a subset. In the example, the dataset
061 * is a 4-dimension with size of [200][100][50][10], i.e.
062 * dims[0]=200; dims[1]=100; dims[2]=50; dims[3]=10; <br>
063 * We want to select every other data points in dims[1] and dims[2]
064 * <pre>
065     int rank = dataset.getRank();   // number of dimension of the dataset
066     long[] dims = dataset.getDims(); // the dimension sizes of the dataset
     long[] selected = dataset.getSelectedDims(); // the selected size of the dataset
     long[] start = dataset.getStartDims(); // the offset of the selection
069     long[] stride = dataset.getStride(); // the stride of the dataset
070     int[]  selectedIndex = dataset.getSelectedIndex(); // the selected dimensions for display
071
     // select dim1 and dim2 as 2D data for display, and slice through dim0
     selectedIndex[0] = 1;
     selectedIndex[1] = 2;
     selectedIndex[2] = 0;
076
077     // reset the selection arrays
078     for (int i=0; i<rank; i++) {
079         start[i] = 0;
080         selected[i] = 1;
081         stride[i] = 1;
082    }
083
084    // set stride to 2 on dim1 and dim2 so that every other data points are selected.
085    stride[1] = 2;
086    stride[2] = 2;
087
088    // set the selection size of dim1 and dim2
089    selected[1] = dims[1]/stride[1];
    selected[2] = dims[2]/stride[2];
091
    // when dataset.read() is called, the selection above will be used since
    // the dimension arrays are passed by reference. Changes of these arrays
    // outside the dataset object directly change the values of these arrays
095    // in the dataset object.
096
097 * </pre>
098 *
099 * <p>
100 * @version 1.1 9/4/2007
101 * @author Peter X. Cao
102 */
103public class H4GRImage extends ScalarDS
104{
105    /**
106     * 
107     */
108    private static final long serialVersionUID = 1029672744963360976L;
109
110    private final static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H4GRImage.class);
111
112    /**
113     * The list of attributes of this data object. Members of the list are
114     * instance of Attribute.
115     */
116    private List attributeList;
117
118    /**
119     * The GR interface identifier obtained from GRstart(fid)
120     */
121    private int grid;
122
123    /**
124     * The number of components in the raster image
125     */
126    private int ncomp;
127    
128    /** the datatype identifier */
129    private int datatypeID = -1;
130    
131    private int nAttributes = -1;
132    
133    
    /**
     * Creates a H4GRImage object with specific name and path and a null
     * object identifier.
     * <p>
     * @param theFile the HDF file.
     * @param name the name of this H4GRImage.
     * @param path the full path of this H4GRImage.
     */
    public H4GRImage(FileFormat theFile, String name, String path)
    {
        this(theFile, name, path, null);
    }
138
139    /**
140     * Creates a H4GRImage object with specific name, path, and object ID.
141     * <p>
142     * @param theFile the HDF file.
143     * @param name the name of this H4GRImage.
144     * @param path the full path of this H4GRImage.
145     * @param oid the unique identifier of this data object.
146     */
147    public H4GRImage(
148        FileFormat theFile,
149        String name,
150        String path,
151        long[] oid)
152    {
153        super (theFile, name, path, oid);
154        palette = null;
155        isImage = isImageDisplay = true;
156        unsignedConverted = false;
157        grid = ((H4File)getFileFormat()).getGRAccessID();
158    }
159    
160    /*
161     * (non-Javadoc)
162     * @see hdf.object.DataFormat#hasAttribute()
163     */
164    public boolean hasAttribute () 
165    { 
166        if (nAttributes < 0) {
167            grid = ((H4File)getFileFormat()).getGRAccessID();
168
169            int id = open();
170            String[] objName = {""};
171            int[] grInfo = new int[4]; //ncomp, data_type, interlace, and num_attrs
172            int[] idims = new int[2];
173            try {
174                HDFLibrary.GRgetiminfo(id, objName, grInfo, idims);
175                nAttributes = grInfo[3];
176            } 
177            catch (Exception ex) { 
178                nAttributes = 0;
179            }
180            close(id);
181        }
182        
183        return (nAttributes>0);
184    }
185
186    // To do: Implementing Dataset
187    @Override
188    public Dataset copy(Group pgroup, String dname, long[] dims, Object buff) throws Exception
189    {
190        Dataset dataset = null;
191        int srcdid=-1, dstdid=-1;
192        String path=null;
193        int[] count=null;
194
195        if (pgroup == null) {
196            return null;
197        }
198
199        if (pgroup.isRoot()) {
200            path = HObject.separator;
201        } 
202        else {
203            path = pgroup.getPath()+pgroup.getName()+HObject.separator;
204        }
205
206        srcdid = open();
207        if (srcdid < 0) {
208            return null;
209        }
210
211        if (dims != null) {
212            count = new int[2];
213            count[0] = (int)dims[0];
214            count[1] = (int)dims[1];
215        }
216
217        int[] grInfo = new int[4]; //ncomp, data_type, interlace and num_attrs
218        try {
219            String[] tmpName = {""};
220            int[] tmpDims = new int[2];
221            HDFLibrary.GRgetiminfo(srcdid, tmpName, grInfo, tmpDims);
222            if (count == null) {
223                count = tmpDims;
224            }
225        } 
226        catch (HDFException ex) {
227                log.debug("copy.GRgetiminfo:", ex);
228        }
229
230        int ncomp = grInfo[0];
231        int tid = grInfo[1];
232        int interlace = grInfo[2];
233        int numberOfAttributes = grInfo[3];
234        dstdid = HDFLibrary.GRcreate(
235            ((H4File)pgroup.getFileFormat()).getGRAccessID(),
236            dname, ncomp, tid, interlace, count);
237        if (dstdid < 0) {
238            return null;
239        }
240
241        int ref = HDFLibrary.GRidtoref(dstdid);
242        if (!pgroup.isRoot()) {
243            int vgid = pgroup.open();
244            HDFLibrary.Vaddtagref(vgid, HDFConstants.DFTAG_RIG, ref);
245            pgroup.close(vgid);
246        }
247
248        // read data from the source dataset
249        int[] start = {0, 0};
250        if (buff == null) {
251            buff = new byte[count[0]*count[1] * HDFLibrary.DFKNTsize(tid)];
252            HDFLibrary.GRreadimage(srcdid, start, null, count, buff);
253        }
254
255        // write the data into the destination dataset
256        HDFLibrary.GRwriteimage(dstdid, start, null, count, buff);
257
258        // copy palette
259        int pid = HDFLibrary.GRgetlutid(srcdid, 0);
260        int[] palInfo = new int[4];
261
262        HDFLibrary.GRgetlutinfo(pid, palInfo);
263        palInfo[1] = HDFConstants.DFNT_UINT8; // support unsigned byte only. Other type does not work
264        int palSize = palInfo[0]*palInfo[3];
265        byte[] palBuff = new byte[palSize];
266        HDFLibrary.GRreadlut(pid, palBuff);
267        pid = HDFLibrary.GRgetlutid(dstdid, 0);
268        HDFLibrary.GRwritelut(pid, palInfo[0], palInfo[1], palInfo[2], palInfo[3], palBuff);
269
270        // copy attributes from one object to the new object
271        copyAttribute(srcdid, dstdid, numberOfAttributes);
272
273        long[] oid = {HDFConstants.DFTAG_RIG, ref};
274        dataset = new H4GRImage(pgroup.getFileFormat(), dname, path, oid);
275
276        pgroup.addToMemberList(dataset);
277
278        close(srcdid);
279        try { 
280                HDFLibrary.GRendaccess(dstdid); 
281        }
282        catch (HDFException ex) {
283                log.debug("copy.GRendaccess:", ex);
284        }
285
286        return dataset;
287    }
288
    // ***** need to implement from ScalarDS *****
    // Not implemented: always returns null. getPalette() reads the first palette.
    @Override
    public byte[][] readPalette(int idx) { return null;}
292
    // ***** need to implement from ScalarDS *****
    // Not implemented: always returns null (no palette references are tracked).
    @Override
    public byte[] getPaletteRefs() { return null;}
296
297    // implementing ScalarDS
298    @Override
299    public Datatype getDatatype()
300    {
301        if (datatype == null) {
302            datatype = new H4Datatype(datatypeID);
303        }
304
305        return datatype;
306    }
307
308    // Implementing Dataset
309    @Override
310    public byte[] readBytes() throws HDFException
311    {
312        byte[] theData = null;
313
314        if (rank <=0 ) {
315            init();
316        }
317
318        int id = open();
319        if (id < 0) {
320            return null;
321        }
322
323        try {
324            // set the interlacing scheme for reading image data
325            HDFLibrary.GRreqimageil(id, interlace);
326            int datasize = getWidth()*getHeight()*ncomp;
327            int size = HDFLibrary.DFKNTsize(datatypeID)*datasize;
328            theData = new byte[size];
329            int[] start = {(int)startDims[0], (int)startDims[1]};
330            int[] select = {(int)selectedDims[0], (int)selectedDims[1]};
331
332            int[] stride = null;
333            if (selectedStride != null) {
334                stride = new int[rank];
335                for (int i=0; i<rank; i++) {
336                    stride[i] = (int)selectedStride[i];
337                }
338            }
339
340            HDFLibrary.GRreadimage(id, start, stride, select, theData);
341        } 
342        finally {
343            close(id);
344        }
345
346        return theData;
347    }
348
349    // ***** need to implement from DataFormat *****
350    @Override
351    public Object read() throws HDFException
352    {
353        Object theData = null;
354
355        if (rank <=0 ) {
356            init();
357        }
358
359        int id = open();
360        if (id < 0) {
361            return null;
362        }
363
364        try {
365            // set the interlacing scheme for reading image data
366            HDFLibrary.GRreqimageil(id, interlace);
367            int datasize = getWidth()*getHeight()*ncomp;
368
369            theData = H4Datatype.allocateArray(datatypeID, datasize);
370
371            if (theData != null) {
372                // assume external data files are located in the same directory as the main file.
373                HDFLibrary.HXsetdir(getFileFormat().getParent());
374                
375                int[] start = {(int)startDims[0], (int)startDims[1]};
376                int[] select = {(int)selectedDims[0], (int)selectedDims[1]};
377
378                int[] stride = null;
379                if (selectedStride != null) {
380                    stride = new int[rank];
381                    for (int i=0; i<rank; i++) {
382                        stride[i] = (int)selectedStride[i];
383                    }
384                }
385
386                HDFLibrary.GRreadimage(id, start, stride, select, theData);
387            }
388        } 
389        finally {
390            close(id);
391        }
392        
393        if ( (rank >1) && (selectedIndex[1]>selectedIndex[0]))
394            isDefaultImageOrder = false;
395        else
396            isDefaultImageOrder = true;        
397
398        return theData;
399    }
400
401    // Implementing DataFormat
402    @Override
403    public void write(Object buf) throws HDFException
404    {
405        if (buf == null) {
406            return;
407        }
408
409        int id = open();
410        if (id < 0) {
411            return;
412        }
413
414        int[] select = new int[rank];
415        int[] start = new int[rank];
416        for (int i=0; i<rank; i++) {
417            select[i] = (int)selectedDims[i];
418            start[i] = (int)startDims[i];
419        }
420
421        int[] stride = null;
422        if (selectedStride != null) {
423            stride = new int[rank];
424            for (int i=0; i<rank; i++) {
425                stride[i] = (int)selectedStride[i];
426            }
427        }
428
429        Object tmpData = buf;
430        try {
431            if ( isUnsigned && unsignedConverted) {
432                tmpData = convertToUnsignedC(buf);
433            }
434            // assume external data files are located in the same directory as the main file.
435            HDFLibrary.HXsetdir(getFileFormat().getParent());
436            
437            HDFLibrary.GRwriteimage(id, start, stride, select, tmpData);
438        } 
439        finally {
440            tmpData = null;
441            close(id);
442        }
443    }
444
445    // ***** need to implement from DataFormat *****
446    public List getMetadata() throws HDFException
447    {
448        if (attributeList != null) {
449            return attributeList;
450        }
451
452        int id = open();
453        String[] objName = {""};
454        int[] grInfo = new int[4]; //ncomp, data_type, interlace, and num_attrs
455        int[] idims = new int[2];
456        try {
457            HDFLibrary.GRgetiminfo(id, objName, grInfo, idims);
458            // mask off the litend bit
459            grInfo[1] = grInfo[1] & (~HDFConstants.DFNT_LITEND);
460            int n = grInfo[3];
461
462            if ((attributeList == null) && (n>0)) {
463                attributeList = new Vector(n, 5);
464            }
465
466            boolean b = false;
467            String[] attrName = new String[1];
468            int[] attrInfo = {0, 0}; // data_type, length
469            for (int i=0; i<n; i++) {
470                attrName[0] = "";
471                try {
472                    b = HDFLibrary.GRattrinfo(id, i, attrName, attrInfo);
473                    // mask off the litend bit
474                    attrInfo[0] = attrInfo[0] & (~HDFConstants.DFNT_LITEND);
475                } 
476                catch (HDFException ex) {
477                    b = false;
478                }
479
480                if (!b) {
481                    continue;
482                }
483
484                long[] attrDims = {attrInfo[1]};
485                Attribute attr = new Attribute(attrName[0], new H4Datatype(attrInfo[0]), attrDims);;
486                attributeList.add(attr);
487
488                Object buf = H4Datatype.allocateArray(attrInfo[0], attrInfo[1]);
489                try {
490                    HDFLibrary.GRgetattr(id, i, buf);
491                } 
492                catch (HDFException ex) {
493                    buf = null;
494                }
495
496                if (buf != null) {
497                    if ((attrInfo[0] == HDFConstants.DFNT_CHAR) ||
498                        (attrInfo[0] ==  HDFConstants.DFNT_UCHAR8)) {
499                        buf = Dataset.byteToString((byte[])buf, attrInfo[1]);
500                    }
501
502                    attr.setValue(buf);
503                }
504            } // for (int i=0; i<n; i++)
505        } 
506        finally {
507            close(id);
508        }
509
510        return attributeList;
511    }
512
513    // ***** need to implement from DataFormat *****
514    public void writeMetadata(Object info) throws Exception
515    {
516        // only attribute metadata is supported.
517        if (!(info instanceof Attribute)) {
518            return;
519        }
520
521        getFileFormat().writeAttribute(this, (Attribute)info, true);
522
523        if (attributeList == null) {
524            attributeList = new Vector();
525        }
526
527        attributeList.add(info);
528        nAttributes = attributeList.size();
529    }
530
    // ***** need to implement from DataFormat *****
    // No-op: removing attributes is not implemented for HDF4 GR images.
    public void removeMetadata(Object info) throws HDFException {;}
533
    // implementing DataFormat
    // No-op: updating attributes is intentionally disabled; only logs a trace.
    public void updateMetadata(Object info) throws Exception {
        log.trace("updateMetadata(): disabled");
    }
538
539    // Implementing HObject.
540    @Override
541    public int open()
542    {
543
544        int id = -1;
545        try {
546            int index = HDFLibrary.GRreftoindex(grid, (short)oid[1]);
547            id = HDFLibrary.GRselect(grid, index);
548        } 
549        catch (HDFException ex) {
550            id = -1;
551        }
552
553        return id;
554    }
555
556    // Implementing HObject.
557    @Override
558    public void close(int grid)
559    {
560        try { HDFLibrary.GRendaccess(grid); }
561        catch (HDFException ex) {;}
562    }
563
564    // Implementing Dataset.
565    @Override
566    public void init()
567    {
568        if (rank>0) {
569            return; // already called. Initialize only once
570        }
571
572        int id = open();
573        String[] objName = {""};
574        int[] grInfo = new int[4]; //ncomp, data_type, interlace and num_attrs
575        int[] idims = new int[2];
576        try {
577            HDFLibrary.GRgetiminfo(id, objName, grInfo, idims);
578            // mask off the litend bit
579            grInfo[1] = grInfo[1] & (~HDFConstants.DFNT_LITEND);
580            datatypeID = grInfo[1];
581
582            // get compression information
583            try {
584                HDFCompInfo compInfo = new HDFCompInfo();
585                boolean status = HDFLibrary.GRgetcompress(id, compInfo);
586                if (compInfo.ctype == HDFConstants.COMP_CODE_DEFLATE) {
587                    compression = "GZIP";
588                } 
589                else if (compInfo.ctype == HDFConstants.COMP_CODE_SZIP) {
590                    compression = "SZIP";
591                } 
592                else if (compInfo.ctype == HDFConstants.COMP_CODE_JPEG) {
593                    compression = "JPEG";
594                } 
595                else if (compInfo.ctype == HDFConstants.COMP_CODE_SKPHUFF) {
596                    compression = "SKPHUFF";
597                } 
598                else if (compInfo.ctype == HDFConstants.COMP_CODE_RLE) {
599                    compression = "RLE";
600                } 
601                else if (compInfo.ctype == HDFConstants.COMP_CODE_NBIT) {
602                    compression = "NBIT";
603                }
604            } 
605            catch (Exception ex) {
606                log.debug("get compression information:", ex);
607            }
608
609            // get chunk information
610            try {
611                HDFChunkInfo chunkInfo = new HDFChunkInfo();
612                int[] cflag = {HDFConstants.HDF_NONE};
613                boolean status = HDFLibrary.GRgetchunkinfo(id, chunkInfo, cflag);
614                if (cflag[0] == HDFConstants.HDF_NONE) {
615                    chunkSize = null;
616                } 
617                else {
618                    chunkSize = new long[rank];
619                    for (int i=0; i<rank; i++) {
620                        chunkSize[i] = chunkInfo.chunk_lengths[i];
621                    }
622                }
623            } 
624            catch (Exception ex) {
625                log.debug("get chunk information:", ex);
626            }
627
628        } 
629        catch (HDFException ex) {
630                log.debug("H4GRImage.init():", ex);
631        }
632        finally {
633            close(id);
634        }
635
636        isUnsigned = H4Datatype.isUnsigned(datatypeID);
637
638        if (idims == null) {
639            return;
640        }
641
642        ncomp = grInfo[0];
643        isTrueColor = (ncomp >= 3);
644        interlace = grInfo[2];
645        rank = 2; // support only two dimensional raster image
646
647        // data in HDF4 GR image is arranged as dim[0]=width, dim[1]=height.
648        // other image data is arranged as dim[0]=height, dim[1]=width.
649        selectedIndex[0] = 1;
650        selectedIndex[1] = 0;
651
652        dims = new long[rank];
653        startDims = new long[rank];
654        selectedDims = new long[rank];
655        for (int i=0; i<rank; i++) {
656            startDims[i] = 0;
657            selectedDims[i] = idims[i];
658            dims[i] = idims[i];
659        }
660
661    }
662
663    // ***** need to implement from ScalarDS *****
664    @Override
665    public byte[][] getPalette()
666    {
667        if (palette != null) {
668            return palette;
669        }
670
671        int id = open();
672        if (id < 0) {
673            return null;
674        }
675
676        // get palette info.
677        int lutid  = -1;
678        int[] lutInfo = new int[4]; //ncomp, datatype, interlace, num_entries
679        try {
680            // find the first palette.
681            // Todo: get all the palettes
682            lutid = HDFLibrary.GRgetlutid(id, 0);
683            HDFLibrary.GRgetlutinfo(lutid, lutInfo);
684        } 
685        catch (HDFException ex) {
686            close(id);
687            return null;
688        }
689
690        // check if there is palette data. HDFLibrary.GRgetlutinfo() sometimes
691        // return true even if there is no palette data, and check if it is a
692        // RGB with 256 colors
693        if ((lutInfo[0] != 3) || (lutInfo[2] < 0) | (lutInfo[3] != 256)) {
694            close(id);
695            return null;
696        }
697
698        // read palette data
699        boolean b = false;
700        byte[] pal = new byte[3*256];
701        try
702        {
703            HDFLibrary.GRreqlutil(id, lutInfo[2]);
704            b = HDFLibrary.GRreadlut(lutid, pal);
705        } 
706        catch (HDFException ex) {
707            b = false;
708        }
709
710        if (!b) {
711            close(id);
712            return null;
713        }
714
715        palette = new byte[3][256];
716        if (lutInfo[2] == HDFConstants.MFGR_INTERLACE_PIXEL) {
717            // color conponents are arranged in RGB, RGB, RGB, ...
718            for (int i=0; i<256; i++) {
719                palette[0][i] = pal[i*3];
720                palette[1][i] = pal[i*3+1];
721                palette[2][i] = pal[i*3+2];
722            }
723        }
724        else {
725            for (int i=0; i<256; i++) {
726                palette[0][i] = pal[i];
727                palette[1][i] = pal[256+i];
728                palette[2][i] = pal[512+i];
729            }
730        }
731
732        close(id);
733        return palette;
734    }
735
736    /**
737     * Returns the number of components of this image data.
738     */
739    public int getComponentCount()
740    {
741        return ncomp;
742    }
743
744    /**
745     * Creates a new image.
746     * @param name the name of the dataset to create.
747     * @param pgroup the parent group of the new dataset.
748     * @param type the datatype of the dataset.
749     * @param dims the dimension size of the dataset.
750     * @param maxdims the max dimension size of the dataset.
751     * @param chunks the chunk size of the dataset.
752     * @param gzip the level of the gzip compression.
753     * @param ncomp number of components of the image data.
754     * @param interlace the interlace mode.
755     * @param data the array of data values.
756     * @return the new image if successful. Otherwise returns null.
757     */
758    public static H4GRImage create(
759        String name,
760        Group pgroup,
761        Datatype type,
762        long[] dims,
763        long[] maxdims,
764        long[] chunks,
765        int gzip,
766        int ncomp,
767        int interlace,
768        Object data) throws Exception
769    {
770        H4GRImage dataset = null;
771        if ((name == null) ||
772            (pgroup == null) ||
773            (dims == null) ||
774            ((gzip>0) && (chunks==null))) {
775            return null;
776        }
777
778        H4File file = (H4File)pgroup.getFileFormat();
779        if (file == null) {
780            return null;
781        }
782
783        String path = HObject.separator;
784        if (!pgroup.isRoot()) {
785            path = pgroup.getPath()+pgroup.getName()+HObject.separator;
786        }
787        if (interlace == ScalarDS.INTERLACE_PLANE) {
788            interlace = HDFConstants.MFGR_INTERLACE_COMPONENT;
789        } 
790        else {
791            interlace = HDFConstants.MFGR_INTERLACE_PIXEL;
792        }
793
794        int rank = 2;
795        int idims[] = new int[rank];
796        int imaxdims[] = new int[rank];
797        int start[] = new int [rank];
798        for (int i=0; i<rank; i++) {
799            idims[i] = (int)dims[i];
800            if (maxdims != null) {
801                imaxdims[i] = (int)maxdims[i];
802            } 
803            else {
804                imaxdims[i] = idims[i];
805            }
806            start[i] = 0;
807        }
808
809        int ichunks[] = null;
810        if (chunks != null) {
811            ichunks = new int[rank];
812            for (int i=0; i<rank; i++) {
813                ichunks[i] = (int)chunks[i];
814            }
815        }
816
817        int grid = -1;
818        int vgid = -1;
819        int gid = (file).getGRAccessID();
820        int tid = type.toNative();
821
822        if(tid >= 0) {
823                try {
824                    grid = HDFLibrary.GRcreate(gid, name, ncomp, tid, interlace, idims);
825                } 
826                catch (Exception ex) {  
827                        throw (ex); 
828                }
829        }
830
831        if (grid < 0) {
832            throw (new HDFException("Unable to create the new dataset."));
833        }
834
835        if ((grid > 0) && (data != null)) {
836            HDFLibrary.GRwriteimage(grid, start, null, idims, data);
837        }
838
839        if (chunks != null) {
840            // set chunk
841            HDFChunkInfo chunkInfo = new HDFChunkInfo(ichunks);
842            HDFLibrary.GRsetchunk(grid, chunkInfo, HDFConstants.HDF_CHUNK);
843        }
844
845        if (gzip > 0) {
846            // set compression
847            int compType = HDFConstants.COMP_CODE_DEFLATE;
848            HDFDeflateCompInfo compInfo = new HDFDeflateCompInfo();
849            compInfo.level = gzip;
850            HDFLibrary.GRsetcompress(grid, compType, compInfo);
851        }
852
853        int ref = HDFLibrary.GRidtoref(grid);
854
855        if (!pgroup.isRoot()) {
856            // add the dataset to the parent group
857            vgid = pgroup.open();
858            if (vgid < 0) {
859                if (grid > 0) {
860                    HDFLibrary.GRendaccess(grid);
861                }
862                throw (new HDFException("Unable to open the parent group."));
863            }
864
865            HDFLibrary.Vaddtagref(vgid, HDFConstants.DFTAG_RI, ref);
866
867            pgroup.close(vgid);
868        }
869
870        try {  
871                if (grid > 0) {
872                        HDFLibrary.GRendaccess(grid);
873                } 
874        } 
875        catch (Exception ex) {
876                log.debug("create.GRendaccess:", ex);
877        }
878
879        long[] oid = {HDFConstants.DFTAG_NDG, ref};
880        dataset = new H4GRImage(file, name, path, oid);
881
882        if (dataset != null) {
883            pgroup.addToMemberList(dataset);
884        }
885
886        return dataset;
887    }
888
889    /**
890     * copy attributes from one GR image to another GR image
891     */
892    private void copyAttribute(int srcdid, int dstdid, int numberOfAttributes)
893    {
894        if (numberOfAttributes <=0 ) {
895            return;
896        }
897
898        try {
899            boolean b = false;
900            String[] attrName = new String[1];
901            int[] attrInfo = {0, 0};
902            for (int i=0; i<numberOfAttributes; i++) {
903                attrName[0] = "";
904                try {
905                    b = HDFLibrary.GRattrinfo(srcdid, i, attrName, attrInfo);
906                } 
907                catch (HDFException ex) { 
908                        b = false; 
909                }
910
911                if (!b) {
912                    continue;
913                }
914
915                // read attribute data from source dataset
916                byte[] attrBuff = new byte[attrInfo[1] * HDFLibrary.DFKNTsize(attrInfo[0])];
917                try { 
918                        HDFLibrary.GRgetattr(srcdid, i, attrBuff);
919                } 
920                catch (Exception ex) { 
921                        attrBuff = null; 
922                }
923
924                if (attrBuff == null) {
925                    continue;
926                }
927
928                // attach attribute to the destination dataset
929                HDFLibrary.GRsetattr(dstdid, attrName[0], attrInfo[0], attrInfo[1], attrBuff);
930            } // for (int i=0; i<numberOfAttributes; i++)
931        } 
932        catch (Exception ex) {
933                log.debug("copyAttribute:", ex);
934        }
935    }
936
    //Implementing DataFormat
    // Property-filtered attribute retrieval is not supported; use getMetadata().
    public List getMetadata(int... attrPropList) throws Exception {
        throw new UnsupportedOperationException("getMetadata(int... attrPropList) is not supported");
    }
941
942}