/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the files COPYING and Copyright.html. *
 * COPYING can be found at the root of the source code distribution tree.    *
 * Or, see http://hdfgroup.org/products/hdf-java/doc/Copyright.html.         *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.h5;

import java.lang.reflect.Array;
import java.text.DecimalFormat;
import java.util.List;
import java.util.Vector;

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
import hdf.hdf5lib.HDFNativeData;
import hdf.hdf5lib.exceptions.HDF5DataFiltersException;
import hdf.hdf5lib.exceptions.HDF5Exception;
import hdf.hdf5lib.structs.H5O_info_t;
import hdf.object.Attribute;
import hdf.object.CompoundDS;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
/**
 * The H5CompoundDS class defines an HDF5 dataset of compound datatypes.
 * <p>
 * An HDF5 dataset is an object composed of a collection of data elements, or raw data, and metadata that stores a
 * description of the data elements, data layout, and all other information necessary to write, read, and interpret the
 * stored data.
 * <p>
 * An HDF5 compound datatype is similar to a struct in C or a common block in Fortran: it is a collection of one or more
 * atomic types or small arrays of such types. Each member of a compound type has a name which is unique within that
 * type, and a byte offset that determines the first byte (smallest byte address) of that member in a compound datum.
 * <p>
 * For more information on HDF5 datasets and datatypes, read the
 * <a href="http://hdfgroup.org/HDF5/doc/UG/index.html">HDF5 User's Guide</a>.
 * <p>
 * There are two basic types of compound datasets: simple compound data and nested compound data. Members of a simple
 * compound dataset have atomic datatypes. Members of a nested compound dataset are compound or array of compound data.
 * <p>
 * Since Java does not understand C structures, we cannot directly read/write compound data values as in the following C
 * example.
 *
 * <pre>
 * typedef struct s1_t {
 *         int    a;
 *         float  b;
 *         double c;
 *         } s1_t;
 *     s1_t       s1[LENGTH];
 *     ...
 *     H5Dwrite(..., s1);
 *     H5Dread(..., s1);
 * </pre>
 *
 * Values of compound data fields are stored in a java.util.Vector object. We read and write compound data field by
 * field instead of as a whole structure. For the example above, the java.util.Vector object has three elements:
 * int[LENGTH], float[LENGTH] and double[LENGTH]. Since Java understands the primitive datatypes int, float and
 * double, we are able to read/write the compound data by field.
 *
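 * <p>
 * For example, a minimal sketch of reading such a dataset by field (the file and dataset names
 * here are illustrative, not part of this class):
 *
 * <pre>
 * H5File file = new H5File("test.h5", FileFormat.READ);
 * H5CompoundDS dset = (H5CompoundDS) file.get("/dset1");
 * java.util.List data = (java.util.List) dset.getData();
 * int[] a = (int[]) data.get(0);     // values of the first member
 * float[] b = (float[]) data.get(1); // values of the second member
 * </pre>
 *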
 * @version 1.1 9/4/2007
 * @author Peter X. Cao
 */
public class H5CompoundDS extends CompoundDS {
    /**
     * The serialization ID of this class.
     */
    private static final long serialVersionUID = -5968625125574032736L;

    private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5CompoundDS.class);

    /**
     * The list of attributes of this data object. Members of the list are instances of Attribute.
     */
    private List<Attribute> attributeList;

    private int nAttributes = -1;

    private H5O_info_t obj_info;
    /**
     * A list of names of all fields including nested fields.
     * <p>
     * The nested names are separated by CompoundDS.separator. For example, if compound dataset "A" has the following
     * nested structure,
     *
     * <pre>
     * A --&gt; m01
     * A --&gt; m02
     * A --&gt; nest1 --&gt; m11
     * A --&gt; nest1 --&gt; m12
     * A --&gt; nest1 --&gt; nest2 --&gt; m21
     * A --&gt; nest1 --&gt; nest2 --&gt; m22
     * i.e.
     * A = { m01, m02, nest1{m11, m12, nest2{ m21, m22}}}
     * </pre>
     *
     * then the flatNameList of compound dataset "A" will be {m01, m02, nest1--&gt;m11, nest1--&gt;m12,
     * nest1--&gt;nest2--&gt;m21, nest1--&gt;nest2--&gt;m22}.
     */
    private List<String> flatNameList;

    /**
     * A list of datatypes of all fields including nested fields.
     */
    private List<Integer> flatTypeList;

    /** Flag to indicate if the dataset is an external dataset. */
    private boolean isExternal = false;

    /**
     * Constructs an instance of an HDF5 compound dataset with the given file, dataset name and path.
     * <p>
     * The dataset object represents an existing dataset in the file. For example, new H5CompoundDS(file, "dset1",
     * "/g0/") constructs a dataset object that corresponds to the dataset "dset1" at group "/g0/".
     * <p>
     * This object is usually constructed at FileFormat.open(), which loads the file structure and object information
     * into a tree structure (TreeNode). It is rarely used elsewhere.
     *
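     * For example, a minimal sketch (the file name is illustrative):
     *
     * <pre>
     * H5File file = new H5File("test.h5", FileFormat.READ);
     * H5CompoundDS dset = new H5CompoundDS(file, "dset1", "/g0/");
     * </pre>
     *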
     * @param theFile
     *            the file that contains the data object.
     * @param theName
     *            the name of the data object, e.g. "dset".
     * @param thePath
     *            the full path of the data object, e.g. "/arrays/".
     */
    public H5CompoundDS(FileFormat theFile, String theName, String thePath) {
        this(theFile, theName, thePath, null);
    }

    /**
     * @deprecated Not for public use in the future.<br>
     *             Use {@link #H5CompoundDS(FileFormat, String, String)} instead.
     *
     * @param theFile
     *            the file that contains the data object.
     * @param theName
     *            the name of the data object, e.g. "dset".
     * @param thePath
     *            the full path of the data object, e.g. "/arrays/".
     * @param oid
     *            the oid of the data object.
     */
    @Deprecated
    public H5CompoundDS(FileFormat theFile, String theName, String thePath, long[] oid) {
        super(theFile, theName, thePath, oid);
        obj_info = new H5O_info_t(-1L, -1L, 0, 0, -1L, 0L, 0L, 0L, 0L, null, null, null);

        if ((oid == null) && (theFile != null)) {
            // retrieve the object ID
            try {
                byte[] ref_buf = H5.H5Rcreate(theFile.getFID(), this.getFullName(), HDF5Constants.H5R_OBJECT, -1);
                this.oid = new long[1];
                this.oid[0] = HDFNativeData.byteToLong(ref_buf, 0);
            }
            catch (Exception ex) {
                log.debug("constructor: H5Rcreate failed for {} in file id {}", this.getFullName(), theFile.getFID());
            }
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#open()
     */
    @Override
    public int open() {
        int did = -1;

        try {
            did = H5.H5Dopen(getFID(), getPath() + getName(), HDF5Constants.H5P_DEFAULT);
        }
        catch (HDF5Exception ex) {
            log.debug("Failed to open dataset {}", getPath() + getName());
            did = -1;
        }

        return did;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#close(int)
     */
    @Override
    public void close(int did) {
        if (did >= 0) {
            try {
                H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_LOCAL);
            }
            catch (Exception ex) {
                log.debug("close.H5Fflush:", ex);
            }
            try {
                H5.H5Dclose(did);
            }
            catch (HDF5Exception ex) {
                log.debug("close.H5Dclose:", ex);
            }
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#init()
     */
    @Override
    public void init() {
        if (rank > 0) {
            resetSelection();
            return; // already called. Initialize only once
        }
        log.trace("init() start");

        int did = -1, sid = -1, tid = -1, tclass = -1;
        flatNameList = new Vector<String>();
        flatTypeList = new Vector<Integer>();
        int[] memberTIDs = null;

        did = open();
        if (did >= 0) {
            // check if it is an external dataset
            int pid = -1;
            try {
                pid = H5.H5Dget_create_plist(did);
                int nfiles = H5.H5Pget_external_count(pid);
                isExternal = (nfiles > 0);
            }
            catch (Exception ex) {
                log.debug("check if it is an external dataset:", ex);
            }
            finally {
                try {
                    H5.H5Pclose(pid);
                }
                catch (Exception ex) {
                    log.debug("finally close:", ex);
                }
            }

            try {
                sid = H5.H5Dget_space(did);
                rank = H5.H5Sget_simple_extent_ndims(sid);
                tid = H5.H5Dget_type(did);
                tclass = H5.H5Tget_class(tid);

                int tmptid = 0;
                if (tclass == HDF5Constants.H5T_ARRAY) {
                    // array of compound
                    tmptid = tid;
                    tid = H5.H5Tget_super(tmptid);
                    try {H5.H5Tclose(tmptid);} catch (HDF5Exception ex) {}
                }

                if (rank == 0) {
                    // a scalar data point
                    rank = 1;
                    dims = new long[1];
                    dims[0] = 1;
                }
                else {
                    dims = new long[rank];
                    maxDims = new long[rank];
                    H5.H5Sget_simple_extent_dims(sid, dims, maxDims);
                    log.trace("init() rank={}, dims={}, maxDims={}", rank, dims, maxDims);
                }

                startDims = new long[rank];
                selectedDims = new long[rank];

                // initialize member information
                extractCompoundInfo(tid, "", flatNameList, flatTypeList);
                numberOfMembers = flatNameList.size();
                log.trace("init() numberOfMembers={}", numberOfMembers);

                memberNames = new String[numberOfMembers];
                memberTIDs = new int[numberOfMembers];
                memberTypes = new Datatype[numberOfMembers];
                memberOrders = new int[numberOfMembers];
                isMemberSelected = new boolean[numberOfMembers];
                memberDims = new Object[numberOfMembers];

                for (int i = 0; i < numberOfMembers; i++) {
                    isMemberSelected[i] = true;
                    memberTIDs[i] = ((Integer) flatTypeList.get(i)).intValue();
                    memberTypes[i] = new H5Datatype(memberTIDs[i]);
                    memberNames[i] = (String) flatNameList.get(i);
                    memberOrders[i] = 1;
                    memberDims[i] = null;
                    log.trace("init()[{}] memberNames[{}]={}, memberTIDs[{}]={}, memberTypes[{}]={}",
                            i, i, memberNames[i], i, memberTIDs[i], i, memberTypes[i].getDatatypeDescription());

                    try {
                        tclass = H5.H5Tget_class(memberTIDs[i]);
                    }
                    catch (HDF5Exception ex) {
                        log.debug("Exception memberTIDs[{}]:", i, ex);
                    }

                    if (tclass == HDF5Constants.H5T_ARRAY) {
                        int n = H5.H5Tget_array_ndims(memberTIDs[i]);
                        long mdim[] = new long[n];
                        H5.H5Tget_array_dims(memberTIDs[i], mdim);
                        int idim[] = new int[n];
                        for (int j = 0; j < n; j++)
                            idim[j] = (int) mdim[j];
                        memberDims[i] = idim;
                        tmptid = H5.H5Tget_super(memberTIDs[i]);
                        memberOrders[i] = (H5.H5Tget_size(memberTIDs[i]) / H5.H5Tget_size(tmptid));
                        try {
                            H5.H5Tclose(tmptid);
                        }
                        catch (HDF5Exception ex) {
                            log.debug("close temp of memberTIDs[{}]:", i, ex);
                        }
                    }
                } // for (int i=0; i<numberOfMembers; i++)
            }
            catch (HDF5Exception ex) {
                numberOfMembers = 0;
                memberNames = null;
                memberTypes = null;
                memberOrders = null;
                log.debug("init():", ex);
            }
            finally {
                try {
                    H5.H5Tclose(tid);
                }
                catch (HDF5Exception ex2) {
                    log.debug("finally close:", ex2);
                }
                try {
                    H5.H5Sclose(sid);
                }
                catch (HDF5Exception ex2) {
                    log.debug("finally close:", ex2);
                }

                if (memberTIDs != null) {
                    for (int i = 0; i < memberTIDs.length; i++) {
                        try {
                            H5.H5Tclose(memberTIDs[i]);
                        }
                        catch (Exception ex) {
                            log.debug("finally close:", ex);
                        }
                    }
                }
            }

            log.debug("init() close dataset");
            close(did);
        }
        else {
            log.debug("init() failed to open dataset");
        }

        resetSelection();
        log.debug("init() end");
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.DataFormat#hasAttribute()
     */
    public boolean hasAttribute() {
        obj_info.num_attrs = nAttributes;

        if (obj_info.num_attrs < 0) {
            int did = open();
            if (did >= 0) {
                try {
                    obj_info = H5.H5Oget_info(did);
                    nAttributes = (int) obj_info.num_attrs;
                }
                catch (Exception ex) {
                    obj_info.num_attrs = 0;
                    log.debug("hasAttribute: get object info:", ex);
                }
                close(did);
            }
            else {
                log.debug("could not open dataset");
            }
        }

        return (obj_info.num_attrs > 0);
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#getDatatype()
     */
    @Override
    public Datatype getDatatype() {
        if (datatype == null) {
            log.trace("H5CompoundDS getDatatype: datatype == null");
            datatype = new H5Datatype(Datatype.CLASS_COMPOUND, -1, -1, -1);
        }

        return datatype;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#clear()
     */
    @Override
    public void clear() {
        super.clear();

        if (attributeList != null) {
            ((Vector<Attribute>) attributeList).setSize(0);
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#readBytes()
     */
    @Override
    public byte[] readBytes() throws HDF5Exception {
        byte[] theData = null;

        log.trace("H5CompoundDS readBytes: start");
        if (rank <= 0) {
            init();
        }

        int did = open();
        if (did >= 0) {
            int fspace = -1, mspace = -1, tid = -1;

            try {
                long[] lsize = { 1 };
                for (int j = 0; j < selectedDims.length; j++) {
                    lsize[0] *= selectedDims[j];
                }

                fspace = H5.H5Dget_space(did);
                mspace = H5.H5Screate_simple(rank, selectedDims, null);

                // set the rectangle selection
                // HDF5 bug: for scalar dataset, H5Sselect_hyperslab gives core dump
                if (rank * dims[0] > 1) {
                    H5.H5Sselect_hyperslab(fspace, HDF5Constants.H5S_SELECT_SET, startDims, selectedStride,
                            selectedDims, null); // set block to 1
                }

                tid = H5.H5Dget_type(did);
                int size = H5.H5Tget_size(tid) * (int) lsize[0];
                log.trace("H5CompoundDS readBytes: size = {}", size);
                theData = new byte[size];
                H5.H5Dread(did, tid, mspace, fspace, HDF5Constants.H5P_DEFAULT, theData);
            }
            finally {
                try {
                    H5.H5Sclose(fspace);
                }
                catch (Exception ex2) {
                    log.debug("finally close:", ex2);
                }
                try {
                    H5.H5Sclose(mspace);
                }
                catch (Exception ex2) {
                    log.debug("finally close:", ex2);
                }
                try {
                    H5.H5Tclose(tid);
                }
                catch (HDF5Exception ex2) {
                    log.debug("finally close:", ex2);
                }
                close(did);
            }
        }
        log.trace("H5CompoundDS readBytes: finish");

        return theData;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#read()
     */
    @Override
    public Object read() throws Exception {
        List<Object> list = null;
        Object member_data = null;
        String member_name = null;
        int member_class = -1;
        int member_base_class = -1;
        int member_size = 0;
        int atom_tid = -1;
        int did = -1;
        int tid = -1;
        int spaceIDs[] = { -1, -1 }; // spaceIDs[0]=mspace, spaceIDs[1]=fspace

        log.trace("H5CompoundDS read: start");
        if (rank <= 0) {
            init(); // read data information into memory
        }

        if (numberOfMembers <= 0) {
            return null; // this compound dataset does not have any member
        }

        if (isExternal) {
            String pdir = this.getFileFormat().getAbsoluteFile().getParent();

            if (pdir == null) {
                pdir = ".";
            }
            System.setProperty("user.dir", pdir); // H5.H5Dchdir_ext(pdir);
        }

        long[] lsize = { 1 };
        log.trace("H5CompoundDS read: open dataset");
        did = open();
        if (did >= 0) {
            list = new Vector<Object>(flatNameList.size());
            Vector<Integer> atomicList = new Vector<Integer>();
            try {
                lsize[0] = selectHyperslab(did, spaceIDs);
                log.trace("H5CompoundDS read: opened dataset size {} for {}", lsize[0], nPoints);

                if (lsize[0] == 0) {
                    throw new HDF5Exception("No data to read.\nEither the dataset or the selected subset is empty.");
                }

                if (log.isDebugEnabled()) {
                    // check if storage space is allocated
                    try {
                        long ssize = H5.H5Dget_storage_size(did);
                        log.trace("Storage space allocated = {}.", ssize);
                    }
                    catch (Exception ex) {
                        log.debug("check if storage space is allocated:", ex);
                    }
                }

                // read each member's data into a byte array, then convert
                // it to its proper type, such as int, long, float, etc.
                int n = flatNameList.size();
                tid = H5.H5Dget_type(did);
                log.trace("H5CompoundDS read: H5Tget_super");
                int tclass = H5.H5Tget_class(tid);
                if (tclass == HDF5Constants.H5T_ARRAY) {
                    // array of compound
                    int tmptid = -1;
                    try {
                        tmptid = tid;
                        tid = H5.H5Tget_super(tmptid);

                        // ARRAY of COMPOUND currently unsupported
                        if (H5.H5Tget_class(tid) == HDF5Constants.H5T_COMPOUND) {
                            return null;
                        }
                    }
                    finally {
                        try {H5.H5Tclose(tmptid);}
                        catch (Exception ex2) {log.debug("finally close:", ex2);}
                    }
                }

                extractCompoundInfo(tid, null, null, atomicList);

                log.trace("H5CompoundDS read: foreach nMembers={}", n);
                for (int i = 0; i < n; i++) {
                    boolean isVL = false;

                    if (!isMemberSelected[i]) {
                        log.debug("H5CompoundDS read: Member[{}] is not selected", i);
                        continue; // the field is not selected
                    }

                    member_name = new String(memberNames[i]);

                    atom_tid = ((Integer) atomicList.get(i)).intValue();
                    try {
                        member_class = H5.H5Tget_class(atom_tid);
                        member_size = H5.H5Tget_size(atom_tid);
                        member_data = H5Datatype.allocateArray(atom_tid, (int) lsize[0]);
                    }
                    catch (OutOfMemoryError err) {
                        member_data = null;
                        throw new HDF5Exception("Out Of Memory.");
                    }
                    catch (Exception ex) {
                        member_data = null;
                    }
                    log.trace("H5CompoundDS read: {} Member[{}] is class {} of size={}", member_name, i, member_class, member_size);

                    if (member_data == null || H5.H5Tequal(atom_tid, HDF5Constants.H5T_STD_REF_DSETREG)) {
                        String[] nullValues = new String[(int) lsize[0]];
                        String errorStr = "*unsupported*";
                        for (int j = 0; j < lsize[0]; j++) {
                            nullValues[j] = errorStr;
                        }
                        list.add(nullValues);

                        log.trace("read(): {} Member[{}] of class {} is unsupported.", member_name, i, member_class);
                        continue;
                    }
                    else if (member_class == HDF5Constants.H5T_ARRAY) {
                        int tmptid = -1;
                        try {
                            tmptid = H5.H5Tget_super(atom_tid);
                            member_base_class = H5.H5Tget_class(tmptid);

                            isVL = isVL || H5.H5Tis_variable_str(tmptid);
                            isVL = isVL || H5.H5Tdetect_class(tmptid, HDF5Constants.H5T_VLEN);

                            if (member_base_class == HDF5Constants.H5T_COMPOUND) {
                                try {
                                    member_data = H5Datatype.allocateArray(tmptid, member_size * (int) lsize[0]);
                                }
                                catch (OutOfMemoryError err) {
                                    member_data = null;
                                    throw new HDF5Exception("Out Of Memory.");
                                }
                                catch (Exception ex) {
                                    log.trace("read(): Error allocating array for Compound: ", ex);
                                    member_data = null;
                                }
                                log.trace("H5CompoundDS read: {} Member[{}] is class {} of size={}", member_name, i, member_base_class, member_size);
                            }
                        }
                        catch (Exception ex) {
                            log.debug("Exception H5T_ARRAY id or class failure[{}]:", i, ex);
                            continue;
                        }
                        finally {
                            try {
                                H5.H5Tclose(tmptid);
                            }
                            catch (Exception ex) {
                                log.debug("finally close[{}]:", i, ex);
                            }
                        }

                        // cannot deal with ARRAY of ARRAY, support only ARRAY of atomic types
                        if (member_base_class == HDF5Constants.H5T_ARRAY) {
                            String[] nullValues = new String[(int) lsize[0]];
                            String errorStr = "*unsupported*";
                            for (int j = 0; j < lsize[0]; j++) {
                                nullValues[j] = errorStr;
                            }
                            list.add(nullValues);
                            continue;
                        }
                    }

                    if (member_data != null) {
                        int comp_tid = -1;
                        int compInfo[] = { member_class, member_size, 0 };
                        try {
                            comp_tid = createCompoundFieldType(atom_tid, member_name, compInfo);
                        }
                        catch (HDF5Exception ex) {
                            String[] nullValues = new String[(int) lsize[0]];
                            for (int j = 0; j < lsize[0]; j++) {
                                nullValues[j] = "*unsupported*";
                            }
                            list.add(nullValues);
                            log.debug("H5CompoundDS read: {} Member[{}] createCompoundFieldType failure:", member_name, i, ex);
                            continue;
                        }
                        try {
                            // See BUG#951 isVL = H5.H5Tdetect_class(atom_tid,
                            // HDF5Constants.H5T_VLEN);
                            isVL = isVL || H5.H5Tis_variable_str(atom_tid);
                            isVL = isVL || H5.H5Tdetect_class(atom_tid, HDF5Constants.H5T_VLEN);
                        }
                        catch (Exception ex) {
                            log.debug("H5CompoundDS read: detection of varstr:", ex);
                            isVL = false;
                        }
                        try {
                            log.trace("H5CompoundDS read: H5Dread({}) did={} spaceIDs[0]={} spaceIDs[1]={} isVL={}", comp_tid, did, spaceIDs[0], spaceIDs[1], isVL);
                            if (isVL) {
                                H5.H5DreadVL(did, comp_tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT,
                                        (Object[]) member_data);
                            }
                            else if (member_base_class == HDF5Constants.H5T_COMPOUND) {
                                H5.H5Dread(did, comp_tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT,
                                        (byte[]) member_data, true);
                            }
                            else {
                                H5.H5Dread(did, comp_tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, member_data);
                            }
                        }
                        catch (HDF5DataFiltersException exfltr) {
                            log.debug("H5CompoundDS read: {} Member[{}] read failure:", member_name, i, exfltr);
                            throw new Exception("Filter not available exception: " + exfltr.getMessage(), exfltr);
                        }
                        catch (HDF5Exception ex2) {
                            String[] nullValues = new String[(int) lsize[0]];
                            for (int j = 0; j < lsize[0]; j++) {
                                nullValues[j] = "*unsupported*";
                            }
                            list.add(nullValues);
                            log.debug("H5CompoundDS read: {} Member[{}] read failure:", member_name, i, ex2);
                            continue;
                        }
                        finally {
                            try {H5.H5Tclose(comp_tid);}
                            catch (Exception ex3) {log.debug("H5CompoundDS read: finally close:", ex3);}
                        }

                        if (!isVL) {
                            String cname = member_data.getClass().getName();
                            char dname = cname.charAt(cname.lastIndexOf("[") + 1);
                            log.trace("H5CompoundDS read(!isVL): {} Member[{}] is cname {} of dname={} convert={}", member_name, i, cname, dname, convertByteToString);

                            if ((member_class == HDF5Constants.H5T_STRING) && convertByteToString) {
                                if (dname == 'B') {
                                    member_data = byteToString((byte[]) member_data, member_size / memberOrders[i]);
                                    log.trace("H5CompoundDS read(!isVL)convertByteToString: {} Member[{}]", member_name, i);
                                }
                            }
                            else if (member_class == HDF5Constants.H5T_REFERENCE) {
                                if (dname == 'B') {
                                    member_data = HDFNativeData.byteToLong((byte[]) member_data);
                                    log.trace("H5CompoundDS read(!isVL)convertByteToLong: {} Member[{}]", member_name, i);
                                }
                            }
                            else if (compInfo[2] != 0) {
                                member_data = Dataset.convertFromUnsignedC(member_data, null);
                                log.trace("H5CompoundDS read(!isVL)convertFromUnsignedC: {} Member[{}]", member_name, i);
                            }
                            else if ((member_class == HDF5Constants.H5T_ENUM || member_base_class == HDF5Constants.H5T_ENUM)
                                    && enumConverted) {
                                try {
                                    String[] strs = null;

                                    if (member_class == HDF5Constants.H5T_ARRAY) {
                                        int base_tid = -1;

                                        try {
                                            base_tid = H5.H5Tget_super(atom_tid);
                                            strs = H5Datatype.convertEnumValueToName(base_tid, member_data, null);
                                        }
                                        catch (Exception ex) {
                                            log.debug("read(): convertEnumValueToName failure: ", ex);
                                        }
                                        finally {
                                            try {
                                                H5.H5Tclose(base_tid);
                                            }
                                            catch (Exception ex) {
                                                log.debug("read(): H5Tclose(base_tid {}) failure: ", base_tid, ex);
                                            }
                                        }
                                    }
                                    else {
                                        strs = H5Datatype.convertEnumValueToName(atom_tid, member_data, null);
                                    }

                                    if (strs != null) {
                                        member_data = strs;
                                        log.trace("H5CompoundDS read(!isVL)convertEnumValueToName: {} Member[{}]", member_name, i);
                                        log.trace("H5CompoundDS read(!isVL)convertEnumValueToName:data={}", strs[0]);
                                    }
                                }
                                catch (Exception ex) {
                                    log.debug("read: H5Datatype.convertEnumValueToName:", ex);
                                }
                            }
                            else if (member_class == HDF5Constants.H5T_ARRAY && member_base_class == HDF5Constants.H5T_COMPOUND) {
                                // Since compounds are read into memory as a byte array, discover each member
                                // type and size and convert the byte array to the correct type before adding
                                // it to the list

                                int numDims = H5.H5Tget_array_ndims(atom_tid);
                                long[] arrayDims = new long[numDims];
                                H5.H5Tget_array_dims(atom_tid, arrayDims);
                                int numberOfCompounds = (int) arrayDims[0] * (int) lsize[0];
                                int compoundSize = (member_size * (int) lsize[0]) / numberOfCompounds;

                                Object current_data = new Object[numberOfCompounds];

                                int base_tid = -1;
                                long memberOffsets[] = null;
                                long memberLengths[] = null;
                                int memberTypes[] = null;
                                int numberOfMembers;

                                try {
                                    base_tid = H5.H5Tget_super(atom_tid);
                                    numberOfMembers = H5.H5Tget_nmembers(base_tid);
                                    memberOffsets = new long[numberOfMembers];
                                    memberLengths = new long[numberOfMembers];
                                    memberTypes = new int[numberOfMembers];

                                    for (int j = 0; j < numberOfMembers; j++) {
                                        memberOffsets[j] = H5.H5Tget_member_offset(base_tid, j);
                                        memberTypes[j] = H5.H5Tget_member_type(base_tid, j);
                                    }

                                    for (int j = 0; j < numberOfMembers; j++) {
                                        if (j < numberOfMembers - 1) {
                                            memberLengths[j] = (memberOffsets[j + 1] - memberOffsets[j]);
                                        }
                                        else {
                                            memberLengths[j] = (compoundSize - memberOffsets[j]);
                                        }
                                    }

                                    for (int j = 0; j < numberOfCompounds; j++) {
                                        Object field_data = new Object[numberOfMembers];

                                        for (int k = 0; k < numberOfMembers; k++) {
                                            Object converted = convertCompoundByteMember((byte[]) member_data, memberTypes[k], memberOffsets[k] + (compoundSize * j), memberLengths[k]);

                                            ((Object[]) field_data)[k] = Array.get(converted, 0);
                                        }

                                        ((Object[]) current_data)[j] = field_data;
                                    }
                                }
                                catch (Exception ex) {
                                    log.debug("Convert Array of Compounds failure: ", ex);
                                    continue;
                                }
                                finally {
                                    // memberTypes may still be null if H5Tget_super or H5Tget_nmembers failed
                                    if (memberTypes != null) {
                                        for (int j = 0; j < memberTypes.length; j++) {
                                            try {
                                                H5.H5Tclose(memberTypes[j]);
                                            }
                                            catch (Exception ex) {
                                                log.debug("finally close[{}]: ", i, ex);
                                            }
                                        }
                                    }
                                    try {
                                        H5.H5Tclose(base_tid);
                                    }
                                    catch (Exception ex) {
                                        log.debug("finally close[{}]:", i, ex);
                                    }
                                }

                                list.add(current_data);
                                continue;
                            }
                        }

                        list.add(member_data);
                    } // if (member_data != null)
                } // end of for (int i=0; i<num_members; i++)
            }
            finally {
                try {
                    if (HDF5Constants.H5S_ALL != spaceIDs[0])
                        H5.H5Sclose(spaceIDs[0]);
                }
                catch (Exception ex2) {
                    log.debug("read: finally close:", ex2);
                }
                try {
                    if (HDF5Constants.H5S_ALL != spaceIDs[1])
                        H5.H5Sclose(spaceIDs[1]);
                }
                catch (Exception ex2) {
                    log.debug("read: finally close:", ex2);
                }

                // close atomic types
                int ntypes = atomicList.size();
                for (int i = 0; i < ntypes; i++) {
                    atom_tid = ((Integer) atomicList.get(i)).intValue();
                    try {
                        H5.H5Tclose(atom_tid);
                    }
                    catch (Exception ex2) {
                        log.debug("finally close:", ex2);
                    }
                }
                try {H5.H5Tclose(tid);}
                catch (Exception ex2) {log.debug("finally close:", ex2);}

                close(did);
            }
        }

        log.trace("H5CompoundDS read: finish");
        return list;
    }

    /**
     * Writes the given data buffer into this dataset in a file.
     * <p>
     * The data buffer is a vector that contains the data values of compound fields. The data is written into the file
     * field by field.
     *
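     * For example, given an H5CompoundDS instance dset whose first two selected members are an
     * int field and a float field, a minimal sketch (the values shown are illustrative):
     *
     * <pre>
     * java.util.Vector data = new java.util.Vector();
     * data.add(new int[] { 1, 2, 3 });      // values of the first selected member
     * data.add(new float[] { 1f, 2f, 3f }); // values of the second selected member
     * dset.write(data);
     * </pre>
     *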
     * @param buf
     *            The vector that contains the data values of compound fields.
     *
     * @throws HDF5Exception
     *             If there is an error at the HDF5 library level.
     */
    @Override
    public void write(Object buf) throws HDF5Exception {
        log.trace("H5CompoundDS write: start");
        int did = -1;
        int tid = -1;
        int spaceIDs[] = { -1, -1 }; // spaceIDs[0]=mspace, spaceIDs[1]=fspace
        Object member_data = null;
        String member_name = null;
        int atom_tid = -1, member_class = -1, member_size = 0;

        if ((buf == null) || (numberOfMembers <= 0) || !(buf instanceof List)) {
            return;
        }
        List<?> list = (List<?>) buf;

        long[] lsize = { 1 };
        did = open();
        log.trace("H5CompoundDS write: dataset opened");
        if (did >= 0) {
            Vector<Integer> atomicList = new Vector<Integer>();
            try {
                lsize[0] = selectHyperslab(did, spaceIDs);
                int tmptid = H5.H5Dget_type(did);

                // write data field by field; convert each member's data to the
                // form expected by the library before writing
                int idx = 0;
                int n = flatNameList.size();
                boolean isEnum = false;

                try {
                    extractCompoundInfo(tmptid, null, null, atomicList);
                }
                finally {
                    try {H5.H5Tclose(tmptid);}
                    catch (Exception ex2) {log.debug("finally close:", ex2);}
                }
                for (int i = 0; i < n; i++) {
                    log.trace("H5CompoundDS write: Member[{}] of {}", i, n);
                    if (!isMemberSelected[i]) {
                        log.debug("H5CompoundDS write: Member[{}] is not selected", i);
                        continue; // the field is not selected
                    }

                    member_name = new String(memberNames[i]);
                    atom_tid = ((Integer) atomicList.get(i)).intValue();
                    member_data = list.get(idx++);

                    if (member_data == null) {
                        log.debug("H5CompoundDS write: Member[{}] data is null", i);
                        continue;
                    }

                    boolean isVL = false;
                    try {
                        isVL = (H5.H5Tget_class(atom_tid) == HDF5Constants.H5T_VLEN || H5.H5Tis_variable_str(atom_tid));
                        log.debug("H5CompoundDS write: Member[{}] isVL={}", i, isVL);
                    }
                    catch (Exception ex) {
                        log.debug("isVL:", ex);
                    }

                    try {
                        member_class = H5.H5Tget_class(atom_tid);
                        member_size = H5.H5Tget_size(atom_tid);
                        isEnum = (member_class == HDF5Constants.H5T_ENUM);
                    }
                    catch (Exception ex) {
                        log.debug("H5CompoundDS write: member class - size:", ex);
                    }
                    log.trace("H5CompoundDS write: {} Member[{}] is class {} of size={}", member_name, i, member_class, member_size);

                    Object tmpData = member_data;

                    int compInfo[] = { member_class, member_size, 0 };
                    try {
                        tid = createCompoundFieldType(atom_tid, member_name, compInfo);
                        log.debug("H5CompoundDS write: {} Member[{}] compInfo[class]={} compInfo[size]={} compInfo[unsigned]={}",
                                member_name, i, compInfo[0], compInfo[1], compInfo[2]);
                        if (isVL) {
                            H5.H5DwriteString(did, tid,
                                    spaceIDs[0], spaceIDs[1],
                                    HDF5Constants.H5P_DEFAULT, (String[]) tmpData);
                        }
                        else {
                            if (compInfo[2] != 0) {
                                // check if we need to convert unsigned integer data
                                int tsize = H5.H5Tget_size(tid);
                                String cname = member_data.getClass().getName();
                                char dname = cname.charAt(cname.lastIndexOf("[") + 1);
                                boolean doConversion = (((tsize == 1) && (dname == 'S'))
                                        || ((tsize == 2) && (dname == 'I')) || ((tsize == 4) && (dname == 'J')));

                                tmpData = member_data;
                                if (doConversion) {
                                    tmpData = convertToUnsignedC(member_data, null);
                                    log.trace("H5CompoundDS write: {} Member[{}] convertToUnsignedC", member_name, i);
                                }
                            }
                            else if ((member_class == HDF5Constants.H5T_STRING) && (Array.get(member_data, 0) instanceof String)) {
                                tmpData = stringToByte((String[]) member_data, member_size);
                                log.trace("H5CompoundDS write: {} Member[{}] stringToByte", member_name, i);
                            }
                            else if (isEnum && (Array.get(member_data, 0) instanceof String)) {
                                tmpData = H5Datatype.convertEnumNameToValue(atom_tid, (String[]) member_data, null);
                                log.trace("H5CompoundDS write: {} Member[{}] convertEnumNameToValue", member_name, i);
                            }

                            if (tmpData != null) {
                                // BUG!!! does not write nested compound data and no
                                // exception was caught
                                // need to check if it is a java error or C library
                                // error
                                log.debug("H5CompoundDS write: H5Dwrite warning - does not write nested compound data");
                                H5.H5Dwrite(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, tmpData);
                            }
                        }
                    }
                    catch (Exception ex1) {
                        log.debug("write: H5Dwrite process failure:", ex1);
                    }
                    finally {
                        try {
                            H5.H5Tclose(tid);
                        }
                        catch (Exception ex2) {
                            log.debug("write: finally close:", ex2);
                        }
                    }
                } // end of for (int i=0; i<num_members; i++)
            }
            finally {
                try {
                    if (HDF5Constants.H5S_ALL != spaceIDs[0])
                        H5.H5Sclose(spaceIDs[0]);
                }
                catch (Exception ex2) {
                    log.debug("write: finally close:", ex2);
                }
                try {
                    if (HDF5Constants.H5S_ALL != spaceIDs[1])
                        H5.H5Sclose(spaceIDs[1]);
                }
                catch (Exception ex2) {
                    log.debug("write: finally close:", ex2);
                }

                // close atomic types
                int ntypes = atomicList.size();
                for (int i = 0; i < ntypes; i++) {
                    atom_tid = ((Integer) atomicList.get(i)).intValue();
                    try {
                        H5.H5Tclose(atom_tid);
                    }
                    catch (Exception ex2) {
                        log.debug("write: finally close:", ex2);
                    }
                }
            }
            close(did);
        }
        log.trace("H5CompoundDS write: finish");
    }

    /**
     * Sets up the hyperslab selection.
     *
     * @param did
     *            IN dataset ID
     * @param spaceIDs
     *            IN/OUT memory and file space IDs -- spaceIDs[0]=mspace, spaceIDs[1]=fspace
     *
     * @return the total number of data points selected
     *
     * @throws HDF5Exception
     *             If there is an error at the HDF5 library level.
     */
    private long selectHyperslab(int did, int[] spaceIDs) throws HDF5Exception {
        long lsize = 1;

        boolean isAllSelected = true;
        for (int i = 0; i < rank; i++) {
            lsize *= selectedDims[i];
            if (selectedDims[i] < dims[i]) {
                isAllSelected = false;
            }
        }

        if (isAllSelected) {
            spaceIDs[0] = HDF5Constants.H5S_ALL;
            spaceIDs[1] = HDF5Constants.H5S_ALL;
        }
        else {
            spaceIDs[1] = H5.H5Dget_space(did);

            // When a 1D dataspace is used for a chunked dataset, reading is very slow.
            // It is a known problem of the HDF5 library for chunked datasets.
            // mspace = H5.H5Screate_simple(1, lsize, null);
            spaceIDs[0] = H5.H5Screate_simple(rank, selectedDims, null);
            H5.H5Sselect_hyperslab(spaceIDs[1], HDF5Constants.H5S_SELECT_SET, startDims, selectedStride, selectedDims,
                    null);
        }

        return lsize;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.DataFormat#getMetadata()
     */
    public List<Attribute> getMetadata() throws HDF5Exception {
        return this.getMetadata(fileFormat.getIndexType(null), fileFormat.getIndexOrder(null));
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.DataFormat#getMetadata(int...)
     */
    public List<Attribute> getMetadata(int... attrPropList) throws HDF5Exception {
        if (rank <= 0) {
            init();
        }
        log.trace("getMetadata: inited");

        try {
            this.linkTargetObjName = H5File.getLinkTargetName(this);
        }
        catch (Exception ex) {
            log.debug("getLinkTargetName failed: ", ex);
        }

        if (attributeList != null) {
            return attributeList;
        }

        // load attributes first
        int did = -1, pid = -1;
        int indxType = fileFormat.getIndexType(null);
        int order = fileFormat.getIndexOrder(null);

        if (attrPropList.length > 0) {
            indxType = attrPropList[0];
            if (attrPropList.length > 1) {
                order = attrPropList[1];
            }
        }
        log.trace("getMetadata: open dataset");
        did = open();
        if (did >= 0) {
            log.trace("getMetadata: dataset opened");
            try {
                compression = "";
                attributeList = H5File.getAttribute(did, indxType, order);
                log.trace("getMetadata: attributeList loaded");

                // get the compression and chunk information
                pid = H5.H5Dget_create_plist(did);
                long storage_size = H5.H5Dget_storage_size(did);
                int nfilt = H5.H5Pget_nfilters(pid);
                if (H5.H5Pget_layout(pid) == HDF5Constants.H5D_CHUNKED) {
                    chunkSize = new long[rank];
                    H5.H5Pget_chunk(pid, rank, chunkSize);
                    if (nfilt > 0) {
                        long nelmts = 1;
                        long uncomp_size;
                        long datum_size = getDatatype().getDatatypeSize();
                        if (datum_size < 0) {
                            int tmptid = -1;
                            try {
                                tmptid = H5.H5Dget_type(did);
                                datum_size = H5.H5Tget_size(tmptid);
                            }
                            finally {
                                try {H5.H5Tclose(tmptid);}
                                catch (Exception ex2) {log.debug("finally close:", ex2);}
                            }
                        }

                        for (int i = 0; i < rank; i++) {
                            nelmts *= dims[i];
                        }
                        uncomp_size = nelmts * datum_size;

                        /* compression ratio = uncompressed size / compressed size */

                        if (storage_size != 0) {
                            double ratio = (double) uncomp_size / (double) storage_size;
                            DecimalFormat df = new DecimalFormat();
                            df.setMinimumFractionDigits(3);
                            df.setMaximumFractionDigits(3);
                            compression += df.format(ratio) + ":1";
                        }
                    }
                }
                else {
                    chunkSize = null;
                }

                int[] flags = { 0, 0 };
                long[] cd_nelmts = { 20 };
                int[] cd_values = new int[(int) cd_nelmts[0]];
                String[] cd_name = { "", "" };
                log.trace("getMetadata: {} filters in pipeline", nfilt);
                int filter = -1;
                int[] filter_config = { 1 };
                filters = "";

                for (int i = 0, k = 0; i < nfilt; i++) {
                    log.trace("getMetadata: filter[{}]", i);
                    if (i > 0) {
                        filters += ", ";
                    }
                    if (k > 0) {
                        compression += ", ";
                    }

                    try {
                        cd_nelmts[0] = 20;
                        cd_values = new int[(int) cd_nelmts[0]];
                        filter = H5.H5Pget_filter(pid, i, flags, cd_nelmts, cd_values, 120, cd_name, filter_config);
                        log.trace("getMetadata: filter[{}] is {} has {} elements ", i, cd_name[0], cd_nelmts[0]);
                        for (int j = 0; j < cd_nelmts[0]; j++) {
                            log.trace("getMetadata: filter[{}] element {} = {}", i, j, cd_values[j]);
                        }
                    }
                    catch (Throwable err) {
                        filters += "ERROR";
                        continue;
                    }

                    if (filter == HDF5Constants.H5Z_FILTER_NONE) {
                        filters += "NONE";
                    }
                    else if (filter == HDF5Constants.H5Z_FILTER_DEFLATE) {
                        filters += "GZIP";
                        compression += compression_gzip_txt + cd_values[0];
                        k++;
                    }
                    else if (filter == HDF5Constants.H5Z_FILTER_FLETCHER32) {
                        filters += "Error detection filter";
                    }
                    else if (filter == HDF5Constants.H5Z_FILTER_SHUFFLE) {
                        filters += "SHUFFLE: Nbytes = " + cd_values[0];
                    }
                    else if (filter == HDF5Constants.H5Z_FILTER_NBIT) {
                        filters += "NBIT";
                    }
                    else if (filter == HDF5Constants.H5Z_FILTER_SCALEOFFSET) {
                        filters += "SCALEOFFSET: MIN BITS = " + cd_values[0];
                    }
                    else if (filter == HDF5Constants.H5Z_FILTER_SZIP) {
                        filters += "SZIP";
                        compression += "SZIP: Pixels per block = " + cd_values[1];
                        k++;
                        int flag = -1;
                        try {
                            flag = H5.H5Zget_filter_info(filter);
                        }
                        catch (Exception ex) {
                            flag = -1;
                        }
                        if (flag == HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) {
                            compression += ": H5Z_FILTER_CONFIG_DECODE_ENABLED";
                        }
                        else if ((flag == HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED)
                                || (flag >= (HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED + HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED))) {
                            compression += ": H5Z_FILTER_CONFIG_ENCODE_ENABLED";
                        }
                    }
                    else {
                        filters += "USERDEFINED " + cd_name[0] + "(" + filter + "): ";
                        for (int j = 0; j < cd_nelmts[0]; j++) {
                            if (j > 0)
                                filters += ", ";
                            filters += cd_values[j];
                        }
                        log.debug("getMetadata: filter[{}] is user defined compression", i);
                    }
                } // for (int i = 0, k = 0; i < nfilt; i++)

                if (compression.length() == 0) {
                    compression = "NONE";
                }
                log.trace("getMetadata: filter compression={}", compression);

                if (filters.length() == 0) {
                    filters = "NONE";
                }
                log.trace("getMetadata: filter information={}", filters);

                storage = "" + storage_size;
                try {
                    int[] at = { 0 };
                    H5.H5Pget_alloc_time(pid, at);
                    storage += ", allocation time: ";
                    if (at[0] == HDF5Constants.H5D_ALLOC_TIME_EARLY) {
                        storage += "Early";
                    }
                    else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_INCR) {
                        storage += "Incremental";
                    }
                    else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_LATE) {
                        storage += "Late";
                    }
                }
                catch (Exception ex) {
                    log.debug("Storage allocation time:", ex);
                }
                if (storage.length() == 0) {
                    storage = "NONE";
                }
                log.trace("getMetadata: storage={}", storage);
            }
            finally {
                try {
                    H5.H5Pclose(pid);
                }
                catch (Exception ex) {
                    log.debug("finally close:", ex);
                }
                close(did);
            }
        }

        log.trace("getMetadata: finish");
        return attributeList;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.DataFormat#writeMetadata(java.lang.Object)
     */
    public void writeMetadata(Object info) throws Exception {
        // only attribute metadata is supported.
        if (!(info instanceof Attribute)) {
            return;
        }

        boolean attrExisted = false;
        Attribute attr = (Attribute) info;
        log.trace("writeMetadata: {}", attr.getName());

        if (attributeList == null) {
            this.getMetadata();
        }

        if (attributeList != null)
            attrExisted = attributeList.contains(attr);

        getFileFormat().writeAttribute(this, attr, attrExisted);
        // add the new attribute into the attribute list
        if (!attrExisted && (attributeList != null)) {
            attributeList.add(attr);
            nAttributes = attributeList.size();
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.DataFormat#removeMetadata(java.lang.Object)
     */
    public void removeMetadata(Object info) throws HDF5Exception {
        // only attribute metadata is supported.
        if (!(info instanceof Attribute)) {
            return;
        }

        Attribute attr = (Attribute) info;
        log.trace("removeMetadata: {}", attr.getName());
        int did = open();
        if (did >= 0) {
            try {
                H5.H5Adelete(did, attr.getName());
                List<Attribute> attrList = getMetadata();
                attrList.remove(attr);
                nAttributes = attrList.size();
            }
            finally {
                close(did);
            }
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.DataFormat#updateMetadata(java.lang.Object)
     */
    public void updateMetadata(Object info) throws HDF5Exception {
        // only attribute metadata is supported.
        if (!(info instanceof Attribute)) {
            return;
        }
        log.trace("updateMetadata");

        nAttributes = -1;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#setName(java.lang.String)
     */
    @Override
    public void setName(String newName) throws Exception {
        H5File.renameObject(this, newName);
        super.setName(newName);
    }

    /**
     * Resets the selection of the dataspace.
     */
    private void resetSelection() {
        log.trace("resetSelection: start");

        for (int i = 0; i < rank; i++) {
            startDims[i] = 0;
            selectedDims[i] = 1;
            if (selectedStride != null) {
                selectedStride[i] = 1;
            }
        }

        if (rank == 1) {
            selectedIndex[0] = 0;
            selectedDims[0] = dims[0];
        }
        else if (rank == 2) {
            selectedIndex[0] = 0;
            selectedIndex[1] = 1;
            selectedDims[0] = dims[0];
            selectedDims[1] = dims[1];
        }
        else if (rank > 2) {
            // selectedIndex[0] = rank - 2; // columns
            // selectedIndex[1] = rank - 1; // rows
            // selectedIndex[2] = rank - 3;
            selectedIndex[0] = 0; // width, the fastest dimension
            selectedIndex[1] = 1; // height
            selectedIndex[2] = 2; // frames
            // selectedDims[rank - 1] = dims[rank - 1];
            // selectedDims[rank - 2] = dims[rank - 2];
            selectedDims[selectedIndex[0]] = dims[selectedIndex[0]];
            selectedDims[selectedIndex[1]] = dims[selectedIndex[1]];
        }
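
        // For example (values illustrative): dims = {5, 50, 10} selects the full first
        // two dimensions of the first frame, i.e. selectedDims = {5, 50, 1}.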

        isDataLoaded = false;
        setMemberSelection(true);
        log.trace("resetSelection: finish");
    }

    /**
     * @deprecated Not for public use in the future. <br>
     *             Use
     *             {@link #create(String, Group, long[], long[], long[], int, String[], Datatype[], int[], long[][], Object)}
     *             instead.
     *
     * @param name
     *            the name of the dataset to create.
     * @param pgroup
     *            the parent group where the new dataset is created.
     * @param dims
     *            the dimension sizes of the dataset.
     * @param memberNames
     *            the names of the compound members
     * @param memberDatatypes
     *            the datatypes of the compound members
     * @param memberSizes
     *            the dimension sizes of the members
     * @param data
     *            the list of data arrays written to the new dataset, null if no data is written to the new dataset.
     *
     * @return the new compound dataset if successful; otherwise returns null.
     *
     * @throws Exception if there is a failure.
     */
    @Deprecated
    public static Dataset create(String name, Group pgroup, long[] dims, String[] memberNames,
            Datatype[] memberDatatypes, int[] memberSizes, Object data) throws Exception {
        if ((pgroup == null) || (name == null) || (dims == null) || (memberNames == null) || (memberDatatypes == null)
                || (memberSizes == null)) {
            return null;
        }

        int nMembers = memberNames.length;
        int[] memberRanks = new int[nMembers];
        long[][] memberDims = new long[nMembers][1];
        for (int i = 0; i < nMembers; i++) {
            memberRanks[i] = 1;
            memberDims[i][0] = memberSizes[i];
        }
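
        // For example (values illustrative): memberSizes = {1, 10} becomes
        // memberRanks = {1, 1} and memberDims = {{1}, {10}}.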

        return H5CompoundDS.create(name, pgroup, dims, memberNames, memberDatatypes, memberRanks, memberDims, data);
    }

    /**
     * @deprecated Not for public use in the future. <br>
     *             Use
     *             {@link #create(String, Group, long[], long[], long[], int, String[], Datatype[], int[], long[][], Object)}
     *             instead.
     *
     * @param name
     *            the name of the dataset to create.
     * @param pgroup
     *            the parent group where the new dataset is created.
     * @param dims
     *            the dimension sizes of the dataset.
     * @param memberNames
     *            the names of the compound members
     * @param memberDatatypes
     *            the datatypes of the compound members
     * @param memberRanks
     *            the ranks of the members
     * @param memberDims
     *            the dimension sizes of the members
     * @param data
     *            the list of data arrays written to the new dataset, null if no data is written to the new dataset.
     *
     * @return the new compound dataset if successful; otherwise returns null.
     *
     * @throws Exception if the dataset can not be created.
     */
    @Deprecated
    public static Dataset create(String name, Group pgroup, long[] dims, String[] memberNames,
            Datatype[] memberDatatypes, int[] memberRanks, long[][] memberDims, Object data) throws Exception {
        return H5CompoundDS.create(name, pgroup, dims, null, null, -1, memberNames, memberDatatypes, memberRanks,
                memberDims, data);
    }

    /**
     * Creates a simple compound dataset in a file with/without chunking and compression.
     * <p>
     * This function provides an easy way to create a simple compound dataset in a file by hiding the tedious details
     * of creating a compound dataset from users.
     * <p>
     * This function calls H5.H5Dcreate() to create a simple compound dataset in a file. Nested compound datasets are
     * not supported. The required information to create a compound dataset includes the name, the parent group and
     * dataspace of the dataset, and the names, datatypes and dataspaces of the compound fields. Other information
     * such as chunks, compression and the data buffer is optional.
     * <p>
     * The following example shows how to use this function to create a compound dataset in a file.
     *
     * <pre>
     * H5File file = null;
     * String message = &quot;&quot;;
     * Group pgroup = null;
     * int[] DATA_INT = new int[DIM_SIZE];
     * float[] DATA_FLOAT = new float[DIM_SIZE];
     * String[] DATA_STR = new String[DIM_SIZE];
     * long[] DIMs = { 50, 10 };
     * long[] CHUNKs = { 25, 5 };
     *
     * try {
     *     file = (H5File) H5FILE.open(fname, H5File.CREATE);
     *     file.open();
     *     pgroup = (Group) file.get(&quot;/&quot;);
     * }
     * catch (Exception ex) {
     * }
     *
     * Vector data = new Vector();
     * data.add(0, DATA_INT);
     * data.add(1, DATA_FLOAT);
     * data.add(2, DATA_STR);
     *
     * // create the member datatypes
     * Datatype[] mdtypes = new H5Datatype[3];
     * String[] mnames = { &quot;int&quot;, &quot;float&quot;, &quot;string&quot; };
     * Dataset dset = null;
     * try {
     *     mdtypes[0] = new H5Datatype(Datatype.CLASS_INTEGER, 4, -1, -1);
     *     mdtypes[1] = new H5Datatype(Datatype.CLASS_FLOAT, 4, -1, -1);
     *     mdtypes[2] = new H5Datatype(Datatype.CLASS_STRING, STR_LEN, -1, -1);
     *     dset = file.createCompoundDS(&quot;/CompoundDS&quot;, pgroup, DIMs, null, CHUNKs, 9, mnames, mdtypes, null, data);
     * }
     * catch (Exception ex) {
     *     failed(message, ex, file);
     *     return 1;
     * }
     * </pre>
     *
     * @param name
     *            the name of the dataset to create.
     * @param pgroup
     *            the parent group where the new dataset is created.
     * @param dims
     *            the dimension sizes of the dataset.
     * @param maxdims
     *            the max dimension sizes of the dataset. maxdims is set to dims if maxdims is null.
     * @param chunks
     *            the chunk sizes of the dataset. No chunking if chunks is null.
     * @param gzip
     *            the GZIP compression level (1 to 9). Use 0 or a negative value for no compression.
     * @param memberNames
     *            the names of the compound members
     * @param memberDatatypes
     *            the datatypes of the compound members
     * @param memberRanks
     *            the ranks of the members
     * @param memberDims
     *            the dimension sizes of the members
     * @param data
     *            the list of data arrays written to the new dataset, null if no data is written to the new dataset.
     *
     * @return the new compound dataset if successful; otherwise returns null.
     *
     * @throws Exception if there is a failure.
     */
    public static Dataset create(String name, Group pgroup, long[] dims, long[] maxdims, long[] chunks, int gzip,
            String[] memberNames, Datatype[] memberDatatypes, int[] memberRanks, long[][] memberDims, Object data)
            throws Exception {
        H5CompoundDS dataset = null;
        String fullPath = null;
        int did = -1, sid = -1, tid = -1, plist = -1;

        log.trace("H5CompoundDS create start");
        if ((pgroup == null) || (name == null) || (dims == null) || ((gzip > 0) && (chunks == null))
                || (memberNames == null) || (memberDatatypes == null)
                || (memberRanks == null) || (memberDims == null)) {
            return null;
        }

        H5File file = (H5File) pgroup.getFileFormat();
        if (file == null) {
            return null;
        }

        String path = HObject.separator;
        if (!pgroup.isRoot()) {
            path = pgroup.getPath() + pgroup.getName() + HObject.separator;
            if (name.endsWith("/")) {
                name = name.substring(0, name.length() - 1);
            }
            int idx = name.lastIndexOf("/");
            if (idx >= 0) {
                name = name.substring(idx + 1);
            }
        }

        fullPath = path + name;

        int typeSize = 0;
        int nMembers = memberNames.length;
        int[] mTypes = new int[nMembers];
        int memberSize = 1;
        for (int i = 0; i < nMembers; i++) {
            memberSize = 1;
            for (int j = 0; j < memberRanks[i]; j++) {
                memberSize *= memberDims[i][j];
            }

            mTypes[i] = -1;
            // the member is an array
            if ((memberSize > 1) && (memberDatatypes[i].getDatatypeClass() != Datatype.CLASS_STRING)) {
                int tmptid = -1;
                if ((tmptid = memberDatatypes[i].toNative()) >= 0) {
                    try {
                        mTypes[i] = H5.H5Tarray_create(tmptid, memberRanks[i], memberDims[i]);
                    }
                    finally {
                        try {H5.H5Tclose(tmptid);}
                        catch (Exception ex) {log.debug("compound array create finally close:", ex);}
                    }
                }
            }
            else {
                mTypes[i] = memberDatatypes[i].toNative();
            }
            try {
                typeSize += H5.H5Tget_size(mTypes[i]);
            }
            catch (Exception ex) {
                log.debug("array create H5Tget_size:", ex);

                // close all member types created so far before giving up
                while (i > 0) {
                    try {H5.H5Tclose(mTypes[i]);}
                    catch (HDF5Exception ex2) {log.debug("compound create finally close:", ex2);}
                    i--;
                }
                throw ex;
            }
        } // for (int i = 0; i < nMembers; i++)

        // setup chunking and compression
        boolean isExtendible = false;
        if (maxdims != null) {
            for (int i = 0; i < maxdims.length; i++) {
                if (maxdims[i] == 0) {
                    maxdims[i] = dims[i];
                }
                else if (maxdims[i] < 0) {
                    maxdims[i] = HDF5Constants.H5S_UNLIMITED;
                }

                if (maxdims[i] != dims[i]) {
                    isExtendible = true;
                }
            }
        }

        // HDF5 requires chunking in order to define extendible datasets.
        // Chunking makes it possible to extend datasets efficiently, without
        // having to reorganize storage excessively, so fall back to a default
        // chunk size of at most 64 in each dimension, which performs well.
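        // For example (values illustrative): dims = {100, 10} with an unlimited
        // maxdims and null chunks yields default chunks = {64, 10}.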
        if ((chunks == null) && isExtendible) {
            chunks = new long[dims.length];
            for (int i = 0; i < dims.length; i++)
                chunks[i] = Math.min(dims[i], 64);
        }

        // prepare the dataspace and datatype
        int rank = dims.length;

        try {
            sid = H5.H5Screate_simple(rank, dims, maxdims);

            // figure out creation properties
            plist = HDF5Constants.H5P_DEFAULT;

            tid = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, typeSize);
            int offset = 0;
            for (int i = 0; i < nMembers; i++) {
                H5.H5Tinsert(tid, memberNames[i], offset, mTypes[i]);
                offset += H5.H5Tget_size(mTypes[i]);
            }
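
            // For example (values illustrative): member sizes {4, 4, 8} give
            // insert offsets 0, 4 and 8 within the compound type.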

            if (chunks != null) {
                plist = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);

                H5.H5Pset_layout(plist, HDF5Constants.H5D_CHUNKED);
                H5.H5Pset_chunk(plist, rank, chunks);

                // compression requires chunking
                if (gzip > 0) {
                    H5.H5Pset_deflate(plist, gzip);
                }
            }

            int fid = file.getFID();

            log.trace("H5CompoundDS create dataset");
            did = H5.H5Dcreate(fid, fullPath, tid, sid, HDF5Constants.H5P_DEFAULT, plist, HDF5Constants.H5P_DEFAULT);
            log.trace("H5CompoundDS create H5CompoundDS");
            dataset = new H5CompoundDS(file, name, path);
        }
        finally {
            try {
                H5.H5Pclose(plist);
            }
            catch (HDF5Exception ex) {
                log.debug("create finally close:", ex);
            }
            try {
                H5.H5Sclose(sid);
            }
            catch (HDF5Exception ex) {
                log.debug("create finally close:", ex);
            }
            try {
                H5.H5Tclose(tid);
            }
            catch (HDF5Exception ex) {
                log.debug("create finally close:", ex);
            }
            try {
                H5.H5Dclose(did);
            }
            catch (HDF5Exception ex) {
                log.debug("create finally close:", ex);
            }

            for (int i = 0; i < nMembers; i++) {
                try {
                    H5.H5Tclose(mTypes[i]);
                }
                catch (HDF5Exception ex) {
                    log.debug("compound create finally close:", ex);
                }
            }
        }

        if (dataset != null) {
            pgroup.addToMemberList(dataset);
            if (data != null) {
                dataset.init();
                long[] selected = dataset.getSelectedDims();
                for (int i = 0; i < rank; i++) {
                    selected[i] = dims[i];
                }
                dataset.write(data);
            }
        }
        log.trace("H5CompoundDS create finish");

        return dataset;
    }

    /**
     * Extracts compound information into a flat structure.
     * <p>
     * For example, if compound datatype "nest" has members {nest1{a, b, c}, d, e}, then extractCompoundInfo() puts
     * the names of the nested compound fields into a flat list as
     *
     * <pre>
     * nest.nest1.a
     * nest.nest1.b
     * nest.nest1.c
     * nest.d
     * nest.e
     * </pre>
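     * <p>
     * A minimal usage sketch (identifiers are illustrative):
     *
     * <pre>
     * List&lt;String&gt; names = new Vector&lt;String&gt;();
     * List&lt;Integer&gt; flatTypes = new Vector&lt;Integer&gt;();
     * extractCompoundInfo(tid, &quot;&quot;, names, flatTypes);
     * // names now holds the flattened member paths shown above
     * </pre>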
     *
     * @param tid   the identifier of the compound datatype
     * @param name  the name prefix of the compound datatype
     * @param names  the list to store the flattened member names of the compound datatype
     * @param flatTypeList2  the list to store the datatype identifiers of the flattened members
     */
    private void extractCompoundInfo(int tid, String name, List<String> names, List<Integer> flatTypeList2) {
        int nMembers = 0, mclass = -1, mtype = -1;
        String mname = null;

        log.trace("extractCompoundInfo: tid={}, name={}", tid, name);
        try {
            nMembers = H5.H5Tget_nmembers(tid);
        }
        catch (Exception ex) {
            nMembers = 0;
        }
        log.trace("extractCompoundInfo: nMembers={}", nMembers);

        if (nMembers <= 0) {
            log.trace("extractCompoundInfo: leave because nMembers={}", nMembers);
            return;
        }

        int tmptid = -1;
        for (int i = 0; i < nMembers; i++) {
            log.trace("extractCompoundInfo: nMembers[{}]", i);
            try {
                mtype = H5.H5Tget_member_type(tid, i);
            }
            catch (Exception ex) {
                log.debug("Exception continue H5Tget_member_type[{}]:", i, ex);
                continue;
            }

            try {
                tmptid = mtype;
                mtype = H5.H5Tget_native_type(tmptid);
            }
            catch (HDF5Exception ex) {
                log.debug("Exception continue H5Tget_native_type[{}]:", i, ex);
                continue;
            }
            finally {
                try {
                    H5.H5Tclose(tmptid);
                }
                catch (HDF5Exception ex) {
                    log.debug("finally close:", ex);
                }
            }

            try {
                mclass = H5.H5Tget_class(mtype);
            }
            catch (HDF5Exception ex) {
                log.debug("Exception continue H5Tget_class[{}]:", i, ex);
                continue;
            }

            if (names != null) {
                mname = name + H5.H5Tget_member_name(tid, i);
                log.trace("extractCompoundInfo:[{}] mname={}, name={}", i, mname, name);
            }

            if (mclass == HDF5Constants.H5T_COMPOUND) {
                extractCompoundInfo(mtype, mname + CompoundDS.separator, names, flatTypeList2);
                log.debug("continue after recursive H5T_COMPOUND[{}]:", i);
                continue;
            }
            else if (mclass == HDF5Constants.H5T_ARRAY) {
                try {
                    tmptid = H5.H5Tget_super(mtype);
                    int tmpclass = H5.H5Tget_class(tmptid);

                    // cannot deal with ARRAY of ARRAY, support only ARRAY of atomic types
                    if (tmpclass == HDF5Constants.H5T_ARRAY) {
                        log.debug("unsupported ARRAY of ARRAY[{}]:", i);
                        continue;
                    }
                }
                catch (Exception ex) {
                    log.debug("Exception continue H5T_ARRAY id or class failure[{}]:", i, ex);
                    continue;
                }
                finally {
                    try {
                        H5.H5Tclose(tmptid);
                    }
                    catch (Exception ex) {
                        log.debug("finally close[{}]:", i, ex);
                    }
                }
            }

            if (names != null) {
                names.add(mname);
            }
            flatTypeList2.add(Integer.valueOf(mtype));

        } // for (int i=0; i<nMembers; i++)
        log.trace("extractCompoundInfo: end");
    } // extractCompoundInfo

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#isString(int)
     */
    @Override
    public boolean isString(int tid) {
        boolean b = false;
        try {
            b = (HDF5Constants.H5T_STRING == H5.H5Tget_class(tid));
        }
        catch (Exception ex) {
            b = false;
        }

        return b;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.Dataset#getSize(int)
     */
    @Override
    public int getSize(int tid) {
        int tsize = -1;

        try {
            tsize = H5.H5Tget_size(tid);
        }
        catch (Exception ex) {
            tsize = -1;
        }

        return tsize;
    }

    /**
     * Creates a compound datatype that contains a single field.
     * <p>
     * This function is needed to read/write compound data field by field.
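     * <p>
     * For a nested member name such as &quot;nest.a&quot;, the field is wrapped in compound types from the inside
     * out, roughly equivalent to this sketch (identifiers are illustrative):
     *
     * <pre>
     * int inner = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, member_size);
     * H5.H5Tinsert(inner, &quot;a&quot;, 0, atom_tid);
     * int outer = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, member_size);
     * H5.H5Tinsert(outer, &quot;nest&quot;, 0, inner);
     * </pre>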
     *
     * @param atom_tid
     *            The datatype identifier of the field to wrap
     * @param member_name
     *            The (possibly nested) name of the field
     * @param compInfo
     *            compInfo[0]--IN: class of the member datatype; compInfo[1]--IN: size of the member datatype;
     *            compInfo[2]--OUT: non-zero if the base type of the compound field is unsigned; zero, otherwise.
     *
     * @return the identifier of the compound datatype.
     *
     * @throws HDF5Exception
     *             If there is an error at the HDF5 library level.
     */
    private final int createCompoundFieldType(int atom_tid, String member_name, int[] compInfo) throws HDF5Exception {
        int nested_tid = -1;

        int arrayType = -1;
        int baseType = -1;
        int tmp_tid1 = -1, tmp_tid4 = -1;
        log.trace("createCompoundFieldType start");

        try {
            int member_class = compInfo[0];
            int member_size = compInfo[1];

            log.trace("{} Member is class {} of size={} with baseType={}", member_name, member_class, member_size,
                    baseType);
            if (member_class == HDF5Constants.H5T_ARRAY) {
                int mn = H5.H5Tget_array_ndims(atom_tid);
                long[] marray = new long[mn];
                H5.H5Tget_array_dims(atom_tid, marray);
                baseType = H5.H5Tget_super(atom_tid);
                tmp_tid4 = H5.H5Tget_native_type(baseType);
                arrayType = H5.H5Tarray_create(tmp_tid4, mn, marray);
                log.trace("H5T_ARRAY {} Member is class {} of size={} with baseType={}", member_name, member_class,
                        member_size, baseType);
            }

            try {
                if (baseType < 0) {
                    if (H5Datatype.isUnsigned(atom_tid)) {
                        compInfo[2] = 1;
                    }
                }
                else {
                    if (H5Datatype.isUnsigned(baseType)) {
                        compInfo[2] = 1;
                    }
                }
            }
            catch (Exception ex2) {
                log.debug("baseType isUnsigned:", ex2);
            }
            try {
                H5.H5Tclose(baseType);
                baseType = -1;
            }
            catch (HDF5Exception ex4) {
                log.debug("finally close:", ex4);
            }

            member_size = H5.H5Tget_size(atom_tid);
            log.trace("createCompoundFieldType member_size={}", member_size);

            // construct the nested compound structure with a single field
            String theName = member_name;
            if (arrayType < 0) {
                tmp_tid1 = H5.H5Tcopy(atom_tid);
            }
            else {
                tmp_tid1 = H5.H5Tcopy(arrayType);
            }
            try {
                H5.H5Tclose(arrayType);
                arrayType = -1;
            }
            catch (HDF5Exception ex4) {
                log.debug("finally close:", ex4);
            }
            int sep = member_name.lastIndexOf(CompoundDS.separator);
            log.trace("createCompoundFieldType sep={}", sep);

            while (sep > 0) {
                theName = member_name.substring(sep + 1);
                log.trace("createCompoundFieldType sep={} with name={}", sep, theName);
                nested_tid = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, member_size);
                H5.H5Tinsert(nested_tid, theName, 0, tmp_tid1);
                try {
                    log.trace("createCompoundFieldType(sep) H5.H5Tclose:tmp_tid1={}", tmp_tid1);
                    H5.H5Tclose(tmp_tid1);
                }
                catch (Exception ex) {
                    log.debug("close nested temp {}:", sep, ex);
                }
                tmp_tid1 = nested_tid;
                member_name = member_name.substring(0, sep);
                sep = member_name.lastIndexOf(CompoundDS.separator);
            }

            nested_tid = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, member_size);

            H5.H5Tinsert(nested_tid, member_name, 0, tmp_tid1);
        }
        finally {
            try {
                log.trace("createCompoundFieldType finally H5.H5Tclose:tmp_tid1={}", tmp_tid1);
                H5.H5Tclose(tmp_tid1);
            }
            catch (HDF5Exception ex3) {
                log.debug("finally close:", ex3);
            }
            try {
                log.trace("createCompoundFieldType finally H5.H5Tclose:tmp_tid4={}", tmp_tid4);
                H5.H5Tclose(tmp_tid4);
            }
            catch (HDF5Exception ex3) {
                log.debug("finally close:", ex3);
            }
            try {
                log.trace("createCompoundFieldType finally H5.H5Tclose:baseType={}", baseType);
                H5.H5Tclose(baseType);
            }
            catch (HDF5Exception ex4) {
                log.debug("finally close:", ex4);
            }
            try {
                log.trace("createCompoundFieldType finally H5.H5Tclose:arrayType={}", arrayType);
                H5.H5Tclose(arrayType);
            }
            catch (HDF5Exception ex4) {
                log.debug("finally close:", ex4);
            }
        }

        log.trace("createCompoundFieldType finish");
        return nested_tid;
    }

    /**
     * Given a byte array representing compound Datatype data, a start index and a length, converts len bytes into
     * an Object of the correct member type and returns it.
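     * <p>
     * For example (values illustrative), an H5T_INTEGER member of native size 4 read with len = 8 is returned as an
     * int[2]:
     *
     * <pre>
     * Object field = convertCompoundByteMember(rawBytes, member_tid, 0, 8);
     * </pre>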
     *
     * @param data The byte array representing the data of the compound Datatype
     * @param data_type The datatype identifier of the member to convert the bytes to
     * @param start The start index of the bytes to get
     * @param len The number of bytes to convert
     * @return The converted object, or null if the conversion fails or the type is unsupported
     */
    private Object convertCompoundByteMember(byte[] data, int data_type, long start, long len) {
        Object currentData = null;

        try {
            int typeClass = H5.H5Tget_class(data_type);

            if (typeClass == HDF5Constants.H5T_INTEGER) {
                int size = H5.H5Tget_size(data_type);

                currentData = HDFNativeData.byteToInt((int) start, (int) (len / size), data);
            }
            else if (typeClass == HDF5Constants.H5T_FLOAT) {
                // note: assumes the floating-point member is stored as a single 8-byte double
                currentData = HDFNativeData.byteToDouble((int) start, 1, data);
            }
        }
        catch (Exception ex) {
            log.debug("convertCompoundByteMember() conversion failure: ", ex);
        }

        return currentData;
    }
}