[h5py] 35/455: h5s additions, added h5r, misc
Ghislain Vaillant
ghisvail-guest at moszumanska.debian.org
Thu Jul 2 18:19:14 UTC 2015
This is an automated email from the git hooks/post-receive script.
ghisvail-guest pushed a commit to annotated tag 1.3.0
in repository h5py.
commit 2db2026144167d6dc228ca7b5d8cf4e314f9ab9c
Author: andrewcollette <andrew.collette at gmail.com>
Date: Thu May 29 22:51:56 2008 +0000
h5s additions, added h5r, misc
---
h5py/errors.py | 3 +-
h5py/h5d.pyx | 27 ++--
h5py/h5r.pxd | 38 +++++
h5py/h5r.pyx | 129 ++++++++++++++++
h5py/h5s.pxd | 17 ++-
h5py/h5s.pyx | 341 +++++++++++++++++++++++++++++++++++++++++-
h5py/highlevel.py | 121 +++++++--------
h5py/utils.pxd | 7 +-
h5py/{utils.c => utils_low.c} | 19 ++-
h5py/{utils.h => utils_low.h} | 0
setup.py | 4 +-
11 files changed, 611 insertions(+), 95 deletions(-)
diff --git a/h5py/errors.py b/h5py/errors.py
index e2547c6..dca35ea 100644
--- a/h5py/errors.py
+++ b/h5py/errors.py
@@ -58,7 +58,8 @@ class H5TypeError(H5LibraryError):
class IdentifierError(H5LibraryError):
pass
-
+class H5ReferenceError(H5LibraryError):
+ pass
diff --git a/h5py/h5d.pyx b/h5py/h5d.pyx
index dbdeb0b..697fb13 100644
--- a/h5py/h5d.pyx
+++ b/h5py/h5d.pyx
@@ -374,13 +374,10 @@ def py_create(hid_t parent_id, char* name, object data=None, object dtype=None,
return dset_id
def py_read_slab(hid_t ds_id, object start, object count,
- object stride=None, **kwds):
- """ (INT ds_id, TUPLE start, TUPLE count, TUPLE stride=None, **kwds)
+ object stride=None, dtype=None):
+ """ (INT ds_id, TUPLE start, TUPLE count, TUPLE stride=None,
+ DTYPE dtype=None)
=> NDARRAY numpy_array_out
-
- Keywords allowed:
- STRING byteorder=None, TUPLE compound_names=None,
- TUPLE complex_names=None
Read a hyperslab from an existing HDF5 dataset, and return it as a
Numpy array. Dimensions are specified by:
@@ -393,8 +390,10 @@ def py_read_slab(hid_t ds_id, object start, object count,
selected. If None (default), the HDF5 library default of "1"
will be used for all axes.
- Keywords byteorder, compound_names, and complex_names are passed to
- the datatype conversion function, py_h5t_to_dtype.
+ If a Numpy dtype object is passed in through "dtype", it will be used
+ as the type object for the returned array, and the library will attempt
+ to convert between datatypes during the read operation. If no
+ automatic conversion path exists, an exception will be raised.
As is customary when slicing into Numpy array objects, no dimensions
with length 1 are present in the returned array. Additionally, if the
@@ -414,8 +413,9 @@ def py_read_slab(hid_t ds_id, object start, object count,
try:
# Obtain the Numpy dtype of the array
- type_id = get_type(ds_id)
- dtype = h5t.py_h5t_to_dtype(type_id, **kwds)
+ if dtype is None:
+ type_id = get_type(ds_id)
+ dtype = h5t.py_h5t_to_dtype(type_id)
file_space = get_space(ds_id)
space_type = h5s.get_simple_extent_type(file_space)
@@ -553,18 +553,17 @@ def py_rank(hid_t dset_id):
H5Sclose(space_id)
return rank
-def py_dtype(hid_t dset_id, **kwds):
- """ (INT dset_id, **kwds) => DTYPE numpy_dtype
+def py_dtype(hid_t dset_id):
+ """ (INT dset_id) => DTYPE numpy_dtype
Get the datatype of an HDF5 dataset, converted to a Numpy dtype.
- Keywords are passed to py_h5t_to_dtype.
"""
cdef hid_t type_id
type_id = 0
dtype_out = None
try:
type_id = get_type(dset_id)
- dtype_out = h5t.py_h5t_to_dtype(type_id, **kwds)
+ dtype_out = h5t.py_h5t_to_dtype(type_id)
finally:
if type_id:
H5Tclose(type_id)
diff --git a/h5py/h5r.pxd b/h5py/h5r.pxd
new file mode 100644
index 0000000..48b29d5
--- /dev/null
+++ b/h5py/h5r.pxd
@@ -0,0 +1,38 @@
+#+
+#
+# This file is part of h5py, a low-level Python interface to the HDF5 library.
+#
+# Copyright (C) 2008 Andrew Collette
+# http://h5py.alfven.org
+# License: BSD (See LICENSE.txt for full license)
+#
+# $Date$
+#
+#-
+
+from defs_c cimport size_t
+from h5 cimport hid_t, herr_t, haddr_t
+from h5g cimport H5G_obj_t
+
+cdef extern from "hdf5.h":
+
+ size_t H5R_DSET_REG_REF_BUF_SIZE
+ size_t H5R_OBJ_REF_BUF_SIZE
+
+ ctypedef enum H5R_type_t:
+ H5R_BADTYPE = (-1),
+ H5R_OBJECT,
+ H5R_DATASET_REGION,
+ H5R_INTERNAL,
+ H5R_MAXTYPE
+
+ ctypedef haddr_t hobj_ref_t
+ ctypedef unsigned char hdset_reg_ref_t[12]
+
+ herr_t H5Rcreate(void *ref, hid_t loc_id, char *name, H5R_type_t ref_type,
+ hid_t space_id)
+ hid_t H5Rdereference(hid_t obj_id, H5R_type_t ref_type, void *ref)
+ hid_t H5Rget_region(hid_t dataset, H5R_type_t ref_type, void *ref)
+ H5G_obj_t H5Rget_obj_type(hid_t id, H5R_type_t ref_type, void *ref)
+
+
diff --git a/h5py/h5r.pyx b/h5py/h5r.pyx
new file mode 100644
index 0000000..9559c64
--- /dev/null
+++ b/h5py/h5r.pyx
@@ -0,0 +1,129 @@
+#+
+#
+# This file is part of h5py, a low-level Python interface to the HDF5 library.
+#
+# Copyright (C) 2008 Andrew Collette
+# http://h5py.alfven.org
+# License: BSD (See LICENSE.txt for full license)
+#
+# $Date$
+#
+#-
+
+# Pyrex compile-time imports
+from h5g cimport H5G_obj_t
+
+# Runtime imports
+import h5
+from errors import H5ReferenceError
+
+# === Public constants and data structures ====================================
+
+TYPE_OBJECT = H5R_OBJECT
+TYPE_REGION = H5R_DATASET_REGION
+TYPE_MAPPER = {H5R_OBJECT: 'OBJECT', H5R_DATASET_REGION: 'DATASET REGION' }
+TYPE_MAPPER = DDict(TYPE_MAPPER)
+
+cdef union ref_u:
+ hobj_ref_t obj_ref
+ hdset_reg_ref_t reg_ref
+
+cdef class Reference:
+
+ """
+ Represents an HDF5 reference.
+
+ Objects of this class are created exclusively by the library and
+ cannot be modified. The read-only attribute "typecode" determines
+ whether the reference is to an object in an HDF5 file (TYPE_OBJECT)
+ or a dataspace region (TYPE_REGION).
+ """
+
+ cdef ref_u ref
+ cdef readonly int typecode
+
+ def __str__(self):
+ return "HDF5 reference (type %s)" % TYPE_MAPPER[self.typecode]
+
+ def __repr__(self):
+ return self.__str__()
+
+# === Reference API ===========================================================
+
+def create(hid_t loc_id, char* name, int ref_type, hid_t space_id=-1):
+    """ (INT loc_id, STRING name, INT ref_type, INT space_id=-1)
+ => ReferenceObject ref
+
+ Create a new reference, either to an object or a dataset region.
+ """
+ cdef herr_t retval
+ cdef Reference ref
+
+ ref = Reference()
+
+ retval = H5Rcreate(&ref.ref, loc_id, name, <H5R_type_t>ref_type, space_id)
+
+ if retval < 0:
+ raise H5ReferenceError("Failed to create reference on %d (\"%s\") type %d." % (loc_id, name, ref_type))
+
+ ref.typecode = ref_type
+ return ref
+
+def dereference(hid_t obj_id, Reference ref):
+ """ (INT obj_id, ReferenceObject ref) => INT obj_id
+
+ Open the object pointed to by "ref" and return its identifier. The
+ parameter "obj_id" may be the file ID or the ID of any object which
+ lives in the file.
+ """
+ cdef hid_t retval
+ retval = H5Rdereference(obj_id, <H5R_type_t>ref.typecode, &ref.ref)
+ if retval < 0:
+ raise H5ReferenceError("Failed to dereference.")
+
+ return retval
+
+def get_region(hid_t container_id, Reference ref):
+ """ (INT container_id, Reference ref) => INT dataspace_id
+
+ Retrieve the dataspace selection pointed to by a reference. The given
+ reference must be in the dataset indicated by container_id. Returns
+ an identifier for a copy of the dataspace for the dataset pointed to
+ by "ref", with the appropriate elements selected.
+
+ The given reference object must be of type TYPE_REGION.
+ """
+ cdef hid_t space_id
+ space_id = H5Rget_region(container_id, <H5R_type_t>ref.typecode, &ref.ref)
+ if space_id < 0:
+ raise H5ReferenceError("Failed to retrieve region.")
+ return space_id
+
+def get_obj_type(hid_t container_id, Reference ref):
+ """ (INT container_id, Reference ref) => INT obj_code
+
+ Determine what type of object an object reference points to. The
+ reference may be either TYPE_OBJECT or TYPE_REGION.
+
+ The return value is one of:
+ h5g.OBJ_LINK Symbolic link
+ h5g.OBJ_GROUP Group
+ h5g.OBJ_DATASET Dataset
+ h5g.OBJ_TYPE Named datatype
+ """
+ cdef int retval
+ retval = <int>H5Rget_obj_type(container_id, <H5R_type_t>ref.typecode, &ref.ref)
+ if retval < 0:
+ raise H5ReferenceError("Failed to determine object type.")
+ return retval
+
+
+
+
+
+
+
+
+
+
+
diff --git a/h5py/h5s.pxd b/h5py/h5s.pxd
index 919d2fa..ab7f4bc 100644
--- a/h5py/h5s.pxd
+++ b/h5py/h5s.pxd
@@ -78,20 +78,21 @@ cdef extern from "hdf5.h":
# --- Dataspace selection ---------------------------------------------------
H5S_sel_type H5Sget_select_type(hid_t space_id)
hssize_t H5Sget_select_npoints(hid_t space_id)
- hssize_t H5Sget_select_hyper_nblocks(hid_t space_id )
- herr_t H5Sget_select_hyper_blocklist(hid_t space_id,
- hsize_t startblock, hsize_t numblocks, hsize_t *buf )
- hssize_t H5Sget_select_elem_npoints(hid_t space_id )
- herr_t H5Sget_select_elem_pointlist(hid_t space_id, hsize_t startpoint,
- hsize_t numpoints, hsize_t *buf)
herr_t H5Sget_select_bounds(hid_t space_id, hsize_t *start, hsize_t *end)
- herr_t H5Sselect_elements(hid_t space_id, H5S_seloper_t op,
- size_t num_elements, hsize_t *coord[] )
herr_t H5Sselect_all(hid_t space_id)
herr_t H5Sselect_none(hid_t space_id)
htri_t H5Sselect_valid(hid_t space_id)
+ hssize_t H5Sget_select_elem_npoints(hid_t space_id )
+ herr_t H5Sget_select_elem_pointlist(hid_t space_id, hsize_t startpoint,
+ hsize_t numpoints, hsize_t *buf)
+ herr_t H5Sselect_elements(hid_t space_id, H5S_seloper_t op,
+ size_t num_elements, hsize_t *coord[] )
+
+ hssize_t H5Sget_select_hyper_nblocks(hid_t space_id )
+ herr_t H5Sget_select_hyper_blocklist(hid_t space_id,
+ hsize_t startblock, hsize_t numblocks, hsize_t *buf )
herr_t H5Sselect_hyperslab(hid_t space_id, H5S_seloper_t op,
hsize_t start[], hsize_t _stride[],
hsize_t count[], hsize_t _block[])
diff --git a/h5py/h5s.pyx b/h5py/h5s.pyx
index 063908c..349148f 100644
--- a/h5py/h5s.pyx
+++ b/h5py/h5s.pyx
@@ -10,7 +10,6 @@
#
#-
-
"""
Low-level interface to the "H5S" family of data-space functions.
@@ -19,9 +18,9 @@
"""
# Pyrex compile-time imports
-from defs_c cimport malloc, free
+from defs_c cimport malloc, free, memcpy
from h5 cimport herr_t, htri_t, hid_t, size_t, hsize_t, hssize_t
-from utils cimport tuple_to_dims, dims_to_tuple
+from utils cimport tuple_to_dims, dims_to_tuple, emalloc
# Runtime imports
import h5
@@ -60,6 +59,17 @@ CLASS_MAPPER = {H5S_NO_CLASS: 'NO CLASS', H5S_SCALAR: 'SCALAR',
H5S_SIMPLE: 'SIMPLE'}
CLASS_MAPPER = DDict(CLASS_MAPPER)
+#enum H5S_sel_type
+SEL_ERROR = H5S_SEL_ERROR
+SEL_NON = H5S_SEL_NONE
+SEL_POINTS = H5S_SEL_POINTS
+SEL_HYPERSLABS = H5S_SEL_HYPERSLABS
+SEL_ALL = H5S_SEL_ALL
+SEL_MAPPER = {H5S_SEL_ERROR: 'ERROR', H5S_SEL_NONE: 'NONE',
+ H5S_SEL_POINTS: 'POINTS', H5S_SEL_HYPERSLABS: 'HYPERSLABS',
+ H5S_SEL_ALL: 'ALL'}
+SEL_MAPPER = DDict(SEL_MAPPER)
+
# === Basic dataspace operations ==============================================
def close(hid_t space_id):
@@ -150,13 +160,12 @@ def is_simple(hid_t space_id):
raise DataspaceError("Failed to determine simplicity of dataspace %d" % space_id)
return bool(retval)
-def offset_simple(hid_t space_id, object offset):
- """ (INT space_id, TUPLE offset or None)
+def offset_simple(hid_t space_id, object offset=None):
+ """ (INT space_id, TUPLE offset=None)
Set the offset of a dataspace. The length of the given tuple must
match the rank of the dataspace; ValueError will be raised otherwise.
- If None is provided instead of a tuple, the offsets on all axes
- will be set to 0.
+ If None is provided (default), the offsets on all axes will be set to 0.
"""
cdef htri_t simple
cdef int rank
@@ -259,7 +268,323 @@ def get_simple_extent_type(hid_t space_id):
raise DataspaceError("Can't determine type of dataspace %d" % space_id)
return retval
-# === Dataspace manipulation ==================================================
+# === Extents =================================================================
+
+def extent_copy(hid_t dest_id, hid_t source_id):
+ """ (INT dest_id, INT source_id)
+
+ Copy one dataspace's extent to another, changing its type if necessary.
+ """
+ cdef herr_t retval
+ retval = H5Sextent_copy(dest_id, source_id)
+ if retval < 0:
+ raise DataspaceError("Can't copy extent (%d to %d)" % (source_id, dest_id))
+
+def set_extent_simple(hid_t space_id, object dims_tpl, object max_dims_tpl=None):
+ """ (INT space_id, TUPLE dims_tpl, TUPLE max_dims_tpl=None)
+
+ Reset the dataspace extent, via a tuple of new dimensions. Every
+ element of dims_tpl must be a positive integer. You can also specify
+ the maximum dataspace size, via the tuple max_dims. The special
+ integer h5s.SPACE_UNLIMITED, as an element of max_dims, indicates an
+ unlimited dimension.
+ """
+ cdef int rank
+ cdef hsize_t* dims
+ cdef hsize_t* max_dims
+ cdef herr_t retval
+ dims = NULL
+ max_dims = NULL
+
+ rank = len(dims_tpl)
+ if max_dims_tpl is not None and len(max_dims_tpl) != rank:
+ raise ValueError("Dims/max dims tuples must be the same rank: %s vs %s" % (repr(dims_tpl),repr(max_dims_tpl)))
+
+ try:
+ dims = tuple_to_dims(dims_tpl)
+ if dims == NULL:
+ raise ValueError("Bad dimensions tuple: %s" % repr(dims_tpl))
+
+ if max_dims_tpl is not None:
+ max_dims = tuple_to_dims(max_dims_tpl)
+ if max_dims == NULL:
+ raise ValueError("Bad max dimensions tuple: %s" % repr(max_dims_tpl))
+
+ retval = H5Sset_extent_simple(space_id, rank, dims, max_dims)
+
+ if retval < 0:
+ raise DataspaceError("Failed to reset extent to %s on space %d" % (str(dims_tpl), space_id))
+ finally:
+ if dims != NULL:
+ free(dims)
+ if max_dims != NULL:
+ free(max_dims)
+
+def set_extent_none(hid_t space_id):
+ """ (INT space_id)
+
+ Remove the dataspace extent; class changes to h5s.CLASS_NO_CLASS.
+ """
+ cdef herr_t retval
+ retval = H5Sset_extent_non(space_id)
+ if retval < 0:
+ raise DataspaceError("Failed to remove extent from dataspace %d" % space_id)
+
+# === General selection operations ============================================
+
+def get_select_type(hid_t space_id):
+ """ (INT space_id) => INT select_code
+
+ Determine selection type. Return values are:
+ SEL_NONE: No selection.
+ SEL_ALL: All points selected
+ SEL_POINTS: Point-by-point element selection in use
+ SEL_HYPERSLABS: Hyperslab selection in use
+ """
+ cdef int sel_code
+ sel_code = <int>H5Sget_select_type(space_id)
+ if sel_code < 0:
+ raise DataspaceError("Failed to determine selection type of dataspace %d" % space_id)
+ return sel_code
+
+def get_select_npoints(hid_t space_id):
+ """ (INT space_id) => LONG npoints
+
+ Determine the total number of points currently selected. Works for
+ all selection techniques.
+ """
+ cdef hssize_t retval
+ retval = H5Sget_select_npoints(space_id)
+ if retval < 0:
+ raise DataspaceError("Failed to determine number of selected points in dataspace %d" % space_id)
+ return retval
+
+def get_select_bounds(hid_t space_id):
+ """ (INT space_id) => (TUPLE start, TUPLE end)
+
+ Determine the bounding box which exactly contains the current
+ selection.
+ """
+ cdef int rank
+ cdef herr_t retval
+ cdef hsize_t *start
+ cdef hsize_t *end
+ start = NULL
+ end = NULL
+
+ rank = H5Sget_simple_extent_ndims(space_id)
+ if rank < 0:
+ raise DataspaceError("Failed to enumerate dimensions of %d for bounding box." % space_id)
+
+ start = <hsize_t*>malloc(sizeof(hsize_t)*rank)
+ end = <hsize_t*>malloc(sizeof(hsize_t)*rank)
+
+ try:
+ retval = H5Sget_select_bounds(space_id, start, end)
+ if retval < 0:
+ raise DataspaceError("Failed to determine bounding box for space %d" % space_id)
+
+ start_tpl = dims_to_tuple(start, rank)
+ end_tpl = dims_to_tuple(end, rank)
+ if start_tpl == None or end_tpl == None:
+ raise RuntimeError("Failed to construct return tuples.")
+
+ finally:
+ free(start)
+ free(end)
+
+ return (start_tpl, end_tpl)
+
+def select_all(hid_t space_id):
+ """ (INT space_id)
+
+ Select all points in the dataspace.
+ """
+ cdef herr_t retval
+ retval = H5Sselect_all(space_id)
+ if retval < 0:
+ raise DataspaceError("select_all failed on dataspace %d" % space_id)
+
+def select_none(hid_t space_id):
+ """ (INT space_id)
+
+ Deselect entire dataspace.
+ """
+ cdef herr_t retval
+ retval = H5Sselect_none(space_id)
+ if retval < 0:
+ raise DataspaceError("select_none failed on dataspace %d" % space_id)
+
+def select_valid(hid_t space_id):
+ """ (INT space_id) => BOOL select_valid
+
+ Determine if the current selection falls within the dataspace extent.
+ """
+ cdef htri_t retval
+ retval = H5Sselect_valid(space_id)
+ if retval < 0:
+ raise DataspaceError("Failed to determine selection status on dataspace %d" % space_id)
+ return bool(retval)
+
+# === Point selection functions ===============================================
+
+def get_select_elem_npoints(hid_t space_id):
+ """ (INT space_id) => LONG npoints
+
+ Determine the number of elements selected in point-selection mode.
+ """
+ cdef hssize_t retval
+ retval = H5Sget_select_elem_npoints(space_id)
+ if retval < 0:
+ raise DataspaceError("Failed to count element-selection npoints in space %d" % space_id)
+ return retval
+
+def get_select_elem_pointlist(hid_t space_id):
+ """ (INT space_id) => LIST elements_list
+
+ Get a list of all selected elements, in point-selection mode.
+        List entries are <rank>-length tuples containing point coordinates.
+ """
+ cdef herr_t retval
+ cdef int rank
+ cdef hssize_t npoints
+ cdef hsize_t *buf
+ cdef int i_point
+ cdef int i_entry
+
+ npoints = H5Sget_select_elem_npoints(space_id)
+ if npoints < 0:
+ raise DataspaceError("Failed to enumerate points for pointlist, space %d" % space_id)
+ elif npoints == 0:
+ return []
+
+ rank = H5Sget_simple_extent_ndims(space_id)
+ if rank < 0:
+ raise DataspaceError("Failed to determine rank of space %d" % space_id)
+
+ buf = <hsize_t*>malloc(sizeof(hsize_t)*rank*npoints)
+
+ try:
+ retval = H5Sget_select_elem_pointlist(space_id, 0, <hsize_t>npoints, buf)
+ if retval < 0:
+ raise DataspaceError("Failed to retrieve pointlist for dataspace %d" % space_id)
+
+ retlist = []
+ for i_point from 0<=i_point<npoints:
+ tmp_tpl = []
+ for i_entry from 0<=i_entry<rank:
+ tmp_tpl.append( long( buf[i_point*rank + i_entry] ) )
+ retlist.append(tuple(tmp_tpl))
+
+ finally:
+ free(buf)
+
+ return retlist
+
+def select_elements(hid_t space_id, object coord_list, int op=H5S_SELECT_SET):
+ """ (INT space_id, LIST coord_list, INT op=SELECT_SET)
+
+ Select elements using a list of points. List entries should be
+ <rank>-length tuples containing point coordinates.
+ """
+ cdef herr_t retval # Result of API call
+ cdef size_t nelements # Number of point coordinates
+ cdef hsize_t *coords # Contiguous 2D array nelements x rank x sizeof(hsize_t)
+ cdef size_t element_size # Size of a point record: sizeof(hsize_t)*rank
+
+ cdef int rank
+ cdef int i_point
+ cdef int i_entry
+ coords = NULL
+
+ rank = H5Sget_simple_extent_ndims(space_id)
+ if rank < 0:
+ raise DataspaceError("Failed to determine rank of space %d" % space_id)
+
+ nelements = len(coord_list)
+ element_size = sizeof(hsize_t)*rank
+
+ # HDF5 docs say this has to be a contiguous 2D array
+ coords = <hsize_t*>malloc(element_size*nelements)
+
+ try:
+ for i_point from 0<=i_point<nelements:
+
+ tpl = coord_list[i_point]
+ if len(tpl) != rank:
+ raise ValueError("All coordinate entries must be length-%d" % rank)
+
+ for i_entry from 0<=i_entry<rank:
+ coords[(i_point*rank) + i_entry] = tpl[i_entry]
+
+ retval = H5Sselect_elements(space_id, <H5S_seloper_t>op, nelements, <hsize_t**>coords)
+ if retval < 0:
+ raise DataspaceError("Failed to select point list on dataspace %d" % space_id)
+ finally:
+ if coords != NULL:
+ free(coords)
+
+# === Hyperslab selection functions ===========================================
+
+def get_select_hyper_nblocks(hid_t space_id):
+ """ (INT space_id) => LONG nblocks
+
+ Get the number of hyperslab blocks currently selected.
+ """
+ cdef hssize_t nblocks
+ nblocks = H5Sget_select_hyper_nblocks(space_id)
+ if nblocks < 0:
+ raise DataspaceError("Failed to enumerate selected hyperslab blocks in space %d" % space_id)
+ return nblocks
+
+def get_select_hyper_blocklist(hid_t space_id):
+ """ (INT space_id) => LIST hyperslab_blocks
+
+ Get a Python list containing selected hyperslab blocks.
+ List entries are 2-tuples in the form:
+ ( corner_coordinate, opposite_coordinate )
+ where corner_coordinate and opposite_coordinate are <rank>-length
+ tuples.
+ """
+ cdef hssize_t nblocks
+ cdef herr_t retval
+ cdef hsize_t *buf
+
+ cdef int rank
+ cdef int i_block
+ cdef int i_entry
+
+ rank = H5Sget_simple_extent_ndims(space_id)
+ if rank < 0:
+ raise DataspaceError("Failed to determine rank of space %d" % space_id)
+
+ nblocks = H5Sget_select_hyper_nblocks(space_id)
+ if nblocks < 0:
+ raise DataspaceError("Failed to enumerate block selection on space %d" % space_id)
+
+ buf = <hsize_t*>malloc(sizeof(hsize_t)*2*rank*nblocks)
+
+ try:
+ retval = H5Sget_select_hyper_blocklist(space_id, 0, nblocks, buf)
+ if retval < 0:
+ raise DataspaceError("Failed to retrieve list of hyperslab blocks from space %d" % space_id)
+
+ outlist = []
+ for i_block from 0<=i_block<nblocks:
+ corner_list = []
+ opposite_list = []
+ for i_entry from 0<=i_entry<(2*rank):
+ entry = long(buf[ i_block*(2*rank) + i_entry])
+ if i_entry < rank:
+ corner_list.append(entry)
+ else:
+ opposite_list.append(entry)
+ outlist.append( (tuple(corner_list), tuple(opposite_list)) )
+ finally:
+ free(buf)
+
+ return outlist
+
def select_hyperslab(hid_t space_id, object start, object count,
object stride=None, object block=None, int op=H5S_SELECT_SET):
diff --git a/h5py/highlevel.py b/h5py/highlevel.py
index 54ae1f2..09e8c2a 100644
--- a/h5py/highlevel.py
+++ b/h5py/highlevel.py
@@ -110,7 +110,7 @@ class Dataset(object):
# --- Public interface (Dataset) ------------------------------------------
- def __init__(self, group, name, create=False, force=False,
+ def __init__(self, group, name, create=False,
data=None, dtype=None, shape=None,
chunks=None, compression=None, shuffle=False, fletcher32=False):
""" Create a new Dataset object. There are two modes of operation:
@@ -126,11 +126,8 @@ class Dataset(object):
"dtype" (Numpy dtype object) and "shape" (tuple of dimensions).
Chunks/compression/shuffle/fletcher32 can also be specified.
- By default, creating a dataset will fail if another of the
- same name already exists. If you specify force=True, any
- existing dataset will be unlinked, and the new one created.
- This is as close as possible to an atomic operation; if the
- dataset creation fails, the old dataset isn't destroyed.
+ Creating a dataset will fail if another of the same name
+ already exists.
Creation keywords (* is default):
@@ -140,17 +137,8 @@ class Dataset(object):
fletcher32: Enable Fletcher32 error detection? T/F*
"""
if create:
- if force and h5g.py_exists(group.id,name):
- tmpname = 'h5py_temp_' + ''.join(random.sample(string.ascii_letters, 30))
- tmpid = h5d.py_create(group.id, tmpname, data, shape,
+ self.id = h5d.py_create(group.id, name, data, shape,
chunks, compression, shuffle, fletcher32)
- h5g.unlink(group.id, name)
- h5g.link(group.id, tmpname, name)
- h5g.unlink(group.id, tmpname)
-
- else:
- self.id = h5d.py_create(group.id, name, data, shape,
- chunks, compression, shuffle, fletcher32)
else:
if any((data,dtype,shape,chunks,compression,shuffle,fletcher32)):
raise ValueError('You cannot specify keywords when opening a dataset.')
@@ -174,10 +162,20 @@ class Dataset(object):
"""
start, count, stride, names = slicer(self.shape, args)
- return h5d.py_read_slab(self.id, start, count, stride,
- byteorder = self._byteorder,
- compound_names = names,
- complex_names = self._cnames)
+ if names is not None and self.dtype.names is None:
+ raise ValueError('This dataset has no named fields (requested "%s")' % ", ".join(names))
+
+ tid = 0
+ try:
+ tid = h5d.get_type(self.id)
+ dt = h5t.py_h5t_to_dtype(tid, byteorder=self._byteorder,
+ compound_names=names,
+ complex_names=self._cnames)
+ finally:
+ if tid != 0:
+ h5t.close(tid, force=True)
+
+ return h5d.py_read_slab(self.id, start, count, stride, dtype=dt)
def __setitem__(self, args):
""" Write to the underlying array from an existing Numpy array. The
@@ -280,7 +278,7 @@ class Group(object):
elif isinstance(obj, numpy.ndarray):
if h5t.py_can_convert_dtype(obj.dtype):
- dset = Dataset(self, name, data=obj, create=True, force=True)
+ dset = Dataset(self, name, data=obj, create=True)
dset.close()
else:
raise ValueError("Don't know how to store data of this type in a dataset: " + repr(obj.dtype))
@@ -288,7 +286,7 @@ class Group(object):
else:
arr = numpy.array(obj)
if h5t.py_can_convert_dtype(arr.dtype):
- dset = Dataset(self, name, data=arr, create=True, force=True)
+ dset = Dataset(self, name, data=arr, create=True)
dset.close()
else:
raise ValueError("Don't know how to store data of this type in a dataset: " + repr(arr.dtype))
@@ -299,7 +297,7 @@ class Group(object):
"""
retval = _open_arbitrary(self, name)
if isinstance(retval, Dataset) and retval.shape == ():
- value = h5d.py_read_slab(retval.id, ())
+ value = h5d.py_read_slab(retval.id, (), ())
value = value.astype(value.dtype.type)
retval.close()
return value
@@ -736,27 +734,55 @@ def slicer(shape, args):
rank = len(shape)
- def checkdim(dim):
- if not dim < rank:
- raise ValueError("Too many slices (dataset is rank-%d)" % rank)
+ slices = [] # Holds both slice objects and integer indices.
+ names = [] # Field names (strings)
+
+ # Sort slice-like arguments from strings
+ for arg in args:
+ if isinstance(arg, int) or isinstance(arg, long) or isinstance(arg, slice):
+ slices.append(arg)
+ elif isinstance(arg, str):
+ names.append(arg)
+ else:
+ raise TypeError("Unsupported slice type (must be int/long/slice/str): %s" % repr(arg))
+
+ # If there are no names, this is interpreted to mean "all names." So
+ # return None instead of an empty sequence.
+ if len(names) == 0:
+ names = None
+ else:
+ names = tuple(names)
+
+ # Check for special cases
+
+ # 1. No numeric slices == full dataspace
+ if len(slices) == 0:
+ return ((0,)*rank, shape, (1,)*rank, names)
+
+ # 2. Single numeric slice ":" == full dataspace
+ if len(slices) == 1 and isinstance(slices[0], slice):
+ slice_ = slices[0]
+ if slice_.stop == None and slice_.step == None and slice_.stop == None:
+ return ((0,)*rank, shape, (1,)*rank, names)
+
+ # Validate slices
+ if len(slices) != rank:
+ raise ValueError("Number of numeric slices must match dataset rank (%d)" % rank)
start = []
count = []
stride = []
- rawslices = []
- names = []
- dim = 0
- for arg in args:
+ # Parse slices to assemble hyperslab start/count/stride tuples
+ for dim, arg in enumerate(slices):
if isinstance(arg, int) or isinstance(arg, long):
- checkdim(dim)
+ if arg < 0:
+ raise ValueError("Negative indices are not allowed.")
start.append(arg)
count.append(1)
stride.append(1)
- dim += 1
elif isinstance(arg, slice):
- checkdim(dim)
# slice.indices() method clips, so do it the hard way...
@@ -768,7 +794,7 @@ def slicer(shape, args):
raise ValueError("Negative dimensions are not allowed")
ss=arg.start
- # Step
+ # Stride
if arg.step is None:
st = 1
else:
@@ -789,32 +815,7 @@ def slicer(shape, args):
start.append(ss)
stride.append(st)
count.append(cc)
- rawslices.append(arg)
- dim += 1
-
- elif isinstance(arg, str):
- names.append(arg)
-
- else:
- raise TypeError("Unsupported slice type (must be int/long/slice/str): %s" % repr(arg))
-
- if len(names) == 0:
- names = None
- else:
- names = tuple(names)
-
- nslices = len(count)
-
- # Check for lone ":" or no numeric slices, which in Numpy means the whole thing.
- if nslices == len(rawslices) == 1:
- slice_ = rawslices[0]
- if slice_.stop == None and slice_.step == None and slice_.stop == None:
- return ((0,)*rank, shape, (1,)*rank, names)
- if nslices == 0:
- return ((0,)*rank, shape, (1,)*rank, names)
-
- if nslices != rank:
- raise ValueError("Not enough slices (%d); dataset is rank-%d" % (nslices, rank))
+ slices.append(arg)
return (tuple(start), tuple(count), tuple(stride), names)
diff --git a/h5py/utils.pxd b/h5py/utils.pxd
index f775f7f..78e5d04 100644
--- a/h5py/utils.pxd
+++ b/h5py/utils.pxd
@@ -10,10 +10,11 @@
#
#-
+from defs_c cimport size_t
from h5 cimport hid_t, hsize_t
from numpy cimport ndarray
-cdef extern from "utils.h":
+cdef extern from "utils_low.h":
hid_t create_ieee_complex64(char byteorder, char* real_name, char* img_name)
hid_t create_ieee_complex128(char byteorder, char* real_name, char* img_name)
@@ -22,3 +23,7 @@ cdef extern from "utils.h":
int check_numpy_read(ndarray arr, hid_t space_id)
int check_numpy_write(ndarray arr, hid_t space_id)
+
+ void* emalloc(size_t size) except? NULL
+
+
diff --git a/h5py/utils.c b/h5py/utils_low.c
similarity index 94%
rename from h5py/utils.c
rename to h5py/utils_low.c
index 340ddad..3d132b4 100644
--- a/h5py/utils.c
+++ b/h5py/utils_low.c
@@ -21,9 +21,26 @@
#include "Python.h"
#include "numpy/arrayobject.h"
-#include "utils.h"
+#include "utils_low.h"
#include "hdf5.h"
+/* Wrapper for malloc(size) */
+void* emalloc(size_t size){
+
+ void *retval = NULL;
+
+ if(size==0) return NULL;
+ if(size<0){
+ PyErr_SetString(PyExc_RuntimeError, ".");
+ }
+
+ retval = malloc(size);
+ if(retval == NULL){
+ PyErr_SetString(PyExc_RuntimeError, ".");
+ }
+
+ return retval;
+}
/* Convert an hsize_t array to a Python tuple of long ints.
Returns None on failure
diff --git a/h5py/utils.h b/h5py/utils_low.h
similarity index 100%
rename from h5py/utils.h
rename to h5py/utils_low.h
diff --git a/setup.py b/setup.py
index 25de45c..a5d9152 100644
--- a/setup.py
+++ b/setup.py
@@ -178,10 +178,10 @@ ext_exten = '.pyx'
# Pyrex extension modules
pyx_modules = ['h5' , 'h5f', 'h5g', 'h5s', 'h5t',
- 'h5d', 'h5a', 'h5p', 'h5z', 'h5i']
+ 'h5d', 'h5a', 'h5p', 'h5z', 'h5i', 'h5r']
pyx_src_path = 'h5py'
-pyx_extra_src = ['utils.c'] # C source files required for Pyrex code
+pyx_extra_src = ['utils_low.c'] # C source files required for Pyrex code
pyx_libraries = ['hdf5'] # Libraries to link into Pyrex code
# Compile-time include and library dirs for Pyrex code
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/h5py.git
More information about the debian-science-commits
mailing list