[Git][debian-gis-team/python-hdf4][master] 6 commits: New upstream version 0.11.4

Antonio Valentino (@antonio.valentino) gitlab at salsa.debian.org
Thu Mar 28 10:05:54 GMT 2024



Antonio Valentino pushed to branch master at Debian GIS Project / python-hdf4


Commits:
2faa70f8 by Antonio Valentino at 2024-03-28T09:49:38+00:00
New upstream version 0.11.4
- - - - -
8b997ccd by Antonio Valentino at 2024-03-28T09:49:38+00:00
Update upstream source from tag 'upstream/0.11.4'

Update to upstream version '0.11.4'
with Debian dir 4a2b123d0156f56f80dba3a7390b749c6b7d8a3e
- - - - -
102f0300 by Antonio Valentino at 2024-03-28T10:02:59+00:00
New upstream release

- - - - -
63ec4d6f by Antonio Valentino at 2024-03-28T10:03:31+00:00
Drop hdf4alt.patch

- - - - -
2db2bc8f by Antonio Valentino at 2024-03-28T10:03:34+00:00
Update dates in d/copyright

- - - - -
8c6b6708 by Antonio Valentino at 2024-03-28T10:03:34+00:00
Set distribution to unstable

- - - - -


10 changed files:

- debian/changelog
- debian/copyright
- − debian/patches/hdf4alt.patch
- − debian/patches/series
- doc/conf.py
- pyhdf/hdfext.i
- pyhdf/hdfext_wrap.c
- pyhdf/test_SD.py
- pyproject.toml
- setup.py


Changes:

=====================================
debian/changelog
=====================================
@@ -1,3 +1,12 @@
+python-hdf4 (0.11.4-1) unstable; urgency=medium
+
+  * New upstream release.
+  * debian/patches:
+    - Drop hdf4alt.patch, applied upstream.
+  * Update dates in d/copyright.
+
+ -- Antonio Valentino <antonio.valentino at tiscali.it>  Thu, 28 Mar 2024 09:56:41 +0000
+
 python-hdf4 (0.11.3-3) unstable; urgency=medium
 
   * Team upload.


=====================================
debian/copyright
=====================================
@@ -12,7 +12,7 @@ Copyright: 2010-2013, Benjamin Peterson
 License: Expat
 
 Files: debian/*
-Copyright: 2018-2023, Antonio Valentino <antonio.valentino at tiscali.it>
+Copyright: 2018-2024, Antonio Valentino <antonio.valentino at tiscali.it>
 License: Expat
 
 License: Expat


=====================================
debian/patches/hdf4alt.patch deleted
=====================================
@@ -1,24 +0,0 @@
-Description: Also check Multi-Arch path to fix FTBFS.
-Author: Bas Couwenberg <sebastic at debian.org>
-Bug-Debian: https://bugs.debian.org/1066395
-Forwarded: https://github.com/fhs/pyhdf/pull/71
-Applied-Upstream: https://github.com/fhs/pyhdf/commit/f508ec8d5caf6dbba9d60ce2e5f4d335d9f88851
-
---- a/setup.py
-+++ b/setup.py
-@@ -4,6 +4,7 @@ import sys
- import os
- import os.path as path
- import shlex
-+import sysconfig
- 
- from setuptools import Extension, setup
- import numpy as np
-@@ -31,6 +32,7 @@ def _use_hdf4alt(libdirs):
-     if not sys.platform.startswith("linux"):
-         return False
-     libdirs.extend(os.environ.get("LD_LIBRARY_PATH", "").split(os.pathsep))
-+    libdirs.append("/usr/lib/%s" % sysconfig.get_config_var('MULTIARCH'))
-     libdirs.append("/usr/lib")
-     libdirs.append("/usr/local/lib")
-     libdirs.append("/lib")


=====================================
debian/patches/series deleted
=====================================
@@ -1 +0,0 @@
-hdf4alt.patch


=====================================
doc/conf.py
=====================================
@@ -56,7 +56,7 @@ copyright = u'2019, pyhdf authors'
 # The short X.Y version.
 version = '0.11'
 # The full version, including alpha/beta/rc tags.
-release = '0.11.3'
+release = '0.11.4'
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.


=====================================
pyhdf/hdfext.i
=====================================
@@ -325,14 +325,17 @@ static PyObject * _SDreaddata_0(int32 sds_id, int32 data_type,
         }
 
         /*
-         * Create output numpy array.
+         * Create output numpy array. We provide 1 for the itemsize argument to
+         * PyArray_New to handle the case when num_type is NPY_STRING. All other
+         * num_type possibilities are fixed-size types, so itemsize is ignored.
          */
     if ((num_type = HDFtoNumericType(data_type)) < 0)    {
         PyErr_SetString(PyExc_ValueError, "data_type not compatible with numpy");
         return NULL;
         }
-    if ((array = (PyArrayObject *)
-                 PyArray_SimpleNew(outRank, dims, num_type)) == NULL)
+    array = (PyArrayObject *)PyArray_New(&PyArray_Type, outRank, dims, num_type,
+                                         NULL, NULL, 1, 0, NULL);
+    if (array == NULL)
         return NULL;
         /*
          * Load it from the SDS.
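
Note on the _SDreaddata_0 hunk above (the same change appears in the generated
pyhdf/hdfext_wrap.c below): as the upstream comment explains, PyArray_New takes
an explicit itemsize, and passing 1 covers the NPY_STRING case while the
fixed-size numeric types ignore it. A minimal Python-level illustration of the
same distinction; this sketch is not part of the patch:

    import numpy as np

    # Fixed-size dtypes imply their own element size; a flexible byte-string
    # dtype needs the width spelled out, which is what itemsize=1 does for
    # NPY_STRING in the C call above.
    fixed = np.empty(5, dtype=np.int32)    # itemsize implied: 4
    chars = np.empty(5, dtype="S1")        # width given explicitly
    print(fixed.itemsize, chars.itemsize)  # -> 4 1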


=====================================
pyhdf/hdfext_wrap.c
=====================================
@@ -3964,14 +3964,17 @@ static PyObject * _SDreaddata_0(int32 sds_id, int32 data_type,
         }
 
         /*
-         * Create output numpy array.
+         * Create output numpy array. We provide 1 for the itemsize argument to
+         * PyArray_New to handle the case when num_type is NPY_STRING. All other
+         * num_type possibilities are fixed-size types, so itemsize is ignored.
          */
     if ((num_type = HDFtoNumericType(data_type)) < 0)    {
         PyErr_SetString(PyExc_ValueError, "data_type not compatible with numpy");
         return NULL;
         }
-    if ((array = (PyArrayObject *)
-                 PyArray_SimpleNew(outRank, dims, num_type)) == NULL)
+    array = (PyArrayObject *)PyArray_New(&PyArray_Type, outRank, dims, num_type,
+                                         NULL, NULL, 1, 0, NULL);
+    if (array == NULL)
         return NULL;
         /*
          * Load it from the SDS.


=====================================
pyhdf/test_SD.py
=====================================
@@ -6,6 +6,7 @@ import pyhdf.SD
 import shutil
 import tempfile
 from numpy.testing import assert_array_equal
+from pathlib import Path
 from pyhdf.SD import SDC
 
 def test_long_varname():
@@ -61,3 +62,11 @@ def test_negative_int8():
         sd.end()
     finally:
         shutil.rmtree(temp)
+
+def test_char():
+    with tempfile.TemporaryDirectory() as temp_dir:
+        hdf_file = str(Path(temp_dir) / "test.hdf")
+        sd = pyhdf.SD.SD(hdf_file, SDC.WRITE | SDC.CREATE)
+        sds = sd.create("test_sds", SDC.CHAR, [5])
+        sds[:] = "ABCDE"
+        assert_array_equal(sds[:], np.array(list("ABCDE"), "S2"))
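
The new test_char test compares data read back from a CHAR SDS against a numpy
array of byte strings. A small illustration (not part of the commit) of turning
such an array back into an ordinary Python string, assuming the same "ABCDE"
contents as the test:

    import numpy as np

    # Collapse an array of single-character byte strings, like the one
    # test_char compares against, into one Python string.
    chars = np.array(list("ABCDE"), "S1")
    text = b"".join(chars.tolist()).decode("ascii")
    print(text)  # -> ABCDE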


=====================================
pyproject.toml
=====================================
@@ -7,7 +7,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "pyhdf"
-version = "0.11.3"
+version = "0.11.4"
 description = "Python interface to the NCSA HDF4 library"
 readme = "README.md"
 keywords = ['hdf4', 'netcdf', 'numpy', 'python', 'pyhdf']
@@ -34,7 +34,9 @@ authors = [
 maintainers = [
 	{name = "Fazlul Shahriar", email = "fshahriar at gmail.com"},
 ]
-dependencies = []
+dependencies = [
+	"numpy"
+]
 
 [project.urls]
 Homepage = 'https://github.com/fhs/pyhdf'


=====================================
setup.py
=====================================
@@ -4,6 +4,7 @@ import sys
 import os
 import os.path as path
 import shlex
+import sysconfig
 
 from setuptools import Extension, setup
 import numpy as np
@@ -31,6 +32,7 @@ def _use_hdf4alt(libdirs):
     if not sys.platform.startswith("linux"):
         return False
     libdirs.extend(os.environ.get("LD_LIBRARY_PATH", "").split(os.pathsep))
+    libdirs.append("/usr/lib/%s" % sysconfig.get_config_var('MULTIARCH'))
     libdirs.append("/usr/lib")
     libdirs.append("/usr/local/lib")
     libdirs.append("/lib")



View it on GitLab: https://salsa.debian.org/debian-gis-team/python-hdf4/-/compare/29fceb2e07252a38ba75015cfca6387d8d75a503...8c6b67081dd77cfbd6ccb6c93feaface7d432dfc
