Merge remote-tracking branch 'up/main' into main-riscv64

Signed-off-by: David Abdurachmanov <davidlt@rivosinc.com>
David Abdurachmanov 2024-07-13 18:05:20 +03:00
commit aa75049d28
Signed by: davidlt
GPG Key ID: 7A5F42FAF91FACC3
11 changed files with 149 additions and 312 deletions

.gitignore

@@ -75,3 +75,8 @@
/dask-2023.8.1.tar.gz
/dask-2023.11.0.tar.gz
/dask-2023.12.0.tar.gz
/dask-2024.1.0.tar.gz
/dask-2024.1.1.tar.gz
/dask-2024.2.1.tar.gz
/dask-2024.6.0.tar.gz
/dask-2024.6.2.tar.gz

0001-Remove-extra-test-dependencies.patch

@@ -0,0 +1,38 @@
From f72b9ced312c92900ed7903ec07aefde35e22489 Mon Sep 17 00:00:00 2001
From: Elliott Sales de Andrade <quantum.analyst@gmail.com>
Date: Thu, 20 Jun 2024 21:08:07 -0400
Subject: [PATCH] Remove extra test dependencies
Signed-off-by: Elliott Sales de Andrade <quantum.analyst@gmail.com>
---
pyproject.toml | 4 +---
1 file changed, 1 insertion(+), 3 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 1d036cbb..e9600551 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -75,11 +75,9 @@ complete = [
test = [
"pandas[test]",
"pytest",
- "pytest-cov",
"pytest-rerunfailures",
"pytest-timeout",
"pytest-xdist",
- "pre-commit",
]
[project.entry-points."dask.array.backends"]
@@ -128,7 +126,7 @@ markers = [
"skip_with_pyarrow_strings: Tests that should be skipped when pyarrow string conversion is turned on",
"xfail_with_pyarrow_strings: Tests that should be xfailed when pyarrow string conversion is turned on",
]
-addopts = "-v -rsfE --durations=10 --color=yes --cov-config=pyproject.toml"
+addopts = "-v -rsfE --durations=10 --color=yes"
filterwarnings = [
"error:::dask[.*]",
"error:::pandas[.*]",
--
2.45.2
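
Why dropping these lines matters: with `--cov-config` baked into `addopts`, every pytest run requires the pytest-cov plugin, which the package build deliberately leaves out. A minimal sketch of the failure mode (assumes pytest is installed and pytest-cov is not):

import subprocess
import sys

# --cov-config is only registered by the pytest-cov plugin; without it,
# pytest rejects the option before collecting a single test.
result = subprocess.run(
    [sys.executable, "-m", "pytest", "--cov-config=pyproject.toml", "--collect-only"],
    capture_output=True,
    text=True,
)
print(result.returncode)                        # 4 (usage error) when pytest-cov is absent
print(result.stderr.strip().splitlines()[-1:])  # pytest: error: unrecognized arguments: ...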

0001-Skip-test_encoding_gh601-on-big-endian-machines.patch

@@ -1,33 +0,0 @@
From 1fcc2a30f6e5fea9aff448a52a0b4c4bbed908ea Mon Sep 17 00:00:00 2001
From: Elliott Sales de Andrade <quantum.analyst@gmail.com>
Date: Sun, 7 Mar 2021 04:07:32 -0500
Subject: [PATCH 1/6] Skip test_encoding_gh601 on big-endian machines.
Signed-off-by: Elliott Sales de Andrade <quantum.analyst@gmail.com>
---
dask/dataframe/io/tests/test_csv.py | 2 ++
1 file changed, 2 insertions(+)
diff --git a/dask/dataframe/io/tests/test_csv.py b/dask/dataframe/io/tests/test_csv.py
index 1df7202f..54ff1dff 100644
--- a/dask/dataframe/io/tests/test_csv.py
+++ b/dask/dataframe/io/tests/test_csv.py
@@ -4,6 +4,7 @@ import gzip
import os
import warnings
from io import BytesIO, StringIO
+import sys
from unittest import mock
import pytest
@@ -1155,6 +1156,7 @@ def test_read_csv_with_datetime_index_partitions_n():
xfail_pandas_100 = pytest.mark.xfail(reason="https://github.com/dask/dask/issues/5787")
+@pytest.mark.skipif(sys.byteorder == 'big', reason='Broken on big-endian machines')
@pytest.mark.parametrize(
"encoding",
[
--
2.42.0

0002-Skip-coverage-testing.patch

@@ -1,26 +0,0 @@
From f99ee06a1c64ff3096e90cbc3a11ed4f172de691 Mon Sep 17 00:00:00 2001
From: Elliott Sales de Andrade <quantum.analyst@gmail.com>
Date: Sun, 7 May 2023 23:13:59 -0400
Subject: [PATCH 2/6] Skip coverage testing
Signed-off-by: Elliott Sales de Andrade <quantum.analyst@gmail.com>
---
pyproject.toml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pyproject.toml b/pyproject.toml
index 0ef22fb6..a11ac921 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -127,7 +127,7 @@ markers = [
"skip_with_pyarrow_strings: Tests that should be skipped when pyarrow string conversion is turned on",
"xfail_with_pyarrow_strings: Tests that should be xfailed when pyarrow string conversion is turned on",
]
-addopts = "-v -rsxfE --durations=10 --color=yes --cov-config=pyproject.toml"
+addopts = "-v -rsxfE --durations=10 --color=yes"
filterwarnings = [
# From Cython-1753
"ignore:can't resolve:ImportWarning",
--
2.42.0

0003-TST-Increase-maximum-for-sizeof-test-to-pass-32-bit.patch

@@ -1,29 +0,0 @@
From 691eb92e4c6099309919e3aaa05a3dd6a2f4ddb1 Mon Sep 17 00:00:00 2001
From: Elliott Sales de Andrade <quantum.analyst@gmail.com>
Date: Thu, 20 Jul 2023 00:05:48 -0400
Subject: [PATCH 3/6] TST: Increase maximum for sizeof test to pass 32-bit
In that case, the result is 1244, which is ~2.6 * `sys.getsizeof` (476),
slightly over the factor of 2 allowed by the existing test.
Signed-off-by: Elliott Sales de Andrade <quantum.analyst@gmail.com>
---
dask/tests/test_sizeof.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/dask/tests/test_sizeof.py b/dask/tests/test_sizeof.py
index 9b43a7ae..866d119b 100644
--- a/dask/tests/test_sizeof.py
+++ b/dask/tests/test_sizeof.py
@@ -81,7 +81,7 @@ def test_pandas_multiindex():
index = pd.MultiIndex.from_product([range(5), ["a", "b", "c", "d", "e"]])
actual_size = sys.getsizeof(index)
- assert 0.5 * actual_size < sizeof(index) < 2 * actual_size
+ assert 0.5 * actual_size < sizeof(index) < 3 * actual_size
assert isinstance(sizeof(index), int)
--
2.42.0
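
The bound change in numbers, as a quick check (figures taken from the commit message above):

# On 32-bit: sys.getsizeof(index) reports 476, dask's sizeof(index) returns 1244.
actual_size = 476
dask_size = 1244

print(dask_size / actual_size)                           # ~2.61
assert not (dask_size < 2 * actual_size)                 # old cap of 952: fails
assert 0.5 * actual_size < dask_size < 3 * actual_size   # new bounds 238..1428: passes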

0004-Fix-test_pandas_timestamp_overflow_pyarrow-condition.patch

@@ -1,36 +0,0 @@
From e5c291fbcd69a80aef7d8b01f7621d736ce497ed Mon Sep 17 00:00:00 2001
From: Elliott Sales de Andrade <quantum.analyst@gmail.com>
Date: Sat, 19 Aug 2023 16:49:33 -0400
Subject: [PATCH 4/6] Fix test_pandas_timestamp_overflow_pyarrow condition
The new behavior in pyarrow only occurs with Pandas 2.0 as well.
Signed-off-by: Elliott Sales de Andrade <quantum.analyst@gmail.com>
---
dask/dataframe/io/tests/test_parquet.py | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/dask/dataframe/io/tests/test_parquet.py b/dask/dataframe/io/tests/test_parquet.py
index 0d4dd39d..d5ad8b5f 100644
--- a/dask/dataframe/io/tests/test_parquet.py
+++ b/dask/dataframe/io/tests/test_parquet.py
@@ -3381,13 +3381,13 @@ def test_pandas_timestamp_overflow_pyarrow(tmpdir):
table, f"{tmpdir}/file.parquet", use_deprecated_int96_timestamps=False
)
- if pyarrow_version < parse_version("13.0.0.dev"):
+ if pyarrow_version.major >= 13 and PANDAS_GE_200:
+ dd.read_parquet(str(tmpdir), engine="pyarrow").compute()
+ else:
# This will raise by default due to overflow
with pytest.raises(pa.lib.ArrowInvalid) as e:
dd.read_parquet(str(tmpdir), engine="pyarrow").compute()
assert "out of bounds" in str(e.value)
- else:
- dd.read_parquet(str(tmpdir), engine="pyarrow").compute()
from dask.dataframe.io.parquet.arrow import ArrowDatasetEngine as ArrowEngine
--
2.42.0
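
Why the old guard misfired: in `packaging`, a `.dev` suffix sorts before the matching release, so `pyarrow_version < parse_version("13.0.0.dev")` goes False for every 13.x build, but the new no-raise behavior also needs Pandas 2.0. A sketch of the comparison semantics (assumes the `packaging` library; `PANDAS_GE_200` stands in for dask's internal flag):

from packaging.version import parse as parse_version

assert parse_version("12.0.1") < parse_version("13.0.0.dev")      # old raise path
assert not parse_version("13.0.0") < parse_version("13.0.0.dev")  # final sorts after .dev

pyarrow_version = parse_version("13.0.0")
PANDAS_GE_200 = True  # stand-in; dask derives this from the pandas version
if pyarrow_version.major >= 13 and PANDAS_GE_200:
    print("expect read_parquet to succeed")
else:
    print("expect ArrowInvalid: out of bounds")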

0005-Allow-older-versioneer.patch

@@ -1,24 +0,0 @@
From 7803706e5acf1e1edce34de0b0d5321872bde299 Mon Sep 17 00:00:00 2001
From: Elliott Sales de Andrade <quantum.analyst@gmail.com>
Date: Sat, 25 Nov 2023 20:22:50 -0500
Subject: [PATCH 5/6] Allow older versioneer
Signed-off-by: Elliott Sales de Andrade <quantum.analyst@gmail.com>
---
pyproject.toml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pyproject.toml b/pyproject.toml
index a11ac921..d74f0fa5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,5 +1,5 @@
[build-system]
-requires = ["setuptools>=62.6", "versioneer[toml]==0.29"]
+requires = ["setuptools>=62.6", "versioneer[toml]>=0.28"]
build-backend = "setuptools.build_meta"
[project]
--
2.42.0
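
The effect of the loosened pin, in `packaging` terms (a sketch, assuming the `packaging` library):

from packaging.specifiers import SpecifierSet

old, new = SpecifierSet("==0.29"), SpecifierSet(">=0.28")
for v in ("0.27", "0.28", "0.29", "0.30"):
    print(v, v in old, v in new)
# 0.27 False False
# 0.28 False True   <- e.g. a distro-packaged 0.28 now satisfies the build
# 0.29 True  True
# 0.30 False True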

0006-Ignore-NumPy-warnings-from-Pandas.patch

@@ -1,29 +0,0 @@
From 7db95f23f20cfd48eb6352c9ac0cba7780e925f6 Mon Sep 17 00:00:00 2001
From: James Bourbeau <jrbourbeau@gmail.com>
Date: Mon, 22 May 2023 12:53:20 -0500
Subject: [PATCH 6/6] Ignore NumPy warnings from Pandas
Signed-off-by: Elliott Sales de Andrade <quantum.analyst@gmail.com>
---
pyproject.toml | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/pyproject.toml b/pyproject.toml
index d74f0fa5..b93c2187 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -143,6 +143,11 @@ filterwarnings = [
# https://pandas.pydata.org/docs/dev/whatsnew/v1.5.0.html#using-group-keys-with-transformers-in-groupby-apply
"ignore:Not prepending group keys:FutureWarning",
"ignore:.*:dask.tests.warning_aliases.RemovedIn20Warning",
+ # This is coming from pandas use of np.find_common_type
+ # See https://github.com/pandas-dev/pandas/issues/53236
+ "ignore:np.find_common_type is deprecated:DeprecationWarning",
+ "ignore:`cumproduct` is deprecated:DeprecationWarning",
+ "ignore:`product` is deprecated:DeprecationWarning",
"ignore:When grouping with a length-1 list-like, you will need to pass a length-1 tuple to get_group in a future version of pandas:FutureWarning",
'ignore:DataFrameGroupBy\.apply operated on the grouping columns\. This behavior is deprecated, and in a future version of pandas the grouping columns will be excluded from the operation\. Either pass `include_groups=False` to exclude the groupings or explicitly select the grouping columns after groupby to silence this warning\.:FutureWarning',
'ignore:Passing a BlockManager to DataFrame is deprecated and will raise in a future version. Use public APIs instead:DeprecationWarning', # https://github.com/apache/arrow/issues/35081
--
2.42.0
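
The added `ignore:` entries use Python's standard warning-filter syntax (`action:message:category:module`), where the message part is a regex matched against the start of the warning text. Roughly equivalent stdlib form, as a sketch:

import warnings

warnings.filterwarnings("ignore", message="np.find_common_type is deprecated",
                        category=DeprecationWarning)
warnings.filterwarnings("ignore", message="`cumproduct` is deprecated",
                        category=DeprecationWarning)
warnings.filterwarnings("ignore", message="`product` is deprecated",
                        category=DeprecationWarning)

# Would otherwise fail a -W error run; with the filter it is silently dropped.
warnings.warn("np.find_common_type is deprecated (use np.result_type)", DeprecationWarning)
print("suppressed")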

force-little-endian-random.patch

@@ -1,19 +0,0 @@
Author: Diane Trout <diane@ghic.org>
Description: Force initializing the random seed with the same
byte order interpretation as on x86.
Index: dask-2023.8.0+dfsg/dask/utils.py
===================================================================
--- dask-2023.8.0+dfsg.orig/dask/utils.py
+++ dask-2023.8.0+dfsg/dask/utils.py
@@ -426,7 +426,9 @@ def random_state_data(n: int, random_sta
random_state = np.random.RandomState(random_state)
random_data = random_state.bytes(624 * n * 4) # `n * 624` 32-bit integers
- l = list(np.frombuffer(random_data, dtype=np.uint32).reshape((n, -1)))
+ dt = np.dtype(np.uint32)
+ dt = dt.newbyteorder("<")
+ l = list(np.frombuffer(random_data, dtype=dt).reshape((n, -1)))
assert len(l) == n
return l
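
The underlying issue: `np.frombuffer` decodes raw bytes with the platform's native byte order, so identical seed bytes yield different 32-bit integers on big-endian machines. Pinning the dtype to little-endian makes the derived random states match x86. A small demonstration (assumes NumPy):

import numpy as np

raw = bytes([0x01, 0x02, 0x03, 0x04])
native = np.frombuffer(raw, dtype=np.uint32)[0]

dt = np.dtype(np.uint32).newbyteorder("<")  # the patch's forced layout
little = np.frombuffer(raw, dtype=dt)[0]

print(hex(int(native)))  # 0x4030201 on x86; 0x1020304 on s390x
print(hex(int(little)))  # 0x4030201 everywhere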

python-dask.spec

@@ -3,36 +3,29 @@
# Requires distributed, which is a loop.
# Also, some tests require packages that require dask itself.
# Force bootstrap for package review.
%bcond_without bootstrap
%bcond bootstrap 0
# We don't have all dependencies available yet.
%bcond docs 0
# We have an arched package to detect arch-dependent issues in dependencies,
# but all of the installable RPMs are noarch and there is no compiled code.
%global debug_package %{nil}
Name: python-%{srcname}
Version: 2023.12.0
%global tag 2023.12.0
Release: %autorelease -e 0.riscv64
Version: 2024.6.2
%global tag 2024.6.2
Release: %autorelease
Summary: Parallel PyData with Task Scheduling
License: BSD-3-Clause
URL: https://github.com/dask/dask
Source0: %{pypi_source %{srcname}}
# https://github.com/dask/dask/issues/6725
Patch: 0001-Skip-test_encoding_gh601-on-big-endian-machines.patch
# Fedora-specific patch.
Patch: 0002-Skip-coverage-testing.patch
# Drop after dropping 32-bit support.
Patch: 0003-TST-Increase-maximum-for-sizeof-test-to-pass-32-bit.patch
# https://github.com/dask/dask/pull/10451
Patch: 0004-Fix-test_pandas_timestamp_overflow_pyarrow-condition.patch
# https://github.com/dask/dask/issues/10423
Patch: https://salsa.debian.org/python-team/packages/dask/-/raw/08ffea1b7b53e9c71c9a926d5786288c2e6c1b5b/debian/patches/force-little-endian-random.patch
Patch: 0005-Allow-older-versioneer.patch
# Ignore warnings from Pandas.
# Upstream had https://github.com/dask/dask/pull/10307 but reverted it once a
# newer Pandas release fixed the warning; we don't have that Pandas yet.
Patch: 0006-Ignore-NumPy-warnings-from-Pandas.patch
Patch: 0001-Remove-extra-test-dependencies.patch
# Stop building on i686
# https://fedoraproject.org/wiki/Changes/EncourageI686LeafRemoval
ExcludeArch: %{ix86}
%description
Dask is a flexible parallel computing library for analytics.
@@ -51,24 +44,16 @@ BuildRequires: python3dist(scikit-image)
BuildRequires: python3dist(xarray)
%endif
# Optional test requirements.
# Fastavro does not support 32 bit architectures and is ExcludeArch:
# https://bugzilla.redhat.com/show_bug.cgi?id=1943932
%ifnarch %{arm32} %{ix86}
BuildRequires: python3dist(fastavro)
%endif
BuildRequires: python3dist(h5py)
BuildRequires: python3dist(psutil)
# libarrow does not support 32 bit architectures and is ExcludeArch.
# Tests don't pass on s390x either.
%ifnarch %{arm} %{ix86} s390x
# libarrow tests don't pass on s390x either.
%ifnarch s390x
BuildRequires: python3dist(pyarrow)
%endif
BuildRequires: python3dist(requests)
BuildRequires: python3dist(sqlalchemy)
# tables does not support 32 bit architectures and is ExcludeArch.
%ifnarch %{ix86}
BuildRequires: python3dist(tables)
%endif
BuildRequires: python3dist(zarr)
Recommends: python3-%{srcname}+array = %{version}-%{release}
@@ -101,13 +86,82 @@ Provides: bundled(numpy)
Dask is a flexible parallel computing library for analytics.
%pyproject_extras_subpkg -n python3-%{srcname} array bag dataframe delayed
# Based on (but with BuildArch: noarch):
# %%pyproject_extras_subpkg -n python3-%%{srcname} array bag dataframe delayed
#
# Extras subpackages are arched, they should not be
# https://bugzilla.redhat.com/show_bug.cgi?id=2293727
#
# Further discussion is in
# https://src.fedoraproject.org/rpms/python-rpm-macros/pull-request/174.
%package -n python3-%{srcname}+array
Summary: Metapackage for python3-%{srcname}: array extras
Requires: python3-%{srcname} = %{version}-%{release}
BuildArch: noarch
%description -n python3-%{srcname}+array
This is a metapackage bringing in array extras requires for python3-%{srcname}.
It makes sure the dependencies are installed.
%files -n python3-%{srcname}+array -f %{_pyproject_ghost_distinfo}
%package -n python3-%{srcname}+bag
Summary: Metapackage for python3-%{srcname}: bag extras
Requires: python3-%{srcname} = %{version}-%{release}
BuildArch: noarch
%description -n python3-%{srcname}+bag
This is a metapackage bringing in bag extras requires for python3-%{srcname}.
It makes sure the dependencies are installed.
%files -n python3-%{srcname}+bag -f %{_pyproject_ghost_distinfo}
%package -n python3-%{srcname}+dataframe
Summary: Metapackage for python3-%{srcname}: dataframe extras
Requires: python3-%{srcname} = %{version}-%{release}
BuildArch: noarch
%description -n python3-%{srcname}+dataframe
This is a metapackage bringing in dataframe extras requires for python3-%{srcname}.
It makes sure the dependencies are installed.
%files -n python3-%{srcname}+dataframe -f %{_pyproject_ghost_distinfo}
%package -n python3-%{srcname}+delayed
Summary: Metapackage for python3-%{srcname}: delayed extras
Requires: python3-%{srcname} = %{version}-%{release}
BuildArch: noarch
%description -n python3-%{srcname}+delayed
This is a metapackage bringing in delayed extras requires for python3-%{srcname}.
It makes sure the dependencies are installed.
%files -n python3-%{srcname}+delayed -f %{_pyproject_ghost_distinfo}
%if %{without bootstrap}
%pyproject_extras_subpkg distributed
# Based on (but with BuildArch: noarch):
# %%pyproject_extras_subpkg -n python3-%%{srcname} distributed
# (see comments for the other extras metapackages, above)
%package -n python3-%{srcname}+distributed
Summary: Metapackage for python3-%{srcname}: distributed extras
Requires: python3-%{srcname} = %{version}-%{release}
BuildArch: noarch
%description -n python3-%{srcname}+distributed
This is a metapackage bringing in distributed extras requires for python3-%{srcname}.
It makes sure the dependencies are installed.
%files -n python3-%{srcname}+distributed -f %{_pyproject_ghost_distinfo}
%endif
%if %{without bootstrap}
%if %{with docs}
%package -n python-%{srcname}-doc
Summary: dask documentation
@@ -124,12 +178,10 @@ Documentation for dask.
%prep
%autosetup -n %{srcname}-%{version} -p1
# we don't use pre-commit when running tests
sed -i '/"pre-commit"/d' setup.py
%generate_buildrequires
%pyproject_buildrequires -r -x test,array,bag,dataframe,delayed
%pyproject_buildrequires -x test,array,bag,dataframe,delayed
%if %{without bootstrap}
%pyproject_buildrequires -x distributed
%endif
@@ -138,7 +190,7 @@ sed -i '/"pre-commit"/d' setup.py
%build
%pyproject_wheel
%if %{without bootstrap}
%if %{with docs}
# generate html docs
PYTHONPATH=${PWD} sphinx-build-3 docs/source html
# remove the sphinx-build leftovers
@@ -149,68 +201,10 @@ rm -rf html/.{doctrees,buildinfo}
%install
%pyproject_install
%pyproject_save_files %{srcname}
%pyproject_save_files -l %{srcname}
%check
%ifarch arm
# Is there a way to do this in one line?
%global have_arm 1
%endif
%if 0%{?__isa_bits} == 32
# read_sql_query with meta converts dtypes from 32 to 64.
# https://github.com/dask/dask/issues/8620
# > tm.assert_frame_equal(
# a, b, check_names=check_names, check_dtype=check_dtype, **kwargs
# E AssertionError: Attributes of DataFrame.iloc[:, 1] (column name="age") are different
# E
# E Attribute "dtype" are different
# E [left]: int32
# E [right]: int64
# dask/dataframe/utils.py:555: AssertionError
k="${k-}${k+ and }not test_query_with_meta"
%endif
%ifarch ppc64le
# TODO: Should this be reported upstream? Is it a dask issue, or a numpy one?
# Possibly related to
# https://fedoraproject.org/wiki/Changes/PPC64LE_Float128_Transition?
# > assert allclose(a, b, equal_nan=equal_nan, **kwargs), msg
# E AssertionError: found values in 'a' and 'b' which differ by more than the allowed amount
# E assert False
# E + where False = allclose(array([0.12586355-0.09957204j, 0.20256483+0.04098342j,\n 0.05781123-0.03588671j, 0.01135963-0.03334219j,\n 0.03747771+0.07495994j, 0.2106574 -0.0363521j ,\n 0.16352091+0.03782915j, 0.1381678 -0.06815128j,\n 0.03781295-0.04011523j, 0.01493269+0.07780643j]), array([0.12559072-0.07164038j, 0.20256483+0.05438578j,\n 0.05781123-0.03588671j, 0.01135963-0.03334219j,\n 0.03747771+0.07495994j, 0.2106574 -0.0363521j ,\n 0.16352091+0.03782915j, 0.1381678 -0.06815128j,\n 0.03781295-0.04011523j, 0.01493269+0.07780643j]), equal_nan=True, **{})
# dask/array/utils.py:361: AssertionError
k="${k-}${k+ and }not test_lstsq[100-10-10-True]"
# > assert allclose(a, b, equal_nan=equal_nan, **kwargs), msg
# E AssertionError: found values in 'a' and 'b' which differ by more than the allowed amount
# E assert False
# E + where False = allclose(array([ 0.20168675+0.08857556j, 0.144233 -0.19173091j,\n -0.03367557-0.08053959j, 0.04108325-0.24648308j,\n -0.01844576+0.00841932j, 0.29652375+0.05682199j,\n 0.05551828+0.20156798j, -0.08409592+0.02354949j,\n 0.09848743-0.00748637j, 0.22889193-0.07372773j]), array([ 0.20067551+0.2642591j , 0.144233 -0.18573336j,\n -0.03367557-0.08053959j, 0.04108325-0.24648308j,\n -0.01844576+0.00841932j, 0.29652375+0.05682199j,\n 0.05551828+0.20156798j, -0.08409592+0.02354949j,\n 0.09848743-0.00748637j, 0.22889193-0.07372773j]), equal_nan=True, **{})
# dask/array/utils.py:361: AssertionError
k="${k-}${k+ and }not test_lstsq[20-10-5-True]"
# test_vdot fails with NumPy 1.19.0
# https://github.com/dask/dask/issues/6406
#
# vdot returns incorrect results on ppc64le
# https://github.com/numpy/numpy/issues/17087
# > assert allclose(a, b, equal_nan=equal_nan, **kwargs), msg
# E AssertionError: found values in 'a' and 'b' which differ by more than the allowed amount
# E assert False
# E + where False = allclose((0.38772781971416226-0.6851997484294434j), (0.38772781971416226-0.306563166009585j), equal_nan=True, **{})
# dask/array/utils.py:361: AssertionError
k="${k-}${k+ and }not test_vdot[shape0-chunks0]"
# > assert allclose(a, b, equal_nan=equal_nan, **kwargs), msg
# E AssertionError: found values in 'a' and 'b' which differ by more than the allowed amount
# E assert False
# E + where False = allclose((0.38772781971416226-0.6851997484294434j), (0.38772781971416226-0.306563166009585j), equal_nan=True, **{})
# dask/array/utils.py:361: AssertionError
k="${k-}${k+ and }not test_vdot[shape1-chunks1]"
%endif
# This test compares against files in .github/. It does not work on the PyPI
# sdist, and is only relevant to upstream CI anyway.
#
@@ -218,30 +212,26 @@ k="${k-}${k+ and }not test_vdot[shape1-chunks1]"
# https://github.com/dask/dask/issues/8499
k="${k-}${k+ and }not test_development_guidelines_matches_ci"
# This test shows excess memory usage on Python 3.12
# https://github.com/dask/dask/issues/10418
k="${k-}${k+ and }not test_division_or_partition"
%ifarch riscv64
k="${k-}${k+ and }not test_datetime_std_with_larger_dataset[False-False-1]"
k="${k-}${k+ and }not test_datetime_std_with_larger_dataset[None-True-1]"
k="${k-}${k+ and }not test_datetime_std_with_larger_dataset[False-True-1]"
k="${k-}${k+ and }not test_datetime_std_with_larger_dataset[None-False-1]"
k="${k-}${k+ and }not test_cov_corr_stable"
%endif
pytest_args=(
-m 'not network'
-n %[0%{?have_arm}?"2":"auto"]
%ifarch %{ix86}
# Ignore 32-bit warning
-W 'ignore:invalid value encountered in cast:RuntimeWarning'
%endif
-n "auto"
-k "${k-}"
# arrow tests all fail on s390x, it's not at all BE-safe
# the exclusion of arrow as a build dep on s390x above is meant to
# ensure these tests get skipped, but dask-expr requires arrow, so it
# gets pulled into the build env anyway
# https://github.com/dask/dask/issues/11186
%ifarch s390x
--ignore %{buildroot}%{python3_sitelib}/%{srcname}/dataframe/io/tests/test_parquet.py
%endif
# Upstream uses 'thread' for Windows, but that kills the whole session, and
# we'd like to see exactly which tests fail.
--timeout_method=signal
--pyargs dask
)
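
On the `k="${k-}${k+ and }..."` idiom used above: `${k-}` expands to the current value of `k` (empty if unset), and `${k+ and }` expands to " and " only once `k` is set, so each line appends another `not <test>` clause with the right separator. The same accumulation in Python, as a sketch:

# Equivalent of the repeated k="${k-}${k+ and }not <test>" lines:
deselect = [
    "test_development_guidelines_matches_ci",
    "test_division_or_partition",
]
k = " and ".join(f"not {t}" for t in deselect)
print(k)
# -> not test_development_guidelines_matches_ci and not test_division_or_partition
# pytest then receives it as: -k "not ... and not ..."
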
@@ -250,13 +240,13 @@ cd docs
%files -n python3-%{srcname} -f %{pyproject_files}
%doc README.rst
%license LICENSE.txt dask/array/NUMPY_LICENSE.txt
%license dask/array/NUMPY_LICENSE.txt
%{_bindir}/dask
%if %{without bootstrap}
%if %{with docs}
%files -n python-%{srcname}-doc
%doc html
%license LICENSE.txt dask/array/NUMPY_LICENSE.txt
%license dask/array/NUMPY_LICENSE.txt
%endif

sources

@@ -1 +1 @@
SHA512 (dask-2023.12.0.tar.gz) = b4e81f7e0ff23862824b71c98f36293da2d11bc49b5f82df556be0023d001aa85181db336e8333bb4c589259d18435ef04289219706479e3982b2c2bb398e7c6
SHA512 (dask-2024.6.2.tar.gz) = d6004429672c08971b1b3ef6fe0a413d7ce8ad5af9f65c7d1fceb612be0acb801739b990a7ee7949bb801d654f64f0bb11641d289378f9e5307896fe238db261
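
The `sources` file maps each tarball to its SHA512 digest; a downloaded tarball can be checked against it like so (sketch; assumes the tarball sits in the current directory):

import hashlib

expected = (
    "d6004429672c08971b1b3ef6fe0a413d7ce8ad5af9f65c7d1fceb612be0acb80"
    "1739b990a7ee7949bb801d654f64f0bb11641d289378f9e5307896fe238db261"
)

h = hashlib.sha512()
with open("dask-2024.6.2.tar.gz", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert h.hexdigest() == expected, "digest mismatch"
print("sources digest OK")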