Compare commits
No commits in common. "rawhide" and "f34" have entirely different histories.

.gitignore  (vendored, 16 lines changed)
@@ -54,19 +54,3 @@
/dask-2022.01.0.tar.gz
/dask-2022.01.1.tar.gz
/dask-2022.02.0.tar.gz
/dask-2022.02.1.tar.gz
/dask-2022.05.0.tar.gz
/dask-2022.5.0.tar.gz
/dask-2022.7.1.tar.gz
/dask-2022.8.0.tar.gz
/dask-2022.8.1.tar.gz
/dask-2022.9.0.tar.gz
/dask-2022.10.0.tar.gz
/dask-2022.11.1.tar.gz
/dask-2022.12.0.tar.gz
/dask-2022.12.1.tar.gz
/dask-2023.1.0.tar.gz
/dask-2023.2.0.tar.gz
/dask-2023.3.2.tar.gz
/dask-2023.4.0.tar.gz
/dask-2023.4.1.tar.gz

0001-Skip-test_encoding_gh601-on-big-endian-machines.patch
@@ -1,4 +1,4 @@
From 5ee683719a19c8865a7b80725a0adf55baa3e9c9 Mon Sep 17 00:00:00 2001
From 5d9cdaa75e0547d8d5edf8c995f29279688f7e11 Mon Sep 17 00:00:00 2001
From: Elliott Sales de Andrade <quantum.analyst@gmail.com>
Date: Sun, 7 Mar 2021 04:07:32 -0500
Subject: [PATCH 1/2] Skip test_encoding_gh601 on big-endian machines.
@@ -9,18 +9,18 @@ Signed-off-by: Elliott Sales de Andrade <quantum.analyst@gmail.com>
 1 file changed, 2 insertions(+)

diff --git a/dask/dataframe/io/tests/test_csv.py b/dask/dataframe/io/tests/test_csv.py
index b50a727b..0ac1df4c 100644
index 48780ef8..f63d0e80 100644
--- a/dask/dataframe/io/tests/test_csv.py
+++ b/dask/dataframe/io/tests/test_csv.py
@@ -2,6 +2,7 @@ import gzip
@@ -1,6 +1,7 @@
import gzip
import os
import warnings
from io import BytesIO, StringIO
from io import BytesIO
+import sys
from unittest import mock

import pytest
@@ -1146,6 +1147,7 @@ def test_read_csv_with_datetime_index_partitions_n():
@@ -1099,6 +1100,7 @@ def test_read_csv_with_datetime_index_partitions_n():
xfail_pandas_100 = pytest.mark.xfail(reason="https://github.com/dask/dask/issues/5787")


@@ -29,5 +29,5 @@ index b50a727b..0ac1df4c 100644
    "encoding",
    [
--
2.40.0
2.31.1
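The rebased patch above only shows the added "import sys"; the line that actually does the skipping sits outside the hunks shown here. As a rough sketch of the technique named in the subject line (not the patch's literal text), a big-endian skip in pytest is normally written against sys.byteorder:

    import sys

    import pytest


    # Assumed form of the skip; the real patch attaches an equivalent
    # condition to the affected test case.
    @pytest.mark.skipif(sys.byteorder == "big", reason="fails on big-endian machines")
    def test_encoding_gh601():
        ...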

0002-Skip-coverage-testing.patch
@@ -1,26 +0,0 @@
From 54487feeef6d7d41e3b6d4e4ece5b6342071068b Mon Sep 17 00:00:00 2001
From: Elliott Sales de Andrade <quantum.analyst@gmail.com>
Date: Sun, 7 May 2023 23:13:59 -0400
Subject: [PATCH 2/2] Skip coverage testing

Signed-off-by: Elliott Sales de Andrade <quantum.analyst@gmail.com>
---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index a7d504ff..36dfca8e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -121,7 +121,7 @@ markers = [
    "skip_with_pyarrow_strings: Tests that should be skipped when pyarrow string conversion is turned on",
    "xfail_with_pyarrow_strings: Tests that should be xfailed when pyarrow string conversion is turned on",
]
-addopts = "-v -rsxfE --durations=10 --color=yes --cov-config=pyproject.toml"
+addopts = "-v -rsxfE --durations=10 --color=yes"
filterwarnings = [
    # From Cython-1753
    "ignore:can't resolve:ImportWarning",
--
2.40.0

@@ -0,0 +1,30 @@
From 41306c1829a22f0e8654f8d75dc23ce766c26ea8 Mon Sep 17 00:00:00 2001
From: McToel <theo.doellmann@gmx.de>
Date: Sun, 16 May 2021 11:11:06 +0200
Subject: [PATCH 2/2] fix index_col duplication if index_col is type str

Signed-off-by: Elliott Sales de Andrade <quantum.analyst@gmail.com>
---
 dask/dataframe/io/sql.py | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/dask/dataframe/io/sql.py b/dask/dataframe/io/sql.py
index 6573a325..9c15360c 100644
--- a/dask/dataframe/io/sql.py
+++ b/dask/dataframe/io/sql.py
@@ -125,10 +125,8 @@ def read_sql_table(
        if columns
        else list(table.columns)
    )
-    if index_col not in columns:
-        columns.append(
-            table.columns[index_col] if isinstance(index_col, str) else index_col
-        )
+    if index not in columns:
+        columns.append(index)

    if isinstance(index_col, str):
        kwargs["index_col"] = index_col
--
2.31.1
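This newly added patch targets read_sql_table in dask/dataframe/io/sql.py. Before the fix, a string index_col was compared against a list of SQLAlchemy Column objects, so the membership check never matched and the index column could be appended a second time; the fix compares and appends the resolved Column ("index") instead. A minimal sketch of the call it affects, with a hypothetical table name, connection URI, and column names:

    import dask.dataframe as dd

    # index_col is given as a string and also listed in columns; with the fix
    # applied, the "id" column is selected only once.
    df = dd.read_sql_table(
        "accounts",               # hypothetical table name
        "sqlite:///example.db",   # hypothetical connection URI
        index_col="id",
        columns=["id", "balance"],
    )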

_version.py  (new file, 21 lines)
@@ -0,0 +1,21 @@

# This file was generated by 'versioneer.py' (0.16) from
# revision-control system data, or from the parent directory name of an
# unpacked source archive. Distribution tarballs contain a pre-generated copy
# of this file.

import json
import sys

version_json = '''
{
 "dirty": false,
 "error": null,
 "full-revisionid": "a3299586b05d422800ff8a618a2ba35e00e62665",
 "version": "2022.01.0"
}
''' # END VERSION_JSON


def get_versions():
    return json.loads(version_json)

python-dask.spec  (143 lines changed)
@@ -5,23 +5,21 @@
# Force bootstrap for package review.
%bcond_without bootstrap

# We have an arched package to detect arch-dependent issues in dependencies,
# but all of the installable RPMs are noarch and there is no compiled code.
%global debug_package %{nil}

Name: python-%{srcname}
Version: 2023.4.1
%global tag 2023.4.1
Version: 2022.2.0
%global tag 2022.02.0
Release: %autorelease
Summary: Parallel PyData with Task Scheduling

License: BSD-3-Clause
URL: https://github.com/dask/dask
Source0: %{pypi_source %{srcname}}
License: BSD
URL: https://github.com/dask/dask/
Source0: https://github.com/dask/dask/archive/%{tag}/%{srcname}-%{tag}.tar.gz
# Grab this from the sdist until we switch back to it.
Source1: _version.py
# https://github.com/dask/dask/issues/6725
Patch: 0001-Skip-test_encoding_gh601-on-big-endian-machines.patch
# Fedora-specific patch.
Patch: 0002-Skip-coverage-testing.patch
Patch0001: 0001-Skip-test_encoding_gh601-on-big-endian-machines.patch

BuildArch: noarch

%description
Dask is a flexible parallel computing library for analytics.
@@ -30,8 +28,6 @@ Dask is a flexible parallel computing library for analytics.
%package -n python3-%{srcname}
Summary: %{summary}

BuildArch: noarch

BuildRequires: python3-devel
BuildRequires: python3dist(graphviz)
BuildRequires: python3dist(ipython)
@@ -40,54 +36,27 @@ BuildRequires: python3dist(scikit-image)
BuildRequires: python3dist(xarray)
%endif
# Optional test requirements.
# Fastavro does not support 32 bit architectures and is ExcludeArch:
# https://bugzilla.redhat.com/show_bug.cgi?id=1943932
%ifnarch %{arm32} %{ix86}
BuildRequires: python3dist(fastavro)
%endif
BuildRequires: python3dist(h5py)
BuildRequires: python3dist(psutil)
# libarrow does not support 32 bit architectures and is ExcludeArch.
# Tests don't pass on s390x either.
%ifnarch %{arm} %{ix86} s390x
BuildRequires: python3dist(pyarrow)
%endif
BuildRequires: python3dist(requests)
BuildRequires: python3dist(sqlalchemy)
BuildRequires: python3dist(tables)
BuildRequires: python3dist(zarr)

Recommends: python3-%{srcname}+array = %{version}-%{release}
Recommends: python3-%{srcname}+bag = %{version}-%{release}
Recommends: python3-%{srcname}+dataframe = %{version}-%{release}
Recommends: python3-%{srcname}+delayed = %{version}-%{release}
Recommends: python3-%{srcname}+diagnostics = %{version}-%{release}
%if %{without bootstrap}
Recommends: python3-%{srcname}+distributed = %{version}-%{release}
%endif
# No recent enough Bokeh is packaged
Obsoletes: python3-%{srcname}+diagnostics < 2022.5.0-1

# There is nothing that can be unbundled; there are some snippets forked
# or copied from unspecified versions of numpy, under a BSD-3-Clause license
# similar to that of dask itself.
#
# - dask/array/numpy_compat.py:
#   _Recurser, moveaxis, rollaxis, sliding_window_view
# - dask/array/backends.py:
#   _tensordot
# - dask/array/core.py:
#   block
# - dask/array/einsumfuncs.py:
#   parse_einsum_input
# - dask/array/routines.py:
#   cov, _average
Provides: bundled(numpy)

%description -n python3-%{srcname}
Dask is a flexible parallel computing library for analytics.


%pyproject_extras_subpkg -n python3-%{srcname} array bag dataframe delayed
%pyproject_extras_subpkg -n python3-%{srcname} array bag dataframe delayed diagnostics
%if %{without bootstrap}
%pyproject_extras_subpkg distributed
%endif
@@ -97,8 +66,6 @@ Dask is a flexible parallel computing library for analytics.
%package -n python-%{srcname}-doc
Summary: dask documentation

BuildArch: noarch

BuildRequires: python3dist(dask_sphinx_theme) >= 1.3.5
BuildRequires: python3dist(numpydoc)
BuildRequires: python3dist(sphinx) >= 4
@@ -109,13 +76,12 @@ Documentation for dask.


%prep
%autosetup -n %{srcname}-%{version} -p1
# we don't use pre-commit when running tests
sed -i '/"pre-commit"/d' setup.py
%autosetup -n %{srcname}-%{tag} -p1
cp %SOURCE1 %{srcname}/


%generate_buildrequires
%pyproject_buildrequires -r -x test,array,bag,dataframe,delayed
%pyproject_buildrequires -r -x test,array,bag,dataframe,delayed,diagnostics
%if %{without bootstrap}
%pyproject_buildrequires -x distributed
%endif
@@ -144,93 +110,28 @@ rm -rf html/.{doctrees,buildinfo}
%global have_arm 1
%endif

%if 0%{?__isa_bits} == 32
# read_sql_query with meta converts dtypes from 32 to 64.
# https://github.com/dask/dask/issues/8620

# > tm.assert_frame_equal(
# a, b, check_names=check_names, check_dtype=check_dtype, **kwargs
# E AssertionError: Attributes of DataFrame.iloc[:, 1] (column name="age") are different
# E
# E Attribute "dtype" are different
# E [left]: int32
# E [right]: int64
# dask/dataframe/utils.py:555: AssertionError
k="${k-}${k+ and }not test_query_with_meta"
%endif

%ifarch ppc64le
# TODO: Should this be reported upstream? Is it a dask issue, or a numpy one?
# Possibly related to
# https://fedoraproject.org/wiki/Changes/PPC64LE_Float128_Transition?

# > assert allclose(a, b, equal_nan=equal_nan, **kwargs), msg
# E AssertionError: found values in 'a' and 'b' which differ by more than the allowed amount
# E assert False
# E + where False = allclose(array([0.12586355-0.09957204j, 0.20256483+0.04098342j,\n 0.05781123-0.03588671j, 0.01135963-0.03334219j,\n 0.03747771+0.07495994j, 0.2106574 -0.0363521j ,\n 0.16352091+0.03782915j, 0.1381678 -0.06815128j,\n 0.03781295-0.04011523j, 0.01493269+0.07780643j]), array([0.12559072-0.07164038j, 0.20256483+0.05438578j,\n 0.05781123-0.03588671j, 0.01135963-0.03334219j,\n 0.03747771+0.07495994j, 0.2106574 -0.0363521j ,\n 0.16352091+0.03782915j, 0.1381678 -0.06815128j,\n 0.03781295-0.04011523j, 0.01493269+0.07780643j]), equal_nan=True, **{})
# dask/array/utils.py:361: AssertionError
k="${k-}${k+ and }not test_lstsq[100-10-10-True]"
# > assert allclose(a, b, equal_nan=equal_nan, **kwargs), msg
# E AssertionError: found values in 'a' and 'b' which differ by more than the allowed amount
# E assert False
# E + where False = allclose(array([ 0.20168675+0.08857556j, 0.144233 -0.19173091j,\n -0.03367557-0.08053959j, 0.04108325-0.24648308j,\n -0.01844576+0.00841932j, 0.29652375+0.05682199j,\n 0.05551828+0.20156798j, -0.08409592+0.02354949j,\n 0.09848743-0.00748637j, 0.22889193-0.07372773j]), array([ 0.20067551+0.2642591j , 0.144233 -0.18573336j,\n -0.03367557-0.08053959j, 0.04108325-0.24648308j,\n -0.01844576+0.00841932j, 0.29652375+0.05682199j,\n 0.05551828+0.20156798j, -0.08409592+0.02354949j,\n 0.09848743-0.00748637j, 0.22889193-0.07372773j]), equal_nan=True, **{})
# dask/array/utils.py:361: AssertionError
k="${k-}${k+ and }not test_lstsq[20-10-5-True]"

# test_vdot fails with NumPy 1.19.0
# https://github.com/dask/dask/issues/6406
#
# vdot returns incorrect results on ppc64le
# https://github.com/numpy/numpy/issues/17087

# > assert allclose(a, b, equal_nan=equal_nan, **kwargs), msg
# E AssertionError: found values in 'a' and 'b' which differ by more than the allowed amount
# E assert False
# E + where False = allclose((0.38772781971416226-0.6851997484294434j), (0.38772781971416226-0.306563166009585j), equal_nan=True, **{})
# dask/array/utils.py:361: AssertionError
k="${k-}${k+ and }not test_vdot[shape0-chunks0]"
# > assert allclose(a, b, equal_nan=equal_nan, **kwargs), msg
# E AssertionError: found values in 'a' and 'b' which differ by more than the allowed amount
# E assert False
# E + where False = allclose((0.38772781971416226-0.6851997484294434j), (0.38772781971416226-0.306563166009585j), equal_nan=True, **{})
# dask/array/utils.py:361: AssertionError
k="${k-}${k+ and }not test_vdot[shape1-chunks1]"
%endif

# This test compares against files in .github/. It does not work on the PyPI
# sdist, and is only relevant to upstream CI anyway.
#
# test_development_guidelines_matches_ci fails from sdist
# https://github.com/dask/dask/issues/8499
k="${k-}${k+ and }not test_development_guidelines_matches_ci"

pytest_args=(
    -m 'not network'

    # https://bugzilla.redhat.com/show_bug.cgi?id=1968947#c4
    --deselect=dask/dataframe/io/tests/test_sql.py::test_select_from_select

    -n %[0%{?have_arm}?"2":"auto"]

%ifarch %{ix86}
    # Ignore 32-bit warning
    -W 'ignore:invalid value encountered in cast:RuntimeWarning'
%endif

    -k "${k-}"

    --pyargs dask
    # Ignore https://github.com/numpy/numpy/issues/20225 and Pandas issues
    -W ignore::DeprecationWarning
)

cd docs
%{pytest} "${pytest_args[@]}"

%files -n python3-%{srcname} -f %{pyproject_files}
%doc README.rst
%license LICENSE.txt dask/array/NUMPY_LICENSE.txt
%{_bindir}/dask
%license LICENSE.txt

%if %{without bootstrap}
%files -n python-%{srcname}-doc
%doc html
%license LICENSE.txt dask/array/NUMPY_LICENSE.txt
%license LICENSE.txt
%endif
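One idiom in the %check section above is worth spelling out: each k="${k-}${k+ and }not <test>" line appends another exclusion to a single pytest -k expression, because ${k-} expands to the current value of k (or nothing) and ${k+ and } inserts the literal " and " only once k is already set. A small Python illustration of the string this accumulates, using test names taken from the spec:

    # Sketch only: mirrors what the shell parameter expansions build up for -k.
    excluded = [
        "test_query_with_meta",                    # 32-bit architectures
        "test_lstsq[100-10-10-True]",              # ppc64le
        "test_development_guidelines_matches_ci",  # only meaningful in upstream CI
    ]
    k_expression = " and ".join(f"not {name}" for name in excluded)
    print(k_expression)
    # not test_query_with_meta and not test_lstsq[100-10-10-True] and not test_development_guidelines_matches_ci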

sources  (2 lines changed)
@@ -1 +1 @@
SHA512 (dask-2023.4.1.tar.gz) = 2fad1317aad845f7b11d8efceafb11eb9e945aca649bcf7ccd71e8692c3ca18c6f595c82b4e2930cba4e9622dd384965b3cf0ea3f68bdbc2f6f18c7a2d806b26
SHA512 (dask-2022.02.0.tar.gz) = 397dabd3caabac0657808ff031299a1461c7acf4a123db4d1876d8e72a0ae971266dcb256e29498b1988b1b4bea506db3aec962b876f280ec532b96da35d356c
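The sources file is dist-git's pointer into the lookaside cache: each line records the SHA-512 digest of a tarball fetched at build time. A downloaded archive can be checked against the f34 entry above with a few lines of Python (the tarball is assumed to sit in the current directory):

    import hashlib
    from pathlib import Path

    # Compare the printed digest with the SHA512 (dask-2022.02.0.tar.gz) line above.
    digest = hashlib.sha512(Path("dask-2022.02.0.tar.gz").read_bytes()).hexdigest()
    print(digest)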