Run the tests (requires switching to GitHub source)

The tests are now packaged separately in a python3-pandas+test
subpackage, which also provides the [test] extra, since the tests—and
especially some of the associated data files that are now present to
make them actually usable—are quite large.
Benjamin A. Beasley 2022-04-04 13:56:31 -04:00
parent 599262e6a5
commit b5351d82fb
6 changed files with 3787 additions and 6 deletions
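
Because the test files (and their data) now ship in the installed package, the suite can be exercised straight from the installed tree. A minimal sketch, not part of this commit, assuming python3-pandas+test is installed; pandas.test() is the public helper that runs pytest over the installed pandas/tests package:

import pandas

# pandas.test() hands its arguments to pytest and points it at the installed
# pandas/tests directory; extra_args replaces the default pytest options.
pandas.test(extra_args=["-m", "not slow"])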

pandas-1.3.5-2dd75ca.patch (new file, 3035 lines; diff suppressed because it is too large)

@@ -0,0 +1,33 @@
diff -Naur pandas-1.3.5-original/pandas/tests/tools/test_to_datetime.py pandas-1.3.5/pandas/tests/tools/test_to_datetime.py
--- pandas-1.3.5-original/pandas/tests/tools/test_to_datetime.py 2021-12-12 05:20:48.000000000 -0500
+++ pandas-1.3.5/pandas/tests/tools/test_to_datetime.py 2022-04-04 18:13:26.346032539 -0400
@@ -44,6 +44,7 @@
from pandas.core.arrays import DatetimeArray
from pandas.core.tools import datetimes as tools
from pandas.core.tools.datetimes import start_caching_at
+from pandas.util.version import Version
class TestTimeConversionFormats:
@@ -761,11 +762,20 @@
@pytest.mark.parametrize("cache", [True, False])
@td.skip_if_no("psycopg2")
- def test_to_datetime_tz_psycopg2(self, cache):
+ def test_to_datetime_tz_psycopg2(self, request, cache):
# xref 8260
import psycopg2
+ # https://www.psycopg.org/docs/news.html#what-s-new-in-psycopg-2-9
+ request.node.add_marker(
+ pytest.mark.xfail(
+ Version(psycopg2.__version__.split()[0]) > Version("2.8.7"),
+ raises=AttributeError,
+ reason="psycopg2.tz is deprecated (and appears dropped) in 2.9",
+ )
+ )
+
# misc cases
tz1 = psycopg2.tz.FixedOffsetTimezone(offset=-300, name=None)
tz2 = psycopg2.tz.FixedOffsetTimezone(offset=-240, name=None)
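
The hunk above attaches the xfail marker at run time, via request.node.add_marker(), so the expectation can depend on whichever psycopg2 happens to be installed. A standalone sketch of the same pattern (a hypothetical test; packaging.version stands in for the vendored pandas.util.version):

import pytest
from packaging.version import Version

def test_psycopg2_fixed_offset_timezone(request):
    psycopg2 = pytest.importorskip("psycopg2")
    # psycopg2 2.9 no longer exposes the tz module as an attribute of the
    # top-level package, so mark the test as an expected failure there.
    if Version(psycopg2.__version__.split()[0]) > Version("2.8.7"):
        request.node.add_marker(
            pytest.mark.xfail(raises=AttributeError, reason="psycopg2.tz removed in 2.9")
        )
    tz = psycopg2.tz.FixedOffsetTimezone(offset=-300, name=None)
    assert tz.utcoffset(None).total_seconds() == -300 * 60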


@@ -0,0 +1,12 @@
diff -Naur pandas-1.3.5-original/pandas/tests/plotting/frame/test_frame.py pandas-1.3.5/pandas/tests/plotting/frame/test_frame.py
--- pandas-1.3.5-original/pandas/tests/plotting/frame/test_frame.py 2021-12-12 05:20:48.000000000 -0500
+++ pandas-1.3.5/pandas/tests/plotting/frame/test_frame.py 2022-04-04 18:04:05.301456533 -0400
@@ -682,7 +682,7 @@
# GH 8113, datetime.time type is not supported by matplotlib in scatter
df = DataFrame(np.random.randn(10), columns=["a"])
df["dtime"] = date_range(start="2014-01-01", freq="h", periods=10).time
- msg = "must be a string or a number, not 'datetime.time'"
+ msg = "must be a string or a (real )?number, not 'datetime.time'"
with pytest.raises(TypeError, match=msg):
df.plot(kind="scatter", x="dtime", y="a")
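
The loosened pattern works because pytest.raises(match=...) applies re.search to the exception text, so one optional group covers both the older "a number" wording and the newer "a real number" wording of the underlying TypeError. A quick illustration with sample messages (not captured from an actual test run):

import re

pattern = "must be a string or a (real )?number, not 'datetime.time'"
old_msg = "float() argument must be a string or a number, not 'datetime.time'"
new_msg = "float() argument must be a string or a real number, not 'datetime.time'"

# re.search finds the pattern in both variants, so the test passes either way.
assert re.search(pattern, old_msg)
assert re.search(pattern, new_msg)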

pandas-1.3.5-pr-46681.patch (new file, 468 lines)

@@ -0,0 +1,468 @@
From 5c886169cd2674d7077271602a1a36ae0526d3ff Mon Sep 17 00:00:00 2001
From: "Benjamin A. Beasley" <code@musicinmybrain.net>
Date: Wed, 6 Apr 2022 07:13:22 -0400
Subject: [PATCH] Fix a few test failures on big-endian systems
These are all due to tests expecting little-endian dtypes, where in fact
the endianness of the dtype is that of the host.
---
pandas/tests/arrays/boolean/test_astype.py | 5 +-
.../tests/arrays/boolean/test_construction.py | 5 +-
pandas/tests/arrays/floating/test_to_numpy.py | 5 +-
pandas/tests/arrays/integer/test_dtypes.py | 5 +-
pandas/tests/frame/methods/test_to_records.py | 137 ++++++++++++++----
pandas/tests/io/parser/test_c_parser_only.py | 8 +-
.../tests/scalar/timedelta/test_arithmetic.py | 4 +-
pandas/tests/tools/test_to_timedelta.py | 4 +-
8 files changed, 134 insertions(+), 39 deletions(-)
diff --git a/pandas/tests/arrays/boolean/test_astype.py b/pandas/tests/arrays/boolean/test_astype.py
index 57cec70262..258d2a99ef 100644
--- a/pandas/tests/arrays/boolean/test_astype.py
+++ b/pandas/tests/arrays/boolean/test_astype.py
@@ -1,3 +1,5 @@
+from sys import byteorder
+
import numpy as np
import pytest
@@ -20,7 +22,8 @@ def test_astype():
tm.assert_numpy_array_equal(result, expected)
result = arr.astype("str")
- expected = np.array(["True", "False", "<NA>"], dtype="<U5")
+ endian = {"little": "<", "big": ">"}[byteorder]
+ expected = np.array(["True", "False", "<NA>"], dtype=f"{endian}U5")
tm.assert_numpy_array_equal(result, expected)
# no missing values
diff --git a/pandas/tests/arrays/boolean/test_construction.py b/pandas/tests/arrays/boolean/test_construction.py
index c9e96c4379..8204da66b0 100644
--- a/pandas/tests/arrays/boolean/test_construction.py
+++ b/pandas/tests/arrays/boolean/test_construction.py
@@ -1,3 +1,5 @@
+from sys import byteorder
+
import numpy as np
import pytest
@@ -270,7 +272,8 @@ def test_to_numpy(box):
arr = con([True, False, None], dtype="boolean")
result = arr.to_numpy(dtype="str")
- expected = np.array([True, False, pd.NA], dtype="<U5")
+ endian = {"little": "<", "big": ">"}[byteorder]
+ expected = np.array([True, False, pd.NA], dtype=f"{endian}U5")
tm.assert_numpy_array_equal(result, expected)
# no missing values -> can convert to bool, otherwise raises
diff --git a/pandas/tests/arrays/floating/test_to_numpy.py b/pandas/tests/arrays/floating/test_to_numpy.py
index 26e5687b1b..e96e27d84c 100644
--- a/pandas/tests/arrays/floating/test_to_numpy.py
+++ b/pandas/tests/arrays/floating/test_to_numpy.py
@@ -1,3 +1,5 @@
+from sys import byteorder
+
import numpy as np
import pytest
@@ -115,7 +117,8 @@ def test_to_numpy_string(box, dtype):
arr = con([0.0, 1.0, None], dtype="Float64")
result = arr.to_numpy(dtype="str")
- expected = np.array([0.0, 1.0, pd.NA], dtype="<U32")
+ endian = {"little": "<", "big": ">"}[byteorder]
+ expected = np.array([0.0, 1.0, pd.NA], dtype=f"{endian}U32")
tm.assert_numpy_array_equal(result, expected)
diff --git a/pandas/tests/arrays/integer/test_dtypes.py b/pandas/tests/arrays/integer/test_dtypes.py
index e3f59205aa..88b4a1e935 100644
--- a/pandas/tests/arrays/integer/test_dtypes.py
+++ b/pandas/tests/arrays/integer/test_dtypes.py
@@ -1,3 +1,5 @@
+from sys import byteorder
+
import numpy as np
import pytest
@@ -284,7 +286,8 @@ def test_to_numpy_na_raises(dtype):
def test_astype_str():
a = pd.array([1, 2, None], dtype="Int64")
- expected = np.array(["1", "2", "<NA>"], dtype="<U21")
+ endian = {"little": "<", "big": ">"}[byteorder]
+ expected = np.array(["1", "2", "<NA>"], dtype=f"{endian}U21")
tm.assert_numpy_array_equal(a.astype(str), expected)
tm.assert_numpy_array_equal(a.astype("str"), expected)
diff --git a/pandas/tests/frame/methods/test_to_records.py b/pandas/tests/frame/methods/test_to_records.py
index 2c96cf291c..2c503571f6 100644
--- a/pandas/tests/frame/methods/test_to_records.py
+++ b/pandas/tests/frame/methods/test_to_records.py
@@ -1,4 +1,5 @@
from collections import abc
+from sys import byteorder
import numpy as np
import pytest
@@ -14,6 +15,9 @@ from pandas import (
import pandas._testing as tm
+endian = {"little": "<", "big": ">"}[byteorder]
+
+
class TestDataFrameToRecords:
def test_to_records_timeseries(self):
index = date_range("1/1/2000", periods=10)
@@ -143,7 +147,12 @@ class TestDataFrameToRecords:
{},
np.rec.array(
[(0, 1, 0.2, "a"), (1, 2, 1.5, "bc")],
- dtype=[("index", "<i8"), ("A", "<i8"), ("B", "<f8"), ("C", "O")],
+ dtype=[
+ ("index", f"{endian}i8"),
+ ("A", f"{endian}i8"),
+ ("B", f"{endian}f8"),
+ ("C", "O"),
+ ],
),
),
# Should have no effect in this case.
@@ -151,23 +160,38 @@ class TestDataFrameToRecords:
{"index": True},
np.rec.array(
[(0, 1, 0.2, "a"), (1, 2, 1.5, "bc")],
- dtype=[("index", "<i8"), ("A", "<i8"), ("B", "<f8"), ("C", "O")],
+ dtype=[
+ ("index", f"{endian}i8"),
+ ("A", f"{endian}i8"),
+ ("B", f"{endian}f8"),
+ ("C", "O"),
+ ],
),
),
# Column dtype applied across the board. Index unaffected.
(
- {"column_dtypes": "<U4"},
+ {"column_dtypes": f"{endian}U4"},
np.rec.array(
[("0", "1", "0.2", "a"), ("1", "2", "1.5", "bc")],
- dtype=[("index", "<i8"), ("A", "<U4"), ("B", "<U4"), ("C", "<U4")],
+ dtype=[
+ ("index", f"{endian}i8"),
+ ("A", f"{endian}U4"),
+ ("B", f"{endian}U4"),
+ ("C", f"{endian}U4"),
+ ],
),
),
# Index dtype applied across the board. Columns unaffected.
(
- {"index_dtypes": "<U1"},
+ {"index_dtypes": f"{endian}U1"},
np.rec.array(
[("0", 1, 0.2, "a"), ("1", 2, 1.5, "bc")],
- dtype=[("index", "<U1"), ("A", "<i8"), ("B", "<f8"), ("C", "O")],
+ dtype=[
+ ("index", f"{endian}U1"),
+ ("A", f"{endian}i8"),
+ ("B", f"{endian}f8"),
+ ("C", "O"),
+ ],
),
),
# Pass in a type instance.
@@ -175,7 +199,12 @@ class TestDataFrameToRecords:
{"column_dtypes": str},
np.rec.array(
[("0", "1", "0.2", "a"), ("1", "2", "1.5", "bc")],
- dtype=[("index", "<i8"), ("A", "<U"), ("B", "<U"), ("C", "<U")],
+ dtype=[
+ ("index", f"{endian}i8"),
+ ("A", f"{endian}U"),
+ ("B", f"{endian}U"),
+ ("C", f"{endian}U"),
+ ],
),
),
# Pass in a dtype instance.
@@ -183,15 +212,25 @@ class TestDataFrameToRecords:
{"column_dtypes": np.dtype("unicode")},
np.rec.array(
[("0", "1", "0.2", "a"), ("1", "2", "1.5", "bc")],
- dtype=[("index", "<i8"), ("A", "<U"), ("B", "<U"), ("C", "<U")],
+ dtype=[
+ ("index", f"{endian}i8"),
+ ("A", f"{endian}U"),
+ ("B", f"{endian}U"),
+ ("C", f"{endian}U"),
+ ],
),
),
# Pass in a dictionary (name-only).
(
- {"column_dtypes": {"A": np.int8, "B": np.float32, "C": "<U2"}},
+ {"column_dtypes": {"A": np.int8, "B": np.float32, "C": f"{endian}U2"}},
np.rec.array(
[("0", "1", "0.2", "a"), ("1", "2", "1.5", "bc")],
- dtype=[("index", "<i8"), ("A", "i1"), ("B", "<f4"), ("C", "<U2")],
+ dtype=[
+ ("index", f"{endian}i8"),
+ ("A", "i1"),
+ ("B", f"{endian}f4"),
+ ("C", f"{endian}U2"),
+ ],
),
),
# Pass in a dictionary (indices-only).
@@ -199,15 +238,20 @@ class TestDataFrameToRecords:
{"index_dtypes": {0: "int16"}},
np.rec.array(
[(0, 1, 0.2, "a"), (1, 2, 1.5, "bc")],
- dtype=[("index", "i2"), ("A", "<i8"), ("B", "<f8"), ("C", "O")],
+ dtype=[
+ ("index", "i2"),
+ ("A", f"{endian}i8"),
+ ("B", f"{endian}f8"),
+ ("C", "O"),
+ ],
),
),
# Ignore index mappings if index is not True.
(
- {"index": False, "index_dtypes": "<U2"},
+ {"index": False, "index_dtypes": f"{endian}U2"},
np.rec.array(
[(1, 0.2, "a"), (2, 1.5, "bc")],
- dtype=[("A", "<i8"), ("B", "<f8"), ("C", "O")],
+ dtype=[("A", f"{endian}i8"), ("B", f"{endian}f8"), ("C", "O")],
),
),
# Non-existent names / indices in mapping should not error.
@@ -215,7 +259,12 @@ class TestDataFrameToRecords:
{"index_dtypes": {0: "int16", "not-there": "float32"}},
np.rec.array(
[(0, 1, 0.2, "a"), (1, 2, 1.5, "bc")],
- dtype=[("index", "i2"), ("A", "<i8"), ("B", "<f8"), ("C", "O")],
+ dtype=[
+ ("index", "i2"),
+ ("A", f"{endian}i8"),
+ ("B", f"{endian}f8"),
+ ("C", "O"),
+ ],
),
),
# Names / indices not in mapping default to array dtype.
@@ -223,7 +272,12 @@ class TestDataFrameToRecords:
{"column_dtypes": {"A": np.int8, "B": np.float32}},
np.rec.array(
[("0", "1", "0.2", "a"), ("1", "2", "1.5", "bc")],
- dtype=[("index", "<i8"), ("A", "i1"), ("B", "<f4"), ("C", "O")],
+ dtype=[
+ ("index", f"{endian}i8"),
+ ("A", "i1"),
+ ("B", f"{endian}f4"),
+ ("C", "O"),
+ ],
),
),
# Names / indices not in dtype mapping default to array dtype.
@@ -231,18 +285,28 @@ class TestDataFrameToRecords:
{"column_dtypes": {"A": np.dtype("int8"), "B": np.dtype("float32")}},
np.rec.array(
[("0", "1", "0.2", "a"), ("1", "2", "1.5", "bc")],
- dtype=[("index", "<i8"), ("A", "i1"), ("B", "<f4"), ("C", "O")],
+ dtype=[
+ ("index", f"{endian}i8"),
+ ("A", "i1"),
+ ("B", f"{endian}f4"),
+ ("C", "O"),
+ ],
),
),
# Mixture of everything.
(
{
"column_dtypes": {"A": np.int8, "B": np.float32},
- "index_dtypes": "<U2",
+ "index_dtypes": f"{endian}U2",
},
np.rec.array(
[("0", "1", "0.2", "a"), ("1", "2", "1.5", "bc")],
- dtype=[("index", "<U2"), ("A", "i1"), ("B", "<f4"), ("C", "O")],
+ dtype=[
+ ("index", f"{endian}U2"),
+ ("A", "i1"),
+ ("B", f"{endian}f4"),
+ ("C", "O"),
+ ],
),
),
# Invalid dype values.
@@ -291,7 +355,7 @@ class TestDataFrameToRecords:
{"column_dtypes": "float64", "index_dtypes": {0: "int32", 1: "int8"}},
np.rec.array(
[(1, 2, 3.0), (4, 5, 6.0), (7, 8, 9.0)],
- dtype=[("a", "<i4"), ("b", "i1"), ("c", "<f8")],
+ dtype=[("a", f"{endian}i4"), ("b", "i1"), ("c", f"{endian}f8")],
),
),
# MultiIndex in the columns.
@@ -302,14 +366,17 @@ class TestDataFrameToRecords:
[("a", "d"), ("b", "e"), ("c", "f")]
),
),
- {"column_dtypes": {0: "<U1", 2: "float32"}, "index_dtypes": "float32"},
+ {
+ "column_dtypes": {0: f"{endian}U1", 2: "float32"},
+ "index_dtypes": "float32",
+ },
np.rec.array(
[(0.0, "1", 2, 3.0), (1.0, "4", 5, 6.0), (2.0, "7", 8, 9.0)],
dtype=[
- ("index", "<f4"),
- ("('a', 'd')", "<U1"),
- ("('b', 'e')", "<i8"),
- ("('c', 'f')", "<f4"),
+ ("index", f"{endian}f4"),
+ ("('a', 'd')", f"{endian}U1"),
+ ("('b', 'e')", f"{endian}i8"),
+ ("('c', 'f')", f"{endian}f4"),
],
),
),
@@ -324,7 +391,10 @@ class TestDataFrameToRecords:
[("d", -4), ("d", -5), ("f", -6)], names=list("cd")
),
),
- {"column_dtypes": "float64", "index_dtypes": {0: "<U2", 1: "int8"}},
+ {
+ "column_dtypes": "float64",
+ "index_dtypes": {0: f"{endian}U2", 1: "int8"},
+ },
np.rec.array(
[
("d", -4, 1.0, 2.0, 3.0),
@@ -332,11 +402,11 @@ class TestDataFrameToRecords:
("f", -6, 7, 8, 9.0),
],
dtype=[
- ("c", "<U2"),
+ ("c", f"{endian}U2"),
("d", "i1"),
- ("('a', 'd')", "<f8"),
- ("('b', 'e')", "<f8"),
- ("('c', 'f')", "<f8"),
+ ("('a', 'd')", f"{endian}f8"),
+ ("('b', 'e')", f"{endian}f8"),
+ ("('c', 'f')", f"{endian}f8"),
],
),
),
@@ -366,13 +436,18 @@ class TestDataFrameToRecords:
dtype_mappings = {
"column_dtypes": DictLike(**{"A": np.int8, "B": np.float32}),
- "index_dtypes": "<U2",
+ "index_dtypes": f"{endian}U2",
}
result = df.to_records(**dtype_mappings)
expected = np.rec.array(
[("0", "1", "0.2", "a"), ("1", "2", "1.5", "bc")],
- dtype=[("index", "<U2"), ("A", "i1"), ("B", "<f4"), ("C", "O")],
+ dtype=[
+ ("index", f"{endian}U2"),
+ ("A", "i1"),
+ ("B", f"{endian}f4"),
+ ("C", "O"),
+ ],
)
tm.assert_almost_equal(result, expected)
diff --git a/pandas/tests/io/parser/test_c_parser_only.py b/pandas/tests/io/parser/test_c_parser_only.py
index 160e00f5fb..049fa412c2 100644
--- a/pandas/tests/io/parser/test_c_parser_only.py
+++ b/pandas/tests/io/parser/test_c_parser_only.py
@@ -12,6 +12,7 @@ from io import (
)
import mmap
import os
+from sys import byteorder
import tarfile
import numpy as np
@@ -28,6 +29,9 @@ from pandas import (
import pandas._testing as tm
+endian = {"little": "<", "big": ">"}[byteorder]
+
+
@pytest.mark.parametrize(
"malformed",
["1\r1\r1\r 1\r 1\r", "1\r1\r1\r 1\r 1\r11\r", "1\r1\r1\r 1\r 1\r11\r1\r"],
@@ -141,9 +145,9 @@ nan 2
"the dtype timedelta64 is not supported for parsing",
{"dtype": {"A": "timedelta64", "B": "float64"}},
),
- ("the dtype <U8 is not supported for parsing", {"dtype": {"A": "U8"}}),
+ (f"the dtype {endian}U8 is not supported for parsing", {"dtype": {"A": "U8"}}),
],
- ids=["dt64-0", "dt64-1", "td64", "<U8"],
+ ids=["dt64-0", "dt64-1", "td64", f"{endian}U8"],
)
def test_unsupported_dtype(c_parser_only, match, kwargs):
parser = c_parser_only
diff --git a/pandas/tests/scalar/timedelta/test_arithmetic.py b/pandas/tests/scalar/timedelta/test_arithmetic.py
index 7dfda0463e..3e4334261a 100644
--- a/pandas/tests/scalar/timedelta/test_arithmetic.py
+++ b/pandas/tests/scalar/timedelta/test_arithmetic.py
@@ -6,6 +6,7 @@ from datetime import (
timedelta,
)
import operator
+from sys import byteorder
import numpy as np
import pytest
@@ -293,9 +294,10 @@ class TestTimedeltaAdditionSubtraction:
expected = pd.to_timedelta(["2 days"]).values
tm.assert_numpy_array_equal(td * np.array([2]), expected)
tm.assert_numpy_array_equal(np.array([2]) * td, expected)
+ endian = {"little": "<", "big": ">"}[byteorder]
msg = (
"ufunc '?multiply'? cannot use operands with types "
- r"dtype\('<m8\[ns\]'\) and dtype\('<m8\[ns\]'\)"
+ fr"dtype\('{endian}m8\[ns\]'\) and dtype\('{endian}m8\[ns\]'\)"
)
with pytest.raises(TypeError, match=msg):
td * other
diff --git a/pandas/tests/tools/test_to_timedelta.py b/pandas/tests/tools/test_to_timedelta.py
index 395fdea67f..1141a136d6 100644
--- a/pandas/tests/tools/test_to_timedelta.py
+++ b/pandas/tests/tools/test_to_timedelta.py
@@ -2,6 +2,7 @@ from datetime import (
time,
timedelta,
)
+from sys import byteorder
import numpy as np
import pytest
@@ -202,8 +203,9 @@ class TestTimedeltas:
timedelta_NaT = np.timedelta64("NaT")
actual = to_timedelta(Series(["00:00:01", np.nan]))
+ endian = {"little": "<", "big": ">"}[byteorder]
expected = Series(
- [np.timedelta64(1000000000, "ns"), timedelta_NaT], dtype="<m8[ns]"
+ [np.timedelta64(1000000000, "ns"), timedelta_NaT], dtype=f"{endian}m8[ns]"
)
tm.assert_series_equal(actual, expected)
--
2.35.1
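
The recurring fix in the patch above maps the host byte order onto the matching NumPy byte-order prefix, so the expected dtype strings equal whatever pandas actually produces on either a little-endian or a big-endian machine. A minimal standalone sketch of the idiom (not taken verbatim from pandas):

from sys import byteorder
import numpy as np

endian = {"little": "<", "big": ">"}[byteorder]

# On x86_64 this yields "<i8", on s390x ">i8"; both name the native int64
# layout, so the constructed dtype equals what NumPy reports for the host.
assert np.dtype(f"{endian}i8") == np.dtype("=i8") == np.dtype(np.int64)
print(np.dtype(f"{endian}U5"))  # "<U5" on little-endian hosts, ">U5" on big-endian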

python-pandas.spec

@@ -1,6 +1,15 @@
# Break cycles with optional dependencies
%bcond_with bootstrap
# Run tests?
%bcond_without tests
# When running tests, run ones that are marked as slow?
%bcond_without slow_tests
# When running tests, run ones that cannot be run in parallel?
%bcond_without single_tests
# When running tests, run ones that require a lot of memory?
%bcond_without high_memory_tests
Name: python-pandas
Version: 1.3.5
Release: 1%{?dist}
@@ -26,6 +35,11 @@ Summary: Python library providing high-performance data analysis tools
# https://github.com/pandas-dev/pandas/pull/46741 “Add a license file for
# klib (khash)”
#
# In the python3-pandas-tests subpackage:
#
# - pandas/tests/io/data/spss/*.sav are MIT: see LICENSES/HAVEN_LICENSE and
# LICENSES/HAVEN_MIT
#
# Additionally:
#
# - pandas/_libs/tslibs/parsing.pyx is either BSD or
@@ -44,13 +58,11 @@ Summary: Python library providing high-performance data analysis tools
# Additionally, the following are not packaged and so do not affect the overall
# License field:
#
# - pandas/tests/io/data/spss/*.sav are MIT: see LICENSES/HAVEN_LICENSE and
# LICENSES/HAVEN_MIT (This would be packaged if it were present—tests *are*
# packaged—but it is not in the PyPI sdist.)
# - scripts/no_bool_in_generic.py is MIT: see LICENSES/PYUPGRADE_LICENSE
License: BSD and (BSD or ASL 2.0) and (BSD and ASL 2.0) and (BSD and MIT) and (BSD and Python)
URL: https://pandas.pydata.org/
Source0: %{pypi_source pandas}
# The GitHub archive contains tests; the PyPI sdist does not.
Source0: https://github.com/pandas-dev/pandas/archive/v%{version}/pandas-%{version}.tar.gz
# Partial backport of upstream commit d437902f46acbff4a03d748b30620bc75fa5ea1f:
# “CI: Migrate Python 3.10 testing to Posix GHA/Azure Pipelines (#45120)”
@@ -272,6 +284,10 @@ Recommends: python3dist(openpyxl) >= 3
# python-pyxlsb is not currently packaged:
# BuildRequires: python3dist(pyxlsb) >= 1.0.6
# Recommends: python3dist(pyxlsb) >= 1.0.6
# Not in doc/source/getting_started/install.rst, but in environment.yml and in
# some doc-strings:
BuildRequires: python3dist(odfpy)
Recommends: python3dist(odfpy)
# HTML
BuildRequires: python3dist(beautifulsoup4) >= 4.6
@@ -341,6 +357,83 @@ Recommends: python3dist(pandas-datareader)
%description -n python3-pandas %_description
%package -n python3-pandas+test
Summary: Tests and test extras for Pandas
# See comment above base package License tag for licensing breakdown.
License: BSD and MIT
Requires: python3-pandas%{?_isa} = %{version}-%{release}
# Additional BRs and weak dependencies below are generally those that don’t
# provide enough added functionality to be weak dependencies of the library
# package, but for which there is some integration support and additional tests
# that can be enabled.
# Additional dependencies from environment.yml: “testing”
# Those not in the “test” extra are treated as weak dependencies for the tests.
BuildRequires: python3dist(boto3)
Recommends: python3dist(boto3)
BuildRequires: python3dist(botocore) >= 1.11
Recommends: python3dist(botocore) >= 1.11
# Already covered by “test” extra
# BuildRequires: python3dist(hypothesis) >= 3.82
# Recommends: python3dist(hypothesis) >= 3.82
# python-moto is not yet packaged
# BuildRequires: python3dist(moto)
# Recommends: python3dist(moto)
BuildRequires: python3dist(flask)
Recommends: python3dist(flask)
# Already covered by “test” extra
# BuildRequires: python3dist(pytest) >= 5.0.1
# Requires: python3dist(pytest) >= 5.0.1
# https://docs.fedoraproject.org/en-US/packaging-guidelines/Python/#_linters
# BuildRequires: python3dist(pytest-cov)
# Recommends: python3dist(pytest-cov)
# Already covered by “test” extra
# BuildRequires: python3dist(pytest-xdist) >= 1.21
# Requires: python3dist(pytest-xdist) >= 1.21
BuildRequires: python3dist(pytest-asyncio)
Recommends: python3dist(pytest-asyncio)
# python-pytest-instafail is not yet packaged
# BuildRequires: python3dist(pytest-instafail)
# Recommends: python3dist(pytest-instafail)
# Additional dependencies from environment.yml:
# “Dask and its dependencies (that don’t install with dask)”
# Asks for dask-core, but we just have dask
BuildRequires: python3dist(dask)
Recommends: python3dist(dask)
BuildRequires: python3dist(toolz) >= 0.7.3
Recommends: python3dist(toolz) >= 0.7.3
BuildRequires: python3dist(partd) >= 0.3.10
Recommends: python3dist(partd) >= 0.3.10
BuildRequires: python3dist(cloudpickle) >= 0.2.1
Recommends: python3dist(cloudpickle) >= 0.2.1
# Additional dependencies from environment.yml: “downstream tests”
BuildRequires: python3dist(seaborn)
Recommends: python3dist(seaborn)
BuildRequires: python3dist(statsmodels)
Recommends: python3dist(statsmodels)
# environment.yml: Needed for downstream xarray.CFTimeIndex test
BuildRequires: python3dist(cftime)
Recommends: python3dist(cftime)
# environment.yml: optional
BuildRequires: python3dist(ipython) >= 7.11.1
Recommends: python3dist(ipython) >= 7.11.1
%description -n python3-pandas+test
These are the tests for python3-pandas. This package:
- Provides the “pandas.tests” package
- Makes sure the “test” extra dependencies are installed
- Carries additional weak dependencies for running the tests
%prep
%autosetup -n pandas-%{version} -p1
@@ -352,7 +445,7 @@ sed -r -i '/\boldest-supported-numpy\b/d' pyproject.toml
%generate_buildrequires
%pyproject_buildrequires -r
%pyproject_buildrequires -r %{?with_tests:-x test}
%build
@@ -365,7 +458,140 @@ sed -r -i '/\boldest-supported-numpy\b/d' pyproject.toml
%check
# Clipboard tests don’t run without a graphical session, and it’s not worth
# using xvfb-run just for them.
m="${m-}${m+ and }not clipboard"
%if %{without single_tests}
m="${m-}${m+ and }not single"
%endif
%if %{with tests}
%ifarch %{arm32}
# worker 'gw2' crashed while running '…'
k="${k-}${k+ and }not test_append_frame_column_oriented"
%endif
%ifarch %{ix86} %{arm32}
# This “high-memory” test is just not appropriate for 32-bit platforms:
# E OverflowError: join() result is too long for a Python string
k="${k-}${k+ and }not test_bytes_exceed_2gb[c_high]"
%endif
%ifarch ppc64le s390x %{arm32}
# TODO: Why does this fail?
# > with pytest.raises(TypeError, match=msg):
# E Failed: DID NOT RAISE <class 'TypeError'>
k="${k-}${k+ and }not (TestFloatSubtype and test_subtype_integer_errors)"
%endif
%ifarch %{ix86} %{arm32}
# TODO: Why does this fail?
# E assert 243.164 == 243.16400000000002
# Fails for both [c_high] and [c_low].
k="${k-}${k+ and }not test_float_precision_options"
%endif
%ifarch s390x
# TODO: Why does this fail?
#
# > os.fsync(self._handle.fileno())
# E OverflowError: Python int too large to convert to C int
k="${k-}${k+ and }not test_flush"
%endif
%ifarch %{arm64} %{arm32}
# TODO: Why does this fail?
# > with pytest.raises(ValueError, match="external reference.*"):
# E Failed: DID NOT RAISE <class 'ValueError'>
k="${k-}${k+ and }not (TestHashTable and test_vector_resize[True-UInt64HashTable-UInt64Vector-uint64-False-10])"
%endif
%ifarch ppc64le
# TODO: Why does this fail?
# > with pytest.raises(ValueError, match="external reference.*"):
# E Failed: DID NOT RAISE <class 'ValueError'>
k="${k-}${k+ and }not (TestHashTable and test_vector_resize[False-UInt64HashTable-UInt64Vector-uint64-False-10])"
%endif
# This test (only) expects the current working directory to be the
# site-packages directory containing the built pandas. This is not how we run
# the tests, because we don’t want to clutter the buildroot with
# testing-related hidden files and directories. We could run tests from
# %%pyproject_build_lib if this were a problem for a lot of tests, but it’s
# easier just to skip it.
k="${k-}${k+ and }not test_html_template_extends_options"
# TODO: Why does this fail? This also seems to have to do with fsspec.
k="${k-}${k+ and }not test_markdown_options"
# TODO: Why does this fail?
# > assert res_deep == res == expected
# E assert 0 == 108
k="${k-}${k+ and }not test_memory_usage[series-with-empty-index]"
%ifarch %{ix86} %{arm32}
# TODO: Why does this fail?
# E AssertionError: DataFrame.iloc[:, 2] (column name="C") are different
# E
# E DataFrame.iloc[:, 2] (column name="C") values are different (11.57513 %)
k="${k-}${k+ and }not (TestMerge and test_int64_overflow_issues)"
%endif
# TODO: Why does this fail? An fsspec.implementations.memory.MemoryFile does
# not seem to work as expected.
k="${k-}${k+ and }not test_read_csv"
%ifarch ppc64le s390x
# TODO: Why does this fail? The differences are large!
k="${k-}${k+ and }not test_rolling_var_numerical_issues"
%endif
%ifarch %{arm32}
# worker 'gw4' crashed while running '…'
k="${k-}${k+ and }not test_select_filter_corner"
%endif
# Ensure pytest doesn’t find the “un-built” library. We can get away with this
# approach because the tests are also in the installed library. We can’t simply
# “cd” to the buildroot’s python3_sitearch because testing leaves files in the
# current working directory.
mkdir -p _empty
cd _empty
# See: test_fast.sh
# Workaround for pytest-xdist flaky collection order
# https://github.com/pytest-dev/pytest/issues/920
# https://github.com/pytest-dev/pytest/issues/1075
export PYTHONHASHSEED="$(
%{python3} -c 'import random; print(random.randint(1, 4294967295))'
)"
%ifarch %{ix86} %{arm32}
# Limit parallelism in tests to prevent memory exhaustion
%global testn_max 4
%if 0%{?fedora} > 35
%constrain_build -c %{testn_max}
%else
%if %{?_smp_build_ncpus}%{?!_smp_build_ncpus:4} > %{testn_max}
%global _smp_build_ncpus %{testn_max}
%endif
%endif
%endif
# Fallback parallelism of 4 is from upstream CI
%pytest '%{buildroot}%{python3_sitearch}/pandas' \
%{?!with_slow_tests:--skip-slow} \
--skip-network \
--skip-db \
%{?with_high_memory_tests:--run-high-memory} \
-m "${m-}" \
-k "${k-}" \
-n %{?_smp_build_ncpus}%{?!_smp_build_ncpus:4} \
-r sxX
%else
%pyproject_check_import -e 'pandas.conftest' -e 'pandas.tests.*'
%endif
%files -n python3-pandas -f %{pyproject_files}
@@ -377,6 +603,12 @@ sed -r -i '/\boldest-supported-numpy\b/d' pyproject.toml
%license LICENSE LICENSES/
%doc README.md
%doc RELEASE.md
%exclude %{python3_sitearch}/pandas/tests
%files -n python3-pandas+test
%{python3_sitearch}/pandas/tests
%ghost %{python3_sitearch}/*.dist-info
%changelog
@@ -388,6 +620,7 @@ sed -r -i '/\boldest-supported-numpy\b/d' pyproject.toml
- Do not install C sources
- Carefully handle virtual Provides and licenses for bundled/copied code
- Use pyproject-rpm-macros
- Run the tests (requires switching to GitHub source)
* Fri Jan 21 2022 Fedora Release Engineering <releng@fedoraproject.org> - 1.3.3-3
- Rebuilt for https://fedoraproject.org/wiki/Fedora_36_Mass_Rebuild

sources

@@ -1 +1 @@
-SHA512 (pandas-1.3.5.tar.gz) = cf3f76894cae19784af6393d4a575d114b55621a3d594edd7e95ac6d5c8588a580135789c859c618acfeceeb710d2a1fdd94188a839a90be6b58af929e00a726
+SHA512 (pandas-1.3.5.tar.gz) = 3c6a3da2f46f71de61789f69e9c8ca8c175a0cfe8a6ca12d3f4e6b6c9f617e110f59eb68db73b7f576ebfab9e8e6be469b52c5e711f595cfd73c86eb9c6d3491