From d26d3d23af984bf1154c152e66dd294a55302a59 Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Thu, 4 Oct 2018 11:29:22 -0500 Subject: [PATCH] Squashed commit of the following: commit c9d6e89a1f401e4f47b384b72030873cc4cc2f2b Author: Tom Augspurger Date: Thu Oct 4 08:34:22 2018 -0500 xpass -> skip commit 95d5cbfe4eaf53ed60e84a938723062a14d2d625 Author: Tom Augspurger Date: Thu Oct 4 08:22:17 2018 -0500 typo, import commit 4e9b7f0a6ceec0275e22f5f1edac1daeb41f5033 Author: Tom Augspurger Date: Thu Oct 4 08:18:40 2018 -0500 doc update commit cc2bfc8b991f4d8cf46a993bf4205cc80656384e Merge: 11a0d938c fe67b94e7 Author: Tom Augspurger Date: Thu Oct 4 08:15:46 2018 -0500 Merge remote-tracking branch 'upstream/master' into ea-divmod commit fe67b94e7681c1f21fc2be212514ca0d67a6603c Author: Tom Augspurger Date: Thu Oct 4 06:55:09 2018 -0500 Update type for PeriodDtype / DatetimeTZDtype / IntervalDtype (#22938) commit b12e5ba55c3691733dab36373e80d1b16134c8c2 Author: Tom Augspurger Date: Thu Oct 4 06:30:29 2018 -0500 Safer is dtype (#22975) commit c19c8052f384206c3b2cd87f277344d21d0ae2c7 Author: Tom Augspurger Date: Thu Oct 4 06:27:54 2018 -0500 Catch Exception in combine (#22936) commit d553ab3e5650d105de8e02ae6fd57d03af57b214 Author: Anjali2019 Date: Thu Oct 4 13:24:06 2018 +0200 TST: Fixturize series/test_combine_concat.py (#22964) commit 4c78b9738e01ae147106301cca76c6b36ee68d06 Author: Anjali2019 Date: Thu Oct 4 13:23:39 2018 +0200 TST: Fixturize series/test_constructors.py (#22965) commit 45d3bb761dd44edd0853b06fd81f05af915fd695 Author: Anjali2019 Date: Thu Oct 4 13:23:20 2018 +0200 TST: Fixturize series/test_datetime_values.py (#22966) commit f1a22ff56f895ed340ed7db6dc46841b81d331a1 Author: Anjali2019 Date: Thu Oct 4 13:22:21 2018 +0200 TST: Fixturize series/test_dtypes.py (#22967) commit abf68fd1d5694403e506416c68f6abec6d780c39 Author: Anjali2019 Date: Thu Oct 4 13:21:45 2018 +0200 TST: Fixturize series/test_io.py (#22972) commit e6b0c2915f6433d7c29af908f91a6d511177eec1 Author: Anjali2019 Date: Thu Oct 4 13:20:46 2018 +0200 TST: Fixturize series/test_missing.py (#22973) commit 9b405b829bf5e3fd142cccbcca46df4cc3df4ccb Author: Joris Van den Bossche Date: Thu Oct 4 13:16:28 2018 +0200 CLN: values is required argument in _shallow_copy_with_infer (#22983) commit c282e310809921a0dadd4446f23c9273c15da443 Author: h-vetinari <33685575+h-vetinari@users.noreply.github.com> Date: Thu Oct 4 03:34:35 2018 +0200 Fix ASV import error (#22978) commit 11a0d938cdaf7482546691519577b5dd28f69aac Author: Tom Augspurger Date: Wed Oct 3 14:26:34 2018 -0500 typerror commit a0cd5e79eb06ac71cf2f510b1a2122bc2b21fcf0 Author: Tom Augspurger Date: Wed Oct 3 14:25:38 2018 -0500 TypeError for Series commit 2247461ec0b1017db320cb8581337cba0b5c6679 Author: Tom Augspurger Date: Wed Oct 3 13:29:29 2018 -0500 Test op(Series[EA], EA]) commit c9fe5d318d7077f99413532cdaf392ae3ea9cd2c Author: Tom Augspurger Date: Wed Oct 3 13:21:33 2018 -0500 make strict commit 7ef697cffdcd2f8d701de3cdfd2e6897358effbf Author: Tom Augspurger Date: Wed Oct 3 13:14:52 2018 -0500 Use super commit 35d42133acbcb3c25308b1c10e0e2dc3fa1052b6 Merge: 0671e7d67 ee808033b Author: Tom Augspurger Date: Wed Oct 3 13:11:05 2018 -0500 Merge remote-tracking branch 'upstream/master' into ea-divmod commit 0671e7d67df8b0aa258fd864ef5f3169fe0ffc55 Author: Tom Augspurger Date: Tue Oct 2 11:10:42 2018 -0500 Fixup commit 1b4261f41c70379fa868866bc77e7a31c43baa5d Merge: c92a4a899 1d9f76c50 Author: Tom Augspurger Date: Tue Oct 2 10:58:43 2018 -0500 Merge remote-tracking branch 
'upstream/master' into ea-divmod commit c92a4a899b8d5e5e6a0479f390a604dc9f624f89 Author: Tom Augspurger Date: Mon Oct 1 16:56:15 2018 -0500 Update old test commit 52538fa03a8c9722ab5c86c88419105b6ebfe5a1 Author: Tom Augspurger Date: Mon Oct 1 16:51:48 2018 -0500 BUG: divmod return type --- .travis.yml | 19 ++--- asv_bench/benchmarks/indexing.py | 8 +-- asv_bench/benchmarks/join_merge.py | 7 +- asv_bench/benchmarks/panel_ctor.py | 4 +- asv_bench/benchmarks/panel_methods.py | 3 +- doc/source/extending.rst | 10 +-- pandas/core/arrays/base.py | 24 ++++--- pandas/core/dtypes/base.py | 8 ++- pandas/core/frame.py | 3 +- pandas/core/indexes/base.py | 4 +- pandas/core/indexes/multi.py | 2 +- pandas/core/indexes/period.py | 13 +--- pandas/core/series.py | 2 +- pandas/tests/dtypes/test_dtypes.py | 20 ++++++ .../tests/extension/decimal/test_decimal.py | 2 +- pandas/tests/extension/json/test_json.py | 12 ++-- pandas/tests/extension/test_categorical.py | 4 +- pandas/tests/frame/test_operators.py | 6 ++ pandas/tests/series/test_combine_concat.py | 25 ++++--- pandas/tests/series/test_constructors.py | 36 +++++----- pandas/tests/series/test_datetime_values.py | 4 +- pandas/tests/series/test_dtypes.py | 18 +++-- pandas/tests/series/test_io.py | 70 +++++++++---------- pandas/tests/series/test_missing.py | 46 ++++++------ 24 files changed, 182 insertions(+), 168 deletions(-) diff --git a/.travis.yml b/.travis.yml index 40baee2c03ea0..c9bdb91283d42 100644 --- a/.travis.yml +++ b/.travis.yml @@ -53,11 +53,7 @@ matrix: - dist: trusty env: - JOB="3.6, coverage" ENV_FILE="ci/travis-36.yaml" TEST_ARGS="--skip-slow --skip-network" PANDAS_TESTING_MODE="deprecate" COVERAGE=true DOCTEST=true - # In allow_failures - - dist: trusty - env: - - JOB="3.6, slow" ENV_FILE="ci/travis-36-slow.yaml" SLOW=true - # In allow_failures + - dist: trusty env: - JOB="3.7, NumPy dev" ENV_FILE="ci/travis-37-numpydev.yaml" TEST_ARGS="--skip-slow --skip-network -W error" PANDAS_TESTING_MODE="deprecate" @@ -65,6 +61,12 @@ matrix: apt: packages: - xsel + + # In allow_failures + - dist: trusty + env: + - JOB="3.6, slow" ENV_FILE="ci/travis-36-slow.yaml" SLOW=true + # In allow_failures - dist: trusty env: @@ -73,13 +75,6 @@ matrix: - dist: trusty env: - JOB="3.6, slow" ENV_FILE="ci/travis-36-slow.yaml" SLOW=true - - dist: trusty - env: - - JOB="3.7, NumPy dev" ENV_FILE="ci/travis-37-numpydev.yaml" TEST_ARGS="--skip-slow --skip-network -W error" PANDAS_TESTING_MODE="deprecate" - addons: - apt: - packages: - - xsel - dist: trusty env: - JOB="3.6, doc" ENV_FILE="ci/travis-36-doc.yaml" DOC=true diff --git a/asv_bench/benchmarks/indexing.py b/asv_bench/benchmarks/indexing.py index c5b147b152aa6..2850fa249725c 100644 --- a/asv_bench/benchmarks/indexing.py +++ b/asv_bench/benchmarks/indexing.py @@ -2,10 +2,10 @@ import numpy as np import pandas.util.testing as tm -from pandas import (Series, DataFrame, MultiIndex, Int64Index, Float64Index, - IntervalIndex, CategoricalIndex, - IndexSlice, concat, date_range) -from .pandas_vb_common import setup, Panel # noqa +from pandas import (Series, DataFrame, MultiIndex, Panel, + Int64Index, Float64Index, IntervalIndex, + CategoricalIndex, IndexSlice, concat, date_range) +from .pandas_vb_common import setup # noqa class NumericSeriesIndexing(object): diff --git a/asv_bench/benchmarks/join_merge.py b/asv_bench/benchmarks/join_merge.py index 7487a0d8489b7..6624c3d0aaf49 100644 --- a/asv_bench/benchmarks/join_merge.py +++ b/asv_bench/benchmarks/join_merge.py @@ -3,14 +3,15 @@ import numpy as np import 
pandas.util.testing as tm -from pandas import (DataFrame, Series, MultiIndex, date_range, concat, merge, - merge_asof) +from pandas import (DataFrame, Series, Panel, MultiIndex, + date_range, concat, merge, merge_asof) + try: from pandas import merge_ordered except ImportError: from pandas import ordered_merge as merge_ordered -from .pandas_vb_common import Panel, setup # noqa +from .pandas_vb_common import setup # noqa class Append(object): diff --git a/asv_bench/benchmarks/panel_ctor.py b/asv_bench/benchmarks/panel_ctor.py index ce946c76ed199..4614bbd198afa 100644 --- a/asv_bench/benchmarks/panel_ctor.py +++ b/asv_bench/benchmarks/panel_ctor.py @@ -1,9 +1,9 @@ import warnings from datetime import datetime, timedelta -from pandas import DataFrame, DatetimeIndex, date_range +from pandas import DataFrame, Panel, DatetimeIndex, date_range -from .pandas_vb_common import Panel, setup # noqa +from .pandas_vb_common import setup # noqa class DifferentIndexes(object): diff --git a/asv_bench/benchmarks/panel_methods.py b/asv_bench/benchmarks/panel_methods.py index a5b1a92e9cf67..4d19e9a87c507 100644 --- a/asv_bench/benchmarks/panel_methods.py +++ b/asv_bench/benchmarks/panel_methods.py @@ -1,8 +1,9 @@ import warnings import numpy as np +from pandas import Panel -from .pandas_vb_common import Panel, setup # noqa +from .pandas_vb_common import setup # noqa class PanelMethods(object): diff --git a/doc/source/extending.rst b/doc/source/extending.rst index da249cb3592f4..ab940384594bc 100644 --- a/doc/source/extending.rst +++ b/doc/source/extending.rst @@ -167,11 +167,11 @@ your ``MyExtensionArray`` class, as follows: element one-by-one, this might not be as performant as implementing your own version of the associated operators directly on the ``ExtensionArray``. -This implementation will try to reconstruct a new ``ExtensionArray`` with the -result of the element-wise operation. Whether or not that succeeds depends on -whether the operation returns a result that's valid for the ``ExtensionArray``. -If an ``ExtensionArray`` cannot be reconstructed, a list containing the scalars -returned instead. +For arithmetic operations, this implementation will try to reconstruct a new +``ExtensionArray`` with the result of the element-wise operation. Whether +or not that succeeds depends on whether the operation returns a result +that's valid for the ``ExtensionArray``. If an ``ExtensionArray`` cannot +be reconstructed, an ndarray containing the scalars returned instead. .. _extending.extension.testing: diff --git a/pandas/core/arrays/base.py b/pandas/core/arrays/base.py index b16f550b3641d..23eea5f4c04f2 100644 --- a/pandas/core/arrays/base.py +++ b/pandas/core/arrays/base.py @@ -781,20 +781,24 @@ def convert_values(param): # a TypeError should be raised res = [op(a, b) for (a, b) in zip(lvalues, rvalues)] - if coerce_to_dtype: - if op.__name__ in {'divmod', 'rdivmod'}: + def _maybe_convert(arr): + if coerce_to_dtype: + # https://github.com/pandas-dev/pandas/issues/22850 + # We catch all regular exceptions here, and fall back + # to an ndarray. 
try: - a, b = zip(*res) - res = (self._from_sequence(a), - self._from_sequence(b)) + res = self._from_sequence(arr) except Exception: - pass + res = np.asarray(arr) else: - try: - res = self._from_sequence(res) - except Exception: - pass + res = np.asarray(arr) + return res + if op.__name__ in {'divmod', 'rdivmod'}: + a, b = zip(*res) + res = _maybe_convert(a), _maybe_convert(b) + else: + res = _maybe_convert(res) return res op_name = ops._get_op_name(op, True) diff --git a/pandas/core/dtypes/base.py b/pandas/core/dtypes/base.py index 5c9ba921226c0..b0fa55e346613 100644 --- a/pandas/core/dtypes/base.py +++ b/pandas/core/dtypes/base.py @@ -2,6 +2,7 @@ import numpy as np from pandas import compat +from pandas.core.dtypes.generic import ABCSeries, ABCIndexClass, ABCDataFrame from pandas.errors import AbstractMethodError @@ -83,7 +84,12 @@ def is_dtype(cls, dtype): """ dtype = getattr(dtype, 'dtype', dtype) - if isinstance(dtype, np.dtype): + if isinstance(dtype, (ABCSeries, ABCIndexClass, + ABCDataFrame, np.dtype)): + # https://github.com/pandas-dev/pandas/issues/22960 + # avoid passing data to `construct_from_string`. This could + # cause a FutureWarning from numpy about failing elementwise + # comparison from, e.g., comparing DataFrame == 'category'. return False elif dtype is None: return False diff --git a/pandas/core/frame.py b/pandas/core/frame.py index ff7590f6d5358..f4b7ccb0fdf5b 100644 --- a/pandas/core/frame.py +++ b/pandas/core/frame.py @@ -4908,7 +4908,8 @@ def _combine_match_index(self, other, func, level=None): return ops.dispatch_to_series(left, right, func) else: # fastpath --> operate directly on values - new_data = func(left.values.T, right.values).T + with np.errstate(all="ignore"): + new_data = func(left.values.T, right.values).T return self._constructor(new_data, index=left.index, columns=self.columns, copy=False) diff --git a/pandas/core/indexes/base.py b/pandas/core/indexes/base.py index 19f9209ef39ca..d0dbe76547e75 100644 --- a/pandas/core/indexes/base.py +++ b/pandas/core/indexes/base.py @@ -540,7 +540,7 @@ def _shallow_copy(self, values=None, **kwargs): return self._simple_new(values, **attributes) - def _shallow_copy_with_infer(self, values=None, **kwargs): + def _shallow_copy_with_infer(self, values, **kwargs): """ create a new Index inferring the class with passed value, don't copy the data, use the same object attributes with passed in attributes @@ -553,8 +553,6 @@ def _shallow_copy_with_infer(self, values=None, **kwargs): values : the values to create the new Index, optional kwargs : updates the default attributes for this Index """ - if values is None: - values = self.values attributes = self._get_attributes_dict() attributes.update(kwargs) attributes['copy'] = False diff --git a/pandas/core/indexes/multi.py b/pandas/core/indexes/multi.py index 6091df776a01b..3cccb65503378 100644 --- a/pandas/core/indexes/multi.py +++ b/pandas/core/indexes/multi.py @@ -556,7 +556,7 @@ def view(self, cls=None): result._id = self._id return result - def _shallow_copy_with_infer(self, values=None, **kwargs): + def _shallow_copy_with_infer(self, values, **kwargs): # On equal MultiIndexes the difference is empty. 
# Therefore, an empty MultiIndex is returned GH13490 if len(values) == 0: diff --git a/pandas/core/indexes/period.py b/pandas/core/indexes/period.py index cc61c1baa7bf6..f3f680f085118 100644 --- a/pandas/core/indexes/period.py +++ b/pandas/core/indexes/period.py @@ -252,17 +252,10 @@ def _from_ordinals(cls, values, name=None, freq=None, **kwargs): result = cls._simple_new(data, name=name) return result - def _shallow_copy(self, values=None, **kwargs): + def _shallow_copy(self, values, **kwargs): # TODO: simplify, figure out type of values - if values is None: - # Note: this is the Index implementation. - # slightly different from AttributesMixin implementation which - # defaults to self._ndarray_values - values = self.values - else: - # this differs too - if not isinstance(values, PeriodArray): - values = PeriodArray._from_ordinals(values, freq=self.freq) + if not isinstance(values, PeriodArray): + values = PeriodArray._from_ordinals(values, freq=self.freq) # I don't like overloading shallow_copy with freq changes. # See if it's used anywhere outside of test_resample_empty_dataframe diff --git a/pandas/core/series.py b/pandas/core/series.py index 2e22e4e6e1bfc..a613b22ea9046 100644 --- a/pandas/core/series.py +++ b/pandas/core/series.py @@ -4228,7 +4228,7 @@ def _try_cast(arr, take_fast_path): try: # gh-15832: Check if we are requesting a numeric dype and # that we can convert the data to the requested dtype. - if is_float_dtype(dtype) or is_integer_dtype(dtype): + if is_integer_dtype(dtype): subarr = maybe_cast_to_integer_array(arr, dtype) subarr = maybe_cast_to_datetime(arr, dtype) diff --git a/pandas/tests/dtypes/test_dtypes.py b/pandas/tests/dtypes/test_dtypes.py index e3d14497a38f9..7e95b076a8a66 100644 --- a/pandas/tests/dtypes/test_dtypes.py +++ b/pandas/tests/dtypes/test_dtypes.py @@ -815,3 +815,23 @@ def test_registry_find(dtype, expected): ('datetime64[ns, US/Eastern]', DatetimeTZDtype('ns', 'US/Eastern'))]) def test_pandas_registry_find(dtype, expected): assert _pandas_registry.find(dtype) == expected + + +@pytest.mark.parametrize("check", [ + is_categorical_dtype, + is_datetime64tz_dtype, + is_period_dtype, + is_datetime64_ns_dtype, + is_datetime64_dtype, + is_interval_dtype, + is_datetime64_any_dtype, + is_string_dtype, + is_bool_dtype, +]) +def test_is_dtype_no_warning(check): + data = pd.DataFrame({"A": [1, 2]}) + with tm.assert_produces_warning(None): + check(data) + + with tm.assert_produces_warning(None): + check(data["A"]) diff --git a/pandas/tests/extension/decimal/test_decimal.py b/pandas/tests/extension/decimal/test_decimal.py index a33cc6c4ab6cb..317170e8db1e1 100644 --- a/pandas/tests/extension/decimal/test_decimal.py +++ b/pandas/tests/extension/decimal/test_decimal.py @@ -102,7 +102,7 @@ class TestInterface(BaseDecimal, base.BaseInterfaceTests): class TestConstructors(BaseDecimal, base.BaseConstructorsTests): - @pytest.mark.xfail(reason="not implemented constructor from dtype") + @pytest.mark.skip(reason="not implemented constructor from dtype") def test_from_dtype(self, data): # construct from our dtype & string dtype pass diff --git a/pandas/tests/extension/json/test_json.py b/pandas/tests/extension/json/test_json.py index e503e54db64c5..115afdcc99f2b 100644 --- a/pandas/tests/extension/json/test_json.py +++ b/pandas/tests/extension/json/test_json.py @@ -131,8 +131,7 @@ def test_custom_asserts(self): class TestConstructors(BaseJSON, base.BaseConstructorsTests): - # TODO: Should this be pytest.mark.skip? 
- @pytest.mark.xfail(reason="not implemented constructor from dtype") + @pytest.mark.skip(reason="not implemented constructor from dtype") def test_from_dtype(self, data): # construct from our dtype & string dtype pass @@ -147,13 +146,11 @@ class TestGetitem(BaseJSON, base.BaseGetitemTests): class TestMissing(BaseJSON, base.BaseMissingTests): - # TODO: Should this be pytest.mark.skip? - @pytest.mark.xfail(reason="Setting a dict as a scalar") + @pytest.mark.skip(reason="Setting a dict as a scalar") def test_fillna_series(self): """We treat dictionaries as a mapping in fillna, not a scalar.""" - # TODO: Should this be pytest.mark.skip? - @pytest.mark.xfail(reason="Setting a dict as a scalar") + @pytest.mark.skip(reason="Setting a dict as a scalar") def test_fillna_frame(self): """We treat dictionaries as a mapping in fillna, not a scalar.""" @@ -204,8 +201,7 @@ def test_combine_add(self, data_repeated): class TestCasting(BaseJSON, base.BaseCastingTests): - # TODO: Should this be pytest.mark.skip? - @pytest.mark.xfail(reason="failing on np.array(self, dtype=str)") + @pytest.mark.skip(reason="failing on np.array(self, dtype=str)") def test_astype_str(self): """This currently fails in NumPy on np.array(self, dtype=str) with diff --git a/pandas/tests/extension/test_categorical.py b/pandas/tests/extension/test_categorical.py index 924f01077abd1..f118279c4b915 100644 --- a/pandas/tests/extension/test_categorical.py +++ b/pandas/tests/extension/test_categorical.py @@ -140,11 +140,11 @@ def test_take_series(self): def test_reindex_non_na_fill_value(self): pass - @pytest.mark.xfail(reason="Categorical.take buggy") + @pytest.mark.skip(reason="Categorical.take buggy") def test_take_empty(self): pass - @pytest.mark.xfail(reason="test not written correctly for categorical") + @pytest.mark.skip(reason="test not written correctly for categorical") def test_reindex(self): pass diff --git a/pandas/tests/frame/test_operators.py b/pandas/tests/frame/test_operators.py index 97c94e1134cc8..6ed289614b96a 100644 --- a/pandas/tests/frame/test_operators.py +++ b/pandas/tests/frame/test_operators.py @@ -1030,3 +1030,9 @@ def test_alignment_non_pandas(self): align(df, val, 'index') with pytest.raises(ValueError): align(df, val, 'columns') + + def test_no_warning(self, all_arithmetic_operators): + df = pd.DataFrame({"A": [0., 0.], "B": [0., None]}) + b = df['B'] + with tm.assert_produces_warning(None): + getattr(df, all_arithmetic_operators)(b, 0) diff --git a/pandas/tests/series/test_combine_concat.py b/pandas/tests/series/test_combine_concat.py index 35ba4fbf0ce25..8b021ab81ff81 100644 --- a/pandas/tests/series/test_combine_concat.py +++ b/pandas/tests/series/test_combine_concat.py @@ -15,29 +15,28 @@ from pandas.util.testing import assert_series_equal import pandas.util.testing as tm -from .common import TestData +class TestSeriesCombine(): -class TestSeriesCombine(TestData): - - def test_append(self): - appendedSeries = self.series.append(self.objSeries) + def test_append(self, datetime_series, string_series, object_series): + appendedSeries = string_series.append(object_series) for idx, value in compat.iteritems(appendedSeries): - if idx in self.series.index: - assert value == self.series[idx] - elif idx in self.objSeries.index: - assert value == self.objSeries[idx] + if idx in string_series.index: + assert value == string_series[idx] + elif idx in object_series.index: + assert value == object_series[idx] else: raise AssertionError("orphaned index!") - pytest.raises(ValueError, self.ts.append, self.ts, + 
pytest.raises(ValueError, datetime_series.append, datetime_series, verify_integrity=True) - def test_append_many(self): - pieces = [self.ts[:5], self.ts[5:10], self.ts[10:]] + def test_append_many(self, datetime_series): + pieces = [datetime_series[:5], datetime_series[5:10], + datetime_series[10:]] result = pieces[0].append(pieces[1:]) - assert_series_equal(result, self.ts) + assert_series_equal(result, datetime_series) def test_append_duplicates(self): # GH 13677 diff --git a/pandas/tests/series/test_constructors.py b/pandas/tests/series/test_constructors.py index 4817f5bdccc29..57a3f54fadbcc 100644 --- a/pandas/tests/series/test_constructors.py +++ b/pandas/tests/series/test_constructors.py @@ -26,10 +26,8 @@ from pandas.util.testing import assert_series_equal import pandas.util.testing as tm -from .common import TestData - -class TestSeriesConstructors(TestData): +class TestSeriesConstructors(): def test_invalid_dtype(self): # GH15520 @@ -50,23 +48,23 @@ def test_scalar_conversion(self): assert int(Series([1.])) == 1 assert long(Series([1.])) == 1 - def test_constructor(self): - assert self.ts.index.is_all_dates + def test_constructor(self, datetime_series, empty_series): + assert datetime_series.index.is_all_dates # Pass in Series - derived = Series(self.ts) + derived = Series(datetime_series) assert derived.index.is_all_dates - assert tm.equalContents(derived.index, self.ts.index) + assert tm.equalContents(derived.index, datetime_series.index) # Ensure new index is not created - assert id(self.ts.index) == id(derived.index) + assert id(datetime_series.index) == id(derived.index) # Mixed type Series mixed = Series(['hello', np.NaN], index=[0, 1]) assert mixed.dtype == np.object_ assert mixed[1] is np.NaN - assert not self.empty.index.is_all_dates + assert not empty_series.index.is_all_dates assert not Series({}).index.is_all_dates pytest.raises(Exception, Series, np.random.randn(3, 3), index=np.arange(3)) @@ -977,27 +975,27 @@ def test_fromDict(self): series = Series(data, dtype=float) assert series.dtype == np.float64 - def test_fromValue(self): + def test_fromValue(self, datetime_series): - nans = Series(np.NaN, index=self.ts.index) + nans = Series(np.NaN, index=datetime_series.index) assert nans.dtype == np.float_ - assert len(nans) == len(self.ts) + assert len(nans) == len(datetime_series) - strings = Series('foo', index=self.ts.index) + strings = Series('foo', index=datetime_series.index) assert strings.dtype == np.object_ - assert len(strings) == len(self.ts) + assert len(strings) == len(datetime_series) d = datetime.now() - dates = Series(d, index=self.ts.index) + dates = Series(d, index=datetime_series.index) assert dates.dtype == 'M8[ns]' - assert len(dates) == len(self.ts) + assert len(dates) == len(datetime_series) # GH12336 # Test construction of categorical series from value - categorical = Series(0, index=self.ts.index, dtype="category") - expected = Series(0, index=self.ts.index).astype("category") + categorical = Series(0, index=datetime_series.index, dtype="category") + expected = Series(0, index=datetime_series.index).astype("category") assert categorical.dtype == 'category' - assert len(categorical) == len(self.ts) + assert len(categorical) == len(datetime_series) tm.assert_series_equal(categorical, expected) def test_constructor_dtype_timedelta64(self): diff --git a/pandas/tests/series/test_datetime_values.py b/pandas/tests/series/test_datetime_values.py index fee2323310b9c..e06d3a67db662 100644 --- a/pandas/tests/series/test_datetime_values.py +++ 
b/pandas/tests/series/test_datetime_values.py @@ -23,10 +23,8 @@ from pandas.util.testing import assert_series_equal import pandas.util.testing as tm -from .common import TestData - -class TestSeriesDatetimeValues(TestData): +class TestSeriesDatetimeValues(): def test_dt_namespace_accessor(self): diff --git a/pandas/tests/series/test_dtypes.py b/pandas/tests/series/test_dtypes.py index 125dff9ecfa7c..63ead2dc7d245 100644 --- a/pandas/tests/series/test_dtypes.py +++ b/pandas/tests/series/test_dtypes.py @@ -24,10 +24,8 @@ from pandas import compat import pandas.util.testing as tm -from .common import TestData - -class TestSeriesDtypes(TestData): +class TestSeriesDtypes(): def test_dt64_series_astype_object(self): dt64ser = Series(date_range('20130101', periods=3)) @@ -56,17 +54,17 @@ def test_asobject_deprecated(self): o = s.asobject assert isinstance(o, np.ndarray) - def test_dtype(self): + def test_dtype(self, datetime_series): - assert self.ts.dtype == np.dtype('float64') - assert self.ts.dtypes == np.dtype('float64') - assert self.ts.ftype == 'float64:dense' - assert self.ts.ftypes == 'float64:dense' - tm.assert_series_equal(self.ts.get_dtype_counts(), + assert datetime_series.dtype == np.dtype('float64') + assert datetime_series.dtypes == np.dtype('float64') + assert datetime_series.ftype == 'float64:dense' + assert datetime_series.ftypes == 'float64:dense' + tm.assert_series_equal(datetime_series.get_dtype_counts(), Series(1, ['float64'])) # GH18243 - Assert .get_ftype_counts is deprecated with tm.assert_produces_warning(FutureWarning): - tm.assert_series_equal(self.ts.get_ftype_counts(), + tm.assert_series_equal(datetime_series.get_ftype_counts(), Series(1, ['float64:dense'])) @pytest.mark.parametrize("value", [np.nan, np.inf]) diff --git a/pandas/tests/series/test_io.py b/pandas/tests/series/test_io.py index cbf9bff06ad34..50f548b855247 100644 --- a/pandas/tests/series/test_io.py +++ b/pandas/tests/series/test_io.py @@ -16,10 +16,8 @@ assert_frame_equal, ensure_clean) import pandas.util.testing as tm -from .common import TestData - -class TestSeriesToCSV(TestData): +class TestSeriesToCSV(): def read_csv(self, path, **kwargs): params = dict(squeeze=True, index_col=0, @@ -34,10 +32,10 @@ def read_csv(self, path, **kwargs): return out - def test_from_csv_deprecation(self): + def test_from_csv_deprecation(self, datetime_series): # see gh-17812 with ensure_clean() as path: - self.ts.to_csv(path, header=False) + datetime_series.to_csv(path, header=False) with tm.assert_produces_warning(FutureWarning, check_stacklevel=False): @@ -46,7 +44,7 @@ def test_from_csv_deprecation(self): assert_series_equal(depr_ts, ts) @pytest.mark.parametrize("arg", ["path", "header", "both"]) - def test_to_csv_deprecation(self, arg): + def test_to_csv_deprecation(self, arg, datetime_series): # see gh-19715 with ensure_clean() as path: if arg == "path": @@ -57,18 +55,18 @@ def test_to_csv_deprecation(self, arg): kwargs = dict(path=path) with tm.assert_produces_warning(FutureWarning): - self.ts.to_csv(**kwargs) + datetime_series.to_csv(**kwargs) # Make sure roundtrip still works. 
ts = self.read_csv(path) - assert_series_equal(self.ts, ts, check_names=False) + assert_series_equal(datetime_series, ts, check_names=False) - def test_from_csv(self): + def test_from_csv(self, datetime_series, string_series): with ensure_clean() as path: - self.ts.to_csv(path, header=False) + datetime_series.to_csv(path, header=False) ts = self.read_csv(path) - assert_series_equal(self.ts, ts, check_names=False) + assert_series_equal(datetime_series, ts, check_names=False) assert ts.name is None assert ts.index.name is None @@ -79,18 +77,18 @@ def test_from_csv(self): assert_series_equal(depr_ts, ts) # see gh-10483 - self.ts.to_csv(path, header=True) + datetime_series.to_csv(path, header=True) ts_h = self.read_csv(path, header=0) assert ts_h.name == "ts" - self.series.to_csv(path, header=False) + string_series.to_csv(path, header=False) series = self.read_csv(path) - assert_series_equal(self.series, series, check_names=False) + assert_series_equal(string_series, series, check_names=False) assert series.name is None assert series.index.name is None - self.series.to_csv(path, header=True) + string_series.to_csv(path, header=True) series_h = self.read_csv(path, header=0) assert series_h.name == "series" @@ -106,19 +104,19 @@ def test_from_csv(self): check_series = Series({"1998-01-01": 1.0, "1999-01-01": 2.0}) assert_series_equal(check_series, series) - def test_to_csv(self): + def test_to_csv(self, datetime_series): import io with ensure_clean() as path: - self.ts.to_csv(path, header=False) + datetime_series.to_csv(path, header=False) with io.open(path, newline=None) as f: lines = f.readlines() assert (lines[1] != '\n') - self.ts.to_csv(path, index=False, header=False) + datetime_series.to_csv(path, index=False, header=False) arr = np.loadtxt(path) - assert_almost_equal(arr, self.ts.values) + assert_almost_equal(arr, datetime_series.values) def test_to_csv_unicode_index(self): buf = StringIO() @@ -196,22 +194,23 @@ def test_to_csv_compression(self, s, encoding, compression): encoding=encoding)) -class TestSeriesIO(TestData): +class TestSeriesIO(): - def test_to_frame(self): - self.ts.name = None - rs = self.ts.to_frame() - xp = pd.DataFrame(self.ts.values, index=self.ts.index) + def test_to_frame(self, datetime_series): + datetime_series.name = None + rs = datetime_series.to_frame() + xp = pd.DataFrame(datetime_series.values, index=datetime_series.index) assert_frame_equal(rs, xp) - self.ts.name = 'testname' - rs = self.ts.to_frame() - xp = pd.DataFrame(dict(testname=self.ts.values), index=self.ts.index) + datetime_series.name = 'testname' + rs = datetime_series.to_frame() + xp = pd.DataFrame(dict(testname=datetime_series.values), + index=datetime_series.index) assert_frame_equal(rs, xp) - rs = self.ts.to_frame(name='testdifferent') - xp = pd.DataFrame( - dict(testdifferent=self.ts.values), index=self.ts.index) + rs = datetime_series.to_frame(name='testdifferent') + xp = pd.DataFrame(dict(testdifferent=datetime_series.values), + index=datetime_series.index) assert_frame_equal(rs, xp) def test_timeseries_periodindex(self): @@ -256,11 +255,12 @@ class SubclassedFrame(DataFrame): dict, collections.defaultdict(list), collections.OrderedDict)) - def test_to_dict(self, mapping): + def test_to_dict(self, mapping, datetime_series): # GH16122 - ts = TestData().ts tm.assert_series_equal( - Series(ts.to_dict(mapping), name='ts'), ts) - from_method = Series(ts.to_dict(collections.Counter)) - from_constructor = Series(collections.Counter(ts.iteritems())) + Series(datetime_series.to_dict(mapping), 
name='ts'), + datetime_series) + from_method = Series(datetime_series.to_dict(collections.Counter)) + from_constructor = Series(collections + .Counter(datetime_series.iteritems())) tm.assert_series_equal(from_method, from_constructor) diff --git a/pandas/tests/series/test_missing.py b/pandas/tests/series/test_missing.py index ab3fdd8cbf84f..b3f105ee5cb67 100644 --- a/pandas/tests/series/test_missing.py +++ b/pandas/tests/series/test_missing.py @@ -21,8 +21,6 @@ import pandas.util.testing as tm import pandas.util._test_decorators as td -from .common import TestData - try: import scipy _is_scipy_ge_0190 = (LooseVersion(scipy.__version__) >= @@ -52,7 +50,7 @@ def _simple_ts(start, end, freq='D'): return Series(np.random.randn(len(rng)), index=rng) -class TestSeriesMissingData(TestData): +class TestSeriesMissingData(): def test_remove_na_deprecation(self): # see gh-16971 @@ -489,7 +487,7 @@ def test_isnull_for_inf_deprecated(self): tm.assert_series_equal(r, e) tm.assert_series_equal(dr, de) - def test_fillna(self): + def test_fillna(self, datetime_series): ts = Series([0., 1., 2., 3., 4.], index=tm.makeDateIndex(5)) tm.assert_series_equal(ts, ts.fillna(method='ffill')) @@ -506,7 +504,8 @@ def test_fillna(self): tm.assert_series_equal(ts.fillna(value=5), exp) pytest.raises(ValueError, ts.fillna) - pytest.raises(ValueError, self.ts.fillna, value=0, method='ffill') + pytest.raises(ValueError, datetime_series.fillna, value=0, + method='ffill') # GH 5703 s1 = Series([np.nan]) @@ -576,9 +575,9 @@ def test_fillna_inplace(self): expected = x.fillna(value=0) assert_series_equal(y, expected) - def test_fillna_invalid_method(self): + def test_fillna_invalid_method(self, datetime_series): try: - self.ts.fillna(method='ffil') + datetime_series.fillna(method='ffil') except ValueError as inst: assert 'ffil' in str(inst) @@ -632,8 +631,8 @@ def test_timedelta64_nan(self): # def test_logical_range_select(self): # np.random.seed(12345) - # selector = -0.5 <= self.ts <= 0.5 - # expected = (self.ts >= -0.5) & (self.ts <= 0.5) + # selector = -0.5 <= datetime_series <= 0.5 + # expected = (datetime_series >= -0.5) & (datetime_series <= 0.5) # assert_series_equal(selector, expected) def test_dropna_empty(self): @@ -688,8 +687,8 @@ def test_dropna_intervals(self): expected = s.iloc[1:] assert_series_equal(result, expected) - def test_valid(self): - ts = self.ts.copy() + def test_valid(self, datetime_series): + ts = datetime_series.copy() ts[::2] = np.NaN result = ts.dropna() @@ -734,12 +733,12 @@ def test_pad_require_monotonicity(self): pytest.raises(ValueError, rng2.get_indexer, rng, method='pad') - def test_dropna_preserve_name(self): - self.ts[:5] = np.nan - result = self.ts.dropna() - assert result.name == self.ts.name - name = self.ts.name - ts = self.ts.copy() + def test_dropna_preserve_name(self, datetime_series): + datetime_series[:5] = np.nan + result = datetime_series.dropna() + assert result.name == datetime_series.name + name = datetime_series.name + ts = datetime_series.copy() ts.dropna(inplace=True) assert ts.name == name @@ -825,10 +824,11 @@ def test_series_pad_backfill_limit(self): assert_series_equal(result, expected) -class TestSeriesInterpolateData(TestData): +class TestSeriesInterpolateData(): - def test_interpolate(self): - ts = Series(np.arange(len(self.ts), dtype=float), self.ts.index) + def test_interpolate(self, datetime_series, string_series): + ts = Series(np.arange(len(datetime_series), dtype=float), + datetime_series.index) ts_copy = ts.copy() ts_copy[5:10] = np.NaN @@ -836,8 +836,8 @@ 
def test_interpolate(self): linear_interp = ts_copy.interpolate(method='linear') tm.assert_series_equal(linear_interp, ts) - ord_ts = Series([d.toordinal() for d in self.ts.index], - index=self.ts.index).astype(float) + ord_ts = Series([d.toordinal() for d in datetime_series.index], + index=datetime_series.index).astype(float) ord_ts_copy = ord_ts.copy() ord_ts_copy[5:10] = np.NaN @@ -847,7 +847,7 @@ def test_interpolate(self): # try time interpolation on a non-TimeSeries # Only raises ValueError if there are NaNs. - non_ts = self.series.copy() + non_ts = string_series.copy() non_ts[0] = np.NaN pytest.raises(ValueError, non_ts.interpolate, method='time')
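
The behavioural core of this patch ("BUG: divmod return type") is the _maybe_convert fallback added to the op factory in pandas/core/arrays/base.py: divmod/rdivmod results are unzipped into two sequences, each rebuilt via _from_sequence where possible and otherwise returned as an ndarray of scalars (matching the updated wording in doc/source/extending.rst). Below is a minimal, standalone sketch of that logic; apply_elementwise and from_sequence are illustrative stand-ins for the real _create_method/_from_sequence machinery, not pandas API.

    # Sketch only: names here are hypothetical stand-ins, not pandas API.
    import numpy as np

    def apply_elementwise(op, lvalues, rvalues, from_sequence=None):
        # Apply op element by element, as the default ExtensionArray ops do.
        res = [op(a, b) for a, b in zip(lvalues, rvalues)]

        def maybe_convert(arr):
            if from_sequence is not None:
                try:
                    # Try to rebuild an array of the extension type.
                    return from_sequence(arr)
                except Exception:
                    # Reconstruction failed: fall back to an ndarray of scalars.
                    return np.asarray(arr)
            return np.asarray(arr)

        if op is divmod:
            # divmod yields (quotient, remainder) pairs; unzip and convert each
            # half separately so the result is a 2-tuple of arrays rather than
            # a single object array of tuples.
            quot, rem = zip(*res)
            return maybe_convert(quot), maybe_convert(rem)
        return maybe_convert(res)

    q, r = apply_elementwise(divmod, [7, 9], [2, 4])
    print(q, r)  # [3 2] [1 1]

With a real ExtensionArray, passing its _from_sequence as from_sequence would give back two arrays of the extension type instead of two ndarrays, which is what the updated extension tests exercise.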
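
The is_dtype change in pandas/core/dtypes/base.py (covered by the new test_is_dtype_no_warning test) short-circuits on pandas containers and plain numpy dtypes so that no data is ever handed to construct_from_string, where an elementwise comparison such as DataFrame == 'category' could emit a numpy FutureWarning. A rough sketch of that guard, using a toy checker whose names are illustrative rather than pandas API:

    # Sketch only: is_my_dtype is a toy stand-in for ExtensionDtype.is_dtype.
    import warnings
    import numpy as np
    import pandas as pd

    def is_my_dtype(obj):
        dtype = getattr(obj, "dtype", obj)
        # Early exit for containers and plain numpy dtypes: never pass the
        # underlying data on to a string-based dtype constructor.
        if isinstance(dtype, (pd.Series, pd.Index, pd.DataFrame, np.dtype)):
            return False
        return isinstance(dtype, str) and dtype == "my_dtype"

    with warnings.catch_warnings():
        warnings.simplefilter("error")  # any FutureWarning would raise here
        assert not is_my_dtype(pd.DataFrame({"A": [1, 2]}))
        assert not is_my_dtype(pd.Series([1, 2]))
        assert is_my_dtype("my_dtype")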
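
The one-line change in DataFrame._combine_match_index wraps the values-based fastpath in np.errstate(all="ignore") so it is as quiet as the dispatch path (see the new test_no_warning test). The effect is just the standard numpy context manager; a small illustration with made-up values:

    import numpy as np

    left = np.array([[0.0, 0.0], [1.0, np.nan]])
    right = np.array([0.0, 2.0])

    with np.errstate(all="ignore"):
        # Without errstate this would emit e.g. "invalid value encountered
        # in divide" RuntimeWarnings for the 0/0 entries.
        out = (left.T / right).T

    print(out)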