DEPS: set min versions #17002

Merged
merged 1 commit on Aug 22, 2017
6 changes: 3 additions & 3 deletions .travis.yml
@@ -37,7 +37,7 @@ matrix:
- JOB="3.5_OSX" TEST_ARGS="--skip-slow --skip-network"
- dist: trusty
env:
- JOB="2.7_LOCALE" TEST_ARGS="--only-slow --skip-network" LOCALE_OVERRIDE="zh_CN.UTF-8"
- JOB="2.7_LOCALE" LOCALE_OVERRIDE="zh_CN.UTF-8" SLOW=true
addons:
apt:
packages:
@@ -62,7 +62,7 @@ matrix:
# In allow_failures
- dist: trusty
env:
- JOB="2.7_SLOW" TEST_ARGS="--only-slow --skip-network"
- JOB="2.7_SLOW" SLOW=true
# In allow_failures
- dist: trusty
env:
@@ -82,7 +82,7 @@ matrix:
allow_failures:
- dist: trusty
env:
- JOB="2.7_SLOW" TEST_ARGS="--only-slow --skip-network"
- JOB="2.7_SLOW" SLOW=true
- dist: trusty
env:
- JOB="2.7_BUILD_TEST" TEST_ARGS="--skip-slow" BUILD_TEST=true
2 changes: 1 addition & 1 deletion ci/install_travis.sh
@@ -47,7 +47,7 @@ which conda
echo
echo "[update conda]"
conda config --set ssl_verify false || exit 1
conda config --set always_yes true --set changeps1 false || exit 1
conda config --set quiet true --set always_yes true --set changeps1 false || exit 1
conda update -q conda

echo
2 changes: 1 addition & 1 deletion ci/requirements-2.7_COMPAT.build
@@ -1,5 +1,5 @@
python=2.7*
numpy=1.7.1
numpy=1.9.2
cython=0.23
dateutil=1.5
pytz=2013b
9 changes: 5 additions & 4 deletions ci/requirements-2.7_COMPAT.run
@@ -1,11 +1,12 @@
numpy=1.7.1
numpy=1.9.2
dateutil=1.5
pytz=2013b
scipy=0.11.0
scipy=0.14.0
xlwt=0.7.5
xlrd=0.9.2
numexpr=2.2.2
pytables=3.0.0
bottleneck=1.0.0
numexpr=2.4.4 # we test that we correctly don't use an unsupported numexpr
pytables=3.2.2
psycopg2
pymysql=0.6.0
sqlalchemy=0.7.8
2 changes: 1 addition & 1 deletion ci/requirements-2.7_LOCALE.build
@@ -1,5 +1,5 @@
python=2.7*
python-dateutil
pytz=2013b
numpy=1.8.2
numpy=1.9.2
cython=0.23
5 changes: 3 additions & 2 deletions ci/requirements-2.7_LOCALE.run
@@ -1,11 +1,12 @@
python-dateutil
pytz=2013b
numpy=1.8.2
numpy=1.9.2
xlwt=0.7.5
openpyxl=1.6.2
xlsxwriter=0.5.2
xlrd=0.9.2
matplotlib=1.3.1
bottleneck=1.0.0
matplotlib=1.4.3
sqlalchemy=0.8.1
lxml=3.2.1
scipy
2 changes: 1 addition & 1 deletion ci/requirements-2.7_SLOW.build
@@ -1,5 +1,5 @@
python=2.7*
python-dateutil
pytz
numpy=1.8.2
numpy=1.10*
cython
4 changes: 2 additions & 2 deletions ci/requirements-2.7_SLOW.run
@@ -1,7 +1,7 @@
python-dateutil
pytz
numpy=1.8.2
matplotlib=1.3.1
numpy=1.10*
matplotlib=1.4.3
scipy
patsy
xlwt
6 changes: 6 additions & 0 deletions ci/script_multi.sh
@@ -36,9 +36,15 @@ elif [ "$COVERAGE" ]; then
echo pytest -s -n 2 -m "not single" --cov=pandas --cov-report xml:/tmp/cov-multiple.xml --junitxml=/tmp/multiple.xml $TEST_ARGS pandas
pytest -s -n 2 -m "not single" --cov=pandas --cov-report xml:/tmp/cov-multiple.xml --junitxml=/tmp/multiple.xml $TEST_ARGS pandas

elif [ "$SLOW" ]; then
TEST_ARGS="--only-slow --skip-network"
echo pytest -r xX -m "not single and slow" -v --junitxml=/tmp/multiple.xml $TEST_ARGS pandas
pytest -r xX -m "not single and slow" -v --junitxml=/tmp/multiple.xml $TEST_ARGS pandas

else
echo pytest -n 2 -r xX -m "not single" --junitxml=/tmp/multiple.xml $TEST_ARGS pandas
pytest -n 2 -r xX -m "not single" --junitxml=/tmp/multiple.xml $TEST_ARGS pandas # TODO: doctest

fi

RET="$?"
8 changes: 8 additions & 0 deletions ci/script_single.sh
@@ -12,16 +12,24 @@ if [ -n "$LOCALE_OVERRIDE" ]; then
python -c "$pycmd"
fi

if [ "$SLOW" ]; then
TEST_ARGS="--only-slow --skip-network"
fi

if [ "$BUILD_TEST" ]; then
echo "We are not running pytest as this is a build test."

elif [ "$DOC" ]; then
echo "We are not running pytest as this is a doc-build"

elif [ "$COVERAGE" ]; then
echo pytest -s -m "single" --cov=pandas --cov-report xml:/tmp/cov-single.xml --junitxml=/tmp/single.xml $TEST_ARGS pandas
pytest -s -m "single" --cov=pandas --cov-report xml:/tmp/cov-single.xml --junitxml=/tmp/single.xml $TEST_ARGS pandas

else
echo pytest -m "single" -r xX --junitxml=/tmp/single.xml $TEST_ARGS pandas
pytest -m "single" -r xX --junitxml=/tmp/single.xml $TEST_ARGS pandas # TODO: doctest

fi

RET="$?"
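The --skip-slow / --only-slow / --skip-network options passed above are pandas-specific pytest flags. As a rough sketch of how such flags are commonly wired up in a conftest.py (illustrative only, not pandas' actual conftest; the option names are simply reused from the commands above):

```python
# conftest.py sketch: register custom CLI options and use them to skip tests
# carrying the corresponding markers. Illustrative only.
import pytest


def pytest_addoption(parser):
    parser.addoption("--skip-slow", action="store_true", help="skip slow tests")
    parser.addoption("--only-slow", action="store_true", help="run only slow tests")
    parser.addoption("--skip-network", action="store_true", help="skip network tests")


def pytest_collection_modifyitems(config, items):
    skip_slow = pytest.mark.skip(reason="--skip-slow given")
    only_slow = pytest.mark.skip(reason="--only-slow given")
    skip_net = pytest.mark.skip(reason="--skip-network given")
    for item in items:
        is_slow = "slow" in item.keywords
        if config.getoption("--skip-slow") and is_slow:
            item.add_marker(skip_slow)
        if config.getoption("--only-slow") and not is_slow:
            item.add_marker(only_slow)
        if config.getoption("--skip-network") and "network" in item.keywords:
            item.add_marker(skip_net)
```

With options like these in place, setting SLOW=true simply expands to TEST_ARGS="--only-slow --skip-network" before pytest is invoked, which is what the two scripts above now do.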
6 changes: 3 additions & 3 deletions doc/source/install.rst
@@ -203,7 +203,7 @@ Dependencies
------------

* `setuptools <https://setuptools.readthedocs.io/en/latest/>`__
* `NumPy <http://www.numpy.org>`__: 1.7.1 or higher
* `NumPy <http://www.numpy.org>`__: 1.9.0 or higher
* `python-dateutil <http://labix.org/python-dateutil>`__: 1.5 or higher
* `pytz <http://pytz.sourceforge.net/>`__: Needed for time zone support

@@ -233,7 +233,7 @@ Optional Dependencies

* `Cython <http://www.cython.org>`__: Only necessary to build development
version. Version 0.23 or higher.
* `SciPy <http://www.scipy.org>`__: miscellaneous statistical functions
* `SciPy <http://www.scipy.org>`__: miscellaneous statistical functions, Version 0.14.0 or higher
* `xarray <http://xarray.pydata.org>`__: pandas like handling for > 2 dims, needed for converting Panels to xarray objects. Version 0.7.0 or higher is recommended.
* `PyTables <http://www.pytables.org>`__: necessary for HDF5-based storage. Version 3.0.0 or higher required, Version 3.2.1 or higher highly recommended.
* `Feather Format <https://github.com/wesm/feather>`__: necessary for feather-based storage, version 0.3.1 or higher.
@@ -244,7 +244,7 @@ Optional Dependencies
* `pymysql <https://github.com/PyMySQL/PyMySQL>`__: for MySQL.
* `SQLite <https://docs.python.org/3.5/library/sqlite3.html>`__: for SQLite, this is included in Python's standard library by default.

* `matplotlib <http://matplotlib.org/>`__: for plotting
* `matplotlib <http://matplotlib.org/>`__: for plotting, Version 1.4.3 or higher.
* For Excel I/O:

* `xlrd/xlwt <http://www.python-excel.org/>`__: Excel reading (xlrd) and writing (xlwt)
22 changes: 21 additions & 1 deletion doc/source/whatsnew/v0.21.0.txt
@@ -138,6 +138,27 @@ Other Enhancements
Backwards incompatible API changes
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~


.. _whatsnew_0210.api_breaking.deps:

Dependencies have increased minimum versions
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

We have updated our minimum supported versions of dependencies (:issue:`15206`, :issue:`15543`, :issue:`15214`).
If installed, we now require:

+--------------+-----------------+----------+
| Package      | Minimum Version | Required |
+==============+=================+==========+
| Numpy        | 1.9.0           |    X     |
+--------------+-----------------+----------+
| Matplotlib   | 1.4.3           |          |
+--------------+-----------------+----------+
| Scipy        | 0.14.0          |          |
+--------------+-----------------+----------+
| Bottleneck   | 1.0.0           |          |
+--------------+-----------------+----------+

.. _whatsnew_0210.api_breaking.pandas_eval:

Improved error handling during item assignment in pd.eval
@@ -259,7 +280,6 @@ Other API Changes
^^^^^^^^^^^^^^^^^

- Support has been dropped for Python 3.4 (:issue:`15251`)
- Support has been dropped for bottleneck < 1.0.0 (:issue:`15214`)
- The Categorical constructor no longer accepts a scalar for the ``categories`` keyword. (:issue:`16022`)
- Accessing a non-existent attribute on a closed :class:`~pandas.HDFStore` will now
raise an ``AttributeError`` rather than a ``ClosedFileError`` (:issue:`16301`)
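For readers upgrading an existing environment, a quick way to check whether it already meets the new minimums from the table above (an illustrative snippet, not part of this PR):

```python
# Check installed versions against the new pandas 0.21.0 minimums.
from distutils.version import LooseVersion
import importlib

MIN_VERSIONS = {
    'numpy': '1.9.0',       # required
    'matplotlib': '1.4.3',  # optional
    'scipy': '0.14.0',      # optional
    'bottleneck': '1.0.0',  # optional
}

for name, min_version in MIN_VERSIONS.items():
    try:
        module = importlib.import_module(name)
    except ImportError:
        print('%-12s not installed' % name)
        continue
    installed = module.__version__
    status = 'OK' if LooseVersion(installed) >= LooseVersion(min_version) else 'too old'
    print('%-12s %-10s (min %s) %s' % (name, installed, min_version, status))
```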
2 changes: 0 additions & 2 deletions pandas/_libs/sparse.pyx
@@ -12,8 +12,6 @@ from distutils.version import LooseVersion

# numpy versioning
_np_version = np.version.short_version
_np_version_under1p8 = LooseVersion(_np_version) < '1.8'
_np_version_under1p9 = LooseVersion(_np_version) < '1.9'
_np_version_under1p10 = LooseVersion(_np_version) < '1.10'
_np_version_under1p11 = LooseVersion(_np_version) < '1.11'

14 changes: 6 additions & 8 deletions pandas/compat/numpy/__init__.py
@@ -9,19 +9,18 @@
# numpy versioning
_np_version = np.__version__
_nlv = LooseVersion(_np_version)
_np_version_under1p8 = _nlv < '1.8'
_np_version_under1p9 = _nlv < '1.9'
_np_version_under1p10 = _nlv < '1.10'
_np_version_under1p11 = _nlv < '1.11'
_np_version_under1p12 = _nlv < '1.12'
_np_version_under1p13 = _nlv < '1.13'
_np_version_under1p14 = _nlv < '1.14'
_np_version_under1p15 = _nlv < '1.15'

if _nlv < '1.7.0':
if _nlv < '1.9':
raise ImportError('this version of pandas is incompatible with '
'numpy < 1.7.0\n'
'numpy < 1.9.0\n'
'your numpy version is {0}.\n'
'Please upgrade numpy to >= 1.7.0 to use '
'Please upgrade numpy to >= 1.9.0 to use '
'this pandas version'.format(_np_version))


@@ -70,11 +69,10 @@ def np_array_datetime64_compat(arr, *args, **kwargs):


__all__ = ['np',
'_np_version_under1p8',
'_np_version_under1p9',
'_np_version_under1p10',
'_np_version_under1p11',
'_np_version_under1p12',
'_np_version_under1p13',
'_np_version_under1p14'
'_np_version_under1p14',
'_np_version_under1p15'
]
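These feature flags go through LooseVersion rather than plain string comparison because string comparison mishandles multi-digit version components. A small self-contained illustration (the installed version below is made up):

```python
from distutils.version import LooseVersion

# Plain string comparison is lexicographic and gets 1.13 vs 1.9 wrong:
print('1.13.0' < '1.9')                              # True  (wrong)
print(LooseVersion('1.13.0') < LooseVersion('1.9'))  # False (correct)

# Same pattern as the compat module above, under the new 1.9.0 floor:
_nlv = LooseVersion('1.12.1')           # pretend this is the installed NumPy
if _nlv < '1.9':
    raise ImportError('this version of pandas requires numpy >= 1.9.0')
_np_version_under1p13 = _nlv < '1.13'   # feature flag for call sites
print(_np_version_under1p13)            # True
```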
7 changes: 2 additions & 5 deletions pandas/core/algorithms.py
@@ -6,7 +6,6 @@
from warnings import warn, catch_warnings
import numpy as np

from pandas import compat, _np_version_under1p8
from pandas.core.dtypes.cast import maybe_promote
from pandas.core.dtypes.generic import (
ABCSeries, ABCIndex,
@@ -407,14 +406,12 @@ def isin(comps, values):
comps, dtype, _ = _ensure_data(comps)
values, _, _ = _ensure_data(values, dtype=dtype)

# GH11232
# work-around for numpy < 1.8 and comparisions on py3
# faster for larger cases to use np.in1d
f = lambda x, y: htable.ismember_object(x, values)

# GH16012
# Ensure np.in1d doesn't get object types or it *may* throw an exception
if ((_np_version_under1p8 and compat.PY3) or len(comps) > 1000000 and
not is_object_dtype(comps)):
if len(comps) > 1000000 and not is_object_dtype(comps):
f = lambda x, y: np.in1d(x, y)
elif is_integer_dtype(comps):
try:
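A simplified sketch of the dispatch above; pandas actually routes the small and object-dtype cases through its own hash table (htable.ismember_object), and the set-based fallback here is for illustration only:

```python
import numpy as np


def isin_sketch(comps, values):
    comps = np.asarray(comps)
    values = np.asarray(values)
    # GH16012: np.in1d may raise on object dtype and only pays off for large
    # inputs, so gate on both size and dtype.
    if len(comps) > 1000000 and comps.dtype != object:
        return np.in1d(comps, values)
    # hash-based fallback (pandas uses htable.ismember_object here)
    value_set = set(values.tolist())
    return np.fromiter((x in value_set for x in comps), dtype=bool, count=len(comps))


print(isin_sketch([1, 2, 3], [2, 4]))   # [False  True False]
```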
5 changes: 1 addition & 4 deletions pandas/core/generic.py
@@ -1827,11 +1827,8 @@ def _box_item_values(self, key, values):

def _maybe_cache_changed(self, item, value):
"""The object has called back to us saying maybe it has changed.

numpy < 1.8 has an issue with object arrays and aliasing
GH6026
"""
self._data.set(item, value, check=pd._np_version_under1p8)
self._data.set(item, value, check=False)

@property
def _is_cached(self):
8 changes: 2 additions & 6 deletions pandas/core/groupby.py
@@ -13,7 +13,7 @@
)

from pandas import compat
from pandas.compat.numpy import function as nv, _np_version_under1p8
from pandas.compat.numpy import function as nv
from pandas.compat import set_function_name

from pandas.core.dtypes.common import (
@@ -3257,11 +3257,7 @@ def value_counts(self, normalize=False, sort=True, ascending=False,
d = np.diff(np.r_[idx, len(ids)])
if dropna:
m = ids[lab == -1]
if _np_version_under1p8:
mi, ml = algorithms.factorize(m)
d[ml] = d[ml] - np.bincount(mi)
else:
np.add.at(d, m, -1)
np.add.at(d, m, -1)
acc = rep(d)[mask]
else:
acc = rep(d)
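The removed branch existed only because np.add.at arrived in NumPy 1.8; with the floor now at 1.9.0 it is always available. A small demonstration of the two equivalent code paths, using made-up inputs:

```python
import numpy as np
import pandas as pd

d = np.array([3, 2, 4])
m = np.array([0, 0, 2])   # positions to decrement, with repeats

# Unbuffered in-place add: repeated indices each take effect.
np.add.at(d, m, -1)
print(d)                  # [1 2 3]

# The old fallback for pre-1.8 NumPy produced the same result:
d2 = np.array([3, 2, 4])
mi, ml = pd.factorize(m)
d2[ml] = d2[ml] - np.bincount(mi)
print(d2)                 # [1 2 3]
```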
16 changes: 2 additions & 14 deletions pandas/core/internals.py
@@ -69,8 +69,7 @@
import pandas.core.computation.expressions as expressions
from pandas.util._decorators import cache_readonly
from pandas.util._validators import validate_bool_kwarg

from pandas import compat, _np_version_under1p9
from pandas import compat
from pandas.compat import range, map, zip, u


@@ -857,9 +856,6 @@ def _is_empty_indexer(indexer):

# set
else:
if _np_version_under1p9:
# Work around GH 6168 to support old numpy
indexer = getattr(indexer, 'values', indexer)
values[indexer] = value

# coerce and try to infer the dtypes of the result
@@ -1482,15 +1478,7 @@ def quantile(self, qs, interpolation='linear', axis=0, mgr=None):
tuple of (axis, block)

"""
if _np_version_under1p9:
if interpolation != 'linear':
raise ValueError("Interpolation methods other than linear "
"are not supported in numpy < 1.9.")

kw = {}
if not _np_version_under1p9:
kw.update({'interpolation': interpolation})

kw = {'interpolation': interpolation}
values = self.get_values()
values, _, _, _ = self._try_coerce_args(values, values)
