Skip to content

Deprecate old pandas support #1530

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 8 commits into from
Aug 31, 2017
Merged
Show file tree
Hide file tree
Changes from 4 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 0 additions & 5 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -76,11 +76,6 @@ before_install:
install:
- conda env create --file ci/requirements-$CONDA_ENV.yml
- source activate test_env
# scipy should not have been installed, but it's included in older versions of
# the conda pandas package
- if [[ "$CONDA_ENV" == "py27-min" ]]; then
conda remove scipy;
fi
- python setup.py install

script:
Expand Down
4 changes: 2 additions & 2 deletions ci/requirements-py27-min.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,8 @@ name: test_env
dependencies:
- python=2.7
- pytest
- numpy==1.9.3
- pandas==0.15.0
- numpy==1.11
- pandas==0.18.0
- pip:
- coveralls
- pytest-cov
4 changes: 4 additions & 0 deletions doc/whats-new.rst
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,10 @@ Enhancements
(:issue:`576`).
By `Stephan Hoyer <https://github.com/shoyer>`_.

- Drop support for old numpy (< 1.11) and pandas (< 0.18) (:issue:`1512`).
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This should go to the top of the release notes, under a heading "Backwards incompatible changes"

Also: these versions aren't just deprecated -- they are no longer supported at all.

By `Keisuke Fujii <https://github.com/fujiisoup>`_.


Bug fixes
~~~~~~~~~

Expand Down
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@
'Topic :: Scientific/Engineering',
]

INSTALL_REQUIRES = ['numpy >= 1.7', 'pandas >= 0.15.0']
INSTALL_REQUIRES = ['numpy >= 1.11', 'pandas >= 0.18.0']
TESTS_REQUIRE = ['pytest >= 2.7.1']

DESCRIPTION = "N-D labeled arrays and datasets in Python"
Expand Down
4 changes: 2 additions & 2 deletions xarray/core/duck_array_ops.py
Original file line number Diff line number Diff line change
Expand Up @@ -83,10 +83,10 @@ def isnull(data):
where = _dask_or_eager_func('where', n_array_args=3)
insert = _dask_or_eager_func('insert')
take = _dask_or_eager_func('take')
broadcast_to = _dask_or_eager_func('broadcast_to', npcompat)
broadcast_to = _dask_or_eager_func('broadcast_to')

concatenate = _dask_or_eager_func('concatenate', list_of_args=True)
stack = _dask_or_eager_func('stack', npcompat, list_of_args=True)
stack = _dask_or_eager_func('stack', list_of_args=True)

array_all = _dask_or_eager_func('all')
array_any = _dask_or_eager_func('any')
Expand Down
130 changes: 1 addition & 129 deletions xarray/core/npcompat.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,139 +4,11 @@
import numpy as np

try:
from numpy import broadcast_to, stack, nanprod, nancumsum, nancumprod
from numpy import nanprod, nancumsum, nancumprod
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We can remove nanprod, too -- it arrived in NumPy 1.10

except ImportError: # pragma: no cover
# Code copied from a newer version of NumPy (v1.12).
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

v1.10 to v1.12 -> v1.12

# Used under the terms of NumPy's license, see licenses/NUMPY_LICENSE.

def _maybe_view_as_subclass(original_array, new_array):
if type(original_array) is not type(new_array):
# if input was an ndarray subclass and subclasses were OK,
# then view the result as that subclass.
new_array = new_array.view(type=type(original_array))
# Since we have done something akin to a view from original_array, we
# should let the subclass finalize (if it has it implemented, i.e., is
# not None).
if new_array.__array_finalize__:
new_array.__array_finalize__(original_array)
return new_array

def _broadcast_to(array, shape, subok, readonly):
    """Core of ``broadcast_to``: build a (possibly writable) broadcast view.

    ``shape`` may be a single integer or an iterable of sizes.  The view is
    produced via ``np.nditer`` with an explicit ``itershape`` and re-wrapped
    as the input's subclass; it is made writable only when ``readonly`` is
    false and the source array itself is writable.
    """
    if np.iterable(shape):
        shape = tuple(shape)
    else:
        shape = (shape,)
    array = np.array(array, copy=False, subok=subok)
    if array.shape and not shape:
        raise ValueError('cannot broadcast a non-scalar to a scalar array')
    if not all(size >= 0 for size in shape):
        raise ValueError('all elements of broadcast shape must be non-'
                         'negative')
    # nditer with an explicit itershape yields the broadcast view directly.
    it = np.nditer(
        (array,), flags=['multi_index', 'zerosize_ok', 'refs_ok'],
        op_flags=['readonly'], itershape=shape, order='C')
    result = _maybe_view_as_subclass(array, it.itviews[0])
    if not readonly and array.flags.writeable:
        result.flags.writeable = True
    return result

def broadcast_to(array, shape, subok=False):
    """Broadcast an array to a new shape.

    Parameters
    ----------
    array : array_like
        The array to broadcast.
    shape : tuple
        The shape of the desired array.
    subok : bool, optional
        If True, sub-classes are passed through; otherwise the returned
        array is forced to be a base-class array (the default).

    Returns
    -------
    broadcast : array
        A read-only view on the original array with the given shape.  It is
        typically not contiguous, and more than one of its elements may
        refer to a single memory location.

    Raises
    ------
    ValueError
        If the array is not compatible with the new shape according to
        NumPy's broadcasting rules.

    Examples
    --------
    >>> x = np.array([1, 2, 3])
    >>> np.broadcast_to(x, (3, 3))
    array([[1, 2, 3],
           [1, 2, 3],
           [1, 2, 3]])
    """
    # Public entry point: always hand back a read-only view.
    return _broadcast_to(array, shape, subok=subok, readonly=True)

def stack(arrays, axis=0):
    """Join a sequence of arrays along a new axis.

    .. versionadded:: 1.10.0

    Parameters
    ----------
    arrays : sequence of ndarrays
        Each array must have the same shape.
    axis : int, optional
        The axis along which the arrays will be stacked.

    Returns
    -------
    stacked : ndarray
        The stacked array has one more dimension than the input arrays.

    See Also
    --------
    concatenate : Join a sequence of arrays along an existing axis.
    split : Split array into a list of multiple sub-arrays of equal size.

    Examples
    --------
    >>> a = np.array([1, 2, 3])
    >>> b = np.array([2, 3, 4])
    >>> np.stack((a, b))
    array([[1, 2, 3],
           [2, 3, 4]])

    >>> np.stack((a, b), axis=-1)
    array([[1, 2],
           [2, 3],
           [3, 4]])
    """
    arrays = [np.asanyarray(arr) for arr in arrays]
    if len(arrays) == 0:
        raise ValueError('need at least one array to stack')

    # All inputs must share a single common shape.
    first_shape = arrays[0].shape
    if any(arr.shape != first_shape for arr in arrays[1:]):
        raise ValueError('all input arrays must have the same shape')

    result_ndim = arrays[0].ndim + 1
    if axis < -result_ndim or axis >= result_ndim:
        raise IndexError(
            'axis {0} out of bounds [-{1}, {1})'.format(axis, result_ndim))
    if axis < 0:
        axis += result_ndim

    # Insert a length-1 axis at the target position, then concatenate
    # along it to produce the stacked result.
    expander = (slice(None),) * axis + (np.newaxis,)
    return np.concatenate([arr[expander] for arr in arrays], axis=axis)

def _replace_nan(a, val):
"""
If `a` is of inexact type, make a copy of `a`, replace NaNs with
Expand Down
4 changes: 2 additions & 2 deletions xarray/tests/test_computation.py
Original file line number Diff line number Diff line change
Expand Up @@ -275,7 +275,7 @@ def test_apply_output_core_dimension():

def stack_negative(obj):
def func(x):
return xr.core.npcompat.stack([x, -x], axis=-1)
return np.stack([x, -x], axis=-1)
result = apply_ufunc(func, obj, output_core_dims=[['sign']])
if isinstance(result, (xr.Dataset, xr.DataArray)):
result.coords['sign'] = [1, -1]
Expand Down Expand Up @@ -303,7 +303,7 @@ def func(x):

def original_and_stack_negative(obj):
def func(x):
return (x, xr.core.npcompat.stack([x, -x], axis=-1))
return (x, np.stack([x, -x], axis=-1))
result = apply_ufunc(func, obj, output_core_dims=[[], ['sign']])
if isinstance(result[1], (xr.Dataset, xr.DataArray)):
result[1].coords['sign'] = [1, -1]
Expand Down
2 changes: 0 additions & 2 deletions xarray/tests/test_dataarray.py
Original file line number Diff line number Diff line change
Expand Up @@ -1442,8 +1442,6 @@ def test_reduce(self):
expected = DataArray(5, {'c': -999})
self.assertDataArrayIdentical(expected, actual)

@pytest.mark.skipif(LooseVersion(np.__version__) < LooseVersion('1.10.0'),
reason='requires numpy version 1.10.0 or later')
# skip due to bug in older versions of numpy.nanpercentile
def test_quantile(self):
for q in [0.25, [0.50], [0.25, 0.75]]:
Expand Down
2 changes: 0 additions & 2 deletions xarray/tests/test_dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -2923,8 +2923,6 @@ def mean_only_one_axis(x, axis):
with self.assertRaisesRegexp(TypeError, 'non-integer axis'):
ds.reduce(mean_only_one_axis, ['x', 'y'])

@pytest.mark.skipif(LooseVersion(np.__version__) < LooseVersion('1.10.0'),
reason='requires numpy version 1.10.0 or later')
def test_quantile(self):

ds = create_test_data(seed=123)
Expand Down