Skip to content

Commit

Permalink
Add whatsnew, small CR refactor
Browse files Browse the repository at this point in the history
  • Loading branch information
alonme committed May 29, 2020
1 parent 7ee1faa commit fa6299d
Show file tree
Hide file tree
Showing 2 changed files with 67 additions and 11 deletions.
59 changes: 59 additions & 0 deletions doc/source/whatsnew/v1.1.0.rst
Original file line number Diff line number Diff line change
Expand Up @@ -630,6 +630,65 @@ Using :meth:`DataFrame.groupby` with ``as_index=False`` and the function ``idxma
df.groupby("a", as_index=False).nunique()
.. _whatsnew_110.api_breaking.apply_applymap_first_once:

apply and applymap on ``DataFrame`` evaluate first row/column only once
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

.. ipython:: python
In [1]: import pandas as pd
...: df = pd.DataFrame({'a': [1,2], 'b': [3,6]})
...:
In [2]: df
Out[2]:
a b
0 1 3
1 2 6
In [3]: def func(row):
...: print(row)
...: return row
...:
*Previous behavior*:

.. ipython:: python
In [4]: df.apply(func, axis=1)
a 1
b 3
Name: 0, dtype: int64
a 1
b 3
Name: 0, dtype: int64
a 2
b 6
Name: 1, dtype: int64
Out[4]:
a b
0 1 3
1 2 6
*New behavior*:

.. ipython:: python
In [4]: df.apply(func, axis=1)
a 1
b 3
Name: 0, dtype: int64
a 2
b 6
Name: 1, dtype: int64
Out[4]:
a b
0 1 3
1 2 6
.. _whatsnew_110.deprecations:

Deprecations
Expand Down
19 changes: 8 additions & 11 deletions pandas/core/apply.py
Original file line number Diff line number Diff line change
Expand Up @@ -263,7 +263,8 @@ def apply_broadcast(self, target: "DataFrame") -> "DataFrame":

def apply_standard(self):

partial_result = None # partial result that may be returned from reduction.
# partial result that may be returned from reduction
partial_result = None

# try to reduce first (by default)
# this only matters if the reduction in values is of different dtype
Expand Down Expand Up @@ -305,12 +306,12 @@ def apply_standard(self):
else:
if reduction_success:
return self.obj._constructor_sliced(result, index=labels)
else:
# no exceptions - however reduction was unsuccessful,
# use the computed function result for first element
partial_result = result[0]
if isinstance(partial_result, ABCSeries):
partial_result = partial_result.infer_objects()

# no exceptions - however reduction was unsuccessful,
# use the computed function result for first element
partial_result = result[0]
if isinstance(partial_result, ABCSeries):
partial_result = partial_result.infer_objects()

# compute the result using the series generator,
# use the result computed while trying to reduce if available.
Expand All @@ -323,7 +324,6 @@ def apply_series_generator(self, partial_result=None) -> Tuple[ResType, "Index"]
series_gen = self.series_generator
res_index = self.result_index

keys = []
results = {}

# If a partial result was already computed,
Expand All @@ -332,7 +332,6 @@ def apply_series_generator(self, partial_result=None) -> Tuple[ResType, "Index"]
if partial_result is not None:
i, v = next(series_gen_enumeration)
results[i] = partial_result
keys.append(v.name)

if self.ignore_failures:
successes = []
Expand All @@ -342,7 +341,6 @@ def apply_series_generator(self, partial_result=None) -> Tuple[ResType, "Index"]
except Exception:
pass
else:
keys.append(v.name)
successes.append(i)

# so will work with MultiIndex
Expand All @@ -353,7 +351,6 @@ def apply_series_generator(self, partial_result=None) -> Tuple[ResType, "Index"]
for i, v in series_gen_enumeration:

results[i] = self.f(v)
keys.append(v.name)

return results, res_index

Expand Down

0 comments on commit fa6299d

Please sign in to comment.