Commit 9f379ac

BUG: quantile should drop non-numeric columns instead of raising
1 parent 6a6faf5

3 files changed: +23 −3 lines

doc/source/whatsnew/v1.1.0.rst

Lines changed: 1 addition & 0 deletions
@@ -1052,6 +1052,7 @@ Groupby/resample/rolling
 - Bug in :meth:`SeriesGroupBy.agg` where any column name was accepted in the named aggregation of ``SeriesGroupBy`` previously. The behaviour now allows only ``str`` and callables else would raise ``TypeError``. (:issue:`34422`)
 - Bug in :meth:`DataFrame.groupby` lost index, when one of the ``agg`` keys referenced an empty list (:issue:`32580`)
 - Bug in :meth:`Rolling.apply` where ``center=True`` was ignored when ``engine='numba'`` was specified (:issue:`34784`)
+- Bug in :meth:`core.groupby.DataFrameGroupBy.quantile` raises ``TypeError`` for non-numeric types rather than dropping columns (:issue:`27892`)

 Reshaping
 ^^^^^^^^^
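For context, a small usage sketch of the behaviour this whatsnew entry describes; the frame and column names below are made up for illustration and are not taken from the pandas test suite.

import pandas as pd

# Hypothetical frame mixing a numeric and an object-dtype column.
df = pd.DataFrame({"key": [1, 1, 2], "num": [1.0, 3.0, 5.0], "txt": ["a", "b", "c"]})

# Before this commit the object column "txt" made quantile() raise TypeError;
# with the fix the result simply contains the numeric column(s) only.
print(df.groupby("key").quantile(0.5))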

pandas/core/groupby/groupby.py

Lines changed: 14 additions & 3 deletions
@@ -2400,7 +2400,7 @@ def _get_cythonized_result(
             signature
         needs_2d : bool, default False
             Whether the values and result of the Cython call signature
-            are at least 2-dimensional.
+            are 2-dimensional.
         min_count : int, default None
             When not None, min_count for the Cython call
         needs_mask : bool, default False
@@ -2416,7 +2416,9 @@ def _get_cythonized_result(
             Function should return a tuple where the first element is the
             values to be passed to Cython and the second element is an optional
             type which the values should be converted to after being returned
-            by the Cython operation. Raises if `needs_values` is False.
+            by the Cython operation. This function is also responsible for
+            raising a TypeError if the values have an invalid type. Raises
+            if `needs_values` is False.
         post_processing : function, default None
             Function to be applied to result of Cython function. Should accept
             an array of values as the first argument and type inferences as its
@@ -2448,6 +2450,7 @@ def _get_cythonized_result(
         output: Dict[base.OutputKey, np.ndarray] = {}
         base_func = getattr(libgroupby, how)

+        error_msg = ""
         for idx, obj in enumerate(self._iterate_slices()):
             name = obj.name
             values = obj._values
@@ -2474,7 +2477,11 @@ def _get_cythonized_result(
             if needs_values:
                 vals = values
                 if pre_processing:
-                    vals, inferences = pre_processing(vals)
+                    try:
+                        vals, inferences = pre_processing(vals)
+                    except TypeError as e:
+                        error_msg = str(e)
+                        continue
                 if needs_2d:
                     vals = vals.reshape((-1, 1))
                 vals = vals.astype(cython_dtype, copy=False)
@@ -2506,6 +2513,10 @@ def _get_cythonized_result(
             key = base.OutputKey(label=name, position=idx)
             output[key] = result

+        # error_msg is "" on an frame/series with no rows or columns
+        if len(output) == 0 and error_msg != "":
+            raise TypeError(error_msg)
+
         if aggregate:
             return self._wrap_aggregated_output(output)
         else:
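The groupby.py hunks above follow a collect-and-defer error pattern: a slice whose pre_processing raises TypeError is skipped and its message remembered, and the error is only re-raised if no slice produced output at all. Below is a minimal standalone sketch of that pattern, with an invented helper name and toy inputs (not pandas API).

from typing import Callable, Dict, List


def aggregate_skipping_bad_columns(
    columns: Dict[str, List[object]], func: Callable[[List[object]], float]
) -> Dict[str, float]:
    # Mirrors the pattern in _get_cythonized_result above: skip columns whose
    # processing raises TypeError, remember the last message, and raise only
    # if every column failed.
    output: Dict[str, float] = {}
    error_msg = ""
    for name, values in columns.items():
        try:
            result = func(values)
        except TypeError as err:
            error_msg = str(err)  # remember why this column was skipped
            continue
        output[name] = result
    # error_msg stays "" when there were no columns to begin with
    if len(output) == 0 and error_msg != "":
        raise TypeError(error_msg)
    return output


# Dropping behaviour: the string column is skipped, the float column is kept.
data = {"b": [2.0, 3.0], "c": ["x", "y"]}
print(aggregate_skipping_bad_columns(data, lambda vals: float(sum(vals))))  # {'b': 5.0}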

pandas/tests/groupby/test_quantile.py

Lines changed: 8 additions & 0 deletions
@@ -232,3 +232,11 @@ def test_groupby_quantile_nullable_array(values, q):

     expected = pd.Series(true_quantiles * 2, index=idx, name="b")
     tm.assert_series_equal(result, expected)
+
+
+@pytest.mark.parametrize("q", [0.5, [0.0, 0.5, 1.0]])
+def test_groupby_quantile_skips_invalid_dtype(q):
+    df = pd.DataFrame({"a": [1], "b": [2.0], "c": ["x"]})
+    result = df.groupby("a").quantile(0.5)
+    expected = df.set_index("a")[["b"]]
+    tm.assert_frame_equal(result, expected)
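Run by hand, the new test's frame should behave roughly as follows; the printed repr is sketched from the expected frame in the test, not captured from a real session.

import pandas as pd

df = pd.DataFrame({"a": [1], "b": [2.0], "c": ["x"]})
print(df.groupby("a").quantile(0.5))
# Only the float column survives; the object column "c" is dropped:
#      b
# a
# 1  2.0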
