Skip to content

Commit

Permalink
Use memory views in .pyx files
Browse files Browse the repository at this point in the history
  • Loading branch information
noamher committed Jan 25, 2019
1 parent 1a64333 commit 55e5dbc
Show file tree
Hide file tree
Showing 9 changed files with 55 additions and 55 deletions.
26 changes: 13 additions & 13 deletions pandas/_libs/algos.pyx
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ class NegInfinity(object):

@cython.wraparound(False)
@cython.boundscheck(False)
cpdef ndarray[int64_t, ndim=1] unique_deltas(ndarray[int64_t] arr):
cpdef ndarray[int64_t, ndim=1] unique_deltas(const int64_t[:] arr):
"""
Efficiently find the unique first-differences of the given array.
Expand Down Expand Up @@ -150,7 +150,7 @@ def is_lexsorted(list_of_arrays: list) -> bint:

@cython.boundscheck(False)
@cython.wraparound(False)
def groupsort_indexer(ndarray[int64_t] index, Py_ssize_t ngroups):
def groupsort_indexer(const int64_t[:] index, Py_ssize_t ngroups):
"""
compute a 1-d indexer that is an ordering of the passed index,
ordered by the groups. This is a reverse of the label
Expand Down Expand Up @@ -230,7 +230,7 @@ def kth_smallest(numeric[:] a, Py_ssize_t k) -> numeric:

@cython.boundscheck(False)
@cython.wraparound(False)
def nancorr(ndarray[float64_t, ndim=2] mat, bint cov=0, minp=None):
def nancorr(const float64_t[:, :] mat, bint cov=0, minp=None):
cdef:
Py_ssize_t i, j, xi, yi, N, K
bint minpv
Expand Down Expand Up @@ -294,7 +294,7 @@ def nancorr(ndarray[float64_t, ndim=2] mat, bint cov=0, minp=None):

@cython.boundscheck(False)
@cython.wraparound(False)
def nancorr_spearman(ndarray[float64_t, ndim=2] mat, Py_ssize_t minp=1):
def nancorr_spearman(const float64_t[:, :] mat, Py_ssize_t minp=1):
cdef:
Py_ssize_t i, j, xi, yi, N, K
ndarray[float64_t, ndim=2] result
Expand Down Expand Up @@ -435,8 +435,8 @@ def pad(ndarray[algos_t] old, ndarray[algos_t] new, limit=None):

@cython.boundscheck(False)
@cython.wraparound(False)
def pad_inplace(ndarray[algos_t] values,
ndarray[uint8_t, cast=True] mask,
def pad_inplace(algos_t[:] values,
const uint8_t[:] mask,
limit=None):
cdef:
Py_ssize_t i, N
Expand Down Expand Up @@ -472,8 +472,8 @@ def pad_inplace(ndarray[algos_t] values,

@cython.boundscheck(False)
@cython.wraparound(False)
def pad_2d_inplace(ndarray[algos_t, ndim=2] values,
ndarray[uint8_t, ndim=2] mask,
def pad_2d_inplace(algos_t[:, :] values,
const uint8_t[:, :] mask,
limit=None):
cdef:
Py_ssize_t i, j, N, K
Expand Down Expand Up @@ -602,8 +602,8 @@ def backfill(ndarray[algos_t] old, ndarray[algos_t] new, limit=None):

@cython.boundscheck(False)
@cython.wraparound(False)
def backfill_inplace(ndarray[algos_t] values,
ndarray[uint8_t, cast=True] mask,
def backfill_inplace(algos_t[:] values,
const uint8_t[:] mask,
limit=None):
cdef:
Py_ssize_t i, N
Expand Down Expand Up @@ -639,8 +639,8 @@ def backfill_inplace(ndarray[algos_t] values,

@cython.boundscheck(False)
@cython.wraparound(False)
def backfill_2d_inplace(ndarray[algos_t, ndim=2] values,
ndarray[uint8_t, ndim=2] mask,
def backfill_2d_inplace(algos_t[:, :] values,
const uint8_t[:, :] mask,
limit=None):
cdef:
Py_ssize_t i, j, N, K
Expand Down Expand Up @@ -678,7 +678,7 @@ def backfill_2d_inplace(ndarray[algos_t, ndim=2] values,

@cython.wraparound(False)
@cython.boundscheck(False)
def arrmap(ndarray[algos_t] index, object func):
def arrmap(algos_t[:] index, object func):
cdef:
Py_ssize_t length = index.shape[0]
Py_ssize_t i = 0
Expand Down
4 changes: 2 additions & 2 deletions pandas/_libs/hashtable.pyx
Original file line number Diff line number Diff line change
Expand Up @@ -142,7 +142,7 @@ cdef class Int64Factorizer:

@cython.wraparound(False)
@cython.boundscheck(False)
def unique_label_indices(ndarray[int64_t, ndim=1] labels):
def unique_label_indices(const int64_t[:] labels):
"""
indices of the first occurrences of the unique labels
*excluding* -1. equivalent to:
Expand Down Expand Up @@ -170,6 +170,6 @@ def unique_label_indices(ndarray[int64_t, ndim=1] labels):
kh_destroy_int64(table)

arr = idx.to_array()
arr = arr[labels[arr].argsort()]
arr = arr[np.asarray(labels)[arr].argsort()]

return arr[1:] if arr.size != 0 and labels[arr[0]] == -1 else arr
2 changes: 1 addition & 1 deletion pandas/_libs/index.pyx
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ cpdef get_value_at(ndarray arr, object loc, object tz=None):
return util.get_value_at(arr, loc)


def get_value_box(arr: ndarray, loc: object) -> object:
def get_value_box(ndarray arr, object loc) -> object:
return get_value_at(arr, loc, tz=None)


Expand Down
46 changes: 23 additions & 23 deletions pandas/_libs/join.pyx
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ from pandas._libs.algos import groupsort_indexer, ensure_platform_int
from pandas.core.algorithms import take_nd


def inner_join(ndarray[int64_t] left, ndarray[int64_t] right,
def inner_join(const int64_t[:] left, const int64_t[:] right,
Py_ssize_t max_groups):
cdef:
Py_ssize_t i, j, k, count = 0
Expand Down Expand Up @@ -65,7 +65,7 @@ def inner_join(ndarray[int64_t] left, ndarray[int64_t] right,
_get_result_indexer(right_sorter, right_indexer))


def left_outer_join(ndarray[int64_t] left, ndarray[int64_t] right,
def left_outer_join(const int64_t[:] left, const int64_t[:] right,
Py_ssize_t max_groups, sort=True):
cdef:
Py_ssize_t i, j, k, count = 0
Expand Down Expand Up @@ -139,7 +139,7 @@ def left_outer_join(ndarray[int64_t] left, ndarray[int64_t] right,
return left_indexer, right_indexer


def full_outer_join(ndarray[int64_t] left, ndarray[int64_t] right,
def full_outer_join(const int64_t[:] left, const int64_t[:] right,
Py_ssize_t max_groups):
cdef:
Py_ssize_t i, j, k, count = 0
Expand Down Expand Up @@ -213,7 +213,7 @@ def _get_result_indexer(sorter, indexer):
return res


def ffill_indexer(ndarray[int64_t] indexer):
def ffill_indexer(const int64_t[:] indexer):
cdef:
Py_ssize_t i, n = len(indexer)
ndarray[int64_t] result
Expand Down Expand Up @@ -252,7 +252,7 @@ ctypedef fused join_t:

@cython.wraparound(False)
@cython.boundscheck(False)
def left_join_indexer_unique(ndarray[join_t] left, ndarray[join_t] right):
def left_join_indexer_unique(join_t[:] left, join_t[:] right):
cdef:
Py_ssize_t i, j, nleft, nright
ndarray[int64_t] indexer
Expand Down Expand Up @@ -677,10 +677,10 @@ ctypedef fused by_t:
uint64_t


def asof_join_backward_on_X_by_Y(ndarray[asof_t] left_values,
ndarray[asof_t] right_values,
ndarray[by_t] left_by_values,
ndarray[by_t] right_by_values,
def asof_join_backward_on_X_by_Y(asof_t[:] left_values,
asof_t[:] right_values,
by_t[:] left_by_values,
by_t[:] right_by_values,
bint allow_exact_matches=1,
tolerance=None):

Expand Down Expand Up @@ -746,10 +746,10 @@ def asof_join_backward_on_X_by_Y(ndarray[asof_t] left_values,
return left_indexer, right_indexer


def asof_join_forward_on_X_by_Y(ndarray[asof_t] left_values,
ndarray[asof_t] right_values,
ndarray[by_t] left_by_values,
ndarray[by_t] right_by_values,
def asof_join_forward_on_X_by_Y(asof_t[:] left_values,
asof_t[:] right_values,
by_t[:] left_by_values,
by_t[:] right_by_values,
bint allow_exact_matches=1,
tolerance=None):

Expand Down Expand Up @@ -815,10 +815,10 @@ def asof_join_forward_on_X_by_Y(ndarray[asof_t] left_values,
return left_indexer, right_indexer


def asof_join_nearest_on_X_by_Y(ndarray[asof_t] left_values,
ndarray[asof_t] right_values,
ndarray[by_t] left_by_values,
ndarray[by_t] right_by_values,
def asof_join_nearest_on_X_by_Y(asof_t[:] left_values,
asof_t[:] right_values,
by_t[:] left_by_values,
by_t[:] right_by_values,
bint allow_exact_matches=1,
tolerance=None):

Expand Down Expand Up @@ -864,8 +864,8 @@ def asof_join_nearest_on_X_by_Y(ndarray[asof_t] left_values,
# asof_join
# ----------------------------------------------------------------------

def asof_join_backward(ndarray[asof_t] left_values,
ndarray[asof_t] right_values,
def asof_join_backward(asof_t[:] left_values,
asof_t[:] right_values,
bint allow_exact_matches=1,
tolerance=None):

Expand Down Expand Up @@ -917,8 +917,8 @@ def asof_join_backward(ndarray[asof_t] left_values,
return left_indexer, right_indexer


def asof_join_forward(ndarray[asof_t] left_values,
ndarray[asof_t] right_values,
def asof_join_forward(asof_t[:] left_values,
asof_t[:] right_values,
bint allow_exact_matches=1,
tolerance=None):

Expand Down Expand Up @@ -971,8 +971,8 @@ def asof_join_forward(ndarray[asof_t] left_values,
return left_indexer, right_indexer


def asof_join_nearest(ndarray[asof_t] left_values,
ndarray[asof_t] right_values,
def asof_join_nearest(asof_t[:] left_values,
asof_t[:] right_values,
bint allow_exact_matches=1,
tolerance=None):

Expand Down
24 changes: 12 additions & 12 deletions pandas/_libs/lib.pyx
Original file line number Diff line number Diff line change
Expand Up @@ -378,7 +378,7 @@ def fast_zip(list ndarrays):
return result


def get_reverse_indexer(ndarray[int64_t] indexer, Py_ssize_t length):
def get_reverse_indexer(const int64_t[:] indexer, Py_ssize_t length):
"""
Reverse indexing operation.
Expand Down Expand Up @@ -407,7 +407,7 @@ def get_reverse_indexer(ndarray[int64_t] indexer, Py_ssize_t length):

@cython.wraparound(False)
@cython.boundscheck(False)
def has_infs_f4(ndarray[float32_t] arr) -> bool:
def has_infs_f4(const float32_t[:] arr) -> bool:
cdef:
Py_ssize_t i, n = len(arr)
float32_t inf, neginf, val
Expand All @@ -424,7 +424,7 @@ def has_infs_f4(ndarray[float32_t] arr) -> bool:

@cython.wraparound(False)
@cython.boundscheck(False)
def has_infs_f8(ndarray[float64_t] arr) -> bool:
def has_infs_f8(const float64_t[:] arr) -> bool:
cdef:
Py_ssize_t i, n = len(arr)
float64_t inf, neginf, val
Expand Down Expand Up @@ -662,7 +662,7 @@ def clean_index_list(obj: list):
# is a general, O(max(len(values), len(binner))) method.
@cython.boundscheck(False)
@cython.wraparound(False)
def generate_bins_dt64(ndarray[int64_t] values, ndarray[int64_t] binner,
def generate_bins_dt64(ndarray[int64_t] values, const int64_t[:] binner,
object closed='left', bint hasnans=0):
"""
Int64 (datetime64) version of generic python version in groupby.py
Expand Down Expand Up @@ -725,7 +725,7 @@ def generate_bins_dt64(ndarray[int64_t] values, ndarray[int64_t] binner,

@cython.boundscheck(False)
@cython.wraparound(False)
def row_bool_subset(ndarray[float64_t, ndim=2] values,
def row_bool_subset(const float64_t[:, :] values,
ndarray[uint8_t, cast=True] mask):
cdef:
Py_ssize_t i, j, n, k, pos = 0
Expand Down Expand Up @@ -769,8 +769,8 @@ def row_bool_subset_object(ndarray[object, ndim=2] values,

@cython.boundscheck(False)
@cython.wraparound(False)
def get_level_sorter(ndarray[int64_t, ndim=1] label,
ndarray[int64_t, ndim=1] starts):
def get_level_sorter(const int64_t[:] label,
const int64_t[:] starts):
"""
argsort for a single level of a multi-index, keeping the order of higher
levels unchanged. `starts` points to starts of same-key indices w.r.t
Expand All @@ -785,15 +785,15 @@ def get_level_sorter(ndarray[int64_t, ndim=1] label,

for i in range(len(starts) - 1):
l, r = starts[i], starts[i + 1]
out[l:r] = l + label[l:r].argsort(kind='mergesort')
out[l:r] = l + np.asarray(label)[l:r].argsort(kind='mergesort')

return out


@cython.boundscheck(False)
@cython.wraparound(False)
def count_level_2d(ndarray[uint8_t, ndim=2, cast=True] mask,
ndarray[int64_t, ndim=1] labels,
const int64_t[:] labels,
Py_ssize_t max_bin,
int axis):
cdef:
Expand All @@ -820,7 +820,7 @@ def count_level_2d(ndarray[uint8_t, ndim=2, cast=True] mask,
return counts


def generate_slices(ndarray[int64_t] labels, Py_ssize_t ngroups):
def generate_slices(const int64_t[:] labels, Py_ssize_t ngroups):
cdef:
Py_ssize_t i, group_size, n, start
int64_t lab
Expand Down Expand Up @@ -849,7 +849,7 @@ def generate_slices(ndarray[int64_t] labels, Py_ssize_t ngroups):
return starts, ends


def indices_fast(object index, ndarray[int64_t] labels, list keys,
def indices_fast(object index, const int64_t[:] labels, list keys,
list sorted_labels):
cdef:
Py_ssize_t i, j, k, lab, cur, start, n = len(labels)
Expand Down Expand Up @@ -2148,7 +2148,7 @@ def maybe_convert_objects(ndarray[object] objects, bint try_float=0,

@cython.boundscheck(False)
@cython.wraparound(False)
def map_infer_mask(ndarray arr, object f, ndarray[uint8_t] mask,
def map_infer_mask(ndarray arr, object f, const uint8_t[:] mask,
bint convert=1):
"""
Substitute for np.vectorize with pandas-friendly dtype inference
Expand Down
2 changes: 1 addition & 1 deletion pandas/_libs/reduction.pyx
Original file line number Diff line number Diff line change
Expand Up @@ -494,7 +494,7 @@ class InvalidApply(Exception):


def apply_frame_axis0(object frame, object f, object names,
ndarray[int64_t] starts, ndarray[int64_t] ends):
const int64_t[:] starts, const int64_t[:] ends):
cdef:
BlockSlider slider
Py_ssize_t i, n = len(starts)
Expand Down
2 changes: 1 addition & 1 deletion pandas/_libs/tslibs/conversion.pyx
Original file line number Diff line number Diff line change
Expand Up @@ -147,7 +147,7 @@ def ensure_timedelta64ns(arr: ndarray, copy: bool=True):

@cython.boundscheck(False)
@cython.wraparound(False)
def datetime_to_datetime64(values: object[:]):
def datetime_to_datetime64(object[:] values):
"""
Convert ndarray of datetime-like objects to int64 array representing
nanosecond timestamps.
Expand Down
2 changes: 1 addition & 1 deletion pandas/_libs/tslibs/fields.pyx
Original file line number Diff line number Diff line change
Expand Up @@ -381,7 +381,7 @@ def get_start_end_field(int64_t[:] dtindex, object field,

@cython.wraparound(False)
@cython.boundscheck(False)
def get_date_field(ndarray[int64_t] dtindex, object field):
def get_date_field(int64_t[:] dtindex, object field):
"""
Given a int64-based datetime index, extract the year, month, etc.,
field and return an array of these values.
Expand Down
2 changes: 1 addition & 1 deletion pandas/_libs/window.pyx
Original file line number Diff line number Diff line change
Expand Up @@ -243,7 +243,7 @@ cdef class VariableWindowIndexer(WindowIndexer):
# max window size
self.win = (self.end - self.start).max()

def build(self, ndarray[int64_t] index, int64_t win, bint left_closed,
def build(self, const int64_t[:] index, int64_t win, bint left_closed,
bint right_closed):

cdef:
Expand Down

0 comments on commit 55e5dbc

Please sign in to comment.