Commit f299392

Merge pull request #184 from jorenham/refactor/onp
use the conventional `optype` import aliases
2 parents e8d7eec + 23be62d commit f299392

111 files changed: +863, -865 lines


pyproject.toml

Lines changed: 3 additions & 9 deletions
@@ -187,7 +187,7 @@ force-exclude = true
 target-version = "py310"
 # https://typing.readthedocs.io/en/latest/guides/writing_stubs.html#maximum-line-length
 line-length = 130
-src = ["scipy-stubs", "codemods.py"]
+src = ["scipy-stubs", "codegen"]

 [tool.ruff.format]
 line-ending = "lf"
@@ -236,15 +236,9 @@ select = [
 [tool.ruff.lint.flake8-import-conventions]
 banned-from = [
   "numpy",
-  "numpy.emath",
-  "numpy.fft",
-  "numpy.linalg",
   "numpy.ma",
-  "numpy.polynomial",
-  "numpy.random",
-  "numpy.rec",
   "numpy.typing",
-  "numpy.strings",
+  "optype",
   "optype.numpy",
   "optype.typing",
 ]
@@ -255,7 +249,7 @@ banned-from = [
 "numpy.typing" = "npt"
 "optype" = "op"
 "optype.typing" = "opt"
-"optype.numpy" = "onpt"
+"optype.numpy" = "onp"

 [tool.ruff.lint.isort]
 combine-as-imports = true
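
With this configuration, ruff's flake8-import-conventions rules ban member imports from the listed packages and pin each one to a single alias. A stub module that needs all of the covered packages would therefore open with the import block below (a minimal sketch assembled from the alias table above; which of these imports a given module actually uses varies):

# conventional aliases enforced by [tool.ruff.lint.flake8-import-conventions]
import numpy as np
import numpy.typing as npt
import optype as op
import optype.numpy as onp
import optype.typing as opt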

scipy-stubs/_lib/_array_api.pyi

Lines changed: 2 additions & 2 deletions
@@ -4,7 +4,7 @@ from typing_extensions import TypeVar

 import numpy as np
 import numpy.typing as npt
-import optype.numpy as onpt
+import optype.numpy as onp
 from scipy._typing import AnyBool, OrderKACF

 __all__ = ["_asarray", "array_namespace", "device", "size"]
@@ -72,6 +72,6 @@ def device(x: _HasDevice[_DeviceT], /) -> _DeviceT: ...
 @overload
 def size(x: _HasShape[tuple[()] | tuple[_0, ...]]) -> _0: ...
 @overload
-def size(x: _HasShape[tuple[_SizeT] | onpt.AtLeast1D[_SizeT, _1] | tuple[_1, _SizeT]]) -> _SizeT: ...
+def size(x: _HasShape[tuple[_SizeT] | onp.AtLeast1D[_SizeT, _1] | tuple[_1, _SizeT]]) -> _SizeT: ...
 @overload
 def size(x: _HasShape) -> int: ...

scipy-stubs/_lib/_util.pyi

Lines changed: 4 additions & 4 deletions
@@ -7,9 +7,9 @@ from typing_extensions import TypeVar, override
 import numpy as np
 import numpy.typing as npt
 import optype as op
-import optype.numpy as onpt
+import optype.numpy as onp
 from numpy._typing import _ArrayLikeInt
-from numpy.random import Generator as Generator  # noqa: ICN003
+from numpy.random import Generator as Generator
 from scipy._typing import RNG, EnterSelfMixin

 _AnyRNG = TypeVar("_AnyRNG", np.random.RandomState, np.random.Generator)
@@ -96,7 +96,7 @@ def rng_integers(
     low: _ArrayLikeInt,
     high: _ArrayLikeInt | None = None,
     size: tuple[()] | None = None,
-    dtype: onpt.AnyIntegerDType = "int64",
+    dtype: onp.AnyIntegerDType = "int64",
     endpoint: op.CanBool = False,
 ) -> np.integer[Any]: ...
 @overload
@@ -105,7 +105,7 @@ def rng_integers(
     low: _ArrayLikeInt,
     high: _ArrayLikeInt | None = None,
     size: op.CanIndex | Sequence[op.CanIndex] | None = None,
-    dtype: onpt.AnyIntegerDType = "int64",
+    dtype: onp.AnyIntegerDType = "int64",
     endpoint: op.CanBool = False,
 ) -> np.integer[Any] | npt.NDArray[np.integer[Any]]: ...

scipy-stubs/_typing.pyi

Lines changed: 2 additions & 2 deletions
@@ -8,7 +8,7 @@ from typing_extensions import LiteralString, Self, TypeVar

 import numpy as np
 import optype as op
-import optype.numpy as onpt
+import optype.numpy as onp

 __all__ = [
     "RNG",
@@ -59,7 +59,7 @@ UntypedTuple: TypeAlias = tuple[Untyped, ...]
 UntypedList: TypeAlias = list[Untyped]
 UntypedDict: TypeAlias = dict[Untyped, Untyped]
 UntypedCallable: TypeAlias = Callable[..., Untyped]
-UntypedArray: TypeAlias = onpt.Array[Any, np.generic]
+UntypedArray: TypeAlias = onp.Array[Any, np.generic]

 # I/O
 _ByteSOrStr = TypeVar("_ByteSOrStr", bytes, str)

scipy-stubs/cluster/hierarchy.pyi

Lines changed: 44 additions & 44 deletions
@@ -5,7 +5,7 @@ from typing_extensions import TypeVar, override

 import numpy as np
 import numpy.typing as npt
-import optype.numpy as onpt
+import optype.numpy as onp
 from scipy._lib._disjoint_set import DisjointSet
 from scipy.spatial.distance import _MetricCallback, _MetricKind

@@ -47,7 +47,7 @@ __all__ = [
 _T = TypeVar("_T")
 _SCT = TypeVar("_SCT", bound=np.number[Any], default=np.float64)
 _LinkageMethod: TypeAlias = Literal["single", "complete", "average", "weighted", "centroid", "median", "ward"]
-_LinkageArray: TypeAlias = onpt.Array[tuple[int, int], _SCT]
+_LinkageArray: TypeAlias = onp.Array[tuple[int, int], _SCT]
 _ClusterCriterion: TypeAlias = Literal["inconsistent", "distance", "maxclust", "monocrit", "maxclust_monocrit"]
 _SortOrder: TypeAlias = Literal["ascending", "descending"]

@@ -64,16 +64,16 @@ class _DendrogramResult(TypedDict):

 class ClusterWarning(UserWarning): ...

-def int_floor(arr: onpt.AnyArray, xp: ModuleType) -> int: ...
-def single(y: onpt.AnyArray) -> _LinkageArray: ...
-def complete(y: onpt.AnyArray) -> _LinkageArray: ...
-def average(y: onpt.AnyArray) -> _LinkageArray: ...
-def weighted(y: onpt.AnyArray) -> _LinkageArray: ...
-def centroid(y: onpt.AnyArray) -> _LinkageArray: ...
-def median(y: onpt.AnyArray) -> _LinkageArray: ...
-def ward(y: onpt.AnyArray) -> _LinkageArray: ...
+def int_floor(arr: onp.AnyArray, xp: ModuleType) -> int: ...
+def single(y: onp.AnyArray) -> _LinkageArray: ...
+def complete(y: onp.AnyArray) -> _LinkageArray: ...
+def average(y: onp.AnyArray) -> _LinkageArray: ...
+def weighted(y: onp.AnyArray) -> _LinkageArray: ...
+def centroid(y: onp.AnyArray) -> _LinkageArray: ...
+def median(y: onp.AnyArray) -> _LinkageArray: ...
+def ward(y: onp.AnyArray) -> _LinkageArray: ...
 def linkage(
-    y: onpt.AnyArray,
+    y: onp.AnyArray,
     method: _LinkageMethod = "single",
     metric: _MetricKind | _MetricCallback = "euclidean",
     optimal_ordering: bool = False,
@@ -109,61 +109,61 @@ class ClusterNode:
     def pre_order(self, /, func: Callable[[ClusterNode], _T]) -> list[_T]: ...

 def cut_tree(
-    Z: onpt.AnyArray,
+    Z: onp.AnyArray,
     n_clusters: Sequence[int] | npt.NDArray[np.integer[Any]] | None = None,
     height: Sequence[float] | npt.NDArray[np.integer[Any] | np.floating[Any]] | None = None,
-) -> onpt.Array[tuple[int, int], np.int64]: ...
+) -> onp.Array[tuple[int, int], np.int64]: ...
 @overload
-def to_tree(Z: onpt.AnyArray, rd: Literal[False] = False) -> ClusterNode: ...
+def to_tree(Z: onp.AnyArray, rd: Literal[False] = False) -> ClusterNode: ...
 @overload
-def to_tree(Z: onpt.AnyArray, rd: Literal[True]) -> tuple[ClusterNode, list[ClusterNode]]: ...
+def to_tree(Z: onp.AnyArray, rd: Literal[True]) -> tuple[ClusterNode, list[ClusterNode]]: ...
 def optimal_leaf_ordering(
-    Z: onpt.AnyArray,
-    y: onpt.AnyArray,
+    Z: onp.AnyArray,
+    y: onp.AnyArray,
     metric: _MetricKind | _MetricCallback = "euclidean",
 ) -> _LinkageArray: ...
 @overload
-def cophenet(Z: onpt.AnyArray, Y: None = None) -> onpt.Array[tuple[int], np.float64]: ...
+def cophenet(Z: onp.AnyArray, Y: None = None) -> onp.Array[tuple[int], np.float64]: ...
 @overload
 def cophenet(
-    Z: onpt.AnyArray,
-    Y: onpt.AnyArray,
-) -> tuple[onpt.Array[tuple[int], np.float64], onpt.Array[tuple[int], np.float64]]: ...
-def inconsistent(Z: onpt.AnyArray, d: int = 2) -> _LinkageArray: ...
-def from_mlab_linkage(Z: onpt.AnyArray) -> _LinkageArray: ...
-def to_mlab_linkage(Z: onpt.AnyArray) -> _LinkageArray: ...
-def is_monotonic(Z: onpt.AnyArray) -> bool: ...
-def is_valid_im(R: onpt.AnyArray, warning: bool = False, throw: bool = False, name: str | None = None) -> bool: ...
-def is_valid_linkage(Z: onpt.AnyArray, warning: bool = False, throw: bool = False, name: str | None = None) -> bool: ...
-def num_obs_linkage(Z: onpt.AnyArray) -> int: ...
-def correspond(Z: onpt.AnyArray, Y: onpt.AnyArray) -> bool: ...
+    Z: onp.AnyArray,
+    Y: onp.AnyArray,
+) -> tuple[onp.Array[tuple[int], np.float64], onp.Array[tuple[int], np.float64]]: ...
+def inconsistent(Z: onp.AnyArray, d: int = 2) -> _LinkageArray: ...
+def from_mlab_linkage(Z: onp.AnyArray) -> _LinkageArray: ...
+def to_mlab_linkage(Z: onp.AnyArray) -> _LinkageArray: ...
+def is_monotonic(Z: onp.AnyArray) -> bool: ...
+def is_valid_im(R: onp.AnyArray, warning: bool = False, throw: bool = False, name: str | None = None) -> bool: ...
+def is_valid_linkage(Z: onp.AnyArray, warning: bool = False, throw: bool = False, name: str | None = None) -> bool: ...
+def num_obs_linkage(Z: onp.AnyArray) -> int: ...
+def correspond(Z: onp.AnyArray, Y: onp.AnyArray) -> bool: ...
 def fcluster(
-    Z: onpt.AnyArray,
+    Z: onp.AnyArray,
     t: float | np.floating[Any] | np.integer[Any],
     criterion: _ClusterCriterion = "inconsistent",
     depth: int = 2,
-    R: onpt.AnyArray | None = None,
-    monocrit: onpt.AnyArray | None = None,
-) -> onpt.Array[tuple[int], np.int32]: ...
+    R: onp.AnyArray | None = None,
+    monocrit: onp.AnyArray | None = None,
+) -> onp.Array[tuple[int], np.int32]: ...
 def fclusterdata(
-    X: onpt.AnyArray,
+    X: onp.AnyArray,
     t: float | np.floating[Any] | np.integer[Any],
     criterion: _ClusterCriterion = "inconsistent",
     metric: _MetricKind | _MetricCallback = "euclidean",
     depth: int = 2,
     method: _LinkageMethod = "single",
-    R: onpt.AnyArray | None = None,
-) -> onpt.Array[tuple[int], np.int32]: ...
-def leaves_list(Z: onpt.AnyArray) -> onpt.Array[tuple[int], np.int32]: ...
+    R: onp.AnyArray | None = None,
+) -> onp.Array[tuple[int], np.int32]: ...
+def leaves_list(Z: onp.AnyArray) -> onp.Array[tuple[int], np.int32]: ...
 def set_link_color_palette(palette: list[str] | tuple[str, ...] | None) -> None: ...
 def dendrogram(
-    Z: onpt.AnyArray,
+    Z: onp.AnyArray,
     p: int = 30,
     truncate_mode: Literal["lastp", "level"] | None = None,
     color_threshold: float | np.floating[Any] | None = None,
     get_leaves: bool = True,
     orientation: Literal["top", "bottom", "left", "right"] = "top",
-    labels: onpt.AnyArray | None = None,
+    labels: onp.AnyArray | None = None,
     count_sort: _SortOrder | bool = False,
     distance_sort: _SortOrder | bool = False,
     show_leaf_counts: bool = True,
@@ -177,8 +177,8 @@ def dendrogram(
     ax: _MatplotlibAxes | None = None,
     above_threshold_color: str = "C0",
 ) -> _DendrogramResult: ...
-def is_isomorphic(T1: onpt.AnyArray, T2: onpt.AnyArray) -> bool: ...
-def maxdists(Z: onpt.AnyArray) -> onpt.Array[tuple[int], np.float64]: ...
-def maxinconsts(Z: onpt.AnyArray, R: onpt.AnyArray) -> onpt.Array[tuple[int], np.float64]: ...
-def maxRstat(Z: onpt.AnyArray, R: onpt.AnyArray, i: int) -> onpt.Array[tuple[int], np.float64]: ...
-def leaders(Z: onpt.AnyArray, T: onpt.AnyArray) -> tuple[onpt.Array[tuple[int], np.int32], onpt.Array[tuple[int], np.int32]]: ...
+def is_isomorphic(T1: onp.AnyArray, T2: onp.AnyArray) -> bool: ...
+def maxdists(Z: onp.AnyArray) -> onp.Array[tuple[int], np.float64]: ...
+def maxinconsts(Z: onp.AnyArray, R: onp.AnyArray) -> onp.Array[tuple[int], np.float64]: ...
+def maxRstat(Z: onp.AnyArray, R: onp.AnyArray, i: int) -> onp.Array[tuple[int], np.float64]: ...
+def leaders(Z: onp.AnyArray, T: onp.AnyArray) -> tuple[onp.Array[tuple[int], np.int32], onp.Array[tuple[int], np.int32]]: ...
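
With these annotations, a typical linkage-then-cluster workflow resolves to the `onp`-based types shown above. A small usage sketch (the input values are illustrative; only the commented result types come from these stubs):

import numpy as np
from scipy.cluster.hierarchy import fcluster, linkage

# condensed pairwise distances for 4 observations (6 = 4 choose 2 entries)
y = np.array([0.5, 1.2, 0.9, 1.1, 0.7, 0.4])

Z = linkage(y, method="ward")                    # _LinkageArray, i.e. onp.Array[tuple[int, int], np.float64]
labels = fcluster(Z, t=2, criterion="maxclust")  # onp.Array[tuple[int], np.int32]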

scipy-stubs/cluster/vq.pyi

Lines changed: 9 additions & 9 deletions
@@ -4,30 +4,30 @@ from typing_extensions import TypeVar

 import numpy as np
 import numpy.typing as npt
-import optype.numpy as onpt
+import optype.numpy as onp

 __all__ = ["kmeans", "kmeans2", "vq", "whiten"]

 _SCT_fc = TypeVar("_SCT_fc", bound=np.inexact[Any])
-_ArrayLike_1d_fc: TypeAlias = onpt.AnyNumberArray | Sequence[complex | np.number[Any]]
-_ArrayLike_2d_fc: TypeAlias = onpt.AnyNumberArray | Sequence[Sequence[complex | np.number[Any]]]
+_ArrayLike_1d_fc: TypeAlias = onp.AnyNumberArray | Sequence[complex | np.number[Any]]
+_ArrayLike_2d_fc: TypeAlias = onp.AnyNumberArray | Sequence[Sequence[complex | np.number[Any]]]

 class ClusterError(Exception): ...

 @overload
-def whiten(obs: npt.NDArray[_SCT_fc], check_finite: bool = True) -> onpt.Array[tuple[int, int], _SCT_fc]: ...
+def whiten(obs: npt.NDArray[_SCT_fc], check_finite: bool = True) -> onp.Array[tuple[int, int], _SCT_fc]: ...
 @overload
-def whiten(obs: _ArrayLike_2d_fc, check_finite: bool = True) -> onpt.Array[tuple[int, int], np.inexact[Any]]: ...
+def whiten(obs: _ArrayLike_2d_fc, check_finite: bool = True) -> onp.Array[tuple[int, int], np.inexact[Any]]: ...
 def vq(
     obs: _ArrayLike_2d_fc,
     code_book: _ArrayLike_2d_fc,
     check_finite: bool = True,
-) -> tuple[onpt.Array[tuple[int], np.int32 | np.intp], onpt.Array[tuple[int], _SCT_fc]]: ...
+) -> tuple[onp.Array[tuple[int], np.int32 | np.intp], onp.Array[tuple[int], _SCT_fc]]: ...
 def py_vq(
     obs: _ArrayLike_2d_fc,
     code_book: _ArrayLike_2d_fc,
     check_finite: bool = True,
-) -> tuple[onpt.Array[tuple[int], np.intp], onpt.Array[tuple[int], _SCT_fc]]: ...
+) -> tuple[onp.Array[tuple[int], np.intp], onp.Array[tuple[int], _SCT_fc]]: ...
 def kmeans(
     obs: _ArrayLike_2d_fc,
     k_or_guess: npt.ArrayLike,
@@ -36,7 +36,7 @@ def kmeans(
     check_finite: bool = True,
     *,
     seed: int | np.random.Generator | np.random.RandomState | None = None,
-) -> tuple[onpt.Array[tuple[int, int], np.inexact[Any]], float]: ...
+) -> tuple[onp.Array[tuple[int, int], np.inexact[Any]], float]: ...
 def kmeans2(
     data: _ArrayLike_1d_fc | _ArrayLike_2d_fc,
     k: npt.ArrayLike,
@@ -47,4 +47,4 @@ def kmeans2(
     check_finite: bool = True,
     *,
     seed: int | np.random.Generator | np.random.RandomState | None = None,
-) -> tuple[onpt.Array[tuple[int, int], np.inexact[Any]], onpt.Array[tuple[int], np.int32]]: ...
+) -> tuple[onp.Array[tuple[int, int], np.inexact[Any]], onp.Array[tuple[int], np.int32]]: ...
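
The vq stubs follow the same pattern; a short sketch of how the annotated return types line up in practice (illustrative data, assuming the public scipy.cluster.vq API):

import numpy as np
from scipy.cluster.vq import kmeans2, whiten

obs = np.random.default_rng(0).normal(size=(100, 3))

w = whiten(obs)                      # onp.Array[tuple[int, int], np.float64] via the NDArray[_SCT_fc] overload
centroids, labels = kmeans2(w, k=4)  # tuple[onp.Array[tuple[int, int], np.inexact[Any]], onp.Array[tuple[int], np.int32]]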

scipy-stubs/fft/_backend.pyi

Lines changed: 3 additions & 3 deletions
@@ -2,7 +2,7 @@ from collections.abc import Mapping, Sequence
 from typing import Any, ClassVar, Protocol, final, type_check_only
 from typing_extensions import TypeVar

-from optype import CanWith
+import optype as op

 _RT_co = TypeVar("_RT_co", covariant=True, default=Any)

@@ -19,5 +19,5 @@ class _ScipyBackend(_BaseBackend): ...

 def set_global_backend(backend: _BaseBackend, coerce: bool = False, only: bool = False, try_last: bool = False) -> None: ...
 def register_backend(backend: _BaseBackend) -> None: ...
-def set_backend(backend: _BaseBackend, coerce: bool = False, only: bool = False) -> CanWith[None]: ...
-def skip_backend(backend: _BaseBackend) -> CanWith[None]: ...
+def set_backend(backend: _BaseBackend, coerce: bool = False, only: bool = False) -> op.CanWith[None]: ...
+def skip_backend(backend: _BaseBackend) -> op.CanWith[None]: ...
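
op.CanWith[None] is optype's protocol for objects usable in a `with` statement, which matches how set_backend and skip_backend are meant to be used through the public scipy.fft interface. A rough sketch (the built-in backend is passed by its registered name "scipy" here, which works at runtime even though the stub above annotates the parameter as a backend object):

import numpy as np
import scipy.fft

x = np.arange(8, dtype=np.float64)

# set_backend(...) returns a context manager (hence op.CanWith[None] in the stub),
# so the backend choice is scoped to the `with` block:
with scipy.fft.set_backend("scipy", only=True):
    y = scipy.fft.fft(x)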
