Iter

Bases: BaseAgg[T], BaseBool[T], BaseFilter[T], BaseProcess[T], BaseMap[T], BaseRolling[T], BaseList[T], BaseTuples[T], BasePartitions[T], BaseJoins[T], BaseGroups[T], BaseEager[T], IterConstructors

A wrapper around Python's built-in iterable types, providing a rich set of functional programming tools.

It supports lazy evaluation, allowing for efficient processing of large datasets.

It is not a collection itself, but a wrapper that provides additional methods for working with iterables.

It can be constructed from any iterable, including lists, tuples, sets, and generators.
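For instance, a minimal sketch (using methods documented below) of building a lazy pipeline from a generator and only materializing it at the end:

>>> import pyochain as pc
>>> squares = pc.Iter.from_(x * x for x in range(10))
>>> squares.filter(lambda v: v % 2 == 0).take(3).into(list)
[0, 4, 16]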

Source code in src/pyochain/_iter/_main.py
class Iter[T](
    BaseAgg[T],
    BaseBool[T],
    BaseFilter[T],
    BaseProcess[T],
    BaseMap[T],
    BaseRolling[T],
    BaseList[T],
    BaseTuples[T],
    BasePartitions[T],
    BaseJoins[T],
    BaseGroups[T],
    BaseEager[T],
    IterConstructors,
):
    """
    A wrapper around Python's built-in iterable types, providing a rich set of functional programming tools.

    It supports lazy evaluation, allowing for efficient processing of large datasets.

    It is not a collection itself, but a wrapper that provides additional methods for working with iterables.

    It can be constructed from any iterable, including `lists`, `tuples`, `sets`, and `generators`.
    """

    __slots__ = ("_data",)

    def __init__(self, data: Iterator[T] | Generator[T, Any, Any]) -> None:
        self._data = data

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({self.unwrap().__repr__()})"

    def itr[**P, R, U: Iterable[Any]](
        self: Iter[U],
        func: Callable[Concatenate[Iter[U], P], R],
        *args: P.args,
        **kwargs: P.kwargs,
    ) -> Iter[R]:
        """
        Apply a function to each element after wrapping it in an Iter.

        This is a convenience method for the common pattern of mapping a function over an iterable of iterables.

        Args:
            func: Function to apply to each wrapped element.
            *args: Positional arguments to pass to the function.
            **kwargs: Keyword arguments to pass to the function.
        Example:
        ```python
        >>> import pyochain as pc
        >>> data = [
        ...     [1, 2, 3],
        ...     [4, 5],
        ...     [6, 7, 8, 9],
        ... ]
        >>> pc.Iter.from_(data).itr(
        ...     lambda x: x.repeat(2).flatten().reduce(lambda a, b: a + b)
        ... ).into(list)
        [12, 18, 60]

        ```
        """

        def _itr(data: Iterable[U]) -> Generator[R, None, None]:
            return (func(Iter.from_(x), *args, **kwargs) for x in data)

        return self.apply(_itr)

    def struct[**P, R, K, V](
        self: Iter[dict[K, V]],
        func: Callable[Concatenate[Dict[K, V], P], R],
        *args: P.args,
        **kwargs: P.kwargs,
    ) -> Iter[R]:
        """
        Apply a function to each element after wrapping it in a Dict.

        This is a convenience method for the common pattern of mapping a function over an iterable of dictionaries.

        Args:
            func: Function to apply to each wrapped dictionary.
            *args: Positional arguments to pass to the function.
            **kwargs: Keyword arguments to pass to the function.
        Example:
        ```python
        >>> from typing import Any
        >>> import pyochain as pc

        >>> data: list[dict[str, Any]] = [
        ...     {"name": "Alice", "age": 30, "city": "New York"},
        ...     {"name": "Bob", "age": 25, "city": "Los Angeles"},
        ...     {"name": "Charlie", "age": 35, "city": "New York"},
        ...     {"name": "David", "age": 40, "city": "Paris"},
        ... ]
        >>>
        >>> def to_title(d: pc.Dict[str, Any]) -> pc.Dict[str, Any]:
        ...     return d.map_keys(lambda k: k.title())
        >>> def is_young(d: pc.Dict[str, Any]) -> bool:
        ...     return d.unwrap().get("Age", 0) < 30
        >>> def set_continent(d: pc.Dict[str, Any], value: str) -> dict[str, Any]:
        ...     return d.with_key("Continent", value).unwrap()
        >>>
        >>> pc.Iter.from_(data).struct(to_title).filter_false(is_young).map(
        ...     lambda d: d.drop("Age").with_key("Continent", "NA")
        ... ).map_if(
        ...     lambda d: d.unwrap().get("City") == "Paris",
        ...     lambda d: set_continent(d, "Europe"),
        ...     lambda d: set_continent(d, "America"),
        ... ).group_by(lambda d: d.get("Continent")).map_values(
        ...     lambda d: pc.Iter.from_(d)
        ...     .struct(lambda d: d.drop("Continent").unwrap())
        ...     .into(list)
        ... )  # doctest: +NORMALIZE_WHITESPACE
        Dict({
        'America': [
            {'Name': 'Alice', 'City': 'New York'},
            {'Name': 'Charlie', 'City': 'New York'}
        ],
        'Europe': [
            {'Name': 'David', 'City': 'Paris'}
        ]
        })

        ```
        """
        from .._dict import Dict

        def _struct(data: Iterable[dict[K, V]]) -> Generator[R, None, None]:
            return (func(Dict(x), *args, **kwargs) for x in data)

        return self.apply(_struct)

    def with_keys[K](self, keys: Iterable[K]) -> Dict[K, T]:
        """
        Create a Dict by zipping the iterable with keys.

        Args:
            keys: Iterable of keys to pair with the values.
        Example:
        ```python
        >>> import pyochain as pc
        >>> keys = ["a", "b", "c"]
        >>> values = [1, 2, 3]
        >>> pc.Iter.from_(values).with_keys(keys).unwrap()
        {'a': 1, 'b': 2, 'c': 3}
        >>> # This is equivalent to:
        >>> pc.Iter.from_(keys).zip(values).pipe(
        ...     lambda x: pc.Dict(x.into(dict)).unwrap()
        ... )
        {'a': 1, 'b': 2, 'c': 3}

        ```
        """
        from .._dict import Dict

        return Dict(dict(zip(keys, self.unwrap())))

    def with_values[V](self, values: Iterable[V]) -> Dict[T, V]:
        """
        Create a Dict by zipping the iterable with values.

        Args:
            values: Iterable of values to pair with the keys.
        Example:
        ```python
        >>> import pyochain as pc
        >>> keys = [1, 2, 3]
        >>> values = ["a", "b", "c"]
        >>> pc.Iter.from_(keys).with_values(values).unwrap()
        {1: 'a', 2: 'b', 3: 'c'}
        >>> # This is equivalent to:
        >>> pc.Iter.from_(keys).zip(values).pipe(
        ...     lambda x: pc.Dict(x.into(dict)).unwrap()
        ... )
        {1: 'a', 2: 'b', 3: 'c'}

        ```
        """
        from .._dict import Dict

        return Dict(dict(zip(self.unwrap(), values)))

__slots__ class-attribute instance-attribute

__slots__ = ('_data',)

_data instance-attribute

_data = data

__init__

__init__(data: Iterator[T] | Generator[T, Any, Any]) -> None
Source code in src/pyochain/_iter/_main.py
def __init__(self, data: Iterator[T] | Generator[T, Any, Any]) -> None:
    self._data = data

__repr__

__repr__() -> str
Source code in src/pyochain/_iter/_main.py
def __repr__(self) -> str:
    return f"{self.__class__.__name__}({self.unwrap().__repr__()})"

accumulate

accumulate(func: Callable[[T, T], T]) -> Iter[T]

Return cumulative application of binary op provided by the function.

Parameters:

- `func` (`Callable[[T, T], T]`), required: A binary function to apply cumulatively.
>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 3]).accumulate(lambda a, b: a + b).into(list)
[1, 3, 6]
Source code in src/pyochain/_iter/_process.py
def accumulate(self, func: Callable[[T, T], T]) -> Iter[T]:
    """
    Return cumulative application of binary op provided by the function.

    Args:
        func: A binary function to apply cumulatively.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 3]).accumulate(lambda a, b: a + b).into(list)
    [1, 3, 6]

    ```
    """
    return self.apply(partial(cz.itertoolz.accumulate, func))

adjacent

adjacent(predicate: Callable[[T], bool], distance: int = 1) -> Iter[tuple[bool, T]]

Return an iterable over (bool, item) tuples.

Parameters:

- `predicate` (`Callable[[T], bool]`), required: Function to determine if an item satisfies the condition.
- `distance` (`int`), default `1`: Number of places to consider as adjacent.

The output is a sequence of tuples where the item is drawn from iterable.

The bool indicates whether that item satisfies the predicate or is adjacent to an item that does.

For example, to find whether items are adjacent to a 3:

>>> import pyochain as pc
>>> pc.Iter.from_(range(6)).adjacent(lambda x: x == 3).into(list)
[(False, 0), (False, 1), (True, 2), (True, 3), (True, 4), (False, 5)]
Set distance to change what counts as adjacent. For example, to find whether items are two places away from a 3:
>>> pc.Iter.from_(range(6)).adjacent(lambda x: x == 3, distance=2).into(list)
[(False, 0), (True, 1), (True, 2), (True, 3), (True, 4), (True, 5)]

This is useful for contextualizing the results of a search function.

For example, a code comparison tool might want to identify lines that have changed, but also surrounding lines to give the viewer of the diff context.

The predicate function will only be called once for each item in the iterable.

See also groupby_transform, which can be used with this function to group ranges of items with the same bool value.

Source code in src/pyochain/_iter/_tuples.py
def adjacent(
    self, predicate: Callable[[T], bool], distance: int = 1
) -> Iter[tuple[bool, T]]:
    """
    Return an iterable over (bool, item) tuples.

    Args:
        predicate: Function to determine if an item satisfies the condition.
        distance: Number of places to consider as adjacent. Defaults to 1.

    The output is a sequence of tuples where the item is drawn from iterable.

    The bool indicates whether that item satisfies the predicate or is adjacent to an item that does.

    For example, to find whether items are adjacent to a 3:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_(range(6)).adjacent(lambda x: x == 3).into(list)
    [(False, 0), (False, 1), (True, 2), (True, 3), (True, 4), (False, 5)]

    ```
    Set distance to change what counts as adjacent.
    For example, to find whether items are two places away from a 3:
    ```python
    >>> pc.Iter.from_(range(6)).adjacent(lambda x: x == 3, distance=2).into(list)
    [(False, 0), (True, 1), (True, 2), (True, 3), (True, 4), (True, 5)]

    ```

    This is useful for contextualizing the results of a search function.

    For example, a code comparison tool might want to identify lines that have changed, but also surrounding lines to give the viewer of the diff context.

    The predicate function will only be called once for each item in the iterable.

    See also groupby_transform, which can be used with this function to group ranges of items with the same bool value.
    """
    return self.apply(partial(mit.adjacent, predicate, distance=distance))

all

all(predicate: Callable[[T], bool] = lambda x: bool(x)) -> bool

Tests if every element of the iterator matches a predicate.

Iter.all() takes a closure that returns true or false.

It applies this closure to each element of the iterator, and if they all return true, then so does Iter.all().

If any of them return false, it returns false.

An empty iterator returns true.

Parameters:

- `predicate` (`Callable[[T], bool]`), default `lambda x: bool(x)`: Function to evaluate each item; defaults to checking truthiness.

Example:

>>> import pyochain as pc
>>> pc.Iter.from_([1, True]).all()
True
>>> pc.Iter.from_([]).all()
True
>>> pc.Iter.from_([1, 0]).all()
False
>>> def is_even(x: int) -> bool:
...     return x % 2 == 0
>>> pc.Iter.from_([2, 4, 6]).all(is_even)
True

Source code in src/pyochain/_iter/_booleans.py
def all(self, predicate: Callable[[T], bool] = lambda x: bool(x)) -> bool:
    """
    Tests if every element of the iterator matches a predicate.

    `Iter.all()` takes a closure that returns true or false.

    It applies this closure to each element of the iterator, and if they all return true, then so does `Iter.all()`.

    If any of them return false, it returns false.

    An empty iterator returns true.

    Args:
        predicate: Function to evaluate each item. Defaults to checking truthiness.
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, True]).all()
    True
    >>> pc.Iter.from_([]).all()
    True
    >>> pc.Iter.from_([1, 0]).all()
    False
    >>> def is_even(x: int) -> bool:
    ...     return x % 2 == 0
    >>> pc.Iter.from_([2, 4, 6]).all(is_even)
    True

    ```
    """

    def _all(data: Iterable[T]) -> bool:
        return all(predicate(x) for x in data)

    return self.into(_all)

all_equal

all_equal(key: Callable[[T], U] | None = None) -> bool

Return True if all items are equal.

Parameters:

- `key` (`Callable[[T], U] | None`), default `None`: Function to transform items before comparison.

Example:

>>> import pyochain as pc
>>> pc.Iter.from_([1, 1, 1]).all_equal()
True
A function that accepts a single argument and returns a transformed version of each input item can be specified with key:
>>> pc.Iter.from_("AaaA").all_equal(key=str.casefold)
True
>>> pc.Iter.from_([1, 2, 3]).all_equal(key=lambda x: x < 10)
True

Source code in src/pyochain/_iter/_booleans.py
def all_equal[U](self, key: Callable[[T], U] | None = None) -> bool:
    """
    Return True if all items are equal.

    Args:
        key: Function to transform items before comparison. Defaults to None.
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 1, 1]).all_equal()
    True

    ```
    A function that accepts a single argument and returns a transformed version of each input item can be specified with key:
    ```python
    >>> pc.Iter.from_("AaaA").all_equal(key=str.casefold)
    True
    >>> pc.Iter.from_([1, 2, 3]).all_equal(key=lambda x: x < 10)
    True

    ```
    """
    return self.into(mit.all_equal, key=key)

all_unique

all_unique(key: Callable[[T], U] | None = None) -> bool

Returns True if all the elements of iterable are unique.

Parameters:

- `key` (`Callable[[T], U] | None`), default `None`: Function to transform items before comparison.

Example:

>>> import pyochain as pc
>>> pc.Iter.from_("ABCB").all_unique()
False
If a key function is specified, it will be used to make comparisons.
>>> pc.Iter.from_("ABCb").all_unique()
True
>>> pc.Iter.from_("ABCb").all_unique(str.lower)
False
The function returns as soon as the first non-unique element is encountered.

Iterables with a mix of hashable and unhashable items can be used, but the function will be slower for unhashable items.

Source code in src/pyochain/_iter/_booleans.py
def all_unique[U](self, key: Callable[[T], U] | None = None) -> bool:
    """
    Returns True if all the elements of iterable are unique.

    Args:
        key: Function to transform items before comparison. Defaults to None.
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_("ABCB").all_unique()
    False

    ```
    If a key function is specified, it will be used to make comparisons.
    ```python
    >>> pc.Iter.from_("ABCb").all_unique()
    True
    >>> pc.Iter.from_("ABCb").all_unique(str.lower)
    False

    ```
    The function returns as soon as the first non-unique element is encountered.

    Iterables with a mix of hashable and unhashable items can be used, but the function will be slower for unhashable items

    """
    return self.into(mit.all_unique, key=key)

any

any(predicate: Callable[[T], bool] = lambda x: bool(x)) -> bool

Tests if any element of the iterator matches a predicate.

Iter.any() takes a closure that returns true or false.

It applies this closure to each element of the iterator, and if any of them return true, then so does Iter.any().

If they all return false, it returns false.

An empty iterator returns false.

Parameters:

- `predicate` (`Callable[[T], bool]`), default `lambda x: bool(x)`: Function to evaluate each item; defaults to checking truthiness.

Example:

>>> import pyochain as pc
>>> pc.Iter.from_([0, 1]).any()
True
>>> pc.Iter.from_(range(0)).any()
False
>>> def is_even(x: int) -> bool:
...     return x % 2 == 0
>>> pc.Iter.from_([1, 3, 4]).any(is_even)
True

Source code in src/pyochain/_iter/_booleans.py
def any(self, predicate: Callable[[T], bool] = lambda x: bool(x)) -> bool:
    """
    Tests if any element of the iterator matches a predicate.


    `Iter.any()` takes a closure that returns true or false.

    It applies this closure to each element of the iterator, and if any of them return true, then so does `Iter.any()`.

    If they all return false, it returns false.

    An empty iterator returns false.

    Args:
        predicate: Function to evaluate each item. Defaults to checking truthiness.
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([0, 1]).any()
    True
    >>> pc.Iter.from_(range(0)).any()
    False
    >>> def is_even(x: int) -> bool:
    ...     return x % 2 == 0
    >>> pc.Iter.from_([1, 3, 4]).any(is_even)
    True

    ```
    """

    def _any(data: Iterable[T]) -> bool:
        return any(predicate(x) for x in data)

    return self.into(_any)

apply

apply(
    func: Callable[Concatenate[Iterable[T], P], Iterator[R]],
    *args: P.args,
    **kwargs: P.kwargs,
) -> Iter[R]

Apply a function to the underlying iterable and return an Iter of the result. This allows passing user-defined functions that transform the iterable while retaining the Iter wrapper.

Parameters:

- `func` (`Callable[Concatenate[Iterable[T], P], Iterator[R]]`), required: Function to apply to the underlying iterable.
- `*args` (`P.args`): Positional arguments to pass to the function.
- `**kwargs` (`P.kwargs`): Keyword arguments to pass to the function.

Example:

>>> import pyochain as pc
>>> def double(data: Iterable[int]) -> Iterator[int]:
...     return (x * 2 for x in data)
>>> pc.Iter.from_([1, 2, 3]).apply(double).into(list)
[2, 4, 6]

Source code in src/pyochain/_core/_main.py
def apply[**P, R](
    self,
    func: Callable[Concatenate[Iterable[T], P], Iterator[R]],
    *args: P.args,
    **kwargs: P.kwargs,
) -> Iter[R]:
    """
    Apply a function to the underlying iterable and return an Iter of the result.
    Allows passing user-defined functions that transform the iterable while retaining the Iter wrapper.

    Args:
        func: Function to apply to the underlying iterable.
        *args: Positional arguments to pass to the function.
        **kwargs: Keyword arguments to pass to the function.

    Example:
    ```python
    >>> import pyochain as pc
    >>> def double(data: Iterable[int]) -> Iterator[int]:
    ...     return (x * 2 for x in data)
    >>> pc.Iter.from_([1, 2, 3]).apply(double).into(list)
    [2, 4, 6]

    ```
    """
    from .._iter import Iter

    return Iter(self.into(func, *args, **kwargs))

argmax

argmax(key: Callable[[T], U] | None = None) -> int

Index of the first occurrence of a maximum value in an iterable.

Parameters:

- `key` (`Callable[[T], U] | None`), default `None`: Optional function to determine the value for comparison.

>>> import pyochain as pc
>>> pc.Iter.from_("abcdefghabcd").argmax()
7
>>> pc.Iter.from_([0, 1, 2, 3, 3, 2, 1, 0]).argmax()
3
For example, identify the best machine learning model:
>>> models = pc.Iter.from_(["svm", "random forest", "knn", "naïve bayes"])
>>> accuracy = pc.Seq([68, 61, 84, 72])
>>> # Most accurate model
>>> models.item(accuracy.argmax())
'knn'
>>>
>>> # Best accuracy
>>> accuracy.into(max)
84
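As with argmin below, a key function can be supplied; a small sketch (assuming key=len selects the longest string, analogous to the argmin example):
>>> pc.Iter.from_(["a", "bbb", "cc"]).argmax(key=len)
1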

Source code in src/pyochain/_iter/_aggregations.py
def argmax[U](self, key: Callable[[T], U] | None = None) -> int:
    """
    Index of the first occurrence of a maximum value in an iterable.

    Args:
        key: Optional function to determine the value for comparison.

    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_("abcdefghabcd").argmax()
    7
    >>> pc.Iter.from_([0, 1, 2, 3, 3, 2, 1, 0]).argmax()
    3

    ```
    For example, identify the best machine learning model:
    ```python
    >>> models = pc.Iter.from_(["svm", "random forest", "knn", "naïve bayes"])
    >>> accuracy = pc.Seq([68, 61, 84, 72])
    >>> # Most accurate model
    >>> models.item(accuracy.argmax())
    'knn'
    >>>
    >>> # Best accuracy
    >>> accuracy.into(max)
    84

    ```
    """
    return self.into(mit.argmax, key=key)

argmin

argmin(key: Callable[[T], U] | None = None) -> int

Index of the first occurrence of a minimum value in an iterable.

Parameters:

- `key` (`Callable[[T], U] | None`), default `None`: Optional function to determine the value for comparison.
>>> import pyochain as pc
>>> pc.Iter.from_("efghabcdijkl").argmin()
4
>>> pc.Iter.from_([3, 2, 1, 0, 4, 2, 1, 0]).argmin()
3

For example, look up a label corresponding to the position of a value that minimizes a cost function:

>>> def cost(x):
...     "Days for a wound to heal given a subject's age."
...     return x**2 - 20 * x + 150
>>> labels = pc.Iter.from_(["homer", "marge", "bart", "lisa", "maggie"])
>>> ages = pc.Seq([35, 30, 10, 9, 1])
>>> # Fastest healing family member
>>> labels.item(ages.argmin(key=cost))
'bart'
>>> # Age with fastest healing
>>> ages.into(min, key=cost)
10

Source code in src/pyochain/_iter/_aggregations.py
def argmin[U](self, key: Callable[[T], U] | None = None) -> int:
    """
    Index of the first occurrence of a minimum value in an iterable.

    Args:
        key: Optional function to determine the value for comparison.

    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_("efghabcdijkl").argmin()
    4
    >>> pc.Iter.from_([3, 2, 1, 0, 4, 2, 1, 0]).argmin()
    3

    ```

    For example, look up a label corresponding to the position of a value that minimizes a cost function:
    ```python
    >>> def cost(x):
    ...     "Days for a wound to heal given a subject's age."
    ...     return x**2 - 20 * x + 150
    >>> labels = pc.Iter.from_(["homer", "marge", "bart", "lisa", "maggie"])
    >>> ages = pc.Seq([35, 30, 10, 9, 1])
    >>> # Fastest healing family member
    >>> labels.item(ages.argmin(key=cost))
    'bart'
    >>> # Age with fastest healing
    >>> ages.into(min, key=cost)
    10

    ```
    """
    return self.into(mit.argmin, key=key)

batch

batch(n: int) -> Iter[tuple[T, ...]]

Batch elements into tuples of length n and return a new Iter.

  • The last batch may be shorter than n.
  • The data is consumed lazily, just enough to fill a batch.
  • The result is yielded as soon as a batch is full or when the input iterable is exhausted.

Parameters:

- `n` (`int`), required: Number of elements in each batch.
>>> import pyochain as pc
>>> pc.Iter.from_("ABCDEFG").batch(3).into(list)
[('A', 'B', 'C'), ('D', 'E', 'F'), ('G',)]
Source code in src/pyochain/_iter/_partitions.py
def batch(self, n: int) -> Iter[tuple[T, ...]]:
    """
    Batch elements into tuples of length n and return a new Iter.

    - The last batch may be shorter than n.
    - The data is consumed lazily, just enough to fill a batch.
    - The result is yielded as soon as a batch is full or when the input iterable is exhausted.

    Args:
        n: Number of elements in each batch.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_("ABCDEFG").batch(3).into(list)
    [('A', 'B', 'C'), ('D', 'E', 'F'), ('G',)]

    ```
    """
    return self.apply(itertools.batched, n)

chain

chain(*others: Iterable[T]) -> Iter[T]

Concatenate zero or more iterables, any of which may be infinite.

An infinite sequence will prevent the rest of the arguments from being included.

We use chain.from_iterable rather than chain(*seqs) so that seqs can be a generator.

Parameters:

- `*others` (`Iterable[T]`): Other iterables to concatenate.
>>> import pyochain as pc
>>> pc.Iter.from_([1, 2]).chain([3, 4], [5]).into(list)
[1, 2, 3, 4, 5]
Source code in src/pyochain/_iter/_process.py
def chain(self, *others: Iterable[T]) -> Iter[T]:
    """
    Concatenate zero or more iterables, any of which may be infinite.

    An infinite sequence will prevent the rest of the arguments from being included.

    We use chain.from_iterable rather than chain(*seqs) so that seqs can be a generator.

    Args:
        others: Other iterables to concatenate.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2]).chain([3, 4], [5]).into(list)
    [1, 2, 3, 4, 5]

    ```
    """

    def _chain(data: Iterable[T]) -> Iterator[T]:
        return cz.itertoolz.concat((data, *others))

    return self.apply(_chain)

chunks

chunks(n: int, strict: bool = False) -> Iter[list[T]]

Break iterable into lists of length n.

By default, the last yielded list will have fewer than n elements if the length of iterable is not divisible by n.

To use a fill-in value instead, see the grouper recipe.

If:

  • the length of iterable is not divisible by n
  • strict is True

then ValueError will be raised before the last list is yielded.

Parameters:

- `n` (`int`), required: Number of elements in each chunk.
- `strict` (`bool`), default `False`: Whether to raise an error if the last chunk is smaller than n.

Example:

>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 3, 4, 5, 6]).chunks(3).into(list)
[[1, 2, 3], [4, 5, 6]]
>>> pc.Iter.from_([1, 2, 3, 4, 5, 6, 7, 8]).chunks(3).into(list)
[[1, 2, 3], [4, 5, 6], [7, 8]]
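Strict mode can be shown with a short sketch (assuming, per the description above, that ValueError is raised when the last chunk would be short):
>>> try:
...     pc.Iter.from_([1, 2, 3, 4]).chunks(3, strict=True).into(list)
... except ValueError:
...     print("last chunk is too short")
last chunk is too short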

Source code in src/pyochain/_iter/_lists.py
def chunks(self, n: int, strict: bool = False) -> Iter[list[T]]:
    """
    Break iterable into lists of length n.

    By default, the last yielded list will have fewer than *n* elements if the length of *iterable* is not divisible by *n*.

    To use a fill-in value instead, see the :func:`grouper` recipe.

    If:

    - the length of *iterable* is not divisible by *n*
    - *strict* is `True`

    then `ValueError` will be raised before the last list is yielded.

    Args:
        n: Number of elements in each chunk.
        strict: Whether to raise an error if the last chunk is smaller than n. Defaults to False.
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 3, 4, 5, 6]).chunks(3).into(list)
    [[1, 2, 3], [4, 5, 6]]
    >>> pc.Iter.from_([1, 2, 3, 4, 5, 6, 7, 8]).chunks(3).into(list)
    [[1, 2, 3], [4, 5, 6], [7, 8]]

    ```
    """
    return self.apply(mit.chunked, n, strict)

chunks_even

chunks_even(n: int) -> Iter[list[T]]

Break iterable into lists of approximately length n.

Items are distributed such that the lengths of the lists differ by at most 1 item.

Parameters:

- `n` (`int`), required: Approximate number of elements in each chunk.

Example:

>>> import pyochain as pc
>>> iterable = pc.Seq([1, 2, 3, 4, 5, 6, 7])
>>> iterable.iter().chunks_even(3).into(list)  # List lengths: 3, 2, 2
[[1, 2, 3], [4, 5], [6, 7]]
>>> iterable.iter().chunks(3).into(list)  # List lengths: 3, 3, 1
[[1, 2, 3], [4, 5, 6], [7]]

Source code in src/pyochain/_iter/_lists.py
def chunks_even(self, n: int) -> Iter[list[T]]:
    """
    Break iterable into lists of approximately length n.

    Items are distributed such that the lengths of the lists differ by at most 1 item.

    Args:
        n: Approximate number of elements in each chunk.
    Example:
    ```python
    >>> import pyochain as pc
    >>> iterable = pc.Seq([1, 2, 3, 4, 5, 6, 7])
    >>> iterable.iter().chunks_even(3).into(list)  # List lengths: 3, 2, 2
    [[1, 2, 3], [4, 5], [6, 7]]
    >>> iterable.iter().chunks(3).into(list)  # List lengths: 3, 3, 1
    [[1, 2, 3], [4, 5, 6], [7]]

    ```
    """
    return self.apply(mit.chunked_even, n)

classify_unique

classify_unique() -> Iter[tuple[T, bool, bool]]

Classify each element in terms of its uniqueness.

For each element in the input iterable, return a 3-tuple consisting of:

  • The element itself
  • False if the element is equal to the one preceding it in the input, True otherwise (i.e. the equivalent of unique_justseen)
  • False if this element has been seen anywhere in the input before, True otherwise (i.e. the equivalent of unique_everseen)

This function is analogous to unique_everseen and is subject to the same performance considerations.

>>> import pyochain as pc
>>> pc.Iter.from_("otto").classify_unique().into(list)
... # doctest: +NORMALIZE_WHITESPACE
[('o', True,  True),
('t', True,  True),
('t', False, False),
('o', True,  False)]
Source code in src/pyochain/_iter/_tuples.py
def classify_unique(self) -> Iter[tuple[T, bool, bool]]:
    """
    Classify each element in terms of its uniqueness.\n
    For each element in the input iterable, return a 3-tuple consisting of:

    - The element itself
    - False if the element is equal to the one preceding it in the input, True otherwise (i.e. the equivalent of unique_justseen)
    - False if this element has been seen anywhere in the input before, True otherwise (i.e. the equivalent of unique_everseen)

    This function is analogous to unique_everseen and is subject to the same performance considerations.

    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_("otto").classify_unique().into(list)
    ... # doctest: +NORMALIZE_WHITESPACE
    [('o', True,  True),
    ('t', True,  True),
    ('t', False, False),
    ('o', True,  False)]

    ```
    """
    return self.apply(mit.classify_unique)

collect

collect(factory: Callable[[Iterable[T]], Collection[T]] = list) -> Seq[T]

Collect the elements into a sequence.

Parameters:

- `factory` (`Callable[[Iterable[T]], Collection[T]]`), default `list`: A callable that takes an iterable and returns a collection.

Example:

>>> import pyochain as pc
>>> pc.Iter.from_(range(5)).collect().unwrap()
[0, 1, 2, 3, 4]
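Any collection constructor can serve as the factory; a small sketch assuming a set factory:
>>> pc.Iter.from_([1, 2, 2, 3]).collect(set).unwrap()
{1, 2, 3}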

Source code in src/pyochain/_core/_main.py
def collect(self, factory: Callable[[Iterable[T]], Collection[T]] = list) -> Seq[T]:
    """
    Collect the elements into a sequence.

    Args:
        factory: A callable that takes an iterable and returns a collection. Defaults to list.

    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_(range(5)).collect().unwrap()
    [0, 1, 2, 3, 4]

    ```
    """
    from .._iter import Seq

    return Seq(self.into(factory))

combination_index

combination_index(r: Iterable[T]) -> int

Equivalent to list(combinations(iterable, r)).index(element).

Parameters:

- `r` (`Iterable[T]`), required: The combination to find the index of.

The subsequences of iterable that are of length r can be ordered lexicographically.

combination_index computes the index of the first element, without computing the previous combinations.

ValueError will be raised if the given element isn't one of the combinations of iterable.

>>> import pyochain as pc
>>> pc.Iter.from_("abcdefg").combination_index("adf")
10

Source code in src/pyochain/_iter/_aggregations.py
def combination_index(self, r: Iterable[T]) -> int:
    """
    Equivalent to list(combinations(iterable, r)).index(element).

    Args:
        r: The combination to find the index of.

    The subsequences of iterable that are of length r can be ordered lexicographically.

    combination_index computes the index of the first element, without computing the previous combinations.

    ValueError will be raised if the given element isn't one of the combinations of iterable.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_("abcdefg").combination_index("adf")
    10

    ```
    """
    return self.into(functools.partial(mit.combination_index, r))

combinations

combinations(r: Literal[2]) -> Iter[tuple[T, T]]
combinations(r: Literal[3]) -> Iter[tuple[T, T, T]]
combinations(r: Literal[4]) -> Iter[tuple[T, T, T, T]]
combinations(r: Literal[5]) -> Iter[tuple[T, T, T, T, T]]
combinations(r: int) -> Iter[tuple[T, ...]]

Return all combinations of length r.

Parameters:

- `r` (`int`), required: Length of each combination.
>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 3]).combinations(2).into(list)
[(1, 2), (1, 3), (2, 3)]
Source code in src/pyochain/_iter/_tuples.py
def combinations(self, r: int) -> Iter[tuple[T, ...]]:
    """
    Return all combinations of length r.

    Args:
        r: Length of each combination.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 3]).combinations(2).into(list)
    [(1, 2), (1, 3), (2, 3)]

    ```
    """
    return self.apply(itertools.combinations, r)

combinations_with_replacement

combinations_with_replacement(r: Literal[2]) -> Iter[tuple[T, T]]
combinations_with_replacement(r: Literal[3]) -> Iter[tuple[T, T, T]]
combinations_with_replacement(r: Literal[4]) -> Iter[tuple[T, T, T, T]]
combinations_with_replacement(r: Literal[5]) -> Iter[tuple[T, T, T, T, T]]
combinations_with_replacement(r: int) -> Iter[tuple[T, ...]]

Return all combinations with replacement of length r.

Parameters:

- `r` (`int`), required: Length of each combination.
>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 3]).combinations_with_replacement(2).into(list)
[(1, 1), (1, 2), (1, 3), (2, 2), (2, 3), (3, 3)]
Source code in src/pyochain/_iter/_tuples.py
def combinations_with_replacement(self, r: int) -> Iter[tuple[T, ...]]:
    """
    Return all combinations with replacement of length r.

    Args:
        r: Length of each combination.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 3]).combinations_with_replacement(2).into(list)
    [(1, 1), (1, 2), (1, 3), (2, 2), (2, 3), (3, 3)]

    ```
    """
    return self.apply(itertools.combinations_with_replacement, r)

compress

compress(*selectors: bool) -> Iter[T]

Filter elements using a boolean selector iterable.

Parameters:

- `*selectors` (`bool`): Boolean values indicating which elements to keep.

Example:

>>> import pyochain as pc
>>> pc.Iter.from_("ABCDEF").compress(1, 0, 1, 0, 1, 1).into(list)
['A', 'C', 'E', 'F']

Source code in src/pyochain/_iter/_filters.py
def compress(self, *selectors: bool) -> Iter[T]:
    """
    Filter elements using a boolean selector iterable.

    Args:
        selectors: Boolean values indicating which elements to keep.
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_("ABCDEF").compress(1, 0, 1, 0, 1, 1).into(list)
    ['A', 'C', 'E', 'F']

    ```
    """
    return self.apply(itertools.compress, selectors)

count

count() -> int

Return the length of the sequence. Like the builtin len but works on lazy sequences.

>>> import pyochain as pc
>>> pc.Iter.from_([1, 2]).count()
2

Source code in src/pyochain/_iter/_aggregations.py
def count(self) -> int:
    """
    Return the length of the sequence.
    Like the builtin len but works on lazy sequences.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2]).count()
    2

    ```
    """
    return self.into(cz.itertoolz.count)

count_by

count_by(key: Callable[[T], K]) -> Dict[K, int]

Count elements of a collection by a key function.

Parameters:

- `key` (`Callable[[T], K]`), required: Function to compute the key for counting.

Example:

>>> import pyochain as pc
>>> pc.Iter.from_(["cat", "mouse", "dog"]).count_by(len).unwrap()
{3: 2, 5: 1}
>>> def iseven(x):
...     return x % 2 == 0
>>> pc.Iter.from_([1, 2, 3]).count_by(iseven).unwrap()
{False: 2, True: 1}

Source code in src/pyochain/_iter/_groups.py
def count_by[K](self, key: Callable[[T], K]) -> Dict[K, int]:
    """
    Count elements of a collection by a key function.

    Args:
        key: Function to compute the key for counting.
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_(["cat", "mouse", "dog"]).count_by(len).unwrap()
    {3: 2, 5: 1}
    >>> def iseven(x):
    ...     return x % 2 == 0
    >>> pc.Iter.from_([1, 2, 3]).count_by(iseven).unwrap()
    {False: 2, True: 1}

    ```
    """
    from .._dict import Dict

    return Dict(self.into(partial(cz.recipes.countby, key)))

cycle

cycle() -> Iter[T]

Repeat the sequence indefinitely.

Warning ⚠️ This creates an infinite iterator. Be sure to use Iter.take() or Iter.slice() to limit the number of items taken.

Example:
>>> import pyochain as pc
>>> pc.Iter.from_([1, 2]).cycle().take(5).into(list)
[1, 2, 1, 2, 1]

Source code in src/pyochain/_iter/_process.py
def cycle(self) -> Iter[T]:
    """
    Repeat the sequence indefinitely.

    **Warning** ⚠️
        This creates an infinite iterator.
        Be sure to use Iter.take() or Iter.slice() to limit the number of items taken.
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2]).cycle().take(5).into(list)
    [1, 2, 1, 2, 1]

    ```
    """
    return self.apply(itertools.cycle)

diff_at

diff_at(
    *others: Iterable[T],
    default: T | None = None,
    key: Callable[[T], Any] | None = None,
) -> Iter[tuple[T, ...]]

Return those items that differ between iterables. Each output item is a tuple where the i-th element is from the i-th input iterable.

If an input iterable is exhausted before others, then the corresponding output items will be filled with default.

Parameters:

- `*others` (`Iterable[T]`): Other iterables to compare with.
- `default` (`T | None`), default `None`: Value to use for missing elements.
- `key` (`Callable[[T], Any] | None`), default `None`: Function to apply to each item for comparison.

Example:

>>> import pyochain as pc
>>> data = pc.Seq([1, 2, 3])
>>> data.iter().diff_at([1, 2, 10, 100], default=None).into(list)
[(3, 10), (None, 100)]
>>> data.iter().diff_at([1, 2, 10, 100, 2, 6, 7], default=0).into(list)
[(3, 10), (0, 100), (0, 2), (0, 6), (0, 7)]

A key function may also be applied to each item to use during comparisons:
>>> import pyochain as pc
>>> pc.Iter.from_(["apples", "bananas"]).diff_at(
...     ["Apples", "Oranges"], key=str.lower
... ).into(list)
[('bananas', 'Oranges')]

Source code in src/pyochain/_iter/_joins.py
def diff_at(
    self,
    *others: Iterable[T],
    default: T | None = None,
    key: Callable[[T], Any] | None = None,
) -> Iter[tuple[T, ...]]:
    """
    Return those items that differ between iterables.
    Each output item is a tuple where the i-th element is from the i-th input iterable.

    If an input iterable is exhausted before others, then the corresponding output items will be filled with *default*.

    Args:
        *others: Other iterables to compare with.
        default: Value to use for missing elements. Defaults to None.
        key: Function to apply to each item for comparison. Defaults to None.
    Example:
    ```python
    >>> import pyochain as pc
    >>> data = pc.Seq([1, 2, 3])
    >>> data.iter().diff_at([1, 2, 10, 100], default=None).into(list)
    [(3, 10), (None, 100)]
    >>> data.iter().diff_at([1, 2, 10, 100, 2, 6, 7], default=0).into(list)
    [(3, 10), (0, 100), (0, 2), (0, 6), (0, 7)]

    ```
    A key function may also be applied to each item to use during comparisons:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_(["apples", "bananas"]).diff_at(
    ...     ["Apples", "Oranges"], key=str.lower
    ... ).into(list)
    [('bananas', 'Oranges')]

    ```
    """
    return self.apply(cz.itertoolz.diff, *others, default=default, key=key)

diff_symmetric

diff_symmetric(*others: Iterable[T]) -> Seq[T]

Return the symmetric difference (XOR) of this iterable and 'others'.

Note

This method consumes the inner data, does not preserve its order, and removes duplicates.

Parameters:

- `*others` (`Iterable[T]`): Other iterables to compute the symmetric difference with.

Example:

>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 2]).diff_symmetric([2, 3]).iter().sort().unwrap()
[1, 3]
>>> pc.Iter.from_([1, 2, 3]).diff_symmetric([3, 4, 5]).iter().sort().unwrap()
[1, 2, 4, 5]

Source code in src/pyochain/_iter/_eager.py
def diff_symmetric(self, *others: Iterable[T]) -> Seq[T]:
    """
    Return the symmetric difference (XOR) of this iterable and 'others'.

    Note:
        This method consumes the inner data, does not preserve its order, and removes duplicates.

    Args:
        *others: Other iterables to compute the symmetric difference with.
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 2]).diff_symmetric([2, 3]).iter().sort().unwrap()
    [1, 3]
    >>> pc.Iter.from_([1, 2, 3]).diff_symmetric([3, 4, 5]).iter().sort().unwrap()
    [1, 2, 4, 5]

    ```
    """

    def _symmetric_difference(data: Iterable[T]) -> set[T]:
        return set(data).symmetric_difference(*others)

    return self.collect(_symmetric_difference)

diff_unique

diff_unique(*others: Iterable[T]) -> Seq[T]

Return the difference of this iterable and 'others'. (Elements in 'self' but not in 'others').

Note

This method consumes the inner data, does not preserve its order, and removes duplicates.

Parameters:

- `*others` (`Iterable[T]`): Other iterables to subtract from this iterable.

Example:

>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 2]).diff_unique([2, 3]).unwrap()
{1}

Source code in src/pyochain/_iter/_eager.py
def diff_unique(self, *others: Iterable[T]) -> Seq[T]:
    """
    Return the difference of this iterable and 'others'.
    (Elements in 'self' but not in 'others').

    Note:
        This method consumes the inner data, does not preserve its order, and removes duplicates.

    Args:
        *others: Other iterables to subtract from this iterable.
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 2]).diff_unique([2, 3]).unwrap()
    {1}

    ```
    """

    def _difference(data: Iterable[T]) -> set[T]:
        return set(data).difference(*others)

    return self.collect(_difference)

elements

elements() -> Iter[T]

Iterator over elements repeating each as many times as its count.

Note

If an element's count has been set to zero or is a negative number, elements() will ignore it.

>>> import pyochain as pc
>>> pc.Iter.from_("ABCABC").elements().sort().unwrap()
['A', 'A', 'B', 'B', 'C', 'C']
Knuth's example for prime factors of 1836: 2**2 * 3**3 * 17**1
>>> import math
>>> data = [2, 2, 3, 3, 3, 17]
>>> pc.Iter.from_(data).elements().into(math.prod)
1836

Source code in src/pyochain/_iter/_process.py
def elements(self) -> Iter[T]:
    """
    Iterator over elements repeating each as many times as its count.

    Note:
        if an element's count has been set to zero or is a negative
        number, elements() will ignore it.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_("ABCABC").elements().sort().unwrap()
    ['A', 'A', 'B', 'B', 'C', 'C']

    ```
    Knuth's example for prime factors of 1836:  2**2 * 3**3 * 17**1
    ```python
    >>> import math
    >>> data = [2, 2, 3, 3, 3, 17]
    >>> pc.Iter.from_(data).elements().into(math.prod)
    1836

    ```
    """
    from collections import Counter

    def _elements(data: Iterable[T]) -> Iterator[T]:
        return Counter(data).elements()

    return self.apply(_elements)

enumerate

enumerate() -> Iter[tuple[int, T]]

Return an Iter of (index, value) pairs.

>>> import pyochain as pc
>>> pc.Iter.from_(["a", "b"]).enumerate().into(list)
[(0, 'a'), (1, 'b')]

Source code in src/pyochain/_iter/_tuples.py
def enumerate(self) -> Iter[tuple[int, T]]:
    """
    Return an Iter of (index, value) pairs.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_(["a", "b"]).enumerate().into(list)
    [(0, 'a'), (1, 'b')]

    ```
    """
    return self.apply(enumerate)

every

every(index: int) -> Iter[T]

Return every nth item, starting from the first.

Parameters:

- `index` (`int`), required: Step size for selecting items.

Example:

>>> import pyochain as pc
>>> pc.Iter.from_([10, 20, 30, 40]).every(2).into(list)
[10, 30]

Source code in src/pyochain/_iter/_filters.py
def every(self, index: int) -> Iter[T]:
    """
    Return every nth item, starting from the first.

    Args:
        index: Step size for selecting items.
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([10, 20, 30, 40]).every(2).into(list)
    [10, 30]

    ```
    """
    return self.apply(partial(cz.itertoolz.take_nth, index))

extract

extract(indices: Iterable[int]) -> Iter[T]

Yield values at the specified indices.

  • The iterable is consumed lazily and can be infinite.
  • The indices are consumed immediately and must be finite.
  • Raises IndexError if an index lies beyond the iterable.
  • Raises ValueError for negative indices.

Parameters:

- `indices` (`Iterable[int]`), required: Iterable of indices to extract values from.

Example:

>>> import pyochain as pc
>>> text = "abcdefghijklmnopqrstuvwxyz"
>>> pc.Iter.from_(text).extract([7, 4, 11, 11, 14]).into(list)
['h', 'e', 'l', 'l', 'o']

Source code in src/pyochain/_iter/_filters.py
def extract(self, indices: Iterable[int]) -> Iter[T]:
    """
    Yield values at the specified indices.

    - The iterable is consumed lazily and can be infinite.
    - The indices are consumed immediately and must be finite.
    - Raises IndexError if an index lies beyond the iterable.
    - Raises ValueError for negative indices.

    Args:
        indices: Iterable of indices to extract values from.
    Example:
    ```python
    >>> import pyochain as pc
    >>> text = "abcdefghijklmnopqrstuvwxyz"
    >>> pc.Iter.from_(text).extract([7, 4, 11, 11, 14]).into(list)
    ['h', 'e', 'l', 'l', 'o']

    ```
    """
    return self.apply(mit.extract, indices)

filter

filter(func: Callable[[T], bool]) -> Iter[T]

Return an iterator yielding those items of iterable for which function is true.

Parameters:

- `func` (`Callable[[T], bool]`), required: Function to evaluate each item.

Example:

>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 3]).filter(lambda x: x > 1).into(list)
[2, 3]

Source code in src/pyochain/_iter/_filters.py
def filter(self, func: Callable[[T], bool]) -> Iter[T]:
    """
    Return an iterator yielding those items of iterable for which function is true.

    Args:
        func: Function to evaluate each item.
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 3]).filter(lambda x: x > 1).into(list)
    [2, 3]

    ```
    """

    def _filter(data: Iterable[T]) -> Iterator[T]:
        return (x for x in data if func(x))

    return self.apply(_filter)

filter_attr

filter_attr(attr: str, dtype: type[U] = object) -> Iter[U]

Return elements that have the given attribute.

For performance reasons, the provided dtype is not checked at runtime.

Parameters:

- `attr` (`str`), required: Name of the attribute to check for.
- `dtype` (`type[U]`), default `object`: Expected type of the attribute.

Example:

>>> import pyochain as pc
>>> pc.Iter.from_(["hello", "world", 2, 5]).filter_attr("capitalize", str).into(
...     list
... )
['hello', 'world']

Source code in src/pyochain/_iter/_filters.py
def filter_attr[U](self, attr: str, dtype: type[U] = object) -> Iter[U]:
    """
    Return elements that have the given attribute.

    For performance reasons, the provided dtype is not checked at runtime.

    Args:
        attr: Name of the attribute to check for.
        dtype: Expected type of the attribute. Defaults to object.
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_(["hello", "world", 2, 5]).filter_attr("capitalize", str).into(
    ...     list
    ... )
    ['hello', 'world']

    ```
    """

    def check(data: Iterable[Any]) -> Generator[U, None, None]:
        def _(x: Any) -> TypeGuard[U]:
            return hasattr(x, attr)

        return (x for x in data if _(x))

    return self.apply(check)

filter_callable

filter_callable() -> Iter[Callable[..., Any]]

Return only elements that are callable.

Example:

>>> import pyochain as pc
>>> pc.Iter.from_([len, 42, str, None, list]).filter_callable().into(list)
[<built-in function len>, <class 'str'>, <class 'list'>]

Source code in src/pyochain/_iter/_filters.py
def filter_callable(self) -> Iter[Callable[..., Any]]:
    """
    Return only elements that are callable.

    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([len, 42, str, None, list]).filter_callable().into(list)
    [<built-in function len>, <class 'str'>, <class 'list'>]

    ```
    """

    def _filter_callable(
        data: Iterable[T],
    ) -> Generator[Callable[..., Any], None, None]:
        return (x for x in data if callable(x))

    return self.apply(_filter_callable)

filter_contain

filter_contain(text: str, format: Callable[[str], str] | None = None) -> Iter[str]

Return elements that contain the given text.

Optionally, a format function can be provided to preprocess each element before checking for the substring.

Parameters:

- `text` (`str`), required: Substring to check for.
- `format` (`Callable[[str], str] | None`), default `None`: Optional function to preprocess each element before checking.

Example:

>>> import pyochain as pc
>>>
>>> data = pc.Seq(["apple", "banana", "cherry", "date"])
>>> data.iter().filter_contain("ana").into(list)
['banana']
>>> data.iter().map(str.upper).filter_contain("ana", str.lower).into(list)
['BANANA']

Source code in src/pyochain/_iter/_filters.py
def filter_contain(
    self: IterWrapper[str], text: str, format: Callable[[str], str] | None = None
) -> Iter[str]:
    """
    Return elements that contain the given text.

    Optionally, a format function can be provided to preprocess each element before checking for the substring.

    Args:
        text: Substring to check for.
        format: Optional function to preprocess each element before checking. Defaults to None.
    Example:
    ```python
    >>> import pyochain as pc
    >>>
    >>> data = pc.Seq(["apple", "banana", "cherry", "date"])
    >>> data.iter().filter_contain("ana").into(list)
    ['banana']
    >>> data.iter().map(str.upper).filter_contain("ana", str.lower).into(list)
    ['BANANA']

    ```
    """

    def _filter_contain(data: Iterable[str]) -> Generator[str, None, None]:
        def _(x: str) -> bool:
            formatted = format(x) if format else x
            return text in formatted

        return (x for x in data if _(x))

    return self.apply(_filter_contain)

filter_except

filter_except(func: Callable[[T], object], *exceptions: type[BaseException]) -> Iter[T]

Yield the items from iterable for which the validator function does not raise one of the specified exceptions.

Validator is called for each item in iterable.

It should be a function that accepts one argument and raises an exception if that item is not valid.

If an exception other than one given by exceptions is raised by validator, it is raised like normal.

Parameters:

- `func` (`Callable[[T], object]`), required: Validator function to apply to each item.
- `*exceptions` (`type[BaseException]`): Exceptions to catch and ignore.

Example:

>>> import pyochain as pc
>>> iterable = ["1", "2", "three", "4", None]
>>> pc.Iter.from_(iterable).filter_except(int, ValueError, TypeError).into(list)
['1', '2', '4']

Source code in src/pyochain/_iter/_filters.py
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
def filter_except(
    self, func: Callable[[T], object], *exceptions: type[BaseException]
) -> Iter[T]:
    """
    Yield the items from iterable for which the validator function does not raise one of the specified exceptions.

    Validator is called for each item in iterable.

    It should be a function that accepts one argument and raises an exception if that item is not valid.

    If an exception other than one given by exceptions is raised by validator, it is raised like normal.

    Args:
        func: Validator function to apply to each item.
        exceptions: Exceptions to catch and ignore.
    Example:
    ```python
    >>> import pyochain as pc
    >>> iterable = ["1", "2", "three", "4", None]
    >>> pc.Iter.from_(iterable).filter_except(int, ValueError, TypeError).into(list)
    ['1', '2', '4']

    ```
    """

    def _filter_except(data: Iterable[T]) -> Iterator[T]:
        return mit.filter_except(func, data, *exceptions)

    return self.apply(_filter_except)

filter_false

filter_false(func: Callable[[T], bool]) -> Iter[T]

Return elements for which func is false.

Parameters:

- `func` (`Callable[[T], bool]`), required: Function to evaluate each item.

Example:

>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 3]).filter_false(lambda x: x > 1).into(list)
[1]

Source code in src/pyochain/_iter/_filters.py
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
def filter_false(self, func: Callable[[T], bool]) -> Iter[T]:
    """
    Return elements for which func is false.

    Args:
        func: Function to evaluate each item.
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 3]).filter_false(lambda x: x > 1).into(list)
    [1]

    ```
    """
    return self.apply(partial(itertools.filterfalse, func))

filter_isin

filter_isin(values: Iterable[T]) -> Iter[T]

Return elements that are in the given values iterable.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `values` | `Iterable[T]` | Iterable of values to check membership against. | *required* |

Example:

>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 3, 4]).filter_isin([2, 4, 6]).into(list)
[2, 4]

Source code in src/pyochain/_iter/_filters.py
def filter_isin(self, values: Iterable[T]) -> Iter[T]:
    """
    Return elements that are in the given values iterable.

    Args:
        values: Iterable of values to check membership against.
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 3, 4]).filter_isin([2, 4, 6]).into(list)
    [2, 4]

    ```
    """

    def _filter_isin(data: Iterable[T]) -> Generator[T, None, None]:
        value_set: set[T] = set(values)
        return (x for x in data if x in value_set)

    return self.apply(_filter_isin)

filter_map

filter_map(func: Callable[[T], R]) -> Iter[R]

Apply func to every element of iterable, yielding only the results that are not None.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `func` | `Callable[[T], R]` | Function to apply to each item. | *required* |

Example:

>>> import pyochain as pc
>>> def to_int(s: str) -> int | None:
...     return int(s) if s.isnumeric() else None
>>> elems = ["1", "a", "2", "b", "3"]
>>> pc.Iter.from_(elems).filter_map(to_int).into(list)
[1, 2, 3]

Source code in src/pyochain/_iter/_filters.py
def filter_map[R](self, func: Callable[[T], R]) -> Iter[R]:
    """
    Apply func to every element of iterable, yielding only the results that are not None.

    Args:
        func: Function to apply to each item.
    Example:
    ```python
    >>> import pyochain as pc
    >>> def to_int(s: str) -> int | None:
    ...     return int(s) if s.isnumeric() else None
    >>> elems = ["1", "a", "2", "b", "3"]
    >>> pc.Iter.from_(elems).filter_map(to_int).into(list)
    [1, 2, 3]

    ```
    """
    return self.apply(partial(mit.filter_map, func))

filter_notin

filter_notin(values: Iterable[T]) -> Iter[T]

Return elements that are not in the given values iterable.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `values` | `Iterable[T]` | Iterable of values to exclude. | *required* |

Example:

>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 3, 4]).filter_notin([2, 4, 6]).into(list)
[1, 3]

Source code in src/pyochain/_iter/_filters.py
def filter_notin(self, values: Iterable[T]) -> Iter[T]:
    """
    Return elements that are not in the given values iterable.

    Args:
        values: Iterable of values to exclude.
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 3, 4]).filter_notin([2, 4, 6]).into(list)
    [1, 3]

    ```
    """

    def _filter_notin(data: Iterable[T]) -> Generator[T, None, None]:
        value_set: set[T] = set(values)
        return (x for x in data if x not in value_set)

    return self.apply(_filter_notin)

filter_subclass

filter_subclass(parent: type[R], keep_parent: bool = True) -> Iter[type[R]]

Return elements that are subclasses of the given class, optionally excluding the parent class itself.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `parent` | `type[R]` | Parent class to check against. | *required* |
| `keep_parent` | `bool` | Whether to include the parent class itself. Defaults to True. | `True` |

Example:

>>> import pyochain as pc
>>> class A:
...     pass
>>> class B(A):
...     pass
>>> class C:
...     pass
>>> def name(cls: type[Any]) -> str:
...     return cls.__name__
>>>
>>> data = pc.Seq([A, B, C])
>>> data.iter().filter_subclass(A).map(name).into(list)
['A', 'B']
>>> data.iter().filter_subclass(A, keep_parent=False).map(name).into(list)
['B']

Source code in src/pyochain/_iter/_filters.py
def filter_subclass[U: type[Any], R](
    self: IterWrapper[U], parent: type[R], keep_parent: bool = True
) -> Iter[type[R]]:
    """
    Return elements that are subclasses of the given class, optionally excluding the parent class itself.

    Args:
        parent: Parent class to check against.
        keep_parent: Whether to include the parent class itself. Defaults to True.
    Example:
    ```python
    >>> import pyochain as pc
    >>> class A:
    ...     pass
    >>> class B(A):
    ...     pass
    >>> class C:
    ...     pass
    >>> def name(cls: type[Any]) -> str:
    ...     return cls.__name__
    >>>
    >>> data = pc.Seq([A, B, C])
    >>> data.iter().filter_subclass(A).map(name).into(list)
    ['A', 'B']
    >>> data.iter().filter_subclass(A, keep_parent=False).map(name).into(list)
    ['B']

    ```
    """

    def _filter_subclass(
        data: Iterable[type[Any]],
    ) -> Generator[type[R], None, None]:
        if keep_parent:
            return (x for x in data if issubclass(x, parent))
        else:
            return (x for x in data if issubclass(x, parent) and x is not parent)

    return self.apply(_filter_subclass)

filter_type

filter_type(typ: type[R]) -> Iter[R]

Return elements that are instances of the given type.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `typ` | `type[R]` | Type to check against. | *required* |

Example:

>>> import pyochain as pc
>>> pc.Iter.from_([1, "two", 3.0, "four", 5]).filter_type(int).into(list)
[1, 5]

Source code in src/pyochain/_iter/_filters.py
def filter_type[R](self, typ: type[R]) -> Iter[R]:
    """
    Return elements that are instances of the given type.

    Args:
        typ: Type to check against.
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, "two", 3.0, "four", 5]).filter_type(int).into(list)
    [1, 5]

    ```
    """

    def _filter_type(data: Iterable[T]) -> Generator[R, None, None]:
        return (x for x in data if isinstance(x, typ))

    return self.apply(_filter_type)

find

find(default: None = None, predicate: Callable[[T], bool] | None = ...) -> T | None
find(default: T, predicate: Callable[[T], bool] | None = ...) -> T
find(default: U = None, predicate: Callable[[T], bool] | None = None) -> U | T

Searches for an element of an iterator that satisfies a predicate, by:

  • Taking a closure that returns true or false as predicate (optional).
  • Using the identity function if no predicate is provided.
  • Applying this closure to each element of the iterator.
  • Returning the first element that satisfies the predicate.

If all the elements return false, Iter.find() returns the default value.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `default` | `U` | Value to return if no element satisfies the predicate. Defaults to None. | `None` |
| `predicate` | `Callable[[T], bool] \| None` | Function to evaluate each item. Defaults to checking truthiness. | `None` |

Example:

>>> import pyochain as pc
>>> def gt_five(x: int) -> bool:
...     return x > 5
>>>
>>> def gt_nine(x: int) -> bool:
...     return x > 9
>>>
>>> pc.Iter.from_(range(10)).find()
1
>>> pc.Iter.from_(range(10)).find(predicate=gt_five)
6
>>> pc.Iter.from_(range(10)).find(default="missing", predicate=gt_nine)
'missing'

Source code in src/pyochain/_iter/_booleans.py
def find[U](
    self, default: U = None, predicate: Callable[[T], bool] | None = None
) -> U | T:
    """
    Searches for an element of an iterator that satisfies a `predicate`, by:

    - Taking a closure that returns true or false as `predicate` (optional).
    - Using the identity function if no `predicate` is provided.
    - Applying this closure to each element of the iterator.
    - Returning the first element that satisfies the `predicate`.

    If all the elements return false, `Iter.find()` returns the default value.

    Args:
        default: Value to return if no element satisfies the predicate. Defaults to None.
        predicate: Function to evaluate each item. Defaults to checking truthiness.
    Example:
    ```python
    >>> import pyochain as pc
    >>> def gt_five(x: int) -> bool:
    ...     return x > 5
    >>>
    >>> def gt_nine(x: int) -> bool:
    ...     return x > 9
    >>>
    >>> pc.Iter.from_(range(10)).find()
    1
    >>> pc.Iter.from_(range(10)).find(predicate=gt_five)
    6
    >>> pc.Iter.from_(range(10)).find(default="missing", predicate=gt_nine)
    'missing'

    ```
    """
    return self.into(mit.first_true, default, predicate)

first

first() -> T

Return the first element.

>>> import pyochain as pc
>>> pc.Iter.from_([9]).first()
9

Source code in src/pyochain/_iter/_aggregations.py
def first(self) -> T:
    """
    Return the first element.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([9]).first()
    9

    ```
    """
    return self.into(cz.itertoolz.first)

flat_map

flat_map(func: Callable[[T], Iterable[Iterable[R]]]) -> Iter[Iterable[Iterable[R]]]
flat_map(func: Callable[[T], Iterable[R]]) -> Iter[Iterable[R]]
flat_map(func: Callable[[T], R]) -> Iter[R]
flat_map(func: Callable[[T], R]) -> Iter[Any]

Map each element through func and flatten the result by one level.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `func` | `Callable[[T], R]` | Function to apply to each element. | *required* |

>>> import pyochain as pc
>>> data = [[1, 2], [3, 4]]
>>> pc.Iter.from_(data).flat_map(lambda x: x + 10).into(list)
[11, 12, 13, 14]
Source code in src/pyochain/_iter/_maps.py
def flat_map[U: Iterable[Any], R](
    self: IterWrapper[U], func: Callable[[T], R]
) -> Iter[Any]:
    """
    Map each element through func and flatten the result by one level.

    Args:
        func: Function to apply to each element.

    ```python
    >>> import pyochain as pc
    >>> data = [[1, 2], [3, 4]]
    >>> pc.Iter.from_(data).flat_map(lambda x: x + 10).into(list)
    [11, 12, 13, 14]

    ```
    """

    def _flat_map(data: Iterable[U]) -> map[R]:
        return map(func, itertools.chain.from_iterable(data))

    return self.apply(_flat_map)

flatten

flatten() -> Iter[Iterable[Iterable[U]]]
flatten() -> Iter[Iterable[U]]
flatten() -> Iter[U]
flatten() -> Iter[Any]

Flatten one level of nesting and return a new Iterable wrapper.

This is a shortcut for .apply(itertools.chain.from_iterable).

>>> import pyochain as pc
>>> pc.Iter.from_([[1, 2], [3]]).flatten().into(list)
[1, 2, 3]
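
As a quick sketch (not from the original docs) of the "one level" behaviour: calling `flatten()` on a doubly nested iterable only removes the outermost layer, so a second call is needed to flatten it completely.

```python
>>> import pyochain as pc
>>> nested = [[[1], [2]], [[3]]]
>>> pc.Iter.from_(nested).flatten().into(list)  # only the outer level is removed
[[1], [2], [3]]
>>> pc.Iter.from_(nested).flatten().flatten().into(list)  # flatten twice for two levels
[1, 2, 3]

```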

Source code in src/pyochain/_iter/_maps.py
def flatten(self: IterWrapper[Iterable[Any]]) -> Iter[Any]:
    """
    Flatten one level of nesting and return a new Iterable wrapper.

    This is a shortcut for `.apply(itertools.chain.from_iterable)`.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([[1, 2], [3]]).flatten().into(list)
    [1, 2, 3]

    ```
    """
    return self.apply(itertools.chain.from_iterable)

for_each

for_each(
    func: Callable[Concatenate[T, P], Any], *args: P.args, **kwargs: P.kwargs
) -> Self

Apply a function to each element in the iterable.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `func` | `Callable[Concatenate[T, P], Any]` | Function to apply to each element. | *required* |
| `args` | `P.args` | Positional arguments for the function. | `()` |
| `kwargs` | `P.kwargs` | Keyword arguments for the function. | `{}` |

Can be used for side effects such as printing or logging.

>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 3]).for_each(lambda x: print(x)).collect().unwrap()
1
2
3
[]

Source code in src/pyochain/_iter/_maps.py
def for_each[**P](
    self,
    func: Callable[Concatenate[T, P], Any],
    *args: P.args,
    **kwargs: P.kwargs,
) -> Self:
    """
    Apply a function to each element in the iterable.

    Args:
        func: Function to apply to each element.
        args: Positional arguments for the function.
        kwargs: Keyword arguments for the function.

    Can be used for side effects such as printing or logging.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 3]).for_each(lambda x: print(x)).collect().unwrap()
    1
    2
    3
    []

    ```
    """
    for v in self.unwrap():
        func(v, *args, **kwargs)
    return self

frequencies

frequencies() -> Dict[T, int]

Find number of occurrences of each value in the iterable.

>>> import pyochain as pc
>>> data = ["cat", "cat", "ox", "pig", "pig", "cat"]
>>> pc.Iter.from_(data).frequencies().unwrap()
{'cat': 3, 'ox': 1, 'pig': 2}

Source code in src/pyochain/_iter/_groups.py
def frequencies(self) -> Dict[T, int]:
    """
    Find number of occurrences of each value in the iterable.
    ```python
    >>> import pyochain as pc
    >>> data = ["cat", "cat", "ox", "pig", "pig", "cat"]
    >>> pc.Iter.from_(data).frequencies().unwrap()
    {'cat': 3, 'ox': 1, 'pig': 2}

    ```
    """
    from .._dict import Dict

    return Dict(self.into(cz.itertoolz.frequencies))

from_ staticmethod

from_(data: Iterable[U]) -> Iter[U]

Create an iterator from any Iterable.

  • An Iterable is any object capable of returning its members one at a time, permitting it to be iterated over in a for-loop.
  • An Iterator is an object representing a stream of data; returned by calling iter() on an Iterable.
  • Once an Iterator is exhausted, it cannot be reused or reset.

If you need to reuse the data, consider collecting it into a list first with .collect().

In general, avoid intermediate references when dealing with lazy iterators, and prioritize method chaining instead.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `data` | `Iterable[U]` | Iterable to convert into an iterator. | *required* |

Example:

>>> import pyochain as pc
>>> data: tuple[int, ...] = (1, 2, 3)
>>> iterator = pc.Iter.from_(data)
>>> iterator.unwrap().__class__.__name__
'tuple_iterator'
>>> mapped = iterator.map(lambda x: x * 2)
>>> mapped.unwrap().__class__.__name__
'map'
>>> mapped.collect(tuple).unwrap()
(2, 4, 6)
>>> # iterator is now exhausted
>>> iterator.collect().unwrap()
[]
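
A small illustrative sketch of the advice above (assuming the default `.collect()` into a list, as in the example): collecting once materialises the data, so it can be traversed repeatedly, unlike the exhausted iterator shown just before.

```python
>>> import pyochain as pc
>>> seq = pc.Iter.from_(x * x for x in range(4)).collect()
>>> seq.iter().into(list)
[0, 1, 4, 9]
>>> seq.iter().into(list)  # the collected sequence can be re-iterated
[0, 1, 4, 9]

```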

Source code in src/pyochain/_iter/_constructors.py
@staticmethod
def from_[U](data: Iterable[U]) -> Iter[U]:
    """
    Create an iterator from any Iterable.

    - An Iterable is any object capable of returning its members one at a time, permitting it to be iterated over in a for-loop.
    - An Iterator is an object representing a stream of data; returned by calling `iter()` on an Iterable.
    - Once an Iterator is exhausted, it cannot be reused or reset.

    If you need to reuse the data, consider collecting it into a list first with `.collect()`.

    In general, avoid intermediate references when dealing with lazy iterators, and prioritize method chaining instead.

    Args:
        data: Iterable to convert into an iterator.
    Example:
    ```python
    >>> import pyochain as pc
    >>> data: tuple[int, ...] = (1, 2, 3)
    >>> iterator = pc.Iter.from_(data)
    >>> iterator.unwrap().__class__.__name__
    'tuple_iterator'
    >>> mapped = iterator.map(lambda x: x * 2)
    >>> mapped.unwrap().__class__.__name__
    'map'
    >>> mapped.collect(tuple).unwrap()
    (2, 4, 6)
    >>> # iterator is now exhausted
    >>> iterator.collect().unwrap()
    []

    ```
    """
    from ._main import Iter

    return Iter(iter(data))

from_count staticmethod

from_count(start: int = 0, step: int = 1) -> Iter[int]

Create an infinite iterator of evenly spaced values.

Warning ⚠️ This creates an infinite iterator. Be sure to use Iter.take() or Iter.slice() to limit the number of items taken.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `start` | `int` | Starting value of the sequence. Defaults to 0. | `0` |
| `step` | `int` | Difference between consecutive values. Defaults to 1. | `1` |

Example:

>>> import pyochain as pc
>>> pc.Iter.from_count(10, 2).take(3).into(list)
[10, 12, 14]

Source code in src/pyochain/_iter/_constructors.py
@staticmethod
def from_count(start: int = 0, step: int = 1) -> Iter[int]:
    """
    Create an infinite iterator of evenly spaced values.

    **Warning** ⚠️
        This creates an infinite iterator.
        Be sure to use `Iter.take()` or `Iter.slice()` to limit the number of items taken.

    Args:
        start: Starting value of the sequence. Defaults to 0.
        step: Difference between consecutive values. Defaults to 1.
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_count(10, 2).take(3).into(list)
    [10, 12, 14]

    ```
    """
    from ._main import Iter

    return Iter(itertools.count(start, step))

from_func staticmethod

from_func(func: Callable[[U], U], input: U) -> Iter[U]

Create an infinite iterator by repeatedly applying a function on an original input.

Warning ⚠️ This creates an infinite iterator. Be sure to use Iter.take() or Iter.slice() to limit the number of items taken.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `func` | `Callable[[U], U]` | Function to apply repeatedly. | *required* |
| `input` | `U` | Initial value to start the iteration. | *required* |

Example:

>>> import pyochain as pc
>>> pc.Iter.from_func(lambda x: x + 1, 0).take(3).into(list)
[0, 1, 2]

Source code in src/pyochain/_iter/_constructors.py
@staticmethod
def from_func[U](func: Callable[[U], U], input: U) -> Iter[U]:
    """
    Create an infinite iterator by repeatedly applying a function on an original input.

    **Warning** ⚠️
        This creates an infinite iterator.
        Be sure to use `Iter.take()` or `Iter.slice()` to limit the number of items taken.

    Args:
        func: Function to apply repeatedly.
        input: Initial value to start the iteration.

    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_func(lambda x: x + 1, 0).take(3).into(list)
    [0, 1, 2]

    ```
    """
    from ._main import Iter

    return Iter(cz.itertoolz.iterate(func, input))

group_by

group_by(on: Callable[[T], K]) -> Dict[K, list[T]]

Group elements by key function and return a Dict result.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `on` | `Callable[[T], K]` | Function to compute the key for grouping. | *required* |

Example:

>>> import pyochain as pc
>>> names = [
...     "Alice",
...     "Bob",
...     "Charlie",
...     "Dan",
...     "Edith",
...     "Frank",
... ]
>>> pc.Iter.from_(names).group_by(len).sort()
... # doctest: +NORMALIZE_WHITESPACE
Dict({
    3: ['Bob', 'Dan'],
    5: ['Alice', 'Edith', 'Frank'],
    7: ['Charlie']
})
>>>
>>> iseven = lambda x: x % 2 == 0
>>> pc.Iter.from_([1, 2, 3, 4, 5, 6, 7, 8]).group_by(iseven)
... # doctest: +NORMALIZE_WHITESPACE
Dict({
    False: [1, 3, 5, 7],
    True: [2, 4, 6, 8]
})

Non-callable keys imply grouping on a member.

>>> data = [
...     {"name": "Alice", "gender": "F"},
...     {"name": "Bob", "gender": "M"},
...     {"name": "Charlie", "gender": "M"},
... ]
>>> pc.Iter.from_(data).group_by("gender").sort()
... # doctest: +NORMALIZE_WHITESPACE
Dict({
    'F': [
        {'name': 'Alice', 'gender': 'F'}
    ],
    'M': [
        {'name': 'Bob', 'gender': 'M'},
        {'name': 'Charlie', 'gender': 'M'}
    ]
})

Source code in src/pyochain/_iter/_groups.py
def group_by[K](self, on: Callable[[T], K]) -> Dict[K, list[T]]:
    """
    Group elements by key function and return a Dict result.

    Args:
        on: Function to compute the key for grouping.
    Example:
    ```python
    >>> import pyochain as pc
    >>> names = [
    ...     "Alice",
    ...     "Bob",
    ...     "Charlie",
    ...     "Dan",
    ...     "Edith",
    ...     "Frank",
    ... ]
    >>> pc.Iter.from_(names).group_by(len).sort()
    ... # doctest: +NORMALIZE_WHITESPACE
    Dict({
        3: ['Bob', 'Dan'],
        5: ['Alice', 'Edith', 'Frank'],
        7: ['Charlie']
    })
    >>>
    >>> iseven = lambda x: x % 2 == 0
    >>> pc.Iter.from_([1, 2, 3, 4, 5, 6, 7, 8]).group_by(iseven)
    ... # doctest: +NORMALIZE_WHITESPACE
    Dict({
        False: [1, 3, 5, 7],
        True: [2, 4, 6, 8]
    })

    ```
    Non-callable keys imply grouping on a member.
    ```python
    >>> data = [
    ...     {"name": "Alice", "gender": "F"},
    ...     {"name": "Bob", "gender": "M"},
    ...     {"name": "Charlie", "gender": "M"},
    ... ]
    >>> pc.Iter.from_(data).group_by("gender").sort()
    ... # doctest: +NORMALIZE_WHITESPACE
    Dict({
        'F': [
            {'name': 'Alice', 'gender': 'F'}
        ],
        'M': [
            {'name': 'Bob', 'gender': 'M'},
            {'name': 'Charlie', 'gender': 'M'}
        ]
    })

    ```
    """
    from .._dict import Dict

    return Dict(self.into(partial(cz.itertoolz.groupby, on)))

group_by_transform

group_by_transform(
    keyfunc: None = None, valuefunc: None = None, reducefunc: None = None
) -> Iter[tuple[T, Iterator[T]]]
group_by_transform(
    keyfunc: Callable[[T], U], valuefunc: None, reducefunc: None
) -> Iter[tuple[U, Iterator[T]]]
group_by_transform(
    keyfunc: None, valuefunc: Callable[[T], V], reducefunc: None
) -> Iter[tuple[T, Iterator[V]]]
group_by_transform(
    keyfunc: Callable[[T], U], valuefunc: Callable[[T], V], reducefunc: None
) -> Iter[tuple[U, Iterator[V]]]
group_by_transform(
    keyfunc: None, valuefunc: None, reducefunc: Callable[[Iterator[T]], W]
) -> Iter[tuple[T, W]]
group_by_transform(
    keyfunc: Callable[[T], U], valuefunc: None, reducefunc: Callable[[Iterator[T]], W]
) -> Iter[tuple[U, W]]
group_by_transform(
    keyfunc: None, valuefunc: Callable[[T], V], reducefunc: Callable[[Iterator[V]], W]
) -> Iter[tuple[T, W]]
group_by_transform(
    keyfunc: Callable[[T], U],
    valuefunc: Callable[[T], V],
    reducefunc: Callable[[Iterator[V]], W],
) -> Iter[tuple[U, W]]
group_by_transform(
    keyfunc: Callable[[T], U] | None = None,
    valuefunc: Callable[[T], V] | None = None,
    reducefunc: Any = None,
) -> Iter[tuple[Any, ...]]

An extension of itertools.groupby that can apply transformations to the grouped data.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `keyfunc` | `Callable[[T], U] \| None` | Function to compute the key for grouping. Defaults to None. | `None` |
| `valuefunc` | `Callable[[T], V] \| None` | Function to transform individual items after grouping. Defaults to None. | `None` |
| `reducefunc` | `Any` | Function to transform each group of items. Defaults to None. | `None` |

Example:

>>> import pyochain as pc
>>> data = pc.Iter.from_("aAAbBBcCC")
>>> data.group_by_transform(
...     lambda k: k.upper(), lambda v: v.lower(), lambda g: "".join(g)
... ).into(list)
[('A', 'aaa'), ('B', 'bbb'), ('C', 'ccc')]
Each optional argument defaults to an identity function if not specified.

group_by_transform is useful when grouping elements of an iterable using a separate iterable as the key.

To do this, zip the iterables and pass a keyfunc that extracts the first element and a valuefunc that extracts the second element:

Note that the order of items in the iterable is significant.

Only adjacent items are grouped together, so if you don't want any duplicate groups, you should sort the iterable by the key function.

Example:

>>> from operator import itemgetter
>>> data = pc.Iter.from_([0, 0, 1, 1, 1, 2, 2, 2, 3])
>>> data.zip("abcdefghi").group_by_transform(itemgetter(0), itemgetter(1)).map(
...     lambda kv: (kv[0], "".join(kv[1]))
... ).into(list)
[(0, 'ab'), (1, 'cde'), (2, 'fgh'), (3, 'i')]

Source code in src/pyochain/_iter/_groups.py
def group_by_transform[U, V](
    self,
    keyfunc: Callable[[T], U] | None = None,
    valuefunc: Callable[[T], V] | None = None,
    reducefunc: Any = None,
) -> Iter[tuple[Any, ...]]:
    """
    An extension of itertools.groupby that can apply transformations to the grouped data.

    Args:
        keyfunc: Function to compute the key for grouping. Defaults to None.
        valuefunc: Function to transform individual items after grouping. Defaults to None.
        reducefunc: Function to transform each group of items. Defaults to None.

    Example:
    ```python
    >>> import pyochain as pc
    >>> data = pc.Iter.from_("aAAbBBcCC")
    >>> data.group_by_transform(
    ...     lambda k: k.upper(), lambda v: v.lower(), lambda g: "".join(g)
    ... ).into(list)
    [('A', 'aaa'), ('B', 'bbb'), ('C', 'ccc')]

    ```
    Each optional argument defaults to an identity function if not specified.

    group_by_transform is useful when grouping elements of an iterable using a separate iterable as the key.

    To do this, zip the iterables and pass a keyfunc that extracts the first element and a valuefunc that extracts the second element:

    Note that the order of items in the iterable is significant.

    Only adjacent items are grouped together, so if you don't want any duplicate groups, you should sort the iterable by the key function.

    Example:
    ```python
    >>> from operator import itemgetter
    >>> data = pc.Iter.from_([0, 0, 1, 1, 1, 2, 2, 2, 3])
    >>> data.zip("abcdefghi").group_by_transform(itemgetter(0), itemgetter(1)).map(
    ...     lambda kv: (kv[0], "".join(kv[1]))
    ... ).into(list)
    [(0, 'ab'), (1, 'cde'), (2, 'fgh'), (3, 'i')]

    ```
    """

    def _group_by_transform(data: Iterable[T]) -> Iterator[tuple[Any, ...]]:
        return mit.groupby_transform(data, keyfunc, valuefunc, reducefunc)

    return self.apply(_group_by_transform)

ichunked

ichunked(n: int) -> Iter[Iterator[T]]

Break iterable into sub-iterables with n elements each.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `n` | `int` | Number of elements in each chunk. | *required* |

If the sub-iterables are read in order, the elements of iterable won't be stored in memory.

If they are read out of order, itertools.tee is used to cache elements as necessary.

>>> import pyochain as pc
>>> all_chunks = pc.Iter.from_count().ichunked(4).unwrap()
>>> c_1, c_2, c_3 = next(all_chunks), next(all_chunks), next(all_chunks)
>>> list(c_2)  # c_1's elements have been cached; c_3's haven't been
[4, 5, 6, 7]
>>> list(c_1)
[0, 1, 2, 3]
>>> list(c_3)
[8, 9, 10, 11]

Source code in src/pyochain/_iter/_maps.py
def ichunked(self, n: int) -> Iter[Iterator[T]]:
    """

    Break *iterable* into sub-iterables with *n* elements each.

    Args:
        n: Number of elements in each chunk.

    If the sub-iterables are read in order, the elements of *iterable*
    won't be stored in memory.

    If they are read out of order, `itertools.tee` is used to cache
    elements as necessary.
    ```python
    >>> import pyochain as pc
    >>> all_chunks = pc.Iter.from_count().ichunked(4).unwrap()
    >>> c_1, c_2, c_3 = next(all_chunks), next(all_chunks), next(all_chunks)
    >>> list(c_2)  # c_1's elements have been cached; c_3's haven't been
    [4, 5, 6, 7]
    >>> list(c_1)
    [0, 1, 2, 3]
    >>> list(c_3)
    [8, 9, 10, 11]

    ```
    """
    return self.apply(mit.ichunked, n)

implode

implode() -> Iter[list[T]]

Wrap each element in the iterable into a list.

Syntactic sugar for Iter.map(lambda x: [x]).

>>> import pyochain as pc
>>> pc.Iter.from_(range(5)).implode().into(list)
[[0], [1], [2], [3], [4]]

Source code in src/pyochain/_iter/_lists.py
def implode(self) -> Iter[list[T]]:
    """
    Wrap each element in the iterable into a list.

    Syntactic sugar for `Iter.map(lambda x: [x])`.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_(range(5)).implode().into(list)
    [[0], [1], [2], [3], [4]]

    ```
    """

    def _implode(data: Iterable[T]) -> Generator[list[T], None, None]:
        return ([x] for x in data)

    return self.apply(_implode)

insert_left

insert_left(value: T) -> Iter[T]

Prepend value to the sequence and return a new Iterable wrapper.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `value` | `T` | The value to prepend. | *required* |

>>> import pyochain as pc
>>> pc.Iter.from_([2, 3]).insert_left(1).into(list)
[1, 2, 3]
Source code in src/pyochain/_iter/_process.py
def insert_left(self, value: T) -> Iter[T]:
    """
    Prepend value to the sequence and return a new Iterable wrapper.

    Args:
        value: The value to prepend.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([2, 3]).insert_left(1).into(list)
    [1, 2, 3]

    ```
    """
    return self.apply(partial(cz.itertoolz.cons, value))

interleave

interleave(*others: Iterable[T]) -> Iter[T]

Interleave multiple sequences element-wise.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `others` | `Iterable[T]` | Other iterables to interleave. | `()` |

>>> import pyochain as pc
>>> pc.Iter.from_([1, 2]).interleave([3, 4]).into(list)
[1, 3, 2, 4]
Source code in src/pyochain/_iter/_process.py
def interleave(self, *others: Iterable[T]) -> Iter[T]:
    """
    Interleave multiple sequences element-wise.

    Args:
        others: Other iterables to interleave.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2]).interleave([3, 4]).into(list)
    [1, 3, 2, 4]

    ```
    """

    def _interleave(data: Iterable[T]) -> Iterator[T]:
        return cz.itertoolz.interleave((data, *others))

    return self.apply(_interleave)

interpose

interpose(element: T) -> Iter[T]

Interpose element between items and return a new Iterable wrapper.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `element` | `T` | The element to interpose between items. | *required* |

Example:

>>> import pyochain as pc
>>> pc.Iter.from_([1, 2]).interpose(0).into(list)
[1, 0, 2]
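
A common use, sketched here rather than taken from the original docs, is interposing a separator string and then joining, since `into()` can hand the stream straight to `str.join`.

```python
>>> import pyochain as pc
>>> pc.Iter.from_(["a", "b", "c"]).interpose(", ").into("".join)
'a, b, c'

```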

Source code in src/pyochain/_iter/_process.py
def interpose(self, element: T) -> Iter[T]:
    """
    Interpose element between items and return a new Iterable wrapper.

    Args:
        element: The element to interpose between items.
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2]).interpose(0).into(list)
    [1, 0, 2]

    ```
    """
    return self.apply(partial(cz.itertoolz.interpose, element))

intersection

intersection(*others: Iterable[T]) -> Seq[T]

Return the elements common to this iterable and 'others'.

Note

This method consumes inner data, unsorts it, and removes duplicates.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `*others` | `Iterable[T]` | Other iterables to intersect with. | `()` |

Example:

>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 2]).intersection([2, 3], [2]).unwrap()
{2}

Source code in src/pyochain/_iter/_eager.py
def intersection(self, *others: Iterable[T]) -> Seq[T]:
    """
    Return the elements common to this iterable and 'others'.

    Note:
        This method consumes inner data, unsorts it, and removes duplicates.

    Args:
        *others: Other iterables to intersect with.
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 2]).intersection([2, 3], [2]).unwrap()
    {2}

    ```
    """

    def _intersection(data: Iterable[T]) -> set[T]:
        return set(data).intersection(*others)

    return self.collect(_intersection)

into

into(func: Callable[Concatenate[T, P], R], *args: P.args, **kwargs: P.kwargs) -> R

Pass the unwrapped underlying data into a function.

The result is not wrapped.

>>> import pyochain as pc
>>> pc.Iter.from_(range(5)).into(list)
[0, 1, 2, 3, 4]
This is a core functionality that allows ending the chain whilst keeping the code style consistent.
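
Since extra positional and keyword arguments are forwarded to the function, the unwrapped data always lands in the first parameter slot. A minimal sketch (not from the original docs), passing a keyword argument through:

```python
>>> import pyochain as pc
>>> pc.Iter.from_([3, 1, 2]).into(sorted, reverse=True)
[3, 2, 1]

```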

Source code in src/pyochain/_core/_main.py
def into[**P, R](
    self,
    func: Callable[Concatenate[T, P], R],
    *args: P.args,
    **kwargs: P.kwargs,
) -> R:
    """
    Pass the *unwrapped* underlying data into a function.

    The result is not wrapped.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_(range(5)).into(list)
    [0, 1, 2, 3, 4]

    ```
    This is a core functionality that allows ending the chain whilst keeping the code style consistent.
    """
    return func(self.unwrap(), *args, **kwargs)

is_distinct

is_distinct() -> bool

Return True if all items are distinct.

>>> import pyochain as pc
>>> pc.Iter.from_([1, 2]).is_distinct()
True

Source code in src/pyochain/_iter/_booleans.py
def is_distinct(self) -> bool:
    """
    Return True if all items are distinct.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2]).is_distinct()
    True

    ```
    """
    return self.into(cz.itertoolz.isdistinct)

is_sorted

is_sorted(
    key: Callable[[T], U] | None = None, reverse: bool = False, strict: bool = False
) -> bool

Returns True if the items of iterable are in sorted order.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `key` | `Callable[[T], U] \| None` | Function to transform items before comparison. Defaults to None. | `None` |
| `reverse` | `bool` | Whether to check for descending order. Defaults to False. | `False` |
| `strict` | `bool` | Whether to enforce strict sorting (no equal elements). Defaults to False. | `False` |

Example:

>>> import pyochain as pc
>>> pc.Iter.from_(["1", "2", "3", "4", "5"]).is_sorted(key=int)
True
>>> pc.Iter.from_([5, 4, 3, 1, 2]).is_sorted(reverse=True)
False

If strict, tests for strict sorting, that is, returns False if equal elements are found:
>>> pc.Iter.from_([1, 2, 2]).is_sorted()
True
>>> pc.Iter.from_([1, 2, 2]).is_sorted(strict=True)
False

The function returns False after encountering the first out-of-order item.

This means it may produce results that differ from the built-in sorted function for objects with unusual comparison dynamics (like math.nan).

If there are no out-of-order items, the iterable is exhausted.
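
Because of that early exit, the check can terminate even on an infinite iterator as soon as the first out-of-order pair appears. A hedged sketch using from_count from this same class:

```python
>>> import pyochain as pc
>>> pc.Iter.from_count(10, -1).is_sorted()  # 10, 9, 8, ... fails on the first pair
False

```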

Source code in src/pyochain/_iter/_booleans.py
def is_sorted[U](
    self,
    key: Callable[[T], U] | None = None,
    reverse: bool = False,
    strict: bool = False,
) -> bool:
    """
    Returns True if the items of iterable are in sorted order.

    Args:
        key: Function to transform items before comparison. Defaults to None.
        reverse: Whether to check for descending order. Defaults to False.
        strict: Whether to enforce strict sorting (no equal elements). Defaults to False.
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_(["1", "2", "3", "4", "5"]).is_sorted(key=int)
    True
    >>> pc.Iter.from_([5, 4, 3, 1, 2]).is_sorted(reverse=True)
    False

    ```

    If strict, tests for strict sorting, that is, returns False if equal elements are found:
    ```python
    >>> pc.Iter.from_([1, 2, 2]).is_sorted()
    True
    >>> pc.Iter.from_([1, 2, 2]).is_sorted(strict=True)
    False

    ```

    The function returns False after encountering the first out-of-order item.

    This means it may produce results that differ from the built-in sorted function for objects with unusual comparison dynamics (like math.nan).

    If there are no out-of-order items, the iterable is exhausted.
    """
    return self.into(mit.is_sorted, key=key, reverse=reverse, strict=strict)

is_strictly_n

is_strictly_n(
    n: int,
    too_short: Callable[[int], Iterator[T]] | Callable[[int], None] = _too_short,
    too_long: Callable[[int], Iterator[T]] | Callable[[int], None] = _too_long,
) -> Iter[T]

Validate that iterable has exactly n items and return them if it does.

If it has fewer than n items, call function too_short with the actual number of items.

If it has more than n items, call function too_long with the number n + 1.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `n` | `int` | The exact number of items expected. | *required* |
| `too_short` | `Callable[[int], Iterator[T]] \| Callable[[int], None]` | Function to call if there are too few items. | `_too_short` |
| `too_long` | `Callable[[int], Iterator[T]] \| Callable[[int], None]` | Function to call if there are too many items. | `_too_long` |

>>> import pyochain as pc
>>> iterable = ["a", "b", "c", "d"]
>>> n = 4
>>> pc.Iter.from_(iterable).is_strictly_n(n).into(list)
['a', 'b', 'c', 'd']
Note that the returned iterable must be consumed in order for the check to be made.

By default, too_short and too_long are functions that raise ValueError.

>>> pc.Iter.from_("ab").is_strictly_n(3).into(
...     list
... )  # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
ValueError: too few items in iterable (got 2)

>>> pc.Iter.from_("abc").is_strictly_n(2).into(
...     list
... )  # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
ValueError: too many items in iterable (got at least 3)
You can instead supply functions that do something else.

too_short will be called with the number of items in iterable.

too_long will be called with n + 1.

>>> def too_short(item_count):
...     raise RuntimeError
>>> pc.Iter.from_("abcd").is_strictly_n(6, too_short=too_short).into(list)
Traceback (most recent call last):
...
RuntimeError
>>> def too_long(item_count):
...     print("The boss is going to hear about this")
>>> pc.Iter.from_("abcdef").is_strictly_n(4, too_long=too_long).into(list)
The boss is going to hear about this
['a', 'b', 'c', 'd']

Source code in src/pyochain/_iter/_process.py
def is_strictly_n(
    self,
    n: int,
    too_short: Callable[[int], Iterator[T]] | Callable[[int], None] = _too_short,
    too_long: Callable[[int], Iterator[T]] | Callable[[int], None] = _too_long,
) -> Iter[T]:
    """
    Validate that *iterable* has exactly *n* items and return them if it does.

    If it has fewer than *n* items, call function *too_short* with the actual number of items.

    If it has more than *n* items, call function *too_long* with the number `n + 1`.

    Args:
        n: The exact number of items expected.
        too_short: Function to call if there are too few items.
        too_long: Function to call if there are too many items.
    ```python
    >>> import pyochain as pc
    >>> iterable = ["a", "b", "c", "d"]
    >>> n = 4
    >>> pc.Iter.from_(iterable).is_strictly_n(n).into(list)
    ['a', 'b', 'c', 'd']

    ```
    Note that the returned iterable must be consumed in order for the check to
    be made.

    By default, *too_short* and *too_long* are functions that raise `ValueError`.
    ```python
    >>> pc.Iter.from_("ab").is_strictly_n(3).into(
    ...     list
    ... )  # doctest: +IGNORE_EXCEPTION_DETAIL
    Traceback (most recent call last):
    ...
    ValueError: too few items in iterable (got 2)

    >>> pc.Iter.from_("abc").is_strictly_n(2).into(
    ...     list
    ... )  # doctest: +IGNORE_EXCEPTION_DETAIL
    Traceback (most recent call last):
    ...
    ValueError: too many items in iterable (got at least 3)

    ```
    You can instead supply functions that do something else.

    *too_short* will be called with the number of items in *iterable*.

    *too_long* will be called with `n + 1`.
    ```python
    >>> def too_short(item_count):
    ...     raise RuntimeError
    >>> pc.Iter.from_("abcd").is_strictly_n(6, too_short=too_short).into(list)
    Traceback (most recent call last):
    ...
    RuntimeError
    >>> def too_long(item_count):
    ...     print("The boss is going to hear about this")
    >>> pc.Iter.from_("abcdef").is_strictly_n(4, too_long=too_long).into(list)
    The boss is going to hear about this
    ['a', 'b', 'c', 'd']

    ```
    """

    def strictly_n_(iterable: Iterable[T]) -> Generator[T, Any, None]:
        """from more_itertools.strictly_n"""
        it = iter(iterable)

        sent = 0
        for item in itertools.islice(it, n):
            yield item
            sent += 1

        if sent < n:
            too_short(sent)
            return

        for item in it:
            too_long(n + 1)
            return

    return self.apply(strictly_n_)

item

item(index: int) -> T

Return item at index.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `index` | `int` | The index of the item to retrieve. | *required* |

>>> import pyochain as pc
>>> pc.Iter.from_([10, 20]).item(1)
20
Source code in src/pyochain/_iter/_aggregations.py
def item(self, index: int) -> T:
    """
    Return item at index.

    Args:
        index: The index of the item to retrieve.

    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([10, 20]).item(1)
    20

    ```
    """
    return self.into(functools.partial(cz.itertoolz.nth, index))

itr

itr(
    func: Callable[Concatenate[Iter[U], P], R], *args: P.args, **kwargs: P.kwargs
) -> Iter[R]

Apply a function to each element after wrapping it in an Iter.

This is a convenience method for the common pattern of mapping a function over an iterable of iterables.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `func` | `Callable[Concatenate[Iter[U], P], R]` | Function to apply to each wrapped element. | *required* |
| `*args` | `P.args` | Positional arguments to pass to the function. | `()` |
| `**kwargs` | `P.kwargs` | Keyword arguments to pass to the function. | `{}` |

Example:

>>> import pyochain as pc
>>> data = [
...     [1, 2, 3],
...     [4, 5],
...     [6, 7, 8, 9],
... ]
>>> pc.Iter.from_(data).itr(
...     lambda x: x.repeat(2).flatten().reduce(lambda a, b: a + b)
... ).into(list)
[12, 18, 60]

Source code in src/pyochain/_iter/_main.py
def itr[**P, R, U: Iterable[Any]](
    self: Iter[U],
    func: Callable[Concatenate[Iter[U], P], R],
    *args: P.args,
    **kwargs: P.kwargs,
) -> Iter[R]:
    """
    Apply a function to each element after wrapping it in an Iter.

    This is a convenience method for the common pattern of mapping a function over an iterable of iterables.

    Args:
        func: Function to apply to each wrapped element.
        *args: Positional arguments to pass to the function.
        **kwargs: Keyword arguments to pass to the function.
    Example:
    ```python
    >>> import pyochain as pc
    >>> data = [
    ...     [1, 2, 3],
    ...     [4, 5],
    ...     [6, 7, 8, 9],
    ... ]
    >>> pc.Iter.from_(data).itr(
    ...     lambda x: x.repeat(2).flatten().reduce(lambda a, b: a + b)
    ... ).into(list)
    [12, 18, 60]

    ```
    """

    def _itr(data: Iterable[U]) -> Generator[R, None, None]:
        return (func(Iter.from_(x), *args, **kwargs) for x in data)

    return self.apply(_itr)

join

join(
    other: Iterable[R],
    left_on: Callable[[T], K],
    right_on: Callable[[R], K],
    left_default: T | None = None,
    right_default: R | None = None,
) -> Iter[tuple[T, R]]

Perform a relational join with another iterable.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `other` | `Iterable[R]` | Iterable to join with. | *required* |
| `left_on` | `Callable[[T], K]` | Function to extract the join key from the left iterable. | *required* |
| `right_on` | `Callable[[R], K]` | Function to extract the join key from the right iterable. | *required* |
| `left_default` | `T \| None` | Default value for missing elements in the left iterable. Defaults to None. | `None` |
| `right_default` | `R \| None` | Default value for missing elements in the right iterable. Defaults to None. | `None` |

Example:

>>> import pyochain as pc
>>> colors = pc.Iter.from_(["blue", "red"])
>>> sizes = ["S", "M"]
>>> colors.join(sizes, left_on=lambda c: c, right_on=lambda s: s).into(list)
[(None, 'S'), (None, 'M'), ('blue', None), ('red', None)]

Source code in src/pyochain/_iter/_joins.py
def join[R, K](
    self,
    other: Iterable[R],
    left_on: Callable[[T], K],
    right_on: Callable[[R], K],
    left_default: T | None = None,
    right_default: R | None = None,
) -> Iter[tuple[T, R]]:
    """
    Perform a relational join with another iterable.

    Args:
        other: Iterable to join with.
        left_on: Function to extract the join key from the left iterable.
        right_on: Function to extract the join key from the right iterable.
        left_default: Default value for missing elements in the left iterable. Defaults to None.
        right_default: Default value for missing elements in the right iterable. Defaults to None.
    Example:
    ```python
    >>> import pyochain as pc
    >>> colors = pc.Iter.from_(["blue", "red"])
    >>> sizes = ["S", "M"]
    >>> colors.join(sizes, left_on=lambda c: c, right_on=lambda s: s).into(list)
    [(None, 'S'), (None, 'M'), ('blue', None), ('red', None)]

    ```
    """

    def _join(data: Iterable[T]) -> Iterator[tuple[T, R]]:
        return cz.itertoolz.join(
            leftkey=left_on,
            leftseq=data,
            rightkey=right_on,
            rightseq=other,
            left_default=left_default,
            right_default=right_default,
        )

    return self.apply(_join)

last

last() -> T

Return the last element.

>>> import pyochain as pc
>>> pc.Iter.from_([7, 8, 9]).last()
9

Source code in src/pyochain/_iter/_aggregations.py
def last(self) -> T:
    """
    Return the last element.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([7, 8, 9]).last()
    9

    ```
    """
    return self.into(cz.itertoolz.last)

map

map(func: Callable[[T], R]) -> Iter[R]

Map each element through func and return an Iter of results.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `func` | `Callable[[T], R]` | Function to apply to each element. | *required* |

>>> import pyochain as pc
>>> pc.Iter.from_([1, 2]).map(lambda x: x + 1).into(list)
[2, 3]
Source code in src/pyochain/_iter/_maps.py
def map[R](self, func: Callable[[T], R]) -> Iter[R]:
    """
    Map each element through func and return an Iter of results.

    Args:
        func: Function to apply to each element.

    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2]).map(lambda x: x + 1).into(list)
    [2, 3]

    ```
    """
    return self.apply(partial(map, func))

map_except

map_except(func: Callable[[T], R], *exceptions: type[BaseException]) -> Iter[R]

Transform each item from iterable with function and yield the result, unless function raises one of the specified exceptions.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `func` | `Callable[[T], R]` | Function to apply to each item. | *required* |
| `exceptions` | `type[BaseException]` | Exceptions to catch and ignore. | `()` |

The function is called to transform each item in the iterable.

If an exception other than one given by exceptions is raised by function, it is raised like normal.

>>> import pyochain as pc
>>> iterable = ["1", "2", "three", "4", None]
>>> pc.Iter.from_(iterable).map_except(int, ValueError, TypeError).into(list)
[1, 2, 4]

Source code in src/pyochain/_iter/_maps.py
def map_except[R](
    self, func: Callable[[T], R], *exceptions: type[BaseException]
) -> Iter[R]:
    """
    Transform each item from iterable with function and yield the result, unless function raises one of the specified exceptions.

    Args:
        func: Function to apply to each item.
        exceptions: Exceptions to catch and ignore.

    The function is called to transform each item in the iterable.

    If an exception other than one given by exceptions is raised by function, it is raised like normal.
    ```python
    >>> import pyochain as pc
    >>> iterable = ["1", "2", "three", "4", None]
    >>> pc.Iter.from_(iterable).map_except(int, ValueError, TypeError).into(list)
    [1, 2, 4]

    ```
    """

    def _map_except(data: Iterable[T]) -> Iterator[R]:
        return mit.map_except(func, data, *exceptions)

    return self.apply(_map_except)

map_if

map_if(
    predicate: Callable[[T], bool],
    func: Callable[[T], R],
    func_else: Callable[[T], R] | None = None,
) -> Iter[R]

Evaluate each item from the iterable using predicate.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `predicate` | `Callable[[T], bool]` | Function to evaluate each item. | *required* |
| `func` | `Callable[[T], R]` | Function to apply if predicate is True. | *required* |
| `func_else` | `Callable[[T], R] \| None` | Function to apply if predicate is False. | `None` |

  • If the result is equivalent to True, transform the item with func and yield it.
  • Otherwise, transform the item with func_else and yield it.
  • Predicate, func, and func_else should each be functions that accept one argument.

By default, func_else is the identity function.

>>> import pyochain as pc
>>> from math import sqrt
>>> iterable = pc.Iter.from_(range(-5, 5)).collect()
>>> iterable.into(list)
[-5, -4, -3, -2, -1, 0, 1, 2, 3, 4]
>>> iterable.iter().map_if(lambda x: x > 3, lambda x: "toobig").into(list)
[-5, -4, -3, -2, -1, 0, 1, 2, 3, 'toobig']
>>> iterable.iter().map_if(
...     lambda x: x >= 0,
...     lambda x: f"{sqrt(x):.2f}",
...     lambda x: None,
... ).into(list)
[None, None, None, None, None, '0.00', '1.00', '1.41', '1.73', '2.00']

Source code in src/pyochain/_iter/_maps.py
def map_if[R](
    self,
    predicate: Callable[[T], bool],
    func: Callable[[T], R],
    func_else: Callable[[T], R] | None = None,
) -> Iter[R]:
    """
    Evaluate each item from the iterable using predicate.

    Args:
        predicate: Function to evaluate each item.
        func: Function to apply if predicate is True.
        func_else: Function to apply if predicate is False.

    - If the result is equivalent to True, transform the item with func and yield it.
    - Otherwise, transform the item with func_else and yield it.
    - Predicate, func, and func_else should each be functions that accept one argument.

    By default, func_else is the identity function.
    ```python
    >>> import pyochain as pc
    >>> from math import sqrt
    >>> iterable = pc.Iter.from_(range(-5, 5)).collect()
    >>> iterable.into(list)
    [-5, -4, -3, -2, -1, 0, 1, 2, 3, 4]
    >>> iterable.iter().map_if(lambda x: x > 3, lambda x: "toobig").into(list)
    [-5, -4, -3, -2, -1, 0, 1, 2, 3, 'toobig']
    >>> iterable.iter().map_if(
    ...     lambda x: x >= 0,
    ...     lambda x: f"{sqrt(x):.2f}",
    ...     lambda x: None,
    ... ).into(list)
    [None, None, None, None, None, '0.00', '1.00', '1.41', '1.73', '2.00']

    ```
    """
    return self.apply(mit.map_if, predicate, func, func_else=func_else)

map_juxt

map_juxt(func1: Callable[[T], R1], func2: Callable[[T], R2]) -> Iter[tuple[R1, R2]]
map_juxt(
    func1: Callable[[T], R1], func2: Callable[[T], R2], func3: Callable[[T], R3]
) -> Iter[tuple[R1, R2, R3]]
map_juxt(
    func1: Callable[[T], R1],
    func2: Callable[[T], R2],
    func3: Callable[[T], R3],
    func4: Callable[[T], R4],
) -> Iter[tuple[R1, R2, R3, R4]]
map_juxt(*funcs: Callable[[T], object]) -> Iter[tuple[object, ...]]

Apply several functions to each item.

Returns a new Iter where each item is a tuple of the results of applying each function to the original item.

>>> import pyochain as pc
>>> def is_even(n: int) -> bool:
...     return n % 2 == 0
>>> def is_positive(n: int) -> bool:
...     return n > 0
>>>
>>> pc.Iter.from_([1, -2, 3]).map_juxt(is_even, is_positive).into(list)
[(False, True), (True, False), (False, True)]

Source code in src/pyochain/_iter/_tuples.py
def map_juxt(self, *funcs: Callable[[T], object]) -> Iter[tuple[object, ...]]:
    """
    Apply several functions to each item.

    Returns a new Iter where each item is a tuple of the results of applying each function to the original item.
    ```python
    >>> import pyochain as pc
    >>> def is_even(n: int) -> bool:
    ...     return n % 2 == 0
    >>> def is_positive(n: int) -> bool:
    ...     return n > 0
    >>>
    >>> pc.Iter.from_([1, -2, 3]).map_juxt(is_even, is_positive).into(list)
    [(False, True), (True, False), (False, True)]

    ```
    """
    return self.apply(partial(map, cz.functoolz.juxt(*funcs)))

map_star

map_star(func: Callable[..., R]) -> Iter[R]

Applies a function to each element, where each element is an iterable.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `func` | `Callable[..., R]` | Function to apply to unpacked elements. | *required* |

Unlike .map(), which passes each element as a single argument, .map_star() unpacks each element into positional arguments for the function.

In short, for each element in the sequence, it computes func(*element).

>>> import pyochain as pc
>>> def make_sku(color, size):
...     return f"{color}-{size}"
>>> data = pc.Seq(["blue", "red"])
>>> data.iter().product(["S", "M"]).map_star(make_sku).into(list)
['blue-S', 'blue-M', 'red-S', 'red-M']
This is equivalent to:
>>> data.iter().product(["S", "M"]).map(lambda x: make_sku(*x)).into(list)
['blue-S', 'blue-M', 'red-S', 'red-M']
  • Use map_star when performance matters (it is faster).
  • Use map with unpacking when readability matters (the types can be inferred).

Source code in src/pyochain/_iter/_maps.py
def map_star[U: Iterable[Any], R](
    self: IterWrapper[U], func: Callable[..., R]
) -> Iter[R]:
    """
    Applies a function to each element, where each element is an iterable.

    Args:
        func: Function to apply to unpacked elements.

    Unlike `.map()`, which passes each element as a single argument, `.map_star()` unpacks each element into positional arguments for the function.

    In short, for each `element` in the sequence, it computes `func(*element)`.
    ```python
    >>> import pyochain as pc
    >>> def make_sku(color, size):
    ...     return f"{color}-{size}"
    >>> data = pc.Seq(["blue", "red"])
    >>> data.iter().product(["S", "M"]).map_star(make_sku).into(list)
    ['blue-S', 'blue-M', 'red-S', 'red-M']

    ```
    This is equivalent to:
    ```python
    >>> data.iter().product(["S", "M"]).map(lambda x: make_sku(*x)).into(list)
    ['blue-S', 'blue-M', 'red-S', 'red-M']

    ```
    - Use map_star when the performance matters (it is faster).
    - Use map with unpacking when readability matters (the types can be inferred).
    """

    return self.apply(partial(itertools.starmap, func))

max

max() -> U

Return the maximum of the sequence.

>>> import pyochain as pc
>>> pc.Iter.from_([3, 1, 2]).max()
3

Source code in src/pyochain/_iter/_aggregations.py
def max[U: int | float](self: IterWrapper[U]) -> U:
    """
    Return the maximum of the sequence.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([3, 1, 2]).max()
    3

    ```
    """
    return self.into(max)

mean

mean() -> float

Return the mean of the sequence.

>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 3]).mean()
2

Source code in src/pyochain/_iter/_aggregations.py
def mean[U: int | float](self: IterWrapper[U]) -> float:
    """
    Return the mean of the sequence.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 3]).mean()
    2

    ```
    """
    return self.into(statistics.mean)

median

median() -> float

Return the median of the sequence.

>>> import pyochain as pc
>>> pc.Iter.from_([1, 3, 2]).median()
2

Source code in src/pyochain/_iter/_aggregations.py
def median[U: int | float](self: IterWrapper[U]) -> float:
    """
    Return the median of the sequence.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 3, 2]).median()
    2

    ```
    """
    return self.into(statistics.median)

merge_sorted

merge_sorted(
    *others: Iterable[T], sort_on: Callable[[T], Any] | None = None
) -> Iter[T]

Merge already-sorted sequences.

Parameters:

Name Type Description Default
others Iterable[T]

Other sorted iterables to merge.

()
sort_on Callable[[T], Any] | None

Optional key function for sorting.

None
>>> import pyochain as pc
>>> pc.Iter.from_([1, 3]).merge_sorted([2, 4]).into(list)
[1, 2, 3, 4]
Source code in src/pyochain/_iter/_process.py
def merge_sorted(
    self, *others: Iterable[T], sort_on: Callable[[T], Any] | None = None
) -> Iter[T]:
    """
    Merge already-sorted sequences.

    Args:
        others: Other sorted iterables to merge.
        sort_on: Optional key function for sorting.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 3]).merge_sorted([2, 4]).into(list)
    [1, 2, 3, 4]

    ```
    """
    return self.apply(cz.itertoolz.merge_sorted, *others, key=sort_on)
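
The sort_on key is not covered by the example above; a minimal sketch of how it can be used, assuming (as merge_sorted requires) that each input is already sorted according to that key:
```python
>>> import pyochain as pc
>>> # Both inputs are sorted descending, so merge on the negated value.
>>> pc.Iter.from_([3, 1]).merge_sorted([4, 2], sort_on=lambda x: -x).into(list)
[4, 3, 2, 1]

```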

min

min() -> U

Return the minimum of the sequence.

>>> import pyochain as pc
>>> pc.Iter.from_([3, 1, 2]).min()
1

Source code in src/pyochain/_iter/_aggregations.py
def min[U: int | float](self: IterWrapper[U]) -> U:
    """
    Return the minimum of the sequence.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([3, 1, 2]).min()
    1

    ```
    """
    return self.into(min)

mode

mode() -> U

Return the mode of the sequence.

>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 2, 3]).mode()
2

Source code in src/pyochain/_iter/_aggregations.py
def mode[U: int | float](self: IterWrapper[U]) -> U:
    """
    Return the mode of the sequence.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 2, 3]).mode()
    2

    ```
    """
    return self.into(statistics.mode)

most_common

most_common(n: int | None = None) -> Seq[tuple[T, int]]

Return the n most common elements and their counts.

If n is None, then all elements are returned.

Parameters:

Name Type Description Default
n int | None

Number of most common elements to return. Defaults to None (all elements).

None

Example:

>>> import pyochain as pc
>>> pc.Iter.from_([1, 1, 2, 3, 3, 3]).most_common(2).unwrap()
[(3, 3), (1, 2)]

Source code in src/pyochain/_iter/_eager.py
def most_common(self, n: int | None = None) -> Seq[tuple[T, int]]:
    """
    Return the n most common elements and their counts.

    If n is None, then all elements are returned.

    Args:
        n: Number of most common elements to return. Defaults to None (all elements).
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 1, 2, 3, 3, 3]).most_common(2).unwrap()
    [(3, 3), (1, 2)]

    ```
    """
    from collections import Counter

    from ._main import Seq

    def _most_common(data: Iterable[T]) -> list[tuple[T, int]]:
        return Counter(data).most_common(n)

    return Seq(self.into(_most_common))

pairwise

pairwise() -> Iter[tuple[T, T]]

Return an iterator over pairs of consecutive elements.

>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 3]).pairwise().into(list)
[(1, 2), (2, 3)]

Source code in src/pyochain/_iter/_tuples.py
def pairwise(self) -> Iter[tuple[T, T]]:
    """
    Return an iterator over pairs of consecutive elements.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 3]).pairwise().into(list)
    [(1, 2), (2, 3)]

    ```
    """
    return self.apply(itertools.pairwise)

partition

partition(n: Literal[1], pad: None = None) -> Iter[tuple[T]]
partition(n: Literal[2], pad: None = None) -> Iter[tuple[T, T]]
partition(n: Literal[3], pad: None = None) -> Iter[tuple[T, T, T]]
partition(n: Literal[4], pad: None = None) -> Iter[tuple[T, T, T, T]]
partition(n: Literal[5], pad: None = None) -> Iter[tuple[T, T, T, T, T]]
partition(n: int, pad: int) -> Iter[tuple[T, ...]]
partition(n: int, pad: int | None = None) -> Iter[tuple[T, ...]]

Partition sequence into tuples of length n.

Parameters:

Name Type Description Default
n int

Length of each partition.

required
pad int | None

Value to pad the last partition if needed.

None

>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 3, 4]).partition(2).into(list)
[(1, 2), (3, 4)]
If the length of seq is not evenly divisible by n, the final tuple is filled to length n with pad (None when not specified):
>>> pc.Iter.from_([1, 2, 3, 4, 5]).partition(2).into(list)
[(1, 2), (3, 4), (5, None)]

Source code in src/pyochain/_iter/_partitions.py
def partition(self, n: int, pad: int | None = None) -> Iter[tuple[T, ...]]:
    """
    Partition sequence into tuples of length n.

    Args:
        n: Length of each partition.
        pad: Value to pad the last partition if needed.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 3, 4]).partition(2).into(list)
    [(1, 2), (3, 4)]

    ```
    If the length of seq is not evenly divisible by n, the final tuple is filled to length n with pad (None when not specified):
    ```python
    >>> pc.Iter.from_([1, 2, 3, 4, 5]).partition(2).into(list)
    [(1, 2), (3, 4), (5, None)]

    ```
    """

    return self.apply(partial(cz.itertoolz.partition, n, pad=pad))
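
A quick sketch of an explicit pad value, based on the behaviour documented above:
```python
>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 3, 4, 5]).partition(2, pad=0).into(list)
[(1, 2), (3, 4), (5, 0)]

```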

partition_all

partition_all(n: int) -> Iter[tuple[T, ...]]

Partition all elements of sequence into tuples of length at most n.

Parameters:

Name Type Description Default
n int

Maximum length of each partition.

required

The final tuple may be shorter to accommodate extra elements.

>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 3, 4]).partition_all(2).into(list)
[(1, 2), (3, 4)]
>>> pc.Iter.from_([1, 2, 3, 4, 5]).partition_all(2).into(list)
[(1, 2), (3, 4), (5,)]

Source code in src/pyochain/_iter/_partitions.py
def partition_all(self, n: int) -> Iter[tuple[T, ...]]:
    """
    Partition all elements of sequence into tuples of length at most n.

    Args:
        n: Maximum length of each partition.
    The final tuple may be shorter to accommodate extra elements.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 3, 4]).partition_all(2).into(list)
    [(1, 2), (3, 4)]
    >>> pc.Iter.from_([1, 2, 3, 4, 5]).partition_all(2).into(list)
    [(1, 2), (3, 4), (5,)]

    ```
    """
    return self.apply(partial(cz.itertoolz.partition_all, n))

partition_by

partition_by(predicate: Callable[[T], bool]) -> Iter[tuple[T, ...]]

Partition the iterable into a sequence of tuples according to a predicate function.

Every time the output of predicate changes, a new tuple is started, and subsequent items are collected into that tuple.

Parameters:

Name Type Description Default
predicate Callable[[T], bool]

Function to determine partition boundaries.

required
>>> import pyochain as pc
>>> pc.Iter.from_("I have space").partition_by(lambda c: c == " ").into(list)
[('I',), (' ',), ('h', 'a', 'v', 'e'), (' ',), ('s', 'p', 'a', 'c', 'e')]
>>>
>>> data = [1, 2, 1, 99, 88, 33, 99, -1, 5]
>>> pc.Iter.from_(data).partition_by(lambda x: x > 10).into(list)
[(1, 2, 1), (99, 88, 33, 99), (-1, 5)]
Source code in src/pyochain/_iter/_partitions.py
def partition_by(self, predicate: Callable[[T], bool]) -> Iter[tuple[T, ...]]:
    """
    Partition the `iterable` into a sequence of `tuples` according to a predicate function.

    Every time the output of `predicate` changes, a new `tuple` is started,
    and subsequent items are collected into that `tuple`.

    Args:
        predicate: Function to determine partition boundaries.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_("I have space").partition_by(lambda c: c == " ").into(list)
    [('I',), (' ',), ('h', 'a', 'v', 'e'), (' ',), ('s', 'p', 'a', 'c', 'e')]
    >>>
    >>> data = [1, 2, 1, 99, 88, 33, 99, -1, 5]
    >>> pc.Iter.from_(data).partition_by(lambda x: x > 10).into(list)
    [(1, 2, 1), (99, 88, 33, 99), (-1, 5)]

    ```
    """
    return self.apply(partial(cz.recipes.partitionby, predicate))

peek

peek() -> Iter[T]

Print and return sequence after peeking first item.

>>> import pyochain as pc
>>> pc.Iter.from_([1, 2]).peek().into(list)
Peeked value: 1
[1, 2]

Source code in src/pyochain/_iter/_process.py
def peek(self) -> Iter[T]:
    """
    Print and return sequence after peeking first item.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2]).peek().into(list)
    Peeked value: 1
    [1, 2]

    ```
    """

    def _peek(data: Iterable[T]) -> Iterator[T]:
        peeked = Peeked(*cz.itertoolz.peek(data))
        print(f"Peeked value: {peeked.value}")
        return peeked.sequence

    return self.apply(_peek)

peekn

peekn(n: int) -> Iter[T]

Print and return sequence after peeking n items.

Parameters:

Name Type Description Default
n int

Number of items to peek.

required
>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 3]).peekn(2).into(list)
Peeked 2 values: (1, 2)
[1, 2, 3]
Source code in src/pyochain/_iter/_process.py
def peekn(self, n: int) -> Iter[T]:
    """
    Print and return sequence after peeking n items.

    Args:
        n: Number of items to peek.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 3]).peekn(2).into(list)
    Peeked 2 values: (1, 2)
    [1, 2, 3]

    ```
    """

    def _peekn(data: Iterable[T]) -> Iterator[T]:
        peeked = Peeked(*cz.itertoolz.peekn(n, data))
        print(f"Peeked {n} values: {peeked.value}")
        return peeked.sequence

    return self.apply(_peekn)

permutations

permutations(r: Literal[2]) -> Iter[tuple[T, T]]
permutations(r: Literal[3]) -> Iter[tuple[T, T, T]]
permutations(r: Literal[4]) -> Iter[tuple[T, T, T, T]]
permutations(r: Literal[5]) -> Iter[tuple[T, T, T, T, T]]
permutations(r: int | None = None) -> Iter[tuple[T, ...]]

Return all permutations of length r.

Parameters:

Name Type Description Default
r int | None

Length of each permutation. Defaults to the length of the iterable.

None
>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 3]).permutations(2).into(list)
[(1, 2), (1, 3), (2, 1), (2, 3), (3, 1), (3, 2)]
Source code in src/pyochain/_iter/_tuples.py
def permutations(self, r: int | None = None) -> Iter[tuple[T, ...]]:
    """
    Return all permutations of length r.

    Args:
        r: Length of each permutation. Defaults to the length of the iterable.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 3]).permutations(2).into(list)
    [(1, 2), (1, 3), (2, 1), (2, 3), (3, 1), (3, 2)]

    ```
    """
    return self.apply(itertools.permutations, r)
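
When r is omitted it defaults to the full length of the iterable; a small sketch:
```python
>>> import pyochain as pc
>>> pc.Iter.from_([1, 2]).permutations().into(list)
[(1, 2), (2, 1)]

```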

pipe

pipe(func: Callable[Concatenate[Self, P], R], *args: P.args, **kwargs: P.kwargs) -> R

Pipe the instance in the function and return the result.

Source code in src/pyochain/_core/_main.py
def pipe[**P, R](
    self,
    func: Callable[Concatenate[Self, P], R],
    *args: P.args,
    **kwargs: P.kwargs,
) -> R:
    """Pipe the instance in the function and return the result."""
    return func(self, *args, **kwargs)
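
No example is given above; a minimal sketch of piping the wrapper into a plain function (total is a hypothetical helper written for this illustration, not part of the library):
```python
>>> import pyochain as pc
>>> def total(it: pc.Iter[int], start: int = 0) -> int:
...     # `it` is the Iter instance itself; unwrap it to reach the raw iterator.
...     return start + sum(it.unwrap())
>>> pc.Iter.from_([1, 2, 3]).pipe(total, start=10)
16

```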

pluck

pluck(*keys: str | int) -> Iter[Any]

Get an element from each item in a sequence using a nested key path.

Parameters:

Name Type Description Default
keys str | int

Nested keys to extract values.

()

>>> import pyochain as pc
>>> data = pc.Seq(
...     [
...         {"id": 1, "info": {"name": "Alice", "age": 30}},
...         {"id": 2, "info": {"name": "Bob", "age": 25}},
...     ]
... )
>>> data.iter().pluck("info").into(list)
[{'name': 'Alice', 'age': 30}, {'name': 'Bob', 'age': 25}]
>>> data.iter().pluck("info", "name").into(list)
['Alice', 'Bob']
Example: get the maximum age along with the corresponding id:
>>> data.iter().pluck("info", "age").zip(
...     data.iter().pluck("id").into(list)
... ).max()
(30, 1)

Source code in src/pyochain/_iter/_maps.py
def pluck[U: Mapping[Any, Any]](
    self: IterWrapper[U], *keys: str | int
) -> Iter[Any]:
    """
    Get an element from each item in a sequence using a nested key path.

    Args:
        keys: Nested keys to extract values.

    ```python
    >>> import pyochain as pc
    >>> data = pc.Seq(
    ...     [
    ...         {"id": 1, "info": {"name": "Alice", "age": 30}},
    ...         {"id": 2, "info": {"name": "Bob", "age": 25}},
    ...     ]
    ... )
    >>> data.iter().pluck("info").into(list)
    [{'name': 'Alice', 'age': 30}, {'name': 'Bob', 'age': 25}]
    >>> data.iter().pluck("info", "name").into(list)
    ['Alice', 'Bob']

    ```
    Example: get the maximum age along with the corresponding id:
    ```python
    >>> data.iter().pluck("info", "age").zip(
    ...     data.iter().pluck("id").into(list)
    ... ).max()
    (30, 1)

    ```
    """

    getter = partial(cz.dicttoolz.get_in, keys)
    return self.apply(partial(map, getter))

println

println(pretty: bool = True) -> Self

Print the underlying data and return self for chaining.

Useful for debugging: simply insert .println() anywhere in the chain; removing it later will not affect the rest of the chain.

Source code in src/pyochain/_core/_main.py
def println(self, pretty: bool = True) -> Self:
    """
    Print the underlying data and return self for chaining.

    Useful for debugging: simply insert `.println()` anywhere in the chain;
    removing it later will not affect the rest of the chain.
    """
    from pprint import pprint

    if pretty:
        pprint(self.unwrap(), sort_dicts=False)
    else:
        print(self.unwrap())
    return self
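
No example is given above. Note that for a lazy Iter the underlying data is an iterator, so the printed value would typically be the iterator object itself; a minimal sketch using a materialized Seq instead (the first output line comes from the print, the second is the returned value):
```python
>>> import pyochain as pc
>>> pc.Seq([1, 2, 3]).println().unwrap()
[1, 2, 3]
[1, 2, 3]

```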

product

product() -> Iter[tuple[T]]
product(iter1: Iterable[T1]) -> Iter[tuple[T, T1]]
product(iter1: Iterable[T1], iter2: Iterable[T2]) -> Iter[tuple[T, T1, T2]]
product(
    iter1: Iterable[T1], iter2: Iterable[T2], iter3: Iterable[T3]
) -> Iter[tuple[T, T1, T2, T3]]
product(
    iter1: Iterable[T1], iter2: Iterable[T2], iter3: Iterable[T3], iter4: Iterable[T4]
) -> Iter[tuple[T, T1, T2, T3, T4]]
product(*others: Iterable[Any]) -> Iter[tuple[Any, ...]]

Computes the Cartesian product with another iterable. This is the declarative equivalent of nested for-loops.

It pairs every element from the source iterable with every element from the other iterable.

Parameters:

Name Type Description Default
*others Iterable[Any]

Other iterables to compute the Cartesian product with.

()

Example:

>>> import pyochain as pc
>>> colors = pc.Iter.from_(["blue", "red"])
>>> sizes = ["S", "M"]
>>> colors.product(sizes).into(list)
[('blue', 'S'), ('blue', 'M'), ('red', 'S'), ('red', 'M')]

Source code in src/pyochain/_iter/_joins.py
def product(self, *others: Iterable[Any]) -> Iter[tuple[Any, ...]]:
    """
    Computes the Cartesian product with another iterable.
    This is the declarative equivalent of nested for-loops.

    It pairs every element from the source iterable with every element from the
    other iterable.

    Args:
        *others: Other iterables to compute the Cartesian product with.
    Example:
    ```python
    >>> import pyochain as pc
    >>> colors = pc.Iter.from_(["blue", "red"])
    >>> sizes = ["S", "M"]
    >>> colors.product(sizes).into(list)
    [('blue', 'S'), ('blue', 'M'), ('red', 'S'), ('red', 'M')]

    ```
    """
    return self.apply(itertools.product, *others)
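
A sketch of the nested-for-loop equivalence mentioned above:
```python
>>> import pyochain as pc
>>> colors = ["blue", "red"]
>>> sizes = ["S", "M"]
>>> pairs = []
>>> for color in colors:
...     for size in sizes:
...         pairs.append((color, size))
>>> pairs == pc.Iter.from_(colors).product(sizes).into(list)
True

```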

random_sample

random_sample(probability: float, state: Random | int | None = None) -> Iter[T]

Return elements from a sequence, each selected with the given probability.

Returns a lazy iterator of random items from seq.

random_sample considers each item independently and without replacement.

See below how the first time it returned 13 items and the next time it returned 6 items.

Parameters:

Name Type Description Default
probability float

The probability of including each element.

required
state Random | int | None

Random state or seed for deterministic sampling.

None

>>> import pyochain as pc
>>> data = pc.Seq(list(range(100)))
>>> data.iter().random_sample(0.1).into(list)  # doctest: +SKIP
[6, 9, 19, 35, 45, 50, 58, 62, 68, 72, 78, 86, 95]
>>> data.iter().random_sample(0.1).into(list)  # doctest: +SKIP
[6, 44, 54, 61, 69, 94]
Providing an integer seed for state will result in deterministic sampling.

Given the same seed it will return the same sample every time.

>>> data.iter().random_sample(0.1, state=2016).into(list)
[7, 9, 19, 25, 30, 32, 34, 48, 59, 60, 81, 98]
>>> data.iter().random_sample(0.1, state=2016).into(list)
[7, 9, 19, 25, 30, 32, 34, 48, 59, 60, 81, 98]
state can also be any object with a random() method that returns floats between 0.0 and 1.0 (exclusive).
>>> from random import Random
>>> randobj = Random(2016)
>>> data.iter().random_sample(0.1, state=randobj).into(list)
[7, 9, 19, 25, 30, 32, 34, 48, 59, 60, 81, 98]

Source code in src/pyochain/_iter/_process.py
def random_sample(
    self, probability: float, state: Random | int | None = None
) -> Iter[T]:
    """
    Return elements from a sequence, each selected with the given probability.

    Returns a lazy iterator of random items from seq.

    random_sample considers each item independently and without replacement.

    See below how the first time it returned 13 items and the next time it returned 6 items.

    Args:
        probability: The probability of including each element.
        state: Random state or seed for deterministic sampling.
    ```python
    >>> import pyochain as pc
    >>> data = pc.Seq(list(range(100)))
    >>> data.iter().random_sample(0.1).into(list)  # doctest: +SKIP
    [6, 9, 19, 35, 45, 50, 58, 62, 68, 72, 78, 86, 95]
    >>> data.iter().random_sample(0.1).into(list)  # doctest: +SKIP
    [6, 44, 54, 61, 69, 94]
    ```
    Providing an integer seed for state will result in deterministic sampling.

    Given the same seed it will return the same sample every time.
    ```python
    >>> data.iter().random_sample(0.1, state=2016).into(list)
    [7, 9, 19, 25, 30, 32, 34, 48, 59, 60, 81, 98]
    >>> data.iter().random_sample(0.1, state=2016).into(list)
    [7, 9, 19, 25, 30, 32, 34, 48, 59, 60, 81, 98]

    ```
    state can also be any object with a random() method that returns floats between 0.0 and 1.0 (exclusive).
    ```python
    >>> from random import Random
    >>> randobj = Random(2016)
    >>> data.iter().random_sample(0.1, state=randobj).into(list)
    [7, 9, 19, 25, 30, 32, 34, 48, 59, 60, 81, 98]

    ```
    """

    return self.apply(
        partial(cz.itertoolz.random_sample, probability, random_state=state)
    )

reduce

reduce(func: Callable[[T, T], T]) -> T

Apply a function of two arguments cumulatively to the items of an iterable, from left to right.

Parameters:

Name Type Description Default
func Callable[[T, T], T]

Function to apply cumulatively to the items of the iterable.

required

This effectively reduces the iterable to a single value.

Unlike functools.reduce, this method takes no initial value: the first element of the iterable serves as the starting value, so reducing an empty iterable raises a TypeError.

>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 3]).reduce(lambda a, b: a + b)
6

Source code in src/pyochain/_iter/_aggregations.py
def reduce(self, func: Callable[[T, T], T]) -> T:
    """
    Apply a function of two arguments cumulatively to the items of an iterable, from left to right.

    Args:
        func: Function to apply cumulatively to the items of the iterable.

    This effectively reduces the iterable to a single value.

    This method takes no initial value: the first element of the iterable serves as the starting value, so reducing an empty iterable raises a TypeError.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 3]).reduce(lambda a, b: a + b)
    6

    ```
    """
    return self.into(functools.partial(functools.reduce, func))
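
As noted above there is no initial value, so an empty input raises; a sketch showing the CPython error (marked as skipped since the exact message may vary):
```python
>>> import pyochain as pc
>>> pc.Iter.from_([]).reduce(lambda a, b: a + b)  # doctest: +SKIP
Traceback (most recent call last):
    ...
TypeError: reduce() of empty iterable with no initial value

```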

reduce_by

reduce_by(key: Callable[[T], K], binop: Callable[[T, T], T]) -> Dict[K, T]

Perform a simultaneous groupby and reduction.

Parameters:

Name Type Description Default
key Callable[[T], K]

Function to compute the key for grouping.

required
binop Callable[[T, T], T]

Binary operation to reduce the grouped elements.

required

Example:

>>> from collections.abc import Iterable
>>> import pyochain as pc
>>> from operator import add, mul
>>>
>>> def is_even(x: int) -> bool:
...     return x % 2 == 0
>>>
>>> def group_reduce(data: Iterable[int]) -> int:
...     return pc.Iter.from_(data).reduce(add)
>>>
>>> data = pc.Seq([1, 2, 3, 4, 5])
>>> data.iter().reduce_by(is_even, add).unwrap()
{False: 9, True: 6}
>>> data.iter().group_by(is_even).map_values(group_reduce).unwrap()
{False: 9, True: 6}
But the former does not build the intermediate groups, allowing it to operate in much less space.

This makes it suitable for larger datasets that do not fit comfortably in memory.

Simple Examples:

>>> pc.Iter.from_([1, 2, 3, 4, 5]).reduce_by(is_even, add).unwrap()
{False: 9, True: 6}
>>> pc.Iter.from_([1, 2, 3, 4, 5]).reduce_by(is_even, mul).unwrap()
{False: 15, True: 8}

Source code in src/pyochain/_iter/_groups.py
def reduce_by[K](
    self, key: Callable[[T], K], binop: Callable[[T, T], T]
) -> Dict[K, T]:
    """
    Perform a simultaneous groupby and reduction.

    Args:
        key: Function to compute the key for grouping.
        binop: Binary operation to reduce the grouped elements.
    Example:
    ```python
    >>> from collections.abc import Iterable
    >>> import pyochain as pc
    >>> from operator import add, mul
    >>>
    >>> def is_even(x: int) -> bool:
    ...     return x % 2 == 0
    >>>
    >>> def group_reduce(data: Iterable[int]) -> int:
    ...     return pc.Iter.from_(data).reduce(add)
    >>>
    >>> data = pc.Seq([1, 2, 3, 4, 5])
    >>> data.iter().reduce_by(is_even, add).unwrap()
    {False: 9, True: 6}
    >>> data.iter().group_by(is_even).map_values(group_reduce).unwrap()
    {False: 9, True: 6}

    ```
    But the former does not build the intermediate groups, allowing it to operate in much less space.

    This makes it suitable for larger datasets that do not fit comfortably in memory.

    Simple Examples:
    ```python
    >>> pc.Iter.from_([1, 2, 3, 4, 5]).reduce_by(is_even, add).unwrap()
    {False: 9, True: 6}
    >>> pc.Iter.from_([1, 2, 3, 4, 5]).reduce_by(is_even, mul).unwrap()
    {False: 15, True: 8}

    ```
    """
    from .._dict import Dict

    return Dict(self.into(partial(cz.itertoolz.reduceby, key, binop)))
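
A small sketch of the streaming behaviour described above: since the groups themselves are never built, the same call works on arbitrarily large or lazily generated inputs.
```python
>>> import pyochain as pc
>>> from operator import add
>>> def is_even(x: int) -> bool:
...     return x % 2 == 0
>>> # Only one running total per key is kept in memory.
>>> pc.Iter.from_(range(10)).reduce_by(is_even, add).unwrap()
{True: 20, False: 25}

```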

repeat

repeat(
    n: int, factory: Callable[[Iterable[T]], Collection[T]] = tuple
) -> Iter[Iterable[T]]

Repeat the entire iterable n times (as elements) and return Iter.

Parameters:

Name Type Description Default
n int

Number of repetitions.

required
factory Callable[[Iterable[T]], Collection[T]]

Factory to create the repeated collection (default: tuple).

tuple
>>> import pyochain as pc
>>> pc.Iter.from_([1, 2]).repeat(2).collect().unwrap()
[(1, 2), (1, 2)]
>>> pc.Iter.from_([1, 2]).repeat(3, list).collect().unwrap()
[[1, 2], [1, 2], [1, 2]]
Source code in src/pyochain/_iter/_maps.py
def repeat(
    self, n: int, factory: Callable[[Iterable[T]], Collection[T]] = tuple
) -> Iter[Iterable[T]]:
    """
    Repeat the entire iterable n times (as elements) and return Iter.

    Args:
        n: Number of repetitions.
        factory: Factory to create the repeated collection (default: tuple).

    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2]).repeat(2).collect().unwrap()
    [(1, 2), (1, 2)]
    >>> pc.Iter.from_([1, 2]).repeat(3, list).collect().unwrap()
    [[1, 2], [1, 2], [1, 2]]

    ```
    """

    def _repeat(data: Iterable[T]) -> Iterator[Iterable[T]]:
        return itertools.repeat(factory(data), n)

    return self.apply(_repeat)

repeat_last

repeat_last(default: T) -> Iter[T]
repeat_last(default: U) -> Iter[T | U]
repeat_last(default: U = None) -> Iter[T | U]

After the iterable is exhausted, keep yielding its last element.

Warning ⚠️ This creates an infinite iterator. Be sure to use Iter.take() or Iter.slice() to limit the number of items taken.

Parameters:

Name Type Description Default
default U

Value to yield if the iterable is empty.

None

Example:

>>> import pyochain as pc
>>> pc.Iter.from_(range(3)).repeat_last().take(5).into(list)
[0, 1, 2, 2, 2]

If the iterable is empty, yield default forever:
>>> pc.Iter.from_(range(0)).repeat_last(42).take(5).into(list)
[42, 42, 42, 42, 42]

Source code in src/pyochain/_iter/_maps.py
def repeat_last[U](self, default: U = None) -> Iter[T | U]:
    """
    After the iterable is exhausted, keep yielding its last element.

    **Warning** ⚠️
        This creates an infinite iterator.
        Be sure to use `Iter.take()` or `Iter.slice()` to limit the number of items taken.

    Args:
        default: Value to yield if the iterable is empty.

    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_(range(3)).repeat_last().take(5).into(list)
    [0, 1, 2, 2, 2]

    ```
    If the iterable is empty, yield default forever:
    ```python
    >>> pc.Iter.from_(range(0)).repeat_last(42).take(5).into(list)
    [42, 42, 42, 42, 42]

    ```
    """
    return self.apply(mit.repeat_last, default)

reverse

reverse() -> Iter[T]

Return a new Iterable wrapper with elements in reverse order.

The result is a new iterable over the reversed sequence.

Note

This method must consume the entire iterable to perform the reversal.

>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 3]).reverse().into(list)
[3, 2, 1]
Source code in src/pyochain/_iter/_process.py
def reverse(self) -> Iter[T]:
    """
    Return a new Iterable wrapper with elements in reverse order.

    The result is a new iterable over the reversed sequence.

    Note:
        This method must consume the entire iterable to perform the reversal.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 3]).reverse().into(list)
    [3, 2, 1]

    ```
    """

    def _reverse(data: Iterable[T]) -> Iterator[T]:
        return reversed(list(data))

    return self.apply(_reverse)

rolling_all

rolling_all(window_size: int) -> Iter[bool]

Compute whether all values in the window evaluate to True.

Parameters:

Name Type Description Default
window_size int

Size of the rolling window.

required
>>> import pyochain as pc
>>> pc.Iter.from_([True, True, False, True, True]).rolling_all(2).into(list)
[True, False, False, True]
Source code in src/pyochain/_iter/_rolling.py
def rolling_all(self, window_size: int) -> Iter[bool]:
    """
    Compute whether all values in the window evaluate to True.

    Args:
        window_size: Size of the rolling window.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([True, True, False, True, True]).rolling_all(2).into(list)
    [True, False, False, True]

    ```
    """
    return self.apply(rolling.All, window_size)

rolling_any

rolling_any(window_size: int) -> Iter[bool]

Compute whether any value in the window evaluates to True.

Parameters:

Name Type Description Default
window_size int

Size of the rolling window.

required
>>> import pyochain as pc
>>> pc.Iter.from_([True, True, False, True, True]).rolling_any(2).into(list)
[True, True, True, True]
Source code in src/pyochain/_iter/_rolling.py
def rolling_any(self, window_size: int) -> Iter[bool]:
    """
    Compute whether any value in the window evaluates to True.

    Args:
        window_size: Size of the rolling window.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([True, True, False, True, True]).rolling_any(2).into(list)
    [True, True, True, True]

    ```
    """
    return self.apply(rolling.Any, window_size)

rolling_apply

rolling_apply(func: Callable[[Iterable[T]], R], window_size: int) -> Iter[R]

Apply a custom function to each rolling window.

The function should accept an iterable and return a single value.

Parameters:

Name Type Description Default
func Callable[[Iterable[T]], R]

Function to apply to each rolling window.

required
window_size int

Size of the rolling window.

required
>>> import pyochain as pc
>>> def range_func(window):
...     return max(window) - min(window)
>>> pc.Iter.from_([1, 3, 2, 5, 4]).rolling_apply(range_func, 3).into(list)
[2, 3, 3]
Source code in src/pyochain/_iter/_rolling.py
def rolling_apply[R](
    self, func: Callable[[Iterable[T]], R], window_size: int
) -> Iter[R]:
    """
    Apply a custom function to each rolling window.

    The function should accept an iterable and return a single value.

    Args:
        func: Function to apply to each rolling window.
        window_size: Size of the rolling window.
    ```python
    >>> import pyochain as pc
    >>> def range_func(window):
    ...     return max(window) - min(window)
    >>> pc.Iter.from_([1, 3, 2, 5, 4]).rolling_apply(range_func, 3).into(list)
    [2, 3, 3]

    ```
    """
    return self.apply(rolling.Apply, window_size, "fixed", func)

rolling_apply_pairwise

rolling_apply_pairwise(
    other: Iterable[T], func: Callable[[T, T], R], window_size: int
) -> Iter[R]

Apply a custom pairwise function to each rolling window of size 2.

The function should accept two arguments and return a single value.

Parameters:

Name Type Description Default
other Iterable[T]

Second iterable to apply the pairwise function.

required
func Callable[[T, T], R]

Function to apply to each pair of elements.

required
window_size int

Size of the rolling window.

required
>>> import pyochain as pc
>>> from statistics import correlation as corr
>>> seq_1 = [1, 2, 3, 4, 5]
>>> seq_2 = [1, 2, 3, 2, 1]
>>> pc.Iter.from_(seq_1).rolling_apply_pairwise(seq_2, corr, 3).into(list)
[1.0, 0.0, -1.0]
Source code in src/pyochain/_iter/_rolling.py
def rolling_apply_pairwise[R](
    self, other: Iterable[T], func: Callable[[T, T], R], window_size: int
) -> Iter[R]:
    """
    Apply a custom pairwise function to each rolling window of size 2.

    The function should accept two arguments and return a single value.

    Args:
        other: Second iterable to apply the pairwise function.
        func: Function to apply to each pair of elements.
        window_size: Size of the rolling window.
    ```python
    >>> import pyochain as pc
    >>> from statistics import correlation as corr
    >>> seq_1 = [1, 2, 3, 4, 5]
    >>> seq_2 = [1, 2, 3, 2, 1]
    >>> pc.Iter.from_(seq_1).rolling_apply_pairwise(seq_2, corr, 3).into(list)
    [1.0, 0.0, -1.0]

    ```
    """
    return self.apply(rolling.ApplyPairwise, other, window_size, func)

rolling_kurtosis

rolling_kurtosis(window_size: int) -> Iter[float]

Compute the rolling kurtosis.

Parameters:

Name Type Description Default
window_size int

Size of the rolling window. Must be at least 4.

required
>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 4, 1, 4]).rolling_kurtosis(4).into(list)
[1.5, -3.901234567901234]
Source code in src/pyochain/_iter/_rolling.py
def rolling_kurtosis(self, window_size: int) -> Iter[float]:
    """
    Compute the rolling kurtosis.

    Args:
        window_size: Size of the rolling window. Must be at least 4.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 4, 1, 4]).rolling_kurtosis(4).into(list)
    [1.5, -3.901234567901234]

    ```
    """
    return self.apply(rolling.Kurtosis, window_size)

rolling_max

rolling_max(window_size: int) -> Iter[T]

Compute the rolling maximum.

Parameters:

Name Type Description Default
window_size int

Size of the rolling window.

required
>>> import pyochain as pc
>>> pc.Iter.from_([3, 1, 4, 1, 5, 9, 2]).rolling_max(3).into(list)
[4, 4, 5, 9, 9]
Source code in src/pyochain/_iter/_rolling.py
def rolling_max(self, window_size: int) -> Iter[T]:
    """
    Compute the rolling maximum.

    Args:
        window_size: Size of the rolling window.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([3, 1, 4, 1, 5, 9, 2]).rolling_max(3).into(list)
    [4, 4, 5, 9, 9]

    ```
    """
    return self.apply(rolling.Max, window_size)

rolling_mean

rolling_mean(window_size: int) -> Iter[float]

Compute the rolling mean.

Parameters:

Name Type Description Default
window_size int

Size of the rolling window.

required
>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 3, 4, 5]).rolling_mean(3).into(list)
[2.0, 3.0, 4.0]
Source code in src/pyochain/_iter/_rolling.py
def rolling_mean(self, window_size: int) -> Iter[float]:
    """
    Compute the rolling mean.

    Args:
        window_size: Size of the rolling window.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 3, 4, 5]).rolling_mean(3).into(list)
    [2.0, 3.0, 4.0]

    ```
    """
    return self.apply(rolling.Mean, window_size)

rolling_median

rolling_median(window_size: int) -> Iter[T]

Compute the rolling median.

Parameters:

Name Type Description Default
window_size int

Size of the rolling window.

required
>>> import pyochain as pc
>>> pc.Iter.from_([1, 3, 2, 5, 4]).rolling_median(3).into(list)
[2, 3, 4]
Source code in src/pyochain/_iter/_rolling.py
def rolling_median(self, window_size: int) -> Iter[T]:
    """
    Compute the rolling median.

    Args:
        window_size: Size of the rolling window.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 3, 2, 5, 4]).rolling_median(3).into(list)
    [2, 3, 4]

    ```
    """
    return self.apply(rolling.Median, window_size)

rolling_min

rolling_min(window_size: int) -> Iter[T]

Compute the rolling minimum.

Parameters:

Name Type Description Default
window_size int

Size of the rolling window.

required
>>> import pyochain as pc
>>> pc.Iter.from_([3, 1, 4, 1, 5, 9, 2]).rolling_min(3).into(list)
[1, 1, 1, 1, 2]
Source code in src/pyochain/_iter/_rolling.py
def rolling_min(self, window_size: int) -> Iter[T]:
    """
    Compute the rolling minimum.

    Args:
        window_size: Size of the rolling window.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([3, 1, 4, 1, 5, 9, 2]).rolling_min(3).into(list)
    [1, 1, 1, 1, 2]

    ```
    """
    return self.apply(rolling.Min, window_size)

rolling_product

rolling_product(window_size: int) -> Iter[float]

Compute the rolling product.

Parameters:

Name Type Description Default
window_size int

Size of the rolling window.

required
>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 3, 4, 5]).rolling_product(3).into(list)
[6.0, 24.0, 60.0]
Source code in src/pyochain/_iter/_rolling.py
def rolling_product(self, window_size: int) -> Iter[float]:
    """
    Compute the rolling product.

    Args:
        window_size: Size of the rolling window.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 3, 4, 5]).rolling_product(3).into(list)
    [6.0, 24.0, 60.0]

    ```
    """
    return self.apply(rolling.Product, window_size)

rolling_skew

rolling_skew(window_size: int) -> Iter[float]

Compute the rolling skewness.

Parameters:

Name Type Description Default
window_size int

Size of the rolling window. Must be at least 3.

required
>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 4, 1, 4]).rolling_skew(3).round(2).into(list)
[0.94, 0.94, -1.73]
Source code in src/pyochain/_iter/_rolling.py
def rolling_skew(self, window_size: int) -> Iter[float]:
    """
    Compute the rolling skewness.

    Args:
        window_size: Size of the rolling window. Must be at least 3.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 4, 1, 4]).rolling_skew(3).round(2).into(list)
    [0.94, 0.94, -1.73]

    ```
    """
    return self.apply(rolling.Skew, window_size)

rolling_std

rolling_std(window_size: int) -> Iter[float]

Compute the rolling standard deviation.

Parameters:

Name Type Description Default
window_size int

Size of the rolling window.

required
>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 4, 1, 4]).rolling_std(3).round(2).into(list)
[1.53, 1.53, 1.73]
Source code in src/pyochain/_iter/_rolling.py
def rolling_std(self, window_size: int) -> Iter[float]:
    """
    Compute the rolling standard deviation.

    Args:
        window_size: Size of the rolling window.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 4, 1, 4]).rolling_std(3).round(2).into(list)
    [1.53, 1.53, 1.73]

    ```
    """
    return self.apply(rolling.Std, window_size)

rolling_sum

rolling_sum(window_size: int) -> Iter[T]

Compute the rolling sum.

Will return integers if the input is integers, floats if the input is floats or mixed.

Parameters:

Name Type Description Default
window_size int

Size of the rolling window.

required
>>> import pyochain as pc
>>> pc.Iter.from_([1.0, 2, 3, 4, 5]).rolling_sum(3).into(list)
[6.0, 9.0, 12.0]
Source code in src/pyochain/_iter/_rolling.py
def rolling_sum(self, window_size: int) -> Iter[T]:
    """
    Compute the rolling sum.

    Will return integers if the input is integers, floats if the input is floats or mixed.

    Args:
        window_size: Size of the rolling window.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1.0, 2, 3, 4, 5]).rolling_sum(3).into(list)
    [6.0, 9.0, 12.0]

    ```
    """
    return self.apply(rolling.Sum, window_size)

rolling_var

rolling_var(window_size: int) -> Iter[float]

Compute the rolling variance.

Parameters:

Name Type Description Default
window_size int

Size of the rolling window.

required
>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 4, 1, 4]).rolling_var(3).round(2).into(list)
[2.33, 2.33, 3.0]
Source code in src/pyochain/_iter/_rolling.py
def rolling_var(self, window_size: int) -> Iter[float]:
    """
    Compute the rolling variance.

    Args:
        window_size: Size of the rolling window.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 4, 1, 4]).rolling_var(3).round(2).into(list)
    [2.33, 2.33, 3.0]

    ```
    """
    return self.apply(rolling.Var, window_size)

round

round(ndigits: int | None = None) -> Iter[float]

Round each element in the iterable to the given number of decimal places and return Iter.

Parameters:

Name Type Description Default
ndigits int | None

Number of decimal places to round to.

None
>>> import pyochain as pc
>>> pc.Iter.from_([1.2345, 2.3456, 3.4567]).round(2).into(list)
[1.23, 2.35, 3.46]
Source code in src/pyochain/_iter/_maps.py
def round[U: float | int](
    self: IterWrapper[U], ndigits: int | None = None
) -> Iter[float]:
    """
    Round each element in the iterable to the given number of decimal places and return Iter.

    Args:
        ndigits: Number of decimal places to round to.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1.2345, 2.3456, 3.4567]).round(2).into(list)
    [1.23, 2.35, 3.46]

    ```
    """

    def _round(data: Iterable[U]) -> Generator[float | int, None, None]:
        return (round(x, ndigits) for x in data)

    return self.apply(_round)

second

second() -> T

Return the second element.

>>> import pyochain as pc
>>> pc.Iter.from_([9, 8]).second()
8

Source code in src/pyochain/_iter/_aggregations.py
def second(self) -> T:
    """
    Return the second element.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([9, 8]).second()
    8

    ```
    """
    return self.into(cz.itertoolz.second)

skip

skip(n: int) -> Iter[T]

Drop first n elements.

Parameters:

Name Type Description Default
n int

Number of elements to skip.

required

Example:

>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 3]).skip(1).into(list)
[2, 3]

Source code in src/pyochain/_iter/_filters.py
def skip(self, n: int) -> Iter[T]:
    """
    Drop first n elements.

    Args:
        n: Number of elements to skip.
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 3]).skip(1).into(list)
    [2, 3]

    ```
    """
    return self.apply(partial(cz.itertoolz.drop, n))

skip_while

skip_while(predicate: Callable[[T], bool]) -> Iter[T]

Drop items while predicate holds.

Parameters:

Name Type Description Default
predicate Callable[[T], bool]

Function to evaluate each item.

required

Example:

>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 0]).skip_while(lambda x: x > 0).into(list)
[0]

Source code in src/pyochain/_iter/_filters.py
def skip_while(self, predicate: Callable[[T], bool]) -> Iter[T]:
    """
    Drop items while predicate holds.

    Args:
        predicate: Function to evaluate each item.
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 0]).skip_while(lambda x: x > 0).into(list)
    [0]

    ```
    """
    return self.apply(partial(itertools.dropwhile, predicate))

slice

slice(start: int | None = None, stop: int | None = None) -> Iter[T]

Return a slice of the iterable.

Parameters:

Name Type Description Default
start int | None

Starting index of the slice. Defaults to None.

None
stop int | None

Ending index of the slice. Defaults to None.

None

Example:

>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 3, 4, 5]).slice(1, 4).into(list)
[2, 3, 4]

Source code in src/pyochain/_iter/_filters.py
def slice(self, start: int | None = None, stop: int | None = None) -> Iter[T]:
    """
    Return a slice of the iterable.

    Args:
        start: Starting index of the slice. Defaults to None.
        stop: Ending index of the slice. Defaults to None.
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 3, 4, 5]).slice(1, 4).into(list)
    [2, 3, 4]

    ```
    """

    def _slice(data: Iterable[T]) -> Iterator[T]:
        return itertools.islice(data, start, stop)

    return self.apply(_slice)
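
Both bounds are optional; a small sketch with only start given:
```python
>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 3, 4, 5]).slice(2).into(list)
[3, 4, 5]

```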

sort

sort(reverse: bool = False, key: Callable[[U], Any] | None = None) -> Seq[U]

Sort the elements of the sequence.

Note

This method must consume the entire iterable to perform the sort. The result is a new iterable over the sorted sequence.

Parameters:

Name Type Description Default
reverse bool

Whether to sort in descending order. Defaults to False.

False
key Callable[[U], Any] | None

Function to extract a comparison key from each element. Defaults to None.

None

Example:

>>> import pyochain as pc
>>> pc.Iter.from_([3, 1, 2]).sort().into(list)
[1, 2, 3]

Source code in src/pyochain/_iter/_eager.py
def sort[U: SupportsRichComparison[Any]](
    self: BaseEager[U], reverse: bool = False, key: Callable[[U], Any] | None = None
) -> Seq[U]:
    """
    Sort the elements of the sequence.

    Note:
        This method must consume the entire iterable to perform the sort.
        The result is a new iterable over the sorted sequence.

    Args:
        reverse: Whether to sort in descending order. Defaults to False.
        key: Function to extract a comparison key from each element. Defaults to None.
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([3, 1, 2]).sort().into(list)
    [1, 2, 3]

    ```
    """

    def _sort(data: Iterable[U]) -> list[U]:
        return sorted(data, reverse=reverse, key=key)

    return self.collect(_sort)
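
A sketch of the key and reverse parameters, which the example above does not cover:
```python
>>> import pyochain as pc
>>> pc.Iter.from_(["banana", "fig", "apple"]).sort(key=len).into(list)
['fig', 'apple', 'banana']
>>> pc.Iter.from_([3, 1, 2]).sort(reverse=True).into(list)
[3, 2, 1]

```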

split_after

split_after(predicate: Callable[[T], bool], max_split: int = -1) -> Iter[list[T]]

Yield lists of items from iterable, where each list ends with an item where callable pred returns True.

Parameters:

Name Type Description Default
predicate Callable[[T], bool]

Function to determine the split points.

required
max_split int

Maximum number of splits to perform. Defaults to -1 (no limit).

-1

Example:

>>> import pyochain as pc
>>> pc.Iter.from_("one1two2").split_after(str.isdigit).into(list)
[['o', 'n', 'e', '1'], ['t', 'w', 'o', '2']]

>>> def cond(n: int) -> bool:
...     return n % 3 == 0
>>>
>>> pc.Iter.from_(range(10)).split_after(cond).into(list)
[[0], [1, 2, 3], [4, 5, 6], [7, 8, 9]]
>>> pc.Iter.from_(range(10)).split_after(cond, max_split=2).into(list)
[[0], [1, 2, 3], [4, 5, 6, 7, 8, 9]]

Source code in src/pyochain/_iter/_lists.py
def split_after(
    self, predicate: Callable[[T], bool], max_split: int = -1
) -> Iter[list[T]]:
    """
    Yield lists of items from iterable, where each list ends with an item where callable pred returns True.

    Args:
        predicate: Function to determine the split points.
        max_split: Maximum number of splits to perform. Defaults to -1 (no limit).
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_("one1two2").split_after(str.isdigit).into(list)
    [['o', 'n', 'e', '1'], ['t', 'w', 'o', '2']]

    >>> def cond(n: int) -> bool:
    ...     return n % 3 == 0
    >>>
    >>> pc.Iter.from_(range(10)).split_after(cond).into(list)
    [[0], [1, 2, 3], [4, 5, 6], [7, 8, 9]]
    >>> pc.Iter.from_(range(10)).split_after(cond, max_split=2).into(list)
    [[0], [1, 2, 3], [4, 5, 6, 7, 8, 9]]

    ```
    """
    return self.apply(mit.split_after, predicate, max_split)

split_at

split_at(
    pred: Callable[[T], bool], maxsplit: int = -1, keep_separator: bool = False
) -> Iter[list[T]]

Yield lists of items from iterable, where each list is delimited by an item where callable pred returns True.

Parameters:

Name Type Description Default
pred Callable[[T], bool]

Function to determine the split points.

required
maxsplit int

Maximum number of splits to perform. Defaults to -1 (no limit).

-1
keep_separator bool

Whether to include the separator in the output. Defaults to False.

False

Example:

>>> import pyochain as pc
>>> pc.Iter.from_("abcdcba").split_at(lambda x: x == "b").into(list)
[['a'], ['c', 'd', 'c'], ['a']]
>>> pc.Iter.from_(range(10)).split_at(lambda n: n % 2 == 1).into(list)
[[0], [2], [4], [6], [8], []]

At most *maxsplit* splits are done.

If *maxsplit* is not specified or -1, then there is no limit on the number of splits:
>>> pc.Iter.from_(range(10)).split_at(lambda n: n % 2 == 1, maxsplit=2).into(
...     list
... )
[[0], [2], [4, 5, 6, 7, 8, 9]]

By default, the delimiting items are not included in the output.

To include them, set keep_separator to True.

>>> def cond(x: str) -> bool:
...     return x == "b"
>>> pc.Iter.from_("abcdcba").split_at(cond, keep_separator=True).into(list)
[['a'], ['b'], ['c', 'd', 'c'], ['b'], ['a']]

Source code in src/pyochain/_iter/_lists.py
def split_at(
    self,
    pred: Callable[[T], bool],
    maxsplit: int = -1,
    keep_separator: bool = False,
) -> Iter[list[T]]:
    """
    Yield lists of items from iterable, where each list is delimited by an item where callable pred returns True.

    Args:
        pred: Function to determine the split points.
        maxsplit: Maximum number of splits to perform. Defaults to -1 (no limit).
        keep_separator: Whether to include the separator in the output. Defaults to False.
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_("abcdcba").split_at(lambda x: x == "b").into(list)
    [['a'], ['c', 'd', 'c'], ['a']]
    >>> pc.Iter.from_(range(10)).split_at(lambda n: n % 2 == 1).into(list)
    [[0], [2], [4], [6], [8], []]

    ```
    At most *maxsplit* splits are done.

    If *maxsplit* is not specified or -1, then there is no limit on the number of splits:
    ```python
    >>> pc.Iter.from_(range(10)).split_at(lambda n: n % 2 == 1, maxsplit=2).into(
    ...     list
    ... )
    [[0], [2], [4, 5, 6, 7, 8, 9]]

    ```
    By default, the delimiting items are not included in the output.

    To include them, set *keep_separator* to `True`.
    ```python
    >>> def cond(x: str) -> bool:
    ...     return x == "b"
    >>> pc.Iter.from_("abcdcba").split_at(cond, keep_separator=True).into(list)
    [['a'], ['b'], ['c', 'd', 'c'], ['b'], ['a']]

    ```
    """
    return self.apply(mit.split_at, pred, maxsplit, keep_separator)

split_before

split_before(predicate: Callable[[T], bool], max_split: int = -1) -> Iter[list[T]]

Yield lists of items from iterable, where each list starts with an item for which callable pred returns True.

Parameters:

Name Type Description Default
predicate Callable[[T], bool]

Function to determine the split points.

required
max_split int

Maximum number of splits to perform. Defaults to -1 (no limit).

-1

Example:

>>> import pyochain as pc
>>> pc.Iter.from_("abcdcba").split_before(lambda x: x == "b").into(list)
[['a'], ['b', 'c', 'd', 'c'], ['b', 'a']]
>>>
>>> def cond(n: int) -> bool:
...     return n % 2 == 1
>>>
>>> pc.Iter.from_(range(10)).split_before(cond).into(list)
[[0], [1, 2], [3, 4], [5, 6], [7, 8], [9]]
At most max_split splits are done.

If max_split is not specified or -1, then there is no limit on the number of splits:

>>> pc.Iter.from_(range(10)).split_before(cond, max_split=2).into(list)
[[0], [1, 2], [3, 4, 5, 6, 7, 8, 9]]

Source code in src/pyochain/_iter/_lists.py
def split_before(
    self, predicate: Callable[[T], bool], max_split: int = -1
) -> Iter[list[T]]:
    """
    Yield lists of items from iterable, where each list starts with an item for which callable pred returns True.

    Args:
        predicate: Function to determine the split points.
        max_split: Maximum number of splits to perform. Defaults to -1 (no limit).
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_("abcdcba").split_before(lambda x: x == "b").into(list)
    [['a'], ['b', 'c', 'd', 'c'], ['b', 'a']]
    >>>
    >>> def cond(n: int) -> bool:
    ...     return n % 2 == 1
    >>>
    >>> pc.Iter.from_(range(10)).split_before(cond).into(list)
    [[0], [1, 2], [3, 4], [5, 6], [7, 8], [9]]

    ```
    At most *max_split* splits are done.

    If *max_split* is not specified or -1, then there is no limit on the number of splits:
    ```python
    >>> pc.Iter.from_(range(10)).split_before(cond, max_split=2).into(list)
    [[0], [1, 2], [3, 4, 5, 6, 7, 8, 9]]

    ```
    """
    return self.apply(mit.split_before, predicate, max_split)

split_into

split_into(sizes: Iterable[int | None]) -> Iter[list[T]]

Yield a list of sequential items from iterable of length 'n' for each integer 'n' in sizes.

Parameters:

Name Type Description Default
sizes Iterable[int | None]

Iterable of integers specifying the sizes of each chunk. Use None for the remainder.

required

Example:

>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 3, 4, 5, 6]).split_into([1, 2, 3]).into(list)
[[1], [2, 3], [4, 5, 6]]

If the sum of sizes is smaller than the length of iterable, then the remaining items of iterable will not be returned.

>>> pc.Iter.from_([1, 2, 3, 4, 5, 6]).split_into([2, 3]).into(list)
[[1, 2], [3, 4, 5]]

If the sum of sizes is larger than the length of iterable:

- fewer items will be returned in the iteration that overruns the iterable
- further lists will be empty

>>> pc.Iter.from_([1, 2, 3, 4]).split_into([1, 2, 3, 4]).into(list)
[[1], [2, 3], [4], []]

When a None object is encountered in sizes, the returned list will contain items up to the end of iterable the same way that itertools.islice does:

>>> data = [1, 2, 3, 4, 5, 6, 7, 8, 9, 0]
>>> pc.Iter.from_(data).split_into([2, 3, None]).into(list)
[[1, 2], [3, 4, 5], [6, 7, 8, 9, 0]]

split_into can be useful for grouping a series of items where the sizes of the groups are not uniform.

An example would be where in a row from a table:

- multiple columns represent elements of the same feature (e.g. a point represented by x, y, z)
- the format is not the same for all columns.
Source code in src/pyochain/_iter/_lists.py
def split_into(self, sizes: Iterable[int | None]) -> Iter[list[T]]:
    """
    Yield a list of sequential items from iterable of length 'n' for each integer 'n' in sizes.

    Args:
        sizes: Iterable of integers specifying the sizes of each chunk. Use None for the remainder.
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 3, 4, 5, 6]).split_into([1, 2, 3]).into(list)
    [[1], [2, 3], [4, 5, 6]]

    ```
    If the sum of sizes is smaller than the length of iterable, then the remaining items of iterable will not be returned.
    ```python
    >>> pc.Iter.from_([1, 2, 3, 4, 5, 6]).split_into([2, 3]).into(list)
    [[1, 2], [3, 4, 5]]

    ```

    If the sum of sizes is larger than the length of iterable:

    - fewer items will be returned in the iteration that overruns the iterable
    - further lists will be empty
    ```python
    >>> pc.Iter.from_([1, 2, 3, 4]).split_into([1, 2, 3, 4]).into(list)
    [[1], [2, 3], [4], []]

    ```

    When a None object is encountered in sizes, the returned list will contain items up to the end of iterable the same way that itertools.islice does:
    ```python
    >>> data = [1, 2, 3, 4, 5, 6, 7, 8, 9, 0]
    >>> pc.Iter.from_(data).split_into([2, 3, None]).into(list)
    [[1, 2], [3, 4, 5], [6, 7, 8, 9, 0]]

    ```

    split_into can be useful for grouping a series of items where the sizes of the groups are not uniform.

    An example would be where in a row from a table:

    - multiple columns represent elements of the same feature (e.g. a point represented by x,y,z)
    - the format is not the same for all columns.
    """
    return self.apply(mit.split_into, sizes)
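
A sketch of the table-row use case described above, splitting a row whose first three columns form an (x, y, z) point:
```python
>>> import pyochain as pc
>>> row = [1.0, 2.0, 3.0, "sensor-a", 42]
>>> pc.Iter.from_(row).split_into([3, 1, 1]).into(list)
[[1.0, 2.0, 3.0], ['sensor-a'], [42]]

```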

split_when

split_when(predicate: Callable[[T, T], bool], max_split: int = -1) -> Iter[list[T]]

Split iterable into pieces based on the output of a predicate function.

Parameters:

Name Type Description Default
predicate Callable[[T, T], bool]

Function that takes successive pairs of items and returns True if the iterable should be split.

required
max_split int

Maximum number of splits to perform. Defaults to -1 (no limit).

-1

For example, to find runs of increasing numbers, split the iterable when element i is larger than element i + 1:

>>> import pyochain as pc
>>> data = pc.Seq([1, 2, 3, 3, 2, 5, 2, 4, 2])
>>> data.iter().split_when(lambda x, y: x > y).into(list)
[[1, 2, 3, 3], [2, 5], [2, 4], [2]]

At most max_split splits are done. If max_split is not specified or is -1, there is no limit on the number of splits.

To limit the number of splits, pass max_split:

>>> data.iter().split_when(lambda x, y: x > y, max_split=2).into(list)
[[1, 2, 3, 3], [2, 5], [2, 4, 2]]
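
Another illustrative sketch (not from the library docs): because the predicate sees successive pairs, split_when can segment a sorted sequence into "sessions" whenever the gap between neighbouring values exceeds a threshold.

```python
>>> import pyochain as pc
>>> timestamps = [1, 2, 3, 10, 11, 30]
>>> # start a new group whenever two consecutive timestamps are more than 5 apart
>>> pc.Iter.from_(timestamps).split_when(lambda a, b: b - a > 5).into(list)
[[1, 2, 3], [10, 11], [30]]

```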

Source code in src/pyochain/_iter/_lists.py
def split_when(
    self, predicate: Callable[[T, T], bool], max_split: int = -1
) -> Iter[list[T]]:
    """
    Split iterable into pieces based on the output of a predicate function.

    Args:
        predicate: Function that takes successive pairs of items and returns True if the iterable should be split.
        max_split: Maximum number of splits to perform. Defaults to -1 (no limit).

    For example, to find runs of increasing numbers, split the iterable when element i is larger than element i + 1:
    ```python
    >>> import pyochain as pc
    >>> data = pc.Seq([1, 2, 3, 3, 2, 5, 2, 4, 2])
    >>> data.iter().split_when(lambda x, y: x > y).into(list)
    [[1, 2, 3, 3], [2, 5], [2, 4], [2]]

    ```

    At most max_split splits are done. If max_split is not specified or is -1, there is no limit on the number of splits.

    To limit the number of splits, pass max_split:
    ```python
    >>> data.iter().split_when(lambda x, y: x > y, max_split=2).into(list)
    [[1, 2, 3, 3], [2, 5], [2, 4, 2]]

    ```
    """
    return self.apply(mit.split_when, predicate, max_split)

stdev

stdev() -> float

Return the standard deviation of the sequence.

>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 3]).stdev()
1.0

Source code in src/pyochain/_iter/_aggregations.py
def stdev[U: int | float](
    self: IterWrapper[U],
) -> float:
    """
    Return the standard deviation of the sequence.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 3]).stdev()
    1.0

    ```
    """
    return self.into(statistics.stdev)

struct

struct(
    func: Callable[Concatenate[Dict[K, V], P], R], *args: P.args, **kwargs: P.kwargs
) -> Iter[R]

Apply a function to each element after wrapping it in a Dict.

This is a convenience method for the common pattern of mapping a function over an iterable of dictionaries.

Parameters:

Name Type Description Default
func Callable[Concatenate[Dict[K, V], P], R]

Function to apply to each wrapped dictionary.

required
*args P.args

Positional arguments to pass to the function.

()
**kwargs P.kwargs

Keyword arguments to pass to the function.

{}

Example:

>>> from typing import Any
>>> import pyochain as pc

>>> data: list[dict[str, Any]] = [
...     {"name": "Alice", "age": 30, "city": "New York"},
...     {"name": "Bob", "age": 25, "city": "Los Angeles"},
...     {"name": "Charlie", "age": 35, "city": "New York"},
...     {"name": "David", "age": 40, "city": "Paris"},
... ]
>>>
>>> def to_title(d: pc.Dict[str, Any]) -> pc.Dict[str, Any]:
...     return d.map_keys(lambda k: k.title())
>>> def is_young(d: pc.Dict[str, Any]) -> bool:
...     return d.unwrap().get("Age", 0) < 30
>>> def set_continent(d: pc.Dict[str, Any], value: str) -> dict[str, Any]:
...     return d.with_key("Continent", value).unwrap()
>>>
>>> pc.Iter.from_(data).struct(to_title).filter_false(is_young).map(
...     lambda d: d.drop("Age").with_key("Continent", "NA")
... ).map_if(
...     lambda d: d.unwrap().get("City") == "Paris",
...     lambda d: set_continent(d, "Europe"),
...     lambda d: set_continent(d, "America"),
... ).group_by(lambda d: d.get("Continent")).map_values(
...     lambda d: pc.Iter.from_(d)
...     .struct(lambda d: d.drop("Continent").unwrap())
...     .into(list)
... )  # doctest: +NORMALIZE_WHITESPACE
Dict({
'America': [
    {'Name': 'Alice', 'City': 'New York'},
    {'Name': 'Charlie', 'City': 'New York'}
],
'Europe': [
    {'Name': 'David', 'City': 'Paris'}
]
})

Source code in src/pyochain/_iter/_main.py
def struct[**P, R, K, V](
    self: Iter[dict[K, V]],
    func: Callable[Concatenate[Dict[K, V], P], R],
    *args: P.args,
    **kwargs: P.kwargs,
) -> Iter[R]:
    """
    Apply a function to each element after wrapping it in a Dict.

    This is a convenience method for the common pattern of mapping a function over an iterable of dictionaries.

    Args:
        func: Function to apply to each wrapped dictionary.
        *args: Positional arguments to pass to the function.
        **kwargs: Keyword arguments to pass to the function.
    Example:
    ```python
    >>> from typing import Any
    >>> import pyochain as pc

    >>> data: list[dict[str, Any]] = [
    ...     {"name": "Alice", "age": 30, "city": "New York"},
    ...     {"name": "Bob", "age": 25, "city": "Los Angeles"},
    ...     {"name": "Charlie", "age": 35, "city": "New York"},
    ...     {"name": "David", "age": 40, "city": "Paris"},
    ... ]
    >>>
    >>> def to_title(d: pc.Dict[str, Any]) -> pc.Dict[str, Any]:
    ...     return d.map_keys(lambda k: k.title())
    >>> def is_young(d: pc.Dict[str, Any]) -> bool:
    ...     return d.unwrap().get("Age", 0) < 30
    >>> def set_continent(d: pc.Dict[str, Any], value: str) -> dict[str, Any]:
    ...     return d.with_key("Continent", value).unwrap()
    >>>
    >>> pc.Iter.from_(data).struct(to_title).filter_false(is_young).map(
    ...     lambda d: d.drop("Age").with_key("Continent", "NA")
    ... ).map_if(
    ...     lambda d: d.unwrap().get("City") == "Paris",
    ...     lambda d: set_continent(d, "Europe"),
    ...     lambda d: set_continent(d, "America"),
    ... ).group_by(lambda d: d.get("Continent")).map_values(
    ...     lambda d: pc.Iter.from_(d)
    ...     .struct(lambda d: d.drop("Continent").unwrap())
    ...     .into(list)
    ... )  # doctest: +NORMALIZE_WHITESPACE
    Dict({
    'America': [
        {'Name': 'Alice', 'City': 'New York'},
        {'Name': 'Charlie', 'City': 'New York'}
    ],
    'Europe': [
        {'Name': 'David', 'City': 'Paris'}
    ]
    })

    ```
    """
    from .._dict import Dict

    def _struct(data: Iterable[dict[K, V]]) -> Generator[R, None, None]:
        return (func(Dict(x), *args, **kwargs) for x in data)

    return self.apply(_struct)

sum

sum() -> U | Literal[0]

Return the sum of the sequence.

>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 3]).sum()
6

Source code in src/pyochain/_iter/_aggregations.py
def sum[U: int | float](self: IterWrapper[U]) -> U | Literal[0]:
    """
    Return the sum of the sequence.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 3]).sum()
    6

    ```
    """
    return self.into(sum)

tail

tail(n: int) -> Seq[T]

Return a tuple of the last n elements.

Parameters:

Name Type Description Default
n int

Number of elements to return.

required

Example:

>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 3]).tail(2).unwrap()
(2, 3)

Source code in src/pyochain/_iter/_eager.py
def tail(self, n: int) -> Seq[T]:
    """
    Return a tuple of the last n elements.

    Args:
        n: Number of elements to return.
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 3]).tail(2).unwrap()
    (2, 3)

    ```
    """
    return self.collect(partial(cz.itertoolz.tail, n))

take

take(n: int) -> Iter[T]

Creates an iterator that yields the first n elements, or fewer if the underlying iterator ends sooner.

Iter.take(n) yields elements until n elements are yielded or the end of the iterator is reached (whichever happens first).

The returned iterator is either:

  • A prefix of length n if the original iterator contains at least n elements
  • All of the (fewer than n) elements of the original iterator if it contains fewer than n elements.

Parameters:

Name Type Description Default
n int

Number of elements to take.

required

Example:

>>> import pyochain as pc
>>> data = [1, 2, 3]
>>> pc.Iter.from_(data).take(2).into(list)
[1, 2]
>>> pc.Iter.from_(data).take(5).into(list)
[1, 2, 3]
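
Since take is lazy, it can also draw a finite prefix from an unbounded iterator; a small sketch (assuming Iter.from_ accepts any iterator, per the class description):

```python
>>> import itertools
>>> import pyochain as pc
>>> # itertools.count() never ends; take(3) stops after three elements
>>> pc.Iter.from_(itertools.count(start=1)).take(3).into(list)
[1, 2, 3]

```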

Source code in src/pyochain/_iter/_filters.py
def take(self, n: int) -> Iter[T]:
    """
    Creates an iterator that yields the first n elements, or fewer if the underlying iterator ends sooner.

    `Iter.take(n)` yields elements until n elements are yielded or the end of the iterator is reached (whichever happens first).

    The returned iterator is either:

    - A prefix of length n if the original iterator contains at least n elements
    - All of the (fewer than n) elements of the original iterator if it contains fewer than n elements.

    Args:
        n: Number of elements to take.
    Example:
    ```python
    >>> import pyochain as pc
    >>> data = [1, 2, 3]
    >>> pc.Iter.from_(data).take(2).into(list)
    [1, 2]
    >>> pc.Iter.from_(data).take(5).into(list)
    [1, 2, 3]

    ```
    """

    return self.apply(partial(cz.itertoolz.take, n))

take_while

take_while(predicate: Callable[[T], bool]) -> Iter[T]

Take items while predicate holds.

Parameters:

Name Type Description Default
predicate Callable[[T], bool]

Function to evaluate each item.

required

Example:

>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 0]).take_while(lambda x: x > 0).into(list)
[1, 2]

Source code in src/pyochain/_iter/_filters.py
def take_while(self, predicate: Callable[[T], bool]) -> Iter[T]:
    """
    Take items while predicate holds.

    Args:
        predicate: Function to evaluate each item.
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 0]).take_while(lambda x: x > 0).into(list)
    [1, 2]

    ```
    """
    return self.apply(partial(itertools.takewhile, predicate))

top_n

top_n(n: int, key: Callable[[T], Any] | None = None) -> Seq[T]

Return a tuple of the top-n items according to key.

Parameters:

Name Type Description Default
n int

Number of top elements to return.

required
key Callable[[T], Any] | None

Function to extract a comparison key from each element. Defaults to None.

None

Example:

>>> import pyochain as pc
>>> pc.Iter.from_([1, 3, 2]).top_n(2).unwrap()
(3, 2)
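
The key argument ranks items by a derived value rather than by the items themselves; a short sketch using only methods documented here:

```python
>>> import pyochain as pc
>>> # take the two longest words, ranked by their length
>>> pc.Iter.from_(["pear", "fig", "banana"]).top_n(2, key=len).unwrap()
('banana', 'pear')

```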

Source code in src/pyochain/_iter/_eager.py
def top_n(self, n: int, key: Callable[[T], Any] | None = None) -> Seq[T]:
    """
    Return a tuple of the top-n items according to key.

    Args:
        n: Number of top elements to return.
        key: Function to extract a comparison key from each element. Defaults to None.
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 3, 2]).top_n(2).unwrap()
    (3, 2)

    ```
    """
    return self.collect(partial(cz.itertoolz.topk, n, key=key))

unfold staticmethod

unfold(seed: S, generator: Callable[[S], tuple[V, S] | None]) -> Iter[V]

Create an iterator by repeatedly applying a generator function to an initial state.

The generator function takes the current state and must return: - A tuple (value, new_state) to emit the value and continue with the new_state. - None to stop the generation.

This is functionally equivalent to a state-based while loop.

Warning ⚠️ If the generator function never returns None, it creates an infinite iterator. Be sure to use Iter.take() or Iter.slice() to limit the number of items taken if necessary.

Parameters:

Name Type Description Default
seed S

Initial state for the generator.

required
generator Callable[[S], tuple[V, S] | None]

Function that generates the next value and state.

required

Example:

>>> import pyochain as pc
>>> # Example 1: Simple counter up to 5
>>> def counter_generator(state: int) -> tuple[int, int] | None:
...     if state < 5:
...         return (state * 10, state + 1)
...     return None
>>> pc.Iter.unfold(seed=0, generator=counter_generator).into(list)
[0, 10, 20, 30, 40]
>>> # Example 2: Fibonacci sequence up to 100
>>> type FibState = tuple[int, int]
>>> def fib_generator(state: FibState) -> tuple[int, FibState] | None:
...     a, b = state
...     if a > 100:
...         return None
...     return (a, (b, a + b))
>>> pc.Iter.unfold(seed=(0, 1), generator=fib_generator).into(list)
[0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89]
>>> # Example 3: Infinite iterator (requires take())
>>> pc.Iter.unfold(seed=1, generator=lambda s: (s, s * 2)).take(5).into(list)
[1, 2, 4, 8, 16]

Source code in src/pyochain/_iter/_constructors.py
@staticmethod
def unfold[S, V](seed: S, generator: Callable[[S], tuple[V, S] | None]) -> Iter[V]:
    """
    Create an iterator by repeatedly applying a generator function to an initial state.

    The `generator` function takes the current state and must return:
        - A tuple `(value, new_state)` to emit the `value` and continue with the `new_state`.
        - `None` to stop the generation.

    This is functionally equivalent to a state-based `while` loop.

    **Warning** ⚠️
        If the `generator` function never returns `None`, it creates an infinite iterator.
        Be sure to use `Iter.take()` or `Iter.slice()` to limit the number of items taken if necessary.

    Args:
        seed: Initial state for the generator.
        generator: Function that generates the next value and state.

    Example:
    ```python
    >>> import pyochain as pc
    >>> # Example 1: Simple counter up to 5
    >>> def counter_generator(state: int) -> tuple[int, int] | None:
    ...     if state < 5:
    ...         return (state * 10, state + 1)
    ...     return None
    >>> pc.Iter.unfold(seed=0, generator=counter_generator).into(list)
    [0, 10, 20, 30, 40]
    >>> # Example 2: Fibonacci sequence up to 100
    >>> type FibState = tuple[int, int]
    >>> def fib_generator(state: FibState) -> tuple[int, FibState] | None:
    ...     a, b = state
    ...     if a > 100:
    ...         return None
    ...     return (a, (b, a + b))
    >>> pc.Iter.unfold(seed=(0, 1), generator=fib_generator).into(list)
    [0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89]
    >>> # Example 3: Infinite iterator (requires take())
    >>> pc.Iter.unfold(seed=1, generator=lambda s: (s, s * 2)).take(5).into(list)
    [1, 2, 4, 8, 16]

    ```
    """
    from ._main import Iter

    def _unfold() -> Iterator[V]:
        current_seed: S = seed
        while True:
            result: tuple[V, S] | None = generator(current_seed)
            if result is None:
                break
            value, next_seed = result
            yield value
            current_seed = next_seed

    return Iter(_unfold())

union

union(*others: Iterable[T]) -> Seq[T]

Return the union of this iterable and 'others'.

Note

This method consumes inner data and removes duplicates.

Parameters:

Name Type Description Default
*others Iterable[T]

Other iterables to include in the union.

()

Example:

>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 2]).union([2, 3], [4]).iter().sort().unwrap()
[1, 2, 3, 4]

Source code in src/pyochain/_iter/_eager.py
def union(self, *others: Iterable[T]) -> Seq[T]:
    """
    Return the union of this iterable and 'others'.

    Note:
        This method consumes inner data and removes duplicates.

    Args:
        *others: Other iterables to include in the union.
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 2]).union([2, 3], [4]).iter().sort().unwrap()
    [1, 2, 3, 4]

    ```
    """

    def _union(data: Iterable[T]) -> set[T]:
        return set(data).union(*others)

    return self.collect(_union)

unique

unique(key: Callable[[T], Any] | None = None) -> Iter[T]

Return only unique elements of the iterable.

Parameters:

Name Type Description Default
key Callable[[T], Any] | None

Function to transform items before comparison. Defaults to None.

None

Example:

>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 3]).unique().into(list)
[1, 2, 3]
>>> pc.Iter.from_([1, 2, 1, 3]).unique().into(list)
[1, 2, 3]

Uniqueness can be defined by the key keyword:

>>> pc.Iter.from_(["cat", "mouse", "dog", "hen"]).unique(key=len).into(list)
['cat', 'mouse']

Source code in src/pyochain/_iter/_filters.py
def unique(self, key: Callable[[T], Any] | None = None) -> Iter[T]:
    """
    Return only unique elements of the iterable.

    Args:
        key: Function to transform items before comparison. Defaults to None.
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 3]).unique().into(list)
    [1, 2, 3]
    >>> pc.Iter.from_([1, 2, 1, 3]).unique().into(list)
    [1, 2, 3]

    ```
    Uniqueness can be defined by the key keyword:
    ```python
    >>> pc.Iter.from_(["cat", "mouse", "dog", "hen"]).unique(key=len).into(list)
    ['cat', 'mouse']

    ```
    """
    return self.apply(cz.itertoolz.unique, key=key)

unique_in_window

unique_in_window(n: int, key: Callable[[T], Any] | None = None) -> Iter[T]

Yield the items from iterable that haven't been seen recently.

The items in iterable must be hashable.

Parameters:

Name Type Description Default
n int

Size of the lookback window.

required
key Callable[[T], Any] | None

Function to transform items before comparison. Defaults to None.

None

Example:

>>> import pyochain as pc
>>> iterable = [0, 1, 0, 2, 3, 0]
>>> n = 3
>>> pc.Iter.from_(iterable).unique_in_window(n).into(list)
[0, 1, 2, 3, 0]
The key function, if provided, will be used to determine uniqueness:
>>> pc.Iter.from_("abAcda").unique_in_window(3, key=str.lower).into(list)
['a', 'b', 'c', 'd', 'a']

Source code in src/pyochain/_iter/_filters.py
def unique_in_window(
    self, n: int, key: Callable[[T], Any] | None = None
) -> Iter[T]:
    """
    Yield the items from iterable that haven't been seen recently.

    The items in iterable must be hashable.

    Args:
        n: Size of the lookback window.
        key: Function to transform items before comparison. Defaults to None.
    Example:
    ```python
    >>> import pyochain as pc
    >>> iterable = [0, 1, 0, 2, 3, 0]
    >>> n = 3
    >>> pc.Iter.from_(iterable).unique_in_window(n).into(list)
    [0, 1, 2, 3, 0]

    ```
    The key function, if provided, will be used to determine uniqueness:
    ```python
    >>> pc.Iter.from_("abAcda").unique_in_window(3, key=str.lower).into(list)
    ['a', 'b', 'c', 'd', 'a']

    ```
    """
    return self.apply(mit.unique_in_window, n, key=key)

unique_justseen

unique_justseen(key: Callable[[T], Any] | None = None) -> Iter[T]

Yields elements in order, ignoring serial duplicates.

Parameters:

Name Type Description Default
key Callable[[T], Any] | None

Function to transform items before comparison. Defaults to None.

None

Example:

>>> import pyochain as pc
>>> pc.Iter.from_("AAAABBBCCDAABBB").unique_justseen().into(list)
['A', 'B', 'C', 'D', 'A', 'B']
>>> pc.Iter.from_("ABBCcAD").unique_justseen(str.lower).into(list)
['A', 'B', 'C', 'A', 'D']

Source code in src/pyochain/_iter/_filters.py
def unique_justseen(self, key: Callable[[T], Any] | None = None) -> Iter[T]:
    """
    Yields elements in order, ignoring serial duplicates.

    Args:
        key: Function to transform items before comparison. Defaults to None.
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_("AAAABBBCCDAABBB").unique_justseen().into(list)
    ['A', 'B', 'C', 'D', 'A', 'B']
    >>> pc.Iter.from_("ABBCcAD").unique_justseen(str.lower).into(list)
    ['A', 'B', 'C', 'A', 'D']

    ```
    """
    return self.apply(mit.unique_justseen, key=key)

unique_to_each

unique_to_each() -> Iter[list[U]]

Return the elements from each of the iterables that aren't in the other iterables.

For example, suppose you have a set of packages, each with a set of dependencies:

{'pkg_1': {'A', 'B'}, 'pkg_2': {'B', 'C'}, 'pkg_3': {'B', 'D'}}

If you remove one package, which dependencies can also be removed?

If pkg_1 is removed, then A is no longer necessary - it is not associated with pkg_2 or pkg_3.

Similarly, C is only needed for pkg_2, and D is only needed for pkg_3:

>>> import pyochain as pc
>>> data = ({"A", "B"}, {"B", "C"}, {"B", "D"})
>>> pc.Iter.from_(data).unique_to_each().collect().unwrap()
[['A'], ['C'], ['D']]

If there are duplicates in one input iterable that aren't in the others they will be duplicated in the output.

Input order is preserved:

>>> data = ("mississippi", "missouri")
>>> pc.Iter.from_(data).unique_to_each().collect().unwrap()
[['p', 'p'], ['o', 'u', 'r']]

It is assumed that the elements of each iterable are hashable.
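
Applied to the package scenario above, the per-package removable dependencies can be recovered by pairing the result back with the package names; a sketch that assumes the with_keys method documented later on this page:

```python
>>> import pyochain as pc
>>> deps = {"pkg_1": {"A", "B"}, "pkg_2": {"B", "C"}, "pkg_3": {"B", "D"}}
>>> # dependencies needed by exactly one package, keyed by that package
>>> pc.Iter.from_(deps.values()).unique_to_each().with_keys(deps.keys()).unwrap()
{'pkg_1': ['A'], 'pkg_2': ['C'], 'pkg_3': ['D']}

```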

Source code in src/pyochain/_iter/_lists.py
def unique_to_each[U: Iterable[Any]](self: IterWrapper[U]) -> Iter[list[U]]:
    """
    Return the elements from each of the iterables that aren't in the other iterables.

    For example, suppose you have a set of packages, each with a set of dependencies:

    **{'pkg_1': {'A', 'B'}, 'pkg_2': {'B', 'C'}, 'pkg_3': {'B', 'D'}}**

    If you remove one package, which dependencies can also be removed?

    If pkg_1 is removed, then A is no longer necessary - it is not associated with pkg_2 or pkg_3.

    Similarly, C is only needed for pkg_2, and D is only needed for pkg_3:
    ```python
    >>> import pyochain as pc
    >>> data = ({"A", "B"}, {"B", "C"}, {"B", "D"})
    >>> pc.Iter.from_(data).unique_to_each().collect().unwrap()
    [['A'], ['C'], ['D']]

    ```

    If there are duplicates in one input iterable that aren't in the others they will be duplicated in the output.

    Input order is preserved:
    ```python
    >>> data = ("mississippi", "missouri")
    >>> pc.Iter.from_(data).unique_to_each().collect().unwrap()
    [['p', 'p'], ['o', 'u', 'r']]

    ```

    It is assumed that the elements of each iterable are hashable.
    """

    from collections import Counter

    def _unique_to_each(data: Iterable[U]) -> Generator[list[U], None, None]:
        """from more_itertools.unique_to_each"""
        pool: list[Iterable[U]] = [it for it in data]
        counts: Counter[U] = Counter(itertools.chain.from_iterable(map(set, pool)))
        uniques: set[U] = {element for element in counts if counts[element] == 1}
        return ((list(filter(uniques.__contains__, it))) for it in pool)

    return self.apply(_unique_to_each)

unwrap

unwrap() -> T

Return the underlying data.

This is a terminal operation.
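
A small sketch: for Iter, the underlying data is the wrapped iterator, so unwrapping hands control back to plain Python iteration.

```python
>>> import pyochain as pc
>>> it = pc.Iter.from_([1, 2, 3]).unwrap()
>>> # the unwrapped object is an ordinary iterator
>>> list(it)
[1, 2, 3]

```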

Source code in src/pyochain/_core/_main.py
def unwrap(self) -> T:
    """
    Return the underlying data.

    This is a terminal operation.
    """
    return self._data

unzip

unzip() -> Unzipped[U, V]

Converts an iterator of pairs into a pair of iterators.

Iter.unzip() consumes the iterator of pairs.

Returns an Unzipped NamedTuple, containing two iterators:

  • one from the left elements of the pairs
  • one from the right elements

This function is, in some sense, the opposite of zip.

>>> import pyochain as pc
>>> data = [(1, "a"), (2, "b"), (3, "c")]
>>> unzipped = pc.Iter.from_(data).unzip()
>>> unzipped.first.into(list)
[1, 2, 3]
>>> unzipped.second.into(list)
['a', 'b', 'c']
Source code in src/pyochain/_iter/_aggregations.py
def unzip[U, V](self: IterWrapper[tuple[U, V]]) -> Unzipped[U, V]:
    """
    Converts an iterator of pairs into a pair of iterators.

    `Iter.unzip()` consumes the iterator of pairs.

    Returns an Unzipped NamedTuple, containing two iterators:

    - one from the left elements of the pairs
    - one from the right elements

    This function is, in some sense, the opposite of zip.
    ```python
    >>> import pyochain as pc
    >>> data = [(1, "a"), (2, "b"), (3, "c")]
    >>> unzipped = pc.Iter.from_(data).unzip()
    >>> unzipped.first.into(list)
    [1, 2, 3]
    >>> unzipped.second.into(list)
    ['a', 'b', 'c']

    ```
    """
    from ._main import Iter

    def _unzip(data: Iterable[tuple[U, V]]) -> Unzipped[U, V]:
        d: list[tuple[U, V]] = list(data)
        return Unzipped(Iter(x[0] for x in d), Iter(x[1] for x in d))

    return self.into(_unzip)

variance

variance() -> float

Return the variance of the sequence.

>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 3, 7, 8]).variance()
9.7

Source code in src/pyochain/_iter/_aggregations.py
def variance[U: int | float](
    self: IterWrapper[U],
) -> float:
    """
    Return the variance of the sequence.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 3, 7, 8]).variance()
    9.7

    ```
    """
    return self.into(statistics.variance)

windows

windows(length: Literal[1]) -> Iter[tuple[T]]
windows(length: Literal[2]) -> Iter[tuple[T, T]]
windows(length: Literal[3]) -> Iter[tuple[T, T, T]]
windows(length: Literal[4]) -> Iter[tuple[T, T, T, T]]
windows(length: Literal[5]) -> Iter[tuple[T, T, T, T, T]]
windows(length: int) -> Iter[tuple[T, ...]]

A sequence of overlapping subsequences of the given length.

Parameters:

Name Type Description Default
length int

The length of each window.

required

>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 3, 4]).windows(2).into(list)
[(1, 2), (2, 3), (3, 4)]

This function allows you to apply a custom function not available in the rolling namespace.

>>> def moving_average(seq: tuple[int, ...]) -> float:
...     return float(sum(seq)) / len(seq)
>>> pc.Iter.from_([1, 2, 3, 4]).windows(2).map(moving_average).into(list)
[1.5, 2.5, 3.5]

Source code in src/pyochain/_iter/_partitions.py
def windows(self, length: int) -> Iter[tuple[T, ...]]:
    """
    A sequence of overlapping subsequences of the given length.

    Args:
        length: The length of each window.
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2, 3, 4]).windows(2).into(list)
    [(1, 2), (2, 3), (3, 4)]

    ```
    This function allows you to apply a custom function not available in the rolling namespace.
    ```python
    >>> def moving_average(seq: tuple[int, ...]) -> float:
    ...     return float(sum(seq)) / len(seq)
    >>> pc.Iter.from_([1, 2, 3, 4]).windows(2).map(moving_average).into(list)
    [1.5, 2.5, 3.5]

    ```
    """
    return self.apply(partial(cz.itertoolz.sliding_window, length))

with_keys

with_keys(keys: Iterable[K]) -> Dict[K, T]

Create a Dict by zipping the iterable with keys.

Parameters:

Name Type Description Default
keys Iterable[K]

Iterable of keys to pair with the values.

required

Example:

>>> import pyochain as pc
>>> keys = ["a", "b", "c"]
>>> values = [1, 2, 3]
>>> pc.Iter.from_(values).with_keys(keys).unwrap()
{'a': 1, 'b': 2, 'c': 3}
>>> # This is equivalent to:
>>> pc.Iter.from_(keys).zip(values).pipe(
...     lambda x: pc.Dict(x.into(dict)).unwrap()
... )
{'a': 1, 'b': 2, 'c': 3}

Source code in src/pyochain/_iter/_main.py
def with_keys[K](self, keys: Iterable[K]) -> Dict[K, T]:
    """
    Create a Dict by zipping the iterable with keys.

    Args:
        keys: Iterable of keys to pair with the values.
    Example:
    ```python
    >>> import pyochain as pc
    >>> keys = ["a", "b", "c"]
    >>> values = [1, 2, 3]
    >>> pc.Iter.from_(values).with_keys(keys).unwrap()
    {'a': 1, 'b': 2, 'c': 3}
    >>> # This is equivalent to:
    >>> pc.Iter.from_(keys).zip(values).pipe(
    ...     lambda x: pc.Dict(x.into(dict)).unwrap()
    ... )
    {'a': 1, 'b': 2, 'c': 3}

    ```
    """
    from .._dict import Dict

    return Dict(dict(zip(keys, self.unwrap())))

with_values

with_values(values: Iterable[V]) -> Dict[T, V]

Create a Dict by zipping the iterable with values.

Parameters:

Name Type Description Default
values Iterable[V]

Iterable of values to pair with the keys.

required

Example:

>>> import pyochain as pc
>>> keys = [1, 2, 3]
>>> values = ["a", "b", "c"]
>>> pc.Iter.from_(keys).with_values(values).unwrap()
{1: 'a', 2: 'b', 3: 'c'}
>>> # This is equivalent to:
>>> pc.Iter.from_(keys).zip(values).pipe(
...     lambda x: pc.Dict(x.into(dict)).unwrap()
... )
{1: 'a', 2: 'b', 3: 'c'}

Source code in src/pyochain/_iter/_main.py
def with_values[V](self, values: Iterable[V]) -> Dict[T, V]:
    """
    Create a Dict by zipping the iterable with values.

    Args:
        values: Iterable of values to pair with the keys.
    Example:
    ```python
    >>> import pyochain as pc
    >>> keys = [1, 2, 3]
    >>> values = ["a", "b", "c"]
    >>> pc.Iter.from_(keys).with_values(values).unwrap()
    {1: 'a', 2: 'b', 3: 'c'}
    >>> # This is equivalent to:
    >>> pc.Iter.from_(keys).zip(values).pipe(
    ...     lambda x: pc.Dict(x.into(dict)).unwrap()
    ... )
    {1: 'a', 2: 'b', 3: 'c'}

    ```
    """
    from .._dict import Dict

    return Dict(dict(zip(self.unwrap(), values)))

zip

zip(iter1: Iterable[T1], /, *, strict: bool = ...) -> Iter[tuple[T, T1]]
zip(
    iter1: Iterable[T1], iter2: Iterable[T2], /, *, strict: bool = ...
) -> Iter[tuple[T, T1, T2]]
zip(
    iter1: Iterable[T1],
    iter2: Iterable[T2],
    iter3: Iterable[T3],
    /,
    *,
    strict: bool = ...,
) -> Iter[tuple[T, T1, T2, T3]]
zip(
    iter1: Iterable[T1],
    iter2: Iterable[T2],
    iter3: Iterable[T3],
    iter4: Iterable[T4],
    /,
    *,
    strict: bool = ...,
) -> Iter[tuple[T, T1, T2, T3, T4]]
zip(*others: Iterable[Any], strict: bool = False) -> Iter[tuple[Any, ...]]

Zip with other iterables, optionally strict.

Parameters:

Name Type Description Default
*others Iterable[Any]

Other iterables to zip with.

()
strict bool

Whether to enforce equal lengths of iterables. Defaults to False.

False

Example:

>>> import pyochain as pc
>>> pc.Iter.from_([1, 2]).zip([10, 20]).into(list)
[(1, 10), (2, 20)]
>>> pc.Iter.from_(["a", "b"]).zip([1, 2, 3]).into(list)
[('a', 1), ('b', 2)]
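
With strict=True, a length mismatch raises an error once the zipped iterator is consumed, mirroring Python's built-in zip(strict=True):

```python
>>> import pyochain as pc
>>> pc.Iter.from_([1, 2]).zip([10], strict=True).into(list)
... # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
ValueError: zip() argument 2 is shorter than argument 1

```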

Source code in src/pyochain/_iter/_joins.py
def zip(
    self, *others: Iterable[Any], strict: bool = False
) -> Iter[tuple[Any, ...]]:
    """
    Zip with other iterables, optionally strict.

    Args:
        *others: Other iterables to zip with.
        strict: Whether to enforce equal lengths of iterables. Defaults to False.
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2]).zip([10, 20]).into(list)
    [(1, 10), (2, 20)]
    >>> pc.Iter.from_(["a", "b"]).zip([1, 2, 3]).into(list)
    [('a', 1), ('b', 2)]

    ```
    """
    return self.apply(zip, *others, strict=strict)

zip_broadcast

zip_broadcast(iter1: Iterable[T1], /, *, strict: bool = False) -> Iter[tuple[T, T1]]
zip_broadcast(
    iter1: Iterable[T1], iter2: Iterable[T2], /, *, strict: bool = False
) -> Iter[tuple[T, T1, T2]]
zip_broadcast(
    iter1: Iterable[T1],
    iter2: Iterable[T2],
    iter3: Iterable[T3],
    /,
    *,
    strict: bool = False,
) -> Iter[tuple[T, T1, T2, T3]]
zip_broadcast(
    iter1: Iterable[T1],
    iter2: Iterable[T2],
    iter3: Iterable[T3],
    iter4: Iterable[T4],
    /,
    *,
    strict: bool = False,
) -> Iter[tuple[T, T1, T2, T3, T4]]
zip_broadcast(*others: Iterable[Any], strict: bool = False) -> Iter[tuple[Any, ...]]

Version of zip that "broadcasts" any scalar (i.e., non-iterable) items into output tuples.

str and bytes are not treated as iterables.

If the strict keyword argument is True, then UnequalIterablesError will be raised if any of the iterables have different lengths.

Parameters:

Name Type Description Default
*others Iterable[Any]

Other iterables or scalars to zip with.

()
strict bool

Whether to enforce equal lengths of iterables. Defaults to False.

False

Example:

>>> import pyochain as pc
>>> data = pc.Iter.from_([1, 2, 3])
>>> other = ["a", "b", "c"]
>>> scalar = "_"
>>> data.zip_broadcast(other, scalar).into(list)
[(1, 'a', '_'), (2, 'b', '_'), (3, 'c', '_')]

Source code in src/pyochain/_iter/_joins.py
def zip_broadcast(
    self, *others: Iterable[Any], strict: bool = False
) -> Iter[tuple[Any, ...]]:
    """
    Version of zip that "broadcasts" any scalar (i.e., non-iterable) items into output tuples.

    `str` and `bytes` are not treated as iterables.

    If the strict keyword argument is True, then UnequalIterablesError will be raised if any of the iterables have different lengths.

    Args:
        *others: Other iterables or scalars to zip with.
        strict: Whether to enforce equal lengths of iterables. Defaults to False.
    Example:
    ```python
    >>> import pyochain as pc
    >>> data = pc.Iter.from_([1, 2, 3])
    >>> other = ["a", "b", "c"]
    >>> scalar = "_"
    >>> data.zip_broadcast(other, scalar).into(list)
    [(1, 'a', '_'), (2, 'b', '_'), (3, 'c', '_')]

    ```
    """

    def _zip_broadcast(
        *objects: Iterable[Any],
    ) -> Generator[tuple[Iterable[Any], ...] | tuple[object, ...], Any, None]:
        """from more_itertools.zip_broadcast"""

        def is_scalar(obj: Any) -> bool:
            if isinstance(obj, (str, bytes)):
                return True
            try:
                iter(obj)
            except TypeError:
                return True
            else:
                return False

        size = len(objects)
        if not size:
            return

        new_item: list[object] = [None] * size
        iterables: list[Iterator[Any]] = []
        iterable_positions: list[int] = []
        for i, obj in enumerate(objects):
            if is_scalar(obj):
                new_item[i] = obj
            else:
                iterables.append(iter(obj))
                iterable_positions.append(i)

        if not iterables:
            yield tuple(objects)
            return

        zipper = mit.zip_equal if strict else zip
        for item in zipper(*iterables):
            for i, new_item[i] in zip(iterable_positions, item):
                pass
            yield tuple(new_item)

    return self.apply(_zip_broadcast, *others)

zip_equal

zip_equal() -> Iter[tuple[T]]
zip_equal(__iter2: Iterable[T2]) -> Iter[tuple[T, T2]]
zip_equal(__iter2: Iterable[T2], __iter3: Iterable[T3]) -> Iter[tuple[T, T2, T3]]
zip_equal(
    __iter2: Iterable[T2], __iter3: Iterable[T3], __iter4: Iterable[T4]
) -> Iter[tuple[T, T2, T3, T4]]
zip_equal(
    __iter2: Iterable[T2],
    __iter3: Iterable[T3],
    __iter4: Iterable[T4],
    __iter5: Iterable[T5],
) -> Iter[tuple[T, T2, T3, T4, T5]]
zip_equal(*others: Iterable[Any]) -> Iter[tuple[Any, ...]]

zip the input iterables together but raise UnequalIterablesError if they aren't all the same length.

Parameters:

Name Type Description Default
*others Iterable[Any]

Other iterables to zip with.

()

Example:

>>> import pyochain as pc
>>> pc.Iter.from_(range(3)).zip_equal("abc").into(list)
[(0, 'a'), (1, 'b'), (2, 'c')]
>>> pc.Iter.from_(range(3)).zip_equal("abcd").into(list)
... # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
more_itertools.more.UnequalIterablesError: Iterables have different
lengths

Source code in src/pyochain/_iter/_joins.py
def zip_equal(self, *others: Iterable[Any]) -> Iter[tuple[Any, ...]]:
    """
    `zip` the input *iterables* together but raise `UnequalIterablesError` if they aren't all the same length.

    Args:
        *others: Other iterables to zip with.
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_(range(3)).zip_equal("abc").into(list)
    [(0, 'a'), (1, 'b'), (2, 'c')]
    >>> pc.Iter.from_(range(3)).zip_equal("abcd").into(list)
    ... # doctest: +IGNORE_EXCEPTION_DETAIL
    Traceback (most recent call last):
    ...
    more_itertools.more.UnequalIterablesError: Iterables have different
    lengths

    ```
    """

    def _zip_equal(data: Iterable[T]) -> Iterator[tuple[Any, ...]]:
        return mit.zip_equal(data, *others)

    return self.apply(_zip_equal)

zip_longest

zip_longest(*others: Iterable[T], fill_value: U = None) -> Iter[tuple[U | T, ...]]

Zip with other iterables, filling missing values.

Parameters:

Name Type Description Default
*others Iterable[T]

Other iterables to zip with.

()
fill_value U

Value to use for missing elements. Defaults to None.

None

Example:

>>> import pyochain as pc
>>> pc.Iter.from_([1, 2]).zip_longest([10], fill_value=0).into(list)
[(1, 10), (2, 0)]

Source code in src/pyochain/_iter/_joins.py
def zip_longest[U](
    self, *others: Iterable[T], fill_value: U = None
) -> Iter[tuple[U | T, ...]]:
    """
    Zip with other iterables, filling missing values.

    Args:
        *others: Other iterables to zip with.
        fill_value: Value to use for missing elements. Defaults to None.
    Example:
    ```python
    >>> import pyochain as pc
    >>> pc.Iter.from_([1, 2]).zip_longest([10], fill_value=0).into(list)
    [(1, 10), (2, 0)]

    ```
    """
    return self.apply(itertools.zip_longest, *others, fillvalue=fill_value)

zip_offset

zip_offset(
    *others: Iterable[T], offsets: list[int], longest: bool = False, fillvalue: U = None
) -> Iter[tuple[T | U, ...]]

Zip the input iterables together, but offset the i-th iterable by the i-th item in offsets.

Parameters:

Name Type Description Default
*others Iterable[T]

Other iterables to zip with.

()
offsets list[int]

List of integers specifying the offsets for each iterable.

required
longest bool

Whether to continue until the longest iterable is exhausted. Defaults to False.

False
fillvalue U

Value to use for missing elements. Defaults to None.

None

Example:

>>> import pyochain as pc
>>> data = pc.Seq("0123")
>>> data.iter().zip_offset("abcdef", offsets=(0, 1)).into(list)
[('0', 'b'), ('1', 'c'), ('2', 'd'), ('3', 'e')]
This can be used as a lightweight alternative to SciPy or pandas to analyze data sets in which some series have a lead or lag relationship.

By default, the sequence will end when the shortest iterable is exhausted.

To continue until the longest iterable is exhausted, set longest to True.

>>> data.iter().zip_offset("abcdef", offsets=(0, 1), longest=True).into(list)
[('0', 'b'), ('1', 'c'), ('2', 'd'), ('3', 'e'), (None, 'f')]
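
As a sketch of the lead/lag use mentioned above: pairing each value with its successor and mapping a difference yields the step-by-step change of a series.

```python
>>> import pyochain as pc
>>> series = [3, 5, 9, 12]
>>> # offset 1 aligns every value with the one that follows it
>>> pc.Iter.from_(series).zip_offset(series, offsets=(0, 1)).map(
...     lambda pair: pair[1] - pair[0]
... ).into(list)
[2, 4, 3]

```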

Source code in src/pyochain/_iter/_joins.py
def zip_offset[U](
    self,
    *others: Iterable[T],
    offsets: list[int],
    longest: bool = False,
    fillvalue: U = None,
) -> Iter[tuple[T | U, ...]]:
    """
    Zip the input iterables together, but offset the i-th iterable by the i-th item in offsets.

    Args:
        *others: Other iterables to zip with.
        offsets: List of integers specifying the offsets for each iterable.
        longest: Whether to continue until the longest iterable is exhausted. Defaults to False.
        fillvalue: Value to use for missing elements. Defaults to None.
    Example:
    ```python
    >>> import pyochain as pc
    >>> data = pc.Seq("0123")
    >>> data.iter().zip_offset("abcdef", offsets=(0, 1)).into(list)
    [('0', 'b'), ('1', 'c'), ('2', 'd'), ('3', 'e')]

    ```
    This can be used as a lightweight alternative to SciPy or pandas to analyze data sets in which some series have a lead or lag relationship.

    By default, the sequence will end when the shortest iterable is exhausted.

    To continue until the longest iterable is exhausted, set longest to True.
    ```python
    >>> data.iter().zip_offset("abcdef", offsets=(0, 1), longest=True).into(list)
    [('0', 'b'), ('1', 'c'), ('2', 'd'), ('3', 'e'), (None, 'f')]

    ```
    """

    def _zip_offset(data: Iterable[T]) -> Iterator[tuple[T | U, ...]]:
        return mit.zip_offset(
            data,
            *others,
            offsets=offsets,
            longest=longest,
            fillvalue=fillvalue,
        )

    return self.apply(_zip_offset)