Add batch_iter to utils
There's a frequent idiom I noticed where an iterable is split up into a number of chunks/batches. Unfortunately that method does not work with iterators like dict.keys() in python3. This implementation works with iterators.

Signed-off-by: Adrian Tschira <nota@notafile.com>
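For context, here is a minimal sketch of the slice-based chunking idiom the message presumably refers to, and of why it breaks on dict views in Python 3. The data and variable names are illustrative, not taken from the Synapse codebase.

# Hypothetical illustration, not code from this commit.
data = list(range(7))

# The common chunking idiom: index-based slicing. Fine for lists...
chunks = [data[i:i + 3] for i in range(0, len(data), 3)]
print(chunks)  # [[0, 1, 2], [3, 4, 5], [6]]

# ...but dict.keys() in Python 3 returns a view, which supports neither
# len()-free iteration-by-index nor slicing, so the same idiom fails:
d = {"a": 1, "b": 2, "c": 3}
# d.keys()[0:3]  # TypeError: 'dict_keys' object is not subscriptable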
parent 08462620bf
commit 45b55e23d3

1 changed file with 18 additions and 0 deletions
@@ -20,6 +20,8 @@ from twisted.internet import defer, reactor, task
 import time
 import logging
 
+from itertools import islice
+
 logger = logging.getLogger(__name__)
 
 
@@ -79,3 +81,19 @@ class Clock(object):
         except Exception:
             if not ignore_errs:
                 raise
+
+
+def batch_iter(iterable, size):
+    """batch an iterable up into tuples with a maximum size
+
+    Args:
+        iterable (iterable): the iterable to slice
+        size (int): the maximum batch size
+
+    Returns:
+        an iterator over the chunks
+    """
+    # make sure we can deal with iterables like lists too
+    sourceiter = iter(iterable)
+    # call islice until it returns an empty tuple
+    return iter(lambda: tuple(islice(sourceiter, size)), ())
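Below is a self-contained usage sketch of the new helper. The function body is copied from the diff above; the sample data is illustrative only.

from itertools import islice


def batch_iter(iterable, size):
    """Copied from the diff above: yields tuples of at most `size` items."""
    sourceiter = iter(iterable)
    # The two-argument form of iter() keeps calling the lambda until it
    # returns the sentinel (), i.e. until islice exhausts the source.
    return iter(lambda: tuple(islice(sourceiter, size)), ())


print(list(batch_iter([1, 2, 3, 4, 5], 2)))
# [(1, 2), (3, 4), (5,)]

d = {"a": 1, "b": 2, "c": 3}
print(list(batch_iter(d.keys(), 2)))
# [('a', 'b'), ('c',)] on CPython 3.6+, where dicts preserve insertion order

Because the sentinel form of iter() only ever pulls items through islice, the helper never needs len() or slicing on its source, which is what makes it work for plain iterators and dict views as well as lists.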