Did I find the right examples for you? yes no      Crawl my project      Python Jobs

All Samples(3)  |  Call(2)  |  Derive(0)  |  Import(1)
Creates a random sample from the given iterator.

        def sample_low_mem(iterator, iter_length, sample_size):
    'It makes a sample from the given iterator'
    # This implementation will use less memory when the number of sampled items
    # is quite high.
    # It requires to know the number of items beforehand

    if sample_size <= 0:
        raise SampleSizeError('No items to sample')
    elif sample_size > iter_length:
        raise SampleSizeError('Sample larger than population')

    if sample_size > iter_length / 2:
        num_items_to_select = iter_length - sample_size
        invert = True
    else:
        num_items_to_select = sample_size
        invert = False

    selected = set(random.randint(0, iter_length - 1)
                                           for n in range(num_items_to_select))
    selected_add = selected.add
    while len(selected) < num_items_to_select:
        selected_add(random.randint(0, iter_length - 1))

    for index, item in enumerate(iterator):
        item_in_selected = index in selected
        if item_in_selected and not invert or not item_in_selected and invert:
            yield item
        


src/s/e/seq_crumbs-HEAD/test/test_iterutils.py   seq_crumbs(Download)
import tempfile
 
from crumbs.iterutils import (sample, sample_low_mem, length, group_in_packets,
                              rolling_window, group_in_packets_fill_last,
                              sorted_items, unique, unique_unordered)
        num_sampled_items = 10
        for num_sampled_items in (10, 90):
            sampled_items = list(sample_low_mem(items, length_,
                                                num_sampled_items))
            self.check_sampled_items(items, sampled_items, num_sampled_items)
 
    def test_sample_too_much(self):
        for items in [range(10), []]:
            try:
                list(sample_low_mem(items, 10, 20))