bzapi: changeset 28:ce19838a318d
we now split large batches of bugs into small segments of 10 each
author | Atul Varma <varmaa@toolness.com>
--- | ---
date | Thu, 24 Dec 2009 11:54:11 -0800
parents | f717ecd3ede1
children | 9a7052db1045
files | bzapi.py test.py
diffstat | 2 files changed, 23 insertions(+), 1 deletions(-)
--- a/bzapi.py	Thu Dec 24 11:45:06 2009 -0800
+++ b/bzapi.py	Thu Dec 24 11:54:11 2009 -0800
@@ -6,6 +6,12 @@
 import pymongo
 import simplejson as json
 
+def split_seq(seq, size):
+    """ Split up seq in pieces of size """
+
+    # Taken from http://code.activestate.com/recipes/425044/
+    return [seq[i:i+size] for i in range(0, len(seq), size)]
+
 def open_url(url, headers, query_args=None, urllib2=urllib2):
     if query_args:
         full_url = "%s?%s" % (url, urllib.urlencode(query_args))
@@ -49,6 +55,8 @@
         sanitize(item)
 
 class CachedSearch(object):
+    MAX_BUG_BATCH_SIZE = 10
+
     def __init__(self, api, collection, **kwargs):
         self.observers = []
         self.options = kwargs
@@ -104,7 +112,8 @@
         bugs_to_update = self.bugs.find({'needs_full_update': True})
         bug_ids = [bug['id'] for bug in bugs_to_update]
         if bug_ids:
-            self._retrieve_full_bugs(bug_ids)
+            for segment in split_seq(bug_ids, self.MAX_BUG_BATCH_SIZE):
+                self._retrieve_full_bugs(segment)
         self._update_last_update()
 
 class BugzillaApi(object):
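
For orientation, the batching behaviour this changeset introduces can be sketched in isolation. In the snippet below, split_seq is copied verbatim from the patch; fetch_bugs() is a hypothetical stand-in for CachedSearch._retrieve_full_bugs(), which in the real code fetches full bug data from the Bugzilla API.

```python
# Standalone sketch of the batching loop added above. split_seq is copied from
# the patch; fetch_bugs() is a hypothetical stand-in for
# CachedSearch._retrieve_full_bugs().

MAX_BUG_BATCH_SIZE = 10

def split_seq(seq, size):
    """Split up seq in pieces of size."""
    # Taken from http://code.activestate.com/recipes/425044/
    return [seq[i:i+size] for i in range(0, len(seq), size)]

def fetch_bugs(bug_ids):
    # Pretend to fetch full bug data; just report which ids would go into
    # a single request.
    print("fetching %d bugs: %r" % (len(bug_ids), bug_ids))

if __name__ == "__main__":
    pending = list(range(1, 26))  # 25 bug ids flagged needs_full_update
    for segment in split_seq(pending, MAX_BUG_BATCH_SIZE):
        fetch_bugs(segment)       # three calls: 10, 10, and 5 ids
```

Capping each call at MAX_BUG_BATCH_SIZE presumably keeps any single Bugzilla request (and its query string) from growing unboundedly when many bugs are flagged for a full update.
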
--- a/test.py	Thu Dec 24 11:45:06 2009 -0800
+++ b/test.py	Thu Dec 24 11:54:11 2009 -0800
@@ -10,6 +10,19 @@
 connection = pymongo.Connection('localhost', 27017)
 testdb = connection.bzapi_testing_db
 
+class MiscTests(unittest.TestCase):
+    def test_split_seq_with_tiny_seq(self):
+        self.assertEqual(repr(bzapi.split_seq([1], 2)),
+                         '[[1]]')
+
+    def test_split_seq_with_imperfect_split(self):
+        self.assertEqual(repr(bzapi.split_seq([1,2,3,4,5], 2)),
+                         '[[1, 2], [3, 4], [5]]')
+
+    def test_split_seq_with_perfect_split(self):
+        self.assertEqual(repr(bzapi.split_seq([1,2,3,4], 2)),
+                         '[[1, 2], [3, 4]]')
+
 class TimestampTests(unittest.TestCase):
     def test_datetime_from_iso(self):
         date = bzapi.datetime_from_iso('2009-06-11T22:31:24Z')
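
The expected values asserted in these new tests can also be cross-checked interactively; a minimal sketch, assuming bzapi.py and its imports (pymongo, simplejson) are available on the import path:

```python
# Cross-check of the test expectations above, plus the empty-input case;
# assumes bzapi.py and its dependencies are importable.
import bzapi

assert bzapi.split_seq([1], 2) == [[1]]                              # tiny sequence
assert bzapi.split_seq([1, 2, 3, 4, 5], 2) == [[1, 2], [3, 4], [5]]  # imperfect split
assert bzapi.split_seq([1, 2, 3, 4], 2) == [[1, 2], [3, 4]]          # perfect split
assert bzapi.split_seq([], 2) == []                                  # empty input yields no batches
print("split_seq behaves as expected")
```
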