On Mon, 2012-08-20 at 16:46 +0200, Zdeněk Pavlas wrote: > --- > urlgrabber/grabber.py | 82 > ++++++++++++++++++++++++++++++++----------------- > 1 files changed, 54 insertions(+), 28 deletions(-) > > diff --git a/urlgrabber/grabber.py b/urlgrabber/grabber.py > index b3071ce..b16b9a9 100644 > --- a/urlgrabber/grabber.py > +++ b/urlgrabber/grabber.py > @@ -54,6 +54,21 @@ GENERAL ARGUMENTS (kwargs) > po.update(read) # read == bytes read so far > po.end() > > + multi_obj = None
Can we call this multi_progress_obj instead? > + a class instance that supports the following methods: > + mo.start(total_files, total_size) > + mo.newMeter() => meter > + mo.removeMeter(meter) > + mo.end() > + > + The 'meter' object is similar to progress_obj, but multiple > + instances may be created and updated at the same time. > + > + When downloading multiple files in parallel and multi_obj is None > + progress_obj is used in compatibility mode: finished files are > + shown but there's no in-progress display. > + > text = None > > specifies alternative text to be passed to the progress meter > @@ -909,6 +924,7 @@ class URLGrabberOptions: > provided here. > """ > self.progress_obj = None > + self.multi_obj = None > self.throttle = 1.0 > self.bandwidth = 0 > self.retry = None > @@ -2028,7 +2044,7 @@ class _ExternalDownloader: > v = getattr(opts, k) > if v is None: continue > arg.append('%s=%s' % (k, _dumps(v))) > - if opts.progress_obj: > + if opts.progress_obj and opts.multi_obj: > arg.append('progress_obj=True') > arg = ' '.join(arg) > if DEBUG: DEBUG.info('attempt %i/%s: %s', opts.tries, opts.retry, > opts.url) > @@ -2048,7 +2064,7 @@ class _ExternalDownloader: > line = line.split(' ', 5) > _id, size = map(int, line[:2]) > if len(line) == 2: > - self.running[_id].progress_obj.update(size) > + self.running[_id].progress.update(size) See below. > continue > # job done > opts = self.running.pop(_id) > @@ -2117,20 +2133,31 @@ class _ExternalDownloaderPool: > > _async_queue = [] > > -def parallel_wait(meter = 'text'): > +def parallel_wait(): Any caller that does parallel_wait('text') or parallel_wait(meter='text') gets a traceback after this change. > +class CompatMeter: > + """ Compat wrapper on top of a single-file progress meter 'obj'. 
> + """ > + def __init__(self, obj): > + self.obj = obj > + def start(self, text): > + self.text = text > + self.started = time.time() > + def end(self, amount): > + self.obj.start(text=self.text, now=self.started) > + self.obj.update(amount) > + self.obj.end(amount) > + def update(self, amount): pass > + This should be in the progress module, and are you sure you want it to be public with that name? Maybe Single2MultiWrapperMeter()? Or maybe don't use a wrapper, and have functions somewhere? > '''Process queued requests in parallel. > ''' > > - if meter: > - count = total = 0 > - for opts in _async_queue: > - if opts.progress_obj: > - count += 1 > - total += opts.size > - if meter == 'text': > - from progress import TextMultiFileMeter > - meter = TextMultiFileMeter() > - meter.start(count, total) > + meters = {} > + for opts in _async_queue: > + if opts.progress_obj and opts.multi_obj: > + count, total = meters.get(opts.multi_obj) or (0, 0) > + meters[opts.multi_obj] = count + 1, total + opts.size > + for meter in meters: > + meter.start(*meters[meter]) > > dl = _ExternalDownloaderPool() > host_con = {} # current host connection counts > @@ -2139,11 +2166,10 @@ def parallel_wait(meter = 'text'): > key, limit = opts.async > host_con[key] = host_con.get(key, 0) + 1 > opts.tries = tries > - if meter and opts.progress_obj: > - opts.progress_obj = meter.newMeter() > - opts.progress_obj.start(text=opts.text, > basename=os.path.basename(opts.filename)) > - else: > - opts.progress_obj = None > + if opts.progress_obj: > + m = opts.multi_obj > + opts.progress = m and m.newMeter() or > CompatMeter(opts.progress_obj) Again with the magic lines of doing N things? If statements do work. Also if we want to wrap this are you sure you want self.progress to be public? On the other side it doesn't seem that bad to have a function/if at the 2-3 places self.progress is being called. 
> if DEBUG: DEBUG.info('attempt %i/%s: %s', opts.tries, opts.retry, > opts.url) > dl.start(opts) > > @@ -2151,15 +2177,14 @@ def parallel_wait(meter = 'text'): > for opts, size, ug_err in dl.perform(): > key, limit = opts.async > host_con[key] -= 1 > - m = opts.progress_obj > - if m: > - if ug_err: > - m.failure(ug_err.args[1]) > - else: > - # file size might have changed > - meter.re.total += size - opts.size > - m.end(size) > - meter.removeMeter(m) > + if opts.progress_obj: > + if not ug_err: > + if opts.multi_obj: > + # file size might have changed > + opts.multi_obj.re.total += size - opts.size > + opts.progress.end(size) > + if opts.multi_obj: > + opts.multi_obj.removeMeter(opts.progress) > > if ug_err is None: > if opts.checkfunc: _______________________________________________ Yum-devel mailing list Yum-devel@lists.baseurl.org http://lists.baseurl.org/mailman/listinfo/yum-devel