- Dropped the 'meter' argument of parallel_wait(), added
  the multi_obj option instead, defaulting to None.

- progress_obj and not multi_obj case: progress_obj is used
  in compatibility mode, legacy users should not break.

- Dropped the po.failure() call: (1) progress_obj does not implement
  it, and (2) the error message is already shown by failure_callback,
  so we don't want to display it twice.
---
 urlgrabber/grabber.py |   82 ++++++++++++++++++++++++++++++++-----------------
 1 files changed, 54 insertions(+), 28 deletions(-)

diff --git a/urlgrabber/grabber.py b/urlgrabber/grabber.py
index b3071ce..b16b9a9 100644
--- a/urlgrabber/grabber.py
+++ b/urlgrabber/grabber.py
@@ -54,6 +54,21 @@ GENERAL ARGUMENTS (kwargs)
       po.update(read) # read == bytes read so far
       po.end()
 
+  multi_obj = None
+
+    a class instance that supports the following methods:
+      mo.start(total_files, total_size)
+      mo.newMeter() => meter
+      mo.removeMeter(meter)
+      mo.end()
+
   The 'meter' object is similar to progress_obj, but multiple
   instances may be created and updated at the same time.
+
   When downloading multiple files in parallel and multi_obj is None,
   progress_obj is used in compatibility mode: finished files are
   shown but there's no in-progress display.
+
   text = None
   
     specifies alternative text to be passed to the progress meter
@@ -909,6 +924,7 @@ class URLGrabberOptions:
         provided here.
         """
         self.progress_obj = None
+        self.multi_obj = None
         self.throttle = 1.0
         self.bandwidth = 0
         self.retry = None
@@ -2028,7 +2044,7 @@ class _ExternalDownloader:
             v = getattr(opts, k)
             if v is None: continue
             arg.append('%s=%s' % (k, _dumps(v)))
-        if opts.progress_obj:
+        if opts.progress_obj and opts.multi_obj:
             arg.append('progress_obj=True')
         arg = ' '.join(arg)
         if DEBUG: DEBUG.info('attempt %i/%s: %s', opts.tries, opts.retry, opts.url)
@@ -2048,7 +2064,7 @@ class _ExternalDownloader:
             line = line.split(' ', 5)
             _id, size = map(int, line[:2])
             if len(line) == 2:
-                self.running[_id].progress_obj.update(size)
+                self.running[_id].progress.update(size)
                 continue
             # job done
             opts = self.running.pop(_id)
@@ -2117,20 +2133,31 @@ class _ExternalDownloaderPool:
 
 _async_queue = []
 
-def parallel_wait(meter = 'text'):
+class CompatMeter:
+    """ Compat wrapper on top of a single-file progress meter 'obj'.
+    """
+    def __init__(self, obj):
+        self.obj = obj
+    def start(self, text):
+        self.text = text
+        self.started = time.time()
+    def end(self, amount):
+        self.obj.start(text=self.text, now=self.started)
+        self.obj.update(amount)
+        self.obj.end(amount)
+    def update(self, amount): pass
+
+def parallel_wait():
     '''Process queued requests in parallel.
     '''
 
-    if meter:
-        count = total = 0
-        for opts in _async_queue:
-            if opts.progress_obj:
-                count += 1
-                total += opts.size
-        if meter == 'text':
-            from progress import TextMultiFileMeter
-            meter = TextMultiFileMeter()
-        meter.start(count, total)
+    meters = {}
+    for opts in _async_queue:
+        if opts.progress_obj and opts.multi_obj:
+            count, total = meters.get(opts.multi_obj) or (0, 0)
+            meters[opts.multi_obj] = count + 1, total + opts.size
+    for meter in meters:
+        meter.start(*meters[meter])
 
     dl = _ExternalDownloaderPool()
     host_con = {} # current host connection counts
@@ -2139,11 +2166,10 @@ def parallel_wait(meter = 'text'):
         key, limit = opts.async
         host_con[key] = host_con.get(key, 0) + 1
         opts.tries = tries
-        if meter and opts.progress_obj:
-            opts.progress_obj = meter.newMeter()
            opts.progress_obj.start(text=opts.text, basename=os.path.basename(opts.filename))
-        else:
-            opts.progress_obj = None
+        if opts.progress_obj:
+            m = opts.multi_obj
            opts.progress = m and m.newMeter() or CompatMeter(opts.progress_obj)
+            opts.progress.start(text=opts.text)
         if DEBUG: DEBUG.info('attempt %i/%s: %s', opts.tries, opts.retry, opts.url)
         dl.start(opts)
 
@@ -2151,15 +2177,14 @@ def parallel_wait(meter = 'text'):
         for opts, size, ug_err in dl.perform():
             key, limit = opts.async
             host_con[key] -= 1
-            m = opts.progress_obj
-            if m:
-                if ug_err:
-                    m.failure(ug_err.args[1])
-                else:
-                    # file size might have changed
-                    meter.re.total += size - opts.size
-                    m.end(size)
-                meter.removeMeter(m)
+            if opts.progress_obj:
+                if not ug_err:
+                    if opts.multi_obj:
+                        # file size might have changed
+                        opts.multi_obj.re.total += size - opts.size
+                    opts.progress.end(size)
+                if opts.multi_obj:
+                    opts.multi_obj.removeMeter(opts.progress)
 
             if ug_err is None:
                 if opts.checkfunc:
@@ -2267,7 +2292,8 @@ def parallel_wait(meter = 'text'):
 
     finally:
         dl.abort()
-        if meter: meter.end()
+        for meter in meters:
+            meter.end()
         del _async_queue[:]
         _TH.save()
 
-- 
1.7.4.4

_______________________________________________
Yum-devel mailing list
[email protected]
http://lists.baseurl.org/mailman/listinfo/yum-devel

Reply via email to