The current fetch() function is quite long, which makes it hard to
know what can be changed without adverse side effects.  Pulling this
logic out of the main function gives fetch() clearer logic and makes
the inputs to the config extraction explicit.

This block was especially complicated, so I also created the helper
functions _get_file_uri_tuples and _expand_mirror.  I would prefer
that _expand_mirror yielded plain URIs instead of (group, URI) tuples,
but we need a distinct marker for third-party URIs in order to build
third_party_mirror_uris, which is used to build primaryuri_dict, which
is used way down in fetch():

  if checksum_failure_count == \
      checksum_failure_primaryuri:
      # Switch to "primaryuri" mode in order
      # to increase the probability of success.
      primaryuris = \
          primaryuri_dict.get(myfile)
      if primaryuris:
          uri_list.extend(
              reversed(primaryuris))

I don't know if this is useful enough to justify the uglier
_expand_mirror return values, but I'll kick that can down the road for
now.
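
To make the (group, URI) shape concrete, here is roughly what a call
to _expand_mirror would yield; the mirror name and hosts below are
made up for the example:

  >>> list(_expand_mirror(
  ...     'mirror://gnu/bash/bash-4.2.tar.gz',
  ...     custom_mirrors={'gnu': ['http://mirror.example.com']},
  ...     third_party_mirrors={'gnu': ['http://ftp.example.org']}))
  [('custom', 'http://mirror.example.com/bash/bash-4.2.tar.gz'),
   ('third-party', 'http://ftp.example.org/bash/bash-4.2.tar.gz')]

_get_uris() then keeps only the URI half of each tuple for filedict,
and only the 'third-party' entries for third_party_mirror_uris, which
is what feeds primaryuri_dict.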
---
 pym/portage/package/ebuild/fetch.py | 209 ++++++++++++++++++++++--------------
 1 file changed, 128 insertions(+), 81 deletions(-)

diff --git a/pym/portage/package/ebuild/fetch.py b/pym/portage/package/ebuild/fetch.py
index de5bf00..a1940f4 100644
--- a/pym/portage/package/ebuild/fetch.py
+++ b/pym/portage/package/ebuild/fetch.py
@@ -15,9 +15,9 @@ import sys
 import tempfile
 
 try:
-       from urllib.parse import urlparse
+       from urllib.parse import urlparse, urlunparse
 except ImportError:
-       from urlparse import urlparse
+       from urlparse import urlparse, urlunparse
 
 import portage
 portage.proxy.lazyimport.lazyimport(globals(),
@@ -298,6 +298,129 @@ def _get_fetch_resume_size(settings, default='350K'):
        return v
 
 
+def _get_file_uri_tuples(uris):
+       """
+       Return a list of (filename, uri) tuples
+       """
+       file_uri_tuples = []
+       # Check for 'items' attribute since OrderedDict is not a dict.
+       if hasattr(uris, 'items'):
+               for filename, uri_set in uris.items():
+                       for uri in uri_set:
+                               file_uri_tuples.append((filename, uri))
+                       if not uri_set:
+                               file_uri_tuples.append((filename, None))
+       else:
+               for uri in uris:
+                       if urlparse(uri).scheme:
+                               file_uri_tuples.append(
+                                       (os.path.basename(uri), uri))
+                       else:
+                               file_uri_tuples.append(
+                                       (os.path.basename(uri), None))
+       return file_uri_tuples
+
+
+def _expand_mirror(uri, custom_mirrors=(), third_party_mirrors=()):
+       """
+       Replace the 'mirror://' scheme in the uri
+
+       Returns an iterable listing expanded (group, URI) tuples,
+       where the group is either 'custom' or 'third-party'.
+       """
+       parsed = urlparse(uri)
+       mirror = parsed.netloc
+       path = parsed.path
+       if path:
+               # Try user-defined mirrors first
+               if mirror in custom_mirrors:
+                       for cmirr in custom_mirrors[mirror]:
+                               m_uri = urlparse(cmirr)
+                               yield ('custom', urlunparse((
+                                       m_uri.scheme, m_uri.netloc, path) +
+                                       parsed[3:]))
+
+               # now try the official mirrors
+               if mirror in third_party_mirrors:
+                       uris = []
+                       for locmirr in third_party_mirrors[mirror]:
+                               m_uri = urlparse(locmirr)
+                               uris.append(urlunparse((
+                                       m_uri.scheme, m_uri.netloc, path) +
+                                       parsed[3:]))
+                       random.shuffle(uris)
+                       for uri in uris:
+                               yield ('third-party', uri)
+
+               if (not custom_mirrors.get(mirror, []) and
+                               not third_party_mirrors.get(mirror, [])):
+                       writemsg(
+                               _("No known mirror by the name: %s\n")
+                               % (mirror,))
+       else:
+               writemsg(_("Invalid mirror definition in SRC_URI:\n"),
+                        noiselevel=-1)
+               writemsg("  %s\n" % (uri), noiselevel=-1)
+
+
+def _get_uris(uris, settings, custom_mirrors=(), locations=()):
+       restrict = settings.get("PORTAGE_RESTRICT", "").split()
+       restrict_fetch = "fetch" in restrict
+       restrict_mirror = "mirror" in restrict or "nomirror" in restrict
+       force_mirror = (
+               "force-mirror" in settings.features and
+               not restrict_mirror)
+
+       third_party_mirrors = settings.thirdpartymirrors()
+       third_party_mirror_uris = {}
+       filedict = OrderedDict()
+       primaryuri_dict = {}
+       for filename, uri in _get_file_uri_tuples(uris=uris):
+               if filename not in filedict:
+                       filedict[filename] = [
+                               os.path.join(location, 'distfiles', filename)
+                               for location in locations]
+               if uri is None:
+                       continue
+               if uri.startswith('mirror://'):
+                       uris = list(_expand_mirror(
+                               uri=uri, custom_mirrors=custom_mirrors,
+                               third_party_mirrors=third_party_mirrors))
+                       filedict[filename].extend(uri for group, uri in uris)
+                       third_party_mirror_uris.setdefault(filename, []).extend(
+                               uri for group, uri in uris
+                               if group == 'third-party')
+               else:
+                       if restrict_fetch or force_mirror:
+                               # Only fetching from specific mirrors is allowed.
+                               continue
+                       primaryuris = primaryuri_dict.get(filename)
+                       if primaryuris is None:
+                               primaryuris = []
+                               primaryuri_dict[filename] = primaryuris
+                       primaryuris.append(uri)
+
+       # Order primaryuri_dict values to match that in SRC_URI.
+       for uris in primaryuri_dict.values():
+               uris.reverse()
+
+       # Prefer third_party_mirrors over normal mirrors in cases when
+       # the file does not yet exist on the normal mirrors.
+       for filename, uris in third_party_mirror_uris.items():
+               primaryuri_dict.setdefault(filename, []).extend(uris)
+
+       # Now merge primaryuri values into filedict (includes mirrors
+       # explicitly referenced in SRC_URI).
+       if "primaryuri" in restrict:
+               for filename, uris in filedict.items():
+                       filedict[filename] = primaryuri_dict.get(filename, []) + uris
+       else:
+               for filename in filedict:
+                       filedict[filename] += primaryuri_dict.get(filename, [])
+
+       return filedict, primaryuri_dict
+
+
 def fetch(myuris, mysettings, listonly=0, fetchonly=0,
        locks_in_subdir=".locks", use_locks=1, try_mirrors=1, digests=None,
        allow_missing_digests=True):
@@ -329,7 +452,6 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
        # couple of checksum failures, to increase the probablility
        # of success before checksum_failure_max_tries is reached.
        checksum_failure_primaryuri = 2
-       thirdpartymirrors = mysettings.thirdpartymirrors()
 
        # In the background parallel-fetch process, it's safe to skip checksum
        # verification of pre-existing files in $DISTDIR that have the correct
@@ -402,7 +524,6 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
                        del mymirrors[x]
 
        restrict_fetch = "fetch" in restrict
-       force_mirror = "force-mirror" in features and not restrict_mirror
        custom_local_mirrors = custommirrors.get("local", [])
        if restrict_fetch:
                # With fetch restriction, a normal uri may only be fetched from
@@ -413,83 +534,9 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
        else:
                locations = mymirrors
 
-       file_uri_tuples = []
-       # Check for 'items' attribute since OrderedDict is not a dict.
-       if hasattr(myuris, 'items'):
-               for myfile, uri_set in myuris.items():
-                       for myuri in uri_set:
-                               file_uri_tuples.append((myfile, myuri))
-                       if not uri_set:
-                               file_uri_tuples.append((myfile, None))
-       else:
-               for myuri in myuris:
-                       if urlparse(myuri).scheme:
-                               file_uri_tuples.append((os.path.basename(myuri), myuri))
-                       else:
-                               file_uri_tuples.append((os.path.basename(myuri), None))
-
-       filedict = OrderedDict()
-       primaryuri_dict = {}
-       thirdpartymirror_uris = {}
-       for myfile, myuri in file_uri_tuples:
-               if myfile not in filedict:
-                       filedict[myfile]=[]
-                       for y in range(0,len(locations)):
-                               filedict[myfile].append(locations[y]+"/distfiles/"+myfile)
-               if myuri is None:
-                       continue
-               if myuri[:9]=="mirror://":
-                       eidx = myuri.find("/", 9)
-                       if eidx != -1:
-                               mirrorname = myuri[9:eidx]
-                               path = myuri[eidx+1:]
-
-                               # Try user-defined mirrors first
-                               if mirrorname in custommirrors:
-                                       for cmirr in custommirrors[mirrorname]:
-                                               filedict[myfile].append(
-                                                       cmirr.rstrip("/") + "/" + path)
-
-                               # now try the official mirrors
-                               if mirrorname in thirdpartymirrors:
-                                       uris = [locmirr.rstrip("/") + "/" + path \
-                                               for locmirr in thirdpartymirrors[mirrorname]]
-                                       random.shuffle(uris)
-                                       filedict[myfile].extend(uris)
-                                       thirdpartymirror_uris.setdefault(myfile, []).extend(uris)
-
-                               if not filedict[myfile]:
-                                       writemsg(_("No known mirror by the name: %s\n") % (mirrorname))
-                       else:
-                               writemsg(_("Invalid mirror definition in SRC_URI:\n"), noiselevel=-1)
-                               writemsg("  %s\n" % (myuri), noiselevel=-1)
-               else:
-                       if restrict_fetch or force_mirror:
-                               # Only fetch from specific mirrors is allowed.
-                               continue
-                       primaryuris = primaryuri_dict.get(myfile)
-                       if primaryuris is None:
-                               primaryuris = []
-                               primaryuri_dict[myfile] = primaryuris
-                       primaryuris.append(myuri)
-
-       # Order primaryuri_dict values to match that in SRC_URI.
-       for uris in primaryuri_dict.values():
-               uris.reverse()
-
-       # Prefer thirdpartymirrors over normal mirrors in cases when
-       # the file does not yet exist on the normal mirrors.
-       for myfile, uris in thirdpartymirror_uris.items():
-               primaryuri_dict.setdefault(myfile, []).extend(uris)
-
-       # Now merge primaryuri values into filedict (includes mirrors
-       # explicitly referenced in SRC_URI).
-       if "primaryuri" in restrict:
-               for myfile, uris in filedict.items():
-                       filedict[myfile] = primaryuri_dict.get(myfile, []) + uris
-       else:
-               for myfile in filedict:
-                       filedict[myfile] += primaryuri_dict.get(myfile, [])
+       filedict, primaryuri_dict = _get_uris(
+               uris=myuris, settings=mysettings,
+               custom_mirrors=custommirrors, locations=locations)
 
        can_fetch=True
 
-- 
1.8.5.2.8.g0f6c0d1

