
Source Code for Module portage._emirrordist.FetchIterator

# Copyright 2013 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2

import threading

from portage import os
from portage.checksum import (_apply_hash_filter,
    _filter_unaccelarated_hashes, _hash_filter)
from portage.dep import use_reduce
from portage.exception import PortageException
from .FetchTask import FetchTask

class FetchIterator(object):

    def __init__(self, config):
        self._config = config
        self._log_failure = config.log_failure
        self._terminated = threading.Event()

    def terminate(self):
        """
        Schedules early termination of the __iter__ method, which is
        useful because under some conditions it's possible for __iter__
        to loop for a long time without yielding to the caller. For
        example, it's useful when there are many ebuilds with stale
        cache and RESTRICT=mirror.

        This method is thread-safe (and safe for signal handlers).
        """
        self._terminated.set()

    def _iter_every_cp(self):
        # List categories individually, in order to start yielding quicker,
        # and in order to reduce latency in case of a signal interrupt.
        cp_all = self._config.portdb.cp_all
        for category in sorted(self._config.portdb.categories):
            for cp in cp_all(categories=(category,)):
                yield cp

    def __iter__(self):

        portdb = self._config.portdb
        get_repo_for_location = portdb.repositories.get_repo_for_location
        file_owners = self._config.file_owners
        file_failures = self._config.file_failures
        restrict_mirror_exemptions = self._config.restrict_mirror_exemptions

        hash_filter = _hash_filter(
            portdb.settings.get("PORTAGE_CHECKSUM_FILTER", ""))
        if hash_filter.transparent:
            hash_filter = None

        for cp in self._iter_every_cp():

            if self._terminated.is_set():
                return

            for tree in portdb.porttrees:

                # Reset state so the Manifest is pulled once
                # for this cp / tree combination.
                digests = None
                repo_config = get_repo_for_location(tree)

                for cpv in portdb.cp_list(cp, mytree=tree):

                    if self._terminated.is_set():
                        return

                    try:
                        restrict, = portdb.aux_get(cpv, ("RESTRICT",),
                            mytree=tree)
                    except (KeyError, PortageException) as e:
                        self._log_failure("%s\t\taux_get exception %s" %
                            (cpv, e))
                        continue

                    # Here we use matchnone=True to ignore conditional parts
                    # of RESTRICT since they don't apply unconditionally.
                    # Assume such conditionals only apply on the client side.
                    try:
                        restrict = frozenset(use_reduce(restrict,
                            flat=True, matchnone=True))
                    except PortageException as e:
                        self._log_failure("%s\t\tuse_reduce exception %s" %
                            (cpv, e))
                        continue

                    if "fetch" in restrict:
                        continue

                    try:
                        uri_map = portdb.getFetchMap(cpv)
                    except PortageException as e:
                        self._log_failure("%s\t\tgetFetchMap exception %s" %
                            (cpv, e))
                        continue

                    if not uri_map:
                        continue

                    if "mirror" in restrict:
                        skip = False
                        if restrict_mirror_exemptions is not None:
                            new_uri_map = {}
                            for filename, uri_tuple in uri_map.items():
                                for uri in uri_tuple:
                                    if uri[:9] == "mirror://":
                                        i = uri.find("/", 9)
                                        if i != -1 and uri[9:i].strip("/") in \
                                                restrict_mirror_exemptions:
                                            new_uri_map[filename] = uri_tuple
                                            break
                            if new_uri_map:
                                uri_map = new_uri_map
                            else:
                                skip = True
                        else:
                            skip = True

                        if skip:
                            continue

                    # Parse Manifest for this cp if we haven't yet.
                    if digests is None:
                        try:
                            digests = repo_config.load_manifest(
                                os.path.join(repo_config.location, cp)
                            ).getTypeDigests("DIST")
                        except (EnvironmentError, PortageException) as e:
                            for filename in uri_map:
                                self._log_failure(
                                    "%s\t%s\tManifest exception %s" %
                                    (cpv, filename, e))
                                file_failures[filename] = cpv
                            continue

                    if not digests:
                        for filename in uri_map:
                            self._log_failure("%s\t%s\tdigest entry missing" %
                                (cpv, filename))
                            file_failures[filename] = cpv
                        continue

                    for filename, uri_tuple in uri_map.items():
                        file_digests = digests.get(filename)
                        if file_digests is None:
                            self._log_failure("%s\t%s\tdigest entry missing" %
                                (cpv, filename))
                            file_failures[filename] = cpv
                            continue
                        if filename in file_owners:
                            continue
                        file_owners[filename] = cpv

                        file_digests = \
                            _filter_unaccelarated_hashes(file_digests)
                        if hash_filter is not None:
                            file_digests = _apply_hash_filter(
                                file_digests, hash_filter)

                        yield FetchTask(cpv=cpv,
                            background=True,
                            digests=file_digests,
                            distfile=filename,
                            restrict=restrict,
                            uri_tuple=uri_tuple,
                            config=self._config)

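A note on the RESTRICT=mirror handling above: the exemption test pulls the mirror group name out of a mirror:// URI by slicing between the scheme and the first path separator, and keeps the file only when that group appears in restrict_mirror_exemptions. The same check, restated as a standalone helper (the function name is hypothetical; the slicing logic is the one used above):

    def mirror_group(uri):
        # "mirror://gentoo/distfiles/foo.tar.gz" -> "gentoo"
        # Returns None for non-mirror:// URIs or URIs without a path part.
        if uri[:9] != "mirror://":
            return None
        i = uri.find("/", 9)
        if i == -1:
            return None
        return uri[9:i].strip("/") or None

    assert mirror_group("mirror://gentoo/distfiles/foo.tar.gz") == "gentoo"
    assert mirror_group("https://example.org/foo.tar.gz") is None
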
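The checksum helpers imported at the top are private to portage: _hash_filter parses the PORTAGE_CHECKSUM_FILTER setting (and reports itself transparent when the setting filters nothing, in which case __iter__ discards it), _filter_unaccelarated_hashes prefers digest types with fast implementations, and _apply_hash_filter drops digest types the filter rejects. A rough, hypothetical sketch of that last step, assuming the filter is just a set of accepted hash names:

    def apply_hash_filter_sketch(file_digests, accepted):
        # file_digests maps hash names to digest values, plus a "size" key.
        # Keep "size" and every accepted hash type, but fall back to the
        # unfiltered dict rather than leave no usable hash at all.
        filtered = {k: v for k, v in file_digests.items()
                    if k == "size" or k in accepted}
        return filtered if len(filtered) > 1 else file_digests

    digests = {"size": 642816, "SHA256": "9f86d0...", "SHA512": "ee26b0..."}
    print(apply_hash_filter_sketch(digests, {"SHA512"}))
    # {'size': 642816, 'SHA512': 'ee26b0...'}
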
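Finally, a minimal sketch of how a caller might consume this iterator. The real emirrordist driver (MirrorDistTask) feeds the yielded FetchTask objects into an asynchronous scheduler; here the tasks are only inspected, and the construction of the config object (built by emirrordist from its command-line options) is elided:

    import signal

    from portage._emirrordist.FetchIterator import FetchIterator

    def list_pending_fetches(config):
        fetch_iter = FetchIterator(config)

        # terminate() only sets a threading.Event, so wiring it to SIGINT
        # is safe; __iter__ checks the event at its loop boundaries and
        # returns early once it is set.
        signal.signal(signal.SIGINT,
                      lambda signum, frame: fetch_iter.terminate())

        for task in fetch_iter:
            # Each FetchTask carries the package (cpv), the distfile name,
            # the Manifest digests and the candidate URIs for one fetch.
            print(task.cpv, task.distfile)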