
Source Code for Module portage.dbapi.porttree

   1  # Copyright 1998-2013 Gentoo Foundation 
   2  # Distributed under the terms of the GNU General Public License v2 
   3   
   4  from __future__ import unicode_literals 
   5   
   6  __all__ = [ 
   7          "close_portdbapi_caches", "FetchlistDict", "portagetree", "portdbapi" 
   8  ] 
   9   
  10  import portage 
  11  portage.proxy.lazyimport.lazyimport(globals(), 
  12          'portage.checksum', 
  13          'portage.data:portage_gid,secpass', 
  14          'portage.dbapi.dep_expand:dep_expand', 
  15          'portage.dep:Atom,dep_getkey,match_from_list,use_reduce,_match_slot', 
  16          'portage.package.ebuild.doebuild:doebuild', 
  17          'portage.util:ensure_dirs,shlex_split,writemsg,writemsg_level', 
  18          'portage.util.listdir:listdir', 
  19          'portage.versions:best,catpkgsplit,_pkgsplit@pkgsplit,ver_regexp,_pkg_str', 
  20  ) 
  21   
  22  from portage.cache import volatile 
  23  from portage.cache.cache_errors import CacheError 
  24  from portage.cache.mappings import Mapping 
  25  from portage.dbapi import dbapi 
  26  from portage.exception import PortageException, \ 
  27          FileNotFound, InvalidAtom, InvalidData, \ 
  28          InvalidDependString, InvalidPackageName 
  29  from portage.localization import _ 
  30   
  31  from portage import eclass_cache, \ 
  32          eapi_is_supported, \ 
  33          _eapi_is_deprecated 
  34  from portage import os 
  35  from portage import _encodings 
  36  from portage import _unicode_encode 
  37  from portage import OrderedDict 
  38  from portage.util._eventloop.EventLoop import EventLoop 
  39  from portage.util._eventloop.global_event_loop import global_event_loop 
  40  from _emerge.EbuildMetadataPhase import EbuildMetadataPhase 
  41   
  42  import os as _os 
  43  import sys 
  44  import traceback 
  45  import warnings 
  46   
  47  try: 
  48          from urllib.parse import urlparse 
  49  except ImportError: 
  50          from urlparse import urlparse 
  51   
  52  if sys.hexversion >= 0x3000000: 
  53          basestring = str 
  54          long = int 
  55   
  56  def close_portdbapi_caches(): 
  57          # The python interpreter does _not_ guarantee that destructors are 
  58          # called for objects that remain when the interpreter exits, so we 
  59          # use an atexit hook to call destructors for any global portdbapi 
  60          # instances that may have been constructed. 
  61          try: 
  62                  portage._legacy_globals_constructed 
  63          except AttributeError: 
  64                  pass 
  65          else: 
  66                  if "db" in portage._legacy_globals_constructed: 
  67                          try: 
  68                                  db = portage.db 
  69                          except AttributeError: 
  70                                  pass 
  71                          else: 
  72                                  if isinstance(db, dict): 
  73                                          for x in db.values(): 
  74                                                  try: 
  75                                                          if "porttree" in x.lazy_items: 
  76                                                                  continue 
  77                                                  except (AttributeError, TypeError): 
  78                                                          continue 
  79                                                  try: 
  80                                                          x = x.pop("porttree").dbapi 
  81                                                  except (AttributeError, KeyError): 
  82                                                          continue 
  83                                                  if not isinstance(x, portdbapi): 
  84                                                          continue 
  85                                                  x.close_caches() 
  86   
  87  portage.process.atexit_register(close_portdbapi_caches) 
  88   
  89  # It used to be necessary for API consumers to remove portdbapi instances 
  90  # from portdbapi_instances, in order to avoid having accumulated instances 
  91  # consume memory. Now, portdbapi_instances is just an empty dummy list, so 
  92  # for backward compatibility, ignore ValueError for removal on non-existent 
  93  # items. 
  94  class _dummy_list(list): 
  95          def remove(self, item): 
  96                  # TODO: Trigger a DeprecationWarning here, after stable portage 
  97                  # has dummy portdbapi_instances. 
  98                  try: 
  99                          list.remove(self, item) 
 100                  except ValueError: 
 101                          pass 
 102   
 103  class portdbapi(dbapi): 
 104          """this tree will scan a portage directory located at root (passed to init)""" 
 105          portdbapi_instances = _dummy_list() 
 106          _use_mutable = True 
 107   
 108          @property 
 109          def _categories(self): 
 110                  return self.settings.categories 
 111   
 112          @property 
 113          def porttree_root(self): 
 114                  return self.settings.repositories.mainRepoLocation() 
 115   
 116          @property 
 117          def eclassdb(self): 
 118                  main_repo = self.repositories.mainRepo() 
 119                  if main_repo is None: 
 120                          return None 
 121                  return main_repo.eclass_db 
 122   
 123          def __init__(self, _unused_param=DeprecationWarning, mysettings=None): 
124 """ 125 @param _unused_param: deprecated, use mysettings['PORTDIR'] instead 126 @type _unused_param: None 127 @param mysettings: an immutable config instance 128 @type mysettings: portage.config 129 """ 130 131 from portage import config 132 if mysettings: 133 self.settings = mysettings 134 else: 135 from portage import settings 136 self.settings = config(clone=settings) 137 138 if _unused_param is not DeprecationWarning: 139 warnings.warn("The first parameter of the " + \ 140 "portage.dbapi.porttree.portdbapi" + \ 141 " constructor is unused since portage-2.1.8. " + \ 142 "mysettings['PORTDIR'] is used instead.", 143 DeprecationWarning, stacklevel=2) 144 145 self.repositories = self.settings.repositories 146 self.treemap = self.repositories.treemap 147 148 # This is strictly for use in aux_get() doebuild calls when metadata 149 # is generated by the depend phase. It's safest to use a clone for 150 # this purpose because doebuild makes many changes to the config 151 # instance that is passed in. 152 self.doebuild_settings = config(clone=self.settings) 153 self.depcachedir = os.path.realpath(self.settings.depcachedir) 154 155 if os.environ.get("SANDBOX_ON") == "1": 156 # Make api consumers exempt from sandbox violations 157 # when doing metadata cache updates. 158 sandbox_write = os.environ.get("SANDBOX_WRITE", "").split(":") 159 if self.depcachedir not in sandbox_write: 160 sandbox_write.append(self.depcachedir) 161 os.environ["SANDBOX_WRITE"] = \ 162 ":".join(filter(None, sandbox_write)) 163 164 self.porttrees = list(self.settings.repositories.repoLocationList()) 165 166 # This is used as sanity check for aux_get(). If there is no 167 # root eclass dir, we assume that PORTDIR is invalid or 168 # missing. This check allows aux_get() to detect a missing 169 # portage tree and return early by raising a KeyError. 170 self._have_root_eclass_dir = os.path.isdir( 171 os.path.join(self.settings.repositories.mainRepoLocation(), "eclass")) 172 173 #if the portdbapi is "frozen", then we assume that we can cache everything (that no updates to it are happening) 174 self.xcache = {} 175 self.frozen = 0 176 177 #Keep a list of repo names, sorted by priority (highest priority first). 178 self._ordered_repo_name_list = tuple(reversed(self.repositories.prepos_order)) 179 180 self.auxdbmodule = self.settings.load_best_module("portdbapi.auxdbmodule") 181 self.auxdb = {} 182 self._pregen_auxdb = {} 183 # If the current user doesn't have depcachedir write permission, 184 # then the depcachedir cache is kept here read-only access. 185 self._ro_auxdb = {} 186 self._init_cache_dirs() 187 try: 188 depcachedir_st = os.stat(self.depcachedir) 189 depcachedir_w_ok = os.access(self.depcachedir, os.W_OK) 190 except OSError: 191 depcachedir_st = None 192 depcachedir_w_ok = False 193 194 cache_kwargs = {} 195 196 depcachedir_unshared = False 197 if portage.data.secpass < 1 and \ 198 depcachedir_w_ok and \ 199 depcachedir_st is not None and \ 200 os.getuid() == depcachedir_st.st_uid and \ 201 os.getgid() == depcachedir_st.st_gid: 202 # If this user owns depcachedir and is not in the 203 # portage group, then don't bother to set permissions 204 # on cache entries. This makes it possible to run 205 # egencache without any need to be a member of the 206 # portage group. 
207 depcachedir_unshared = True 208 else: 209 cache_kwargs.update(portage._native_kwargs({ 210 'gid' : portage_gid, 211 'perms' : 0o664 212 })) 213 214 # If secpass < 1, we don't want to write to the cache 215 # since then we won't be able to apply group permissions 216 # to the cache entries/directories. 217 if (secpass < 1 and not depcachedir_unshared) or not depcachedir_w_ok: 218 for x in self.porttrees: 219 self.auxdb[x] = volatile.database( 220 self.depcachedir, x, self._known_keys, 221 **cache_kwargs) 222 try: 223 self._ro_auxdb[x] = self.auxdbmodule(self.depcachedir, x, 224 self._known_keys, readonly=True, **cache_kwargs) 225 except CacheError: 226 pass 227 else: 228 for x in self.porttrees: 229 if x in self.auxdb: 230 continue 231 # location, label, auxdbkeys 232 self.auxdb[x] = self.auxdbmodule( 233 self.depcachedir, x, self._known_keys, **cache_kwargs) 234 if "metadata-transfer" not in self.settings.features: 235 for x in self.porttrees: 236 if x in self._pregen_auxdb: 237 continue 238 cache = self._create_pregen_cache(x) 239 if cache is not None: 240 self._pregen_auxdb[x] = cache 241 # Selectively cache metadata in order to optimize dep matching. 242 self._aux_cache_keys = set( 243 ["DEPEND", "EAPI", "HDEPEND", 244 "INHERITED", "IUSE", "KEYWORDS", "LICENSE", 245 "PDEPEND", "PROPERTIES", "PROVIDE", "RDEPEND", "repository", 246 "RESTRICT", "SLOT", "DEFINED_PHASES", "REQUIRED_USE"]) 247 248 self._aux_cache = {} 249 self._broken_ebuilds = set()
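
A private portdbapi instance can also be constructed outside of the legacy portage.db globals. The sketch below is illustrative only; it assumes a normally configured system-wide Portage installation, and the config clone mirrors what the constructor itself does for doebuild_settings:

    import portage
    from portage.dbapi.porttree import portdbapi

    # Clone the global config so this instance cannot mutate it.
    settings = portage.config(clone=portage.settings)
    portdb = portdbapi(mysettings=settings)
    print(portdb.porttree_root)        # location of the main repository
    print(portdb.getRepositories())    # repo names, highest priority first
    portdb.close_caches()              # flush metadata caches when done
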
250 251 @property
252 - def _event_loop(self):
253 if portage._internal_caller: 254 # For internal portage usage, the global_event_loop is safe. 255 return global_event_loop() 256 else: 257 # For external API consumers, use a local EventLoop, since 258 # we don't want to assume that it's safe to override the 259 # global SIGCHLD handler. 260 return EventLoop(main=False)
261
262 - def _create_pregen_cache(self, tree):
263 conf = self.repositories.get_repo_for_location(tree) 264 cache = conf.get_pregenerated_cache( 265 self._known_keys, readonly=True) 266 if cache is not None: 267 try: 268 cache.ec = self.repositories.get_repo_for_location(tree).eclass_db 269 except AttributeError: 270 pass 271 272 if not cache.complete_eclass_entries: 273 warnings.warn( 274 ("Repository '%s' used deprecated 'pms' cache format. " 275 "Please migrate to 'md5-dict' format.") % (conf.name,), 276 DeprecationWarning) 277 278 return cache
279
280 - def _init_cache_dirs(self):
281 """Create /var/cache/edb/dep and adjust permissions for the portage 282 group.""" 283 284 dirmode = 0o2070 285 modemask = 0o2 286 287 try: 288 ensure_dirs(self.depcachedir, gid=portage_gid, 289 mode=dirmode, mask=modemask) 290 except PortageException: 291 pass
292
293 - def close_caches(self):
294 if not hasattr(self, "auxdb"): 295 # unhandled exception thrown from constructor 296 return 297 for x in self.auxdb: 298 self.auxdb[x].sync() 299 self.auxdb.clear()
300
301 - def flush_cache(self):
302 for x in self.auxdb.values(): 303 x.sync()
304
305 - def findLicensePath(self, license_name):
306 for x in reversed(self.porttrees): 307 license_path = os.path.join(x, "licenses", license_name) 308 if os.access(license_path, os.R_OK): 309 return license_path 310 return None
311
312 - def findname(self,mycpv, mytree = None, myrepo = None):
313 return self.findname2(mycpv, mytree, myrepo)[0]
314
315 - def getRepositoryPath(self, repository_id):
316 """ 317 This function is required for GLEP 42 compliance; given a valid repository ID 318 it must return a path to the repository 319 TreeMap = { id:path } 320 """ 321 return self.treemap.get(repository_id)
322
 323          def getRepositoryName(self, canonical_repo_path): 
 324                  """ 
 325                  This is the inverse of getRepositoryPath(). 
 326                  @param canonical_repo_path: the canonical path of a repository, as 
 327                          resolved by os.path.realpath() 
 328                  @type canonical_repo_path: String 
 329                  @return: The repo_name for the corresponding repository, or None 
 330                          if the path does not correspond to a known repository 
 331                  @rtype: String or None 
 332                  """ 
 333                  try: 
 334                          return self.repositories.get_name_for_location(canonical_repo_path) 
 335                  except KeyError: 
 336                          return None 
337
338 - def getRepositories(self):
339 """ 340 This function is required for GLEP 42 compliance; it will return a list of 341 repository IDs 342 TreeMap = {id: path} 343 """ 344 return self._ordered_repo_name_list
345
346 - def getMissingRepoNames(self):
347 """ 348 Returns a list of repository paths that lack profiles/repo_name. 349 """ 350 return self.settings.repositories.missing_repo_names
351
352 - def getIgnoredRepos(self):
353 """ 354 Returns a list of repository paths that have been ignored, because 355 another repo with the same name exists. 356 """ 357 return self.settings.repositories.ignored_repos
358
 359          def findname2(self, mycpv, mytree=None, myrepo=None): 
 360                  """ 
 361                  Returns the location of the CPV, and what overlay it was in. 
 362                  Searches overlays first, then PORTDIR; this allows us to return the first 
 363                  matching file. If we started in PORTDIR and did the overlays second, we 
 364                  would have to exhaustively search the overlays until we found the file we 
 365                  wanted. 
 366                  If myrepo is not None it will find packages from this repository (overlay). 
 367                  """ 
 368                  if not mycpv: 
 369                          return (None, 0) 
 370   
 371                  if myrepo is not None: 
 372                          mytree = self.treemap.get(myrepo) 
 373                          if mytree is None: 
 374                                  return (None, 0) 
 375   
 376                  mysplit = mycpv.split("/") 
 377                  psplit = pkgsplit(mysplit[1]) 
 378                  if psplit is None or len(mysplit) != 2: 
 379                          raise InvalidPackageName(mycpv) 
 380   
 381                  # For optimal performance in this hot spot, we do manual unicode 
 382                  # handling here instead of using the wrapped os module. 
 383                  encoding = _encodings['fs'] 
 384                  errors = 'strict' 
 385   
 386                  if mytree: 
 387                          mytrees = [mytree] 
 388                  else: 
 389                          mytrees = reversed(self.porttrees) 
 390   
 391                  relative_path = mysplit[0] + _os.sep + psplit[0] + _os.sep + \ 
 392                          mysplit[1] + ".ebuild" 
 393   
 394                  for x in mytrees: 
 395                          filename = x + _os.sep + relative_path 
 396                          if _os.access(_unicode_encode(filename, 
 397                                  encoding=encoding, errors=errors), _os.R_OK): 
 398                                  return (filename, x) 
 399                  return (None, 0) 
400
401 - def _write_cache(self, cpv, repo_path, metadata, ebuild_hash):
402 403 try: 404 cache = self.auxdb[repo_path] 405 chf = cache.validation_chf 406 metadata['_%s_' % chf] = getattr(ebuild_hash, chf) 407 except CacheError: 408 # Normally this shouldn't happen, so we'll show 409 # a traceback for debugging purposes. 410 traceback.print_exc() 411 cache = None 412 413 if cache is not None: 414 try: 415 cache[cpv] = metadata 416 except CacheError: 417 # Normally this shouldn't happen, so we'll show 418 # a traceback for debugging purposes. 419 traceback.print_exc()
420
421 - def _pull_valid_cache(self, cpv, ebuild_path, repo_path):
422 try: 423 ebuild_hash = eclass_cache.hashed_path(ebuild_path) 424 # snag mtime since we use it later, and to trigger stat failure 425 # if it doesn't exist 426 ebuild_hash.mtime 427 except FileNotFound: 428 writemsg(_("!!! aux_get(): ebuild for " \ 429 "'%s' does not exist at:\n") % (cpv,), noiselevel=-1) 430 writemsg("!!! %s\n" % ebuild_path, noiselevel=-1) 431 raise KeyError(cpv) 432 433 # Pull pre-generated metadata from the metadata/cache/ 434 # directory if it exists and is valid, otherwise fall 435 # back to the normal writable cache. 436 auxdbs = [] 437 pregen_auxdb = self._pregen_auxdb.get(repo_path) 438 if pregen_auxdb is not None: 439 auxdbs.append(pregen_auxdb) 440 ro_auxdb = self._ro_auxdb.get(repo_path) 441 if ro_auxdb is not None: 442 auxdbs.append(ro_auxdb) 443 auxdbs.append(self.auxdb[repo_path]) 444 eclass_db = self.repositories.get_repo_for_location(repo_path).eclass_db 445 446 for auxdb in auxdbs: 447 try: 448 metadata = auxdb[cpv] 449 except KeyError: 450 continue 451 except CacheError: 452 if not auxdb.readonly: 453 try: 454 del auxdb[cpv] 455 except (KeyError, CacheError): 456 pass 457 continue 458 eapi = metadata.get('EAPI', '').strip() 459 if not eapi: 460 eapi = '0' 461 metadata['EAPI'] = eapi 462 if not eapi_is_supported(eapi): 463 # Since we're supposed to be able to efficiently obtain the 464 # EAPI from _parse_eapi_ebuild_head, we disregard cache entries 465 # for unsupported EAPIs. 466 continue 467 if auxdb.validate_entry(metadata, ebuild_hash, eclass_db): 468 break 469 else: 470 metadata = None 471 472 return (metadata, ebuild_hash)
473
 474          def aux_get(self, mycpv, mylist, mytree=None, myrepo=None): 
 475                  "stub code for returning auxiliary db information, such as SLOT, DEPEND, etc." 
 476                  'input: "sys-apps/foo-1.0",["SLOT","DEPEND","HOMEPAGE"]' 
 477                  'return: ["0",">=sys-libs/bar-1.0","http://www.foo.com"] or raise KeyError if error' 
 478                  cache_me = False 
 479                  if myrepo is not None: 
 480                          mytree = self.treemap.get(myrepo) 
 481                          if mytree is None: 
 482                                  raise KeyError(myrepo) 
 483   
 484                  if mytree is not None and len(self.porttrees) == 1 \ 
 485                          and mytree == self.porttrees[0]: 
 486                          # mytree matches our only tree, so it's safe to 
 487                          # ignore mytree and cache the result 
 488                          mytree = None 
 489                          myrepo = None 
 490   
 491                  if mytree is None: 
 492                          cache_me = True 
 493                  if mytree is None and not self._known_keys.intersection( 
 494                          mylist).difference(self._aux_cache_keys): 
 495                          aux_cache = self._aux_cache.get(mycpv) 
 496                          if aux_cache is not None: 
 497                                  return [aux_cache.get(x, "") for x in mylist] 
 498                          cache_me = True 
 499   
 500                  try: 
 501                          cat, pkg = mycpv.split("/", 1) 
 502                  except ValueError: 
 503                          # Missing slash. Can't find ebuild so raise KeyError. 
 504                          raise KeyError(mycpv) 
 505   
 506                  myebuild, mylocation = self.findname2(mycpv, mytree) 
 507   
 508                  if not myebuild: 
 509                          writemsg("!!! aux_get(): %s\n" % \ 
 510                                  _("ebuild not found for '%s'") % mycpv, noiselevel=1) 
 511                          raise KeyError(mycpv) 
 512   
 513                  mydata, ebuild_hash = self._pull_valid_cache(mycpv, myebuild, mylocation) 
 514                  doregen = mydata is None 
 515   
 516                  if doregen: 
 517                          if myebuild in self._broken_ebuilds: 
 518                                  raise KeyError(mycpv) 
 519   
 520                          proc = EbuildMetadataPhase(cpv=mycpv, 
 521                                  ebuild_hash=ebuild_hash, portdb=self, 
 522                                  repo_path=mylocation, scheduler=self._event_loop, 
 523                                  settings=self.doebuild_settings) 
 524   
 525                          proc.start() 
 526                          proc.wait() 
 527   
 528                          if proc.returncode != os.EX_OK: 
 529                                  self._broken_ebuilds.add(myebuild) 
 530                                  raise KeyError(mycpv) 
 531   
 532                          mydata = proc.metadata 
 533   
 534                  mydata["repository"] = self.repositories.get_name_for_location(mylocation) 
 535                  mydata["_mtime_"] = ebuild_hash.mtime 
 536                  eapi = mydata.get("EAPI") 
 537                  if not eapi: 
 538                          eapi = "0" 
 539                          mydata["EAPI"] = eapi 
 540                  if eapi_is_supported(eapi): 
 541                          mydata["INHERITED"] = " ".join(mydata.get("_eclasses_", [])) 
 542   
 543                  # finally, we look at our internal cache entry and return the requested data. 
 544                  returnme = [mydata.get(x, "") for x in mylist] 
 545   
 546                  if cache_me: 
 547                          aux_cache = {} 
 548                          for x in self._aux_cache_keys: 
 549                                  aux_cache[x] = mydata.get(x, "") 
 550                          self._aux_cache[mycpv] = aux_cache 
 551   
 552                  return returnme 
553
 554          def getFetchMap(self, mypkg, useflags=None, mytree=None): 
 555                  """ 
 556                  Get the SRC_URI metadata as a dict which maps each file name to a 
 557                  set of alternative URIs. 
 558   
 559                  @param mypkg: cpv for an ebuild 
 560                  @type mypkg: String 
 561                  @param useflags: a collection of enabled USE flags, for evaluation of 
 562                          conditionals 
 563                  @type useflags: set, or None to enable all conditionals 
 564                  @param mytree: The canonical path of the tree in which the ebuild 
 565                          is located, or None for automatic lookup 
 566                  @type mytree: String 
 567                  @return: A dict which maps each file name to a set of alternative 
 568                          URIs. 
 569                  @rtype: dict 
 570                  """ 
 571   
 572                  try: 
 573                          eapi, myuris = self.aux_get(mypkg, 
 574                                  ["EAPI", "SRC_URI"], mytree=mytree) 
 575                  except KeyError: 
 576                          # Convert this to an InvalidDependString exception since callers 
 577                          # already handle it. 
 578                          raise portage.exception.InvalidDependString( 
 579                                  "getFetchMap(): aux_get() error reading " + mypkg + "; aborting.") 
 580   
 581                  if not eapi_is_supported(eapi): 
 582                          # Convert this to an InvalidDependString exception 
 583                          # since callers already handle it. 
 584                          raise portage.exception.InvalidDependString( 
 585                                  "getFetchMap(): '%s' has unsupported EAPI: '%s'" % \ 
 586                                  (mypkg, eapi)) 
 587   
 588                  return _parse_uri_map(mypkg, {'EAPI': eapi, 'SRC_URI': myuris}, 
 589                          use=useflags) 
590
591 - def getfetchsizes(self, mypkg, useflags=None, debug=0, myrepo=None):
592 # returns a filename:size dictionnary of remaining downloads 593 myebuild, mytree = self.findname2(mypkg, myrepo=myrepo) 594 if myebuild is None: 595 raise AssertionError(_("ebuild not found for '%s'") % mypkg) 596 pkgdir = os.path.dirname(myebuild) 597 mf = self.repositories.get_repo_for_location( 598 os.path.dirname(os.path.dirname(pkgdir))).load_manifest( 599 pkgdir, self.settings["DISTDIR"]) 600 checksums = mf.getDigests() 601 if not checksums: 602 if debug: 603 writemsg(_("[empty/missing/bad digest]: %s\n") % (mypkg,)) 604 return {} 605 filesdict={} 606 myfiles = self.getFetchMap(mypkg, useflags=useflags, mytree=mytree) 607 #XXX: maybe this should be improved: take partial downloads 608 # into account? check checksums? 609 for myfile in myfiles: 610 try: 611 fetch_size = int(checksums[myfile]["size"]) 612 except (KeyError, ValueError): 613 if debug: 614 writemsg(_("[bad digest]: missing %(file)s for %(pkg)s\n") % {"file":myfile, "pkg":mypkg}) 615 continue 616 file_path = os.path.join(self.settings["DISTDIR"], myfile) 617 mystat = None 618 try: 619 mystat = os.stat(file_path) 620 except OSError: 621 pass 622 if mystat is None: 623 existing_size = 0 624 ro_distdirs = self.settings.get("PORTAGE_RO_DISTDIRS") 625 if ro_distdirs is not None: 626 for x in shlex_split(ro_distdirs): 627 try: 628 mystat = os.stat(os.path.join(x, myfile)) 629 except OSError: 630 pass 631 else: 632 if mystat.st_size == fetch_size: 633 existing_size = fetch_size 634 break 635 else: 636 existing_size = mystat.st_size 637 remaining_size = fetch_size - existing_size 638 if remaining_size > 0: 639 # Assume the download is resumable. 640 filesdict[myfile] = remaining_size 641 elif remaining_size < 0: 642 # The existing file is too large and therefore corrupt. 643 filesdict[myfile] = int(checksums[myfile]["size"]) 644 return filesdict
645
646 - def fetch_check(self, mypkg, useflags=None, mysettings=None, all=False, myrepo=None):
647 """ 648 TODO: account for PORTAGE_RO_DISTDIRS 649 """ 650 if all: 651 useflags = None 652 elif useflags is None: 653 if mysettings: 654 useflags = mysettings["USE"].split() 655 if myrepo is not None: 656 mytree = self.treemap.get(myrepo) 657 if mytree is None: 658 return False 659 else: 660 mytree = None 661 662 myfiles = self.getFetchMap(mypkg, useflags=useflags, mytree=mytree) 663 myebuild = self.findname(mypkg, myrepo=myrepo) 664 if myebuild is None: 665 raise AssertionError(_("ebuild not found for '%s'") % mypkg) 666 pkgdir = os.path.dirname(myebuild) 667 mf = self.repositories.get_repo_for_location( 668 os.path.dirname(os.path.dirname(pkgdir))) 669 mf = mf.load_manifest(pkgdir, self.settings["DISTDIR"]) 670 mysums = mf.getDigests() 671 672 failures = {} 673 for x in myfiles: 674 if not mysums or x not in mysums: 675 ok = False 676 reason = _("digest missing") 677 else: 678 try: 679 ok, reason = portage.checksum.verify_all( 680 os.path.join(self.settings["DISTDIR"], x), mysums[x]) 681 except FileNotFound as e: 682 ok = False 683 reason = _("File Not Found: '%s'") % (e,) 684 if not ok: 685 failures[x] = reason 686 if failures: 687 return False 688 return True
689
690 - def cpv_exists(self, mykey, myrepo=None):
691 "Tells us whether an actual ebuild exists on disk (no masking)" 692 cps2 = mykey.split("/") 693 cps = catpkgsplit(mykey, silent=0) 694 if not cps: 695 #invalid cat/pkg-v 696 return 0 697 if self.findname(cps[0] + "/" + cps2[1], myrepo=myrepo): 698 return 1 699 else: 700 return 0
701
702 - def cp_all(self, categories=None, trees=None, reverse=False):
703 """ 704 This returns a list of all keys in our tree or trees 705 @param categories: optional list of categories to search or 706 defaults to self.settings.categories 707 @param trees: optional list of trees to search the categories in or 708 defaults to self.porttrees 709 @param reverse: reverse sort order (default is False) 710 @rtype list of [cat/pkg,...] 711 """ 712 d = {} 713 if categories is None: 714 categories = self.settings.categories 715 if trees is None: 716 trees = self.porttrees 717 for x in categories: 718 for oroot in trees: 719 for y in listdir(oroot+"/"+x, EmptyOnError=1, ignorecvs=1, dirsonly=1): 720 try: 721 atom = Atom("%s/%s" % (x, y)) 722 except InvalidAtom: 723 continue 724 if atom != atom.cp: 725 continue 726 d[atom.cp] = None 727 l = list(d) 728 l.sort(reverse=reverse) 729 return l
730
731 - def cp_list(self, mycp, use_cache=1, mytree=None):
732 # NOTE: Cache can be safely shared with the match cache, since the 733 # match cache uses the result from dep_expand for the cache_key. 734 if self.frozen and mytree is not None \ 735 and len(self.porttrees) == 1 \ 736 and mytree == self.porttrees[0]: 737 # mytree matches our only tree, so it's safe to 738 # ignore mytree and cache the result 739 mytree = None 740 741 if self.frozen and mytree is None: 742 cachelist = self.xcache["cp-list"].get(mycp) 743 if cachelist is not None: 744 # Try to propagate this to the match-all cache here for 745 # repoman since he uses separate match-all caches for each 746 # profile (due to differences in _get_implicit_iuse). 747 self.xcache["match-all"][(mycp, mycp)] = cachelist 748 return cachelist[:] 749 mysplit = mycp.split("/") 750 invalid_category = mysplit[0] not in self._categories 751 d={} 752 if mytree is not None: 753 if isinstance(mytree, basestring): 754 mytrees = [mytree] 755 else: 756 # assume it's iterable 757 mytrees = mytree 758 else: 759 mytrees = self.porttrees 760 for oroot in mytrees: 761 try: 762 file_list = os.listdir(os.path.join(oroot, mycp)) 763 except OSError: 764 continue 765 for x in file_list: 766 pf = None 767 if x[-7:] == '.ebuild': 768 pf = x[:-7] 769 770 if pf is not None: 771 ps = pkgsplit(pf) 772 if not ps: 773 writemsg(_("\nInvalid ebuild name: %s\n") % \ 774 os.path.join(oroot, mycp, x), noiselevel=-1) 775 continue 776 if ps[0] != mysplit[1]: 777 writemsg(_("\nInvalid ebuild name: %s\n") % \ 778 os.path.join(oroot, mycp, x), noiselevel=-1) 779 continue 780 ver_match = ver_regexp.match("-".join(ps[1:])) 781 if ver_match is None or not ver_match.groups(): 782 writemsg(_("\nInvalid ebuild version: %s\n") % \ 783 os.path.join(oroot, mycp, x), noiselevel=-1) 784 continue 785 d[_pkg_str(mysplit[0]+"/"+pf)] = None 786 if invalid_category and d: 787 writemsg(_("\n!!! '%s' has a category that is not listed in " \ 788 "%setc/portage/categories\n") % \ 789 (mycp, self.settings["PORTAGE_CONFIGROOT"]), noiselevel=-1) 790 mylist = [] 791 else: 792 mylist = list(d) 793 # Always sort in ascending order here since it's handy 794 # and the result can be easily cached and reused. 795 self._cpv_sort_ascending(mylist) 796 if self.frozen and mytree is None: 797 cachelist = mylist[:] 798 self.xcache["cp-list"][mycp] = cachelist 799 self.xcache["match-all"][(mycp, mycp)] = cachelist 800 return mylist
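
Together, cp_all() and cp_list() enumerate the tree; a sketch counting ebuilds in a single category (restricting the category list keeps this reasonably fast on a full tree; the category name is only an example):

    import portage

    portdb = portage.db[portage.root]["porttree"].dbapi
    total = 0
    for cp in portdb.cp_all(categories=["app-editors"]):
        total += len(portdb.cp_list(cp))
    print("%d ebuilds under app-editors" % total)
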
801
802 - def freeze(self):
803 for x in "bestmatch-visible", "cp-list", "match-all", \ 804 "match-all-cpv-only", "match-visible", "minimum-all", \ 805 "minimum-visible": 806 self.xcache[x]={} 807 self.frozen=1
808
809 - def melt(self):
810 self.xcache = {} 811 self.frozen = 0
812
813 - def xmatch(self,level,origdep,mydep=None,mykey=None,mylist=None):
814 "caching match function; very trick stuff" 815 if level == "list-visible": 816 level = "match-visible" 817 warnings.warn("The 'list-visible' mode of " 818 "portage.dbapi.porttree.portdbapi.xmatch " 819 "has been renamed to match-visible", 820 DeprecationWarning, stacklevel=2) 821 822 if mydep is None: 823 #this stuff only runs on first call of xmatch() 824 #create mydep, mykey from origdep 825 mydep = dep_expand(origdep, mydb=self, settings=self.settings) 826 mykey = mydep.cp 827 828 #if no updates are being made to the tree, we can consult our xcache... 829 cache_key = None 830 if self.frozen: 831 cache_key = (mydep, mydep.unevaluated_atom) 832 try: 833 return self.xcache[level][cache_key][:] 834 except KeyError: 835 pass 836 837 myval = None 838 mytree = None 839 if mydep.repo is not None: 840 mytree = self.treemap.get(mydep.repo) 841 if mytree is None: 842 if level.startswith("match-"): 843 myval = [] 844 else: 845 myval = "" 846 847 if myval is not None: 848 # Unknown repo, empty result. 849 pass 850 elif level == "match-all-cpv-only": 851 # match *all* packages, only against the cpv, in order 852 # to bypass unnecessary cache access for things like IUSE 853 # and SLOT. 854 if mydep == mykey: 855 # Share cache with match-all/cp_list when the result is the 856 # same. Note that this requires that mydep.repo is None and 857 # thus mytree is also None. 858 level = "match-all" 859 myval = self.cp_list(mykey, mytree=mytree) 860 else: 861 myval = match_from_list(mydep, 862 self.cp_list(mykey, mytree=mytree)) 863 864 elif level in ("bestmatch-visible", "match-all", "match-visible", 865 "minimum-all", "minimum-visible"): 866 # Find the minimum matching visible version. This is optimized to 867 # minimize the number of metadata accesses (improves performance 868 # especially in cases where metadata needs to be generated). 869 if mydep == mykey: 870 mylist = self.cp_list(mykey, mytree=mytree) 871 else: 872 mylist = match_from_list(mydep, 873 self.cp_list(mykey, mytree=mytree)) 874 875 visibility_filter = level not in ("match-all", "minimum-all") 876 single_match = level not in ("match-all", "match-visible") 877 myval = [] 878 aux_keys = list(self._aux_cache_keys) 879 if level == "bestmatch-visible": 880 iterfunc = reversed 881 else: 882 iterfunc = iter 883 884 if mydep.repo is not None: 885 repos = [mydep.repo] 886 else: 887 # We iterate over self.porttrees, since it's common to 888 # tweak this attribute in order to adjust match behavior. 
889 repos = [] 890 for tree in reversed(self.porttrees): 891 repos.append(self.repositories.get_name_for_location(tree)) 892 893 for cpv in iterfunc(mylist): 894 for repo in repos: 895 try: 896 metadata = dict(zip(aux_keys, 897 self.aux_get(cpv, aux_keys, myrepo=repo))) 898 except KeyError: 899 # ebuild not in this repo, or masked by corruption 900 continue 901 902 try: 903 pkg_str = _pkg_str(cpv, metadata=metadata, 904 settings=self.settings) 905 except InvalidData: 906 continue 907 908 if visibility_filter and not self._visible(pkg_str, metadata): 909 continue 910 911 if mydep.slot is not None and \ 912 not _match_slot(mydep, pkg_str): 913 continue 914 915 if mydep.unevaluated_atom.use is not None and \ 916 not self._match_use(mydep, pkg_str, metadata): 917 continue 918 919 myval.append(pkg_str) 920 # only yield a given cpv once 921 break 922 923 if myval and single_match: 924 break 925 926 if single_match: 927 if myval: 928 myval = myval[0] 929 else: 930 myval = "" 931 932 elif level == "bestmatch-list": 933 #dep match -- find best match but restrict search to sublist 934 warnings.warn("The 'bestmatch-list' mode of " 935 "portage.dbapi.porttree.portdbapi.xmatch is deprecated", 936 DeprecationWarning, stacklevel=2) 937 myval = best(list(self._iter_match(mydep, mylist))) 938 elif level == "match-list": 939 #dep match -- find all matches but restrict search to sublist (used in 2nd half of visible()) 940 warnings.warn("The 'match-list' mode of " 941 "portage.dbapi.porttree.portdbapi.xmatch is deprecated", 942 DeprecationWarning, stacklevel=2) 943 myval = list(self._iter_match(mydep, mylist)) 944 else: 945 raise AssertionError( 946 "Invalid level argument: '%s'" % level) 947 948 if self.frozen: 949 xcache_this_level = self.xcache.get(level) 950 if xcache_this_level is not None: 951 xcache_this_level[cache_key] = myval 952 if not isinstance(myval, _pkg_str): 953 myval = myval[:] 954 955 return myval
956
957 - def match(self, mydep, use_cache=1):
958 return self.xmatch("match-visible", mydep)
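
A sketch contrasting the common xmatch() levels used above (the atom is illustrative; a standard configuration is assumed):

    import portage

    portdb = portage.db[portage.root]["porttree"].dbapi
    atom = "dev-lang/python"  # example atom
    every_cpv = portdb.xmatch("match-all", atom)      # all versions, visible or not
    visible = portdb.match(atom)                      # same as xmatch("match-visible", atom)
    best = portdb.xmatch("bestmatch-visible", atom)   # single cpv string, or "" if none
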
959
960 - def gvisible(self, mylist):
961 warnings.warn("The 'gvisible' method of " 962 "portage.dbapi.porttree.portdbapi " 963 "is deprecated", 964 DeprecationWarning, stacklevel=2) 965 return list(self._iter_visible(iter(mylist)))
966
967 - def visible(self, cpv_iter):
968 warnings.warn("The 'visible' method of " 969 "portage.dbapi.porttree.portdbapi " 970 "is deprecated", 971 DeprecationWarning, stacklevel=2) 972 if cpv_iter is None: 973 return [] 974 return list(self._iter_visible(iter(cpv_iter)))
975
976 - def _iter_visible(self, cpv_iter, myrepo=None):
977 """ 978 Return a new list containing only visible packages. 979 """ 980 aux_keys = list(self._aux_cache_keys) 981 metadata = {} 982 983 if myrepo is not None: 984 repos = [myrepo] 985 else: 986 # We iterate over self.porttrees, since it's common to 987 # tweak this attribute in order to adjust match behavior. 988 repos = [] 989 for tree in reversed(self.porttrees): 990 repos.append(self.repositories.get_name_for_location(tree)) 991 992 for mycpv in cpv_iter: 993 for repo in repos: 994 metadata.clear() 995 try: 996 metadata.update(zip(aux_keys, 997 self.aux_get(mycpv, aux_keys, myrepo=repo))) 998 except KeyError: 999 continue 1000 except PortageException as e: 1001 writemsg("!!! Error: aux_get('%s', %s)\n" % 1002 (mycpv, aux_keys), noiselevel=-1) 1003 writemsg("!!! %s\n" % (e,), noiselevel=-1) 1004 del e 1005 continue 1006 1007 if not self._visible(mycpv, metadata): 1008 continue 1009 1010 yield mycpv 1011 # only yield a given cpv once 1012 break
1013
1014 - def _visible(self, cpv, metadata):
1015 eapi = metadata["EAPI"] 1016 if not eapi_is_supported(eapi): 1017 return False 1018 if _eapi_is_deprecated(eapi): 1019 return False 1020 if not metadata["SLOT"]: 1021 return False 1022 1023 settings = self.settings 1024 if settings._getMaskAtom(cpv, metadata): 1025 return False 1026 if settings._getMissingKeywords(cpv, metadata): 1027 return False 1028 if settings.local_config: 1029 metadata['CHOST'] = settings.get('CHOST', '') 1030 if not settings._accept_chost(cpv, metadata): 1031 return False 1032 metadata["USE"] = "" 1033 if "?" in metadata["LICENSE"] or \ 1034 "?" in metadata["PROPERTIES"]: 1035 self.doebuild_settings.setcpv(cpv, mydb=metadata) 1036 metadata['USE'] = self.doebuild_settings['PORTAGE_USE'] 1037 try: 1038 if settings._getMissingLicenses(cpv, metadata): 1039 return False 1040 if settings._getMissingProperties(cpv, metadata): 1041 return False 1042 if settings._getMissingRestrict(cpv, metadata): 1043 return False 1044 except InvalidDependString: 1045 return False 1046 1047 return True
1048
1049 -class portagetree(object):
1050 - def __init__(self, root=DeprecationWarning, virtual=DeprecationWarning, 1051 settings=None):
1052 """ 1053 Constructor for a PortageTree 1054 1055 @param root: deprecated, defaults to settings['ROOT'] 1056 @type root: String/Path 1057 @param virtual: UNUSED 1058 @type virtual: No Idea 1059 @param settings: Portage Configuration object (portage.settings) 1060 @type settings: Instance of portage.config 1061 """ 1062 1063 if settings is None: 1064 settings = portage.settings 1065 self.settings = settings 1066 1067 if root is not DeprecationWarning: 1068 warnings.warn("The root parameter of the " + \ 1069 "portage.dbapi.porttree.portagetree" + \ 1070 " constructor is now unused. Use " + \ 1071 "settings['ROOT'] instead.", 1072 DeprecationWarning, stacklevel=2) 1073 1074 if virtual is not DeprecationWarning: 1075 warnings.warn("The 'virtual' parameter of the " 1076 "portage.dbapi.porttree.portagetree" 1077 " constructor is unused", 1078 DeprecationWarning, stacklevel=2) 1079 1080 self.portroot = settings["PORTDIR"] 1081 self.__virtual = virtual 1082 self.dbapi = portdbapi(mysettings=settings)
1083 1084 @property
1085 - def root(self):
1086 warnings.warn("The root attribute of " + \ 1087 "portage.dbapi.porttree.portagetree" + \ 1088 " is deprecated. Use " + \ 1089 "settings['ROOT'] instead.", 1090 DeprecationWarning, stacklevel=3) 1091 return self.settings['ROOT']
1092 1093 @property
1094 - def virtual(self):
1095 warnings.warn("The 'virtual' attribute of " + \ 1096 "portage.dbapi.porttree.portagetree" + \ 1097 " is deprecated.", 1098 DeprecationWarning, stacklevel=3) 1099 return self.__virtual
1100
1101 - def dep_bestmatch(self,mydep):
1102 "compatibility method" 1103 mymatch = self.dbapi.xmatch("bestmatch-visible",mydep) 1104 if mymatch is None: 1105 return "" 1106 return mymatch
1107
1108 - def dep_match(self,mydep):
1109 "compatibility method" 1110 mymatch = self.dbapi.xmatch("match-visible",mydep) 1111 if mymatch is None: 1112 return [] 1113 return mymatch
1114
1115 - def exists_specific(self,cpv):
1116 return self.dbapi.cpv_exists(cpv)
1117
1118          def getallnodes(self): 
1119                  """new behavior: these are all *unmasked* nodes. There may or may not be 
1120                  masked packages available for the nodes in this list.""" 
1121                  return self.dbapi.cp_all() 
1122
1123 - def getname(self, pkgname):
1124 "returns file location for this particular package (DEPRECATED)" 1125 if not pkgname: 1126 return "" 1127 mysplit = pkgname.split("/") 1128 psplit = pkgsplit(mysplit[1]) 1129 return "/".join([self.portroot, mysplit[0], psplit[0], mysplit[1]])+".ebuild"
1130
1131 - def getslot(self,mycatpkg):
1132 "Get a slot for a catpkg; assume it exists." 1133 myslot = "" 1134 try: 1135 myslot = self.dbapi._pkg_str(mycatpkg, None).slot 1136 except KeyError: 1137 pass 1138 return myslot
1139
1140  class FetchlistDict(Mapping): 
1141          """ 
1142          This provides a mapping interface for retrieving fetch lists. It's used 
1143          to allow portage.manifest.Manifest to access fetch lists via a standard 
1144          mapping interface rather than using the dbapi directly. 
1145          """ 
1146 - def __init__(self, pkgdir, settings, mydbapi):
1147 """pkgdir is a directory containing ebuilds and settings is passed into 1148 portdbapi.getfetchlist for __getitem__ calls.""" 1149 self.pkgdir = pkgdir 1150 self.cp = os.sep.join(pkgdir.split(os.sep)[-2:]) 1151 self.settings = settings 1152 self.mytree = os.path.realpath(os.path.dirname(os.path.dirname(pkgdir))) 1153 self.portdb = mydbapi
1154
1155 - def __getitem__(self, pkg_key):
1156 """Returns the complete fetch list for a given package.""" 1157 return list(self.portdb.getFetchMap(pkg_key, mytree=self.mytree))
1158
1159 - def __contains__(self, cpv):
1160 return cpv in self.__iter__()
1161
1162 - def has_key(self, pkg_key):
1163 """Returns true if the given package exists within pkgdir.""" 1164 warnings.warn("portage.dbapi.porttree.FetchlistDict.has_key() is " 1165 "deprecated, use the 'in' operator instead", 1166 DeprecationWarning, stacklevel=2) 1167 return pkg_key in self
1168
1169 - def __iter__(self):
1170 return iter(self.portdb.cp_list(self.cp, mytree=self.mytree))
1171
1172 - def __len__(self):
1173 """This needs to be implemented in order to avoid 1174 infinite recursion in some cases.""" 1175 return len(self.portdb.cp_list(self.cp, mytree=self.mytree))
1176
1177 - def keys(self):
1178 """Returns keys for all packages within pkgdir""" 1179 return self.portdb.cp_list(self.cp, mytree=self.mytree)
1180 1181 if sys.hexversion >= 0x3000000: 1182 keys = __iter__
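
FetchlistDict is normally constructed by Manifest code, but it can also be used directly; a hedged sketch (the cpv is hypothetical, and pkgdir is derived from findname2() in the way the constructor expects):

    import os
    import portage
    from portage.dbapi.porttree import FetchlistDict

    portdb = portage.db[portage.root]["porttree"].dbapi
    ebuild_path, tree = portdb.findname2("app-misc/foo-1.0")  # hypothetical cpv
    if ebuild_path is not None:
        fetchlists = FetchlistDict(os.path.dirname(ebuild_path),
            portdb.settings, portdb)
        for cpv in fetchlists:
            print("%s: %s" % (cpv, " ".join(fetchlists[cpv])))
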
1183
1184 -def _parse_uri_map(cpv, metadata, use=None):
1185 1186 myuris = use_reduce(metadata.get('SRC_URI', ''), 1187 uselist=use, matchall=(use is None), 1188 is_src_uri=True, 1189 eapi=metadata['EAPI']) 1190 1191 uri_map = OrderedDict() 1192 1193 myuris.reverse() 1194 while myuris: 1195 uri = myuris.pop() 1196 if myuris and myuris[-1] == "->": 1197 myuris.pop() 1198 distfile = myuris.pop() 1199 else: 1200 distfile = os.path.basename(uri) 1201 if not distfile: 1202 raise portage.exception.InvalidDependString( 1203 ("getFetchMap(): '%s' SRC_URI has no file " + \ 1204 "name: '%s'") % (cpv, uri)) 1205 1206 uri_set = uri_map.get(distfile) 1207 if uri_set is None: 1208 # Use OrderedDict to preserve order from SRC_URI 1209 # while ensuring uniqueness. 1210 uri_set = OrderedDict() 1211 uri_map[distfile] = uri_set 1212 1213 # SRC_URI may contain a file name with no scheme, and in 1214 # this case it does not belong in uri_set. 1215 if urlparse(uri).scheme: 1216 uri_set[uri] = True 1217 1218 # Convert OrderedDicts to tuples. 1219 for k, v in uri_map.items(): 1220 uri_map[k] = tuple(v) 1221 1222 return uri_map
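
Although _parse_uri_map() is an internal helper, its behavior is easy to demonstrate, including the EAPI 2+ "->" rename operator; a hedged sketch with made-up metadata:

    from portage.dbapi.porttree import _parse_uri_map

    metadata = {
        "EAPI": "5",
        "SRC_URI": "http://example.org/foo-1.0.tar.gz -> foo-1.0-src.tar.gz",
    }
    uri_map = _parse_uri_map("app-misc/foo-1.0", metadata)
    # -> {'foo-1.0-src.tar.gz': ('http://example.org/foo-1.0.tar.gz',)}
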
1223