
Source Code for Module portage.dbapi.porttree

# Copyright 1998-2014 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2

from __future__ import unicode_literals

__all__ = [
    "close_portdbapi_caches", "FetchlistDict", "portagetree", "portdbapi"
]

import portage
portage.proxy.lazyimport.lazyimport(globals(),
    'portage.checksum',
    'portage.data:portage_gid,secpass',
    'portage.dbapi.dep_expand:dep_expand',
    'portage.dep:Atom,dep_getkey,match_from_list,use_reduce,_match_slot',
    'portage.package.ebuild.doebuild:doebuild',
    'portage.util:ensure_dirs,shlex_split,writemsg,writemsg_level',
    'portage.util.listdir:listdir',
    'portage.versions:best,catpkgsplit,_pkgsplit@pkgsplit,ver_regexp,_pkg_str',
)

from portage.cache import volatile
from portage.cache.cache_errors import CacheError
from portage.cache.mappings import Mapping
from portage.dbapi import dbapi
from portage.exception import PortageException, \
    FileNotFound, InvalidAtom, InvalidData, \
    InvalidDependString, InvalidPackageName
from portage.localization import _

from portage import eclass_cache, \
    eapi_is_supported, \
    _eapi_is_deprecated
from portage import os
from portage import _encodings
from portage import _unicode_encode
from portage import OrderedDict
from portage.util._eventloop.EventLoop import EventLoop
from portage.util._eventloop.global_event_loop import global_event_loop
from _emerge.EbuildMetadataPhase import EbuildMetadataPhase

import os as _os
import sys
import traceback
import warnings

try:
    from urllib.parse import urlparse
except ImportError:
    from urlparse import urlparse

if sys.hexversion >= 0x3000000:
    # pylint: disable=W0622
    basestring = str
    long = int

def close_portdbapi_caches():
    # The python interpreter does _not_ guarantee that destructors are
    # called for objects that remain when the interpreter exits, so we
    # use an atexit hook to call destructors for any global portdbapi
    # instances that may have been constructed.
    try:
        portage._legacy_globals_constructed
    except AttributeError:
        pass
    else:
        if "db" in portage._legacy_globals_constructed:
            try:
                db = portage.db
            except AttributeError:
                pass
            else:
                if isinstance(db, dict):
                    for x in db.values():
                        try:
                            if "porttree" in x.lazy_items:
                                continue
                        except (AttributeError, TypeError):
                            continue
                        try:
                            x = x.pop("porttree").dbapi
                        except (AttributeError, KeyError):
                            continue
                        if not isinstance(x, portdbapi):
                            continue
                        x.close_caches()

portage.process.atexit_register(close_portdbapi_caches)

# It used to be necessary for API consumers to remove portdbapi instances
# from portdbapi_instances, in order to avoid having accumulated instances
# consume memory. Now, portdbapi_instances is just an empty dummy list, so
# for backward compatibility, ignore ValueError for removal on non-existent
# items.
class _dummy_list(list):
    def remove(self, item):
        # TODO: Trigger a DeprecationWarning here, after stable portage
        # has dummy portdbapi_instances.
        try:
            list.remove(self, item)
        except ValueError:
            pass


class portdbapi(dbapi):
    """this tree will scan a portage directory located at root (passed to init)"""
    portdbapi_instances = _dummy_list()
    _use_mutable = True

    @property
    def _categories(self):
        return self.settings.categories

    @property
    def porttree_root(self):
        warnings.warn("portage.dbapi.porttree.portdbapi.porttree_root is deprecated in favor of portage.repository.config.RepoConfig.location "
            "(available as repositories[repo_name].location attribute of instances of portage.dbapi.porttree.portdbapi class)",
            DeprecationWarning, stacklevel=2)
        return self.settings.repositories.mainRepoLocation()

    @property
    def eclassdb(self):
        warnings.warn("portage.dbapi.porttree.portdbapi.eclassdb is deprecated in favor of portage.repository.config.RepoConfig.eclass_db "
            "(available as repositories[repo_name].eclass_db attribute of instances of portage.dbapi.porttree.portdbapi class)",
            DeprecationWarning, stacklevel=2)
        main_repo = self.repositories.mainRepo()
        if main_repo is None:
            return None
        return main_repo.eclass_db

    def __init__(self, _unused_param=DeprecationWarning, mysettings=None):
        """
        @param _unused_param: deprecated, use mysettings['PORTDIR'] instead
        @type _unused_param: None
        @param mysettings: an immutable config instance
        @type mysettings: portage.config
        """

        from portage import config
        if mysettings:
            self.settings = mysettings
        else:
            from portage import settings
            self.settings = config(clone=settings)

        if _unused_param is not DeprecationWarning:
            warnings.warn("The first parameter of the " + \
                "portage.dbapi.porttree.portdbapi" + \
                " constructor is unused since portage-2.1.8. " + \
                "mysettings['PORTDIR'] is used instead.",
                DeprecationWarning, stacklevel=2)

        self.repositories = self.settings.repositories
        self.treemap = self.repositories.treemap

        # This is strictly for use in aux_get() doebuild calls when metadata
        # is generated by the depend phase. It's safest to use a clone for
        # this purpose because doebuild makes many changes to the config
        # instance that is passed in.
        self.doebuild_settings = config(clone=self.settings)
        self.depcachedir = os.path.realpath(self.settings.depcachedir)

        if os.environ.get("SANDBOX_ON") == "1":
            # Make api consumers exempt from sandbox violations
            # when doing metadata cache updates.
            sandbox_write = os.environ.get("SANDBOX_WRITE", "").split(":")
            if self.depcachedir not in sandbox_write:
                sandbox_write.append(self.depcachedir)
                os.environ["SANDBOX_WRITE"] = \
                    ":".join(filter(None, sandbox_write))

        self.porttrees = list(self.settings.repositories.repoLocationList())

        # This is used as sanity check for aux_get(). If there is no
        # root eclass dir, we assume that PORTDIR is invalid or
        # missing. This check allows aux_get() to detect a missing
        # portage tree and return early by raising a KeyError.
        self._have_root_eclass_dir = os.path.isdir(
            os.path.join(self.settings.repositories.mainRepoLocation(), "eclass"))

        # if the portdbapi is "frozen", then we assume that we can cache
        # everything (that no updates to it are happening)
        self.xcache = {}
        self.frozen = 0

        # Keep a list of repo names, sorted by priority (highest priority first).
        self._ordered_repo_name_list = tuple(reversed(self.repositories.prepos_order))

        self.auxdbmodule = self.settings.load_best_module("portdbapi.auxdbmodule")
        self.auxdb = {}
        self._pregen_auxdb = {}
        # If the current user doesn't have depcachedir write permission,
        # then the depcachedir cache is kept here with read-only access.
        self._ro_auxdb = {}
        self._init_cache_dirs()
        try:
            depcachedir_st = os.stat(self.depcachedir)
            depcachedir_w_ok = os.access(self.depcachedir, os.W_OK)
        except OSError:
            depcachedir_st = None
            depcachedir_w_ok = False

        cache_kwargs = {}

        depcachedir_unshared = False
        if portage.data.secpass < 1 and \
                depcachedir_w_ok and \
                depcachedir_st is not None and \
                os.getuid() == depcachedir_st.st_uid and \
                os.getgid() == depcachedir_st.st_gid:
            # If this user owns depcachedir and is not in the
            # portage group, then don't bother to set permissions
            # on cache entries. This makes it possible to run
            # egencache without any need to be a member of the
            # portage group.
            depcachedir_unshared = True
        else:
            cache_kwargs.update(portage._native_kwargs({
                'gid': portage_gid,
                'perms': 0o664
            }))

        # If secpass < 1, we don't want to write to the cache
        # since then we won't be able to apply group permissions
        # to the cache entries/directories.
        if (secpass < 1 and not depcachedir_unshared) or not depcachedir_w_ok:
            for x in self.porttrees:
                self.auxdb[x] = volatile.database(
                    self.depcachedir, x, self._known_keys,
                    **cache_kwargs)
                try:
                    self._ro_auxdb[x] = self.auxdbmodule(self.depcachedir, x,
                        self._known_keys, readonly=True, **cache_kwargs)
                except CacheError:
                    pass
        else:
            for x in self.porttrees:
                if x in self.auxdb:
                    continue
                # location, label, auxdbkeys
                self.auxdb[x] = self.auxdbmodule(
                    self.depcachedir, x, self._known_keys, **cache_kwargs)
        if "metadata-transfer" not in self.settings.features:
            for x in self.porttrees:
                if x in self._pregen_auxdb:
                    continue
                cache = self._create_pregen_cache(x)
                if cache is not None:
                    self._pregen_auxdb[x] = cache
        # Selectively cache metadata in order to optimize dep matching.
        self._aux_cache_keys = set(
            ["DEPEND", "EAPI", "HDEPEND",
            "INHERITED", "IUSE", "KEYWORDS", "LICENSE",
            "PDEPEND", "PROPERTIES", "PROVIDE", "RDEPEND", "repository",
            "RESTRICT", "SLOT", "DEFINED_PHASES", "REQUIRED_USE"])

        self._aux_cache = {}
        self._broken_ebuilds = set()

    @property
    def _event_loop(self):
        if portage._internal_caller:
            # For internal portage usage, the global_event_loop is safe.
            return global_event_loop()
        else:
            # For external API consumers, use a local EventLoop, since
            # we don't want to assume that it's safe to override the
            # global SIGCHLD handler.
            return EventLoop(main=False)

    def _create_pregen_cache(self, tree):
        conf = self.repositories.get_repo_for_location(tree)
        cache = conf.get_pregenerated_cache(
            self._known_keys, readonly=True)
        if cache is not None:
            try:
                cache.ec = self.repositories.get_repo_for_location(tree).eclass_db
            except AttributeError:
                pass

            if not cache.complete_eclass_entries:
                warnings.warn(
                    ("Repository '%s' used deprecated 'pms' cache format. "
                    "Please migrate to 'md5-dict' format.") % (conf.name,),
                    DeprecationWarning)

        return cache

    def _init_cache_dirs(self):
        """Create /var/cache/edb/dep and adjust permissions for the portage
        group."""

        dirmode = 0o2070
        modemask = 0o2

        try:
            ensure_dirs(self.depcachedir, gid=portage_gid,
                mode=dirmode, mask=modemask)
        except PortageException:
            pass

    def close_caches(self):
        if not hasattr(self, "auxdb"):
            # unhandled exception thrown from constructor
            return
        for x in self.auxdb:
            self.auxdb[x].sync()
        self.auxdb.clear()

    def flush_cache(self):
        for x in self.auxdb.values():
            x.sync()

    def findLicensePath(self, license_name):
        for x in reversed(self.porttrees):
            license_path = os.path.join(x, "licenses", license_name)
            if os.access(license_path, os.R_OK):
                return license_path
        return None

    def findname(self, mycpv, mytree=None, myrepo=None):
        return self.findname2(mycpv, mytree, myrepo)[0]

    def getRepositoryPath(self, repository_id):
        """
        This function is required for GLEP 42 compliance; given a valid repository ID
        it must return a path to the repository
        TreeMap = { id:path }
        """
        return self.treemap.get(repository_id)

    def getRepositoryName(self, canonical_repo_path):
        """
        This is the inverse of getRepositoryPath().
        @param canonical_repo_path: the canonical path of a repository, as
            resolved by os.path.realpath()
        @type canonical_repo_path: String
        @return: The repo_name for the corresponding repository, or None
            if the path does not correspond to a known repository
        @rtype: String or None
        """
        try:
            return self.repositories.get_name_for_location(canonical_repo_path)
        except KeyError:
            return None

    def getRepositories(self):
        """
        This function is required for GLEP 42 compliance; it will return a list of
        repository IDs
        TreeMap = {id: path}
        """
        return self._ordered_repo_name_list

    def getMissingRepoNames(self):
        """
        Returns a list of repository paths that lack profiles/repo_name.
        """
        return self.settings.repositories.missing_repo_names

    def getIgnoredRepos(self):
        """
        Returns a list of repository paths that have been ignored, because
        another repo with the same name exists.
        """
        return self.settings.repositories.ignored_repos

    def findname2(self, mycpv, mytree=None, myrepo=None):
        """
        Returns the location of the CPV, and what overlay it was in.
        Searches overlays first, then PORTDIR; this allows us to return the first
        matching file. If we searched PORTDIR first and the overlays second, we
        would have to exhaustively search the overlays until we found the file
        we wanted.
        If myrepo is not None it will find packages from this repository (overlay).
        """
        if not mycpv:
            return (None, 0)

        if myrepo is not None:
            mytree = self.treemap.get(myrepo)
            if mytree is None:
                return (None, 0)

        mysplit = mycpv.split("/")
        psplit = pkgsplit(mysplit[1])
        if psplit is None or len(mysplit) != 2:
            raise InvalidPackageName(mycpv)

        # For optimal performance in this hot spot, we do manual unicode
        # handling here instead of using the wrapped os module.
        encoding = _encodings['fs']
        errors = 'strict'

        if mytree:
            mytrees = [mytree]
        else:
            mytrees = reversed(self.porttrees)

        relative_path = mysplit[0] + _os.sep + psplit[0] + _os.sep + \
            mysplit[1] + ".ebuild"

        for x in mytrees:
            filename = x + _os.sep + relative_path
            if _os.access(_unicode_encode(filename,
                    encoding=encoding, errors=errors), _os.R_OK):
                return (filename, x)
        return (None, 0)

    def _write_cache(self, cpv, repo_path, metadata, ebuild_hash):

        try:
            cache = self.auxdb[repo_path]
            chf = cache.validation_chf
            metadata['_%s_' % chf] = getattr(ebuild_hash, chf)
        except CacheError:
            # Normally this shouldn't happen, so we'll show
            # a traceback for debugging purposes.
            traceback.print_exc()
            cache = None

        if cache is not None:
            try:
                cache[cpv] = metadata
            except CacheError:
                # Normally this shouldn't happen, so we'll show
                # a traceback for debugging purposes.
                traceback.print_exc()

    def _pull_valid_cache(self, cpv, ebuild_path, repo_path):
        try:
            ebuild_hash = eclass_cache.hashed_path(ebuild_path)
            # snag mtime since we use it later, and to trigger stat failure
            # if it doesn't exist
            ebuild_hash.mtime
        except FileNotFound:
            writemsg(_("!!! aux_get(): ebuild for " \
                "'%s' does not exist at:\n") % (cpv,), noiselevel=-1)
            writemsg("!!! %s\n" % ebuild_path, noiselevel=-1)
            raise KeyError(cpv)

        # Pull pre-generated metadata from the metadata/cache/
        # directory if it exists and is valid, otherwise fall
        # back to the normal writable cache.
        auxdbs = []
        pregen_auxdb = self._pregen_auxdb.get(repo_path)
        if pregen_auxdb is not None:
            auxdbs.append(pregen_auxdb)
        ro_auxdb = self._ro_auxdb.get(repo_path)
        if ro_auxdb is not None:
            auxdbs.append(ro_auxdb)
        auxdbs.append(self.auxdb[repo_path])
        eclass_db = self.repositories.get_repo_for_location(repo_path).eclass_db

        for auxdb in auxdbs:
            try:
                metadata = auxdb[cpv]
            except KeyError:
                continue
            except CacheError:
                if not auxdb.readonly:
                    try:
                        del auxdb[cpv]
                    except (KeyError, CacheError):
                        pass
                continue
            eapi = metadata.get('EAPI', '').strip()
            if not eapi:
                eapi = '0'
                metadata['EAPI'] = eapi
            if not eapi_is_supported(eapi):
                # Since we're supposed to be able to efficiently obtain the
                # EAPI from _parse_eapi_ebuild_head, we disregard cache entries
                # for unsupported EAPIs.
                continue
            if auxdb.validate_entry(metadata, ebuild_hash, eclass_db):
                break
        else:
            metadata = None

        return (metadata, ebuild_hash)

    def aux_get(self, mycpv, mylist, mytree=None, myrepo=None):
        """Return auxiliary db information, such as SLOT and DEPEND.

        input: "sys-apps/foo-1.0", ["SLOT", "DEPEND", "HOMEPAGE"]
        return: ["0", ">=sys-libs/bar-1.0", "http://www.foo.com"], or raise
            KeyError on error.
        """
        cache_me = False
        if myrepo is not None:
            mytree = self.treemap.get(myrepo)
            if mytree is None:
                raise KeyError(myrepo)

        if mytree is not None and len(self.porttrees) == 1 \
                and mytree == self.porttrees[0]:
            # mytree matches our only tree, so it's safe to
            # ignore mytree and cache the result
            mytree = None
            myrepo = None

        if mytree is None:
            cache_me = True
        if mytree is None and not self._known_keys.intersection(
                mylist).difference(self._aux_cache_keys):
            aux_cache = self._aux_cache.get(mycpv)
            if aux_cache is not None:
                return [aux_cache.get(x, "") for x in mylist]
            cache_me = True

        try:
            cat, pkg = mycpv.split("/", 1)
        except ValueError:
            # Missing slash. Can't find ebuild so raise KeyError.
            raise KeyError(mycpv)

        myebuild, mylocation = self.findname2(mycpv, mytree)

        if not myebuild:
            writemsg("!!! aux_get(): %s\n" % \
                _("ebuild not found for '%s'") % mycpv, noiselevel=1)
            raise KeyError(mycpv)

        mydata, ebuild_hash = self._pull_valid_cache(mycpv, myebuild, mylocation)
        doregen = mydata is None

        if doregen:
            if myebuild in self._broken_ebuilds:
                raise KeyError(mycpv)

            proc = EbuildMetadataPhase(cpv=mycpv,
                ebuild_hash=ebuild_hash, portdb=self,
                repo_path=mylocation, scheduler=self._event_loop,
                settings=self.doebuild_settings)

            proc.start()
            proc.wait()

            if proc.returncode != os.EX_OK:
                self._broken_ebuilds.add(myebuild)
                raise KeyError(mycpv)

            mydata = proc.metadata

        mydata["repository"] = self.repositories.get_name_for_location(mylocation)
        mydata["_mtime_"] = ebuild_hash.mtime
        eapi = mydata.get("EAPI")
        if not eapi:
            eapi = "0"
            mydata["EAPI"] = eapi
        if eapi_is_supported(eapi):
            mydata["INHERITED"] = " ".join(mydata.get("_eclasses_", []))

        # finally, we look at our internal cache entry and return the requested data.
        returnme = [mydata.get(x, "") for x in mylist]

        if cache_me:
            aux_cache = {}
            for x in self._aux_cache_keys:
                aux_cache[x] = mydata.get(x, "")
            self._aux_cache[mycpv] = aux_cache

        return returnme

    def getFetchMap(self, mypkg, useflags=None, mytree=None):
        """
        Get the SRC_URI metadata as a dict which maps each file name to a
        set of alternative URIs.

        @param mypkg: cpv for an ebuild
        @type mypkg: String
        @param useflags: a collection of enabled USE flags, for evaluation of
            conditionals
        @type useflags: set, or None to enable all conditionals
        @param mytree: The canonical path of the tree in which the ebuild
            is located, or None for automatic lookup
        @type mytree: String
        @return: A dict which maps each file name to a set of alternative
            URIs.
        @rtype: dict
        """

        try:
            eapi, myuris = self.aux_get(mypkg,
                ["EAPI", "SRC_URI"], mytree=mytree)
        except KeyError:
            # Convert this to an InvalidDependString exception since callers
            # already handle it.
            raise portage.exception.InvalidDependString(
                "getFetchMap(): aux_get() error reading " + mypkg + "; aborting.")

        if not eapi_is_supported(eapi):
            # Convert this to an InvalidDependString exception
            # since callers already handle it.
            raise portage.exception.InvalidDependString(
                "getFetchMap(): '%s' has unsupported EAPI: '%s'" % \
                (mypkg, eapi))

        return _parse_uri_map(mypkg, {'EAPI': eapi, 'SRC_URI': myuris},
            use=useflags)

    def getfetchsizes(self, mypkg, useflags=None, debug=0, myrepo=None):
        # returns a filename:size dictionary of remaining downloads
        myebuild, mytree = self.findname2(mypkg, myrepo=myrepo)
        if myebuild is None:
            raise AssertionError(_("ebuild not found for '%s'") % mypkg)
        pkgdir = os.path.dirname(myebuild)
        mf = self.repositories.get_repo_for_location(
            os.path.dirname(os.path.dirname(pkgdir))).load_manifest(
            pkgdir, self.settings["DISTDIR"])
        checksums = mf.getDigests()
        if not checksums:
            if debug:
                writemsg(_("[empty/missing/bad digest]: %s\n") % (mypkg,))
            return {}
        filesdict = {}
        myfiles = self.getFetchMap(mypkg, useflags=useflags, mytree=mytree)
        # XXX: maybe this should be improved: take partial downloads
        # into account? check checksums?
        for myfile in myfiles:
            try:
                fetch_size = int(checksums[myfile]["size"])
            except (KeyError, ValueError):
                if debug:
                    writemsg(_("[bad digest]: missing %(file)s for %(pkg)s\n") % {"file": myfile, "pkg": mypkg})
                continue
            file_path = os.path.join(self.settings["DISTDIR"], myfile)
            mystat = None
            try:
                mystat = os.stat(file_path)
            except OSError:
                pass
            if mystat is None:
                existing_size = 0
                ro_distdirs = self.settings.get("PORTAGE_RO_DISTDIRS")
                if ro_distdirs is not None:
                    for x in shlex_split(ro_distdirs):
                        try:
                            mystat = os.stat(os.path.join(x, myfile))
                        except OSError:
                            pass
                        else:
                            if mystat.st_size == fetch_size:
                                existing_size = fetch_size
                                break
            else:
                existing_size = mystat.st_size
            remaining_size = fetch_size - existing_size
            if remaining_size > 0:
                # Assume the download is resumable.
                filesdict[myfile] = remaining_size
            elif remaining_size < 0:
                # The existing file is too large and therefore corrupt.
                filesdict[myfile] = int(checksums[myfile]["size"])
        return filesdict

    def fetch_check(self, mypkg, useflags=None, mysettings=None, all=False, myrepo=None):
        """
        TODO: account for PORTAGE_RO_DISTDIRS
        """
        if all:
            useflags = None
        elif useflags is None:
            if mysettings:
                useflags = mysettings["USE"].split()
        if myrepo is not None:
            mytree = self.treemap.get(myrepo)
            if mytree is None:
                return False
        else:
            mytree = None

        myfiles = self.getFetchMap(mypkg, useflags=useflags, mytree=mytree)
        myebuild = self.findname(mypkg, myrepo=myrepo)
        if myebuild is None:
            raise AssertionError(_("ebuild not found for '%s'") % mypkg)
        pkgdir = os.path.dirname(myebuild)
        mf = self.repositories.get_repo_for_location(
            os.path.dirname(os.path.dirname(pkgdir)))
        mf = mf.load_manifest(pkgdir, self.settings["DISTDIR"])
        mysums = mf.getDigests()

        failures = {}
        for x in myfiles:
            if not mysums or x not in mysums:
                ok = False
                reason = _("digest missing")
            else:
                try:
                    ok, reason = portage.checksum.verify_all(
                        os.path.join(self.settings["DISTDIR"], x), mysums[x])
                except FileNotFound as e:
                    ok = False
                    reason = _("File Not Found: '%s'") % (e,)
            if not ok:
                failures[x] = reason
        if failures:
            return False
        return True

    def cpv_exists(self, mykey, myrepo=None):
        "Tells us whether an actual ebuild exists on disk (no masking)"
        cps2 = mykey.split("/")
        cps = catpkgsplit(mykey, silent=0)
        if not cps:
            # invalid cat/pkg-v
            return 0
        if self.findname(cps[0] + "/" + cps2[1], myrepo=myrepo):
            return 1
        else:
            return 0

    def cp_all(self, categories=None, trees=None, reverse=False):
        """
        This returns a list of all keys in our tree or trees
        @param categories: optional list of categories to search or
            defaults to self.settings.categories
        @param trees: optional list of trees to search the categories in or
            defaults to self.porttrees
        @param reverse: reverse sort order (default is False)
        @rtype: list of [cat/pkg,...]
        """
        d = {}
        if categories is None:
            categories = self.settings.categories
        if trees is None:
            trees = self.porttrees
        for x in categories:
            for oroot in trees:
                for y in listdir(oroot + "/" + x, EmptyOnError=1, ignorecvs=1, dirsonly=1):
                    try:
                        atom = Atom("%s/%s" % (x, y))
                    except InvalidAtom:
                        continue
                    if atom != atom.cp:
                        continue
                    d[atom.cp] = None
        l = list(d)
        l.sort(reverse=reverse)
        return l

    def cp_list(self, mycp, use_cache=1, mytree=None):
        # NOTE: Cache can be safely shared with the match cache, since the
        # match cache uses the result from dep_expand for the cache_key.
        if self.frozen and mytree is not None \
                and len(self.porttrees) == 1 \
                and mytree == self.porttrees[0]:
            # mytree matches our only tree, so it's safe to
            # ignore mytree and cache the result
            mytree = None

        if self.frozen and mytree is None:
            cachelist = self.xcache["cp-list"].get(mycp)
            if cachelist is not None:
                # Try to propagate this to the match-all cache here for
                # repoman since he uses separate match-all caches for each
                # profile (due to differences in _get_implicit_iuse).
                self.xcache["match-all"][(mycp, mycp)] = cachelist
                return cachelist[:]
        mysplit = mycp.split("/")
        invalid_category = mysplit[0] not in self._categories
        d = {}
        if mytree is not None:
            if isinstance(mytree, basestring):
                mytrees = [mytree]
            else:
                # assume it's iterable
                mytrees = mytree
        else:
            mytrees = self.porttrees
        for oroot in mytrees:
            try:
                file_list = os.listdir(os.path.join(oroot, mycp))
            except OSError:
                continue
            for x in file_list:
                pf = None
                if x[-7:] == '.ebuild':
                    pf = x[:-7]

                if pf is not None:
                    ps = pkgsplit(pf)
                    if not ps:
                        writemsg(_("\nInvalid ebuild name: %s\n") % \
                            os.path.join(oroot, mycp, x), noiselevel=-1)
                        continue
                    if ps[0] != mysplit[1]:
                        writemsg(_("\nInvalid ebuild name: %s\n") % \
                            os.path.join(oroot, mycp, x), noiselevel=-1)
                        continue
                    ver_match = ver_regexp.match("-".join(ps[1:]))
                    if ver_match is None or not ver_match.groups():
                        writemsg(_("\nInvalid ebuild version: %s\n") % \
                            os.path.join(oroot, mycp, x), noiselevel=-1)
                        continue
                    d[_pkg_str(mysplit[0] + "/" + pf)] = None
        if invalid_category and d:
            writemsg(_("\n!!! '%s' has a category that is not listed in " \
                "%setc/portage/categories\n") % \
                (mycp, self.settings["PORTAGE_CONFIGROOT"]), noiselevel=-1)
            mylist = []
        else:
            mylist = list(d)
        # Always sort in ascending order here since it's handy
        # and the result can be easily cached and reused.
        self._cpv_sort_ascending(mylist)
        if self.frozen and mytree is None:
            cachelist = mylist[:]
            self.xcache["cp-list"][mycp] = cachelist
            self.xcache["match-all"][(mycp, mycp)] = cachelist
        return mylist

    def freeze(self):
        for x in "bestmatch-visible", "cp-list", "match-all", \
                "match-all-cpv-only", "match-visible", "minimum-all", \
                "minimum-visible":
            self.xcache[x] = {}
        self.frozen = 1

    def melt(self):
        self.xcache = {}
        self.frozen = 0

    def xmatch(self, level, origdep, mydep=None, mykey=None, mylist=None):
        "caching match function; very tricky stuff"
        if level == "list-visible":
            level = "match-visible"
            warnings.warn("The 'list-visible' mode of "
                "portage.dbapi.porttree.portdbapi.xmatch "
                "has been renamed to match-visible",
                DeprecationWarning, stacklevel=2)

        if mydep is None:
            # this stuff only runs on first call of xmatch()
            # create mydep, mykey from origdep
            mydep = dep_expand(origdep, mydb=self, settings=self.settings)
            mykey = mydep.cp

        # if no updates are being made to the tree, we can consult our xcache...
        cache_key = None
        if self.frozen:
            cache_key = (mydep, mydep.unevaluated_atom)
            try:
                return self.xcache[level][cache_key][:]
            except KeyError:
                pass

        myval = None
        mytree = None
        if mydep.repo is not None:
            mytree = self.treemap.get(mydep.repo)
            if mytree is None:
                if level.startswith("match-"):
                    myval = []
                else:
                    myval = ""

        if myval is not None:
            # Unknown repo, empty result.
            pass
        elif level == "match-all-cpv-only":
            # match *all* packages, only against the cpv, in order
            # to bypass unnecessary cache access for things like IUSE
            # and SLOT.
            if mydep == mykey:
                # Share cache with match-all/cp_list when the result is the
                # same. Note that this requires that mydep.repo is None and
                # thus mytree is also None.
                level = "match-all"
                myval = self.cp_list(mykey, mytree=mytree)
            else:
                myval = match_from_list(mydep,
                    self.cp_list(mykey, mytree=mytree))

        elif level in ("bestmatch-visible", "match-all", "match-visible",
                "minimum-all", "minimum-visible"):
            # Find the minimum matching visible version. This is optimized to
            # minimize the number of metadata accesses (improves performance
            # especially in cases where metadata needs to be generated).
            if mydep == mykey:
                mylist = self.cp_list(mykey, mytree=mytree)
            else:
                mylist = match_from_list(mydep,
                    self.cp_list(mykey, mytree=mytree))

            visibility_filter = level not in ("match-all", "minimum-all")
            single_match = level not in ("match-all", "match-visible")
            myval = []
            aux_keys = list(self._aux_cache_keys)
            if level == "bestmatch-visible":
                iterfunc = reversed
            else:
                iterfunc = iter

            if mydep.repo is not None:
                repos = [mydep.repo]
            else:
                # We iterate over self.porttrees, since it's common to
                # tweak this attribute in order to adjust match behavior.
                repos = []
                for tree in reversed(self.porttrees):
                    repos.append(self.repositories.get_name_for_location(tree))

            for cpv in iterfunc(mylist):
                for repo in repos:
                    try:
                        metadata = dict(zip(aux_keys,
                            self.aux_get(cpv, aux_keys, myrepo=repo)))
                    except KeyError:
                        # ebuild not in this repo, or masked by corruption
                        continue

                    try:
                        pkg_str = _pkg_str(cpv, metadata=metadata,
                            settings=self.settings)
                    except InvalidData:
                        continue

                    if visibility_filter and not self._visible(pkg_str, metadata):
                        continue

                    if mydep.slot is not None and \
                            not _match_slot(mydep, pkg_str):
                        continue

                    if mydep.unevaluated_atom.use is not None and \
                            not self._match_use(mydep, pkg_str, metadata):
                        continue

                    myval.append(pkg_str)
                    # only yield a given cpv once
                    break

                if myval and single_match:
                    break

            if single_match:
                if myval:
                    myval = myval[0]
                else:
                    myval = ""

        elif level == "bestmatch-list":
            # dep match -- find best match but restrict search to sublist
            warnings.warn("The 'bestmatch-list' mode of "
                "portage.dbapi.porttree.portdbapi.xmatch is deprecated",
                DeprecationWarning, stacklevel=2)
            myval = best(list(self._iter_match(mydep, mylist)))
        elif level == "match-list":
            # dep match -- find all matches but restrict search to sublist
            # (used in 2nd half of visible())
            warnings.warn("The 'match-list' mode of "
                "portage.dbapi.porttree.portdbapi.xmatch is deprecated",
                DeprecationWarning, stacklevel=2)
            myval = list(self._iter_match(mydep, mylist))
        else:
            raise AssertionError(
                "Invalid level argument: '%s'" % level)

        if self.frozen:
            xcache_this_level = self.xcache.get(level)
            if xcache_this_level is not None:
                xcache_this_level[cache_key] = myval
                if not isinstance(myval, _pkg_str):
                    myval = myval[:]

        return myval

    def match(self, mydep, use_cache=1):
        return self.xmatch("match-visible", mydep)

    def gvisible(self, mylist):
        warnings.warn("The 'gvisible' method of "
            "portage.dbapi.porttree.portdbapi "
            "is deprecated",
            DeprecationWarning, stacklevel=2)
        return list(self._iter_visible(iter(mylist)))

    def visible(self, cpv_iter):
        warnings.warn("The 'visible' method of "
            "portage.dbapi.porttree.portdbapi "
            "is deprecated",
            DeprecationWarning, stacklevel=2)
        if cpv_iter is None:
            return []
        return list(self._iter_visible(iter(cpv_iter)))

    def _iter_visible(self, cpv_iter, myrepo=None):
        """
        Yield only the visible packages from cpv_iter.
        """
        aux_keys = list(self._aux_cache_keys)
        metadata = {}

        if myrepo is not None:
            repos = [myrepo]
        else:
            # We iterate over self.porttrees, since it's common to
            # tweak this attribute in order to adjust match behavior.
            repos = []
            for tree in reversed(self.porttrees):
                repos.append(self.repositories.get_name_for_location(tree))

        for mycpv in cpv_iter:
            for repo in repos:
                metadata.clear()
                try:
                    metadata.update(zip(aux_keys,
                        self.aux_get(mycpv, aux_keys, myrepo=repo)))
                except KeyError:
                    continue
                except PortageException as e:
                    writemsg("!!! Error: aux_get('%s', %s)\n" %
                        (mycpv, aux_keys), noiselevel=-1)
                    writemsg("!!! %s\n" % (e,), noiselevel=-1)
                    del e
                    continue

                if not self._visible(mycpv, metadata):
                    continue

                yield mycpv
                # only yield a given cpv once
                break

    def _visible(self, cpv, metadata):
        eapi = metadata["EAPI"]
        if not eapi_is_supported(eapi):
            return False
        if _eapi_is_deprecated(eapi):
            return False
        if not metadata["SLOT"]:
            return False

        settings = self.settings
        if settings._getMaskAtom(cpv, metadata):
            return False
        if settings._getMissingKeywords(cpv, metadata):
            return False
        if settings.local_config:
            metadata['CHOST'] = settings.get('CHOST', '')
            if not settings._accept_chost(cpv, metadata):
                return False
            metadata["USE"] = ""
            if "?" in metadata["LICENSE"] or \
                    "?" in metadata["PROPERTIES"]:
                self.doebuild_settings.setcpv(cpv, mydb=metadata)
                metadata['USE'] = self.doebuild_settings['PORTAGE_USE']
            try:
                if settings._getMissingLicenses(cpv, metadata):
                    return False
                if settings._getMissingProperties(cpv, metadata):
                    return False
                if settings._getMissingRestrict(cpv, metadata):
                    return False
            except InvalidDependString:
                return False

        return True
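For orientation, here is a minimal usage sketch of the portdbapi API defined above. It assumes a working Portage installation with at least one configured repository; the atom queried here is only an example.

    import portage
    from portage.dbapi.porttree import portdbapi

    portdb = portdbapi(mysettings=portage.config(clone=portage.settings))
    # Resolve the best visible match for an atom, then read selected metadata.
    cpv = portdb.xmatch("bestmatch-visible", "sys-apps/portage")
    if cpv:
        slot, depend = portdb.aux_get(cpv, ["SLOT", "DEPEND"])
        uri_map = portdb.getFetchMap(cpv)  # distfile name -> tuple of URIs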

class portagetree(object):
    def __init__(self, root=DeprecationWarning, virtual=DeprecationWarning,
            settings=None):
        """
        Constructor for a PortageTree

        @param root: deprecated, defaults to settings['ROOT']
        @type root: String/Path
        @param virtual: UNUSED
        @type virtual: No Idea
        @param settings: Portage Configuration object (portage.settings)
        @type settings: Instance of portage.config
        """

        if settings is None:
            settings = portage.settings
        self.settings = settings

        if root is not DeprecationWarning:
            warnings.warn("The root parameter of the " + \
                "portage.dbapi.porttree.portagetree" + \
                " constructor is now unused. Use " + \
                "settings['ROOT'] instead.",
                DeprecationWarning, stacklevel=2)

        if virtual is not DeprecationWarning:
            warnings.warn("The 'virtual' parameter of the "
                "portage.dbapi.porttree.portagetree"
                " constructor is unused",
                DeprecationWarning, stacklevel=2)

        self.portroot = settings["PORTDIR"]
        self.__virtual = virtual
        self.dbapi = portdbapi(mysettings=settings)

    @property
    def root(self):
        warnings.warn("The root attribute of " + \
            "portage.dbapi.porttree.portagetree" + \
            " is deprecated. Use " + \
            "settings['ROOT'] instead.",
            DeprecationWarning, stacklevel=3)
        return self.settings['ROOT']

    @property
    def virtual(self):
        warnings.warn("The 'virtual' attribute of " + \
            "portage.dbapi.porttree.portagetree" + \
            " is deprecated.",
            DeprecationWarning, stacklevel=3)
        return self.__virtual

    def dep_bestmatch(self, mydep):
        "compatibility method"
        mymatch = self.dbapi.xmatch("bestmatch-visible", mydep)
        if mymatch is None:
            return ""
        return mymatch

    def dep_match(self, mydep):
        "compatibility method"
        mymatch = self.dbapi.xmatch("match-visible", mydep)
        if mymatch is None:
            return []
        return mymatch

    def exists_specific(self, cpv):
        return self.dbapi.cpv_exists(cpv)

    def getallnodes(self):
        """new behavior: these are all *unmasked* nodes. There may or may not
        be masked packages available for the nodes in this list."""
        return self.dbapi.cp_all()

    def getname(self, pkgname):
        "returns file location for this particular package (DEPRECATED)"
        if not pkgname:
            return ""
        mysplit = pkgname.split("/")
        psplit = pkgsplit(mysplit[1])
        return "/".join([self.portroot, mysplit[0], psplit[0], mysplit[1]]) + ".ebuild"

    def getslot(self, mycatpkg):
        "Get a slot for a catpkg; assume it exists."
        myslot = ""
        try:
            myslot = self.dbapi._pkg_str(mycatpkg, None).slot
        except KeyError:
            pass
        return myslot
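A comparable sketch for the legacy portagetree wrapper, which simply delegates to the dbapi methods above; the atoms used are illustrative only.

    import portage
    from portage.dbapi.porttree import portagetree

    tree = portagetree(settings=portage.settings)
    best = tree.dep_bestmatch("sys-apps/portage")    # "" when nothing matches
    matches = tree.dep_match("sys-apps/portage")     # [] when nothing matches
    present = tree.exists_specific("sys-apps/portage-2.2.8")  # hypothetical cpv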

class FetchlistDict(Mapping):
    """
    This provides a mapping interface to retrieve fetch lists. It's used
    to allow portage.manifest.Manifest to access fetch lists via a standard
    mapping interface rather than use the dbapi directly.
    """
    def __init__(self, pkgdir, settings, mydbapi):
        """pkgdir is a directory containing ebuilds and settings is passed into
        portdbapi.getfetchlist for __getitem__ calls."""
        self.pkgdir = pkgdir
        self.cp = os.sep.join(pkgdir.split(os.sep)[-2:])
        self.settings = settings
        self.mytree = os.path.realpath(os.path.dirname(os.path.dirname(pkgdir)))
        self.portdb = mydbapi

    def __getitem__(self, pkg_key):
        """Returns the complete fetch list for a given package."""
        return list(self.portdb.getFetchMap(pkg_key, mytree=self.mytree))

    def __contains__(self, cpv):
        return cpv in self.__iter__()

    def has_key(self, pkg_key):
        """Returns true if the given package exists within pkgdir."""
        warnings.warn("portage.dbapi.porttree.FetchlistDict.has_key() is "
            "deprecated, use the 'in' operator instead",
            DeprecationWarning, stacklevel=2)
        return pkg_key in self

    def __iter__(self):
        return iter(self.portdb.cp_list(self.cp, mytree=self.mytree))

    def __len__(self):
        """This needs to be implemented in order to avoid
        infinite recursion in some cases."""
        return len(self.portdb.cp_list(self.cp, mytree=self.mytree))

    def keys(self):
        """Returns keys for all packages within pkgdir"""
        return self.portdb.cp_list(self.cp, mytree=self.mytree)

    if sys.hexversion >= 0x3000000:
        keys = __iter__
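The sketch below shows how FetchlistDict can be wired up as the fetch-list source described in the class docstring; the repository path is an assumption made purely for illustration.

    import portage
    from portage.dbapi.porttree import FetchlistDict, portdbapi

    portdb = portdbapi(mysettings=portage.config(clone=portage.settings))
    pkgdir = "/usr/portage/sys-apps/portage"  # assumed repository layout
    fetchlists = FetchlistDict(pkgdir, portdb.settings, portdb)
    for cpv in fetchlists:             # iterates over cp_list() for this package
        print(cpv, fetchlists[cpv])    # distfile names from getFetchMap()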

def _parse_uri_map(cpv, metadata, use=None):

    myuris = use_reduce(metadata.get('SRC_URI', ''),
        uselist=use, matchall=(use is None),
        is_src_uri=True,
        eapi=metadata['EAPI'])

    uri_map = OrderedDict()

    myuris.reverse()
    while myuris:
        uri = myuris.pop()
        if myuris and myuris[-1] == "->":
            myuris.pop()
            distfile = myuris.pop()
        else:
            distfile = os.path.basename(uri)
            if not distfile:
                raise portage.exception.InvalidDependString(
                    ("getFetchMap(): '%s' SRC_URI has no file " + \
                    "name: '%s'") % (cpv, uri))

        uri_set = uri_map.get(distfile)
        if uri_set is None:
            # Use OrderedDict to preserve order from SRC_URI
            # while ensuring uniqueness.
            uri_set = OrderedDict()
            uri_map[distfile] = uri_set

        # SRC_URI may contain a file name with no scheme, and in
        # this case it does not belong in uri_set.
        if urlparse(uri).scheme:
            uri_set[uri] = True

    # Convert OrderedDicts to tuples.
    for k, v in uri_map.items():
        uri_map[k] = tuple(v)

    return uri_map
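Finally, an illustrative sketch of the SRC_URI parsing behaviour implemented by the module-private helper _parse_uri_map(); the metadata values are made up, and the '->' operator renames the downloaded distfile.

    from portage.dbapi.porttree import _parse_uri_map

    metadata = {
        "EAPI": "5",
        "SRC_URI": "http://example.org/foo-1.0.tar.gz -> foo-1.0-renamed.tar.gz",
    }
    uri_map = _parse_uri_map("app-misc/foo-1.0", metadata, use=None)
    # Expected shape: {'foo-1.0-renamed.tar.gz': ('http://example.org/foo-1.0.tar.gz',)}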