Package portage :: Package util
[hide private]

Source Code for Package portage.util

   1  # Copyright 2004-2014 Gentoo Foundation 
   2  # Distributed under the terms of the GNU General Public License v2 
   3   
   4  from __future__ import unicode_literals 
   5   
   6  __all__ = ['apply_permissions', 'apply_recursive_permissions', 
   7          'apply_secpass_permissions', 'apply_stat_permissions', 'atomic_ofstream', 
   8          'cmp_sort_key', 'ConfigProtect', 'dump_traceback', 'ensure_dirs', 
   9          'find_updated_config_files', 'getconfig', 'getlibpaths', 'grabdict', 
  10          'grabdict_package', 'grabfile', 'grabfile_package', 'grablines', 
  11          'initialize_logger', 'LazyItemsDict', 'map_dictlist_vals', 
  12          'new_protect_filename', 'normalize_path', 'pickle_read', 'stack_dictlist', 
  13          'stack_dicts', 'stack_lists', 'unique_array', 'unique_everseen', 'varexpand', 
  14          'write_atomic', 'writedict', 'writemsg', 'writemsg_level', 'writemsg_stdout'] 
  15   
  16  from copy import deepcopy 
  17  import errno 
  18  import io 
  19  try: 
  20          from itertools import filterfalse 
  21  except ImportError: 
  22          from itertools import ifilterfalse as filterfalse 
  23  import logging 
  24  import re 
  25  import shlex 
  26  import stat 
  27  import string 
  28  import sys 
  29  import traceback 
  30  import glob 
  31   
  32  import portage 
  33  portage.proxy.lazyimport.lazyimport(globals(), 
  34          'pickle', 
  35          'portage.dep:Atom', 
  36          'subprocess', 
  37  ) 
  38   
  39  from portage import os 
  40  from portage import _encodings 
  41  from portage import _os_merge 
  42  from portage import _unicode_encode 
  43  from portage import _unicode_decode 
  44  from portage.const import VCS_DIRS 
  45  from portage.exception import InvalidAtom, PortageException, FileNotFound, \ 
  46         IsADirectory, OperationNotPermitted, ParseError, PermissionDenied, \ 
  47             ReadOnlyFileSystem 
  48  from portage.localization import _ 
  49  from portage.proxy.objectproxy import ObjectProxy 
  50  from portage.cache.mappings import UserDict 
  51   
  52  if sys.hexversion >= 0x3000000: 
  53          _unicode = str 
  54  else: 
  55          _unicode = unicode 
  56   
  57  noiselimit = 0 
  58   
def initialize_logger(level=logging.WARN):
	"""Sets up basic logging of portage activities
	Args:
		level: the level to emit messages at ('info', 'debug', 'warning' ...)
	Returns:
		None
	"""
	# Bug fix: the 'level' argument was previously ignored and the root
	# logger was always configured at logging.WARN.
	logging.basicConfig(level=level,
		format='[%(levelname)-4s] %(message)s')
67
def writemsg(mystr, noiselevel=0, fd=None):
	"""Prints out warning and debug messages based on the noiselimit setting"""
	# Messages whose noiselevel exceeds the module-global noiselimit are
	# suppressed entirely; fd defaults to stderr.
	global noiselimit
	if fd is None:
		fd = sys.stderr
	if noiselevel <= noiselimit:
		# avoid potential UnicodeEncodeError
		if isinstance(fd, io.StringIO):
			# StringIO accepts only text, so decode instead of encode.
			mystr = _unicode_decode(mystr,
				encoding=_encodings['content'], errors='replace')
		else:
			mystr = _unicode_encode(mystr,
				encoding=_encodings['stdio'], errors='backslashreplace')
			if sys.hexversion >= 0x3000000 and fd in (sys.stdout, sys.stderr):
				# On Python 3, encoded bytes must go to the underlying
				# binary buffer of stdout/stderr.
				fd = fd.buffer
		fd.write(mystr)
		fd.flush()
85
def writemsg_stdout(mystr, noiselevel=0):
	"""Like writemsg(), but directs the message to stdout instead of
	the default stderr (still subject to the noiselimit setting)."""
	writemsg(mystr, fd=sys.stdout, noiselevel=noiselevel)
89
def writemsg_level(msg, level=0, noiselevel=0):
	"""
	Emit a message through writemsg(), routed by severity: levels at or
	above logging.WARNING go to stderr, anything lower goes to stdout.

	@type msg: str
	@param msg: a message string, including newline if appropriate
	@type level: int
	@param level: a numeric logging level (see the logging module)
	@type noiselevel: int
	@param noiselevel: passed directly to writemsg
	"""
	out = sys.stdout if level < logging.WARNING else sys.stderr
	writemsg(msg, noiselevel=noiselevel, fd=out)
109
def normalize_path(mypath):
	"""
	Like os.path.normpath(), except that exactly two leading separators
	are also collapsed to one ("//foo" becomes "/foo"): POSIX reserves
	double-slash paths, but portage never wants them.
	"""
	if sys.hexversion >= 0x3000000 and isinstance(mypath, bytes):
		sep = os.path.sep.encode()
	else:
		sep = os.path.sep

	if not mypath.startswith(sep):
		return os.path.normpath(mypath)
	# posixpath.normpath collapses 3 or more leading slashes to just 1,
	# so prepend two more separators to force that collapse.
	return os.path.normpath(sep + sep + mypath)
126
def grabfile(myfilename, compat_level=0, recursive=0, remember_source_file=False):
	"""This function grabs the lines in a file, normalizes whitespace and returns lines in a list; if a line
	begins with a #, it is ignored, as are empty lines"""

	# Always request source files from grablines() so that both return
	# shapes (plain lines or (line, source) tuples) can be built below.
	mylines = grablines(myfilename, recursive, remember_source_file=True)
	newlines = []

	for x, source_file in mylines:
		#the split/join thing removes leading and trailing whitespace, and converts any whitespace in the line
		#into single spaces.
		myline = x.split()
		if x and x[0] != "#":
			# Strip trailing comments: drop everything starting with the
			# first token that begins with '#'.
			mylinetemp = []
			for item in myline:
				if item[:1] != "#":
					mylinetemp.append(item)
				else:
					break
			myline = mylinetemp

		myline = " ".join(myline)
		if not myline:
			continue
		if myline[0] == "#":
			# Check if we have a compat-level string. BC-integration data.
			# '##COMPAT==>N<==' 'some string attached to it'
			mylinetest = myline.split("<==", 1)
			if len(mylinetest) == 2:
				myline_potential = mylinetest[1]
				mylinetest = mylinetest[0].split("##COMPAT==>")
				if len(mylinetest) == 2:
					if compat_level >= int(mylinetest[1]):
						# It's a compat line, and the key matches.
						newlines.append(myline_potential)
				continue
			else:
				# Plain comment line with no compat marker: skip it.
				continue
		if remember_source_file:
			newlines.append((myline, source_file))
		else:
			newlines.append(myline)
	return newlines
169
def map_dictlist_vals(func, myDict):
	"""Apply func to every element of each value list in a dictlist and
	return the result as a new dictlist (the input is left untouched)."""
	return dict((key, [func(item) for item in values])
		for key, values in myDict.items())
178
def stack_dictlist(original_dicts, incremental=0, incrementals=(), ignore_none=0):
	"""
	Stacks an array of dict-types into one array. Optionally merging or
	overwriting matching key/value pairs for the dict[key]->list.
	Returns a single dict. Higher index in lists is preferenced.

	Example usage:
		>>> from portage.util import stack_dictlist
		>>> print stack_dictlist( [{'a':'b'},{'x':'y'}])
		>>> {'a':'b','x':'y'}
		>>> print stack_dictlist( [{'a':'b'},{'a':'c'}], incremental = True )
		>>> {'a':['b','c'] }
		>>> a = {'KEYWORDS':['x86','alpha']}
		>>> b = {'KEYWORDS':['-x86']}
		>>> print stack_dictlist( [a,b] )
		>>> { 'KEYWORDS':['x86','alpha','-x86']}
		>>> print stack_dictlist( [a,b], incremental=True)
		>>> { 'KEYWORDS':['alpha'] }
		>>> print stack_dictlist( [a,b], incrementals=['KEYWORDS'])
		>>> { 'KEYWORDS':['alpha'] }

	@param original_dicts a list of (dictionary objects or None)
	@type list
	@param incremental True or false depending on whether new keys should overwrite
	   keys which already exist.
	@type boolean
	@param incrementals A list of items that should be incremental (-foo removes foo from
	   the returned dict).  Defaults to an empty tuple (fixed from a
	   mutable-default-argument list; only membership is tested, so this
	   is fully backward compatible).
	@type list
	@param ignore_none Appears to be ignored, but probably was used long long ago.
	@type boolean

	"""
	final_dict = {}
	for mydict in original_dicts:
		if mydict is None:
			continue
		for y in mydict:
			if not y in final_dict:
				final_dict[y] = []

			for thing in mydict[y]:
				if thing:
					if incremental or y in incrementals:
						if thing == "-*":
							# Wildcard removal: drop everything seen so far.
							final_dict[y] = []
							continue
						elif thing[:1] == '-':
							# Remove a previously-added token, if present.
							try:
								final_dict[y].remove(thing[1:])
							except ValueError:
								pass
							continue
					if thing not in final_dict[y]:
						final_dict[y].append(thing)
			# Prune keys whose value list ended up empty.
			if y in final_dict and not final_dict[y]:
				del final_dict[y]
	return final_dict
237
def stack_dicts(dicts, incremental=0, incrementals=(), ignore_none=0):
	"""Stacks an array of dict-types into one dict. Optionally merging or
	overwriting matching key/value pairs for the dict[key]->string.
	Returns a single dict.

	@param dicts: a list of (dictionary objects or None)
	@param incremental: when true, values of duplicate keys are
		concatenated (space separated) instead of overwritten
	@param incrementals: keys that are always treated incrementally.
		Defaults to an empty tuple (fixed from a mutable-default-argument
		list; only membership is tested, so this is backward compatible).
	@param ignore_none: unused; kept for backward compatibility
	"""
	final_dict = {}
	for mydict in dicts:
		if not mydict:
			continue
		for k, v in mydict.items():
			if k in final_dict and (incremental or (k in incrementals)):
				# Append to the existing value, space separated.
				final_dict[k] += " " + v
			else:
				final_dict[k] = v
	return final_dict
252
def append_repo(atom_list, repo_name, remember_source_file=False):
	"""
	Takes a list of valid atoms without repo spec and appends ::repo_name.
	If an atom already has a repo part, then it is preserved (see bug #461948).

	@param atom_list: list of Atom instances, or of (Atom, source_file)
		tuples when remember_source_file is True
	@param repo_name: repository name appended to atoms lacking one
	@param remember_source_file: treat list items as (atom, source_file)
		pairs and preserve the pairing in the result
	@return: a new list with the same shape as atom_list
	"""
	# Replaced the fragile "cond and a or b" idiom with a conditional
	# expression (the old form silently misbehaves if the true-branch
	# value is ever falsy).
	if remember_source_file:
		return [(atom if atom.repo is not None else atom.with_repo(repo_name),
			source) for atom, source in atom_list]
	return [atom if atom.repo is not None else atom.with_repo(repo_name)
		for atom in atom_list]
264
def stack_lists(lists, incremental=1, remember_source_file=False,
	warn_for_unmatched_removal=False, strict_warn_for_unmatched_removal=False, ignore_repo=False):
	"""Stacks an array of list-types into one array. Optionally removing
	distinct values using '-value' notation. Higher index is preferenced.

	all elements must be hashable."""
	matched_removals = set()
	# Maps source_file -> set of removal tokens that matched nothing.
	unmatched_removals = {}
	# new_list is a dict used as an insertion-ordered set; values hold
	# the source file (or False) for the remember_source_file return.
	new_list = {}
	for sub_list in lists:
		for token in sub_list:
			token_key = token
			if remember_source_file:
				# Items are (token, source_file) tuples in this mode.
				token, source_file = token
			else:
				source_file = False

			if token is None:
				continue

			if incremental:
				if token == "-*":
					# Wildcard removal: drop everything accumulated so far.
					new_list.clear()
				elif token[:1] == '-':
					matched = False
					if ignore_repo and not "::" in token:
						#Let -cat/pkg remove cat/pkg::repo.
						to_be_removed = []
						token_slice = token[1:]
						for atom in new_list:
							atom_without_repo = atom
							if atom.repo is not None:
								# Atom.without_repo instantiates a new Atom,
								# which is unnecessary here, so use string
								# replacement instead.
								atom_without_repo = \
									atom.replace("::" + atom.repo, "", 1)
							if atom_without_repo == token_slice:
								to_be_removed.append(atom)
						if to_be_removed:
							matched = True
							for atom in to_be_removed:
								new_list.pop(atom)
					else:
						try:
							new_list.pop(token[1:])
							matched = True
						except KeyError:
							pass

					if not matched:
						# Record unmatched removals for the optional
						# warning pass below.
						if source_file and \
							(strict_warn_for_unmatched_removal or \
							token_key not in matched_removals):
							unmatched_removals.setdefault(source_file, set()).add(token)
					else:
						matched_removals.add(token_key)
				else:
					new_list[token] = source_file
			else:
				new_list[token] = source_file

	if warn_for_unmatched_removal:
		for source_file, tokens in unmatched_removals.items():
			if len(tokens) > 3:
				# Show only three examples plus a count, to keep the
				# warning readable.
				selected = [tokens.pop(), tokens.pop(), tokens.pop()]
				writemsg(_("--- Unmatched removal atoms in %s: %s and %s more\n") % \
					(source_file, ", ".join(selected), len(tokens)),
					noiselevel=-1)
			else:
				writemsg(_("--- Unmatched removal atom(s) in %s: %s\n") %
					(source_file, ", ".join(tokens)),
					noiselevel=-1)

	if remember_source_file:
		return list(new_list.items())
	else:
		return list(new_list)
342
def grabdict(myfilename, juststrings=0, empty=0, recursive=0, incremental=1):
	"""
	This function grabs the lines in a file, normalizes whitespace and returns lines in a dictionary

	@param myfilename: file to process
	@type myfilename: string (path)
	@param juststrings: only return strings
	@type juststrings: Boolean (integer)
	@param empty: Ignore certain lines
	@type empty: Boolean (integer)
	@param recursive: Recursively grab ( support for /etc/portage/package.keywords/* and friends )
	@type recursive: Boolean (integer)
	@param incremental: Append to the return list, don't overwrite
	@type incremental: Boolean (integer)
	@rtype: Dictionary
	@return:
	1.  Returns the lines in a file in a dictionary, for example:
		'sys-apps/portage x86 amd64 ppc'
		would return
		{"sys-apps/portage" : ['x86', 'amd64', 'ppc']}
	"""
	newdict = {}
	for x in grablines(myfilename, recursive):
		#the split/join thing removes leading and trailing whitespace, and converts any whitespace in the line
		#into single spaces.
		if x[0] == "#":
			# Whole-line comment.
			continue
		myline=x.split()
		# Strip trailing comments: stop at the first token that starts
		# with '#'.
		mylinetemp = []
		for item in myline:
			if item[:1] != "#":
				mylinetemp.append(item)
			else:
				break
		myline = mylinetemp
		# Unless 'empty' is set, lines need a key plus at least one value.
		if len(myline) < 2 and empty == 0:
			continue
		if len(myline) < 1 and empty == 1:
			continue
		if incremental:
			newdict.setdefault(myline[0], []).extend(myline[1:])
		else:
			newdict[myline[0]] = myline[1:]
	if juststrings:
		# Collapse each value list into a single space-separated string.
		for k, v in newdict.items():
			newdict[k] = " ".join(v)
	return newdict
390 391 _eapi_cache = {} 392
def read_corresponding_eapi_file(filename, default="0"):
	"""
	Read the 'eapi' file from the directory 'filename' is in.
	Returns "0" if the file is not present or invalid.
	"""
	eapi_file = os.path.join(os.path.dirname(filename), "eapi")
	try:
		eapi = _eapi_cache[eapi_file]
	except KeyError:
		pass
	else:
		# Cache hit; None is cached when the file was missing or invalid.
		if eapi is None:
			return default
		return eapi

	eapi = None
	try:
		with io.open(_unicode_encode(eapi_file,
			encoding=_encodings['fs'], errors='strict'),
			mode='r', encoding=_encodings['repo.content'], errors='replace') as f:
			lines = f.readlines()
			if len(lines) == 1:
				eapi = lines[0].rstrip("\n")
			else:
				# A valid eapi file must contain exactly one line.
				writemsg(_("--- Invalid 'eapi' file (doesn't contain exactly one line): %s\n") % (eapi_file),
					noiselevel=-1)
	except IOError:
		# Missing/unreadable eapi file: fall through and cache None.
		pass

	_eapi_cache[eapi_file] = eapi
	if eapi is None:
		return default
	return eapi
426
def grabdict_package(myfilename, juststrings=0, recursive=0, allow_wildcard=False, allow_repo=False,
	verify_eapi=False, eapi=None):
	""" Does the same thing as grabdict except it validates keys
	with isvalidatom()"""

	if recursive:
		file_list = _recursive_file_list(myfilename)
	else:
		file_list = [myfilename]

	atoms = {}
	for filename in file_list:
		d = grabdict(filename, juststrings=False,
			empty=True, recursive=False, incremental=True)
		if not d:
			continue
		if verify_eapi and eapi is None:
			# NOTE(review): the eapi file is looked up relative to
			# myfilename (not the per-file 'filename'), and the first
			# value found is reused for all remaining files — confirm
			# this is intended for the recursive case.
			eapi = read_corresponding_eapi_file(myfilename)

		for k, v in d.items():
			try:
				k = Atom(k, allow_wildcard=allow_wildcard,
					allow_repo=allow_repo, eapi=eapi)
			except InvalidAtom as e:
				# Report and skip invalid atoms instead of failing.
				writemsg(_("--- Invalid atom in %s: %s\n") % (filename, e),
					noiselevel=-1)
			else:
				atoms.setdefault(k, []).extend(v)

	if juststrings:
		# Collapse each value list into a single space-separated string.
		for k, v in atoms.items():
			atoms[k] = " ".join(v)

	return atoms
461
def grabfile_package(myfilename, compatlevel=0, recursive=0, allow_wildcard=False, allow_repo=False,
	remember_source_file=False, verify_eapi=False, eapi=None):
	"""
	Like grabfile(), but each line is validated as a package atom;
	invalid atoms are reported via writemsg() and skipped.  Lines with a
	'-' prefix (package.mask style) or, in 'packages' files, a '*'
	prefix are returned as plain strings; plain atoms are returned as
	Atom instances.
	"""
	pkgs=grabfile(myfilename, compatlevel, recursive=recursive, remember_source_file=True)
	if not pkgs:
		return pkgs
	if verify_eapi and eapi is None:
		eapi = read_corresponding_eapi_file(myfilename)
	mybasename = os.path.basename(myfilename)
	atoms = []
	for pkg, source_file in pkgs:
		pkg_orig = pkg
		# for packages and package.mask files
		if pkg[:1] == "-":
			pkg = pkg[1:]
		if pkg[:1] == '*' and mybasename == 'packages':
			pkg = pkg[1:]
		try:
			pkg = Atom(pkg, allow_wildcard=allow_wildcard, allow_repo=allow_repo, eapi=eapi)
		except InvalidAtom as e:
			writemsg(_("--- Invalid atom in %s: %s\n") % (source_file, e),
				noiselevel=-1)
		else:
			if pkg_orig == _unicode(pkg):
				# normal atom, so return as Atom instance
				if remember_source_file:
					atoms.append((pkg, source_file))
				else:
					atoms.append(pkg)
			else:
				# atom has special prefix, so return as string
				if remember_source_file:
					atoms.append((pkg_orig, source_file))
				else:
					atoms.append(pkg_orig)
	return atoms
498
499 -def _recursive_basename_filter(f):
500 return not f.startswith(".") and not f.endswith("~")
501
def _recursive_file_list(path):
	"""Lazily yield regular files at or below path (which may itself be
	a regular file or a directory) in sorted depth-first order, skipping
	VCS directories and names rejected by _recursive_basename_filter."""

	def _raise_if_denied(err):
		# Translate permission failures into portage's exception type;
		# other OS errors are silently skipped by the callers below.
		if err.errno == PermissionDenied.errno:
			raise PermissionDenied(path)

	pending = [os.path.split(path)]

	while pending:
		dirname, basename = pending.pop()
		node = os.path.join(dirname, basename)

		try:
			node_st = os.stat(node)
		except OSError as err:
			_raise_if_denied(err)
			continue

		if stat.S_ISREG(node_st.st_mode):
			if _recursive_basename_filter(basename):
				yield node
		elif stat.S_ISDIR(node_st.st_mode):
			if basename in VCS_DIRS or not _recursive_basename_filter(basename):
				continue
			try:
				entries = os.listdir(node)
			except OSError as err:
				_raise_if_denied(err)
				continue

			# Push in reverse-sorted order so that pop() walks entries
			# in sorted order; files share the stack with directories
			# so the two sort together.
			for entry in sorted(entries, reverse=True):
				pending.append((node, entry))
def grablines(myfilename, recursive=0, remember_source_file=False):
	"""
	Return the lines of myfilename as a list, optionally as
	(line, filename) tuples.  With recursive true, myfilename may be a
	directory and every file under it is read.  Missing or stale files
	produce an empty list rather than an error.
	"""
	mylines = []
	if recursive:
		for f in _recursive_file_list(myfilename):
			mylines.extend(grablines(f, recursive=False,
				remember_source_file=remember_source_file))

	else:
		try:
			with io.open(_unicode_encode(myfilename,
				encoding=_encodings['fs'], errors='strict'),
				mode='r', encoding=_encodings['content'], errors='replace') as myfile:
				if remember_source_file:
					mylines = [(line, myfilename) for line in myfile.readlines()]
				else:
					mylines = myfile.readlines()
		except IOError as e:
			if e.errno == PermissionDenied.errno:
				raise PermissionDenied(myfilename)
			elif e.errno in (errno.ENOENT, errno.ESTALE):
				# Nonexistent or stale files are treated as empty.
				pass
			else:
				raise
	return mylines
564
def writedict(mydict, myfilename, writekey=True):
	"""Atomically write a dict to a file, one entry per line.  With
	writekey=False only the values are written (each must already be a
	string); otherwise each line is the key followed by its
	space-joined value list."""
	if writekey:
		lines = ["%s %s\n" % (k, " ".join(v)) for k, v in mydict.items()]
	else:
		lines = [v + "\n" for v in mydict.values()]
	write_atomic(myfilename, "".join(lines))
576
def shlex_split(s):
	"""
	Equivalent to shlex.split(), except that on Python 2 the input is
	temporarily encoded to bytes, since Python 2's shlex.split() does
	not handle unicode strings.
	"""
	needs_encoding = sys.hexversion < 0x3000000 and not isinstance(s, bytes)
	if needs_encoding:
		s = _unicode_encode(s)
	parts = shlex.split(s)
	if needs_encoding:
		parts = [_unicode_decode(x) for x in parts]
	return parts
590
class _getconfig_shlex(shlex.shlex):
	"""shlex subclass used by getconfig(), adding optional handling of
	the 'source' keyword with variable expansion of sourced file names,
	and a tolerant mode for missing sourced files."""

	def __init__(self, portage_tolerant=False, **kwargs):
		# portage_tolerant: when True, a missing sourced file produces a
		# warning instead of a ParseError.
		shlex.shlex.__init__(self, **kwargs)
		self.__portage_tolerant = portage_tolerant

	def allow_sourcing(self, var_expand_map):
		# Enable the 'source' statement; var_expand_map supplies values
		# for variable references inside sourced file names.
		self.source = portage._native_string("source")
		self.var_expand_map = var_expand_map

	def sourcehook(self, newfile):
		# Expand variables in the sourced path, then defer to the base
		# implementation to open it.
		try:
			newfile = varexpand(newfile, self.var_expand_map)
			return shlex.shlex.sourcehook(self, newfile)
		except EnvironmentError as e:
			if e.errno == PermissionDenied.errno:
				raise PermissionDenied(newfile)
			if e.errno not in (errno.ENOENT, errno.ENOTDIR):
				# Unexpected I/O failure: report and re-raise.
				writemsg("open('%s', 'r'): %s\n" % (newfile, e), noiselevel=-1)
				raise

			msg = self.error_leader()
			if e.errno == errno.ENOTDIR:
				msg += _("%s: Not a directory") % newfile
			else:
				msg += _("%s: No such file or directory") % newfile

			if self.__portage_tolerant:
				writemsg("%s\n" % msg, noiselevel=-1)
			else:
				raise ParseError(msg)
			# In tolerant mode, substitute an empty stream for the
			# missing file so parsing can continue.
			return (newfile, io.StringIO())
623 624 _invalid_var_name_re = re.compile(r'^\d|\W') 625
def getconfig(mycfg, tolerant=False, allow_sourcing=False, expand=True,
	recursive=False):
	"""
	Parse a bash-like key=value config file and return the variables as
	a dict, or None when the file does not exist.

	@param mycfg: path of the config file (or of a directory, when
		recursive is True)
	@param tolerant: report syntax errors as warnings instead of raising
		ParseError
	@param allow_sourcing: honor the bash 'source' statement
	@param expand: when True, expand variable references; when a dict,
		additionally seed the substitution map with its contents
	@param recursive: parse every file under mycfg and merge the results
	"""
	if isinstance(expand, dict):
		# Some existing variable definitions have been
		# passed in, for use in substitutions.
		expand_map = expand
		expand = True
	else:
		expand_map = {}
	mykeys = {}

	if recursive:
		# Emulate source commands so that syntax error messages
		# can display real file names and line numbers.
		if not expand:
			expand_map = False
		fname = None
		for fname in _recursive_file_list(mycfg):
			mykeys.update(getconfig(fname, tolerant=tolerant,
				allow_sourcing=allow_sourcing, expand=expand_map,
				recursive=False) or {})
		if fname is None:
			# No files found at all: mirror the missing-file return.
			return None
		return mykeys

	f = None
	try:
		# NOTE: shlex doesn't support unicode objects with Python 2
		# (produces spurious \0 characters).
		if sys.hexversion < 0x3000000:
			f = open(_unicode_encode(mycfg,
				encoding=_encodings['fs'], errors='strict'), 'rb')
		else:
			f = open(_unicode_encode(mycfg,
				encoding=_encodings['fs'], errors='strict'), mode='r',
				encoding=_encodings['content'], errors='replace')
		content = f.read()
	except IOError as e:
		if e.errno == PermissionDenied.errno:
			raise PermissionDenied(mycfg)
		if e.errno != errno.ENOENT:
			writemsg("open('%s', 'r'): %s\n" % (mycfg, e), noiselevel=-1)
			if e.errno not in (errno.EISDIR,):
				raise
		return None
	finally:
		if f is not None:
			f.close()

	# Since this file has unicode_literals enabled, and Python 2's
	# shlex implementation does not support unicode, the following code
	# uses _native_string() to encode unicode literals when necessary.

	# Workaround for avoiding a silent error in shlex that is
	# triggered by a source statement at the end of the file
	# without a trailing newline after the source statement.
	if content and content[-1] != portage._native_string('\n'):
		content += portage._native_string('\n')

	# Warn about dos-style line endings since that prevents
	# people from being able to source them with bash.
	if portage._native_string('\r') in content:
		writemsg(("!!! " + _("Please use dos2unix to convert line endings " + \
			"in config file: '%s'") + "\n") % mycfg, noiselevel=-1)

	lex = None
	try:
		# The default shlex.sourcehook() implementation
		# only joins relative paths when the infile
		# attribute is properly set.
		lex = _getconfig_shlex(instream=content, infile=mycfg, posix=True,
			portage_tolerant=tolerant)
		lex.wordchars = portage._native_string(string.digits +
			string.ascii_letters + "~!@#$%*_\:;?,./-+{}")
		lex.quotes = portage._native_string("\"'")
		if allow_sourcing:
			lex.allow_sourcing(expand_map)

		while True:
			key = _unicode_decode(lex.get_token())
			if key == "export":
				# Skip the 'export' keyword and read the real name.
				key = _unicode_decode(lex.get_token())
			if key is None:
				#normal end of file
				break

			equ = _unicode_decode(lex.get_token())
			if not equ:
				msg = lex.error_leader() + _("Unexpected EOF")
				if not tolerant:
					raise ParseError(msg)
				else:
					writemsg("%s\n" % msg, noiselevel=-1)
					return mykeys

			elif equ != "=":
				msg = lex.error_leader() + \
					_("Invalid token '%s' (not '=')") % (equ,)
				if not tolerant:
					raise ParseError(msg)
				else:
					writemsg("%s\n" % msg, noiselevel=-1)
					return mykeys

			val = _unicode_decode(lex.get_token())
			if val is None:
				msg = lex.error_leader() + \
					_("Unexpected end of config file: variable '%s'") % (key,)
				if not tolerant:
					raise ParseError(msg)
				else:
					writemsg("%s\n" % msg, noiselevel=-1)
					return mykeys

			if _invalid_var_name_re.search(key) is not None:
				msg = lex.error_leader() + \
					_("Invalid variable name '%s'") % (key,)
				if not tolerant:
					raise ParseError(msg)
				# Tolerant mode: warn and skip this assignment.
				writemsg("%s\n" % msg, noiselevel=-1)
				continue

			if expand:
				mykeys[key] = varexpand(val, mydict=expand_map,
					error_leader=lex.error_leader)
				# Later assignments may reference this variable.
				expand_map[key] = mykeys[key]
			else:
				mykeys[key] = val
	except SystemExit as e:
		raise
	except Exception as e:
		if isinstance(e, ParseError) or lex is None:
			raise
		# Prefix unexpected errors with file/line context from the lexer.
		msg = "%s%s" % (lex.error_leader(), e)
		writemsg("%s\n" % msg, noiselevel=-1)
		raise

	return mykeys
765 766 _varexpand_word_chars = frozenset(string.ascii_letters + string.digits + "_") 767 _varexpand_unexpected_eof_msg = "unexpected EOF while looking for matching `}'" 768
def varexpand(mystring, mydict=None, error_leader=None):
	"""
	new variable expansion code.  Preserves quotes, handles \n, etc.
	This code is used by the configfile code, as well as others (parser)
	This would be a good bunch of code to port to C.
	"""
	if mydict is None:
		mydict = {}

	numvars = 0
	# in single, double quotes
	insing = 0
	indoub = 0
	pos = 0
	length = len(mystring)
	newstring = []
	while pos < length:
		current = mystring[pos]
		if current == "'":
			if (indoub):
				newstring.append("'")
			else:
				newstring.append("'") # Quote removal is handled by shlex.
				insing=not insing
			pos += 1
			continue
		elif current == '"':
			if (insing):
				newstring.append('"')
			else:
				newstring.append('"') # Quote removal is handled by shlex.
				indoub=not indoub
			pos += 1
			continue
		if not insing:
			#expansion time
			if current == "\n":
				#convert newlines to spaces
				newstring.append(" ")
				pos += 1
			elif current == "\\":
				# For backslash expansion, this function used to behave like
				# echo -e, but that's not needed for our purposes. We want to
				# behave like bash does when expanding a variable assignment
				# in a sourced file, in which case it performs backslash
				# removal for \\ and \$ but nothing more. It also removes
				# escaped newline characters. Note that we don't handle
				# escaped quotes here, since getconfig() uses shlex
				# to handle that earlier.
				if pos + 1 >= len(mystring):
					# Trailing backslash: keep it literally.
					newstring.append(current)
					break
				else:
					current = mystring[pos + 1]
					pos += 2
					if current == "$":
						newstring.append(current)
					elif current == "\\":
						newstring.append(current)
						# BUG: This spot appears buggy, but it's intended to
						# be bug-for-bug compatible with existing behavior.
						if pos < length and \
							mystring[pos] in ("'", '"', "$"):
							newstring.append(mystring[pos])
							pos += 1
					elif current == "\n":
						# Escaped newline is removed entirely.
						pass
					else:
						# Any other escape is kept verbatim (backslash
						# plus the following character).
						newstring.append(mystring[pos - 2:pos])
					continue
			elif current == "$":
				pos += 1
				if mystring[pos] == "{":
					pos += 1
					braced = True
				else:
					braced = False
				myvstart = pos
				# Scan the variable name.
				while mystring[pos] in _varexpand_word_chars:
					if pos + 1 >= len(mystring):
						if braced:
							# ${NAME ran off the end of the string.
							msg = _varexpand_unexpected_eof_msg
							if error_leader is not None:
								msg = error_leader() + msg
							writemsg(msg + "\n", noiselevel=-1)
							return ""
						else:
							pos += 1
							break
					pos += 1
				myvarname = mystring[myvstart:pos]
				if braced:
					if mystring[pos] != "}":
						msg = _varexpand_unexpected_eof_msg
						if error_leader is not None:
							msg = error_leader() + msg
						writemsg(msg + "\n", noiselevel=-1)
						return ""
					else:
						pos += 1
				if len(myvarname) == 0:
					# "$" or "${}" with no name at all.
					msg = "$"
					if braced:
						msg += "{}"
					msg += ": bad substitution"
					if error_leader is not None:
						msg = error_leader() + msg
					writemsg(msg + "\n", noiselevel=-1)
					return ""
				numvars += 1
				# Unknown variables expand to nothing (no else branch).
				if myvarname in mydict:
					newstring.append(mydict[myvarname])
			else:
				newstring.append(current)
				pos += 1
		else:
			# Inside single quotes: copy characters verbatim.
			newstring.append(current)
			pos += 1

	return "".join(newstring)
889 890 # broken and removed, but can still be imported 891 pickle_write = None 892
def pickle_read(filename, default=None, debug=0):
	"""
	Unpickle and return the object stored in filename, or default on
	any failure.

	WARNING: unpickling can execute arbitrary code; only use this on
	trusted files.

	@param filename: path of the pickle file
	@param default: value returned when the file is unreadable or invalid
	@param debug: unused; kept for backward compatibility
	@return: the unpickled object, or default on failure
	"""
	if not os.access(filename, os.R_OK):
		writemsg(_("pickle_read(): File not readable. '") + filename + "'\n", 1)
		return default
	try:
		# Bug fix: use a context manager so the file descriptor is not
		# leaked when Unpickler.load() raises.
		with open(_unicode_encode(filename,
			encoding=_encodings['fs'], errors='strict'), 'rb') as myf:
			data = pickle.Unpickler(myf).load()
		writemsg(_("pickle_read(): Loaded pickle. '") + filename + "'\n", 1)
	except SystemExit:
		raise
	except Exception as e:
		# Best-effort loader: any unpickling failure yields the default.
		writemsg(_("!!! Failed to load pickle: ") + str(e) + "\n", 1)
		data = default
	return data
912
def dump_traceback(msg, noiselevel=1):
	"""Write msg followed by either the active exception's traceback
	(when one is being handled) or the current call stack, using
	writemsg() at the given noiselevel."""
	exc_type, exc_value, exc_tb = sys.exc_info()
	if exc_tb:
		frames = traceback.extract_tb(exc_tb)
		error = str(exc_value)
	else:
		# No active exception: dump the caller's stack instead,
		# dropping this function's own frame.
		frames = traceback.extract_stack()[:-1]
		error = None
	writemsg("\n====================================\n", noiselevel=noiselevel)
	writemsg("%s\n\n" % msg, noiselevel=noiselevel)
	for line in traceback.format_list(frames):
		writemsg(line, noiselevel=noiselevel)
	if error:
		writemsg(error + "\n", noiselevel=noiselevel)
	writemsg("====================================\n\n", noiselevel=noiselevel)
928
class cmp_sort_key(object):
	"""
	Adapter that turns an old-style cmp function into a "key" callable
	suitable for list.sort()/sorted(), easing python-3.x porting (where
	the "cmp" keyword argument of list.sort() no longer exists).  Each
	generated key object delegates its __lt__ to the wrapped cmp
	function.

	Beginning with Python 2.7 and 3.2, equivalent functionality is
	provided by functools.cmp_to_key().
	"""
	__slots__ = ("_cmp_func",)

	def __init__(self, cmp_func):
		"""
		@type cmp_func: callable which takes 2 positional arguments
		@param cmp_func: A cmp function.
		"""
		self._cmp_func = cmp_func

	def __call__(self, lhs):
		# Wrap each element in a key object carrying the cmp function.
		return self._cmp_key(self._cmp_func, lhs)

	class _cmp_key(object):
		__slots__ = ("_cmp_func", "_obj")

		def __init__(self, cmp_func, obj):
			self._cmp_func = cmp_func
			self._obj = obj

		def __lt__(self, other):
			# Refuse comparison against foreign key types rather than
			# silently producing a bogus ordering.
			if self.__class__ is not other.__class__:
				raise TypeError("Expected type %s, got %s" % \
					(self.__class__, other.__class__))
			return self._cmp_func(self._obj, other._obj) < 0
965
def unique_array(s):
	"""lifted from python cookbook, credit: Tim Peters
	Return a list of the elements in s in arbitrary order, sans duplicates"""
	# Fast path: hashable elements allow linear-time deduplication.
	try:
		return list(set(s))
	except TypeError:
		pass

	n = len(s)
	# Unhashable but sortable: sort, then sweep out adjacent duplicates.
	try:
		t = sorted(s)
	except TypeError:
		pass
	else:
		assert n > 0
		last = t[0]
		lasti = i = 1
		while i < n:
			if t[i] != last:
				t[lasti] = last = t[i]
				lasti += 1
			i += 1
		return t[:lasti]

	# Last resort: quadratic membership scan, preserving first
	# occurrence order.
	result = []
	for item in s:
		if item not in result:
			result.append(item)
	return result
999
def unique_everseen(iterable, key=None):
	"""
	List unique elements, preserving order. Remember all elements ever seen.
	Taken from itertools documentation.
	"""
	# unique_everseen('AAAABBBCCDAABBB') --> A B C D
	# unique_everseen('ABBCcAD', str.lower) --> A B C D
	observed = set()
	if key is None:
		for element in iterable:
			if element not in observed:
				observed.add(element)
				yield element
	else:
		for element in iterable:
			marker = key(element)
			if marker not in observed:
				observed.add(marker)
				yield element
1019
def apply_permissions(filename, uid=-1, gid=-1, mode=-1, mask=-1,
	stat_cached=None, follow_links=True):
	"""Apply user, group, and mode bits to a file if the existing bits do not
	already match. The default behavior is to force an exact match of mode
	bits. When mask=0 is specified, mode bits on the target file are allowed
	to be a superset of the mode argument (via logical OR). When mask>0, the
	mode bits that the target file is allowed to have are restricted via
	logical XOR.
	Returns True if the permissions were modified and False otherwise.

	@param filename: path of the file to modify
	@param uid: desired owner uid, or -1 to leave ownership unchanged
	@param gid: desired group gid, or -1 to leave the group unchanged
	@param mode: desired permission bits, or -1 for no explicit mode
	@param mask: see above; -1 disables mask handling
	@param stat_cached: optional stat result for filename, avoiding a
		redundant stat call
	@param follow_links: if False, operate on a symlink itself rather
		than its target
	"""

	modified = False

	# Since Python 3.4, chown requires int type (no proxies).
	uid = int(uid)
	gid = int(gid)

	if stat_cached is None:
		try:
			if follow_links:
				stat_cached = os.stat(filename)
			else:
				stat_cached = os.lstat(filename)
		except OSError as oe:
			# Translate raw OSError values into portage's exception types.
			func_call = "stat('%s')" % filename
			if oe.errno == errno.EPERM:
				raise OperationNotPermitted(func_call)
			elif oe.errno == errno.EACCES:
				raise PermissionDenied(func_call)
			elif oe.errno == errno.ENOENT:
				raise FileNotFound(filename)
			else:
				raise

	# Only call chown when the current ownership actually differs.
	if (uid != -1 and uid != stat_cached.st_uid) or \
		(gid != -1 and gid != stat_cached.st_gid):
		try:
			if follow_links:
				os.chown(filename, uid, gid)
			else:
				portage.data.lchown(filename, uid, gid)
			modified = True
		except OSError as oe:
			func_call = "chown('%s', %i, %i)" % (filename, uid, gid)
			if oe.errno == errno.EPERM:
				raise OperationNotPermitted(func_call)
			elif oe.errno == errno.EACCES:
				raise PermissionDenied(func_call)
			elif oe.errno == errno.EROFS:
				raise ReadOnlyFileSystem(func_call)
			elif oe.errno == errno.ENOENT:
				raise FileNotFound(filename)
			else:
				raise

	new_mode = -1
	st_mode = stat_cached.st_mode & 0o7777 # protect from unwanted bits
	if mask >= 0:
		if mode == -1:
			mode = 0 # Don't add any mode bits when mode is unspecified.
		else:
			mode = mode & 0o7777
		# A change is needed if some requested mode bits are missing, or
		# if the file has bits that the mask forbids. The (mask ^ m) & m
		# expression clears exactly those bits of m that are set in mask.
		if (mode & st_mode != mode) or \
			((mask ^ st_mode) & st_mode != st_mode):
			new_mode = mode | st_mode
			new_mode = (mask ^ new_mode) & new_mode
	elif mode != -1:
		mode = mode & 0o7777 # protect from unwanted bits
		if mode != st_mode:
			new_mode = mode

	# The chown system call may clear S_ISUID and S_ISGID
	# bits, so those bits are restored if necessary.
	if modified and new_mode == -1 and \
		(st_mode & stat.S_ISUID or st_mode & stat.S_ISGID):
		if mode == -1:
			new_mode = st_mode
		else:
			mode = mode & 0o7777
			if mask >= 0:
				new_mode = mode | st_mode
				new_mode = (mask ^ new_mode) & new_mode
			else:
				new_mode = mode
			# Skip the chmod if the recomputed mode has no setuid/setgid
			# bit to restore anyway.
			if not (new_mode & stat.S_ISUID or new_mode & stat.S_ISGID):
				new_mode = -1

	if not follow_links and stat.S_ISLNK(stat_cached.st_mode):
		# Mode doesn't matter for symlinks.
		new_mode = -1

	if new_mode != -1:
		try:
			os.chmod(filename, new_mode)
			modified = True
		except OSError as oe:
			func_call = "chmod('%s', %s)" % (filename, oct(new_mode))
			if oe.errno == errno.EPERM:
				raise OperationNotPermitted(func_call)
			elif oe.errno == errno.EACCES:
				raise PermissionDenied(func_call)
			elif oe.errno == errno.EROFS:
				raise ReadOnlyFileSystem(func_call)
			elif oe.errno == errno.ENOENT:
				raise FileNotFound(filename)
			# Unrecognized errno: re-raise the original OSError.
			raise
	return modified
1126
def apply_stat_permissions(filename, newstat, **kwargs):
	"""Convenience wrapper around apply_secpass_permissions which pulls
	the uid, gid and mode arguments out of a stat result object."""
	return apply_secpass_permissions(
		filename,
		uid=newstat.st_uid,
		gid=newstat.st_gid,
		mode=newstat.st_mode,
		**kwargs)
1132
def apply_recursive_permissions(top, uid=-1, gid=-1,
	dirmode=-1, dirmask=-1, filemode=-1, filemask=-1, onerror=None):
	"""Recursively apply ownership and mode settings beneath top via
	apply_secpass_permissions. If the optional onerror argument is given,
	it must be a function taking a single PortageException argument; the
	default handler reports errors on stderr. Returns True only when
	every requested permission change was fully applied, and False when
	some were left unapplied."""

	# Avoid issues with circular symbolic links, as in bug #339670.
	follow_links = False

	if onerror is None:
		# By default, report problems on stderr rather than silently
		# discarding them. Callers may pass a quiet handler instead.
		def onerror(e):
			if isinstance(e, OperationNotPermitted):
				writemsg(_("Operation Not Permitted: %s\n") % str(e),
					noiselevel=-1)
			elif isinstance(e, FileNotFound):
				writemsg(_("File Not Found: '%s'\n") % str(e), noiselevel=-1)
			else:
				raise

	everything_applied = True
	for parent, subdirs, subfiles in os.walk(top):
		try:
			if not apply_secpass_permissions(parent,
				uid=uid, gid=gid, mode=dirmode, mask=dirmask,
				follow_links=follow_links):
				everything_applied = False
		except PortageException as e:
			everything_applied = False
			onerror(e)

		for fname in subfiles:
			try:
				if not apply_secpass_permissions(
					os.path.join(parent, fname),
					uid=uid, gid=gid, mode=filemode, mask=filemask,
					follow_links=follow_links):
					everything_applied = False
			except PortageException as e:
				# Files can legitimately disappear mid-walk (for example
				# when adjusting permissions on DISTCC_DIR), so
				# InvalidLocation errors such as FileNotFound and
				# DirectoryNotFound are ignored.
				if not isinstance(e, portage.exception.InvalidLocation):
					everything_applied = False
					onerror(e)
	return everything_applied
def apply_secpass_permissions(filename, uid=-1, gid=-1, mode=-1, mask=-1,
	stat_cached=None, follow_links=True):
	"""A wrapper around apply_permissions that uses secpass and simple
	logic to apply as much of the permissions as possible without
	generating an obviously avoidable permission exception. Despite
	attempts to avoid an exception, it's possible that one will be raised
	anyway, so be prepared.
	Returns True if all permissions are applied and False if some are left
	unapplied.

	Parameters mirror apply_permissions(); see that function for details.
	"""

	if stat_cached is None:
		try:
			if follow_links:
				stat_cached = os.stat(filename)
			else:
				stat_cached = os.lstat(filename)
		except OSError as oe:
			# Translate raw OSError values into portage's exception types.
			func_call = "stat('%s')" % filename
			if oe.errno == errno.EPERM:
				raise OperationNotPermitted(func_call)
			elif oe.errno == errno.EACCES:
				raise PermissionDenied(func_call)
			elif oe.errno == errno.ENOENT:
				raise FileNotFound(filename)
			else:
				raise

	all_applied = True

	# With secpass < 2 (presumably meaning the process lacks full
	# superuser privileges -- see portage.data), drop ownership requests
	# that would obviously fail: changing to a different uid, or to a
	# gid that is not among this process's supplementary groups. Such
	# dropped requests are reported via the False return value.
	if portage.data.secpass < 2:

		if uid != -1 and \
			uid != stat_cached.st_uid:
			all_applied = False
			uid = -1

		if gid != -1 and \
			gid != stat_cached.st_gid and \
			gid not in os.getgroups():
			all_applied = False
			gid = -1

	apply_permissions(filename, uid=uid, gid=gid, mode=mode, mask=mask,
		stat_cached=stat_cached, follow_links=follow_links)
	return all_applied
1229
class atomic_ofstream(ObjectProxy):
	"""Write a file atomically via os.rename(). Atomic replacement prevents
	interprocess interference and prevents corruption of the target
	file when the write is interrupted (for example, when an 'out of space'
	error occurs).

	All writes go to a temporary file named <target>.<pid> in the same
	directory as the target; close() renames it over the target, while
	abort() discards it. Attribute access is proxied to the underlying
	file object, so this can be used like an ordinary file handle.
	"""

	def __init__(self, filename, mode='w', follow_links=True, **kargs):
		"""Opens a temporary filename.pid in the same directory as filename.

		@param filename: final target path
		@param mode: open mode; a 'b' in it selects binary (plain open()),
			otherwise io.open() is used with default text encoding/errors
		@param follow_links: if True, write next to the symlink target
			(os.path.realpath) instead of the symlink itself
		@param kargs: extra keyword arguments forwarded to the open call
		"""
		ObjectProxy.__init__(self)
		object.__setattr__(self, '_aborted', False)
		if 'b' in mode:
			open_func = open
		else:
			open_func = io.open
			kargs.setdefault('encoding', _encodings['content'])
			kargs.setdefault('errors', 'backslashreplace')

		if follow_links:
			canonical_path = os.path.realpath(filename)
			object.__setattr__(self, '_real_name', canonical_path)
			tmp_name = "%s.%i" % (canonical_path, os.getpid())
			try:
				object.__setattr__(self, '_file',
					open_func(_unicode_encode(tmp_name,
						encoding=_encodings['fs'], errors='strict'),
					mode=mode, **portage._native_kwargs(kargs)))
				return
			except IOError as e:
				if canonical_path == filename:
					raise
				# Ignore this error, since it's irrelevant
				# and the below open call will produce a
				# new error if necessary.

		# Fall back to opening the temp file next to the symlink itself.
		object.__setattr__(self, '_real_name', filename)
		tmp_name = "%s.%i" % (filename, os.getpid())
		object.__setattr__(self, '_file',
			open_func(_unicode_encode(tmp_name,
				encoding=_encodings['fs'], errors='strict'),
			mode=mode, **kargs))

	def _get_target(self):
		# Used by ObjectProxy to locate the wrapped file object.
		return object.__getattribute__(self, '_file')

	if sys.hexversion >= 0x3000000:

		def __getattribute__(self, attr):
			# Route everything except our own lifecycle methods to the
			# underlying file object.
			if attr in ('close', 'abort', '__del__'):
				return object.__getattribute__(self, attr)
			return getattr(object.__getattribute__(self, '_file'), attr)

	else:

		# For TextIOWrapper, automatically coerce write calls to
		# unicode, in order to avoid TypeError when writing raw
		# bytes with python2.

		def __getattribute__(self, attr):
			if attr in ('close', 'abort', 'write', '__del__'):
				return object.__getattribute__(self, attr)
			return getattr(object.__getattribute__(self, '_file'), attr)

		def write(self, s):
			# Only reachable on python2 (see __getattribute__ above).
			f = object.__getattribute__(self, '_file')
			if isinstance(f, io.TextIOWrapper):
				s = _unicode_decode(s)
			return f.write(s)

	def close(self):
		"""Closes the temporary file, copies permissions (if possible),
		and performs the atomic replacement via os.rename(). If the abort()
		method has been called, then the temp file is closed and removed."""
		f = object.__getattribute__(self, '_file')
		real_name = object.__getattribute__(self, '_real_name')
		if not f.closed:
			try:
				f.close()
				if not object.__getattribute__(self, '_aborted'):
					try:
						apply_stat_permissions(f.name, os.stat(real_name))
					except OperationNotPermitted:
						pass
					except FileNotFound:
						pass
					except OSError as oe: # from the above os.stat call
						if oe.errno in (errno.ENOENT, errno.EPERM):
							pass
						else:
							raise
					os.rename(f.name, real_name)
			finally:
				# Make sure we cleanup the temp file
				# even if an exception is raised.
				try:
					os.unlink(f.name)
				except OSError as oe:
					pass

	def abort(self):
		"""If an error occurs while writing the file, the user should
		call this method in order to leave the target file unchanged.
		This will call close() automatically."""
		if not object.__getattribute__(self, '_aborted'):
			object.__setattr__(self, '_aborted', True)
			self.close()

	def __del__(self):
		"""If the user does not explicitly call close(), it is
		assumed that an error has occurred, so we abort()."""
		try:
			f = object.__getattribute__(self, '_file')
		except AttributeError:
			pass
		else:
			if not f.closed:
				self.abort()
		# ensure destructor from the base class is called
		base_destructor = getattr(ObjectProxy, '__del__', None)
		if base_destructor is not None:
			base_destructor(self)
1350
def write_atomic(file_path, content, **kwargs):
	"""Atomically replace file_path with the given content, translating
	IOError/OSError into portage's exception hierarchy. Extra keyword
	arguments are forwarded to atomic_ofstream."""
	stream = None
	try:
		stream = atomic_ofstream(file_path, **kwargs)
		stream.write(content)
		stream.close()
	except (IOError, OSError) as e:
		# Discard the temp file before reporting the failure.
		if stream:
			stream.abort()
		func_call = "write_atomic('%s')" % file_path
		if e.errno == errno.EPERM:
			raise OperationNotPermitted(func_call)
		elif e.errno == errno.EACCES:
			raise PermissionDenied(func_call)
		elif e.errno == errno.EROFS:
			raise ReadOnlyFileSystem(func_call)
		elif e.errno == errno.ENOENT:
			raise FileNotFound(file_path)
		else:
			raise
1371
def ensure_dirs(dir_path, **kwargs):
	"""Create a directory (including parents) and call apply_permissions.

	Returns True if a directory was created or the permissions needed to
	be modified, and False otherwise. EEXIST errors are tolerated, which
	makes this suitable for atomic directory creation where multiple
	processes may race to create the same directory. Any extra keyword
	arguments are passed through to apply_permissions.
	"""

	made_dir = False

	try:
		os.makedirs(dir_path)
		made_dir = True
	except OSError as oe:
		func_call = "makedirs('%s')" % dir_path
		if oe.errno in (errno.EEXIST,):
			pass
		elif os.path.isdir(dir_path):
			# NOTE: DragonFly raises EPERM for makedir('/')
			# and that is supposed to be ignored here.
			# Also, sometimes mkdir raises EISDIR on FreeBSD
			# and we want to ignore that too (bug #187518).
			pass
		elif oe.errno == errno.EPERM:
			raise OperationNotPermitted(func_call)
		elif oe.errno == errno.EACCES:
			raise PermissionDenied(func_call)
		elif oe.errno == errno.EROFS:
			raise ReadOnlyFileSystem(func_call)
		else:
			raise
	if kwargs:
		perms_changed = apply_permissions(dir_path, **kwargs)
	else:
		perms_changed = False
	return made_dir or perms_changed
1411
class LazyItemsDict(UserDict):
	"""A mapping object that behaves like a standard dict except that it allows
	for lazy initialization of values via callable objects. Lazy items can be
	overwritten and deleted just as normal items."""

	# lazy_items maps keys to _LazyItem records; the underlying dict
	# stores a None placeholder for each lazy key so that iteration,
	# len() and membership tests behave normally.
	__slots__ = ('lazy_items',)

	def __init__(self, *args, **kwargs):

		self.lazy_items = {}
		UserDict.__init__(self, *args, **kwargs)

	def addLazyItem(self, item_key, value_callable, *pargs, **kwargs):
		"""Add a lazy item for the given key. When the item is requested,
		value_callable will be called with *pargs and **kwargs arguments.
		The callable is invoked on every access (no caching)."""
		self.lazy_items[item_key] = \
			self._LazyItem(value_callable, pargs, kwargs, False)
		# make it show up in self.keys(), etc...
		UserDict.__setitem__(self, item_key, None)

	def addLazySingleton(self, item_key, value_callable, *pargs, **kwargs):
		"""This is like addLazyItem except value_callable will only be called
		a maximum of 1 time and the result will be cached for future requests."""
		self.lazy_items[item_key] = \
			self._LazyItem(value_callable, pargs, kwargs, True)
		# make it show up in self.keys(), etc...
		UserDict.__setitem__(self, item_key, None)

	def update(self, *args, **kwargs):
		# Mirrors dict.update(), but when the source is another
		# LazyItemsDict its lazy items are transferred without being
		# evaluated.
		if len(args) > 1:
			raise TypeError(
				"expected at most 1 positional argument, got " + \
				repr(len(args)))
		if args:
			map_obj = args[0]
		else:
			map_obj = None
		if map_obj is None:
			pass
		elif isinstance(map_obj, LazyItemsDict):
			for k in map_obj:
				if k in map_obj.lazy_items:
					UserDict.__setitem__(self, k, None)
				else:
					UserDict.__setitem__(self, k, map_obj[k])
			self.lazy_items.update(map_obj.lazy_items)
		else:
			UserDict.update(self, map_obj)
		if kwargs:
			UserDict.update(self, kwargs)

	def __getitem__(self, item_key):
		if item_key in self.lazy_items:
			lazy_item = self.lazy_items[item_key]
			pargs = lazy_item.pargs
			if pargs is None:
				pargs = ()
			kwargs = lazy_item.kwargs
			if kwargs is None:
				kwargs = {}
			result = lazy_item.func(*pargs, **kwargs)
			if lazy_item.singleton:
				# Cache the computed value; this assignment also
				# removes the lazy item (see __setitem__).
				self[item_key] = result
			return result

		else:
			return UserDict.__getitem__(self, item_key)

	def __setitem__(self, item_key, value):
		# An explicit assignment replaces any pending lazy item.
		if item_key in self.lazy_items:
			del self.lazy_items[item_key]
		UserDict.__setitem__(self, item_key, value)

	def __delitem__(self, item_key):
		if item_key in self.lazy_items:
			del self.lazy_items[item_key]
		UserDict.__delitem__(self, item_key)

	def clear(self):
		self.lazy_items.clear()
		UserDict.clear(self)

	def copy(self):
		return self.__copy__()

	def __copy__(self):
		# Shallow copy: the constructor takes the update() path above,
		# so lazy items are transferred unevaluated.
		return self.__class__(self)

	def __deepcopy__(self, memo=None):
		"""
		This forces evaluation of each contained lazy item, and deepcopy of
		the result. A TypeError is raised if any contained lazy item is not
		a singleton, since it is not necessarily possible for the behavior
		of this type of item to be safely preserved.
		"""
		if memo is None:
			memo = {}
		result = self.__class__()
		memo[id(self)] = result
		for k in self:
			k_copy = deepcopy(k, memo)
			lazy_item = self.lazy_items.get(k)
			if lazy_item is not None:
				if not lazy_item.singleton:
					raise TypeError("LazyItemsDict " + \
						"deepcopy is unsafe with lazy items that are " + \
						"not singletons: key=%s value=%s" % (k, lazy_item,))
			# self[k] evaluates any (singleton) lazy item here.
			UserDict.__setitem__(result, k_copy, deepcopy(self[k], memo))
		return result

	class _LazyItem(object):
		# Record describing one lazy value: the callable, its stored
		# arguments, and whether the result should be cached (singleton).

		__slots__ = ('func', 'pargs', 'kwargs', 'singleton')

		def __init__(self, func, pargs, kwargs, singleton):

			# Normalize empty argument containers to None; __getitem__
			# converts them back when calling func.
			if not pargs:
				pargs = None
			if not kwargs:
				kwargs = None

			self.func = func
			self.pargs = pargs
			self.kwargs = kwargs
			self.singleton = singleton

		def __copy__(self):
			return self.__class__(self.func, self.pargs,
				self.kwargs, self.singleton)

		def __deepcopy__(self, memo=None):
			"""
			Override this since the default implementation can fail silently,
			leaving some attributes unset.
			"""
			if memo is None:
				memo = {}
			result = self.__copy__()
			memo[id(self)] = result
			result.func = deepcopy(self.func, memo)
			result.pargs = deepcopy(self.pargs, memo)
			result.kwargs = deepcopy(self.kwargs, memo)
			result.singleton = deepcopy(self.singleton, memo)
			return result
1556
class ConfigProtect(object):
	"""Implements the CONFIG_PROTECT/CONFIG_PROTECT_MASK logic that
	decides whether a given config file path is protected from being
	overwritten during merges."""

	def __init__(self, myroot, protect_list, mask_list,
		case_insensitive=False):
		"""
		@param myroot: root directory that protect/mask entries are
			interpreted relative to
		@param protect_list: iterable of CONFIG_PROTECT entries
		@param mask_list: iterable of CONFIG_PROTECT_MASK entries
		@param case_insensitive: compare paths case-insensitively if True
		"""
		self.myroot = myroot
		self.protect_list = protect_list
		self.mask_list = mask_list
		self.case_insensitive = case_insensitive
		self.updateprotect()

	def updateprotect(self):
		"""Update internal state for isprotected() calls. Nonexistent paths
		are ignored."""

		os = _os_merge

		self.protect = []
		self._dirs = set()
		for x in self.protect_list:
			ppath = normalize_path(
				os.path.join(self.myroot, x.lstrip(os.path.sep)))
			# Bug fix: isprotected() lowercases the queried path when
			# case_insensitive is enabled, and the mask list below is
			# stored lowercased, but protect entries were not -- so
			# mixed-case CONFIG_PROTECT entries could never match.
			# Normalize them the same way.
			if self.case_insensitive:
				ppath = ppath.lower()
			# Protect files that don't exist (bug #523684). If the
			# parent directory doesn't exist, we can safely skip it.
			if os.path.isdir(os.path.dirname(ppath)):
				self.protect.append(ppath)
			try:
				if stat.S_ISDIR(os.stat(ppath).st_mode):
					self._dirs.add(ppath)
			except OSError:
				pass

		self.protectmask = []
		for x in self.mask_list:
			ppath = normalize_path(
				os.path.join(self.myroot, x.lstrip(os.path.sep)))
			if self.case_insensitive:
				ppath = ppath.lower()
			try:
				# Use lstat so that anything, even a broken symlink can
				# be protected.
				if stat.S_ISDIR(os.lstat(ppath).st_mode):
					self._dirs.add(ppath)
				self.protectmask.append(ppath)
				# Now use stat in case this is a symlink to a directory.
				if stat.S_ISDIR(os.stat(ppath).st_mode):
					self._dirs.add(ppath)
			except OSError:
				# If it doesn't exist, there's no need to mask it.
				pass

	def isprotected(self, obj):
		"""Returns True if obj is protected, False otherwise. The caller must
		ensure that obj is normalized with a single leading slash. A trailing
		slash is optional for directories."""
		masked = 0
		protected = 0
		sep = os.path.sep
		if self.case_insensitive:
			obj = obj.lower()
		for ppath in self.protect:
			if len(ppath) > masked and obj.startswith(ppath):
				if ppath in self._dirs:
					if obj != ppath and not obj.startswith(ppath + sep):
						# /etc/foo does not match /etc/foobaz
						continue
				elif obj != ppath:
					# force exact match when CONFIG_PROTECT lists a
					# non-directory
					continue
				protected = len(ppath)
				#config file management
				for pmpath in self.protectmask:
					if len(pmpath) >= protected and obj.startswith(pmpath):
						if pmpath in self._dirs:
							if obj != pmpath and \
								not obj.startswith(pmpath + sep):
								# /etc/foo does not match /etc/foobaz
								continue
						elif obj != pmpath:
							# force exact match when CONFIG_PROTECT_MASK lists
							# a non-directory
							continue
						#skip, it's in the mask
						masked = len(pmpath)
		return protected > masked
1641
def new_protect_filename(mydest, newmd5=None, force=False):
	"""Resolves a config-protect filename for merging, optionally
	using the last filename if the md5 matches. If force is True,
	then a new filename will be generated even if mydest does not
	exist yet.
	(dest,md5) ==> 'string' --- path_to_target_filename
	(dest) ==> ('next', 'highest') --- next_target and most-recent_target

	@param mydest: destination path of the config file being merged
	@param newmd5: md5 of the new content (or, for symlinks, the link
		target) used to detect an identical previously-merged file
	@param force: generate a new name even if mydest does not exist
	"""

	# config protection filename format:
	# ._cfg0000_foo
	# 0123456789012

	os = _os_merge

	prot_num = -1
	last_pfile = ""

	if not force and \
		not os.path.exists(mydest):
		return mydest

	real_filename = os.path.basename(mydest)
	real_dirname = os.path.dirname(mydest)
	# Find the highest existing ._cfgNNNN_ counter for this filename.
	for pfile in os.listdir(real_dirname):
		if pfile[0:5] != "._cfg":
			continue
		if pfile[10:] != real_filename:
			continue
		try:
			new_prot_num = int(pfile[5:9])
			if new_prot_num > prot_num:
				prot_num = new_prot_num
				last_pfile = pfile
		except ValueError:
			continue
	prot_num = prot_num + 1

	new_pfile = normalize_path(os.path.join(real_dirname,
		"._cfg" + str(prot_num).zfill(4) + "_" + real_filename))
	old_pfile = normalize_path(os.path.join(real_dirname, last_pfile))
	if last_pfile and newmd5:
		try:
			old_pfile_st = _os_merge.lstat(old_pfile)
		except OSError as e:
			if e.errno != errno.ENOENT:
				raise
		else:
			if stat.S_ISLNK(old_pfile_st.st_mode):
				try:
					# Read symlink target as bytes, in case the
					# target path has a bad encoding.
					pfile_link = _os.readlink(_unicode_encode(old_pfile,
						encoding=_encodings['merge'], errors='strict'))
				# Bug fix: the exception was previously not bound with
				# "as e", so the errno check below referenced a stale
				# or undefined name (NameError under Python 3).
				except OSError as e:
					if e.errno != errno.ENOENT:
						raise
				else:
					# Bug fix: pfile_link was previously not passed to
					# _unicode_decode, discarding the link target that
					# is compared against newmd5 below.
					pfile_link = _unicode_decode(pfile_link,
						encoding=_encodings['merge'], errors='replace')
					if pfile_link == newmd5:
						return old_pfile
			else:
				try:
					last_pfile_md5 = \
						portage.checksum._perform_md5_merge(old_pfile)
				except FileNotFound:
					# The file suddenly disappeared or it's a
					# broken symlink.
					pass
				else:
					if last_pfile_md5 == newmd5:
						return old_pfile
	return new_pfile
1716
def find_updated_config_files(target_root, config_protect):
	"""
	Generator yielding protected configuration paths that have pending
	._cfg????_* update candidates. Each yielded value is a tuple:
	(protected_dir, file_list) when the protected entry is a directory, or
	(protected_file, None) when the entry is a single protected file.
	Nothing is yielded when no configuration files need updating.

	@param target_root: root directory that config_protect entries are
		interpreted relative to
	@param config_protect: iterable of protected paths (CONFIG_PROTECT)
	"""

	encoding = _encodings['fs']

	if config_protect:
		# directories with some protect files in them
		for x in config_protect:
			files = []

			x = os.path.join(target_root, x.lstrip(os.path.sep))
			# Skip entries we could not update anyway.
			if not os.access(x, os.W_OK):
				continue
			try:
				mymode = os.lstat(x).st_mode
			except OSError:
				continue

			if stat.S_ISLNK(mymode):
				# We want to treat it like a directory if it
				# is a symlink to an existing directory.
				try:
					real_mode = os.stat(x).st_mode
					if stat.S_ISDIR(real_mode):
						mymode = real_mode
				except OSError:
					pass

			# Use find(1) to locate ._cfg????_* candidates, pruning
			# hidden directories for the directory case.
			if stat.S_ISDIR(mymode):
				mycommand = \
					"find '%s' -name '.*' -type d -prune -o -name '._cfg????_*'" % x
			else:
				mycommand = "find '%s' -maxdepth 1 -name '._cfg????_%s'" % \
					os.path.split(x.rstrip(os.path.sep))
			mycommand += " ! -name '.*~' ! -iname '.*.bak' -print0"
			cmd = shlex_split(mycommand)

			if sys.hexversion < 0x3020000 and sys.hexversion >= 0x3000000:
				# Python 3.1 _execvp throws TypeError for non-absolute executable
				# path passed as bytes (see http://bugs.python.org/issue8513).
				fullname = portage.process.find_binary(cmd[0])
				if fullname is None:
					raise portage.exception.CommandNotFound(cmd[0])
				cmd[0] = fullname

			cmd = [_unicode_encode(arg, encoding=encoding, errors='strict')
				for arg in cmd]
			proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
				stderr=subprocess.STDOUT)
			output = _unicode_decode(proc.communicate()[0], encoding=encoding)
			status = proc.wait()
			# Only trust the output when find exited successfully.
			if os.WIFEXITED(status) and os.WEXITSTATUS(status) == os.EX_OK:
				files = output.split('\0')
				# split always produces an empty string as the last element
				if files and not files[-1]:
					del files[-1]
				if files:
					if stat.S_ISDIR(mymode):
						yield (x, files)
					else:
						yield (x, None)

# Matches "include <pattern>" directives in ld.so.conf; group 1 is the
# glob pattern whose matching files are recursively included (used by
# getlibpaths/read_ld_so_conf below).
_ld_so_include_re = re.compile(r'^include\s+(\S.*)')

def getlibpaths(root, env=None):
	"""Return a list of paths that are used for library lookups.

	The result is derived from LD_LIBRARY_PATH plus the entries of
	etc/ld.so.conf under the given root (following its include
	directives), with /usr/lib and /lib appended; empty entries are
	dropped and each path is normalized.

	NOTE: this docstring was previously a stray string statement placed
	after the nested helper, where it was not attached to the function;
	it has been moved to the proper position. Behavior is unchanged.

	@param root: root directory under which etc/ld.so.conf is located
	@param env: environment mapping to read LD_LIBRARY_PATH from
		(defaults to os.environ)
	"""
	def read_ld_so_conf(path):
		# Yield each library path entry, recursively expanding any
		# "include" directives via glob expansion.
		for l in grabfile(path):
			include_match = _ld_so_include_re.match(l)
			if include_match is not None:
				subpath = os.path.join(os.path.dirname(path),
					include_match.group(1))
				for p in glob.glob(subpath):
					for r in read_ld_so_conf(p):
						yield r
			else:
				yield l

	if env is None:
		env = os.environ
	# the following is based on the information from ld.so(8)
	rval = env.get("LD_LIBRARY_PATH", "").split(":")
	rval.extend(read_ld_so_conf(os.path.join(root, "etc", "ld.so.conf")))
	rval.append("/usr/lib")
	rval.append("/lib")

	return [normalize_path(x) for x in rval if x]