Package portage :: Package util
[hide private]

Source Code for Package portage.util

   1  # Copyright 2004-2014 Gentoo Foundation 
   2  # Distributed under the terms of the GNU General Public License v2 
   3   
   4  from __future__ import unicode_literals 
   5   
   6  __all__ = ['apply_permissions', 'apply_recursive_permissions', 
   7          'apply_secpass_permissions', 'apply_stat_permissions', 'atomic_ofstream', 
   8          'cmp_sort_key', 'ConfigProtect', 'dump_traceback', 'ensure_dirs', 
   9          'find_updated_config_files', 'getconfig', 'getlibpaths', 'grabdict', 
  10          'grabdict_package', 'grabfile', 'grabfile_package', 'grablines', 
  11          'initialize_logger', 'LazyItemsDict', 'map_dictlist_vals', 
  12          'new_protect_filename', 'normalize_path', 'pickle_read', 'stack_dictlist', 
  13          'stack_dicts', 'stack_lists', 'unique_array', 'unique_everseen', 'varexpand', 
  14          'write_atomic', 'writedict', 'writemsg', 'writemsg_level', 'writemsg_stdout'] 
  15   
  16  from copy import deepcopy 
  17  import errno 
  18  import io 
  19  try: 
  20          from itertools import chain, filterfalse 
  21  except ImportError: 
  22          from itertools import chain, ifilterfalse as filterfalse 
  23  import logging 
  24  import re 
  25  import shlex 
  26  import stat 
  27  import string 
  28  import sys 
  29  import traceback 
  30  import glob 
  31   
  32  import portage 
  33  portage.proxy.lazyimport.lazyimport(globals(), 
  34          'pickle', 
  35          'portage.dep:Atom', 
  36          'subprocess', 
  37  ) 
  38   
  39  from portage import os 
  40  from portage import _encodings 
  41  from portage import _os_merge 
  42  from portage import _unicode_encode 
  43  from portage import _unicode_decode 
  44  from portage.const import VCS_DIRS 
  45  from portage.exception import InvalidAtom, PortageException, FileNotFound, \ 
  46          IsADirectory, OperationNotPermitted, ParseError, PermissionDenied, \ 
  47          ReadOnlyFileSystem 
  48  from portage.localization import _ 
  49  from portage.proxy.objectproxy import ObjectProxy 
  50  from portage.cache.mappings import UserDict 
  51   
  52  if sys.hexversion >= 0x3000000: 
  53          _unicode = str 
  54  else: 
  55          _unicode = unicode 
  56   
  57  noiselimit = 0 
  58   
def initialize_logger(level=logging.WARNING):
	"""Set up basic logging of portage activities.

	@param level: numeric logging level to emit messages at
		(e.g. logging.INFO, logging.DEBUG, logging.WARNING)
	@return: None
	"""
	logging.basicConfig(
		format='[%(levelname)-4s] %(message)s', level=level)
67
def writemsg(mystr, noiselevel=0, fd=None):
	"""Print a warning/debug message, honoring the global noiselimit.

	Messages with noiselevel greater than the module-level noiselimit
	are suppressed.  fd defaults to sys.stderr.
	"""
	if fd is None:
		fd = sys.stderr
	if noiselevel > noiselimit:
		return
	# Convert up front in order to avoid a potential UnicodeEncodeError
	# inside the stream's write() call.
	if isinstance(fd, io.StringIO):
		mystr = _unicode_decode(mystr,
			encoding=_encodings['content'], errors='replace')
	else:
		mystr = _unicode_encode(mystr,
			encoding=_encodings['stdio'], errors='backslashreplace')
		if sys.hexversion >= 0x3000000 and fd in (sys.stdout, sys.stderr):
			# Python 3 text streams reject bytes, so write to the
			# underlying binary buffer instead.
			fd = fd.buffer
	fd.write(mystr)
	fd.flush()
85
def writemsg_stdout(mystr, noiselevel=0):
	"""Like writemsg(), but writes to stdout (still subject to the
	noiselimit setting)."""
	writemsg(mystr, noiselevel=noiselevel, fd=sys.stdout)
89
def writemsg_level(msg, level=0, noiselevel=0):
	"""
	Show a message for the given level as defined by the logging module
	(default is 0). When level >= logging.WARNING then the message is
	sent to stderr, otherwise it is sent to stdout. The noiselevel is
	passed directly to writemsg().

	@type msg: str
	@param msg: a message string, including newline if appropriate
	@type level: int
	@param level: a numeric logging level (see the logging module)
	@type noiselevel: int
	@param noiselevel: passed directly to writemsg
	"""
	stream = sys.stderr if level >= logging.WARNING else sys.stdout
	writemsg(msg, noiselevel=noiselevel, fd=stream)
109
def normalize_path(mypath):
	"""
	Normalize a path, collapsing a doubled leading separator.

	os.path.normpath("//foo") returns "//foo" instead of "/foo"
	(POSIX permits special meaning for exactly two leading slashes),
	which we don't want, so this wrapper works around it.
	"""
	if sys.hexversion >= 0x3000000 and isinstance(mypath, bytes):
		path_sep = os.path.sep.encode()
	else:
		path_sep = os.path.sep

	if not mypath.startswith(path_sep):
		return os.path.normpath(mypath)
	# posixpath.normpath collapses 3 or more leading slashes to just 1,
	# so prepend two extra separators before normalizing.
	return os.path.normpath(2 * path_sep + mypath)
126
def grabfile(myfilename, compat_level=0, recursive=0, remember_source_file=False):
	"""Grab the lines in a file, normalize whitespace and return them as
	a list; lines beginning with '#' are ignored, as are empty lines.
	Trailing '#' comments are stripped.  With remember_source_file=True,
	(line, filename) tuples are returned instead of bare lines."""

	raw_lines = grablines(myfilename, recursive, remember_source_file=True)
	newlines = []

	for x, source_file in raw_lines:
		# Splitting and rejoining normalizes all internal whitespace to
		# single spaces and trims leading/trailing whitespace.
		myline = x.split()
		if x and x[0] != "#":
			# Drop everything from the first word that starts a
			# trailing '#' comment.
			stripped = []
			for item in myline:
				if item[:1] == "#":
					break
				stripped.append(item)
			myline = stripped

		myline = " ".join(myline)
		if not myline:
			continue
		if myline[0] == "#":
			# Check if we have a compat-level string. BC-integration data.
			# '##COMPAT==>N<==' 'some string attached to it'
			mylinetest = myline.split("<==", 1)
			if len(mylinetest) == 2:
				myline_potential = mylinetest[1]
				mylinetest = mylinetest[0].split("##COMPAT==>")
				if len(mylinetest) == 2 and \
					compat_level >= int(mylinetest[1]):
					# It's a compat line, and the key matches.
					newlines.append(myline_potential)
			continue
		if remember_source_file:
			newlines.append((myline, source_file))
		else:
			newlines.append(myline)
	return newlines
169
def map_dictlist_vals(func, myDict):
	"""Apply func to every value of every key in a dictlist.

	@return: a new dictlist with the transformed values
	"""
	return dict((key, [func(item) for item in vals])
		for key, vals in myDict.items())
178
def stack_dictlist(original_dicts, incremental=0, incrementals=[], ignore_none=0):
	"""
	Stacks an array of dict-types into one array. Optionally merging or
	overwriting matching key/value pairs for the dict[key]->list.
	Returns a single dict. Higher index in lists is preferenced.

	Example usage:
	>>> from portage.util import stack_dictlist
	>>> print stack_dictlist( [{'a':'b'},{'x':'y'}])
	>>> {'a':'b','x':'y'}
	>>> print stack_dictlist( [{'a':'b'},{'a':'c'}], incremental = True )
	>>> {'a':['b','c'] }
	>>> a = {'KEYWORDS':['x86','alpha']}
	>>> b = {'KEYWORDS':['-x86']}
	>>> print stack_dictlist( [a,b] )
	>>> { 'KEYWORDS':['x86','alpha','-x86']}
	>>> print stack_dictlist( [a,b], incremental=True)
	>>> { 'KEYWORDS':['alpha'] }
	>>> print stack_dictlist( [a,b], incrementals=['KEYWORDS'])
	>>> { 'KEYWORDS':['alpha'] }

	@param original_dicts a list of (dictionary objects or None)
	@type list
	@param incremental True or false depending on whether new keys should overwrite
	   keys which already exist.
	@type boolean
	@param incrementals A list of items that should be incremental (-foo removes foo from
	   the returned dict).
	@type list
	@param ignore_none Appears to be ignored, but probably was used long long ago.
	@type boolean
	"""
	final_dict = {}
	for mydict in original_dicts:
		if mydict is None:
			continue
		for key in mydict:
			final_dict.setdefault(key, [])
			for item in mydict[key]:
				if not item:
					continue
				if incremental or key in incrementals:
					if item == "-*":
						# clear everything stacked so far for this key
						final_dict[key] = []
						continue
					if item[:1] == '-':
						# remove a previously stacked value, if present
						try:
							final_dict[key].remove(item[1:])
						except ValueError:
							pass
						continue
				if item not in final_dict[key]:
					final_dict[key].append(item)
			# drop keys whose value lists ended up empty
			if key in final_dict and not final_dict[key]:
				del final_dict[key]
	return final_dict
237
def stack_dicts(dicts, incremental=0, incrementals=[], ignore_none=0):
	"""Stack an array of dict-types into a single dict of strings.
	Matching keys are concatenated with a space separator when
	incremental is true (or the key is listed in incrementals);
	otherwise later values overwrite earlier ones."""
	final_dict = {}
	for mydict in dicts:
		if not mydict:
			continue
		for k, v in mydict.items():
			if k in final_dict and (incremental or k in incrementals):
				final_dict[k] = final_dict[k] + " " + v
			else:
				final_dict[k] = v
	return final_dict
252
def append_repo(atom_list, repo_name, remember_source_file=False):
	"""
	Take a list of valid atoms without a repo spec and append ::repo_name
	to each.  Atoms which already carry a repo part are preserved
	unchanged (see bug #461948).
	"""
	def _ensure_repo(atom):
		# and/or idiom kept from the original implementation
		return atom.repo is not None and atom or atom.with_repo(repo_name)

	if remember_source_file:
		return [(_ensure_repo(atom), source)
			for atom, source in atom_list]
	return [_ensure_repo(atom) for atom in atom_list]
264
def stack_lists(lists, incremental=1, remember_source_file=False,
	warn_for_unmatched_removal=False, strict_warn_for_unmatched_removal=False, ignore_repo=False):
	"""Stack an array of list-types into one array, optionally removing
	distinct values using '-value' notation ('-*' clears everything
	stacked so far). Higher index is preferenced.

	all elements must be hashable."""
	matched_removals = set()
	unmatched_removals = {}
	# dict used as an ordered set; the value is the token's source file
	result = {}
	for sub_list in lists:
		for token in sub_list:
			token_key = token
			if remember_source_file:
				token, source_file = token
			else:
				source_file = False

			if token is None:
				continue

			if not incremental:
				result[token] = source_file
				continue

			if token == "-*":
				result.clear()
				continue

			if token[:1] != '-':
				result[token] = source_file
				continue

			# Removal token: drop the matching entry, if any.
			matched = False
			if ignore_repo and not "::" in token:
				# Let -cat/pkg remove cat/pkg::repo.
				token_slice = token[1:]
				to_be_removed = []
				for atom in result:
					atom_without_repo = atom
					if atom.repo is not None:
						# Atom.without_repo instantiates a new Atom,
						# which is unnecessary here, so use string
						# replacement instead.
						atom_without_repo = atom.replace(
							"::" + atom.repo, "", 1)
					if atom_without_repo == token_slice:
						to_be_removed.append(atom)
				if to_be_removed:
					matched = True
					for atom in to_be_removed:
						result.pop(atom)
			else:
				try:
					result.pop(token[1:])
					matched = True
				except KeyError:
					pass

			if matched:
				matched_removals.add(token_key)
			elif source_file and \
				(strict_warn_for_unmatched_removal or
				token_key not in matched_removals):
				unmatched_removals.setdefault(source_file, set()).add(token)

	if warn_for_unmatched_removal:
		for source_file, tokens in unmatched_removals.items():
			if len(tokens) > 3:
				selected = [tokens.pop(), tokens.pop(), tokens.pop()]
				writemsg(_("--- Unmatched removal atoms in %s: %s and %s more\n") % \
					(source_file, ", ".join(selected), len(tokens)),
					noiselevel=-1)
			else:
				writemsg(_("--- Unmatched removal atom(s) in %s: %s\n") % \
					(source_file, ", ".join(tokens)),
					noiselevel=-1)

	if remember_source_file:
		return list(result.items())
	return list(result)
342
def grabdict(myfilename, juststrings=0, empty=0, recursive=0, incremental=1, newlines=0):
	"""
	Grab the lines in a file, normalize whitespace, and return them as a
	dictionary mapping the first word of each line to the rest.

	@param myfilename: file to process
	@type myfilename: string (path)
	@param juststrings: join each value list into a single string
	@type juststrings: Boolean (integer)
	@param empty: allow lines that consist of only a key
	@type empty: Boolean (integer)
	@param recursive: Recursively grab ( support for /etc/portage/package.keywords/* and friends )
	@type recursive: Boolean (integer)
	@param incremental: Append to the return list, don't overwrite
	@type incremental: Boolean (integer)
	@param newlines: Append newlines
	@type newlines: Boolean (integer)
	@rtype: Dictionary
	@return: for example, the line
		'sys-apps/portage x86 amd64 ppc'
		yields
		{"sys-apps/portage" : ['x86', 'amd64', 'ppc']}
	"""
	newdict = {}
	for x in grablines(myfilename, recursive):
		# Skip full-line comments; then whitespace-split the line and
		# drop everything from the first word that starts a trailing
		# '#' comment.
		if x[0] == "#":
			continue
		tokens = []
		for word in x.split():
			if word[:1] == "#":
				break
			tokens.append(word)
		if len(tokens) < 2 and empty == 0:
			continue
		if len(tokens) < 1 and empty == 1:
			continue
		if newlines:
			tokens.append("\n")
		if incremental:
			newdict.setdefault(tokens[0], []).extend(tokens[1:])
		else:
			newdict[tokens[0]] = tokens[1:]
	if juststrings:
		for k, v in newdict.items():
			newdict[k] = " ".join(v)
	return newdict
# Cache for read_corresponding_eapi_file(), keyed by eapi file path.
# A value of None records a negative result (file missing or invalid).
_eapi_cache = {}
def read_corresponding_eapi_file(filename, default="0"):
	"""
	Read the 'eapi' file from the directory 'filename' is in.
	Returns "0" if the file is not present or invalid.
	"""
	eapi_file = os.path.join(os.path.dirname(filename), "eapi")
	try:
		eapi = _eapi_cache[eapi_file]
	except KeyError:
		pass
	else:
		# Cached result; None means "missing or invalid".
		return default if eapi is None else eapi

	eapi = None
	try:
		with io.open(_unicode_encode(eapi_file,
			encoding=_encodings['fs'], errors='strict'),
			mode='r', encoding=_encodings['repo.content'],
			errors='replace') as f:
			lines = f.readlines()
		if len(lines) == 1:
			eapi = lines[0].rstrip("\n")
		else:
			writemsg(_("--- Invalid 'eapi' file (doesn't contain exactly one line): %s\n") % (eapi_file),
				noiselevel=-1)
	except IOError:
		# Missing/unreadable eapi file is not an error; cache the
		# negative result below.
		pass

	_eapi_cache[eapi_file] = eapi
	return default if eapi is None else eapi
430
def grabdict_package(myfilename, juststrings=0, recursive=0, newlines=0,
	allow_wildcard=False, allow_repo=False, allow_build_id=False,
	verify_eapi=False, eapi=None, eapi_default="0"):
	"""Does the same thing as grabdict except it validates keys with
	isvalidatom(); invalid atoms are reported and skipped."""

	file_list = _recursive_file_list(myfilename) if recursive \
		else [myfilename]

	atoms = {}
	for filename in file_list:
		d = grabdict(filename, juststrings=False,
			empty=True, recursive=False, incremental=True, newlines=newlines)
		if not d:
			continue
		if verify_eapi and eapi is None:
			# The eapi file located next to myfilename governs atom
			# syntax for validation.
			eapi = read_corresponding_eapi_file(
				myfilename, default=eapi_default)

		for k, v in d.items():
			try:
				k = Atom(k, allow_wildcard=allow_wildcard,
					allow_repo=allow_repo,
					allow_build_id=allow_build_id, eapi=eapi)
			except InvalidAtom as e:
				writemsg(_("--- Invalid atom in %s: %s\n") % (filename, e),
					noiselevel=-1)
			else:
				atoms.setdefault(k, []).extend(v)

	if juststrings:
		for k, v in atoms.items():
			atoms[k] = " ".join(v)

	return atoms
468
def grabfile_package(myfilename, compatlevel=0, recursive=0,
	allow_wildcard=False, allow_repo=False, allow_build_id=False,
	remember_source_file=False, verify_eapi=False, eapi=None,
	eapi_default="0"):
	"""Like grabfile(), but each line is validated as a package atom.
	Entries with a leading '-' (removals), or a leading '*' in
	'packages' files, keep their prefix and are returned as plain
	strings; everything else is returned as an Atom instance.  Invalid
	atoms are reported and dropped."""

	pkgs = grabfile(myfilename, compatlevel, recursive=recursive,
		remember_source_file=True)
	if not pkgs:
		return pkgs
	if verify_eapi and eapi is None:
		eapi = read_corresponding_eapi_file(
			myfilename, default=eapi_default)
	mybasename = os.path.basename(myfilename)
	atoms = []
	for pkg, source_file in pkgs:
		pkg_orig = pkg
		# strip prefixes used by packages and package.mask files
		if pkg[:1] == "-":
			pkg = pkg[1:]
		if pkg[:1] == '*' and mybasename == 'packages':
			pkg = pkg[1:]
		try:
			pkg = Atom(pkg, allow_wildcard=allow_wildcard,
				allow_repo=allow_repo, allow_build_id=allow_build_id,
				eapi=eapi)
		except InvalidAtom as e:
			writemsg(_("--- Invalid atom in %s: %s\n") % (source_file, e),
				noiselevel=-1)
			continue
		if pkg_orig == _unicode(pkg):
			# normal atom, so return as Atom instance
			atoms.append((pkg, source_file) if remember_source_file
				else pkg)
		else:
			# atom has special prefix, so return as string
			atoms.append((pkg_orig, source_file) if remember_source_file
				else pkg_orig)
	return atoms
510
def _recursive_basename_filter(f):
	"""Return False for hidden names (leading '.') and editor backup
	files (trailing '~')."""
	if f.startswith("."):
		return False
	return not f.endswith("~")
513
def _recursive_file_list(path):
	"""Yield the regular files at/under path (which may itself be a
	regular file), skipping VCS directories, hidden names and editor
	backups.  Directory entries are visited in sorted order."""

	def onerror(e):
		# Only permission problems are fatal; other stat/listdir
		# failures (e.g. a path that vanished) are skipped silently.
		if e.errno == PermissionDenied.errno:
			raise PermissionDenied(path)

	stack = [os.path.split(path)]

	while stack:
		parent, fname = stack.pop()
		fullpath = os.path.join(parent, fname)

		try:
			st = os.stat(fullpath)
		except OSError as e:
			onerror(e)
			continue

		if stat.S_ISREG(st.st_mode):
			if _recursive_basename_filter(fname):
				yield fullpath
		elif stat.S_ISDIR(st.st_mode):
			if fname in VCS_DIRS or not _recursive_basename_filter(fname):
				continue
			try:
				children = os.listdir(fullpath)
			except OSError as e:
				onerror(e)
				continue

			# Sort in reverse, since we pop from the end of the stack.
			# Regular files go onto the stack too, so files are sorted
			# together with directories.
			children.sort(reverse=True)
			stack.extend((fullpath, child) for child in children)
def grablines(myfilename, recursive=0, remember_source_file=False):
	"""Return the lines of a file as a list (or as (line, filename)
	tuples when remember_source_file is true).  With recursive=1,
	myfilename may be a directory and all files beneath it are read.
	Missing files produce an empty list."""
	mylines = []
	if recursive:
		for f in _recursive_file_list(myfilename):
			mylines.extend(grablines(f, recursive=False,
				remember_source_file=remember_source_file))
		return mylines

	try:
		with io.open(_unicode_encode(myfilename,
			encoding=_encodings['fs'], errors='strict'),
			mode='r', encoding=_encodings['content'],
			errors='replace') as myfile:
			if remember_source_file:
				mylines = [(line, myfilename) for line in myfile]
			else:
				mylines = myfile.readlines()
	except IOError as e:
		if e.errno == PermissionDenied.errno:
			raise PermissionDenied(myfilename)
		if e.errno not in (errno.ENOENT, errno.ESTALE):
			raise
		# missing / stale files are treated as empty
	return mylines
576
def writedict(mydict, myfilename, writekey=True):
	"""Atomically write a dict to a file.  With writekey=False only the
	values are written (all assumed to be strings, not lists); otherwise
	each line is the key followed by its space-joined values."""
	if writekey:
		lines = ["%s %s\n" % (k, " ".join(v))
			for k, v in mydict.items()]
	else:
		lines = [v + "\n" for v in mydict.values()]
	write_atomic(myfilename, "".join(lines))
588
def shlex_split(s):
	"""
	Equivalent to shlex.split(), except that under Python 2 a unicode
	argument is temporarily encoded to bytes, since Python 2's
	shlex.split() doesn't handle unicode strings.
	"""
	if sys.hexversion < 0x3000000 and not isinstance(s, bytes):
		return [_unicode_decode(x)
			for x in shlex.split(_unicode_encode(s))]
	return shlex.split(s)
602
class _getconfig_shlex(shlex.shlex):
	"""shlex subclass used by getconfig(): optionally tolerates missing
	files referenced by bash-style 'source' statements, and expands
	variables in sourced file names."""

	def __init__(self, portage_tolerant=False, **kwargs):
		shlex.shlex.__init__(self, **kwargs)
		# When tolerant, a missing sourced file only produces a
		# warning instead of raising ParseError.
		self.__portage_tolerant = portage_tolerant

	def allow_sourcing(self, var_expand_map):
		# Enable 'source' statements; sourced file names are expanded
		# against var_expand_map (see sourcehook below).
		self.source = portage._native_string("source")
		self.var_expand_map = var_expand_map

	def sourcehook(self, newfile):
		# Expand variables in the sourced file name, then delegate to
		# the stock shlex implementation.
		try:
			newfile = varexpand(newfile, self.var_expand_map)
			return shlex.shlex.sourcehook(self, newfile)
		except EnvironmentError as e:
			if e.errno == PermissionDenied.errno:
				raise PermissionDenied(newfile)
			if e.errno not in (errno.ENOENT, errno.ENOTDIR):
				writemsg("open('%s', 'r'): %s\n" % (newfile, e), noiselevel=-1)
				raise

			msg = self.error_leader()
			if e.errno == errno.ENOTDIR:
				msg += _("%s: Not a directory") % newfile
			else:
				msg += _("%s: No such file or directory") % newfile

			if self.__portage_tolerant:
				writemsg("%s\n" % msg, noiselevel=-1)
			else:
				raise ParseError(msg)
			# Tolerant mode: substitute an empty stream for the
			# missing file so parsing can continue.
			return (newfile, io.StringIO())
# Matches variable names that are invalid in shell assignments:
# a leading digit, or any non-word character anywhere in the name.
_invalid_var_name_re = re.compile(r'^\d|\W')
def getconfig(mycfg, tolerant=False, allow_sourcing=False, expand=True,
	recursive=False):
	"""
	Parse a bash-like key=value configuration file and return the
	settings as a dict, or None if the file could not be read.

	@param mycfg: path of the config file (or directory, with recursive=True)
	@param tolerant: report syntax errors as warnings instead of
		raising ParseError
	@param allow_sourcing: honor bash-style 'source' statements
	@param expand: when True, expand variable references in values; a
		dict may be passed instead to seed the substitution map
	@param recursive: parse all files under mycfg
	@rtype: dict or None
	"""

	if isinstance(expand, dict):
		# Some existing variable definitions have been
		# passed in, for use in substitutions.
		expand_map = expand
		expand = True
	else:
		expand_map = {}
	mykeys = {}

	if recursive:
		# Emulate source commands so that syntax error messages
		# can display real file names and line numbers.
		if not expand:
			expand_map = False
		fname = None
		for fname in _recursive_file_list(mycfg):
			mykeys.update(getconfig(fname, tolerant=tolerant,
				allow_sourcing=allow_sourcing, expand=expand_map,
				recursive=False) or {})
		if fname is None:
			# no files found at all
			return None
		return mykeys

	f = None
	try:
		# NOTE: shlex doesn't support unicode objects with Python 2
		# (produces spurious \0 characters).
		if sys.hexversion < 0x3000000:
			f = open(_unicode_encode(mycfg,
				encoding=_encodings['fs'], errors='strict'), 'rb')
		else:
			f = open(_unicode_encode(mycfg,
				encoding=_encodings['fs'], errors='strict'), mode='r',
				encoding=_encodings['content'], errors='replace')
		content = f.read()
	except IOError as e:
		if e.errno == PermissionDenied.errno:
			raise PermissionDenied(mycfg)
		if e.errno != errno.ENOENT:
			writemsg("open('%s', 'r'): %s\n" % (mycfg, e), noiselevel=-1)
			if e.errno not in (errno.EISDIR,):
				raise
		return None
	finally:
		if f is not None:
			f.close()

	# Since this file has unicode_literals enabled, and Python 2's
	# shlex implementation does not support unicode, the following code
	# uses _native_string() to encode unicode literals when necessary.

	# Workaround for avoiding a silent error in shlex that is
	# triggered by a source statement at the end of the file
	# without a trailing newline after the source statement.
	if content and content[-1] != portage._native_string('\n'):
		content += portage._native_string('\n')

	# Warn about dos-style line endings since that prevents
	# people from being able to source them with bash.
	if portage._native_string('\r') in content:
		writemsg(("!!! " + _("Please use dos2unix to convert line endings " + \
			"in config file: '%s'") + "\n") % mycfg, noiselevel=-1)

	lex = None
	try:
		# The default shlex.sourcehook() implementation
		# only joins relative paths when the infile
		# attribute is properly set.
		lex = _getconfig_shlex(instream=content, infile=mycfg, posix=True,
			portage_tolerant=tolerant)
		lex.wordchars = portage._native_string(string.digits +
			string.ascii_letters + "~!@#$%*_\:;?,./-+{}")
		lex.quotes = portage._native_string("\"'")
		if allow_sourcing:
			lex.allow_sourcing(expand_map)

		while True:
			key = _unicode_decode(lex.get_token())
			if key == "export":
				# ignore bash 'export' prefixes
				key = _unicode_decode(lex.get_token())
			if key is None:
				#normal end of file
				break

			equ = _unicode_decode(lex.get_token())
			if not equ:
				msg = lex.error_leader() + _("Unexpected EOF")
				if not tolerant:
					raise ParseError(msg)
				else:
					writemsg("%s\n" % msg, noiselevel=-1)
					return mykeys

			elif equ != "=":
				msg = lex.error_leader() + \
					_("Invalid token '%s' (not '=')") % (equ,)
				if not tolerant:
					raise ParseError(msg)
				else:
					writemsg("%s\n" % msg, noiselevel=-1)
					return mykeys

			val = _unicode_decode(lex.get_token())
			if val is None:
				msg = lex.error_leader() + \
					_("Unexpected end of config file: variable '%s'") % (key,)
				if not tolerant:
					raise ParseError(msg)
				else:
					writemsg("%s\n" % msg, noiselevel=-1)
					return mykeys

			if _invalid_var_name_re.search(key) is not None:
				msg = lex.error_leader() + \
					_("Invalid variable name '%s'") % (key,)
				if not tolerant:
					raise ParseError(msg)
				writemsg("%s\n" % msg, noiselevel=-1)
				continue

			if expand:
				mykeys[key] = varexpand(val, mydict=expand_map,
					error_leader=lex.error_leader)
				# make the value available for later substitutions
				expand_map[key] = mykeys[key]
			else:
				mykeys[key] = val
	except SystemExit as e:
		raise
	except Exception as e:
		if isinstance(e, ParseError) or lex is None:
			raise
		# Re-raise with file/line context prepended via error_leader().
		msg = "%s%s" % (lex.error_leader(), e)
		writemsg("%s\n" % msg, noiselevel=-1)
		raise

	return mykeys
# Characters permitted in a variable name being expanded by varexpand()
# (letters, digits and underscore).
_varexpand_word_chars = frozenset(string.ascii_letters + string.digits + "_")
# Error message emitted when a '${' is never closed.
_varexpand_unexpected_eof_msg = "unexpected EOF while looking for matching `}'"
def varexpand(mystring, mydict=None, error_leader=None):
	"""
	new variable expansion code. Preserves quotes, handles \n, etc.
	This code is used by the configfile code, as well as others (parser)
	This would be a good bunch of code to port to C.

	Expands $VAR and ${VAR} references in mystring using values from
	mydict; quote characters are preserved (removal is handled by shlex
	in getconfig()).  Returns "" after emitting a warning on malformed
	input (unterminated ${, bad substitution).
	"""
	if mydict is None:
		mydict = {}

	numvars = 0
	# in single, double quotes (no expansion happens inside single quotes)
	insing = 0
	indoub = 0
	pos = 0
	length = len(mystring)
	newstring = []
	while pos < length:
		current = mystring[pos]
		if current == "'":
			if (indoub):
				newstring.append("'")
			else:
				newstring.append("'") # Quote removal is handled by shlex.
				insing=not insing
			pos += 1
			continue
		elif current == '"':
			if (insing):
				newstring.append('"')
			else:
				newstring.append('"') # Quote removal is handled by shlex.
				indoub=not indoub
			pos += 1
			continue
		if not insing:
			#expansion time
			if current == "\n":
				#convert newlines to spaces
				newstring.append(" ")
				pos += 1
			elif current == "\\":
				# For backslash expansion, this function used to behave like
				# echo -e, but that's not needed for our purposes. We want to
				# behave like bash does when expanding a variable assignment
				# in a sourced file, in which case it performs backslash
				# removal for \\ and \$ but nothing more. It also removes
				# escaped newline characters. Note that we don't handle
				# escaped quotes here, since getconfig() uses shlex
				# to handle that earlier.
				if pos + 1 >= len(mystring):
					newstring.append(current)
					break
				else:
					current = mystring[pos + 1]
					pos += 2
					if current == "$":
						newstring.append(current)
					elif current == "\\":
						newstring.append(current)
						# BUG: This spot appears buggy, but it's intended to
						# be bug-for-bug compatible with existing behavior.
						if pos < length and \
							mystring[pos] in ("'", '"', "$"):
							newstring.append(mystring[pos])
							pos += 1
					elif current == "\n":
						# escaped newlines are removed entirely
						pass
					else:
						newstring.append(mystring[pos - 2:pos])
					continue
			elif current == "$":
				pos += 1
				if pos == length:
					# shells handle this like \$
					newstring.append(current)
					continue

				if mystring[pos] == "{":
					pos += 1
					if pos == length:
						msg = _varexpand_unexpected_eof_msg
						if error_leader is not None:
							msg = error_leader() + msg
						writemsg(msg + "\n", noiselevel=-1)
						return ""

					braced = True
				else:
					braced = False
				myvstart = pos
				# scan the variable name
				while mystring[pos] in _varexpand_word_chars:
					if pos + 1 >= len(mystring):
						if braced:
							msg = _varexpand_unexpected_eof_msg
							if error_leader is not None:
								msg = error_leader() + msg
							writemsg(msg + "\n", noiselevel=-1)
							return ""
						else:
							pos += 1
							break
					pos += 1
				myvarname = mystring[myvstart:pos]
				if braced:
					if mystring[pos] != "}":
						msg = _varexpand_unexpected_eof_msg
						if error_leader is not None:
							msg = error_leader() + msg
						writemsg(msg + "\n", noiselevel=-1)
						return ""
					else:
						pos += 1
				if len(myvarname) == 0:
					msg = "$"
					if braced:
						msg += "{}"
					msg += ": bad substitution"
					if error_leader is not None:
						msg = error_leader() + msg
					writemsg(msg + "\n", noiselevel=-1)
					return ""
				numvars += 1
				if myvarname in mydict:
					newstring.append(mydict[myvarname])
				else:
					# undefined variable: keep the bare '$'
					newstring.append(current)
			else:
				newstring.append(current)
				pos += 1
		else:
			# inside single quotes: copy characters verbatim
			newstring.append(current)
			pos += 1

	return "".join(newstring)
# pickle_write() was broken and removed, but the name is kept (as None)
# so that legacy imports of it do not raise ImportError.
pickle_write = None
def pickle_read(filename, default=None, debug=0):
	"""Load a pickled object from filename, returning default on any
	failure (unreadable file, corrupt pickle, ...)."""
	if not os.access(filename, os.R_OK):
		writemsg(_("pickle_read(): File not readable. '") + filename + "'\n", 1)
		return default
	try:
		with open(_unicode_encode(filename,
			encoding=_encodings['fs'], errors='strict'), 'rb') as myf:
			data = pickle.Unpickler(myf).load()
		writemsg(_("pickle_read(): Loaded pickle. '") + filename + "'\n", 1)
	except SystemExit:
		raise
	except Exception as e:
		# best-effort: fall back to the caller-supplied default
		writemsg(_("!!! Failed to load pickle: ") + str(e) + "\n", 1)
		data = default
	return data
936
def dump_traceback(msg, noiselevel=1):
	"""Write msg plus either the traceback of the exception currently
	being handled or, when no exception is active, the caller's stack,
	via writemsg()."""
	exc_info = sys.exc_info()
	if exc_info[2]:
		frames = traceback.extract_tb(exc_info[2])
		error = str(exc_info[1])
	else:
		# Not inside an except block: dump the current stack instead,
		# dropping this function's own frame.
		frames = traceback.extract_stack()[:-1]
		error = None
	writemsg("\n====================================\n", noiselevel=noiselevel)
	writemsg("%s\n\n" % msg, noiselevel=noiselevel)
	for line in traceback.format_list(frames):
		writemsg(line, noiselevel=noiselevel)
	if error:
		writemsg(error + "\n", noiselevel=noiselevel)
	writemsg("====================================\n\n", noiselevel=noiselevel)
952
class cmp_sort_key(object):
	"""
	Adapter turning an old-style cmp function into a "key" callable
	suitable for list.sort()/sorted(), easing python-3.0 porting (where
	list.sort() lost its "cmp" keyword argument).  Each item is wrapped
	in a key object whose __lt__ consults the given cmp function.

	Beginning with Python 2.7 and 3.2, equivalent functionality is
	provided by functools.cmp_to_key().
	"""
	__slots__ = ("_cmp_func",)

	def __init__(self, cmp_func):
		"""
		@type cmp_func: callable which takes 2 positional arguments
		@param cmp_func: A cmp function.
		"""
		self._cmp_func = cmp_func

	def __call__(self, lhs):
		# wrap the item; sort algorithms only need __lt__
		return self._cmp_key(self._cmp_func, lhs)

	class _cmp_key(object):
		__slots__ = ("_cmp_func", "_obj")

		def __init__(self, cmp_func, obj):
			self._cmp_func = cmp_func
			self._obj = obj

		def __lt__(self, other):
			# Comparing keys produced by different adapters would be
			# meaningless, so insist on the exact same wrapper class.
			if other.__class__ is not self.__class__:
				raise TypeError("Expected type %s, got %s" % \
					(self.__class__, other.__class__))
			return self._cmp_func(self._obj, other._obj) < 0
989
def unique_array(s):
	"""Return a list of the elements in s in arbitrary order, sans
	duplicates (lifted from the Python cookbook, credit: Tim Peters)."""
	# Fast path: hashable elements give linear time via a set.
	try:
		return list(set(s))
	except TypeError:
		pass

	# Unhashable but orderable: sort, then squeeze out adjacent dups.
	n = len(s)
	try:
		t = sorted(s)
	except TypeError:
		pass
	else:
		assert n > 0
		last = t[0]
		lasti = i = 1
		while i < n:
			if t[i] != last:
				t[lasti] = last = t[i]
				lasti += 1
			i += 1
		return t[:lasti]

	# Last resort: quadratic membership scan.
	u = []
	for x in s:
		if x not in u:
			u.append(x)
	return u
1023
def unique_everseen(iterable, key=None):
	"""
	Yield unique elements in order, remembering every element ever seen.
	Taken from the itertools documentation recipes.

	unique_everseen('AAAABBBCCDAABBB') --> A B C D
	unique_everseen('ABBCcAD', str.lower) --> A B C D
	"""
	seen = set()
	if key is None:
		for element in iterable:
			if element not in seen:
				seen.add(element)
				yield element
	else:
		for element in iterable:
			marker = key(element)
			if marker not in seen:
				seen.add(marker)
				yield element
1043
1044 -def _do_stat(filename, follow_links=True):
1045 try: 1046 if follow_links: 1047 return os.stat(filename) 1048 else: 1049 return os.lstat(filename) 1050 except OSError as oe: 1051 func_call = "stat('%s')" % filename 1052 if oe.errno == errno.EPERM: 1053 raise OperationNotPermitted(func_call) 1054 elif oe.errno == errno.EACCES: 1055 raise PermissionDenied(func_call) 1056 elif oe.errno == errno.ENOENT: 1057 raise FileNotFound(filename) 1058 else: 1059 raise
1060
def apply_permissions(filename, uid=-1, gid=-1, mode=-1, mask=-1,
	stat_cached=None, follow_links=True):
	"""Apply user, group, and mode bits to a file if the existing bits do not
	already match. The default behavior is to force an exact match of mode
	bits. When mask=0 is specified, mode bits on the target file are allowed
	to be a superset of the mode argument (via logical OR). When mask>0, the
	mode bits that the target file is allowed to have are restricted via
	logical XOR.
	Returns True if the permissions were modified and False otherwise.

	@param uid: target owner uid, or -1 to leave ownership untouched
	@param gid: target group gid, or -1 to leave group untouched
	@param mode: target mode bits, or -1 for unspecified
	@param mask: see docstring above; -1 forces an exact mode match
	@param stat_cached: optional pre-fetched stat result for filename
	@param follow_links: when False, operate on symlinks themselves
	@raise OperationNotPermitted: on EPERM from chown/chmod
	@raise PermissionDenied: on EACCES from chown/chmod
	@raise ReadOnlyFileSystem: on EROFS from chown/chmod
	@raise FileNotFound: on ENOENT from chown/chmod
	"""

	modified = False

	# Since Python 3.4, chown requires int type (no proxies).
	uid = int(uid)
	gid = int(gid)

	if stat_cached is None:
		stat_cached = _do_stat(filename, follow_links=follow_links)

	if (uid != -1 and uid != stat_cached.st_uid) or \
		(gid != -1 and gid != stat_cached.st_gid):
		try:
			if follow_links:
				os.chown(filename, uid, gid)
			else:
				portage.data.lchown(filename, uid, gid)
			modified = True
		except OSError as oe:
			func_call = "chown('%s', %i, %i)" % (filename, uid, gid)
			if oe.errno == errno.EPERM:
				raise OperationNotPermitted(func_call)
			elif oe.errno == errno.EACCES:
				raise PermissionDenied(func_call)
			elif oe.errno == errno.EROFS:
				raise ReadOnlyFileSystem(func_call)
			elif oe.errno == errno.ENOENT:
				raise FileNotFound(filename)
			else:
				raise

	# new_mode == -1 means "no chmod needed" throughout the rest of
	# this function.
	new_mode = -1
	st_mode = stat_cached.st_mode & 0o7777 # protect from unwanted bits
	if mask >= 0:
		if mode == -1:
			mode = 0 # Don't add any mode bits when mode is unspecified.
		else:
			mode = mode & 0o7777
		# chmod only when the file is missing requested bits or carries
		# bits that the mask disallows.
		if (mode & st_mode != mode) or \
			((mask ^ st_mode) & st_mode != st_mode):
			new_mode = mode | st_mode
			new_mode = (mask ^ new_mode) & new_mode
	elif mode != -1:
		mode = mode & 0o7777 # protect from unwanted bits
		if mode != st_mode:
			new_mode = mode

	# The chown system call may clear S_ISUID and S_ISGID
	# bits, so those bits are restored if necessary.
	if modified and new_mode == -1 and \
		(st_mode & stat.S_ISUID or st_mode & stat.S_ISGID):
		if mode == -1:
			new_mode = st_mode
		else:
			mode = mode & 0o7777
			if mask >= 0:
				new_mode = mode | st_mode
				new_mode = (mask ^ new_mode) & new_mode
			else:
				new_mode = mode

		# Skip the chmod if the recomputed mode would not actually
		# restore any setuid/setgid bit.
		if not (new_mode & stat.S_ISUID or new_mode & stat.S_ISGID):
			new_mode = -1

	if not follow_links and stat.S_ISLNK(stat_cached.st_mode):
		# Mode doesn't matter for symlinks.
		new_mode = -1

	if new_mode != -1:
		try:
			os.chmod(filename, new_mode)
			modified = True
		except OSError as oe:
			func_call = "chmod('%s', %s)" % (filename, oct(new_mode))
			if oe.errno == errno.EPERM:
				raise OperationNotPermitted(func_call)
			elif oe.errno == errno.EACCES:
				raise PermissionDenied(func_call)
			elif oe.errno == errno.EROFS:
				raise ReadOnlyFileSystem(func_call)
			elif oe.errno == errno.ENOENT:
				raise FileNotFound(filename)
			raise
	return modified
1153
def apply_stat_permissions(filename, newstat, **kwargs):
	"""Apply the ownership and mode bits recorded in a stat result.

	Thin convenience wrapper: extracts st_uid, st_gid and st_mode from
	newstat and delegates to apply_secpass_permissions.
	"""
	return apply_secpass_permissions(
		filename,
		uid=newstat.st_uid,
		gid=newstat.st_gid,
		mode=newstat.st_mode,
		**kwargs)
1159
def apply_recursive_permissions(top, uid=-1, gid=-1,
	dirmode=-1, dirmask=-1, filemode=-1, filemask=-1, onerror=None):
	"""A wrapper around apply_secpass_permissions that applies permissions
	recursively. If optional argument onerror is specified, it should be a
	function; it will be called with one argument, a PortageException instance.
	Returns True if all permissions are applied and False if some are left
	unapplied.

	@param top: root of the tree to process
	@param dirmode/dirmask: mode/mask applied to directories
	@param filemode/filemask: mode/mask applied to regular files
	@param onerror: error callback; defaults to printing to stderr
	"""

	# Avoid issues with circular symbolic links, as in bug #339670.
	follow_links = False

	if onerror is None:
		# Default behavior is to dump errors to stderr so they won't
		# go unnoticed. Callers can pass in a quiet instance.
		def onerror(e):
			if isinstance(e, OperationNotPermitted):
				writemsg(_("Operation Not Permitted: %s\n") % str(e),
					noiselevel=-1)
			elif isinstance(e, FileNotFound):
				writemsg(_("File Not Found: '%s'\n") % str(e), noiselevel=-1)
			else:
				# Unexpected exception types propagate to the caller.
				raise

	# For bug 554084, always apply permissions to a directory before
	# that directory is traversed.
	all_applied = True

	try:
		stat_cached = _do_stat(top, follow_links=follow_links)
	except FileNotFound:
		# backward compatibility
		return True

	# Pick dir vs. file mode/mask for the top node itself.
	if stat.S_ISDIR(stat_cached.st_mode):
		mode = dirmode
		mask = dirmask
	else:
		mode = filemode
		mask = filemask

	try:
		applied = apply_secpass_permissions(top,
			uid=uid, gid=gid, mode=mode, mask=mask,
			stat_cached=stat_cached, follow_links=follow_links)
		if not applied:
			all_applied = False
	except PortageException as e:
		all_applied = False
		onerror(e)

	for dirpath, dirnames, filenames in os.walk(top):
		# Process files and subdirectories of each visited directory,
		# pairing each name with the appropriate mode/mask.
		for name, mode, mask in chain(
			((x, filemode, filemask) for x in filenames),
			((x, dirmode, dirmask) for x in dirnames)):
			try:
				applied = apply_secpass_permissions(os.path.join(dirpath, name),
					uid=uid, gid=gid, mode=mode, mask=mask,
					follow_links=follow_links)
				if not applied:
					all_applied = False
			except PortageException as e:
				# Ignore InvalidLocation exceptions such as FileNotFound
				# and DirectoryNotFound since sometimes things disappear,
				# like when adjusting permissions on DISTCC_DIR.
				if not isinstance(e, portage.exception.InvalidLocation):
					all_applied = False
					onerror(e)
	return all_applied
def apply_secpass_permissions(filename, uid=-1, gid=-1, mode=-1, mask=-1,
	stat_cached=None, follow_links=True):
	"""A wrapper around apply_permissions that uses secpass and simple
	logic to apply as much of the permissions as possible without
	generating an obviously avoidable permission exception. Despite
	attempts to avoid an exception, it's possible that one will be raised
	anyway, so be prepared.
	Returns True if all permissions are applied and False if some are left
	unapplied."""

	if stat_cached is None:
		stat_cached = _do_stat(filename, follow_links=follow_links)

	all_applied = True

	# Only consult portage.data.secpass when an ownership change was
	# actually requested, since accessing it triggers config loading
	# (undesirable for chmod-lite).
	if (uid != -1 or gid != -1) and portage.data.secpass < 2:
		# Without root privileges, drop ownership changes we cannot
		# perform instead of letting chown fail.
		if uid != -1 and uid != stat_cached.st_uid:
			uid = -1
			all_applied = False

		if gid != -1 and gid != stat_cached.st_gid \
			and gid not in os.getgroups():
			gid = -1
			all_applied = False

	apply_permissions(filename, uid=uid, gid=gid, mode=mode, mask=mask,
		stat_cached=stat_cached, follow_links=follow_links)
	return all_applied
1262
class atomic_ofstream(ObjectProxy):
	"""Write a file atomically via os.rename(). Atomic replacement prevents
	interprocess interference and prevents corruption of the target
	file when the write is interrupted (for example, when an 'out of space'
	error occurs)."""

	def __init__(self, filename, mode='w', follow_links=True, **kargs):
		"""Opens a temporary filename.pid in the same directory as filename."""
		# All instance state goes through object.__setattr__ because
		# attribute access is proxied to the underlying file object.
		ObjectProxy.__init__(self)
		object.__setattr__(self, '_aborted', False)
		if 'b' in mode:
			open_func = open
		else:
			# Text mode: default to the configured content encoding.
			open_func = io.open
			kargs.setdefault('encoding', _encodings['content'])
			kargs.setdefault('errors', 'backslashreplace')

		if follow_links:
			# Write through symlinks by targeting the resolved path.
			canonical_path = os.path.realpath(filename)
			object.__setattr__(self, '_real_name', canonical_path)
			tmp_name = "%s.%i" % (canonical_path, os.getpid())
			try:
				object.__setattr__(self, '_file',
					open_func(_unicode_encode(tmp_name,
						encoding=_encodings['fs'], errors='strict'),
						mode=mode, **kargs))
				return
			except IOError as e:
				if canonical_path == filename:
					raise
				# Ignore this error, since it's irrelevant
				# and the below open call will produce a
				# new error if necessary.

		object.__setattr__(self, '_real_name', filename)
		tmp_name = "%s.%i" % (filename, os.getpid())
		object.__setattr__(self, '_file',
			open_func(_unicode_encode(tmp_name,
				encoding=_encodings['fs'], errors='strict'),
				mode=mode, **kargs))

	def _get_target(self):
		# Return the wrapped temporary file object (ObjectProxy hook).
		return object.__getattribute__(self, '_file')

	if sys.hexversion >= 0x3000000:

		def __getattribute__(self, attr):
			# Forward everything except our own control methods to the
			# underlying temporary file.
			if attr in ('close', 'abort', '__del__'):
				return object.__getattribute__(self, attr)
			return getattr(object.__getattribute__(self, '_file'), attr)

	else:

		# For TextIOWrapper, automatically coerce write calls to
		# unicode, in order to avoid TypeError when writing raw
		# bytes with python2.

		def __getattribute__(self, attr):
			# Same as the py3 variant, but also intercept write() so
			# the coercion wrapper below is used.
			if attr in ('close', 'abort', 'write', '__del__'):
				return object.__getattribute__(self, attr)
			return getattr(object.__getattribute__(self, '_file'), attr)

		def write(self, s):
			f = object.__getattribute__(self, '_file')
			if isinstance(f, io.TextIOWrapper):
				s = _unicode_decode(s)
			return f.write(s)

	def close(self):
		"""Closes the temporary file, copies permissions (if possible),
		and performs the atomic replacement via os.rename(). If the abort()
		method has been called, then the temp file is closed and removed."""
		f = object.__getattribute__(self, '_file')
		real_name = object.__getattribute__(self, '_real_name')
		if not f.closed:
			try:
				f.close()
				if not object.__getattribute__(self, '_aborted'):
					try:
						# Preserve the target's existing permissions on
						# the replacement file when possible.
						apply_stat_permissions(f.name, os.stat(real_name))
					except OperationNotPermitted:
						pass
					except FileNotFound:
						pass
					except OSError as oe: # from the above os.stat call
						if oe.errno in (errno.ENOENT, errno.EPERM):
							pass
						else:
							raise
					os.rename(f.name, real_name)
			finally:
				# Make sure we cleanup the temp file
				# even if an exception is raised.
				try:
					os.unlink(f.name)
				except OSError as oe:
					pass

	def abort(self):
		"""If an error occurs while writing the file, the user should
		call this method in order to leave the target file unchanged.
		This will call close() automatically."""
		if not object.__getattribute__(self, '_aborted'):
			object.__setattr__(self, '_aborted', True)
			self.close()

	def __del__(self):
		"""If the user does not explicitly call close(), it is
		assumed that an error has occurred, so we abort()."""
		try:
			f = object.__getattribute__(self, '_file')
		except AttributeError:
			# __init__ failed before '_file' was set; nothing to clean up.
			pass
		else:
			if not f.closed:
				self.abort()
		# ensure destructor from the base class is called
		base_destructor = getattr(ObjectProxy, '__del__', None)
		if base_destructor is not None:
			base_destructor(self)
1383
def write_atomic(file_path, content, **kwargs):
	"""Atomically write content to file_path via atomic_ofstream,
	translating common IOError/OSError failures into the corresponding
	portage exception types.

	@param file_path: destination path
	@param content: data passed to the stream's write() method
	@param kwargs: forwarded to atomic_ofstream (e.g. mode)
	"""
	stream = None
	try:
		stream = atomic_ofstream(file_path, **kwargs)
		stream.write(content)
		stream.close()
	except (IOError, OSError) as e:
		# Leave the target untouched on failure.
		if stream:
			stream.abort()
		func_call = "write_atomic('%s')" % file_path
		if e.errno == errno.ENOENT:
			raise FileNotFound(file_path)
		if e.errno == errno.EPERM:
			raise OperationNotPermitted(func_call)
		if e.errno == errno.EACCES:
			raise PermissionDenied(func_call)
		if e.errno == errno.EROFS:
			raise ReadOnlyFileSystem(func_call)
		raise
1404
def ensure_dirs(dir_path, **kwargs):
	"""Create a directory (with parents) and call apply_permissions with
	any extra keyword arguments.

	This function's handling of EEXIST errors makes it useful for atomic
	directory creation, in which multiple processes may be competing to
	create the same directory.

	@return: True if a directory was created or the permissions needed to
		be modified, False otherwise.
	"""
	created_dir = False

	try:
		os.makedirs(dir_path)
	except OSError as oe:
		func_call = "makedirs('%s')" % dir_path
		if oe.errno == errno.EEXIST:
			pass
		elif os.path.isdir(dir_path):
			# NOTE: DragonFly raises EPERM for makedir('/')
			# and that is supposed to be ignored here.
			# Also, sometimes mkdir raises EISDIR on FreeBSD
			# and we want to ignore that too (bug #187518).
			pass
		elif oe.errno == errno.EPERM:
			raise OperationNotPermitted(func_call)
		elif oe.errno == errno.EACCES:
			raise PermissionDenied(func_call)
		elif oe.errno == errno.EROFS:
			raise ReadOnlyFileSystem(func_call)
		else:
			raise
	else:
		created_dir = True

	perms_modified = apply_permissions(dir_path, **kwargs) if kwargs else False
	return created_dir or perms_modified
1444
class LazyItemsDict(UserDict):
	"""A mapping object that behaves like a standard dict except that it allows
	for lazy initialization of values via callable objects. Lazy items can be
	overwritten and deleted just as normal items."""

	__slots__ = ('lazy_items',)

	def __init__(self, *args, **kwargs):
		# Maps item_key -> _LazyItem for keys whose values have not been
		# computed yet; computed keys live only in the UserDict storage.
		self.lazy_items = {}
		UserDict.__init__(self, *args, **kwargs)

	def addLazyItem(self, item_key, value_callable, *pargs, **kwargs):
		"""Add a lazy item for the given key. When the item is requested,
		value_callable will be called with *pargs and **kwargs arguments."""
		self.lazy_items[item_key] = \
			self._LazyItem(value_callable, pargs, kwargs, False)
		# make it show up in self.keys(), etc...
		UserDict.__setitem__(self, item_key, None)

	def addLazySingleton(self, item_key, value_callable, *pargs, **kwargs):
		"""This is like addLazyItem except value_callable will only be called
		a maximum of 1 time and the result will be cached for future requests."""
		self.lazy_items[item_key] = \
			self._LazyItem(value_callable, pargs, kwargs, True)
		# make it show up in self.keys(), etc...
		UserDict.__setitem__(self, item_key, None)

	def update(self, *args, **kwargs):
		"""Merge another mapping, preserving laziness when the source is
		itself a LazyItemsDict."""
		if len(args) > 1:
			raise TypeError(
				"expected at most 1 positional argument, got " + \
				repr(len(args)))
		if args:
			map_obj = args[0]
		else:
			map_obj = None
		if map_obj is None:
			pass
		elif isinstance(map_obj, LazyItemsDict):
			for k in map_obj:
				if k in map_obj.lazy_items:
					# Keep the None placeholder; the value will come from
					# the lazy_items entry copied below.
					UserDict.__setitem__(self, k, None)
				else:
					UserDict.__setitem__(self, k, map_obj[k])
			self.lazy_items.update(map_obj.lazy_items)
		else:
			UserDict.update(self, map_obj)
		if kwargs:
			UserDict.update(self, kwargs)

	def __getitem__(self, item_key):
		# Lazy keys are computed on demand; singletons additionally cache
		# the computed value via __setitem__ (which also drops the
		# lazy_items entry).
		if item_key in self.lazy_items:
			lazy_item = self.lazy_items[item_key]
			pargs = lazy_item.pargs
			if pargs is None:
				pargs = ()
			kwargs = lazy_item.kwargs
			if kwargs is None:
				kwargs = {}
			result = lazy_item.func(*pargs, **kwargs)
			if lazy_item.singleton:
				self[item_key] = result
			return result

		else:
			return UserDict.__getitem__(self, item_key)

	def __setitem__(self, item_key, value):
		# An explicit assignment overrides any pending lazy item.
		if item_key in self.lazy_items:
			del self.lazy_items[item_key]
		UserDict.__setitem__(self, item_key, value)

	def __delitem__(self, item_key):
		if item_key in self.lazy_items:
			del self.lazy_items[item_key]
		UserDict.__delitem__(self, item_key)

	def clear(self):
		self.lazy_items.clear()
		UserDict.clear(self)

	def copy(self):
		return self.__copy__()

	def __copy__(self):
		# Shallow copy: lazy items are preserved via update() in __init__.
		return self.__class__(self)

	def __deepcopy__(self, memo=None):
		"""
		This forces evaluation of each contained lazy item, and deepcopy of
		the result. A TypeError is raised if any contained lazy item is not
		a singleton, since it is not necessarily possible for the behavior
		of this type of item to be safely preserved.
		"""
		if memo is None:
			memo = {}
		result = self.__class__()
		memo[id(self)] = result
		for k in self:
			k_copy = deepcopy(k, memo)
			lazy_item = self.lazy_items.get(k)
			if lazy_item is not None:
				if not lazy_item.singleton:
					raise TypeError("LazyItemsDict " + \
						"deepcopy is unsafe with lazy items that are " + \
						"not singletons: key=%s value=%s" % (k, lazy_item,))
			UserDict.__setitem__(result, k_copy, deepcopy(self[k], memo))
		return result

	class _LazyItem(object):
		# Internal record describing how to compute one lazy value.

		__slots__ = ('func', 'pargs', 'kwargs', 'singleton')

		def __init__(self, func, pargs, kwargs, singleton):
			# Store None instead of empty containers to save memory.
			if not pargs:
				pargs = None
			if not kwargs:
				kwargs = None

			self.func = func
			self.pargs = pargs
			self.kwargs = kwargs
			self.singleton = singleton

		def __copy__(self):
			return self.__class__(self.func, self.pargs,
				self.kwargs, self.singleton)

		def __deepcopy__(self, memo=None):
			"""
			Override this since the default implementation can fail silently,
			leaving some attributes unset.
			"""
			if memo is None:
				memo = {}
			result = self.__copy__()
			memo[id(self)] = result
			result.func = deepcopy(self.func, memo)
			result.pargs = deepcopy(self.pargs, memo)
			result.kwargs = deepcopy(self.kwargs, memo)
			result.singleton = deepcopy(self.singleton, memo)
			return result
1589
class ConfigProtect(object):
	"""Tracks CONFIG_PROTECT and CONFIG_PROTECT_MASK paths and answers
	isprotected() queries against them."""

	def __init__(self, myroot, protect_list, mask_list,
		case_insensitive=False):
		"""
		@param myroot: root directory that protect/mask entries are
			interpreted relative to
		@param protect_list: iterable of CONFIG_PROTECT path entries
		@param mask_list: iterable of CONFIG_PROTECT_MASK path entries
		@param case_insensitive: compare paths case-insensitively
		"""
		self.myroot = myroot
		self.protect_list = protect_list
		self.mask_list = mask_list
		self.case_insensitive = case_insensitive
		self.updateprotect()

	def updateprotect(self):
		"""Update internal state for isprotected() calls. Nonexistent paths
		are ignored."""

		os = _os_merge

		self.protect = []
		self._dirs = set()
		for x in self.protect_list:
			ppath = normalize_path(
				os.path.join(self.myroot, x.lstrip(os.path.sep)))
			# Bug fix: isprotected() lower-cases the queried path when
			# case_insensitive is enabled, and the mask loop below
			# lower-cases its entries, but protect entries were not
			# lower-cased, so a protect path containing an upper-case
			# character could never match. Lower-case them here too.
			if self.case_insensitive:
				ppath = ppath.lower()
			# Protect files that don't exist (bug #523684). If the
			# parent directory doesn't exist, we can safely skip it.
			if os.path.isdir(os.path.dirname(ppath)):
				self.protect.append(ppath)
			try:
				if stat.S_ISDIR(os.stat(ppath).st_mode):
					self._dirs.add(ppath)
			except OSError:
				pass

		self.protectmask = []
		for x in self.mask_list:
			ppath = normalize_path(
				os.path.join(self.myroot, x.lstrip(os.path.sep)))
			if self.case_insensitive:
				ppath = ppath.lower()
			try:
				# Use lstat so that anything, even a broken symlink can
				# be protected.
				if stat.S_ISDIR(os.lstat(ppath).st_mode):
					self._dirs.add(ppath)
				self.protectmask.append(ppath)
				# Now use stat in case this is a symlink to a directory.
				if stat.S_ISDIR(os.stat(ppath).st_mode):
					self._dirs.add(ppath)
			except OSError:
				# If it doesn't exist, there's no need to mask it.
				pass

	def isprotected(self, obj):
		"""Returns True if obj is protected, False otherwise. The caller must
		ensure that obj is normalized with a single leading slash. A trailing
		slash is optional for directories."""
		masked = 0
		protected = 0
		sep = os.path.sep
		if self.case_insensitive:
			obj = obj.lower()
		for ppath in self.protect:
			if len(ppath) > masked and obj.startswith(ppath):
				if ppath in self._dirs:
					if obj != ppath and not obj.startswith(ppath + sep):
						# /etc/foo does not match /etc/foobaz
						continue
				elif obj != ppath:
					# force exact match when CONFIG_PROTECT lists a
					# non-directory
					continue
				protected = len(ppath)
		#config file management
		for pmpath in self.protectmask:
			if len(pmpath) >= protected and obj.startswith(pmpath):
				if pmpath in self._dirs:
					if obj != pmpath and \
						not obj.startswith(pmpath + sep):
						# /etc/foo does not match /etc/foobaz
						continue
				elif obj != pmpath:
					# force exact match when CONFIG_PROTECT_MASK lists
					# a non-directory
					continue
				#skip, it's in the mask
				masked = len(pmpath)
		# Protected only when the deepest matching protect entry is
		# longer than the deepest matching mask entry.
		return protected > masked
1674
def new_protect_filename(mydest, newmd5=None, force=False):
	"""Resolves a config-protect filename for merging, optionally
	using the last filename if the md5 matches. If force is True,
	then a new filename will be generated even if mydest does not
	exist yet.
	(dest,md5) ==> 'string'            --- path_to_target_filename
	(dest)     ==> ('next', 'highest') --- next_target and most-recent_target
	"""

	# config protection filename format:
	# ._cfg0000_foo
	# 0123456789012

	os = _os_merge

	prot_num = -1
	last_pfile = ""

	if not force and \
		not os.path.exists(mydest):
		return mydest

	real_filename = os.path.basename(mydest)
	real_dirname = os.path.dirname(mydest)
	# Find the highest existing ._cfg????_<name> counter for this file.
	for pfile in os.listdir(real_dirname):
		if pfile[0:5] != "._cfg":
			continue
		if pfile[10:] != real_filename:
			continue
		try:
			new_prot_num = int(pfile[5:9])
			if new_prot_num > prot_num:
				prot_num = new_prot_num
				last_pfile = pfile
		except ValueError:
			continue
	prot_num = prot_num + 1

	new_pfile = normalize_path(os.path.join(real_dirname,
		"._cfg" + str(prot_num).zfill(4) + "_" + real_filename))
	old_pfile = normalize_path(os.path.join(real_dirname, last_pfile))
	if last_pfile and newmd5:
		try:
			old_pfile_st = os.lstat(old_pfile)
		except OSError as e:
			if e.errno != errno.ENOENT:
				raise
		else:
			if stat.S_ISLNK(old_pfile_st.st_mode):
				try:
					# Read symlink target as bytes, in case the
					# target path has a bad encoding.
					pfile_link = os.readlink(_unicode_encode(old_pfile,
						encoding=_encodings['merge'], errors='strict'))
				except OSError as e:
					# Bug fix: this handler previously read e.errno without
					# binding the exception ("except OSError:"), referencing
					# an unbound name and raising NameError instead of
					# performing the intended ENOENT check.
					if e.errno != errno.ENOENT:
						raise
				else:
					pfile_link = _unicode_decode(pfile_link,
						encoding=_encodings['merge'], errors='replace')
					if pfile_link == newmd5:
						# Reuse the previous entry when the recorded
						# symlink target is unchanged.
						return old_pfile
			else:
				try:
					last_pfile_md5 = \
						portage.checksum._perform_md5_merge(old_pfile)
				except FileNotFound:
					# The file suddenly disappeared or it's a
					# broken symlink.
					pass
				else:
					if last_pfile_md5 == newmd5:
						# Reuse the previous entry when content is unchanged.
						return old_pfile
	return new_pfile
1749
def find_updated_config_files(target_root, config_protect):
	"""
	Generator yielding configuration paths that have pending ._cfg????_*
	update candidates.

	Each yielded value is a 2-tuple:
		(protected_dir, file_list)  when the protected path is a directory
		(protected_file, None)      when the protected path is a single file

	@param target_root: root prefix joined onto each config_protect entry
	@param config_protect: iterable of protected path entries
	"""

	encoding = _encodings['fs']

	if config_protect:
		# directories with some protect files in them
		for x in config_protect:
			files = []

			x = os.path.join(target_root, x.lstrip(os.path.sep))
			if not os.access(x, os.W_OK):
				# Not writable by us, so no update is possible anyway.
				continue
			try:
				mymode = os.lstat(x).st_mode
			except OSError:
				continue

			if stat.S_ISLNK(mymode):
				# We want to treat it like a directory if it
				# is a symlink to an existing directory.
				try:
					real_mode = os.stat(x).st_mode
					if stat.S_ISDIR(real_mode):
						mymode = real_mode
				except OSError:
					pass

			# Build a find(1) command: search the whole tree for
			# directories, or just the parent dir for a single file.
			if stat.S_ISDIR(mymode):
				mycommand = \
					"find '%s' -name '.*' -type d -prune -o -name '._cfg????_*'" % x
			else:
				mycommand = "find '%s' -maxdepth 1 -name '._cfg????_%s'" % \
					os.path.split(x.rstrip(os.path.sep))
			mycommand += " ! -name '.*~' ! -iname '.*.bak' -print0"
			cmd = shlex_split(mycommand)

			if sys.hexversion < 0x3020000 and sys.hexversion >= 0x3000000:
				# Python 3.1 _execvp throws TypeError for non-absolute executable
				# path passed as bytes (see https://bugs.python.org/issue8513).
				fullname = portage.process.find_binary(cmd[0])
				if fullname is None:
					raise portage.exception.CommandNotFound(cmd[0])
				cmd[0] = fullname

			cmd = [_unicode_encode(arg, encoding=encoding, errors='strict')
				for arg in cmd]
			proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
				stderr=subprocess.STDOUT)
			output = _unicode_decode(proc.communicate()[0], encoding=encoding)
			status = proc.wait()
			if os.WIFEXITED(status) and os.WEXITSTATUS(status) == os.EX_OK:
				files = output.split('\0')
				# split always produces an empty string as the last element
				if files and not files[-1]:
					del files[-1]
				if files:
					if stat.S_ISDIR(mymode):
						yield (x, files)
					else:
						yield (x, None)

# Matches an "include <pattern>" directive in ld.so.conf; group 1 holds
# the (possibly glob) pattern, consumed by getlibpaths().
_ld_so_include_re = re.compile(r'^include\s+(\S.*)')

def getlibpaths(root, env=None):
	"""Return a list of paths that are used for library lookups.

	Paths come from LD_LIBRARY_PATH, from <root>/etc/ld.so.conf (with
	"include" directives expanded recursively via glob), and from the
	standard /usr/lib and /lib fallbacks, per ld.so(8).

	Fix: the summary string used to sit mid-function as a bare expression
	statement (after the nested def), so it never became this function's
	__doc__; it is now a proper docstring.
	"""
	def read_ld_so_conf(path):
		# Yield each configured library path, recursing into files
		# matched by "include <glob>" directives.
		for l in grabfile(path):
			include_match = _ld_so_include_re.match(l)
			if include_match is not None:
				subpath = os.path.join(os.path.dirname(path),
					include_match.group(1))
				for p in glob.glob(subpath):
					for r in read_ld_so_conf(p):
						yield r
			else:
				yield l

	if env is None:
		env = os.environ
	# the following is based on the information from ld.so(8)
	rval = env.get("LD_LIBRARY_PATH", "").split(":")
	rval.extend(read_ld_so_conf(os.path.join(root, "etc", "ld.so.conf")))
	rval.append("/usr/lib")
	rval.append("/lib")

	# Drop empty entries produced by leading/trailing/double colons.
	return [normalize_path(x) for x in rval if x]