Package portage :: Package util
[hide private]

Source Code for Package portage.util

   1  # Copyright 2004-2017 Gentoo Foundation 
   2  # Distributed under the terms of the GNU General Public License v2 
   3   
   4  from __future__ import unicode_literals 
   5   
   6  __all__ = ['apply_permissions', 'apply_recursive_permissions', 
   7          'apply_secpass_permissions', 'apply_stat_permissions', 'atomic_ofstream', 
   8          'cmp_sort_key', 'ConfigProtect', 'dump_traceback', 'ensure_dirs', 
   9          'find_updated_config_files', 'getconfig', 'getlibpaths', 'grabdict', 
  10          'grabdict_package', 'grabfile', 'grabfile_package', 'grablines', 
  11          'initialize_logger', 'LazyItemsDict', 'map_dictlist_vals', 
  12          'new_protect_filename', 'normalize_path', 'pickle_read', 'stack_dictlist', 
  13          'stack_dicts', 'stack_lists', 'unique_array', 'unique_everseen', 'varexpand', 
  14          'write_atomic', 'writedict', 'writemsg', 'writemsg_level', 'writemsg_stdout'] 
  15   
  16  from copy import deepcopy 
  17  import errno 
  18  import io 
  19  try: 
  20          from itertools import chain, filterfalse 
  21  except ImportError: 
  22          from itertools import chain, ifilterfalse as filterfalse 
  23  import logging 
  24  import re 
  25  import shlex 
  26  import stat 
  27  import string 
  28  import sys 
  29  import traceback 
  30  import glob 
  31   
  32  import portage 
  33  portage.proxy.lazyimport.lazyimport(globals(), 
  34          'pickle', 
  35          'portage.dep:Atom', 
  36          'subprocess', 
  37  ) 
  38   
  39  from portage import os 
  40  from portage import _encodings 
  41  from portage import _os_merge 
  42  from portage import _unicode_encode 
  43  from portage import _unicode_decode 
  44  from portage.const import VCS_DIRS 
  45  from portage.exception import InvalidAtom, PortageException, FileNotFound, \ 
  46          IsADirectory, OperationNotPermitted, ParseError, PermissionDenied, \ 
  47          ReadOnlyFileSystem 
  48  from portage.localization import _ 
  49  from portage.proxy.objectproxy import ObjectProxy 
  50  from portage.cache.mappings import UserDict 
  51   
# Alias for the native text type: str on Python 3, unicode on Python 2.
# (The unicode branch is only evaluated on Python 2 interpreters.)
if sys.hexversion >= 0x3000000:
    _unicode = str
else:
    _unicode = unicode

# Module-wide verbosity limit: writemsg() suppresses any message whose
# noiselevel is greater than this value.
noiselimit = 0
def initialize_logger(level=logging.WARNING):
    """Set up basic logging of portage activities.

    Args:
        level: the minimum logging level at which messages are emitted
            (e.g. logging.INFO, logging.DEBUG)
    Returns:
        None
    """
    message_format = '[%(levelname)-4s] %(message)s'
    logging.basicConfig(level=level, format=message_format)
67
def writemsg(mystr, noiselevel=0, fd=None):
    """Prints out warning and debug messages based on the noiselimit setting

    @param mystr: message text, including any trailing newline
    @param noiselevel: the message is written only when
        noiselevel <= the module-level noiselimit
    @param fd: file-like destination; defaults to sys.stderr
    """
    global noiselimit
    if fd is None:
        fd = sys.stderr
    if noiselevel <= noiselimit:
        # avoid potential UnicodeEncodeError
        if isinstance(fd, io.StringIO):
            # io.StringIO only accepts text, so decode rather than encode.
            mystr = _unicode_decode(mystr,
                encoding=_encodings['content'], errors='replace')
        else:
            mystr = _unicode_encode(mystr,
                encoding=_encodings['stdio'], errors='backslashreplace')
            # On Python 3, stdout/stderr are text streams; write the
            # encoded bytes to their underlying binary buffer instead.
            if sys.hexversion >= 0x3000000 and fd in (sys.stdout, sys.stderr):
                fd = fd.buffer
        fd.write(mystr)
        fd.flush()
85
def writemsg_stdout(mystr, noiselevel=0):
    """Prints messages stdout based on the noiselimit setting

    Thin wrapper around writemsg() that targets sys.stdout instead of
    the default sys.stderr.
    """
    writemsg(mystr, noiselevel=noiselevel, fd=sys.stdout)
89
def writemsg_level(msg, level=0, noiselevel=0):
    """
    Show a message for the given level as defined by the logging module
    (default is 0). When level >= logging.WARNING then the message is
    sent to stderr, otherwise it is sent to stdout. The noiselevel is
    passed directly to writemsg().

    @type msg: str
    @param msg: a message string, including newline if appropriate
    @type level: int
    @param level: a numeric logging level (see the logging module)
    @type noiselevel: int
    @param noiselevel: passed directly to writemsg
    """
    out = sys.stderr if level >= logging.WARNING else sys.stdout
    writemsg(msg, noiselevel=noiselevel, fd=out)
109
def normalize_path(mypath):
    """
    Like os.path.normpath(), except that any number of leading slashes
    is collapsed to exactly one ("//foo" normally stays "//foo"; here
    it becomes "/foo").
    """
    if sys.hexversion >= 0x3000000 and isinstance(mypath, bytes):
        path_sep = os.path.sep.encode()
    else:
        path_sep = os.path.sep

    if not mypath.startswith(path_sep):
        return os.path.normpath(mypath)
    # posixpath.normpath collapses 3 or more leading slashes to just 1,
    # so prefixing two extra separators guarantees a single leading slash.
    return os.path.normpath(2 * path_sep + mypath)
126
def grabfile(myfilename, compat_level=0, recursive=0, remember_source_file=False):
    """This function grabs the lines in a file, normalizes whitespace and returns lines in a list; if a line
    begins with a #, it is ignored, as are empty lines

    @param myfilename: file (or, with recursive=1, directory) to read
    @param compat_level: lines of the form '##COMPAT==>N<==payload' have
        their payload included when compat_level >= N
    @param recursive: passed through to grablines()
    @param remember_source_file: when True, return (line, source_file)
        tuples instead of bare lines
    """

    # Always ask grablines for source files; they are dropped below
    # when remember_source_file is False.
    mylines = grablines(myfilename, recursive, remember_source_file=True)
    newlines = []

    for x, source_file in mylines:
        #the split/join thing removes leading and trailing whitespace, and converts any whitespace in the line
        #into single spaces.
        myline = x.split()
        if x and x[0] != "#":
            # Strip an inline comment: keep tokens up to the first one
            # that starts with '#'.
            mylinetemp = []
            for item in myline:
                if item[:1] != "#":
                    mylinetemp.append(item)
                else:
                    break
            myline = mylinetemp

        myline = " ".join(myline)
        if not myline:
            continue
        if myline[0] == "#":
            # Check if we have a compat-level string. BC-integration data.
            # '##COMPAT==>N<==' 'some string attached to it'
            mylinetest = myline.split("<==", 1)
            if len(mylinetest) == 2:
                myline_potential = mylinetest[1]
                mylinetest = mylinetest[0].split("##COMPAT==>")
                if len(mylinetest) == 2:
                    if compat_level >= int(mylinetest[1]):
                        # It's a compat line, and the key matches.
                        newlines.append(myline_potential)
                continue
            else:
                continue
        if remember_source_file:
            newlines.append((myline, source_file))
        else:
            newlines.append(myline)
    return newlines
169
def map_dictlist_vals(func, myDict):
    """Performs a function on each value of each key in a dictlist.
    Returns a new dictlist.

    @param func: callable applied to every element of every value list
    @param myDict: dict mapping keys to lists of values
    @return: a new dict with the same keys and transformed value lists
    """
    # The original initialized each key to [] and then immediately
    # rebound it; that dead assignment is dropped by using a single
    # dict comprehension.
    return {key: [func(x) for x in myDict[key]] for key in myDict}
178
def stack_dictlist(original_dicts, incremental=0, incrementals=(), ignore_none=0):
    """
    Stacks an array of dict-types into one array. Optionally merging or
    overwriting matching key/value pairs for the dict[key]->list.
    Returns a single dict. Higher index in lists is preferenced.

    Example usage:
       >>> from portage.util import stack_dictlist
       >>> print stack_dictlist( [{'a':'b'},{'x':'y'}])
       >>> {'a':'b','x':'y'}
       >>> print stack_dictlist( [{'a':'b'},{'a':'c'}], incremental = True )
       >>> {'a':['b','c'] }
       >>> a = {'KEYWORDS':['x86','alpha']}
       >>> b = {'KEYWORDS':['-x86']}
       >>> print stack_dictlist( [a,b] )
       >>> { 'KEYWORDS':['x86','alpha','-x86']}
       >>> print stack_dictlist( [a,b], incremental=True)
       >>> { 'KEYWORDS':['alpha'] }
       >>> print stack_dictlist( [a,b], incrementals=['KEYWORDS'])
       >>> { 'KEYWORDS':['alpha'] }

    @param original_dicts a list of (dictionary objects or None)
    @type list
    @param incremental True or false depending on whether new keys should overwrite
       keys which already exist.
    @type boolean
    @param incrementals A list of items that should be incremental (-foo removes foo from
       the returned dict).  Note: the default was previously a shared
       mutable list ([]); an empty tuple is used now, which is
       behaviorally identical for the membership tests performed here.
    @type list
    @param ignore_none Appears to be ignored, but probably was used long long ago.
    @type boolean

    """
    final_dict = {}
    for mydict in original_dicts:
        if mydict is None:
            continue
        for y in mydict:
            if not y in final_dict:
                final_dict[y] = []

            for thing in mydict[y]:
                if thing:
                    if incremental or y in incrementals:
                        if thing == "-*":
                            # "-*" wipes everything accumulated so far.
                            final_dict[y] = []
                            continue
                        elif thing[:1] == '-':
                            # "-foo" removes a previously stacked "foo".
                            try:
                                final_dict[y].remove(thing[1:])
                            except ValueError:
                                pass
                            continue
                    if thing not in final_dict[y]:
                        final_dict[y].append(thing)
            if y in final_dict and not final_dict[y]:
                # Drop keys whose value lists ended up empty.
                del final_dict[y]
    return final_dict
237
def stack_dicts(dicts, incremental=0, incrementals=(), ignore_none=0):
    """Stacks an array of dict-types into one array. Optionally merging or
    overwriting matching key/value pairs for the dict[key]->string.
    Returns a single dict.

    @param dicts: iterable of dicts (None/empty entries are skipped)
    @param incremental: when true, values of duplicate keys are
        space-joined instead of overwritten
    @param incrementals: keys that are always space-joined.  Note: the
        default was previously a shared mutable list ([]); an empty
        tuple is used now, which is behaviorally identical for the
        membership test performed here.
    @param ignore_none: unused, kept for backward compatibility
    """
    final_dict = {}
    for mydict in dicts:
        if not mydict:
            continue
        for k, v in mydict.items():
            if k in final_dict and (incremental or (k in incrementals)):
                final_dict[k] += " " + v
            else:
                final_dict[k] = v
    return final_dict
252
def append_repo(atom_list, repo_name, remember_source_file=False):
    """
    Takes a list of valid atoms without repo spec and appends ::repo_name.
    If an atom already has a repo part, then it is preserved (see bug #461948).

    @param atom_list: iterable of Atom instances, or of
        (atom, source_file) pairs when remember_source_file is True
    @param repo_name: repository name appended to atoms lacking one
    @param remember_source_file: when True, (atom, source_file) pairs
        are consumed and produced
    @return: list of atoms or (atom, source_file) pairs
    """
    # Use explicit conditional expressions instead of the fragile
    # "cond and a or b" idiom, which silently falls through to "b"
    # whenever "a" is falsy.
    if remember_source_file:
        return [(atom if atom.repo is not None else atom.with_repo(repo_name),
            source) for atom, source in atom_list]
    return [atom if atom.repo is not None else atom.with_repo(repo_name)
        for atom in atom_list]
264
def stack_lists(lists, incremental=1, remember_source_file=False,
    warn_for_unmatched_removal=False, strict_warn_for_unmatched_removal=False, ignore_repo=False):
    """Stacks an array of list-types into one array. Optionally removing
    distinct values using '-value' notation. Higher index is preferenced.

    all elements must be hashable.

    @param lists: iterable of iterables of tokens, or of (token, source)
        pairs when remember_source_file is True
    @param incremental: enable '-token' / '-*' removal semantics
    @param remember_source_file: tokens arrive (and are returned) paired
        with their source file
    @param warn_for_unmatched_removal: emit a writemsg() warning for
        removal tokens that matched nothing
    @param strict_warn_for_unmatched_removal: warn even when the same
        removal matched in an earlier list
    @param ignore_repo: let -cat/pkg also remove cat/pkg::repo entries
        (tokens are then assumed to be Atom instances with a .repo attr)
    """
    matched_removals = set()
    # dict used as an insertion-ordered set of (source_file -> tokens).
    unmatched_removals = {}
    # dict used as an insertion-ordered mapping token -> source_file.
    new_list = {}
    for sub_list in lists:
        for token in sub_list:
            token_key = token
            if remember_source_file:
                token, source_file = token
            else:
                source_file = False

            if token is None:
                continue

            if incremental:
                if token == "-*":
                    new_list.clear()
                elif token[:1] == '-':
                    matched = False
                    if ignore_repo and not "::" in token:
                        #Let -cat/pkg remove cat/pkg::repo.
                        to_be_removed = []
                        token_slice = token[1:]
                        for atom in new_list:
                            atom_without_repo = atom
                            if atom.repo is not None:
                                # Atom.without_repo instantiates a new Atom,
                                # which is unnecessary here, so use string
                                # replacement instead.
                                atom_without_repo = \
                                    atom.replace("::" + atom.repo, "", 1)
                            if atom_without_repo == token_slice:
                                to_be_removed.append(atom)
                        if to_be_removed:
                            matched = True
                            for atom in to_be_removed:
                                new_list.pop(atom)
                    else:
                        try:
                            new_list.pop(token[1:])
                            matched = True
                        except KeyError:
                            pass

                    if not matched:
                        # Record the miss for the optional warning below.
                        if source_file and \
                            (strict_warn_for_unmatched_removal or \
                            token_key not in matched_removals):
                            unmatched_removals.setdefault(source_file, set()).add(token)
                    else:
                        matched_removals.add(token_key)
                else:
                    new_list[token] = source_file
            else:
                new_list[token] = source_file

    if warn_for_unmatched_removal:
        for source_file, tokens in unmatched_removals.items():
            if len(tokens) > 3:
                # Only show three examples; set.pop() order is arbitrary.
                selected = [tokens.pop(), tokens.pop(), tokens.pop()]
                writemsg(_("--- Unmatched removal atoms in %s: %s and %s more\n") % \
                    (source_file, ", ".join(selected), len(tokens)),
                    noiselevel=-1)
            else:
                writemsg(_("--- Unmatched removal atom(s) in %s: %s\n") %
                    (source_file, ", ".join(tokens)),
                    noiselevel=-1)

    if remember_source_file:
        return list(new_list.items())
    else:
        return list(new_list)
342
def grabdict(myfilename, juststrings=0, empty=0, recursive=0, incremental=1, newlines=0):
    """
    This function grabs the lines in a file, normalizes whitespace and returns lines in a dictionary

    @param myfilename: file to process
    @type myfilename: string (path)
    @param juststrings: only return strings
    @type juststrings: Boolean (integer)
    @param empty: Ignore certain lines
    @type empty: Boolean (integer)
    @param recursive: Recursively grab ( support for /etc/portage/package.keywords/* and friends )
    @type recursive: Boolean (integer)
    @param incremental: Append to the return list, don't overwrite
    @type incremental: Boolean (integer)
    @param newlines: Append newlines
    @type newlines: Boolean (integer)
    @rtype: Dictionary
    @return:
    1.  Returns the lines in a file in a dictionary, for example:
        'sys-apps/portage x86 amd64 ppc'
        would return
        {"sys-apps/portage" : ['x86', 'amd64', 'ppc']}
    """
    newdict = {}
    for x in grablines(myfilename, recursive):
        #the split/join thing removes leading and trailing whitespace, and converts any whitespace in the line
        #into single spaces.
        if x[0] == "#":
            continue
        myline=x.split()
        # Strip an inline comment: keep tokens up to the first one that
        # starts with '#'.
        mylinetemp = []
        for item in myline:
            if item[:1] != "#":
                mylinetemp.append(item)
            else:
                break
        myline = mylinetemp
        # With empty=0, a key without values is ignored; with empty=1,
        # only completely blank lines are ignored.
        if len(myline) < 2 and empty == 0:
            continue
        if len(myline) < 1 and empty == 1:
            continue
        if newlines:
            myline.append("\n")
        if incremental:
            newdict.setdefault(myline[0], []).extend(myline[1:])
        else:
            newdict[myline[0]] = myline[1:]
    if juststrings:
        for k, v in newdict.items():
            newdict[k] = " ".join(v)
    return newdict
# Cache for read_corresponding_eapi_file(), keyed by eapi file path.
# A value of None marks a file that was missing or invalid.
_eapi_cache = {}
def read_corresponding_eapi_file(filename, default="0"):
    """
    Read the 'eapi' file from the directory 'filename' is in.
    Returns "0" if the file is not present or invalid.

    Results are memoized in the module-level _eapi_cache; a cached None
    (missing/invalid file) is translated to the caller's default.
    """
    eapi_file = os.path.join(os.path.dirname(filename), "eapi")
    try:
        eapi = _eapi_cache[eapi_file]
    except KeyError:
        pass
    else:
        if eapi is None:
            return default
        return eapi

    eapi = None
    try:
        with io.open(_unicode_encode(eapi_file,
            encoding=_encodings['fs'], errors='strict'),
            mode='r', encoding=_encodings['repo.content'], errors='replace') as f:
            lines = f.readlines()
        # The file must contain exactly one line (the EAPI string).
        if len(lines) == 1:
            eapi = lines[0].rstrip("\n")
        else:
            writemsg(_("--- Invalid 'eapi' file (doesn't contain exactly one line): %s\n") % (eapi_file),
                noiselevel=-1)
    except IOError:
        pass

    _eapi_cache[eapi_file] = eapi
    if eapi is None:
        return default
    return eapi
430
def grabdict_package(myfilename, juststrings=0, recursive=0, newlines=0,
    allow_wildcard=False, allow_repo=False, allow_build_id=False,
    verify_eapi=False, eapi=None, eapi_default="0"):
    """ Does the same thing as grabdict except it validates keys
    with isvalidatom()

    @param verify_eapi: when True and eapi is None, the EAPI is read
        from the 'eapi' file next to myfilename (not next to each
        recursively discovered file)
    @return: dict mapping Atom instances to lists of value strings
        (space-joined strings when juststrings is true)
    """

    if recursive:
        file_list = _recursive_file_list(myfilename)
    else:
        file_list = [myfilename]

    atoms = {}
    for filename in file_list:
        d = grabdict(filename, juststrings=False,
            empty=True, recursive=False, incremental=True, newlines=newlines)
        if not d:
            continue
        if verify_eapi and eapi is None:
            # NOTE(review): the eapi file is resolved relative to
            # myfilename, not the per-iteration filename.
            eapi = read_corresponding_eapi_file(
                myfilename, default=eapi_default)

        for k, v in d.items():
            try:
                k = Atom(k, allow_wildcard=allow_wildcard,
                    allow_repo=allow_repo,
                    allow_build_id=allow_build_id, eapi=eapi)
            except InvalidAtom as e:
                # Invalid keys are warned about and skipped.
                writemsg(_("--- Invalid atom in %s: %s\n") % (filename, e),
                    noiselevel=-1)
            else:
                atoms.setdefault(k, []).extend(v)

    if juststrings:
        for k, v in atoms.items():
            atoms[k] = " ".join(v)

    return atoms
468
def grabfile_package(myfilename, compatlevel=0, recursive=0,
    allow_wildcard=False, allow_repo=False, allow_build_id=False,
    remember_source_file=False, verify_eapi=False, eapi=None,
    eapi_default="0"):
    """
    Like grabfile(), but each line is validated as an atom.  Lines with
    a '-' prefix (and, in 'packages' files, a '*' prefix) have the
    prefix stripped for validation; such lines are returned as plain
    strings with the prefix intact, while ordinary lines are returned
    as Atom instances.  Invalid atoms are warned about and skipped.
    """

    pkgs=grabfile(myfilename, compatlevel, recursive=recursive, remember_source_file=True)
    if not pkgs:
        return pkgs
    if verify_eapi and eapi is None:
        eapi = read_corresponding_eapi_file(
            myfilename, default=eapi_default)
    mybasename = os.path.basename(myfilename)
    is_packages_file = mybasename == 'packages'
    atoms = []
    for pkg, source_file in pkgs:
        pkg_orig = pkg
        # for packages and package.mask files
        if pkg[:1] == "-":
            # '-*' is passed through verbatim in a profile 'packages' file.
            if is_packages_file and pkg == '-*':
                if remember_source_file:
                    atoms.append((pkg, source_file))
                else:
                    atoms.append(pkg)
                continue
            pkg = pkg[1:]
        if pkg[:1] == '*' and is_packages_file:
            pkg = pkg[1:]
        try:
            pkg = Atom(pkg, allow_wildcard=allow_wildcard,
                allow_repo=allow_repo, allow_build_id=allow_build_id,
                eapi=eapi)
        except InvalidAtom as e:
            writemsg(_("--- Invalid atom in %s: %s\n") % (source_file, e),
                noiselevel=-1)
        else:
            if pkg_orig == _unicode(pkg):
                # normal atom, so return as Atom instance
                if remember_source_file:
                    atoms.append((pkg, source_file))
                else:
                    atoms.append(pkg)
            else:
                # atom has special prefix, so return as string
                if remember_source_file:
                    atoms.append((pkg_orig, source_file))
                else:
                    atoms.append(pkg_orig)
    return atoms
517
518 -def _recursive_basename_filter(f):
519 return not f.startswith(".") and not f.endswith("~")
520
def _recursive_file_list(path):
    """Generator yielding the paths of all regular files under path,
    in sorted order.  VCS directories, hidden names and backup names
    (see _recursive_basename_filter) are skipped."""
    # path may be a regular file or a directory

    def onerror(e):
        # Only permission errors are escalated; other OSErrors cause
        # the offending entry to be silently skipped by the caller.
        if e.errno == PermissionDenied.errno:
            raise PermissionDenied(path)

    stack = [os.path.split(path)]

    while stack:
        parent, fname = stack.pop()
        fullpath = os.path.join(parent, fname)

        try:
            st = os.stat(fullpath)
        except OSError as e:
            onerror(e)
            continue

        if stat.S_ISDIR(st.st_mode):
            if fname in VCS_DIRS or not _recursive_basename_filter(fname):
                continue
            try:
                children = os.listdir(fullpath)
            except OSError as e:
                onerror(e)
                continue

            # Sort in reverse, since we pop from the end of the stack.
            # Include regular files in the stack, so files are sorted
            # together with directories.
            children.sort(reverse=True)
            stack.extend((fullpath, x) for x in children)

        elif stat.S_ISREG(st.st_mode):
            if _recursive_basename_filter(fname):
                yield fullpath
def grablines(myfilename, recursive=0, remember_source_file=False):
    """Return the lines of myfilename as a list (newlines preserved).

    @param recursive: treat myfilename as a directory and read every
        file found by _recursive_file_list()
    @param remember_source_file: return (line, source_file) tuples
        instead of bare lines
    @return: [] when the file does not exist (ENOENT/ESTALE); raises
        PermissionDenied on EACCES-style failures
    """
    mylines = []
    if recursive:
        for f in _recursive_file_list(myfilename):
            mylines.extend(grablines(f, recursive=False,
                remember_source_file=remember_source_file))

    else:
        try:
            with io.open(_unicode_encode(myfilename,
                encoding=_encodings['fs'], errors='strict'),
                mode='r', encoding=_encodings['content'], errors='replace') as myfile:
                if remember_source_file:
                    mylines = [(line, myfilename) for line in myfile.readlines()]
                else:
                    mylines = myfile.readlines()
        except IOError as e:
            if e.errno == PermissionDenied.errno:
                raise PermissionDenied(myfilename)
            elif e.errno in (errno.ENOENT, errno.ESTALE):
                pass
            else:
                raise
    return mylines
583
def writedict(mydict, myfilename, writekey=True):
    """Writes out a dict to a file; writekey=0 mode doesn't write out
    the key and assumes all values are strings, not lists."""
    if writekey:
        lines = ["%s %s\n" % (k, " ".join(v)) for k, v in mydict.items()]
    else:
        lines = [v + "\n" for v in mydict.values()]
    write_atomic(myfilename, "".join(lines))
595
def shlex_split(s):
    """
    Equivalent to shlex.split(), except that under Python 2 the input
    is temporarily encoded to bytes, since python2's shlex.split()
    doesn't handle unicode strings.
    """
    needs_encoding = sys.hexversion < 0x3000000 and not isinstance(s, bytes)
    if needs_encoding:
        s = _unicode_encode(s)
    result = shlex.split(s)
    if needs_encoding:
        result = [_unicode_decode(x) for x in result]
    return result
609
class _getconfig_shlex(shlex.shlex):
    """shlex.shlex subclass used by getconfig() that supports bash-like
    'source' statements with variable expansion in the sourced path."""

    def __init__(self, portage_tolerant=False, **kwargs):
        # portage_tolerant: when True, a missing sourced file is only
        # warned about instead of raising ParseError.
        shlex.shlex.__init__(self, **kwargs)
        self.__portage_tolerant = portage_tolerant

    def allow_sourcing(self, var_expand_map):
        # Enable the shlex 'source' keyword; var_expand_map supplies
        # variable values for expanding the sourced file name.
        self.source = portage._native_string("source")
        self.var_expand_map = var_expand_map

    def sourcehook(self, newfile):
        # Expand variables in the sourced path, then delegate to the
        # stock implementation.  Errors are translated per portage
        # conventions.
        try:
            newfile = varexpand(newfile, self.var_expand_map)
            return shlex.shlex.sourcehook(self, newfile)
        except EnvironmentError as e:
            if e.errno == PermissionDenied.errno:
                raise PermissionDenied(newfile)
            if e.errno not in (errno.ENOENT, errno.ENOTDIR):
                writemsg("open('%s', 'r'): %s\n" % (newfile, e), noiselevel=-1)
                raise

            msg = self.error_leader()
            if e.errno == errno.ENOTDIR:
                msg += _("%s: Not a directory") % newfile
            else:
                msg += _("%s: No such file or directory") % newfile

            if self.__portage_tolerant:
                writemsg("%s\n" % msg, noiselevel=-1)
            else:
                raise ParseError(msg)
            # Tolerant mode: hand shlex an empty stream so parsing
            # continues past the missing file.
            return (newfile, io.StringIO())
# Matches variable names that are invalid in shell assignments: a
# leading digit, or any non-word character anywhere in the name.
_invalid_var_name_re = re.compile(r'^\d|\W')
def getconfig(mycfg, tolerant=False, allow_sourcing=False, expand=True,
    recursive=False):
    """Parse a bash-like KEY=value configuration file into a dict.

    @param mycfg: path of the config file (or directory when recursive)
    @param tolerant: warn about syntax errors instead of raising
        ParseError
    @param allow_sourcing: honor bash-style 'source' statements
    @param expand: when True, expand variable references in values; a
        dict may be passed instead to seed the substitution map
    @param recursive: parse every file found under mycfg
    @return: dict of parsed keys, or None when mycfg does not exist
    """

    if isinstance(expand, dict):
        # Some existing variable definitions have been
        # passed in, for use in substitutions.
        expand_map = expand
        expand = True
    else:
        expand_map = {}
    mykeys = {}

    if recursive:
        # Emulate source commands so that syntax error messages
        # can display real file names and line numbers.
        if not expand:
            expand_map = False
        fname = None
        for fname in _recursive_file_list(mycfg):
            mykeys.update(getconfig(fname, tolerant=tolerant,
                allow_sourcing=allow_sourcing, expand=expand_map,
                recursive=False) or {})
        if fname is None:
            return None
        return mykeys

    f = None
    try:
        # NOTE: shlex doesn't support unicode objects with Python 2
        # (produces spurious \0 characters).
        if sys.hexversion < 0x3000000:
            f = open(_unicode_encode(mycfg,
                encoding=_encodings['fs'], errors='strict'), 'rb')
        else:
            f = open(_unicode_encode(mycfg,
                encoding=_encodings['fs'], errors='strict'), mode='r',
                encoding=_encodings['content'], errors='replace')
        content = f.read()
    except IOError as e:
        if e.errno == PermissionDenied.errno:
            raise PermissionDenied(mycfg)
        if e.errno != errno.ENOENT:
            writemsg("open('%s', 'r'): %s\n" % (mycfg, e), noiselevel=-1)
            if e.errno not in (errno.EISDIR,):
                raise
        return None
    finally:
        if f is not None:
            f.close()

    # Since this file has unicode_literals enabled, and Python 2's
    # shlex implementation does not support unicode, the following code
    # uses _native_string() to encode unicode literals when necessary.

    # Workaround for avoiding a silent error in shlex that is
    # triggered by a source statement at the end of the file
    # without a trailing newline after the source statement.
    if content and content[-1] != portage._native_string('\n'):
        content += portage._native_string('\n')

    # Warn about dos-style line endings since that prevents
    # people from being able to source them with bash.
    if portage._native_string('\r') in content:
        writemsg(("!!! " + _("Please use dos2unix to convert line endings " + \
            "in config file: '%s'") + "\n") % mycfg, noiselevel=-1)

    lex = None
    try:
        # The default shlex.sourcehook() implementation
        # only joins relative paths when the infile
        # attribute is properly set.
        lex = _getconfig_shlex(instream=content, infile=mycfg, posix=True,
            portage_tolerant=tolerant)
        lex.wordchars = portage._native_string(string.digits +
            string.ascii_letters + r"~!@#$%*_\:;?,./-+{}")
        lex.quotes = portage._native_string("\"'")
        if allow_sourcing:
            lex.allow_sourcing(expand_map)

        while True:
            key = _unicode_decode(lex.get_token())
            if key == "export":
                # Ignore a leading 'export' keyword.
                key = _unicode_decode(lex.get_token())
            if key is None:
                #normal end of file
                break

            equ = _unicode_decode(lex.get_token())
            if not equ:
                msg = lex.error_leader() + _("Unexpected EOF")
                if not tolerant:
                    raise ParseError(msg)
                else:
                    writemsg("%s\n" % msg, noiselevel=-1)
                    return mykeys

            elif equ != "=":
                msg = lex.error_leader() + \
                    _("Invalid token '%s' (not '=')") % (equ,)
                if not tolerant:
                    raise ParseError(msg)
                else:
                    writemsg("%s\n" % msg, noiselevel=-1)
                    return mykeys

            val = _unicode_decode(lex.get_token())
            if val is None:
                msg = lex.error_leader() + \
                    _("Unexpected end of config file: variable '%s'") % (key,)
                if not tolerant:
                    raise ParseError(msg)
                else:
                    writemsg("%s\n" % msg, noiselevel=-1)
                    return mykeys

            if _invalid_var_name_re.search(key) is not None:
                msg = lex.error_leader() + \
                    _("Invalid variable name '%s'") % (key,)
                if not tolerant:
                    raise ParseError(msg)
                writemsg("%s\n" % msg, noiselevel=-1)
                continue

            if expand:
                mykeys[key] = varexpand(val, mydict=expand_map,
                    error_leader=lex.error_leader)
                # Later assignments can reference earlier ones.
                expand_map[key] = mykeys[key]
            else:
                mykeys[key] = val
    except SystemExit as e:
        raise
    except Exception as e:
        if isinstance(e, ParseError) or lex is None:
            raise
        # Wrap unexpected errors with file/line context before re-raising.
        msg = "%s%s" % (lex.error_leader(), e)
        writemsg("%s\n" % msg, noiselevel=-1)
        raise

    return mykeys
# Characters allowed in a variable name during ${VAR}/$VAR expansion.
_varexpand_word_chars = frozenset(string.ascii_letters + string.digits + "_")
# Error text matching bash's message for an unterminated ${ expansion.
_varexpand_unexpected_eof_msg = "unexpected EOF while looking for matching `}'"
def varexpand(mystring, mydict=None, error_leader=None):
    """
    new variable expansion code.  Preserves quotes, handles \n, etc.
    This code is used by the configfile code, as well as others (parser)
    This would be a good bunch of code to port to C.

    Expands $VAR and ${VAR} references from mydict (unknown variables
    expand to nothing), converts unquoted newlines to spaces, and
    performs bash-style backslash removal for \\ and \$.  On a syntax
    error a message is written via writemsg() (prefixed by
    error_leader() when given) and "" is returned.
    """
    if mydict is None:
        mydict = {}

    numvars = 0
    # in single, double quotes
    insing = 0
    indoub = 0
    pos = 0
    length = len(mystring)
    newstring = []
    while pos < length:
        current = mystring[pos]
        if current == "'":
            if (indoub):
                newstring.append("'")
            else:
                newstring.append("'") # Quote removal is handled by shlex.
                insing=not insing
            pos += 1
            continue
        elif current == '"':
            if (insing):
                newstring.append('"')
            else:
                newstring.append('"') # Quote removal is handled by shlex.
                indoub=not indoub
            pos += 1
            continue
        if not insing:
            #expansion time
            if current == "\n":
                #convert newlines to spaces
                newstring.append(" ")
                pos += 1
            elif current == "\\":
                # For backslash expansion, this function used to behave like
                # echo -e, but that's not needed for our purposes. We want to
                # behave like bash does when expanding a variable assignment
                # in a sourced file, in which case it performs backslash
                # removal for \\ and \$ but nothing more. It also removes
                # escaped newline characters. Note that we don't handle
                # escaped quotes here, since getconfig() uses shlex
                # to handle that earlier.
                if pos + 1 >= len(mystring):
                    newstring.append(current)
                    break
                else:
                    current = mystring[pos + 1]
                    pos += 2
                    if current == "$":
                        newstring.append(current)
                    elif current == "\\":
                        newstring.append(current)
                        # BUG: This spot appears buggy, but it's intended to
                        # be bug-for-bug compatible with existing behavior.
                        if pos < length and \
                            mystring[pos] in ("'", '"', "$"):
                            newstring.append(mystring[pos])
                            pos += 1
                    elif current == "\n":
                        pass
                    else:
                        newstring.append(mystring[pos - 2:pos])
                    continue
            elif current == "$":
                pos += 1
                if pos == length:
                    # shells handle this like \$
                    newstring.append(current)
                    continue

                if mystring[pos] == "{":
                    pos += 1
                    if pos == length:
                        msg = _varexpand_unexpected_eof_msg
                        if error_leader is not None:
                            msg = error_leader() + msg
                        writemsg(msg + "\n", noiselevel=-1)
                        return ""

                    braced = True
                else:
                    braced = False
                myvstart = pos
                # Consume the variable name.
                while mystring[pos] in _varexpand_word_chars:
                    if pos + 1 >= len(mystring):
                        if braced:
                            msg = _varexpand_unexpected_eof_msg
                            if error_leader is not None:
                                msg = error_leader() + msg
                            writemsg(msg + "\n", noiselevel=-1)
                            return ""
                        else:
                            pos += 1
                            break
                    pos += 1
                myvarname = mystring[myvstart:pos]
                if braced:
                    if mystring[pos] != "}":
                        msg = _varexpand_unexpected_eof_msg
                        if error_leader is not None:
                            msg = error_leader() + msg
                        writemsg(msg + "\n", noiselevel=-1)
                        return ""
                    else:
                        pos += 1
                if len(myvarname) == 0:
                    msg = "$"
                    if braced:
                        msg += "{}"
                    msg += ": bad substitution"
                    if error_leader is not None:
                        msg = error_leader() + msg
                    writemsg(msg + "\n", noiselevel=-1)
                    return ""
                numvars += 1
                # Unknown variables expand to nothing, like bash.
                if myvarname in mydict:
                    newstring.append(mydict[myvarname])
            else:
                newstring.append(current)
                pos += 1
        else:
            newstring.append(current)
            pos += 1

    return "".join(newstring)
# pickle_write was broken and removed, but the name is retained so that
# legacy "from portage.util import pickle_write" imports still succeed.
pickle_write = None
def pickle_read(filename, default=None, debug=0):
    """Load a pickled object from filename.

    @param filename: path of the pickle file
    @param default: value returned when the file is unreadable or the
        pickle fails to load
    @param debug: unused, retained for backward compatibility
    @return: the unpickled object, or default on failure
    """
    if not os.access(filename, os.R_OK):
        writemsg(_("pickle_read(): File not readable. '") + filename + "'\n", 1)
        return default
    data = None
    try:
        # Use a context manager so the descriptor is closed even when
        # pickle loading raises (the original open/close pair leaked
        # the file on error, since the broad except swallowed it).
        with open(_unicode_encode(filename,
            encoding=_encodings['fs'], errors='strict'), 'rb') as myf:
            data = pickle.Unpickler(myf).load()
        writemsg(_("pickle_read(): Loaded pickle. '") + filename + "'\n", 1)
    except SystemExit:
        raise
    except Exception as e:
        writemsg(_("!!! Failed to load pickle: ") + str(e) + "\n", 1)
        data = default
    return data
943
def dump_traceback(msg, noiselevel=1):
    """Write msg followed by the active exception's traceback (or, if no
    exception is active, the current call stack) via writemsg()."""
    exc_info = sys.exc_info()
    if exc_info[2]:
        frames = traceback.extract_tb(exc_info[2])
        error = str(exc_info[1])
    else:
        # Drop the dump_traceback frame itself from the stack listing.
        frames = traceback.extract_stack()[:-1]
        error = None
    writemsg("\n====================================\n", noiselevel=noiselevel)
    writemsg("%s\n\n" % msg, noiselevel=noiselevel)
    for line in traceback.format_list(frames):
        writemsg(line, noiselevel=noiselevel)
    if error:
        writemsg(error + "\n", noiselevel=noiselevel)
    writemsg("====================================\n\n", noiselevel=noiselevel)
959
class cmp_sort_key(object):
    """
    Adapter turning an old-style cmp function into an object usable as
    the "key" keyword argument of list.sort()/sorted(), which in
    python-3.0 no longer accept a "cmp" argument.  Calling the adapter
    wraps each element in a key object whose __lt__ consults the cmp
    function.

    Beginning with Python 2.7 and 3.2, equivalent functionality is
    provided by functools.cmp_to_key().
    """
    __slots__ = ("_cmp_func",)

    def __init__(self, cmp_func):
        """
        @type cmp_func: callable which takes 2 positional arguments
        @param cmp_func: A cmp function.
        """
        self._cmp_func = cmp_func

    def __call__(self, lhs):
        # Wrap lhs so comparisons route through the cmp function.
        return self._cmp_key(self._cmp_func, lhs)

    class _cmp_key(object):
        __slots__ = ("_cmp_func", "_obj")

        def __init__(self, cmp_func, obj):
            self._cmp_func = cmp_func
            self._obj = obj

        def __lt__(self, other):
            # Refuse to compare against foreign key types.
            if self.__class__ is not other.__class__:
                raise TypeError("Expected type %s, got %s" %
                    (self.__class__, other.__class__))
            return self._cmp_func(self._obj, other._obj) < 0
def unique_array(s):
    """lifted from python cookbook, credit: Tim Peters
    Return a list of the elements in s in arbitrary order, sans duplicates"""
    n = len(s)
    # Fast path: hashable elements make deduplication linear via a set.
    try:
        return list(set(s))
    except TypeError:
        pass

    # Elements aren't hashable; try sorting so duplicates are adjacent.
    try:
        t = sorted(s)
    except TypeError:
        pass
    else:
        assert n > 0
        last = t[0]
        lasti = i = 1
        while i < n:
            if t[i] != last:
                t[lasti] = last = t[i]
                lasti += 1
            i += 1
        return t[:lasti]

    # Not sortable either; fall back to quadratic membership tests.
    u = []
    for x in s:
        if x not in u:
            u.append(x)
    return u
1030
def unique_everseen(iterable, key=None):
	"""
	Yield unique elements of iterable in order of first appearance,
	remembering every element ever seen.  When key is given, uniqueness
	is judged on key(element) while the original element is yielded.
	Adapted from the itertools documentation recipe.
	"""
	# unique_everseen('AAAABBBCCDAABBB') --> A B C D
	# unique_everseen('ABBCcAD', str.lower) --> A B C D
	seen = set()
	if key is None:
		for element in iterable:
			if element not in seen:
				seen.add(element)
				yield element
	else:
		for element in iterable:
			marker = key(element)
			if marker not in seen:
				seen.add(marker)
				yield element
1050
1051 -def _do_stat(filename, follow_links=True):
1052 try: 1053 if follow_links: 1054 return os.stat(filename) 1055 else: 1056 return os.lstat(filename) 1057 except OSError as oe: 1058 func_call = "stat('%s')" % filename 1059 if oe.errno == errno.EPERM: 1060 raise OperationNotPermitted(func_call) 1061 elif oe.errno == errno.EACCES: 1062 raise PermissionDenied(func_call) 1063 elif oe.errno == errno.ENOENT: 1064 raise FileNotFound(filename) 1065 else: 1066 raise
1067
def apply_permissions(filename, uid=-1, gid=-1, mode=-1, mask=-1,
	stat_cached=None, follow_links=True):
	"""Apply user, group, and mode bits to a file if the existing bits do not
	already match.  The default behavior is to force an exact match of mode
	bits.  When mask=0 is specified, mode bits on the target file are allowed
	to be a superset of the mode argument (via logical OR).  When mask>0, the
	mode bits that the target file is allowed to have are restricted via
	logical XOR.
	Returns True if the permissions were modified and False otherwise.

	@param uid: target owner uid, or -1 to leave ownership alone
	@param gid: target group gid, or -1 to leave group alone
	@param mode: target mode bits, or -1 for unspecified
	@param mask: see docstring above; -1 means "exact mode match" behavior
	@param stat_cached: optional pre-fetched stat result for filename
	@param follow_links: when False, operate on symlinks themselves
	@raise OperationNotPermitted: on EPERM from chown/chmod
	@raise PermissionDenied: on EACCES
	@raise ReadOnlyFileSystem: on EROFS
	@raise FileNotFound: on ENOENT
	"""

	modified = False

	# Since Python 3.4, chown requires int type (no proxies).
	uid = int(uid)
	gid = int(gid)

	if stat_cached is None:
		stat_cached = _do_stat(filename, follow_links=follow_links)

	# Only chown when the requested uid/gid actually differs from the
	# current ownership.
	if	(uid != -1 and uid != stat_cached.st_uid) or \
		(gid != -1 and gid != stat_cached.st_gid):
		try:
			if follow_links:
				os.chown(filename, uid, gid)
			else:
				portage.data.lchown(filename, uid, gid)
			modified = True
		except OSError as oe:
			func_call = "chown('%s', %i, %i)" % (filename, uid, gid)
			if oe.errno == errno.EPERM:
				raise OperationNotPermitted(func_call)
			elif oe.errno == errno.EACCES:
				raise PermissionDenied(func_call)
			elif oe.errno == errno.EROFS:
				raise ReadOnlyFileSystem(func_call)
			elif oe.errno == errno.ENOENT:
				raise FileNotFound(filename)
			else:
				raise

	# new_mode == -1 means "no chmod needed".
	new_mode = -1
	st_mode = stat_cached.st_mode & 0o7777 # protect from unwanted bits
	if mask >= 0:
		if mode == -1:
			mode = 0 # Don't add any mode bits when mode is unspecified.
		else:
			mode = mode & 0o7777
		# chmod only if the file lacks requested bits (mode) or carries
		# bits that the mask forbids.
		if	(mode & st_mode != mode) or \
			((mask ^ st_mode) & st_mode != st_mode):
			new_mode = mode | st_mode
			new_mode = (mask ^ new_mode) & new_mode
	elif mode != -1:
		mode = mode & 0o7777 # protect from unwanted bits
		if mode != st_mode:
			new_mode = mode

	# The chown system call may clear S_ISUID and S_ISGID
	# bits, so those bits are restored if necessary.
	if modified and new_mode == -1 and \
		(st_mode & stat.S_ISUID or st_mode & stat.S_ISGID):
		if mode == -1:
			# No mode was requested: restore the original mode verbatim.
			new_mode = st_mode
		else:
			mode = mode & 0o7777
			if mask >= 0:
				new_mode = mode | st_mode
				new_mode = (mask ^ new_mode) & new_mode
			else:
				new_mode = mode
			# If the recomputed mode no longer carries setuid/setgid
			# there is nothing to restore after all.
			if not (new_mode & stat.S_ISUID or new_mode & stat.S_ISGID):
				new_mode = -1

	if not follow_links and stat.S_ISLNK(stat_cached.st_mode):
		# Mode doesn't matter for symlinks.
		new_mode = -1

	if new_mode != -1:
		try:
			os.chmod(filename, new_mode)
			modified = True
		except OSError as oe:
			func_call = "chmod('%s', %s)" % (filename, oct(new_mode))
			if oe.errno == errno.EPERM:
				raise OperationNotPermitted(func_call)
			elif oe.errno == errno.EACCES:
				raise PermissionDenied(func_call)
			elif oe.errno == errno.EROFS:
				raise ReadOnlyFileSystem(func_call)
			elif oe.errno == errno.ENOENT:
				raise FileNotFound(filename)
			# re-raise any other errno unchanged
			raise
	return modified
1160
def apply_stat_permissions(filename, newstat, **kwargs):
	"""Convenience wrapper around apply_secpass_permissions that takes the
	target uid, gid and mode from a stat result object."""
	st = newstat
	return apply_secpass_permissions(filename, uid=st.st_uid,
		gid=st.st_gid, mode=st.st_mode, **kwargs)
1166
def apply_recursive_permissions(top, uid=-1, gid=-1,
	dirmode=-1, dirmask=-1, filemode=-1, filemask=-1, onerror=None):
	"""A wrapper around apply_secpass_permissions that applies permissions
	recursively.  If optional argument onerror is specified, it should be a
	function; it will be called with one argument, a PortageException instance.
	Returns True if all permissions are applied and False if some are left
	unapplied.

	@param top: root of the tree to chown/chmod
	@param dirmode/dirmask: mode/mask applied to directories
	@param filemode/filemask: mode/mask applied to regular files
	"""

	# Avoid issues with circular symbolic links, as in bug #339670.
	follow_links = False

	if onerror is None:
		# Default behavior is to dump errors to stderr so they won't
		# go unnoticed.  Callers can pass in a quiet instance.
		def onerror(e):
			if isinstance(e, OperationNotPermitted):
				writemsg(_("Operation Not Permitted: %s\n") % str(e),
					noiselevel=-1)
			elif isinstance(e, FileNotFound):
				writemsg(_("File Not Found: '%s'\n") % str(e), noiselevel=-1)
			else:
				raise

	# For bug 554084, always apply permissions to a directory before
	# that directory is traversed.
	all_applied = True

	try:
		stat_cached = _do_stat(top, follow_links=follow_links)
	except FileNotFound:
		# backward compatibility
		return True

	# Pick dir vs. file mode/mask for the top node itself.
	if stat.S_ISDIR(stat_cached.st_mode):
		mode = dirmode
		mask = dirmask
	else:
		mode = filemode
		mask = filemask

	# Apply to top first (see bug 554084 note above) before walking into it.
	try:
		applied = apply_secpass_permissions(top,
			uid=uid, gid=gid, mode=mode, mask=mask,
			stat_cached=stat_cached, follow_links=follow_links)
		if not applied:
			all_applied = False
	except PortageException as e:
		all_applied = False
		onerror(e)

	for dirpath, dirnames, filenames in os.walk(top):
		# Handle files and subdirectories of dirpath in one pass, pairing
		# each name with the appropriate (mode, mask) for its type.
		for name, mode, mask in chain(
			((x, filemode, filemask) for x in filenames),
			((x, dirmode, dirmask) for x in dirnames)):
			try:
				applied = apply_secpass_permissions(os.path.join(dirpath, name),
					uid=uid, gid=gid, mode=mode, mask=mask,
					follow_links=follow_links)
				if not applied:
					all_applied = False
			except PortageException as e:
				# Ignore InvalidLocation exceptions such as FileNotFound
				# and DirectoryNotFound since sometimes things disappear,
				# like when adjusting permissions on DISTCC_DIR.
				if not isinstance(e, portage.exception.InvalidLocation):
					all_applied = False
					onerror(e)
	return all_applied
def apply_secpass_permissions(filename, uid=-1, gid=-1, mode=-1, mask=-1,
	stat_cached=None, follow_links=True):
	"""A wrapper around apply_permissions that uses secpass and simple
	logic to apply as much of the permissions as possible without
	generating an obviously avoidable permission exception. Despite
	attempts to avoid an exception, it's possible that one will be raised
	anyway, so be prepared.
	Returns True if all permissions are applied and False if some are left
	unapplied."""

	if stat_cached is None:
		stat_cached = _do_stat(filename, follow_links=follow_links)

	all_applied = True

	wants_owner_change = uid != -1 or gid != -1
	# Avoid accessing portage.data.secpass when possible, since
	# it triggers config loading (undesirable for chmod-lite).
	if wants_owner_change and portage.data.secpass < 2:

		# Running without full privileges: silently drop ownership
		# changes that would be refused by the kernel, recording that
		# not everything could be applied.
		if uid not in (-1, stat_cached.st_uid):
			all_applied = False
			uid = -1

		if gid != -1 and \
			gid != stat_cached.st_gid and \
			gid not in os.getgroups():
			all_applied = False
			gid = -1

	apply_permissions(filename, uid=uid, gid=gid, mode=mode, mask=mask,
		stat_cached=stat_cached, follow_links=follow_links)
	return all_applied
1269
class atomic_ofstream(ObjectProxy):
	"""Write a file atomically via os.rename().  Atomic replacement prevents
	interprocess interference and prevents corruption of the target
	file when the write is interrupted (for example, when an 'out of space'
	error occurs).

	Implemented as an ObjectProxy wrapping the underlying temp-file object;
	instance state is stored via object.__setattr__ so that attribute access
	proxies through to the wrapped file (see __getattribute__ below).
	"""

	def __init__(self, filename, mode='w', follow_links=True, **kargs):
		"""Opens a temporary filename.pid in the same directory as filename."""
		ObjectProxy.__init__(self)
		object.__setattr__(self, '_aborted', False)
		if 'b' in mode:
			open_func = open
		else:
			# Text mode: default to the configured content encoding.
			open_func = io.open
			kargs.setdefault('encoding', _encodings['content'])
			kargs.setdefault('errors', 'backslashreplace')

		if follow_links:
			# Write the temp file next to the symlink target so that
			# os.rename() in close() stays on the same filesystem.
			canonical_path = os.path.realpath(filename)
			object.__setattr__(self, '_real_name', canonical_path)
			tmp_name = "%s.%i" % (canonical_path, os.getpid())
			try:
				object.__setattr__(self, '_file',
					open_func(_unicode_encode(tmp_name,
						encoding=_encodings['fs'], errors='strict'),
						mode=mode, **kargs))
				return
			except IOError as e:
				if canonical_path == filename:
					raise
				# Ignore this error, since it's irrelevant
				# and the below open call will produce a
				# new error if necessary.

		# Fallback (or follow_links=False): open the temp file next to
		# filename itself.
		object.__setattr__(self, '_real_name', filename)
		tmp_name = "%s.%i" % (filename, os.getpid())
		object.__setattr__(self, '_file',
			open_func(_unicode_encode(tmp_name,
				encoding=_encodings['fs'], errors='strict'),
				mode=mode, **kargs))

	def _get_target(self):
		# ObjectProxy hook: the proxied object is the open temp file.
		return object.__getattribute__(self, '_file')

	if sys.hexversion >= 0x3000000:

		def __getattribute__(self, attr):
			# Only close/abort/__del__ belong to the proxy itself;
			# everything else is delegated to the wrapped file object.
			if attr in ('close', 'abort', '__del__'):
				return object.__getattribute__(self, attr)
			return getattr(object.__getattribute__(self, '_file'), attr)

	else:

		# For TextIOWrapper, automatically coerce write calls to
		# unicode, in order to avoid TypeError when writing raw
		# bytes with python2.

		def __getattribute__(self, attr):
			# Same delegation as above, but 'write' is also intercepted
			# (see write() below).
			if attr in ('close', 'abort', 'write', '__del__'):
				return object.__getattribute__(self, attr)
			return getattr(object.__getattribute__(self, '_file'), attr)

		def write(self, s):
			f = object.__getattribute__(self, '_file')
			if isinstance(f, io.TextIOWrapper):
				s = _unicode_decode(s)
			return f.write(s)

	def close(self):
		"""Closes the temporary file, copies permissions (if possible),
		and performs the atomic replacement via os.rename().  If the abort()
		method has been called, then the temp file is closed and removed."""
		f = object.__getattribute__(self, '_file')
		real_name = object.__getattribute__(self, '_real_name')
		if not f.closed:
			try:
				f.close()
				if not object.__getattribute__(self, '_aborted'):
					try:
						# Preserve the existing target's ownership/mode
						# on the replacement file, when possible.
						apply_stat_permissions(f.name, os.stat(real_name))
					except OperationNotPermitted:
						pass
					except FileNotFound:
						pass
					except OSError as oe: # from the above os.stat call
						if oe.errno in (errno.ENOENT, errno.EPERM):
							pass
						else:
							raise
					os.rename(f.name, real_name)
			finally:
				# Make sure we cleanup the temp file
				# even if an exception is raised.
				try:
					os.unlink(f.name)
				except OSError as oe:
					pass

	def abort(self):
		"""If an error occurs while writing the file, the user should
		call this method in order to leave the target file unchanged.
		This will call close() automatically."""
		if not object.__getattribute__(self, '_aborted'):
			object.__setattr__(self, '_aborted', True)
			self.close()

	def __del__(self):
		"""If the user does not explicitly call close(), it is
		assumed that an error has occurred, so we abort()."""
		try:
			f = object.__getattribute__(self, '_file')
		except AttributeError:
			# __init__ failed before _file was set; nothing to clean up.
			pass
		else:
			if not f.closed:
				self.abort()
		# ensure destructor from the base class is called
		base_destructor = getattr(ObjectProxy, '__del__', None)
		if base_destructor is not None:
			base_destructor(self)
1390
def write_atomic(file_path, content, **kwargs):
	"""Atomically replace file_path with content via atomic_ofstream,
	translating common IOError/OSError errno values into portage
	exception types.  On any such error the partially written temp
	file is discarded via abort()."""
	stream = None
	try:
		stream = atomic_ofstream(file_path, **kwargs)
		stream.write(content)
		stream.close()
	except (IOError, OSError) as e:
		if stream:
			stream.abort()
		func_call = "write_atomic('%s')" % file_path
		if e.errno == errno.EPERM:
			raise OperationNotPermitted(func_call)
		if e.errno == errno.EACCES:
			raise PermissionDenied(func_call)
		if e.errno == errno.EROFS:
			raise ReadOnlyFileSystem(func_call)
		if e.errno == errno.ENOENT:
			raise FileNotFound(file_path)
		raise
1411
def ensure_dirs(dir_path, **kwargs):
	"""Create a directory and call apply_permissions.
	Returns True if a directory is created or the permissions needed to be
	modified, and False otherwise.

	This function's handling of EEXIST errors makes it useful for atomic
	directory creation, in which multiple processes may be competing to
	create the same directory.

	@param kwargs: forwarded to apply_permissions (uid, gid, mode, ...)
	"""

	created_dir = False

	try:
		os.makedirs(dir_path)
		created_dir = True
	except OSError as oe:
		func_call = "makedirs('%s')" % dir_path
		if oe.errno in (errno.EEXIST,):
			pass
		elif os.path.isdir(dir_path):
			# NOTE: DragonFly raises EPERM for makedir('/')
			# and that is supposed to be ignored here.
			# Also, sometimes mkdir raises EISDIR on FreeBSD
			# and we want to ignore that too (bug #187518).
			pass
		elif oe.errno == errno.EPERM:
			raise OperationNotPermitted(func_call)
		elif oe.errno == errno.EACCES:
			raise PermissionDenied(func_call)
		elif oe.errno == errno.EROFS:
			raise ReadOnlyFileSystem(func_call)
		else:
			raise

	perms_modified = apply_permissions(dir_path, **kwargs) if kwargs else False
	return created_dir or perms_modified
1451
class LazyItemsDict(UserDict):
	"""A mapping object that behaves like a standard dict except that it allows
	for lazy initialization of values via callable objects.  Lazy items can be
	overwritten and deleted just as normal items."""

	# Lazy keys are mirrored into the underlying dict with a None
	# placeholder so iteration/len/keys() see them; the real value is
	# produced on __getitem__ from the corresponding _LazyItem.
	__slots__ = ('lazy_items',)

	def __init__(self, *args, **kwargs):

		self.lazy_items = {}
		UserDict.__init__(self, *args, **kwargs)

	def addLazyItem(self, item_key, value_callable, *pargs, **kwargs):
		"""Add a lazy item for the given key.  When the item is requested,
		value_callable will be called with *pargs and **kwargs arguments."""
		self.lazy_items[item_key] = \
			self._LazyItem(value_callable, pargs, kwargs, False)
		# make it show up in self.keys(), etc...
		UserDict.__setitem__(self, item_key, None)

	def addLazySingleton(self, item_key, value_callable, *pargs, **kwargs):
		"""This is like addLazyItem except value_callable will only be called
		a maximum of 1 time and the result will be cached for future requests."""
		self.lazy_items[item_key] = \
			self._LazyItem(value_callable, pargs, kwargs, True)
		# make it show up in self.keys(), etc...
		UserDict.__setitem__(self, item_key, None)

	def update(self, *args, **kwargs):
		"""dict.update() work-alike that additionally carries over lazy
		items (unevaluated) when the source is another LazyItemsDict."""
		if len(args) > 1:
			raise TypeError(
				"expected at most 1 positional argument, got " + \
				repr(len(args)))
		if args:
			map_obj = args[0]
		else:
			map_obj = None
		if map_obj is None:
			pass
		elif isinstance(map_obj, LazyItemsDict):
			for k in map_obj:
				if k in map_obj.lazy_items:
					# Copy the placeholder; the lazy callable itself is
					# transferred below via lazy_items.update().
					UserDict.__setitem__(self, k, None)
				else:
					UserDict.__setitem__(self, k, map_obj[k])
			self.lazy_items.update(map_obj.lazy_items)
		else:
			UserDict.update(self, map_obj)
		if kwargs:
			UserDict.update(self, kwargs)

	def __getitem__(self, item_key):
		"""Evaluate and return a lazy item on access; singleton results
		are cached by storing them as regular items."""
		if item_key in self.lazy_items:
			lazy_item = self.lazy_items[item_key]
			pargs = lazy_item.pargs
			if pargs is None:
				pargs = ()
			kwargs = lazy_item.kwargs
			if kwargs is None:
				kwargs = {}
			result = lazy_item.func(*pargs, **kwargs)
			if lazy_item.singleton:
				# __setitem__ also removes the lazy_items entry, so the
				# callable will never be invoked again for this key.
				self[item_key] = result
			return result

		else:
			return UserDict.__getitem__(self, item_key)

	def __setitem__(self, item_key, value):
		# A direct assignment overrides (and discards) any lazy item.
		if item_key in self.lazy_items:
			del self.lazy_items[item_key]
		UserDict.__setitem__(self, item_key, value)

	def __delitem__(self, item_key):
		if item_key in self.lazy_items:
			del self.lazy_items[item_key]
		UserDict.__delitem__(self, item_key)

	def clear(self):
		self.lazy_items.clear()
		UserDict.clear(self)

	def copy(self):
		return self.__copy__()

	def __copy__(self):
		# Shallow copy via the constructor, which routes through update()
		# and therefore preserves lazy items unevaluated.
		return self.__class__(self)

	def __deepcopy__(self, memo=None):
		"""
		This forces evaluation of each contained lazy item, and deepcopy of
		the result.  A TypeError is raised if any contained lazy item is not
		a singleton, since it is not necessarily possible for the behavior
		of this type of item to be safely preserved.
		"""
		if memo is None:
			memo = {}
		result = self.__class__()
		memo[id(self)] = result
		for k in self:
			k_copy = deepcopy(k, memo)
			lazy_item = self.lazy_items.get(k)
			if lazy_item is not None:
				if not lazy_item.singleton:
					raise TypeError("LazyItemsDict " + \
						"deepcopy is unsafe with lazy items that are " + \
						"not singletons: key=%s value=%s" % (k, lazy_item,))
			# self[k] forces evaluation of singleton lazy items here.
			UserDict.__setitem__(result, k_copy, deepcopy(self[k], memo))
		return result

	class _LazyItem(object):
		"""Record describing one lazy value: the callable, its arguments,
		and whether the result should be cached (singleton)."""

		__slots__ = ('func', 'pargs', 'kwargs', 'singleton')

		def __init__(self, func, pargs, kwargs, singleton):

			# Normalize empty argument containers to None to save space.
			if not pargs:
				pargs = None
			if not kwargs:
				kwargs = None

			self.func = func
			self.pargs = pargs
			self.kwargs = kwargs
			self.singleton = singleton

		def __copy__(self):
			return self.__class__(self.func, self.pargs,
				self.kwargs, self.singleton)

		def __deepcopy__(self, memo=None):
			"""
			Override this since the default implementation can fail silently,
			leaving some attributes unset.
			"""
			if memo is None:
				memo = {}
			result = self.__copy__()
			memo[id(self)] = result
			result.func = deepcopy(self.func, memo)
			result.pargs = deepcopy(self.pargs, memo)
			result.kwargs = deepcopy(self.kwargs, memo)
			result.singleton = deepcopy(self.singleton, memo)
			return result
1596
class ConfigProtect(object):
	"""Tracks the CONFIG_PROTECT and CONFIG_PROTECT_MASK path sets for a
	given root and answers isprotected() queries against them."""

	def __init__(self, myroot, protect_list, mask_list,
		case_insensitive=False):
		"""
		@param myroot: root directory that protect/mask entries are joined to
		@param protect_list: CONFIG_PROTECT entries (paths relative to root)
		@param mask_list: CONFIG_PROTECT_MASK entries
		@param case_insensitive: compare paths case-insensitively
		"""
		self.myroot = myroot
		self.protect_list = protect_list
		self.mask_list = mask_list
		self.case_insensitive = case_insensitive
		self.updateprotect()

	def updateprotect(self):
		"""Update internal state for isprotected() calls.  Nonexistent paths
		are ignored."""

		os = _os_merge

		self.protect = []
		self._dirs = set()
		for x in self.protect_list:
			ppath = normalize_path(
				os.path.join(self.myroot, x.lstrip(os.path.sep)))
			# Fix: store protect entries lower-cased when case_insensitive,
			# matching both the mask-list handling below and isprotected(),
			# which lower-cases the queried path before comparison.
			# Previously un-lowered entries could never match.
			if self.case_insensitive:
				ppath = ppath.lower()
			# Protect files that don't exist (bug #523684). If the
			# parent directory doesn't exist, we can safely skip it.
			if os.path.isdir(os.path.dirname(ppath)):
				self.protect.append(ppath)
			try:
				if stat.S_ISDIR(os.stat(ppath).st_mode):
					self._dirs.add(ppath)
			except OSError:
				pass

		self.protectmask = []
		for x in self.mask_list:
			ppath = normalize_path(
				os.path.join(self.myroot, x.lstrip(os.path.sep)))
			if self.case_insensitive:
				ppath = ppath.lower()
			try:
				"""Use lstat so that anything, even a broken symlink can be
				protected."""
				if stat.S_ISDIR(os.lstat(ppath).st_mode):
					self._dirs.add(ppath)
				self.protectmask.append(ppath)
				"""Now use stat in case this is a symlink to a directory."""
				if stat.S_ISDIR(os.stat(ppath).st_mode):
					self._dirs.add(ppath)
			except OSError:
				# If it doesn't exist, there's no need to mask it.
				pass

	def isprotected(self, obj):
		"""Returns True if obj is protected, False otherwise. The caller must
		ensure that obj is normalized with a single leading slash. A trailing
		slash is optional for directories."""
		masked = 0
		protected = 0
		sep = os.path.sep
		if self.case_insensitive:
			obj = obj.lower()
		for ppath in self.protect:
			if len(ppath) > masked and obj.startswith(ppath):
				if ppath in self._dirs:
					if obj != ppath and not obj.startswith(ppath + sep):
						# /etc/foo does not match /etc/foobaz
						continue
				elif obj != ppath:
					# force exact match when CONFIG_PROTECT lists a
					# non-directory
					continue
				protected = len(ppath)
		#config file management
		for pmpath in self.protectmask:
			if len(pmpath) >= protected and obj.startswith(pmpath):
				if pmpath in self._dirs:
					if obj != pmpath and \
						not obj.startswith(pmpath + sep):
						# /etc/foo does not match /etc/foobaz
						continue
				elif obj != pmpath:
					# force exact match when CONFIG_PROTECT_MASK lists
					# a non-directory
					continue
				#skip, it's in the mask
				masked = len(pmpath)
		# Protected only when the longest matching protect entry is more
		# specific than the longest matching mask entry.
		return protected > masked
1681
def new_protect_filename(mydest, newmd5=None, force=False):
	"""Resolves a config-protect filename for merging, optionally
	using the last filename if the md5 matches. If force is True,
	then a new filename will be generated even if mydest does not
	exist yet.
	(dest,md5) ==> 'string'            --- path_to_target_filename
	(dest)     ==> ('next', 'highest') --- next_target and most-recent_target
	"""

	# config protection filename format:
	# ._cfg0000_foo
	# 0123456789012

	os = _os_merge

	prot_num = -1
	last_pfile = ""

	if not force and \
		not os.path.exists(mydest):
		return mydest

	real_filename = os.path.basename(mydest)
	real_dirname = os.path.dirname(mydest)
	# Find the highest-numbered existing ._cfgNNNN_ file for this name.
	for pfile in os.listdir(real_dirname):
		if pfile[0:5] != "._cfg":
			continue
		if pfile[10:] != real_filename:
			continue
		try:
			new_prot_num = int(pfile[5:9])
			if new_prot_num > prot_num:
				prot_num = new_prot_num
				last_pfile = pfile
		except ValueError:
			continue
	prot_num = prot_num + 1

	new_pfile = normalize_path(os.path.join(real_dirname,
		"._cfg" + str(prot_num).zfill(4) + "_" + real_filename))
	old_pfile = normalize_path(os.path.join(real_dirname, last_pfile))
	if last_pfile and newmd5:
		try:
			old_pfile_st = os.lstat(old_pfile)
		except OSError as e:
			if e.errno != errno.ENOENT:
				raise
		else:
			if stat.S_ISLNK(old_pfile_st.st_mode):
				try:
					# Read symlink target as bytes, in case the
					# target path has a bad encoding.
					pfile_link = os.readlink(_unicode_encode(old_pfile,
						encoding=_encodings['merge'], errors='strict'))
				except OSError as e:
					# Bug fix: bind the exception here.  Previously this
					# handler referenced the outer `e`, which Python 3
					# deletes at the end of its except block, so any
					# readlink failure raised NameError instead of being
					# handled.
					if e.errno != errno.ENOENT:
						raise
				else:
					pfile_link = _unicode_decode(pfile_link,
						encoding=_encodings['merge'], errors='replace')
					if pfile_link == newmd5:
						# Reuse the existing protect file for an
						# identical symlink target.
						return old_pfile
			else:
				try:
					last_pfile_md5 = \
						portage.checksum._perform_md5_merge(old_pfile)
				except FileNotFound:
					# The file suddenly disappeared or it's a
					# broken symlink.
					pass
				else:
					if last_pfile_md5 == newmd5:
						# Reuse the existing protect file for
						# identical content.
						return old_pfile
	return new_pfile
1756
def find_updated_config_files(target_root, config_protect):
	"""
	Generator yielding, for each CONFIG_PROTECT entry that has pending
	updates, a tuple organized like this:
	(protected_dir, file_list)
	If the protected config isn't a protected_dir but a protected_file,
	the tuple is:
	(protected_file, None)
	Entries with no pending configuration updates are skipped.
	"""

	encoding = _encodings['fs']

	if config_protect:
		# directories with some protect files in them
		for x in config_protect:
			files = []

			x = os.path.join(target_root, x.lstrip(os.path.sep))
			# Skip entries we could not update anyway.
			if not os.access(x, os.W_OK):
				continue
			try:
				mymode = os.lstat(x).st_mode
			except OSError:
				continue

			if stat.S_ISLNK(mymode):
				# We want to treat it like a directory if it
				# is a symlink to an existing directory.
				try:
					real_mode = os.stat(x).st_mode
					if stat.S_ISDIR(real_mode):
						mymode = real_mode
				except OSError:
					pass

			# Use find(1) to locate ._cfgNNNN_* update candidates,
			# pruning hidden directories, and NUL-terminating results
			# so that arbitrary filenames are handled safely.
			if stat.S_ISDIR(mymode):
				mycommand = \
					"find '%s' -name '.*' -type d -prune -o -name '._cfg????_*'" % x
			else:
				mycommand = "find '%s' -maxdepth 1 -name '._cfg????_%s'" % \
					os.path.split(x.rstrip(os.path.sep))
			mycommand += " ! -name '.*~' ! -iname '.*.bak' -print0"
			cmd = shlex_split(mycommand)

			if sys.hexversion < 0x3020000 and sys.hexversion >= 0x3000000:
				# Python 3.1 _execvp throws TypeError for non-absolute executable
				# path passed as bytes (see https://bugs.python.org/issue8513).
				fullname = portage.process.find_binary(cmd[0])
				if fullname is None:
					raise portage.exception.CommandNotFound(cmd[0])
				cmd[0] = fullname

			# Encode arguments for the filesystem before exec'ing find.
			cmd = [_unicode_encode(arg, encoding=encoding, errors='strict')
				for arg in cmd]
			proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
				stderr=subprocess.STDOUT)
			output = _unicode_decode(proc.communicate()[0], encoding=encoding)
			status = proc.wait()
			if os.WIFEXITED(status) and os.WEXITSTATUS(status) == os.EX_OK:
				files = output.split('\0')
				# split always produces an empty string as the last element
				if files and not files[-1]:
					del files[-1]
				if files:
					if stat.S_ISDIR(mymode):
						yield (x, files)
					else:
						yield (x, None)
# Matches "include <pattern>" directives in ld.so.conf files;
# group 1 captures the (possibly glob) pattern to include.
_ld_so_include_re = re.compile(r'^include\s+(\S.*)')
def getlibpaths(root, env=None):
	"""Return a list of paths that are used for library lookups.

	Combines LD_LIBRARY_PATH from env (or os.environ), the contents of
	<root>/etc/ld.so.conf (with "include" directives expanded), and the
	standard /usr/lib and /lib fallbacks, normalizing each path and
	dropping empty entries.

	Note: this string was previously placed after the nested function
	definition below, where it was a no-op expression rather than the
	function's docstring; it has been moved here so getlibpaths.__doc__
	is set.
	"""
	def read_ld_so_conf(path):
		# Yield each configured path, recursively expanding any
		# "include <glob>" directives relative to the including file.
		for l in grabfile(path):
			include_match = _ld_so_include_re.match(l)
			if include_match is not None:
				subpath = os.path.join(os.path.dirname(path),
					include_match.group(1))
				for p in glob.glob(subpath):
					for r in read_ld_so_conf(p):
						yield r
			else:
				yield l

	if env is None:
		env = os.environ
	# the following is based on the information from ld.so(8)
	rval = env.get("LD_LIBRARY_PATH", "").split(":")
	rval.extend(read_ld_so_conf(os.path.join(root, "etc", "ld.so.conf")))
	rval.append("/usr/lib")
	rval.append("/lib")

	return [normalize_path(x) for x in rval if x]