From: Fabian Groffen <grobian@gentoo.org>
To: gentoo-commits@lists.gentoo.org
Reply-To: gentoo-dev@lists.gentoo.org, Fabian Groffen <grobian@gentoo.org>
Message-ID: <5f826caddbc8b4eb68e2c535d47b2025ed3d2e32.grobian@gentoo>
Subject: [gentoo-commits] proj/portage:prefix commit in: pym/portage/util/_dyn_libs/
X-VCS-Repository: proj/portage
X-VCS-Files: pym/portage/util/_dyn_libs/LinkageMapMachO.py
  pym/portage/util/_dyn_libs/LinkageMapPeCoff.py
  pym/portage/util/_dyn_libs/LinkageMapXCoff.py
X-VCS-Directories: pym/portage/util/_dyn_libs/
X-VCS-Committer: grobian
X-VCS-Committer-Name: Fabian Groffen
X-VCS-Revision: 5f826caddbc8b4eb68e2c535d47b2025ed3d2e32
Date: Sat, 14 May 2011 13:59:04 +0000 (UTC)

commit:     5f826caddbc8b4eb68e2c535d47b2025ed3d2e32
Author:     Fabian Groffen <grobian@gentoo.org>
AuthorDate: Sat May 14 13:56:32 2011 +0000
Commit:     Fabian Groffen <grobian@gentoo.org>
CommitDate: Sat May 14 13:56:32 2011 +0000
URL:        http://git.overlays.gentoo.org/gitweb/?p=proj/portage.git;a=commit;h=5f826cad

LinkageMap*: align with LinkageMapELF

Prefix' non-ELF LinkageMaps have diverged from the ELF map over time a bit.
Manually vimdiffed all against the ELF map.
---
 pym/portage/util/_dyn_libs/LinkageMapMachO.py  | 121 ++++++++++++-----
 pym/portage/util/_dyn_libs/LinkageMapPeCoff.py | 128 +++++++++++++++-----
 pym/portage/util/_dyn_libs/LinkageMapXCoff.py  | 170 ++++++++++++++---------
 3 files changed, 295 insertions(+), 124 deletions(-)

diff --git a/pym/portage/util/_dyn_libs/LinkageMapMachO.py b/pym/portage/util/_dyn_libs/LinkageMapMachO.py
index f03215a..cbdf6c2 100644
--- a/pym/portage/util/_dyn_libs/LinkageMapMachO.py
+++ b/pym/portage/util/_dyn_libs/LinkageMapMachO.py
@@ -1,4 +1,4 @@
-# Copyright 1998-2010 Gentoo Foundation
+# Copyright 1998-2011 Gentoo Foundation
 # Distributed under the terms of the GNU General Public License v2

 import errno
@@ -29,7 +29,7 @@ class LinkageMapMachO(object):

 	def __init__(self, vardbapi):
 		self._dbapi = vardbapi
-		self._root = self._dbapi.root
+		self._root = self._dbapi.settings['ROOT']
 		self._libs = {}
 		self._obj_properties = {}
 		self._obj_key_cache = {}
@@ -109,7 +109,7 @@ class LinkageMapMachO(object):
 			else:
 				os = portage.os

-			abs_path = os.path.join(root, obj.lstrip(os.path.sep))
+			abs_path = os.path.join(root, obj.lstrip(os.sep))
 			try:
 				object_stat = os.stat(abs_path)
 			except OSError:
@@ -141,55 +141,98 @@ class LinkageMapMachO(object):
 		def __str__(self):
 			return str(sorted(self.alt_paths))

-	def rebuild(self, exclude_pkgs=None, include_file=None):
+	def rebuild(self, exclude_pkgs=None, include_file=None,
+		preserve_paths=None):
 		"""
 		Raises CommandNotFound if there are preserved libs
 		and the scanmacho binary is not available.
+
+		@param exclude_pkgs: A set of packages that should be excluded from
+			the LinkageMap, since they are being unmerged and their NEEDED
+			entries are therefore irrelevant and would only serve to corrupt
+			the LinkageMap.
+		@type exclude_pkgs: set
+		@param include_file: The path of a file containing NEEDED entries for
+			a package which does not exist in the vardbapi yet because it is
+			currently being merged.
+		@type include_file: String
+		@param preserve_paths: Libraries preserved by a package instance that
+			is currently being merged. They need to be explicitly passed to the
+			LinkageMap, since they are not registered in the
+			PreservedLibsRegistry yet.
+		@type preserve_paths: set
 		"""
+
+		os = _os_merge
 		root = self._root
 		root_len = len(root) - 1
 		self._clear_cache()
 		libs = self._libs
-		obj_key_cache = self._obj_key_cache
 		obj_properties = self._obj_properties

-		os = _os_merge
-
 		lines = []

 		# Data from include_file is processed first so that it
 		# overrides any data from previously installed files.
 		if include_file is not None:
-			lines += grabfile(include_file)
+			for line in grabfile(include_file):
+				lines.append((include_file, line))

 		aux_keys = [self._needed_aux_key]
-		for cpv in self._dbapi.cpv_all():
-			if exclude_pkgs is not None and cpv in exclude_pkgs:
-				continue
-			lines += self._dbapi.aux_get(cpv, aux_keys)[0].split('\n')
-		# Cache NEEDED.* files avoid doing excessive IO for every rebuild.
-		self._dbapi.flush_cache()
+		can_lock = os.access(os.path.dirname(self._dbapi._dbroot), os.W_OK)
+		if can_lock:
+			self._dbapi.lock()
+		try:
+			for cpv in self._dbapi.cpv_all():
+				if exclude_pkgs is not None and cpv in exclude_pkgs:
+					continue
+				needed_file = self._dbapi.getpath(cpv,
+					filename=self._needed_aux_key)
+				for line in self._dbapi.aux_get(cpv, aux_keys)[0].splitlines():
+					lines.append((needed_file, line))
+		finally:
+			if can_lock:
+				self._dbapi.unlock()

 		# have to call scanmacho for preserved libs here as they aren't
 		# registered in NEEDED.MACHO.3 files
 		plibs = set()
-		if self._dbapi._plib_registry and self._dbapi._plib_registry.getPreservedLibs():
-			args = [EPREFIX+"/usr/bin/scanmacho", "-qF", "%a;%F;%S;%n"]
-			for items in self._dbapi._plib_registry.getPreservedLibs().values():
+		if preserve_paths is not None:
+			plibs.update(preserve_paths)
+		if self._dbapi._plib_registry and \
+			self._dbapi._plib_registry.hasEntries():
+			for cpv, items in \
+				self._dbapi._plib_registry.getPreservedLibs().items():
+				if exclude_pkgs is not None and cpv in exclude_pkgs:
+					# These preserved libs will either be unmerged,
+					# rendering them irrelevant, or they will be
+					# preserved in the replacement package and are
+					# already represented via the preserve_paths
+					# parameter.
+					continue
 				plibs.update(items)
-			args.extend(os.path.join(root, x.lstrip("." + os.sep)) \
-				for x in items)
+		if plibs:
+			args = [EPREFIX+"/usr/bin/scanmacho", "-qF", "%a;%F;%S;%n"]
+			args.extend(os.path.join(root, x.lstrip("." + os.sep)) \
+				for x in plibs)
 			try:
 				proc = subprocess.Popen(args, stdout=subprocess.PIPE)
-			except EnvironmentError, e:
+			except EnvironmentError as e:
 				if e.errno != errno.ENOENT:
 					raise
 				raise CommandNotFound(args[0])
 			else:
 				for l in proc.stdout:
-					if not isinstance(l, unicode):
-						l = unicode(l, encoding='utf_8', errors='replace')
-					l = l.rstrip("\n")
+					try:
+						l = _unicode_decode(l,
+							encoding=_encodings['content'], errors='strict')
+					except UnicodeDecodeError:
+						l = _unicode_decode(l,
+							encoding=_encodings['content'], errors='replace')
+						writemsg_level(_("\nError decoding characters " \
+							"returned from scanmacho: %s\n\n") % (l,),
+							level=logging.ERROR, noiselevel=-1)
+					l = l[3:].rstrip("\n")
 					if not l:
 						continue
 					fields = l.split(";")
@@ -200,7 +243,7 @@ class LinkageMapMachO(object):
 						continue
 					fields[1] = fields[1][root_len:]
 					plibs.discard(fields[1])
-					lines.append(";".join(fields))
+					lines.append(("scanmacho", ";".join(fields)))
 				proc.wait()

 		if plibs:
@@ -211,16 +254,16 @@ class LinkageMapMachO(object):
 			# self._obj_properties. This is important in order to
 			# prevent findConsumers from raising an unwanted KeyError.
 			for x in plibs:
-				lines.append(";".join(['', x, '', '']))
+				lines.append(("plibs", ";".join(['', x, '', '', ''])))

-		for l in lines:
+		for location, l in lines:
 			l = l.rstrip("\n")
 			if not l:
 				continue
 			fields = l.split(";")
 			if len(fields) < 4:
 				writemsg_level("\nWrong number of fields " + \
-					"in %s: %s\n\n" % (self._needed_aux_key, l),
+					"in %s: %s\n\n") % (location, l),
 					level=logging.ERROR, noiselevel=-1)
 				continue
 			arch = fields[0]
@@ -263,7 +306,7 @@ class LinkageMapMachO(object):
 					providers=set(), consumers=set())
 				arch_map[needed_installname] = installname_map
 			installname_map.consumers.add(obj_key)
-	
+
 	def listBrokenBinaries(self, debug=False):
 		"""
 		Find binaries and their needed install_names, which have no providers.
@@ -277,6 +320,7 @@ class LinkageMapMachO(object):
 		corresponding libraries to fulfill the dependency.

 		"""
+		os = _os_merge

 		class _LibraryCache(object):
@@ -377,10 +421,14 @@ class LinkageMapMachO(object):
 					rValue.setdefault(lib, set()).add(install_name)
 					if debug:
 						if not os.path.isfile(lib):
-							print(_("Missing library:"), lib)
+							writemsg_level(_("Missing library:") + " %s\n" % (lib,),
+								level=logging.DEBUG,
+								noiselevel=-1)
 						else:
-							print(_("Possibly missing symlink:"), \
-								install_name)
+							writemsg_level(_("Possibly missing symlink:") + \
+								"%s\n" % (os.path.join(os.path.dirname(lib), soname)),
+								level=logging.DEBUG,
+								noiselevel=-1)
 		return rValue

 	def listProviders(self):
@@ -397,7 +445,7 @@ class LinkageMapMachO(object):
 		rValue = {}
 		if not self._libs:
 			self.rebuild()
-		# Iterate over all binaries within LinkageMapMachO.
+		# Iterate over all object keys within LinkageMapMachO.
 		for obj_key in self._obj_properties:
 			rValue.setdefault(obj_key, self.findProviders(obj_key))
 		return rValue
@@ -440,7 +488,7 @@ class LinkageMapMachO(object):
 		for obj_key in soname_map.providers:
 			rValue.extend(self._obj_properties[obj_key][3])
 		return rValue
-	
+
 	def getSoname(self, obj):
 		"""
 		Return the soname associated with an object.
@@ -482,6 +530,7 @@ class LinkageMapMachO(object):
 			set-of-library-paths satisfy install_name.

 		"""
+		os = _os_merge

 		rValue = {}
@@ -531,6 +580,11 @@ class LinkageMapMachO(object):
 		fail to preserve binutils libs that are needed by these unrecognized
 		consumers.

+		Note that library consumption via dlopen (common for kde plugins) is
+		currently undetected. However, it is possible to use the
+		corresponding libtool archive (*.la) files to detect such consumers
+		(revdep-rebuild is able to detect them).
+
 		@param obj: absolute path to an object or a key from _obj_properties
 		@type obj: string (example: '/usr/bin/bar') or _ObjectKey
 		@rtype: set of strings (example: set(['/bin/foo', '/usr/bin/bar']))
@@ -538,6 +592,7 @@ class LinkageMapMachO(object):
 			set-of-library-paths satisfy install_name.

 		"""
+		os = _os_merge

 		rValue = set()
diff --git a/pym/portage/util/_dyn_libs/LinkageMapPeCoff.py b/pym/portage/util/_dyn_libs/LinkageMapPeCoff.py
index 9df84f1..c90947e 100644
--- a/pym/portage/util/_dyn_libs/LinkageMapPeCoff.py
+++ b/pym/portage/util/_dyn_libs/LinkageMapPeCoff.py
@@ -1,4 +1,4 @@
-# Copyright 1998-2010 Gentoo Foundation
+# Copyright 1998-2011 Gentoo Foundation
 # Distributed under the terms of the GNU General Public License v2

 import errno
@@ -46,7 +46,24 @@ class LinkageMapPeCoff(LinkageMapELF):
 			@return: 2-tuple of boolean indicating existance, and absolut path
 			"""
+			os = _os_merge
+
+			try:
+				_unicode_encode(obj,
+					encoding=_encodings['merge'], errors='strict')
+			except UnicodeEncodeError:
+				# The package appears to have been merged with a
+				# different value of sys.getfilesystemencoding(),
+				# so fall back to utf_8 if appropriate.
+				try:
+					_unicode_encode(obj,
+						encoding=_encodings['fs'], errors='strict')
+				except UnicodeEncodeError:
+					pass
+				else:
+					os = portage.os
+
 			abs_path = os.path.join(root, obj.lstrip(os.sep))
 			try:
 				object_stat = os.stat(abs_path)
@@ -78,67 +95,130 @@ class LinkageMapPeCoff(LinkageMapELF):
 		def __str__(self):
 			return str(sorted(self.alt_paths))

-	def rebuild(self, exclude_pkgs=None, include_file=None):
+	def rebuild(self, exclude_pkgs=None, include_file=None,
+		preserve_paths=None):
 		"""
 		Raises CommandNotFound if there are preserved libs
 		and the readpecoff binary is not available.
+
+		@param exclude_pkgs: A set of packages that should be excluded from
+			the LinkageMap, since they are being unmerged and their NEEDED
+			entries are therefore irrelevant and would only serve to corrupt
+			the LinkageMap.
+		@type exclude_pkgs: set
+		@param include_file: The path of a file containing NEEDED entries for
+			a package which does not exist in the vardbapi yet because it is
+			currently being merged.
+		@type include_file: String
+		@param preserve_paths: Libraries preserved by a package instance that
+			is currently being merged. They need to be explicitly passed to the
+			LinkageMap, since they are not registered in the
+			PreservedLibsRegistry yet.
+		@type preserve_paths: set
 		"""
+
+		os = _os_merge
 		root = self._root
 		root_len = len(root) - 1
 		self._clear_cache()
 		self._defpath.update(getlibpaths(self._root))
 		libs = self._libs
-		obj_key_cache = self._obj_key_cache
 		obj_properties = self._obj_properties

-		os = _os_merge
-
 		lines = []

 		# Data from include_file is processed first so that it
 		# overrides any data from previously installed files.
 		if include_file is not None:
-			lines += grabfile(include_file)
+			for line in grabfile(include_file):
+				lines.append((include_file, line))

 		aux_keys = [self._needed_aux_key]
-		for cpv in self._dbapi.cpv_all():
-			if exclude_pkgs is not None and cpv in exclude_pkgs:
-				continue
-			lines += self._dbapi.aux_get(cpv, aux_keys)[0].split('\n')
-		# Cache NEEDED.* files avoid doing excessive IO for every rebuild.
-		self._dbapi.flush_cache()
+		can_lock = os.access(os.path.dirname(self._dbapi._dbroot), os.W_OK)
+		if can_lock:
+			self._dbapi.lock()
+		try:
+			for cpv in self._dbapi.cpv_all():
+				if exclude_pkgs is not None and cpv in exclude_pkgs:
+					continue
+				needed_file = self._dbapi.getpath(cpv,
+					filename=self._needed_aux_key)
+				for line in self._dbapi.aux_get(cpv, aux_keys)[0].splitlines():
+					lines.append((needed_file, line))
+		finally:
+			if can_lock:
+				self._dbapi.unlock()

 		# have to call readpecoff for preserved libs here as they aren't
 		# registered in NEEDED.PECOFF.1 files
-		if self._dbapi._plib_registry and self._dbapi._plib_registry.getPreservedLibs():
+		plibs = set()
+		if preserve_paths is not None:
+			plibs.update(preserve_paths)
+		if self._dbapi._plib_registry and \
+			self._dbapi._plib_registry.hasEntries():
+			for cpv, items in \
+				self._dbapi._plib_registry.getPreservedLibs().items():
+				if exclude_pkgs is not None and cpv in exclude_pkgs:
+					# These preserved libs will either be unmerged,
+					# rendering them irrelevant, or they will be
+					# preserved in the replacement package and are
+					# already represented via the preserve_paths
+					# parameter.
+					continue
+				plibs.update(items)
+		if plibs:
 			args = ["readpecoff", self._dbapi.settings.get('CHOST')]
-			for items in self._dbapi._plib_registry.getPreservedLibs().values():
-				args.extend(os.path.join(root, x.lstrip("." + os.path.sep)) \
-					for x in items)
+			args.extend(os.path.join(root, x.lstrip("." + os.sep)) \
+				for x in plibs)
 			try:
 				proc = subprocess.Popen(args, stdout=subprocess.PIPE)
-			except EnvironmentError, e:
+			except EnvironmentError as e:
 				if e.errno != errno.ENOENT:
 					raise
 				raise CommandNotFound(args[0])
 			else:
 				for l in proc.stdout:
-					if not isinstance(l, unicode):
-						l = unicode(l, encoding='utf_8', errors='replace')
-					l = l.lstrip().rstrip()
+					try:
+						l = _unicode_decode(l,
+							encoding=_encodings['content'], errors='strict')
+					except UnicodeDecodeError:
+						l = _unicode_decode(l,
+							encoding=_encodings['content'], errors='replace')
+						writemsg_level(_("\nError decoding characters " \
+							"returned from readpecoff: %s\n\n") % (l,),
+							level=logging.ERROR, noiselevel=-1)
+					l = l[3:].rstrip("\n")
 					if not l:
 						continue
-					lines.append(l)
+					fields = l.split(";")
+					if len(fields) < 5:
+						writemsg_level(_("\nWrong number of fields " \
+							"returned from readpecoff: %s\n\n") % (l,),
+							level=logging.ERROR, noiselevel=-1)
+						continue
+					fields[1] = fields[1][root_len:]
+					plibs.discard(fields[1])
+					lines.append(("readpecoff", ";".join(fields)))
 				proc.wait()

-		for l in lines:
+		if plibs:
+			# Preserved libraries that did not appear in the scanelf output.
+			# This is known to happen with statically linked libraries.
+			# Generate dummy lines for these, so we can assume that every
+			# preserved library has an entry in self._obj_properties. This
+			# is important in order to prevent findConsumers from raising
+			# an unwanted KeyError.
+			for x in plibs:
+				lines.append(("plibs", ";".join(['', x, '', '', ''])))
+
+		for location, l in lines:
 			l = l.rstrip("\n")
 			if not l:
 				continue
 			fields = l.split(";")
 			if len(fields) < 5:
 				writemsg_level(_("\nWrong number of fields " \
-					"in %s: %s\n\n") % (self._needed_aux_key, l),
+					"in %s: %s\n\n") % (location, l),
 					level=logging.ERROR, noiselevel=-1)
 				continue
 			arch = fields[0]
@@ -148,7 +228,7 @@ class LinkageMapPeCoff(LinkageMapELF):
 				for x in filter(None, fields[3].replace(
 				"${ORIGIN}", os.path.dirname(obj)).replace(
 				"$ORIGIN", os.path.dirname(obj)).split(":"))])
-			needed = filter(None, fields[4].split(","))
+			needed = [x for x in fields[4].split(",") if x]

 			obj_key = self._obj_key(obj)
 			indexed = True
diff --git a/pym/portage/util/_dyn_libs/LinkageMapXCoff.py b/pym/portage/util/_dyn_libs/LinkageMapXCoff.py
index a6233b1..0e930fe 100644
--- a/pym/portage/util/_dyn_libs/LinkageMapXCoff.py
+++ b/pym/portage/util/_dyn_libs/LinkageMapXCoff.py
@@ -1,4 +1,4 @@
-# Copyright 1998-2010 Gentoo Foundation
+# Copyright 1998-2011 Gentoo Foundation
 # Distributed under the terms of the GNU General Public License v2

 import errno
@@ -96,10 +96,26 @@ class LinkageMapXCoff(LinkageMapELF):
 		def __str__(self):
 			return str(sorted(self.alt_paths))

-	def rebuild(self, exclude_pkgs=None, include_file=None):
+	def rebuild(self, exclude_pkgs=None, include_file=None,
+		preserve_paths=None):
 		"""
 		Raises CommandNotFound if there are preserved libs
 		and the scanelf binary is not available.
+
+		@param exclude_pkgs: A set of packages that should be excluded from
+			the LinkageMap, since they are being unmerged and their NEEDED
+			entries are therefore irrelevant and would only serve to corrupt
+			the LinkageMap.
+		@type exclude_pkgs: set
+		@param include_file: The path of a file containing NEEDED entries for
+			a package which does not exist in the vardbapi yet because it is
+			currently being merged.
+		@type include_file: String
+		@param preserve_paths: Libraries preserved by a package instance that
+			is currently being merged. They need to be explicitly passed to the
+			LinkageMap, since they are not registered in the
+			PreservedLibsRegistry yet.
+		@type preserve_paths: set
 		"""

 		os = _os_merge
@@ -108,7 +124,6 @@ class LinkageMapXCoff(LinkageMapELF):
 		self._clear_cache()
 		self._defpath.update(getlibpaths(self._root))
 		libs = self._libs
-		obj_key_cache = self._obj_key_cache
 		obj_properties = self._obj_properties

 		lines = []
@@ -116,75 +131,96 @@ class LinkageMapXCoff(LinkageMapELF):
 		# Data from include_file is processed first so that it
 		# overrides any data from previously installed files.
 		if include_file is not None:
-			lines += grabfile(include_file)
+			for line in grabfile(include_file):
+				lines.append((include_file, line))

 		aux_keys = [self._needed_aux_key]
-		for cpv in self._dbapi.cpv_all():
-			if exclude_pkgs is not None and cpv in exclude_pkgs:
-				continue
-			lines += self._dbapi.aux_get(cpv, aux_keys)[0].split('\n')
-		# Cache NEEDED.* files avoid doing excessive IO for every rebuild.
-		self._dbapi.flush_cache()
+		can_lock = os.access(os.path.dirname(self._dbapi._dbroot), os.W_OK)
+		if can_lock:
+			self._dbapi.lock()
+		try:
+			for cpv in self._dbapi.cpv_all():
+				if exclude_pkgs is not None and cpv in exclude_pkgs:
+					continue
+				needed_file = self._dbapi.getpath(cpv,
+					filename=self._needed_aux_key)
+				for line in self._dbapi.aux_get(cpv, aux_keys)[0].splitlines():
+					lines.append((needed_file, line))
+		finally:
+			if can_lock:
+				self._dbapi.unlock()

 		# have to call scanelf for preserved libs here as they aren't
 		# registered in NEEDED.XCOFF.1 files
 		plibs = set()
-		if self._dbapi._plib_registry and self._dbapi._plib_registry.getPreservedLibs():
-			for items in self._dbapi._plib_registry.getPreservedLibs().values():
+		if preserve_paths is not None:
+			plibs.update(preserve_paths)
+		if self._dbapi._plib_registry and \
+			self._dbapi._plib_registry.hasEntries():
+			for cpv, items in \
+				self._dbapi._plib_registry.getPreservedLibs().items():
+				if exclude_pkgs is not None and cpv in exclude_pkgs:
+					# These preserved libs will either be unmerged,
+					# rendering them irrelevant, or they will be
+					# preserved in the replacement package and are
+					# already represented via the preserve_paths
+					# parameter.
+					continue
 				plibs.update(items)
-				for x in items:
-					args = [BASH_BINARY, "-c", ':'
-						+ '; member="' + x + '"'
-						+ '; archive=${member}'
-						+ '; if [[ ${member##*/} == .*"["*"]" ]]'
-						+ '; then member=${member%/.*}/${member##*/.}'
-						+ '; archive=${member%[*}'
-						+ '; fi'
-						+ '; member=${member#${archive}}'
-						+ '; [[ -r ${archive} ]] || chmod a+r "${archive}"'
-						+ '; eval $(aixdll-query "${archive}${member}" FILE MEMBER FLAGS FORMAT RUNPATH DEPLIBS)'
-						+ '; [[ -n ${member} ]] && needed=${FILE##*/} || needed='
-						+ '; for deplib in ${DEPLIBS}'
-						+ '; do eval deplib=${deplib}'
-						+ '; if [[ ${deplib} != "." && ${deplib} != ".." ]]'
-						+ '; then needed="${needed}${needed:+,}${deplib}"'
-						+ '; fi'
-						+ '; done'
-						+ '; [[ -n ${MEMBER} ]] && MEMBER="[${MEMBER}]"'
-						+ '; [[ " ${FLAGS} " == *" SHROBJ "* ]] && soname=${FILE##*/}${MEMBER} || soname='
-						+ '; echo "${FORMAT##* }${FORMAT%%-*};${FILE#${ROOT%/}}${MEMBER};${soname};${RUNPATH};${needed}"'
-						+ '; [[ -z ${member} && -n ${MEMBER} ]] && echo "${FORMAT##* }${FORMAT%%-*};${FILE#${ROOT%/}};${FILE##*/};;"'
-						]
+		if plibs:
+			for x in plibs:
+				args = [BASH_BINARY, "-c", ':'
+					+ '; member="' + x + '"'
+					+ '; archive=${member}'
+					+ '; if [[ ${member##*/} == .*"["*"]" ]]'
+					+ '; then member=${member%/.*}/${member##*/.}'
+					+ '; archive=${member%[*}'
+					+ '; fi'
+					+ '; member=${member#${archive}}'
+					+ '; [[ -r ${archive} ]] || chmod a+r "${archive}"'
+					+ '; eval $(aixdll-query "${archive}${member}" FILE MEMBER FLAGS FORMAT RUNPATH DEPLIBS)'
+					+ '; [[ -n ${member} ]] && needed=${FILE##*/} || needed='
+					+ '; for deplib in ${DEPLIBS}'
+					+ '; do eval deplib=${deplib}'
+					+ '; if [[ ${deplib} != "." && ${deplib} != ".." ]]'
+					+ '; then needed="${needed}${needed:+,}${deplib}"'
+					+ '; fi'
+					+ '; done'
+					+ '; [[ -n ${MEMBER} ]] && MEMBER="[${MEMBER}]"'
+					+ '; [[ " ${FLAGS} " == *" SHROBJ "* ]] && soname=${FILE##*/}${MEMBER} || soname='
+					+ '; echo "${FORMAT##* }${FORMAT%%-*};${FILE#${ROOT%/}}${MEMBER};${soname};${RUNPATH};${needed}"'
+					+ '; [[ -z ${member} && -n ${MEMBER} ]] && echo "${FORMAT##* }${FORMAT%%-*};${FILE#${ROOT%/}};${FILE##*/};;"'
+					]
+				try:
+					proc = subprocess.Popen(args, stdout=subprocess.PIPE)
+				except EnvironmentError as e:
+					if e.errno != errno.ENOENT:
+						raise
+					raise CommandNotFound(args[0])
+				else:
+					for l in proc.stdout:
 						try:
-							proc = subprocess.Popen(args, stdout=subprocess.PIPE)
-						except EnvironmentError as e:
-							if e.errno != errno.ENOENT:
-								raise
-							raise CommandNotFound("aixdll-query via " + argv[0])
-						else:
-							for l in proc.stdout:
-								try:
-									l = _unicode_decode(l,
-										encoding=_encodings['content'], errors='strict')
-								except UnicodeDecodeError:
-									l = _unicode_decode(l,
-										encoding=_encodings['content'], errors='replace')
-									writemsg_level(_("\nError decoding characters " \
-										"returned from aixdll-query: %s\n\n") % (l,),
-										level=logging.ERROR, noiselevel=-1)
-								l = l.rstrip("\n")
-								if not l:
-									continue
-								fields = l.split(";")
-								if len(fields) < 5:
-									writemsg_level(_("\nWrong number of fields " \
-										"returned from aixdll-query: %s\n\n") % (l,),
-										level=logging.ERROR, noiselevel=-1)
-									continue
-								fields[1] = fields[1][root_len:]
-								plibs.discard(fields[1])
-								lines.append(";".join(fields))
-							proc.wait()
+							l = _unicode_decode(l,
+								encoding=_encodings['content'], errors='strict')
+						except UnicodeDecodeError:
+							l = _unicode_decode(l,
+								encoding=_encodings['content'], errors='replace')
+							writemsg_level(_("\nError decoding characters " \
+								"returned from aixdll-query: %s\n\n") % (l,),
+								level=logging.ERROR, noiselevel=-1)
+						l = l.rstrip("\n")
+						if not l:
+							continue
+						fields = l.split(";")
+						if len(fields) < 5:
+							writemsg_level(_("\nWrong number of fields " \
+								"returned from aixdll-query: %s\n\n") % (l,),
+								level=logging.ERROR, noiselevel=-1)
+							continue
+						fields[1] = fields[1][root_len:]
+						plibs.discard(fields[1])
+						lines.append(("aixdll-query", ";".join(fields)))
+					proc.wait()

 		if plibs:
 			# Preserved libraries that did not appear in the bash
@@ -195,16 +231,16 @@ class LinkageMapXCoff(LinkageMapELF):
 			# order to prevent findConsumers from raising an unwanted
 			# KeyError.
 			for x in plibs:
-				lines.append(";".join(['', x, '', '', '']))
+				lines.append(("plibs", ";".join(['', x, '', '', ''])))

-		for l in lines:
+		for location, l in lines:
 			l = l.rstrip("\n")
 			if not l:
 				continue
 			fields = l.split(";")
 			if len(fields) < 5:
 				writemsg_level(_("\nWrong number of fields " \
-					"in %s: %s\n\n") % (self._needed_aux_key, l),
+					"in %s: %s\n\n") % (location, l),
 					level=logging.ERROR, noiselevel=-1)
 				continue
 			arch = fields[0]
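
The recurring structural change in all three rebuild() implementations above is that `lines` now collects (location, line) tuples instead of bare strings, so a malformed NEEDED entry can be reported together with the NEEDED file or helper tool that produced it. Below is a minimal, self-contained sketch of that bookkeeping pattern; the helper name and sample data are hypothetical and are not part of Portage's API.

# Sketch: mimic the (location, line) bookkeeping used by rebuild() above.
# parse_needed_lines and the sample data are hypothetical, illustration only.
import logging

def parse_needed_lines(tagged_lines, min_fields=5):
    """tagged_lines is an iterable of (location, line) tuples."""
    entries = []
    for location, line in tagged_lines:
        line = line.rstrip("\n")
        if not line:
            continue
        fields = line.split(";")
        if len(fields) < min_fields:
            # The location tag identifies the NEEDED file or helper
            # program that produced the malformed entry.
            logging.error("Wrong number of fields in %s: %s", location, line)
            continue
        entries.append((location, fields))
    return entries

if __name__ == "__main__":
    sample = [
        ("/var/db/pkg/dev-libs/foo-1/NEEDED.MACHO.3",
         "x86_64;/usr/lib/libfoo.1.dylib;/usr/lib/libfoo.1.dylib;;"),
        ("scanmacho", "not-enough-fields"),
    ]
    for location, fields in parse_needed_lines(sample):
        print(location, fields)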