From mboxrd@z Thu Jan  1 00:00:00 1970
Return-Path:
Received: from lists.gentoo.org (pigeon.gentoo.org [208.92.234.80])
	by finch.gentoo.org (Postfix) with ESMTP id 382801381F3
	for ; Thu, 6 Dec 2012 23:53:11 +0000 (UTC)
Received: from pigeon.gentoo.org (localhost [127.0.0.1])
	by pigeon.gentoo.org (Postfix) with SMTP id 54A85E01BE;
	Thu, 6 Dec 2012 23:53:01 +0000 (UTC)
Received: from smtp.gentoo.org (smtp.gentoo.org [140.211.166.183])
	(using TLSv1 with cipher AECDH-AES256-SHA (256/256 bits))
	(No client certificate requested)
	by pigeon.gentoo.org (Postfix) with ESMTPS id A17D6E0025
	for ; Thu, 6 Dec 2012 23:53:00 +0000 (UTC)
Received: from hornbill.gentoo.org (hornbill.gentoo.org [94.100.119.163])
	(using TLSv1 with cipher AECDH-AES256-SHA (256/256 bits))
	(No client certificate requested)
	by smtp.gentoo.org (Postfix) with ESMTPS id 7388133D938
	for ; Thu, 6 Dec 2012 23:52:59 +0000 (UTC)
Received: from localhost.localdomain (localhost [127.0.0.1])
	by hornbill.gentoo.org (Postfix) with ESMTP id 14F01E5436
	for ; Thu, 6 Dec 2012 23:52:58 +0000 (UTC)
From: "Magnus Granberg"
To: gentoo-commits@lists.gentoo.org
Content-Transfer-Encoding: 8bit
Content-type: text/plain; charset=UTF-8
Reply-To: gentoo-dev@lists.gentoo.org, "Magnus Granberg"
Message-ID: <1354837959.a2d7ab6659623b2b4b5a4d7b86acecd5044684b6.zorry@gentoo>
Subject: [gentoo-commits] dev/zorry:master commit in: gobs/pym/
X-VCS-Repository: dev/zorry
X-VCS-Files: gobs/pym/build_log.py gobs/pym/build_queru.py gobs/pym/pgsql_querys.py
X-VCS-Directories: gobs/pym/
X-VCS-Committer: zorry
X-VCS-Committer-Name: Magnus Granberg
X-VCS-Revision: a2d7ab6659623b2b4b5a4d7b86acecd5044684b6
X-VCS-Branch: master
Date: Thu, 6 Dec 2012 23:52:58 +0000 (UTC)
Precedence: bulk
List-Post:
List-Help:
List-Unsubscribe:
List-Subscribe:
List-Id: Gentoo Linux mail
X-BeenThere: gentoo-commits@lists.gentoo.org
X-Archives-Salt: b6fd1afb-57e0-429c-8664-d19b0b711c2b
X-Archives-Hash: c3a841010a3ec210c4d1337e255bf796

commit:     a2d7ab6659623b2b4b5a4d7b86acecd5044684b6
Author:     Magnus Granberg  gentoo org>
AuthorDate: Thu Dec  6 23:52:39 2012 +0000
Commit:     Magnus Granberg  gentoo org>
CommitDate: Thu Dec  6 23:52:39 2012 +0000
URL:        http://git.overlays.gentoo.org/gitweb/?p=dev/zorry.git;a=commit;h=a2d7ab66

    fix error in sql queru and move log_fail_queru()

---
 gobs/pym/build_log.py    | 811 ++++++++++++++++------------------------------
 gobs/pym/build_queru.py  |  54 +---
 gobs/pym/pgsql_querys.py |   7 +-
 3 files changed, 278 insertions(+), 594 deletions(-)

diff --git a/gobs/pym/build_log.py b/gobs/pym/build_log.py
index 5ecd8a5..537103e 100644
--- a/gobs/pym/build_log.py
+++ b/gobs/pym/build_log.py
@@ -2,526 +2,212 @@ from __future__ import print_function
 import re
 import os
 import platform
-import logging
-try:
-    from subprocess import getstatusoutput as subprocess_getstatusoutput
-except ImportError:
-    from commands import getstatusoutput as subprocess_getstatusoutput
 from gobs.text import get_log_text_list
-from _emerge.main import parse_opts, load_emerge_config, \
-    getportageversion
-from portage.util import writemsg, \
-    writemsg_level, writemsg_stdout
-from _emerge.actions import _info_pkgs_ver
-from portage.exception import InvalidAtom
-from portage.dep import Atom
-from portage.dbapi._expand_new_virt import expand_new_virt
-from portage.const import GLOBAL_CONFIG_PATH, NEWS_LIB_PATH
-from portage.const import _ENABLE_DYN_LINK_MAP, _ENABLE_SET_CONFIG
 from portage.versions import catpkgsplit, cpv_getversion
-from portage import _encodings
-from portage import _unicode_encode
 from gobs.repoman_gobs import gobs_repoman
 import portage
+from portage.util import writemsg, \
+    writemsg_level, writemsg_stdout
+from portage import _encodings
+from portage import _unicode_encode
 from gobs.package import gobs_package
 from gobs.readconf import get_conf_settings
 from gobs.flags import gobs_use_flags
+
 reader=get_conf_settings()
 gobs_settings_dict=reader.read_gobs_settings_all()
+config_profile = gobs_settings_dict['gobs_config']
 # make a CM
 from gobs.ConnectionManager import connectionManager
 CM=connectionManager(gobs_settings_dict)
 #selectively import the pgsql/mysql querys
 if CM.getName()=='pgsql':
-    from gobs.pgsql import *
-
-class gobs_buildlog(object):
+    from gobs.pgsql_querys import *

-    def __init__(self):
-        self._config_profile = gobs_settings_dict['gobs_config']
-
-    def get_build_dict_db(self, settings, pkg):
-        conn=CM.getConnection()
-        myportdb = portage.portdbapi(mysettings=settings)
-        cpvr_list = catpkgsplit(pkg.cpv, silent=1)
-        categories = cpvr_list[0]
-        package = cpvr_list[1]
-        ebuild_version = cpv_getversion(pkg.cpv)
-        log_msg = "cpv: %s" % (pkg.cpv,)
-        add_gobs_logs(conn, log_msg, "info", self._config_profile)
-        init_package = gobs_package(settings, myportdb)
-        package_id = have_package_db(conn, categories, package)
-        # print("package_id %s" % package_id, file=sys.stdout)
-        build_dict = {}
-        mybuild_dict = {}
-        build_dict['ebuild_version'] = ebuild_version
-        build_dict['package_id'] = package_id
-        build_dict['cpv'] = pkg.cpv
-        build_dict['categories'] = categories
-        build_dict['package'] = package
-        build_dict['config_profile'] = self._config_profile
-        init_useflags = gobs_use_flags(settings, myportdb, pkg.cpv)
-        iuse_flags_list, final_use_list = init_useflags.get_flags_pkg(pkg, settings)
-        #print 'final_use_list', final_use_list
-        if final_use_list != []:
-            build_dict['build_useflags'] = sorted(final_use_list)
-        else:
-            build_dict['build_useflags'] = None
-        #print "build_dict['build_useflags']", build_dict['build_useflags']
-        pkgdir = os.path.join(settings['PORTDIR'], categories + "/" + package)
-        ebuild_version_checksum_tree = portage.checksum.sha256hash(pkgdir+ "/" + package + "-" + ebuild_version + ".ebuild")[0]
-        build_dict['checksum'] = ebuild_version_checksum_tree
+def get_build_dict_db(settings, pkg):
+    conn=CM.getConnection()
+    myportdb = portage.portdbapi(mysettings=settings)
+    cpvr_list = catpkgsplit(pkg.cpv, silent=1)
+    categories = cpvr_list[0]
+    package = cpvr_list[1]
+    ebuild_version = cpv_getversion(pkg.cpv)
+    log_msg = "cpv: %s" % (pkg.cpv,)
+    add_gobs_logs(conn, log_msg, "info", config_profile)
+    init_package = gobs_package(settings, myportdb)
+    package_id = have_package_db(conn, categories, package)
+    # print("package_id %s" % package_id, file=sys.stdout)
+    build_dict = {}
+    mybuild_dict = {}
+    build_dict['ebuild_version'] = ebuild_version
+    build_dict['package_id'] = package_id
+    build_dict['cpv'] = pkg.cpv
+    build_dict['categories'] = categories
+    build_dict['package'] = package
+    build_dict['config_profile'] = config_profile
+    init_useflags = gobs_use_flags(settings, myportdb, pkg.cpv)
+    iuse_flags_list, final_use_list = init_useflags.get_flags_pkg(pkg, settings)
+    #print 'final_use_list', final_use_list
+    if final_use_list != []:
+        build_dict['build_useflags'] = sorted(final_use_list)
+    else:
+        build_dict['build_useflags'] = None
+    #print "build_dict['build_useflags']", build_dict['build_useflags']
+    pkgdir = os.path.join(settings['PORTDIR'], categories + "/" + package)
+    ebuild_version_checksum_tree = portage.checksum.sha256hash(pkgdir+ "/" + package + "-" + ebuild_version + ".ebuild")[0]
+    build_dict['checksum'] = ebuild_version_checksum_tree
+    ebuild_id = get_ebuild_id_db_checksum(conn, build_dict)
+    if ebuild_id is None:
+        #print 'have any ebuild', get_ebuild_checksum(conn, package_id, ebuild_version)
+        init_package.update_ebuild_db(build_dict)
         ebuild_id = get_ebuild_id_db_checksum(conn, build_dict)
-        if ebuild_id is None:
-            #print 'have any ebuild', get_ebuild_checksum(conn, package_id, ebuild_version)
-            init_package.update_ebuild_db(build_dict)
-            ebuild_id = get_ebuild_id_db_checksum(conn, build_dict)
-        build_dict['ebuild_id'] = ebuild_id
-        queue_id = check_revision(conn, build_dict)
-        if queue_id is None:
-            build_dict['queue_id'] = None
-        else:
-            build_dict['queue_id'] = queue_id
-        CM.putConnection(conn)
-        return build_dict
-
-    def add_new_ebuild_buildlog(self, settings, pkg, build_dict, build_error, summary_error, build_log_dict):
-        conn=CM.getConnection()
-        portdb = portage.portdbapi(mysettings=settings)
-        init_useflags = gobs_use_flags(settings, portdb, build_dict['cpv'])
-        iuse_flags_list, final_use_list = init_useflags.get_flags_pkg(pkg, settings)
-        iuse = []
-        use_flags_list = []
-        use_enable_list = []
-        for iuse_line in iuse_flags_list:
-            iuse.append(init_useflags.reduce_flag(iuse_line))
-        iuse_flags_list2 = list(set(iuse))
-        use_enable = final_use_list
-        use_disable = list(set(iuse_flags_list2).difference(set(use_enable)))
-        use_flagsDict = {}
-        for x in use_enable:
-            use_flagsDict[x] = True
-        for x in use_disable:
-            use_flagsDict[x] = False
-        for u, s in use_flagsDict.iteritems():
-            use_flags_list.append(u)
-            use_enable_list.append(s)
-        build_id = add_new_buildlog(conn, build_dict, use_flags_list, use_enable_list, build_error, summary_error, build_log_dict)
-        CM.putConnection(conn)
-        return build_id
-
-    def search_info(self, textline, error_log_list):
-        if re.search(" * Package:", textline):
-            error_log_list.append(textline + '\n')
-        if re.search(" * Repository:", textline):
-            error_log_list.append(textline + '\n')
-        if re.search(" * Maintainer:", textline):
-            error_log_list.append(textline + '\n')
-        if re.search(" * USE:", textline):
-            error_log_list.append(textline + '\n')
-        if re.search(" * FEATURES:", textline):
-            error_log_list.append(textline + '\n')
-        return error_log_list
-
-    def search_error(self, logfile_text, textline, error_log_list, sum_build_log_list, i):
-        if re.search("Error 1", textline):
-            x = i - 20
-            endline = True
-            error_log_list.append(".....\n")
-            while x != i + 3 and endline:
-                try:
-                    error_log_list.append(logfile_text[x] + '\n')
-                except:
-                    endline = False
-                else:
-                    x = x +1
-        if re.search(" * ERROR:", textline):
-            x = i
-            endline= True
-            field = textline.split(" ")
-            sum_build_log_list.append("fail")
-            error_log_list.append(".....\n")
-            while x != i + 10 and endline:
-                try:
-                    error_log_list.append(logfile_text[x] + '\n')
-                except:
-                    endline = False
-                else:
-                    x = x +1
-        if re.search("configure: error:", textline):
-            x = i - 4
-            endline = True
-            error_log_list.append(".....\n")
-            while x != i + 3 and endline:
-                try:
-                    error_log_list.append(logfile_text[x] + '\n')
-                except:
-                    endline = False
-                else:
-                    x = x +1
-        return error_log_list, sum_build_log_list
-
-    def search_qa(self, logfile_text, textline, qa_error_list, error_log_list,i):
-        if re.search(" * QA Notice:", textline):
-            x = i
-            qa_error_list.append(logfile_text[x] + '\n')
-            endline= True
-            error_log_list.append(".....\n")
-            while x != i + 3 and endline:
-                try:
-                    error_log_list.append(logfile_text[x] + '\n')
-                except:
-                    endline = False
-                else:
-                    x = x +1
-        return qa_error_list, error_log_list
-
-    def get_buildlog_info(self, settings, build_dict):
-        myportdb = portage.portdbapi(mysettings=settings)
-        init_repoman = gobs_repoman(settings, myportdb)
-        logfile_text = get_log_text_list(settings.get("PORTAGE_LOG_FILE"))
-        # FIXME to support more errors and stuff
-        i = 0
-        build_log_dict = {}
-        error_log_list = []
-        qa_error_list = []
-        repoman_error_list = []
-        sum_build_log_list = []
-        for textline in logfile_text:
-            error_log_list = self.search_info(textline, error_log_list)
-            error_log_list, sum_build_log_list = self.search_error(logfile_text, textline, error_log_list, sum_build_log_list, i)
-            qa_error_list, error_log_list = self.search_qa(logfile_text, textline, qa_error_list, error_log_list, i)
-            i = i +1
-        # Run repoman check_repoman()
-        repoman_error_list = init_repoman.check_repoman(build_dict['categories'], build_dict['package'], build_dict['ebuild_version'], build_dict['config_profile'])
-        if repoman_error_list != []:
-            sum_build_log_list.append("repoman")
-        if qa_error_list != []:
-            sum_build_log_list.append("qa")
-        build_log_dict['repoman_error_list'] = repoman_error_list
-        build_log_dict['qa_error_list'] = qa_error_list
-        build_log_dict['error_log_list'] = error_log_list
-        build_log_dict['summary_error_list'] = sum_build_log_list
-        return build_log_dict
-
-    # Copy of the portage action_info but fixed so it post info to a list.
-    def action_info(self, settings, trees):
-        argscmd = []
-        myaction, myopts, myfiles = parse_opts(argscmd, silent=True)
-        msg = []
-        root = '/'
-        root_config = root
-        # root_config = trees[settings['ROOT']]['root_config']
-        msg.append(getportageversion(settings["PORTDIR"], settings["ROOT"],
-            settings.profile_path, settings["CHOST"],
-            trees[settings["ROOT"]]["vartree"].dbapi) + "\n")
-
-        header_width = 65
-        header_title = "System Settings"
-        if myfiles:
-            msg.append(header_width * "=" + "\n")
-            msg.append(header_title.rjust(int(header_width/2 + len(header_title)/2)) + "\n")
-            msg.append(header_width * "=" + "\n")
-        msg.append("System uname: "+platform.platform(aliased=1) + "\n")
-
-        lastSync = portage.grabfile(os.path.join(
-            settings["PORTDIR"], "metadata", "timestamp.chk"))
-        if lastSync:
-            msg.append("Timestamp of tree:" + lastSync[0] + "\n")
-        else:
-            msg.append("Timestamp of tree: Unknown" + "\n")
-
-        output=subprocess_getstatusoutput("distcc --version")
-        if not output[0]:
-            msg.append(str(output[1].split("\n",1)[0]))
-            if "distcc" in settings.features:
-                msg.append("[enabled]")
-            else:
-                msg.append("[disabled]")
-
-        output=subprocess_getstatusoutput("ccache -V")
-        if not output[0]:
-            msg.append(str(output[1].split("\n",1)[0]), end=' ')
-            if "ccache" in settings.features:
-                msg.append("[enabled]")
+    build_dict['ebuild_id'] = ebuild_id
+    queue_id = check_revision(conn, build_dict)
+    if queue_id is None:
+        build_dict['queue_id'] = None
+    else:
+        build_dict['queue_id'] = queue_id
+    CM.putConnection(conn)
+    return build_dict
+
+def add_new_ebuild_buildlog(settings, pkg, build_dict, build_error, summary_error, build_log_dict):
+    conn=CM.getConnection()
+    portdb = portage.portdbapi(mysettings=settings)
+    init_useflags = gobs_use_flags(settings, portdb, build_dict['cpv'])
+    iuse_flags_list, final_use_list = init_useflags.get_flags_pkg(pkg, settings)
+    iuse = []
+    use_flags_list = []
+    use_enable_list = []
+    for iuse_line in iuse_flags_list:
+        iuse.append(init_useflags.reduce_flag(iuse_line))
+    iuse_flags_list2 = list(set(iuse))
+    use_enable = final_use_list
+    use_disable = list(set(iuse_flags_list2).difference(set(use_enable)))
+    use_flagsDict = {}
+    for x in use_enable:
+        use_flagsDict[x] = True
+    for x in use_disable:
+        use_flagsDict[x] = False
+    for u, s in use_flagsDict.iteritems():
+        use_flags_list.append(u)
+        use_enable_list.append(s)
+    build_id = add_new_buildlog(conn, build_dict, use_flags_list, use_enable_list, build_error, summary_error, build_log_dict)
+    CM.putConnection(conn)
+    return build_id
+
+def search_info(self, textline, error_log_list):
+    if re.search(" * Package:", textline):
+        error_log_list.append(textline + '\n')
+    if re.search(" * Repository:", textline):
+        error_log_list.append(textline + '\n')
+    if re.search(" * Maintainer:", textline):
+        error_log_list.append(textline + '\n')
+    if re.search(" * USE:", textline):
+        error_log_list.append(textline + '\n')
+    if re.search(" * FEATURES:", textline):
+        error_log_list.append(textline + '\n')
+    return error_log_list
+
+def search_error(self, logfile_text, textline, error_log_list, sum_build_log_list, i):
+    if re.search("Error 1", textline):
+        x = i - 20
+        endline = True
+        error_log_list.append(".....\n")
+        while x != i + 3 and endline:
+            try:
+                error_log_list.append(logfile_text[x] + '\n')
+            except:
+                endline = False
            else:
-            msg.append("[disabled]")
-
-        myvars = ["sys-devel/autoconf", "sys-devel/automake", "virtual/os-headers",
-            "sys-devel/binutils", "sys-devel/libtool", "dev-lang/python"]
-        myvars += portage.util.grabfile(settings["PORTDIR"]+"/profiles/info_pkgs")
-        atoms = []
-        vardb = trees["/"]["vartree"].dbapi
-        for x in myvars:
+                x = x +1
+    if re.search(" * ERROR:", textline):
+        x = i
+        endline= True
+        field = textline.split(" ")
+        sum_build_log_list.append("fail")
+        error_log_list.append(".....\n")
+        while x != i + 10 and endline:
            try:
-                x = Atom(x)
-            except InvalidAtom:
-                writemsg_stdout("%-20s %s\n" % (x+":", "[NOT VALID]"),
-                    noiselevel=-1)
+                error_log_list.append(logfile_text[x] + '\n')
+            except:
+                endline = False
            else:
-                for atom in expand_new_virt(vardb, x):
-                    if not atom.blocker:
-                        atoms.append((x, atom))
-
-        myvars = sorted(set(atoms))
-
-        portdb = trees["/"]["porttree"].dbapi
-        main_repo = portdb.getRepositoryName(portdb.porttree_root)
-        cp_map = {}
-        cp_max_len = 0
-
-        for orig_atom, x in myvars:
-            pkg_matches = vardb.match(x)
-
-            versions = []
-            for cpv in pkg_matches:
-                matched_cp = portage.versions.cpv_getkey(cpv)
-                ver = portage.versions.cpv_getversion(cpv)
-                ver_map = cp_map.setdefault(matched_cp, {})
-                prev_match = ver_map.get(ver)
-                if prev_match is not None:
-                    if prev_match.provide_suffix:
-                        # prefer duplicate matches that include
-                        # additional virtual provider info
-                        continue
-
-                if len(matched_cp) > cp_max_len:
-                    cp_max_len = len(matched_cp)
-                repo = vardb.aux_get(cpv, ["repository"])[0]
-                if repo == main_repo:
-                    repo_suffix = ""
-                elif not repo:
-                    repo_suffix = "::"
-                else:
-                    repo_suffix = "::" + repo
-
-                if matched_cp == orig_atom.cp:
-                    provide_suffix = ""
-                else:
-                    provide_suffix = " (%s)" % (orig_atom,)
-
-                ver_map[ver] = _info_pkgs_ver(ver, repo_suffix, provide_suffix)
-
-        for cp in sorted(cp_map):
-            versions = sorted(cp_map[cp].values())
-            versions = ", ".join(ver.toString() for ver in versions)
-            msg_extra = "%s %s\n" % \
-                ((cp + ":").ljust(cp_max_len + 1), versions)
-            msg.append(msg_extra)
-
-        libtool_vers = ",".join(trees["/"]["vartree"].dbapi.match("sys-devel/libtool"))
-
-        repos = portdb.settings.repositories
-        msg_extra = "Repositories: %s\n" % \
-            " ".join(repo.name for repo in repos)
-        msg.append(msg_extra)
-
-        if _ENABLE_SET_CONFIG:
= "Installed sets: " - sets_line += ", ".join(s for s in \ - sorted(root_config.sets['selected'].getNonAtoms()) \ - if s.startswith(SETPREFIX)) - sets_line += "\n" - msg.append(sets_line) - - myvars = ['GENTOO_MIRRORS', 'CONFIG_PROTECT', 'CONFIG_PROTECT_MASK', - 'PORTDIR', 'DISTDIR', 'PKGDIR', 'PORTAGE_TMPDIR', - 'PORTDIR_OVERLAY', 'PORTAGE_BUNZIP2_COMMAND', - 'PORTAGE_BZIP2_COMMAND', - 'USE', 'CHOST', 'CFLAGS', 'CXXFLAGS', - 'ACCEPT_KEYWORDS', 'ACCEPT_LICENSE', 'SYNC', 'FEATURES', - 'EMERGE_DEFAULT_OPTS'] - myvars.extend(portage.util.grabfile(settings["PORTDIR"]+"/profiles/info_vars")) - - myvars_ignore_defaults = { - 'PORTAGE_BZIP2_COMMAND' : 'bzip2', - } - - myvars = portage.util.unique_array(myvars) - use_expand = settings.get('USE_EXPAND', '').split() - use_expand.sort() - use_expand_hidden = set( - settings.get('USE_EXPAND_HIDDEN', '').upper().split()) - alphabetical_use = '--alphabetical' in myopts - unset_vars = [] - myvars.sort() - for x in myvars: - if x in settings: - if x != "USE": - default = myvars_ignore_defaults.get(x) - if default is not None and \ - default == settings[x]: - continue - msg_extra = '%s="%s"\n' % (x, settings[x]) - msg.append(msg_extra) - else: - use = set(settings["USE"].split()) - for varname in use_expand: - flag_prefix = varname.lower() + "_" - for f in list(use): - if f.startswith(flag_prefix): - use.remove(f) - use = list(use) - use.sort() - msg_extra = 'USE=%s' % " ".join(use) - msg.append(msg_extra + "\n") - for varname in use_expand: - myval = settings.get(varname) - if myval: - msg.append(varname + '=' + myval + "\n") + x = x +1 + if re.search("configure: error:", textline): + x = i - 4 + endline = True + error_log_list.append(".....\n") + while x != i + 3 and endline: + try: + error_log_list.append(logfile_text[x] + '\n') + except: + endline = False else: - unset_vars.append(x) - if unset_vars: - msg_extra = "Unset: "+", ".join(unset_vars) - msg.append(msg_extra + "\n") - - # See if we can find any packages installed matching the strings - # passed on the command line - mypkgs = [] - vardb = trees[settings["ROOT"]]["vartree"].dbapi - portdb = trees[settings["ROOT"]]["porttree"].dbapi - bindb = trees[settings["ROOT"]]["bintree"].dbapi - for x in myfiles: - match_found = False - installed_match = vardb.match(x) - for installed in installed_match: - mypkgs.append((installed, "installed")) - match_found = True - - if match_found: - continue - - for db, pkg_type in ((portdb, "ebuild"), (bindb, "binary")): - if pkg_type == "binary" and "--usepkg" not in myopts: - continue - - matches = db.match(x) - matches.reverse() - for match in matches: - if pkg_type == "binary": - if db.bintree.isremote(match): - continue - auxkeys = ["EAPI", "DEFINED_PHASES"] - metadata = dict(zip(auxkeys, db.aux_get(match, auxkeys))) - if metadata["EAPI"] not in ("0", "1", "2", "3") and \ - "info" in metadata["DEFINED_PHASES"].split(): - mypkgs.append((match, pkg_type)) - break - - # If some packages were found... 
- if mypkgs: - # Get our global settings (we only print stuff if it varies from - # the current config) - mydesiredvars = [ 'CHOST', 'CFLAGS', 'CXXFLAGS', 'LDFLAGS' ] - auxkeys = mydesiredvars + list(vardb._aux_cache_keys) - auxkeys.append('DEFINED_PHASES') - global_vals = {} - pkgsettings = portage.config(clone=settings) - - # Loop through each package - # Only print settings if they differ from global settings - header_title = "Package Settings" - msg.append(header_width * "=") - msg.append(header_title.rjust(int(header_width/2 + len(header_title)/2))) - msg.append(header_width * "=") - from portage.output import EOutput - out = EOutput() - for mypkg in mypkgs: - cpv = mypkg[0] - pkg_type = mypkg[1] - # Get all package specific variables - if pkg_type == "installed": - metadata = dict(zip(auxkeys, vardb.aux_get(cpv, auxkeys))) - elif pkg_type == "ebuild": - metadata = dict(zip(auxkeys, portdb.aux_get(cpv, auxkeys))) - elif pkg_type == "binary": - metadata = dict(zip(auxkeys, bindb.aux_get(cpv, auxkeys))) - - pkg = Package(built=(pkg_type!="ebuild"), cpv=cpv, - installed=(pkg_type=="installed"), metadata=zip(Package.metadata_keys, - (metadata.get(x, '') for x in Package.metadata_keys)), - root_config=root_config, type_name=pkg_type) - - if pkg_type == "installed": - msg.append("\n%s was built with the following:" % \ - colorize("INFORM", str(pkg.cpv))) - elif pkg_type == "ebuild": - msg.append("\n%s would be build with the following:" % \ - colorize("INFORM", str(pkg.cpv))) - elif pkg_type == "binary": - msg.append("\n%s (non-installed binary) was built with the following:" % \ - colorize("INFORM", str(pkg.cpv))) - - writemsg_stdout('%s\n' % pkg_use_display(pkg, myopts), - noiselevel=-1) - if pkg_type == "installed": - for myvar in mydesiredvars: - if metadata[myvar].split() != settings.get(myvar, '').split(): - msg.append("%s=\"%s\"" % (myvar, metadata[myvar])) - - if metadata['DEFINED_PHASES']: - if 'info' not in metadata['DEFINED_PHASES'].split(): - continue - - msg.append(">>> Attempting to run pkg_info() for '%s'" % pkg.cpv) - - if pkg_type == "installed": - ebuildpath = vardb.findname(pkg.cpv) - elif pkg_type == "ebuild": - ebuildpath = portdb.findname(pkg.cpv, myrepo=pkg.repo) - elif pkg_type == "binary": - tbz2_file = bindb.bintree.getname(pkg.cpv) - ebuild_file_name = pkg.cpv.split("/")[1] + ".ebuild" - ebuild_file_contents = portage.xpak.tbz2(tbz2_file).getfile(ebuild_file_name) - tmpdir = tempfile.mkdtemp() - ebuildpath = os.path.join(tmpdir, ebuild_file_name) - file = open(ebuildpath, 'w') - file.write(ebuild_file_contents) - file.close() - - if not ebuildpath or not os.path.exists(ebuildpath): - out.ewarn("No ebuild found for '%s'" % pkg.cpv) - continue - - if pkg_type == "installed": - portage.doebuild(ebuildpath, "info", pkgsettings["ROOT"], - pkgsettings, debug=(settings.get("PORTAGE_DEBUG", "") == 1), - mydbapi=trees[settings["ROOT"]]["vartree"].dbapi, - tree="vartree") - elif pkg_type == "ebuild": - portage.doebuild(ebuildpath, "info", pkgsettings["ROOT"], - pkgsettings, debug=(settings.get("PORTAGE_DEBUG", "") == 1), - mydbapi=trees[settings["ROOT"]]["porttree"].dbapi, - tree="porttree") - elif pkg_type == "binary": - portage.doebuild(ebuildpath, "info", pkgsettings["ROOT"], - pkgsettings, debug=(settings.get("PORTAGE_DEBUG", "") == 1), - mydbapi=trees[settings["ROOT"]]["bintree"].dbapi, - tree="bintree") - shutil.rmtree(tmpdir) - return msg - - def write_msg_file(self, msg, log_path): - """ - Output msg to stdout if not self._background. 
If log_path - is not None then append msg to the log (appends with - compression if the filename extension of log_path - corresponds to a supported compression type). - """ - msg_shown = False - if log_path is not None: + x = x +1 + return error_log_list, sum_build_log_list + +def search_qa(logfile_text, textline, qa_error_list, error_log_list,i): + if re.search(" * QA Notice:", textline): + x = i + qa_error_list.append(logfile_text[x] + '\n') + endline= True + error_log_list.append(".....\n") + while x != i + 3 and endline: try: - f = open(_unicode_encode(log_path, + error_log_list.append(logfile_text[x] + '\n') + except: + endline = False + else: + x = x +1 + return qa_error_list, error_log_list + +def get_buildlog_info(settings, build_dict): + myportdb = portage.portdbapi(mysettings=settings) + init_repoman = gobs_repoman(settings, myportdb) + logfile_text = get_log_text_list(settings.get("PORTAGE_LOG_FILE")) + # FIXME to support more errors and stuff + i = 0 + build_log_dict = {} + error_log_list = [] + qa_error_list = [] + repoman_error_list = [] + sum_build_log_list = [] + for textline in logfile_text: + error_log_list = search_info(textline, error_log_list) + error_log_list, sum_build_log_list = search_error(logfile_text, textline, error_log_list, sum_build_log_list, i) + qa_error_list, error_log_list = search_qa(logfile_text, textline, qa_error_list, error_log_list, i) + i = i +1 + # Run repoman check_repoman() + repoman_error_list = init_repoman.check_repoman(build_dict['categories'], build_dict['package'], build_dict['ebuild_version'], build_dict['config_profile']) + if repoman_error_list != []: + sum_build_log_list.append("repoman") + if qa_error_list != []: + sum_build_log_list.append("qa") + build_log_dict['repoman_error_list'] = repoman_error_list + build_log_dict['qa_error_list'] = qa_error_list + build_log_dict['error_log_list'] = error_log_list + build_log_dict['summary_error_list'] = sum_build_log_list + return build_log_dict + +def write_msg_file(msg, log_path): + """ + Output msg to stdout if not self._background. If log_path + is not None then append msg to the log (appends with + compression if the filename extension of log_path + corresponds to a supported compression type). 
+ """ + msg_shown = False + if log_path is not None: + try: + f = open(_unicode_encode(log_path, encoding=_encodings['fs'], errors='strict'), mode='ab') - f_real = f - except IOError as e: - if e.errno not in (errno.ENOENT, errno.ESTALE): - raise - if not msg_shown: - writemsg_level(msg, level=level, noiselevel=noiselevel) + f_real = f + except IOError as e: + if e.errno not in (errno.ENOENT, errno.ESTALE): + raise + if not msg_shown: + writemsg_level(msg, level=level, noiselevel=noiselevel) else: - if log_path.endswith('.gz'): # NOTE: The empty filename argument prevents us from # triggering a bug in python3 which causes GzipFile @@ -534,45 +220,98 @@ class gobs_buildlog(object): if f_real is not f: f_real.close() - def add_buildlog_main(self, settings, pkg, trees): - conn=CM.getConnection() - build_dict = self.get_build_dict_db(settings, pkg) - build_log_dict = {} - build_log_dict = self.get_buildlog_info(settings, build_dict) - sum_build_log_list = build_log_dict['summary_error_list'] - error_log_list = build_log_dict['error_log_list'] - build_error = "" - if error_log_list != []: - for log_line in error_log_list: - build_error = build_error + log_line - summary_error = "" - if sum_build_log_list != []: - for sum_log_line in sum_build_log_list: - summary_error = summary_error + " " + sum_log_line - build_log_dict['logfilename'] = settings.get("PORTAGE_LOG_FILE").split(self._config_profile)[1] - log_msg = "Logfile name: %s" % (settings.get("PORTAGE_LOG_FILE"),) +def add_buildlog_main(settings, pkg, trees): + conn=CM.getConnection() + build_dict = get_build_dict_db(settings, pkg) + build_log_dict = {} + build_log_dict = get_buildlog_info(settings, build_dict) + sum_build_log_list = build_log_dict['summary_error_list'] + error_log_list = build_log_dict['error_log_list'] + build_error = "" + if error_log_list != []: + for log_line in error_log_list: + build_error = build_error + log_line + summary_error = "" + if sum_build_log_list != []: + for sum_log_line in sum_build_log_list: + summary_error = summary_error + " " + sum_log_line + build_log_dict['logfilename'] = settings.get("PORTAGE_LOG_FILE").split(self._config_profile)[1] + log_msg = "Logfile name: %s" % (settings.get("PORTAGE_LOG_FILE"),) + add_gobs_logs(conn, log_msg, "info", config_profile) + if build_dict['queue_id'] is None: + build_id = .add_new_ebuild_buildlog(settings, pkg, build_dict, build_error, summary_error, build_log_dict) + else: + build_id = move_queru_buildlog(conn, build_dict['queue_id'], build_error, summary_error, build_log_dict) + # update_qa_repoman(conn, build_id, build_log_dict) + msg = "" + emerge_info_logfilename = settings.get("PORTAGE_LOG_FILE")[:-3] + "emerge_log.log" + if build_id is not None: + for msg_line in msg: + write_msg_file(msg_line, emerge_info_logfilename) + os.chmod(settings.get("PORTAGE_LOG_FILE"), 0o664) + os.chmod(emerge_info_logfilename, 0o664) + log_msg = "Package: %s logged to db." % (pkg.cpv,) add_gobs_logs(conn, log_msg, "info", self._config_profile) - if build_dict['queue_id'] is None: - build_id = self.add_new_ebuild_buildlog(settings, pkg, build_dict, build_error, summary_error, build_log_dict) + else: + # FIXME Remove the log some way so + # mergetask._locate_failure_log(x) works in action_build() + #try: + # os.remove(settings.get("PORTAGE_LOG_FILE")) + #except: + # pass + log_msg = "Package %s NOT logged to db." 
% (pkg.cpv,) + add_gobs_logs(conn, log_msg, "info", config_profile) + CM.putConnection(conn) + +def log_fail_queru(build_dict, settings): + config = gobs_settings_dict['gobs_config'] + conn=CM.getConnection() + print('build_dict', build_dict) + fail_querue_dict = get_fail_querue_dict(conn, build_dict) + print('fail_querue_dict', fail_querue_dict) + if fail_querue_dict is None: + fail_querue_dict = {} + fail_querue_dict['build_job_id'] = build_dict['build_job_id'] + fail_querue_dict['fail_type'] = build_dict['type_fail'] + fail_querue_dict['fail_times'] = 1 + print('fail_querue_dict', fail_querue_dict) + add_fail_querue_dict(conn, fail_querue_dict) + else: + if fail_querue_dict['fail_times'][0] < 6: + fail_querue_dict['fail_times'] = fail_querue_dict['fail_times'][0] + 1 + fail_querue_dict['build_job_id'] = build_dict['build_job_id'] + fail_querue_dict['fail_type'] = build_dict['type_fail'] + update_fail_times(conn, fail_querue_dict) + CM.putConnection(conn) + return else: - build_id = move_queru_buildlog(conn, build_dict['queue_id'], build_error, summary_error, build_log_dict) - # update_qa_repoman(conn, build_id, build_log_dict) - msg = self.action_info(settings, trees) - emerge_info_logfilename = settings.get("PORTAGE_LOG_FILE")[:-3] + "emerge_log.log" - if build_id is not None: - for msg_line in msg: - self.write_msg_file(msg_line, emerge_info_logfilename) - os.chmod(settings.get("PORTAGE_LOG_FILE"), 0o664) - os.chmod(emerge_info_logfilename, 0o664) - log_msg = "Package: %s logged to db." % (pkg.cpv,) - add_gobs_logs(conn, log_msg, "info", self._config_profile) - else: - # FIXME Remove the log some way so - # mergetask._locate_failure_log(x) works in action_build() - #try: - # os.remove(settings.get("PORTAGE_LOG_FILE")) - #except: - # pass - log_msg = "Package %s NOT logged to db." 
% (pkg.cpv,) - add_gobs_logs(conn, log_msg, "info", self._config_profile) - CM.putConnection(conn) + build_log_dict = {} + error_log_list = [] + qa_error_list = [] + repoman_error_list = [] + sum_build_log_list = [] + sum_build_log_list.append("fail") + error_log_list.append(build_dict['type_fail']) + build_log_dict['repoman_error_list'] = repoman_error_list + build_log_dict['qa_error_list'] = qa_error_list + build_log_dict['summary_error_list'] = sum_build_log_list + if build_dict['type_fail'] == 'merge fail': + error_log_list = [] + for k, v in build_dict['failed_merge'].iteritems(): + error_log_list.append(v['fail_msg']) + build_log_dict['error_log_list'] = error_log_list + build_error = "" + if error_log_list != []: + for log_line in error_log_list: + build_error = build_error + log_line + summary_error = "" + if sum_build_log_list != []: + for sum_log_line in sum_build_log_list: + summary_error = summary_error + " " + sum_log_line + if settings.get("PORTAGE_LOG_FILE") is not None: + build_log_dict['logfilename'] = settings.get("PORTAGE_LOG_FILE").split(config_profile)[1] + os.chmod(settings.get("PORTAGE_LOG_FILE"), 0o224) + else: + build_log_dict['logfilename'] = "" + move_queru_buildlog(conn, build_dict['build_job_id'], build_error, summary_error, build_log_dict) + CM.putConnection(conn) \ No newline at end of file diff --git a/gobs/pym/build_queru.py b/gobs/pym/build_queru.py index 28d8352..c071aaf 100644 --- a/gobs/pym/build_queru.py +++ b/gobs/pym/build_queru.py @@ -25,59 +25,7 @@ from portage import _unicode_decode from portage.versions import cpv_getkey from portage.dep import check_required_use from gobs.main import emerge_main - -def log_fail_queru(build_dict, settings): - config = gobs_settings_dict['gobs_config'] - conn=CM.getConnection() - print('build_dict', build_dict) - fail_querue_dict = get_fail_querue_dict(conn, build_dict) - print('fail_querue_dict', fail_querue_dict) - if fail_querue_dict is None: - fail_querue_dict = {} - fail_querue_dict['build_job_id'] = build_dict['build_job_id'] - fail_querue_dict['fail_type'] = build_dict['type_fail'] - fail_querue_dict['fail_times'] = 1 - print('fail_querue_dict', fail_querue_dict) - add_fail_querue_dict(conn, fail_querue_dict) - else: - if fail_querue_dict['fail_times'][0] < 6: - fail_querue_dict['fail_times'] = fail_querue_dict['fail_times'][0] + 1 - fail_querue_dict['build_job_id'] = build_dict['build_job_id'] - fail_querue_dict['fail_type'] = build_dict['type_fail'] - update_fail_times(conn, fail_querue_dict) - CM.putConnection(conn) - return - else: - build_log_dict = {} - error_log_list = [] - qa_error_list = [] - repoman_error_list = [] - sum_build_log_list = [] - sum_build_log_list.append("fail") - error_log_list.append(build_dict['type_fail']) - build_log_dict['repoman_error_list'] = repoman_error_list - build_log_dict['qa_error_list'] = qa_error_list - build_log_dict['summary_error_list'] = sum_build_log_list - if build_dict['type_fail'] == 'merge fail': - error_log_list = [] - for k, v in build_dict['failed_merge'].iteritems(): - error_log_list.append(v['fail_msg']) - build_log_dict['error_log_list'] = error_log_list - build_error = "" - if error_log_list != []: - for log_line in error_log_list: - build_error = build_error + log_line - summary_error = "" - if sum_build_log_list != []: - for sum_log_line in sum_build_log_list: - summary_error = summary_error + " " + sum_log_line - if settings.get("PORTAGE_LOG_FILE") is not None: - build_log_dict['logfilename'] = settings.get("PORTAGE_LOG_FILE").split(config)[1] - 
os.chmod(settings.get("PORTAGE_LOG_FILE"), 0o224) - else: - build_log_dict['logfilename'] = "" - move_queru_buildlog(conn, build_dict['build_job_id'], build_error, summary_error, build_log_dict) - CM.putConnection(conn) +from gobs.build_log import log_fail_queru class queruaction(object): diff --git a/gobs/pym/pgsql_querys.py b/gobs/pym/pgsql_querys.py index c3d2784..d031bde 100644 --- a/gobs/pym/pgsql_querys.py +++ b/gobs/pym/pgsql_querys.py @@ -323,12 +323,9 @@ def get_profile_checksum(connection, config_profile): def get_packages_to_build(connection, config): cursor =connection.cursor() - sqlQ1 = "SELECT build_job_id.build_jobs, ebuild_id.build_jobs, package_id.ebuilds FROM build_jobs, ebuilds WHERE \ - config_id.build_jobs = (SELECT config_id FROM configs WHERE config = %s) \ - AND extract(epoch from (NOW()) - time_stamp.build_jobs) > 7200 AND ebuild_id.build_jobs = ebuild_id.ebuilds \ - AND ebuilds.active = 'True' ORDER BY LIMIT 1" + sqlQ1 = "SELECT build_jobs.build_job_id, build_jobs.ebuild_id, ebuilds.package_id FROM build_jobs, ebuilds WHERE build_jobs.config_id = (SELECT config_id FROM configs WHERE config = %s) AND build_jobs.ebuild_id = ebuilds.ebuild_id AND ebuilds.active = 'True' AND extract(epoch from (NOW()) - build_jobs.time_stamp) > 7200 ORDER BY build_jobs.build_job_id LIMIT 1" sqlQ2 = 'SELECT version, checksum FROM ebuilds WHERE ebuild_id = %s' - sqlQ3 = 'SELECT flag.uses, status.build_jobs_use FROM build_jobs_use, uses WHERE build_job_id.build_jobs_use = %s use_id.build_jobs_use = use_id.uses' + sqlQ3 = 'SELECT uses.flag, build_jobs_use.status FROM build_jobs_use, uses WHERE build_jobs_use.build_job_id = %s AND build_jobs.use_id = uses.use_id' cursor.execute(sqlQ1, (config,)) build_dict={} entries = cursor.fetchone()