Summary: infer/lib/python/'s not pinin'! It's passed on! This library is no more! It has ceased to be! It's expired and gone to meet its maker! It's a stiff! Bereft of life, it rests in peace! If you hadn't nailed it to the perch it'd be pushing up the daisies! 'ts metabolic processes are now 'istory! It's off the twig! It's kicked the bucket, it's shuffled off its mortal coil, run down the curtain and joined the bleedin' choir invisible!! THIS IS AN EX-PYTHON!! Reviewed By: ngorogiannis Differential Revision: D20672771 fbshipit-source-id: 7808c0ecemaster
parent
93f51a063b
commit
1b2829b352
@ -1,390 +0,0 @@
|
|||||||
#!/usr/bin/env python2.7
|
|
||||||
|
|
||||||
# Copyright (c) Facebook, Inc. and its affiliates.
|
|
||||||
#
|
|
||||||
# This source code is licensed under the MIT license found in the
|
|
||||||
# LICENSE file in the root directory of this source tree.
|
|
||||||
|
|
||||||
from __future__ import absolute_import
|
|
||||||
from __future__ import division
|
|
||||||
from __future__ import print_function
|
|
||||||
from __future__ import unicode_literals
|
|
||||||
|
|
||||||
import argparse
|
|
||||||
import codecs
|
|
||||||
import json
|
|
||||||
import os
|
|
||||||
import re
|
|
||||||
import shutil
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
|
|
||||||
from inferlib import colorize, config, issues, source, utils
|
|
||||||
|
|
||||||
HTML_REPORT_DIR = 'report.html'
|
|
||||||
TRACES_REPORT_DIR = 'traces'
|
|
||||||
SOURCE_REMOTE_GITHUB_URL_TEMPLATE = ('https://github.com/{project}/blob/'
|
|
||||||
'{hash}/{relative-path}/'
|
|
||||||
'{file-name}#L{line-number}')
|
|
||||||
SOURCE_REMOTE_GITHUB_RE = re.compile('.*github.com[:/](?P<project>.*)')
|
|
||||||
|
|
||||||
|
|
||||||
# Command-line parser for the infer-explore entry point; also reused by
# show_error_and_exit to print usage on bad input.
base_parser = argparse.ArgumentParser(
    description='Explore the error traces in Infer reports.')
base_parser.add_argument('-o', '--out', metavar='<directory>',
                         default=config.DEFAULT_INFER_OUT, dest='infer_out',
                         action=utils.AbsolutePathAction,
                         help='Set the Infer results directory')
base_parser.add_argument('--only-show',
                         action='store_true',
                         help='Show the list of reports and exit')
base_parser.add_argument('--no-source',
                         action='store_true',
                         help='Do not print code excerpts')
base_parser.add_argument('--select',
                         metavar='N',
                         type=int,
                         help='Select bug number N. '
                         'If omitted, prompts you for input.')
base_parser.add_argument('--max-level',
                         metavar='N',
                         type=int,
                         help='Level of nested procedure calls to show. '
                         'By default, all levels are shown.')
base_parser.add_argument('--html',
                         action='store_true',
                         help='Generate HTML report.')
|
|
||||||
|
|
||||||
|
|
||||||
def show_error_and_exit(err, show_help):
    """Report *err* on stderr and terminate with exit status 1.

    When *show_help* is true, the command-line usage of the tool is
    printed after the error message.
    """
    utils.stderr(err)
    if show_help:
        print('')
        base_parser.print_help()
    exit(1)
|
|
||||||
|
|
||||||
|
|
||||||
class Tracer(object):
    """Accumulates the textual rendering of one bug trace into an
    Indenter, one trace step (node) at a time, indenting the source
    excerpt of each step according to its nesting level.
    """

    def __init__(self, args, level=sys.maxsize):
        # level: deepest trace level to display (default: unbounded)
        self.args = args
        self.max_level = level
        self.indenter = source.Indenter()

    def build_node_tags(self, node):
        # placeholder, currently does nothing
        pass

    def build_node(self, node):
        """Append one trace step (location + description and, unless
        --no-source was given, a source excerpt) to the indenter,
        skipping nodes nested deeper than max_level."""
        if node[issues.JSON_INDEX_TRACE_LEVEL] > self.max_level:
            return

        report_line = node[issues.JSON_INDEX_TRACE_LINE]
        report_col = node[issues.JSON_INDEX_TRACE_COLUMN]
        fname = node[issues.JSON_INDEX_TRACE_FILENAME]

        self.indenter.newline()
        self.indenter.add('%s:%d:%d: %s' % (
            fname,
            report_line,
            report_col,
            node[issues.JSON_INDEX_TRACE_DESCRIPTION],
        ))
        self.indenter.newline()

        if not self.args.no_source:
            # indent the excerpt proportionally to the nesting level
            self.indenter.indent_push(node[issues.JSON_INDEX_TRACE_LEVEL])
            mode = colorize.TERMINAL_FORMATTER
            if self.args.html:
                # HTML traces go to files: no terminal escape codes
                mode = colorize.PLAIN_FORMATTER
            empty_desc = len(node[issues.JSON_INDEX_TRACE_DESCRIPTION]) == 0
            self.indenter.add(source.build_source_context(fname,
                                                          mode,
                                                          report_line,
                                                          report_col,
                                                          empty_desc
                                                          ))
            self.indenter.indent_pop()
            self.indenter.newline()

    def build_trace(self, trace):
        """Append a 'Showing ... steps of the trace' header, then every
        node of *trace*."""
        total_nodes = len(trace)
        # Python 2: filter() returns a list, so len() is valid here
        hidden_nodes = len(
            filter(lambda n: n[issues.JSON_INDEX_TRACE_LEVEL] > self.max_level,
                   trace))
        shown_nodes = total_nodes - hidden_nodes
        hidden_str = ''
        all_str = 'all '
        if hidden_nodes > 0:
            hidden_str = ' (%d steps too deeply nested)' % hidden_nodes
            all_str = ''
        self.indenter.add('Showing %s%d steps of the trace%s\n\n'
                          % (all_str, shown_nodes, hidden_str))
        self.indenter.newline()

        for node in trace:
            self.build_node(node)

    def build_report(self, report):
        """Render the whole trace of *report* into the indenter."""
        self.build_trace(report[issues.JSON_INDEX_TRACE])

    def __unicode__(self):
        # Python 2: delegate text rendering to the indenter
        return unicode(self.indenter)

    def __str__(self):
        return str(self.indenter)
|
|
||||||
|
|
||||||
|
|
||||||
class Selector(object):
    """Holds the subset of reports that have a non-empty trace, and lets
    the user pick one report (via --select or an interactive prompt) and
    a maximum trace nesting level.
    """

    def __init__(self, args, reports):
        self.args = args

        def has_trace(report):
            return len(report[issues.JSON_INDEX_TRACE]) > 0
        # only reports with a trace can be explored
        self.reports = [report for report in reports if has_trace(report)]

    def show_choices(self):
        """Print the numbered list of selectable reports on stdout."""
        n = 0
        n_length = len(str(len(self)))
        for report in self.reports:
            # the goal is to get the following output for each report:
            # 1234. <first line of report #1234 goes here>
            #       <second line of report goes here>
            msg = issues.text_of_report(report) \
                .replace('\n', '\n%s' % ((n_length + 2) * ' '))
            utils.stdout('%s. %s\n' % (str(n).rjust(n_length), msg))
            n += 1

    def prompt_report(self):
        """Return the report chosen via --select, or by prompting the
        user (auto-selects when there is a single report)."""
        report_number = 0
        if self.args.select is not None:
            report_number = self.parse_report_number(self.args.select, True)
        else:
            self.show_choices()

            if len(self) > 1:
                # NOTE: raw_input is Python 2 only
                report_number_str = raw_input(
                    'Choose report to display (default=0): ')
                if report_number_str != '':
                    report_number = self.parse_report_number(report_number_str)
            elif len(self) == 1:
                print('Auto-selecting the only report.')

        return self.reports[report_number]

    def prompt_level(self):
        """Return the maximum trace level to display (from --max-level;
        unbounded when the option was not given)."""
        return self.parse_max_level(self.args.max_level, True)

    def parse_report_number(self, s, show_help=False):
        """Parse *s* as a 0-based report index; exit with an error
        message (and optionally usage) when it is not a valid index."""
        try:
            n = int(s)
        except ValueError:
            show_error_and_exit(
                'ERROR: integer report number expected',
                show_help)

        if n >= len(self) or n < 0:
            show_error_and_exit('ERROR: invalid report number.', show_help)

        return n

    def parse_max_level(self, s, show_help=False):
        """Parse *s* as a non-negative maximum level; None means
        unbounded. Exits with an error message when invalid."""
        if s is None:
            return sys.maxsize

        try:
            n = int(s)
        except ValueError:
            show_error_and_exit(
                'ERROR: integer max level expected',
                show_help)

        if n < 0:
            show_error_and_exit('ERROR: invalid max level.', show_help)

        return n

    def __len__(self):
        return len(self.reports)

    def __iter__(self):
        return self.reports.__iter__()

    def __next__(self):
        return self.reports.__next__()
|
|
||||||
|
|
||||||
|
|
||||||
def path_of_bug_number(traces_dir, i):
    """Return the path of the trace file for the 0-based bug index *i*
    inside *traces_dir* (files are numbered starting from 1)."""
    filename = 'bug_%d.txt' % (i + 1)
    return os.path.join(traces_dir, filename)
|
|
||||||
|
|
||||||
|
|
||||||
def url_of_bug_number(i):
    """Return the report-relative URL of the trace page for the 0-based
    bug index *i* (pages are numbered starting from 1)."""
    return '{0}/bug_{1}.txt'.format(TRACES_REPORT_DIR, i + 1)
|
|
||||||
|
|
||||||
|
|
||||||
def get_remote_source_template():
    """Return a template that given 'file-name' and 'line-number' entries
    gives a remote url to that source location. Return the empty
    template if no remote source has been detected. Currently only
    detects GitHub projects.
    """
    # see if we are in a GitHub project clone
    try:
        git_remote = subprocess.check_output(
            ['git',
             'config',
             '--get',
             'remote.origin.url']).decode().strip()
        m = SOURCE_REMOTE_GITHUB_RE.match(git_remote)
        if m is not None:
            project = m.group('project')
            # some remotes end in .git, but the http urls don't have
            # these
            if project.endswith('.git'):
                project = project[:-len('.git')]
            utils.stdout('Detected GitHub project %s' % project)
            hash = subprocess.check_output(
                ['git',
                 'rev-parse',
                 'HEAD']).decode().strip()
            root = subprocess.check_output(
                ['git',
                 'rev-parse',
                 '--show-toplevel']).decode().strip()
            # FIXME(t8921813): we should have a way to get absolute
            # paths in traces. In the meantime, trust that we run from
            # the same directory from which infer was run.
            relative_path = os.path.relpath(os.getcwd(), root)
            # pre-fill all entries except file-name/line-number, which
            # stay as placeholders for the caller to fill in later
            d = {
                'project': project,
                'hash': hash,
                'relative-path': relative_path,
                'file-name': '{file-name}',
                'line-number': '{line-number}',
            }
            return SOURCE_REMOTE_GITHUB_URL_TEMPLATE.format(**d)
    except subprocess.CalledProcessError:
        # not inside a git clone (or git failed): fall through to None
        pass

    return None
|
|
||||||
|
|
||||||
|
|
||||||
def html_bug_trace(args, report, bug_id):
    """Return the full text of one bug page for the HTML report: the
    report summary line followed by the expanded trace.

    NOTE: bug_id is currently unused.
    """
    bug_trace = ''
    bug_trace += '%s\n' % issues.text_of_report(report)
    tracer = Tracer(args)
    tracer.build_report(report)
    bug_trace += unicode(tracer)  # Python 2: renders via __unicode__
    return bug_trace
|
|
||||||
|
|
||||||
|
|
||||||
def html_list_of_bugs(args, remote_source_template, selector):
    """Return the HTML index page listing every selected bug, with a
    link to each bug's trace page and, when *remote_source_template* is
    known, a link to the remote source location."""
    # skeleton of the index page
    template = '\n'.join([
        '<html>',
        '<head>',
        '<title>Infer found {num-bugs} bugs</title>',
        '</head>',
        '<body>',
        '<h2>List of bugs found</h2>',
        '{list-of-bugs}',
        '</body>',
        '</html>',
    ])

    # one list item per bug
    report_template = '\n'.join([
        '<li>',
        '{description}',
        '({source-uri}<a href="{trace-url}">trace</a>)',
        '</li>',
    ])

    def source_uri(report):
        # build the '<a ...>source</a> | ' prefix, or '' when no remote
        # source template was detected
        d = {
            'file-name': report[issues.JSON_INDEX_FILENAME],
            'line-number': report[issues.JSON_INDEX_LINE],
        }
        if remote_source_template is not None:
            link = remote_source_template.format(**d)
            return '<a href="%s">source</a> | ' % link
        return ''

    i = 0
    list_of_bugs = '<ol>'
    for report in selector:
        d = {
            'description': issues.text_of_report(report),
            'trace-url': url_of_bug_number(i),
            'source-uri': source_uri(report),
        }
        list_of_bugs += report_template.format(**d)
        i += 1
    list_of_bugs += '</ol>'

    d = {
        'num-bugs': len(selector),
        'list-of-bugs': list_of_bugs,
    }
    return template.format(**d)
|
|
||||||
|
|
||||||
|
|
||||||
def generate_html_report(args, reports):
    """Write the full HTML report under <infer-out>/report.html/: an
    index page plus one trace file per bug under traces/."""
    # start from a clean report directory
    html_dir = os.path.join(args.infer_out, HTML_REPORT_DIR)
    shutil.rmtree(html_dir, True)  # ignore_errors: ok if it didn't exist
    utils.mkdir_if_not_exists(html_dir)

    traces_dir = os.path.join(html_dir, TRACES_REPORT_DIR)
    utils.mkdir_if_not_exists(traces_dir)

    sel = Selector(args, reports)

    # one text file per bug trace
    i = 0
    for bug in sel:
        bug_trace_path = path_of_bug_number(traces_dir, i)
        with codecs.open(bug_trace_path, 'w',
                         encoding=config.CODESET,
                         errors='xmlcharrefreplace') as bug_trace_file:
            bug_trace_file.write(html_bug_trace(args, bug, i))
        i += 1

    # index page linking to all the trace files
    remote_source_template = get_remote_source_template()
    bug_list_path = os.path.join(html_dir, 'index.html')
    with codecs.open(bug_list_path, 'w',
                     encoding=config.CODESET,
                     errors='xmlcharrefreplace') as bug_list_file:
        bug_list_file.write(html_list_of_bugs(args,
                                              remote_source_template,
                                              sel))

    utils.stdout('Saved html report in:\n%s' % bug_list_path)
|
|
||||||
|
|
||||||
|
|
||||||
def main():
    """Entry point: load the JSON report from the results directory and
    either generate the HTML report, list the bugs, or interactively
    display one bug trace."""
    args = base_parser.parse_args()

    report_filename = os.path.join(args.infer_out, config.JSON_REPORT_FILENAME)
    reports = utils.load_json_from_path(report_filename)

    if args.html:
        generate_html_report(args, reports)
        exit(0)

    sel = Selector(args, reports)

    if len(sel) == 0:
        print('No issues found')
        exit(0)

    if args.only_show:
        # --only-show: list the reports and stop
        sel.show_choices()
        exit(0)

    # interactive mode: pick a report and a max nesting level
    report = sel.prompt_report()
    max_level = sel.prompt_level()

    utils.stdout(issues.text_of_report(report))

    tracer = Tracer(args, max_level)
    tracer.build_report(report)
    utils.stdout(unicode(tracer))  # Python 2: renders via __unicode__


if __name__ == '__main__':
    main()
|
|
@ -1,4 +0,0 @@
|
|||||||
# Copyright (c) Facebook, Inc. and its affiliates.
|
|
||||||
#
|
|
||||||
# This source code is licensed under the MIT license found in the
|
|
||||||
# LICENSE file in the root directory of this source tree.
|
|
@ -1,97 +0,0 @@
|
|||||||
# Copyright (c) Facebook, Inc. and its affiliates.
|
|
||||||
#
|
|
||||||
# This source code is licensed under the MIT license found in the
|
|
||||||
# LICENSE file in the root directory of this source tree.
|
|
||||||
|
|
||||||
from __future__ import absolute_import
|
|
||||||
from __future__ import division
|
|
||||||
from __future__ import print_function
|
|
||||||
from __future__ import unicode_literals
|
|
||||||
|
|
||||||
import sys
|
|
||||||
|
|
||||||
try:
|
|
||||||
import pygments
|
|
||||||
import pygments.formatters
|
|
||||||
import pygments.lexers
|
|
||||||
except ImportError:
|
|
||||||
pygments = None
|
|
||||||
|
|
||||||
# syntax highlighting modes
|
|
||||||
PLAIN_FORMATTER = 0
|
|
||||||
TERMINAL_FORMATTER = 1
|
|
||||||
|
|
||||||
|
|
||||||
def terminal_only(s):
    """Return *s* when stdout is attached to a terminal and the empty
    string otherwise, so ANSI escape codes are only emitted for
    interactive output."""
    return s if sys.stdout.isatty() else ''
|
|
||||||
|
|
||||||
|
|
||||||
# ANSI escape sequences; each is the empty string when stdout is not a
# terminal (see terminal_only above).
BLUE = terminal_only('\033[34m')
BLUE_BG = terminal_only('\033[44m')
MAGENTA = terminal_only('\033[35m')
MAGENTA_BG = terminal_only('\033[45m')
BRIGHT = terminal_only('\033[1m')
DIM = terminal_only('\033[2m')
GREEN = terminal_only('\033[32m')
RED = terminal_only('\033[31m')
RESET = terminal_only('\033[0m')
WHITE = terminal_only('\033[37m')
WHITE_BG = terminal_only('\033[47m')
YELLOW = terminal_only('\033[33m')

# composite styles built from the raw sequences above
HEADER = BRIGHT
SUCCESS = BLUE_BG + WHITE + BRIGHT

# one style per issue severity
ERROR = RED
WARNING = YELLOW
ADVICE = BLUE
LIKE = GREEN
|
|
||||||
|
|
||||||
|
|
||||||
class Invalid_mode(Exception):
    """Raised when a formatting mode other than PLAIN_FORMATTER or
    TERMINAL_FORMATTER is requested."""
|
|
||||||
|
|
||||||
|
|
||||||
def syntax_highlighting(source_name, mode, s):
    """Return *s* syntax-highlighted for the terminal using the pygments
    lexer matching *source_name*.

    Returns *s* unchanged when pygments is not installed, the mode is
    PLAIN_FORMATTER, no lexer matches the file name, or stdout is not a
    terminal.
    """
    if pygments is None or mode == PLAIN_FORMATTER:
        return s

    try:
        lexer = pygments.lexers.get_lexer_for_filename(source_name)
    except pygments.lexers.ClassNotFound:
        # unknown file type: nothing to highlight
        return s

    formatter = None
    if mode == TERMINAL_FORMATTER:
        if not sys.stdout.isatty():
            return s
        formatter = pygments.formatters.TerminalFormatter()
    # there's a bug in pygments.highlight() where it will remove all starting
    # newline characters, so we have to add them back!
    initial_newlines = ''
    i = 0
    while (i < len(s) and s[i] == '\n'):
        initial_newlines += '\n'
        i += 1
    # pygments.highlight() also insists that all string end with exactly one
    # newline character regardless of the input string!
    final_newlines = ''
    i = 1
    while (i <= len(s) and s[-i] == '\n'):
        final_newlines += '\n'
        i += 1
    colorized_string = pygments.highlight(s, lexer, formatter)
    # strip the result from pygments.highlight() to get rid of the
    # potentially spurious final newline, and also to continue to
    # work in case the bugs in pygments.highlight() gets fixed.
    return initial_newlines + colorized_string.strip('\n') + final_newlines
|
|
||||||
|
|
||||||
|
|
||||||
def color(s, color, mode):
    """Wrap *s* in the ANSI escape sequence *color* (terminated by
    RESET) when *mode* is TERMINAL_FORMATTER; return *s* unchanged for
    PLAIN_FORMATTER.

    Raises Invalid_mode for any other mode.
    """
    if mode == PLAIN_FORMATTER:
        return s
    if mode == TERMINAL_FORMATTER:
        return '%s%s%s' % (color, s, RESET)
    raise Invalid_mode()
|
|
@ -1,59 +0,0 @@
|
|||||||
# Copyright (c) Facebook, Inc. and its affiliates.
|
|
||||||
#
|
|
||||||
# This source code is licensed under the MIT license found in the
|
|
||||||
# LICENSE file in the root directory of this source tree.
|
|
||||||
|
|
||||||
from __future__ import absolute_import
|
|
||||||
from __future__ import division
|
|
||||||
from __future__ import print_function
|
|
||||||
from __future__ import unicode_literals
|
|
||||||
|
|
||||||
import locale
|
|
||||||
import os
|
|
||||||
|
|
||||||
|
|
||||||
# Determine the codeset used to decode/encode all text I/O; fall back
# to ASCII when the locale cannot be determined.
try:
    locale.setlocale(locale.LC_ALL, '')
    CODESET = locale.getlocale(locale.LC_CTYPE)[1]
except:
    CODESET = None
if CODESET is None:
    CODESET = 'ascii'

# this assumes that this file lives in infer/lib/python/infer/ and the binaries
# are in infer/bin/
# NOTE: .decode() on the result of os.path.realpath is Python 2 only
INFER_PYTHON_DIRECTORY = os.path.dirname(os.path.realpath(__file__)
                                         .decode(CODESET))
INFER_INFER_DIRECTORY = os.path.join(INFER_PYTHON_DIRECTORY,
                                     os.pardir, os.pardir, os.pardir)
INFER_ROOT_DIRECTORY = os.path.join(INFER_INFER_DIRECTORY, os.pardir)
FCP_DIRECTORY = os.path.join(INFER_ROOT_DIRECTORY, 'facebook-clang-plugins')
LIB_DIRECTORY = os.path.join(INFER_INFER_DIRECTORY, 'lib')
BIN_DIRECTORY = os.path.join(INFER_INFER_DIRECTORY, 'bin')
JAVA_LIB_DIRECTORY = os.path.join(LIB_DIRECTORY, 'java')
MODELS_JAR = os.path.join(JAVA_LIB_DIRECTORY, 'models.jar')
WRAPPERS_DIRECTORY = os.path.join(LIB_DIRECTORY, 'wrappers')

# default results directory: ./infer-out in the current directory
DEFAULT_INFER_OUT = os.path.join(os.getcwd().decode(CODESET), 'infer-out')

# names of the files infer writes into its results directory
JSON_REPORT_FILENAME = 'report.json'
JSON_COSTS_REPORT_FILENAME = 'costs-report.json'
INFER_BUCK_DEPS_FILENAME = 'infer-deps.txt'
BUGS_FILENAME = 'report.txt'
JAVAC_FILELISTS_FILENAME = 'filelists'
PMD_XML_FILENAME = 'report.xml'

# subdirectories used by the iOS capture
IOS_CAPTURE_ERRORS = 'errors'
IOS_BUILD_OUTPUT = 'build_output'

LOG_FILE = 'infer.py.log'

# name of infer's results directory inside buck-out
BUCK_INFER_OUT = 'infer'

BUCK_OUT = 'buck-out'

TRASH = '.trash'

BUCK_OUT_TRASH = os.path.join(BUCK_OUT, TRASH)

BUCK_OUT_GEN = os.path.join(BUCK_OUT, 'gen')
|
|
@ -1,178 +0,0 @@
|
|||||||
# Copyright (c) Facebook, Inc. and its affiliates.
|
|
||||||
#
|
|
||||||
# This source code is licensed under the MIT license found in the
|
|
||||||
# LICENSE file in the root directory of this source tree.
|
|
||||||
|
|
||||||
from __future__ import absolute_import
|
|
||||||
from __future__ import division
|
|
||||||
from __future__ import print_function
|
|
||||||
from __future__ import unicode_literals
|
|
||||||
|
|
||||||
import codecs
|
|
||||||
import datetime
|
|
||||||
import itertools
|
|
||||||
import operator
|
|
||||||
import os
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
|
|
||||||
try:
|
|
||||||
from lxml import etree
|
|
||||||
except ImportError:
|
|
||||||
etree = None
|
|
||||||
|
|
||||||
from . import colorize, config, source, utils
|
|
||||||
|
|
||||||
|
|
||||||
ISSUE_SEVERITY_ERROR = 'ERROR'
|
|
||||||
ISSUE_SEVERITY_WARNING = 'WARNING'
|
|
||||||
ISSUE_SEVERITY_INFO = 'INFO'
|
|
||||||
ISSUE_SEVERITY_ADVICE = 'ADVICE'
|
|
||||||
ISSUE_SEVERITY_LIKE = 'LIKE'
|
|
||||||
|
|
||||||
# field names in rows of json reports
|
|
||||||
JSON_INDEX_CENSORED_REASON = 'censored_reason'
|
|
||||||
JSON_INDEX_DOTTY = 'dotty'
|
|
||||||
JSON_INDEX_FILENAME = 'file'
|
|
||||||
JSON_INDEX_HASH = 'hash'
|
|
||||||
JSON_INDEX_INFER_SOURCE_LOC = 'infer_source_loc'
|
|
||||||
JSON_INDEX_ISL_FILE = 'file'
|
|
||||||
JSON_INDEX_ISL_LNUM = 'lnum'
|
|
||||||
JSON_INDEX_ISL_CNUM = 'cnum'
|
|
||||||
JSON_INDEX_ISL_ENUM = 'enum'
|
|
||||||
JSON_INDEX_SEVERITY = 'severity'
|
|
||||||
JSON_INDEX_LINE = 'line'
|
|
||||||
JSON_INDEX_PROCEDURE = 'procedure'
|
|
||||||
JSON_INDEX_QUALIFIER = 'qualifier'
|
|
||||||
JSON_INDEX_QUALIFIER_TAGS = 'qualifier_tags'
|
|
||||||
JSON_INDEX_TYPE = 'bug_type'
|
|
||||||
JSON_INDEX_TRACE = 'bug_trace'
|
|
||||||
JSON_INDEX_TRACE_LEVEL = 'level'
|
|
||||||
JSON_INDEX_TRACE_FILENAME = 'filename'
|
|
||||||
JSON_INDEX_TRACE_LINE = 'line_number'
|
|
||||||
JSON_INDEX_TRACE_COLUMN = 'column_number'
|
|
||||||
JSON_INDEX_TRACE_DESCRIPTION = 'description'
|
|
||||||
JSON_INDEX_TRACEVIEW_ID = 'traceview_id'
|
|
||||||
|
|
||||||
|
|
||||||
ISSUE_TYPES_URL = 'http://fbinfer.com/docs/infer-issue-types.html#'
|
|
||||||
|
|
||||||
|
|
||||||
def text_of_infer_loc(loc):
    """Render an infer-source-location record as
    ' (file:lnum:cnum-enum:)'."""
    fields = (loc[JSON_INDEX_ISL_FILE],
              loc[JSON_INDEX_ISL_LNUM],
              loc[JSON_INDEX_ISL_CNUM],
              loc[JSON_INDEX_ISL_ENUM])
    return ' ({}:{}:{}-{}:)'.format(*fields)
|
|
||||||
|
|
||||||
|
|
||||||
def text_of_report(report):
    """Render one report dict as
    'file:line: severity: bug_type[ (infer loc)]' followed by the
    indented qualifier message on the next line."""
    filename = report[JSON_INDEX_FILENAME]
    severity = report[JSON_INDEX_SEVERITY]
    line = report[JSON_INDEX_LINE]
    error_type = report[JSON_INDEX_TYPE]
    msg = report[JSON_INDEX_QUALIFIER]
    infer_loc = ''
    if JSON_INDEX_INFER_SOURCE_LOC in report:
        # location inside infer's own sources, when recorded
        infer_loc = text_of_infer_loc(report[JSON_INDEX_INFER_SOURCE_LOC])
    return '%s:%d: %s: %s%s\n  %s' % (
        filename,
        line,
        severity.lower(),
        error_type,
        infer_loc,
        msg,
    )
|
|
||||||
|
|
||||||
|
|
||||||
def _text_of_report_list(project_root, reports, bugs_txt_path, limit=None,
                         console_out=False,
                         formatter=colorize.TERMINAL_FORMATTER):
    """Render a list of report dicts as text: the (possibly truncated)
    bug list followed by a per-bug-type summary; return the combination.

    project_root: prefix used to locate source files for excerpts
    bugs_txt_path: path mentioned in the truncation message
    limit: maximum number of bugs to display (None = no limit)
    console_out: when true also print (bug list on stderr, summary on
        stdout)
    formatter: colorize.TERMINAL_FORMATTER or colorize.PLAIN_FORMATTER
    """
    n_issues = len(reports)
    if n_issues == 0:
        msg = 'No issues found'
        if formatter == colorize.TERMINAL_FORMATTER:
            msg = colorize.color(' %s ' % msg,
                                 colorize.SUCCESS, formatter)
        if console_out:
            utils.stderr(msg)
        return msg

    # render each displayed report with its source excerpt
    text_errors_list = []
    for report in reports[:limit]:
        filename = report[JSON_INDEX_FILENAME]
        line = report[JSON_INDEX_LINE]

        source_context = ''
        source_context = source.build_source_context(
            os.path.join(project_root, filename),
            formatter,
            line,
            1,
            True
        )
        indenter = source.Indenter() \
            .indent_push() \
            .add(source_context)
        source_context = '\n' + unicode(indenter)  # Python 2 unicode()

        msg = text_of_report(report)
        # color the message according to the report's severity
        if report[JSON_INDEX_SEVERITY] == ISSUE_SEVERITY_ERROR:
            msg = colorize.color(msg, colorize.ERROR, formatter)
        elif report[JSON_INDEX_SEVERITY] == ISSUE_SEVERITY_WARNING:
            msg = colorize.color(msg, colorize.WARNING, formatter)
        elif report[JSON_INDEX_SEVERITY] == ISSUE_SEVERITY_ADVICE:
            msg = colorize.color(msg, colorize.ADVICE, formatter)
        elif report[JSON_INDEX_SEVERITY] == ISSUE_SEVERITY_LIKE:
            msg = colorize.color(msg, colorize.LIKE, formatter)
        text = '%s%s' % (msg, source_context)
        text_errors_list.append(text)

    # count occurrences of each bug type for the summary
    error_types_count = {}
    for report in reports:
        t = report[JSON_INDEX_TYPE]
        # assert failures are not very informative without knowing
        # which assertion failed
        if t == 'Assert_failure' and JSON_INDEX_INFER_SOURCE_LOC in report:
            t += text_of_infer_loc(report[JSON_INDEX_INFER_SOURCE_LOC])
        if t not in error_types_count:
            error_types_count[t] = 1
        else:
            error_types_count[t] += 1

    max_type_length = max(map(len, error_types_count.keys())) + 2
    # Python 2: dict.items() returns a list, so in-place sort() works
    sorted_error_types = error_types_count.items()
    sorted_error_types.sort(key=operator.itemgetter(1), reverse=True)
    # Python 2 only: tuple-parameter unpacking in the lambda signature
    types_text_list = map(lambda (t, count): '%s: %d' % (
        t.rjust(max_type_length),
        count,
    ), sorted_error_types)

    text_errors = '\n\n'.join(text_errors_list)
    # NOTE: `None >= 0` is False in Python 2 (and a TypeError in 3), so
    # the truncation note is skipped when no limit is set
    if limit >= 0 and n_issues > limit:
        text_errors += colorize.color(
            ('\n\n...too many issues to display (limit=%d exceeded), please ' +
             'see %s or run `infer-explore` for the remaining issues.')
            % (limit, bugs_txt_path), colorize.HEADER, formatter)

    issues_found = 'Found {n_issues}'.format(
        n_issues=utils.get_plural('issue', n_issues),
    )
    bug_list = '{issues_found}\n\n{issues}\n\n'.format(
        issues_found=colorize.color(issues_found,
                                    colorize.HEADER,
                                    formatter),
        issues=text_errors,
    )
    summary = '{header}\n\n{summary}'.format(
        header=colorize.color('Summary of the reports',
                              colorize.HEADER, formatter),
        summary='\n'.join(types_text_list),
    )

    if console_out:
        utils.stderr(bug_list)
        utils.stdout(summary)

    return bug_list + summary
|
|
@ -1,110 +0,0 @@
|
|||||||
# Copyright (c) Facebook, Inc. and its affiliates.
|
|
||||||
#
|
|
||||||
# This source code is licensed under the MIT license found in the
|
|
||||||
# LICENSE file in the root directory of this source tree.
|
|
||||||
|
|
||||||
from __future__ import absolute_import
|
|
||||||
from __future__ import division
|
|
||||||
from __future__ import print_function
|
|
||||||
from __future__ import unicode_literals
|
|
||||||
|
|
||||||
import codecs
|
|
||||||
import os
|
|
||||||
|
|
||||||
from . import colorize, config, utils
|
|
||||||
|
|
||||||
BASE_INDENT = 2
|
|
||||||
# how many lines of context around each report
|
|
||||||
SOURCE_CONTEXT = 2
|
|
||||||
|
|
||||||
|
|
||||||
class Indenter(unicode):
    """Mutable text buffer that prefixes every added line with the
    current indentation.

    Python 2: subclasses ``unicode`` so instances can be passed where a
    string is expected; the accumulated text lives in ``self.text`` and
    is exposed via ``__unicode__``/``__str__``.
    """

    def __init__(self):
        super(Indenter, self).__init__()
        self.text = ''
        # stack of indentation strings; their concatenation is the
        # current prefix
        self.indent = []

    def indent_get(self):
        """Return the current indentation prefix as a single string."""
        indent = ''
        for i in self.indent:
            indent += i
        return indent

    def indent_push(self, n=1):
        """Push *n* levels of indentation; return self for chaining."""
        self.indent.append(n * BASE_INDENT * ' ')
        return self

    def indent_pop(self):
        """Pop and return the most recently pushed indentation string.

        Raises IndexError when the indentation stack is empty.
        """
        # Fix: the original had an unreachable `return self` after this
        # statement; the dead line has been removed without changing
        # behavior (callers ignore the return value).
        return self.indent.pop()

    def newline(self):
        """Append a newline; return self for chaining."""
        self.text += '\n'
        return self

    def add(self, x):
        """Append *x*, prefixing each of its lines with the current
        indentation; return self for chaining."""
        if type(x) != unicode:
            x = utils.decode(x)
        lines = x.splitlines()
        indent = self.indent_get()
        lines = [indent + l for l in lines]
        self.text += '\n'.join(lines)
        return self

    def __unicode__(self):
        return self.text

    def __str__(self):
        return utils.encode(unicode(self))
|
|
||||||
|
|
||||||
|
|
||||||
def build_source_context(source_name, mode, report_line,
                         report_col, empty_desc):
    """Return a numbered, optionally syntax-highlighted excerpt of
    SOURCE_CONTEXT lines around *report_line* in *source_name*, marking
    the reported line with '> ' (or the reported column with a caret
    when the column is known and the description is non-empty).

    Returns '' when the file does not exist.
    """
    start_line = max(1, report_line - SOURCE_CONTEXT)
    start_col = max(0, report_col)
    # could go beyond last line, checked in the loop
    end_line = report_line + SOURCE_CONTEXT

    # get source excerpt
    line_number = 1
    excerpt = ''
    if not os.path.isfile(source_name):
        return ''
    with codecs.open(source_name, 'r',
                     encoding=config.CODESET, errors="replace") as source_file:
        # avoid going past the end of the file
        for line in source_file:
            last_line = line_number
            if start_line <= line_number <= end_line:
                excerpt += line
            elif line_number > end_line:
                # OPTIM: no need to read past the last line of the excerpt
                break
            line_number += 1
    # NOTE(review): if the file is empty, last_line is never bound and
    # len(str(last_line)) below would raise NameError — presumably
    # never hit in practice; confirm against callers.
    excerpt = colorize.syntax_highlighting(source_name, mode, excerpt)

    # number lines and add caret at the right position
    n_length = len(str(last_line))
    s = ''
    line_number = start_line
    for line in excerpt.split('\n')[:-1]:
        num = colorize.color((str(line_number) + '.').zfill(n_length),
                             colorize.DIM, mode)
        caret = '  '
        do_mark_column = (line_number == report_line and
                          start_col > 1 and not empty_desc)

        # mark the line if we are not also marking the column
        if line_number == report_line and not do_mark_column:
            caret = colorize.color('> ',
                                   colorize.BLUE + colorize.BRIGHT, mode)
        s += '%s %s%s\n' % (num, caret, line)
        # mark the column position
        if do_mark_column:
            pad = ' ' * (3 + n_length + start_col)
            s += pad + colorize.color('^',
                                      colorize.BLUE + colorize.BRIGHT,
                                      mode) + '\n'
        line_number += 1

    return s
|
|
@ -1,182 +0,0 @@
|
|||||||
# Copyright (c) Facebook, Inc. and its affiliates.
|
|
||||||
#
|
|
||||||
# This source code is licensed under the MIT license found in the
|
|
||||||
# LICENSE file in the root directory of this source tree.
|
|
||||||
|
|
||||||
from __future__ import absolute_import
|
|
||||||
from __future__ import division
|
|
||||||
from __future__ import print_function
|
|
||||||
from __future__ import unicode_literals
|
|
||||||
|
|
||||||
import argparse
|
|
||||||
import codecs
|
|
||||||
import json
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
import time
|
|
||||||
|
|
||||||
from . import config
|
|
||||||
|
|
||||||
|
|
||||||
FORMAT = '[%(levelname)s] %(message)s'
|
|
||||||
DEBUG_FORMAT = '[%(levelname)s:%(filename)s:%(lineno)03d] %(message)s'
|
|
||||||
|
|
||||||
|
|
||||||
# Monkey patching subprocess (I'm so sorry!).
# Python versions before 2.7 lack subprocess.check_output; install a
# minimal backport so the rest of the codebase can rely on it.
if 'check_output' not in dir(subprocess):
    def f(*popenargs, **kwargs):
        # mirrors the stdlib semantics: capture stdout, raise
        # CalledProcessError on a non-zero exit status
        if 'stdout' in kwargs:
            raise ValueError('stdout not supported')
        process = subprocess.Popen(
            stdout=subprocess.PIPE,
            *popenargs,
            **kwargs)
        output, unused_err = process.communicate()
        retcode = process.poll()
        if retcode:
            cmd = kwargs.get('args')
            if cmd is None:
                cmd = popenargs[0]
            raise subprocess.CalledProcessError(retcode, cmd)
        return output
    subprocess.check_output = f
|
|
||||||
|
|
||||||
|
|
||||||
def configure_logging(args):
    """Set up the root logger and register the custom TIMING level.

    This can be called only once and has to be called before any logging
    is done.  With ``args.debug`` the log goes to the console with source
    locations; otherwise it is written to a file in the infer out directory.
    """
    # Custom severity between ERROR and CRITICAL, used for timing reports.
    logging.TIMING = logging.ERROR + 5
    logging.addLevelName(logging.TIMING, 'TIMING')

    def timing(msg, *args, **kwargs):
        logging.log(logging.TIMING, msg, *args, **kwargs)

    # Expose a convenience function mirroring logging.info/debug/etc.
    logging.timing = timing

    if args.debug:
        logging.basicConfig(level=logging.DEBUG, format=DEBUG_FORMAT)
    else:
        log_path = os.path.join(args.infer_out, config.LOG_FILE)
        logging.basicConfig(level=logging.INFO,
                            format=FORMAT,
                            filename=log_path,
                            filemode='w')
|
|
||||||
|
|
||||||
|
|
||||||
def elapsed_time(start_time):
    """Return the number of seconds elapsed since *start_time*.

    *start_time* is an epoch timestamp as returned by time.time().
    """
    now = time.time()
    return now - start_time
|
|
||||||
|
|
||||||
|
|
||||||
def get_cmd_in_bin_dir(binary_name):
    """Return the path of *binary_name* inside Infer's bin directory."""
    bin_directory = config.BIN_DIRECTORY
    return os.path.join(bin_directory, binary_name)
|
|
||||||
|
|
||||||
|
|
||||||
def load_json_from_path(path, errors='replace'):
    """Read the file at *path* and parse it as JSON.

    The file is decoded with the configured codeset; *errors* selects the
    Unicode error-handling scheme (default: replace bad characters).
    """
    with codecs.open(path, 'r', encoding=config.CODESET,
                     errors=errors) as stream:
        return json.load(stream, encoding=config.CODESET)
|
|
||||||
|
|
||||||
|
|
||||||
def dump_json_to_path(
        data, path,
        skipkeys=False, ensure_ascii=True, check_circular=True, allow_nan=True,
        cls=None,
        indent=2,  # customized
        separators=None,
        encoding=config.CODESET,  # customized
        default=None, sort_keys=False, **kw):
    """Serialize *data* as JSON into the file at *path*.

    Mirrors the json.dump() signature but opens the destination file
    itself, defaulting to a 2-space indent and the configured codeset.
    """
    with codecs.open(path, 'w', encoding=config.CODESET,
                     errors='replace') as file_out:
        json.dump(data, file_out,
                  skipkeys=skipkeys,
                  ensure_ascii=ensure_ascii,
                  check_circular=check_circular,
                  allow_nan=allow_nan,
                  cls=cls,
                  indent=indent,
                  separators=separators,
                  encoding=encoding,
                  default=default,
                  sort_keys=sort_keys,
                  **kw)
|
|
||||||
|
|
||||||
|
|
||||||
def mkdir_if_not_exists(path):
    """Create directory *path*, tolerating it already existing.

    Only the "already exists" error (EEXIST) is swallowed; real failures
    such as a missing parent directory or insufficient permissions are
    propagated to the caller instead of being silently ignored.
    """
    try:
        os.mkdir(path)
    except OSError as e:
        # EEXIST is the expected, benign outcome; anything else is a bug
        # the caller should see.
        if e.errno != errno.EEXIST:
            raise
|
|
||||||
|
|
||||||
|
|
||||||
def invoke_function_with_callbacks(
        func,
        args,
        on_terminate=None,
        on_exception=None):
    """Call ``func(*args)`` with success/failure callbacks.

    On success, the result is passed to *on_terminate* (if given) and then
    returned.  If *func* -- or *on_terminate* -- raises, the exception is
    handed to *on_exception* (if given) and its return value is returned;
    otherwise the exception propagates.
    """
    try:
        # on_terminate runs inside the try on purpose: its failures are
        # also routed to on_exception.
        result = func(*args)
        if on_terminate:
            on_terminate(result)
        return result
    except Exception as failure:
        if on_exception:
            return on_exception(failure)
        raise
|
|
||||||
|
|
||||||
|
|
||||||
def get_plural(_str, count):
    """Return "<count> <noun>" with a naive English plural.

    E.g. ``get_plural('issue', 1)`` -> ``'1 issue'``,
    ``get_plural('issue', 3)`` -> ``'3 issues'``.
    """
    suffix = '' if count == 1 else 's'
    return '%d %s%s' % (count, _str, suffix)
|
|
||||||
|
|
||||||
|
|
||||||
def decode(s, errors='replace'):
    """Decode byte string *s* to unicode using the configured codeset."""
    codeset = config.CODESET
    return s.decode(encoding=codeset, errors=errors)
|
|
||||||
|
|
||||||
|
|
||||||
def encode(u, errors='replace'):
    """Encode unicode string *u* to bytes using the configured codeset."""
    codeset = config.CODESET
    return u.encode(encoding=codeset, errors=errors)
|
|
||||||
|
|
||||||
|
|
||||||
def decode_or_not(s, errors='replace'):
    """Decode *s* if it is a byte string; return it unchanged otherwise.

    The exception type looks wrong but is correct for Python 2: calling
    .decode() on a *unicode* object first implicitly encodes it with the
    ASCII codec, which raises UnicodeEncodeError on non-ASCII content --
    that is the signal that *s* was already unicode.
    """
    try:
        return decode(s, errors)
    except UnicodeEncodeError:
        return s
|
|
||||||
|
|
||||||
|
|
||||||
def encode_or_not(u, errors='replace'):
    """Encode *u* if it is a unicode string; return it unchanged otherwise.

    Mirror of decode_or_not(): in Python 2, calling .encode() on a *byte*
    string first implicitly decodes it with the ASCII codec, which raises
    UnicodeDecodeError on non-ASCII content -- that is the signal that *u*
    was already encoded.
    """
    try:
        return encode(u, errors)
    except UnicodeDecodeError:
        return u
|
|
||||||
|
|
||||||
|
|
||||||
def stdout(s, errors='replace'):
    """Print unicode string *s* on standard output, suitably encoded."""
    encoded = encode(s, errors=errors)
    print(encoded)
|
|
||||||
|
|
||||||
|
|
||||||
def stderr(s, errors='replace'):
    """Print unicode string *s* on standard error, suitably encoded."""
    encoded = encode(s, errors=errors)
    print(encoded, file=sys.stderr)
|
|
||||||
|
|
||||||
|
|
||||||
def merge_and_dedup_files_into_path(files_to_merge, dest):
    """Concatenate *files_to_merge* into *dest*, dropping duplicate lines.

    The relative order of the surviving lines is unspecified (they pass
    through a set).
    """
    unique_lines = set()
    for source_path in files_to_merge:
        with open(source_path, 'r') as src:
            unique_lines.update(src.readlines())
    with open(dest, 'w') as out:
        out.writelines(unique_lines)
|
|
||||||
|
|
||||||
|
|
||||||
def read_env():
    """Return a snapshot of os.environ with every value decoded to unicode.

    Keys are left as-is; values are decoded with the configured codeset.
    A fresh dict is built up front (one copy, not two), and values are
    rewritten over a materialized key list rather than while iterating a
    live dict view -- safer and also Python-3 compatible.
    """
    env = dict(os.environ)
    for k in list(env):
        env[k] = decode(env[k])
    return env
|
|
||||||
|
|
||||||
|
|
||||||
def encode_env(env):
    """Return a copy of *env* with every value encoded to a byte string.

    Keys are left untouched and the copy is made with ``env.copy()`` so the
    mapping type is preserved.  Values are rewritten over a materialized key
    list instead of a live ``iteritems()`` view, which avoids mutating the
    dict while iterating it and is also Python-3 compatible.
    """
    new_env = env.copy()
    for k in list(new_env):
        new_env[k] = encode(new_env[k])
    return new_env
|
|
||||||
|
|
||||||
|
|
||||||
class AbsolutePathAction(argparse.Action):
    """argparse action that stores a path argument as an absolute,
    byte-encoded path instead of the raw value the user typed."""

    def __call__(self, parser, namespace, values, option_string=None):
        absolute = os.path.abspath(values)
        setattr(namespace, self.dest, encode(absolute))
|
|
@ -1,21 +0,0 @@
|
|||||||
dnl Copyright (c) Facebook, Inc. and its affiliates.
|
|
||||||
dnl
|
|
||||||
dnl This source code is licensed under the MIT license found in the
|
|
||||||
dnl LICENSE file in the root directory of this source tree.
|
|
||||||
|
|
||||||
dnl AC_CHECK_PYTHON_MODULE([python],[module])
dnl
dnl checks if the given module is available from the given Python interpreter
dnl On success the shell variable PYTHON_<module> (name mangled via AS_TR_SH)
dnl is set to "yes", otherwise to "no", and it is AC_SUBSTed for use in
dnl generated files.
AC_DEFUN([AC_CHECK_PYTHON_MODULE],
[dnl
AC_MSG_CHECKING([for Python module $2])
dnl Feed "import <module>" to the interpreter on stdin and rely solely on
dnl its exit status; all interpreter output is discarded.
if printf "import %s" $2 | $1 - 1> /dev/null 2> /dev/null; then
AC_MSG_RESULT([ok])
AS_TR_SH([PYTHON_$2])=yes
else
AC_MSG_RESULT([unavailable])
AS_TR_SH([PYTHON_$2])=no
fi
AC_SUBST(AS_TR_SH([PYTHON_$2]))
])
|
|
Loading…
Reference in new issue