handle utf8 PWD

Summary:
- [python] decode strings returned by `os.*` functions (see the sketch after this list)
- [python] decode strings coming from the command line
- [python] encode the few remaining unicode strings back into byte strings
- [java] replace the lex/yacc parser for javac's verbose output with regex-based matching so that unicode in paths is handled (a small Python illustration follows the corresponding OCaml hunk below)
- [make] drive-by fix of `make test` so that `make clean test` works
- [integration tests] add end-to-end build integration tests with utf8 in the PWD
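
The decode/encode changes above follow one convention: byte strings coming from the OS (`sys.argv`, `os.getcwd()`, `os.environ`, subprocess output) are decoded to unicode at the boundary, and encoded back to bytes just before they are handed to the OS again. Below is a minimal, self-contained Python 2 sketch of that convention; `CODESET` here is an assumption standing in for `inferlib.config.CODESET`, which the real code derives from the locale.

    # Sketch only: mirrors the utils.decode/encode helpers used throughout
    # the diff below; CODESET stands in for config.CODESET.
    import locale
    import os
    import sys

    CODESET = locale.getdefaultlocale()[1] or 'UTF-8'

    def decode(s, errors='replace'):
        # OS-provided byte string -> unicode
        return s.decode(CODESET, errors)

    def encode(u, errors='replace'):
        # unicode -> byte string, right before handing it back to the OS
        return u.encode(CODESET, errors)

    def decode_or_not(s, errors='replace'):
        # tolerate values that are already unicode (mirrors utils.decode_or_not)
        try:
            return decode(s, errors)
        except UnicodeEncodeError:
            return s

    if __name__ == '__main__':
        # e.g. log a possibly non-ASCII working directory without crashing
        print(encode(u'PWD=%s' % decode(os.getcwd())))
        print(encode(u' '.join(map(decode, sys.argv))))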

Closes #76

Reviewed By: martinoluca

Differential Revision: D3240809

fb-gh-sync-id: 8c2e1ed
fbshipit-source-id: 8c2e1ed
Branch: master
Author: Jules Villard, committed by Facebook Github Bot 0 (9 years ago)
Parent: f88e5be395
Commit: d4521fd70d

@@ -147,7 +147,7 @@ endif
 	@rm -fr __test-infer-out__
 
 test:
-	$(MAKE) -C $(INFER_DIR) $(INFER_BIN)
+	$(MAKE) -C $(INFER_DIR) $(INFER_BIN_RELPATH)
 	$(MAKE) -C $(SRC_DIR) init
 	$(MAKE) -j$(NCPU) test_build
 	$(MAKE) -C $(SRC_DIR) mod_dep.dot

@@ -69,11 +69,11 @@ def load_module(mod_name):
 
 def split_args_to_parse():
+    sys_argv = map(utils.decode, sys.argv)
     dd_index = \
-        sys.argv.index(CMD_MARKER) if CMD_MARKER in sys.argv else len(sys.argv)
-    cmd_raw = sys.argv[dd_index + 1:]
-    return (map(utils.decode, sys.argv[1:dd_index]),
-            map(utils.decode, cmd_raw))
+        sys_argv.index(CMD_MARKER) if CMD_MARKER in sys_argv else len(sys_argv)
+    cmd_raw = sys_argv[dd_index + 1:]
+    return (sys_argv[1:dd_index], cmd_raw)
 
 
 def create_argparser(parents=[]):
@@ -131,18 +131,28 @@ def main():
                              touch_if_present=not args.continue_capture)
     utils.configure_logging(args)
-    logging.info('Running command %s', ' '.join(sys.argv))
-    logging.info('Path to infer script %s (%s)', __file__,
-                 os.path.realpath(__file__))
-    logging.info(analyze.get_infer_version())
-    logging.info('Platform: %s', platform.platform())
-    logging.info('PATH=%s', os.getenv('PATH'))
-    logging.info('SHELL=%s', os.getenv('SHELL'))
-    logging.info('PWD=%s', os.getenv('PWD'))
     logging.info('output of locale.getdefaultlocale(): %s',
                  str(locale.getdefaultlocale()))
     logging.info('encoding we chose in the end: %s',
                  config.CODESET)
+    logging.info('Running command %s',
+                 ' '.join(map(utils.decode, sys.argv)))
+    logging.info('Path to infer script %s (%s)', utils.decode(__file__),
+                 os.path.realpath(utils.decode(__file__)))
+    logging.info(analyze.get_infer_version())
+    logging.info('Platform: %s', utils.decode(platform.platform()))
+
+    def log_getenv(k):
+        v = os.getenv(k)
+        if v is not None:
+            v = utils.decode(v)
+        else:
+            v = '<NOT SET>'
+        logging.info('%s=%s', k, v)
+
+    log_getenv('PATH')
+    log_getenv('SHELL')
+    log_getenv('PWD')
 
     capture_exitcode = imported_module.gen_instance(args, cmd).capture()
     if capture_exitcode != os.EX_OK:

@@ -56,7 +56,9 @@ class VersionAction(argparse._VersionAction):
 
 base_parser = argparse.ArgumentParser(add_help=False)
 base_group = base_parser.add_argument_group('global arguments')
 base_group.add_argument('-o', '--out', metavar='<directory>',
-                        default=config.DEFAULT_INFER_OUT, dest='infer_out',
+                        default=utils.encode(config.DEFAULT_INFER_OUT),
+                        dest='infer_out',
+                        type=utils.decode,
                         action=utils.AbsolutePathAction,
                         help='Set the Infer results directory')
 base_group.add_argument('-i', '--incremental',
@@ -129,10 +131,11 @@ infer_group.add_argument('--infer_cache', metavar='<directory>',
                          help='Select a directory to contain the infer cache')
 
 infer_group.add_argument('-pr', '--project_root',
                          dest='project_root',
                          default=os.getcwd(),
+                         type=utils.decode,
                          help='Location of the project root '
                               '(default is current directory)')
 
 infer_group.add_argument('--absolute-paths',
                          action='store_true',
@@ -280,7 +283,7 @@ class AnalyzerWrapper(object):
             # to be reported
             infer_options += ['-allow_specs_cleanup']
 
-        infer_options += ['-inferconfig_home', os.getcwd()]
+        infer_options += ['-inferconfig_home', utils.decode(os.getcwd())]
 
         if self.args.analyzer == config.ANALYZER_ERADICATE:
             infer_options += ['-eradicate']
@@ -336,7 +339,8 @@ class AnalyzerWrapper(object):
         exit_status = os.EX_OK
 
         if self.javac is not None and self.args.buck:
-            infer_options += ['-project_root', os.getcwd(), '-java']
+            infer_options += ['-project_root', utils.decode(os.getcwd()),
+                              '-java']
             if self.javac.args.classpath is not None:
                 for path in self.javac.args.classpath.split(os.pathsep):
                     if os.path.isfile(path):
@@ -348,7 +352,9 @@ class AnalyzerWrapper(object):
                               config.ANALYZER_TRACING]:
             os.environ['INFER_ONDEMAND'] = 'Y'
 
-        os.environ['INFER_OPTIONS'] = ' '.join(infer_options)
+        infer_options = map(utils.decode_or_not, infer_options)
+        infer_options_str = ' '.join(infer_options)
+        os.environ['INFER_OPTIONS'] = utils.encode(infer_options_str)
 
         javac_original_arguments = \
             self.javac.original_arguments if self.javac is not None else []

@@ -35,7 +35,7 @@ from inferlib import analyze, config, issues, utils
 
 ANALYSIS_SUMMARY_OUTPUT = 'analysis_summary.txt'
 
-DEFAULT_BUCK_OUT = os.path.join(os.getcwd(), 'buck-out')
+DEFAULT_BUCK_OUT = os.path.join(utils.decode(os.getcwd()), 'buck-out')
 DEFAULT_BUCK_OUT_GEN = os.path.join(DEFAULT_BUCK_OUT, 'gen')
 
 INFER_CSV_REPORT = os.path.join(config.BUCK_INFER_OUT,
@@ -103,7 +103,7 @@ def prepare_build(args):
                                      dir='.') as infer_script:
         logging.info('Creating %s' % infer_script.name)
         infer_script.file.write(
-            INFER_SCRIPT.format(sys.executable, infer).encode())
+            utils.encode(INFER_SCRIPT.format(sys.executable, infer)))
         st = os.stat(infer_script.name)
         os.chmod(infer_script.name, st.st_mode | stat.S_IEXEC)
@@ -148,7 +148,7 @@ def init_stats(args, start_time):
         'analyzer': args.analyzer,
         'machine': platform.machine(),
         'node': platform.node(),
-        'project': os.path.basename(os.getcwd()),
+        'project': utils.decode(os.path.basename(os.getcwd())),
         'revision': utils.vcs_revision(),
         'branch': utils.vcs_branch(),
         'system': platform.system(),
@@ -415,7 +415,7 @@ def cleanup(temp_files):
 
 parser = argparse.ArgumentParser()
-parser.add_argument('--build-report', metavar='PATH', type=str)
+parser.add_argument('--build-report', metavar='PATH', type=utils.decode)
 parser.add_argument('--deep', action='store_true')
 parser.add_argument('--keep-going', action='store_true')
 parser.add_argument('--load-limit', '-L')

@@ -10,8 +10,7 @@ import os
 import util
 import tempfile
 
-from inferlib import jwlib
-from inferlib import config
+from inferlib import config, jwlib, utils
 
 MODULE_NAME = __name__
 MODULE_DESCRIPTION = '''Run analysis of code built with a command like:
@@ -43,7 +42,7 @@ class GradleCapture:
                                 config.JAVAC_FILELISTS_FILENAME)
             if not os.path.exists(path):
                 os.mkdir(path)
-        logging.info('Running with:\n' + version_str)
+        logging.info('Running with:\n' + utils.decode(version_str))
 
     def get_infer_commands(self, verbose_output):
         argument_start_pattern = ' Compiler arguments: '
@@ -74,12 +73,11 @@ class GradleCapture:
                         dir=os.path.join(self.args.infer_out,
                                          config.JAVAC_FILELISTS_FILENAME),
                         delete=False) as sources:
-                    sources.write('\n'.join(java_files))
+                    sources.write('\n'.join(map(utils.encode, java_files)))
                     sources.flush()
                     java_args.append('@' + sources.name)
-                capture = jwlib.create_infer_command(self.args,
-                                                     java_args)
+                capture = jwlib.create_infer_command(self.args, java_args)
                 calls.append(capture)
         return calls
 
     def capture(self):

@@ -48,7 +48,7 @@ class MakeCapture:
             cmd[0] = command_name
 
     def get_envvars(self):
-        env_vars = dict(os.environ)
+        env_vars = utils.read_env()
         wrappers_path = config.WRAPPERS_DIRECTORY
         env_vars['INFER_OLD_PATH'] = env_vars['PATH']
         env_vars['PATH'] = '{wrappers}{sep}{path}'.format(
@@ -63,9 +63,10 @@ class MakeCapture:
 
     def capture(self):
         try:
-            env = self.get_envvars()
-            logging.info('Running command %s with env:\n%s' % (self.cmd, env))
-            subprocess.check_call(self.cmd, env=env)
+            env = utils.encode_env(self.get_envvars())
+            cmd = map(utils.encode, self.cmd)
+            logging.info('Running command %s with env:\n%s' % (cmd, env))
+            subprocess.check_call(cmd, env=env)
             capture_dir = os.path.join(self.args.infer_out, 'captured')
             if len(os.listdir(capture_dir)) < 1:
                 # Don't return with a failure code unless we're

@@ -24,7 +24,7 @@ def get_build_output(build_cmd):
     # TODO make it return generator to be able to handle large builds
     proc = subprocess.Popen(build_cmd, stdout=subprocess.PIPE)
     (verbose_out_chars, _) = proc.communicate()
-    return verbose_out_chars.split('\n')
+    return utils.decode(verbose_out_chars).split('\n')
 
 
 def run_compilation_commands(cmds, clean_cmd):

@@ -50,7 +50,7 @@ class XcodebuildCapture:
         self.cmd = cmd
 
     def get_envvars(self):
-        env_vars = dict(os.environ)
+        env_vars = utils.read_env()
 
         env_vars['FCP_APPLE_CLANG'] = self.apple_clang_path
@@ -70,7 +70,9 @@ class XcodebuildCapture:
         self.cmd += ['GCC_PRECOMPILE_PREFIX_HEADER=NO']
 
         try:
-            subprocess.check_call(self.cmd, env=self.get_envvars())
+            env = utils.encode_env(self.get_envvars())
+            cmd = map(utils.encode, self.cmd)
+            subprocess.check_call(cmd, env=env)
             return os.EX_OK
         except subprocess.CalledProcessError as exc:
             if self.args.debug:

@@ -22,16 +22,16 @@ from . import analyze, config, utils
 # javac options
 parser = argparse.ArgumentParser()
 
-current_directory = os.getcwd()
+current_directory = utils.decode(os.getcwd())
 
 parser.add_argument('-version', action='store_true')
 parser.add_argument('-deprecation', action='store_true')
-parser.add_argument('-cp', '-classpath', type=str,
+parser.add_argument('-cp', '-classpath', type=utils.decode,
                     dest='classpath', default=os.getcwd())
-parser.add_argument('-bootclasspath', type=str)
+parser.add_argument('-bootclasspath', type=utils.decode)
 parser.add_argument('-d', dest='classes_out', default=current_directory)
-parser.add_argument('-processorpath', type=str, dest='processorpath')
-parser.add_argument('-processor', type=str, dest='processor')
+parser.add_argument('-processorpath', type=utils.decode, dest='processorpath')
+parser.add_argument('-processor', type=utils.decode, dest='processor')
 
 
 def _get_javac_args(args):

@@ -295,6 +295,20 @@ def encode(u, errors='replace'):
     return u.encode(encoding=config.CODESET, errors=errors)
 
 
+def decode_or_not(s, errors='replace'):
+    try:
+        return decode(s, errors)
+    except UnicodeEncodeError:
+        return s
+
+
+def encode_or_not(u, errors='replace'):
+    try:
+        return encode(u, errors)
+    except UnicodeDecodeError:
+        return u
+
+
 def stdout(s, errors='replace'):
     print(encode(s, errors=errors))
@@ -312,7 +326,21 @@ def merge_and_dedup_files_into_path(files_to_merge, dest):
         fdest.writelines(lines)
 
 
+def read_env():
+    env = dict(os.environ).copy()
+    for k, v in env.iteritems():
+        env[k] = decode(v)
+    return env
+
+
+def encode_env(env):
+    new_env = env.copy()
+    for k, v in new_env.iteritems():
+        new_env[k] = encode(v)
+    return new_env
+
+
 class AbsolutePathAction(argparse.Action):
     """Convert a path from relative to absolute in the arg parser"""
     def __call__(self, parser, namespace, values, option_string=None):
-        setattr(namespace, self.dest, os.path.abspath(values))
+        setattr(namespace, self.dest, encode(os.path.abspath(values)))

@@ -162,26 +162,39 @@ let add_source_file path map =
 
 let load_sources_and_classes () =
   let file_in = open_in !javac_verbose_out in
+  let class_filename_re =
+    Str.regexp
+      "\\[wrote RegularFileObject\\[\\(.*\\)\\]\\]" in
+  let source_filename_re =
+    Str.regexp
+      "\\[parsing started RegularFileObject\\[\\(.*\\)\\]\\]" in
+  let classpath_re =
+    Str.regexp
+      "\\[search path for class files: \\(.*\\)\\]" in
   let rec loop paths roots sources classes =
     try
-      let lexbuf = Lexing.from_string (input_line file_in) in
-      match JVerboseParser.line JVerboseLexer.token lexbuf with
-      | JVerbose.Source path ->
-          loop paths roots (add_source_file path sources) classes
-      | JVerbose.Class path ->
-          let cn, root_info = Javalib.extract_class_name_from_file path in
-          let root_dir = if root_info = "" then Filename.current_dir_name else root_info in
-          let updated_roots =
-            if IList.exists (fun p -> p = root_dir) roots then roots
-            else root_dir:: roots in
-          loop paths updated_roots sources (JBasics.ClassSet.add cn classes)
-      | JVerbose.Classpath parsed_paths ->
-          loop parsed_paths roots sources classes
+      let line = input_line file_in in
+      if Str.string_match class_filename_re line 0 then
+        let path = Str.matched_group 1 line in
+        let cn, root_info = Javalib.extract_class_name_from_file path in
+        let root_dir = if root_info = "" then Filename.current_dir_name else root_info in
+        let updated_roots =
+          if IList.exists (fun p -> p = root_dir) roots then roots
+          else root_dir:: roots in
+        loop paths updated_roots sources (JBasics.ClassSet.add cn classes)
+      else if Str.string_match source_filename_re line 0 then
+        let path = Str.matched_group 1 line in
+        loop paths roots (add_source_file path sources) classes
+      else if Str.string_match classpath_re line 0 then
+        let classpath = Str.matched_group 1 line in
+        let parsed_paths = Str.split (Str.regexp_string ",") classpath in
+        loop parsed_paths roots sources classes
+      else
+        (* skip this line *)
+        loop paths roots sources classes
     with
     | JBasics.Class_structure_error _
-    | Parsing.Parse_error
-    | Invalid_argument _
-    | Failure "lexing: empty token" -> loop paths roots sources classes
+    | Invalid_argument _ -> loop paths roots sources classes
    | End_of_file ->
        close_in file_in;
        let classpath = IList.fold_left append_path "" (roots @ (add_android_jar paths)) in
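
The three `Str` regexes in the hunk above replace the lex/yacc pipeline because the deleted lexer (next hunk) only admits an ASCII character set in its `path` token, which is presumably why unicode in paths broke the old parser. Purely as an illustration, here is the same matching expressed in Python; the sample lines are assumed approximations of `javac -verbose` output, not taken from this commit.

    import re

    # Python equivalents of class_filename_re, source_filename_re, classpath_re
    CLASS_RE = re.compile(r'\[wrote RegularFileObject\[(.*)\]\]')
    SOURCE_RE = re.compile(r'\[parsing started RegularFileObject\[(.*)\]\]')
    CLASSPATH_RE = re.compile(r'\[search path for class files: (.*)\]')

    def parse_verbose_line(line):
        m = CLASS_RE.match(line)
        if m:
            return ('class', m.group(1))
        m = SOURCE_RE.match(line)
        if m:
            return ('source', m.group(1))
        m = CLASSPATH_RE.match(line)
        if m:
            return ('classpath', m.group(1).split(','))
        return None  # skip the line, as the OCaml loop does

    if __name__ == '__main__':
        # assumed sample lines containing a non-ASCII path component
        print(parse_verbose_line(
            u'[parsing started RegularFileObject[/tmp/utf8_\u03b9n_pwd/Hello.java]]'))
        print(parse_verbose_line(
            u'[wrote RegularFileObject[/tmp/utf8_\u03b9n_pwd/Hello.class]]'))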

@@ -1,61 +0,0 @@
(*
* Copyright (c) 2015 - present Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*)
{
open JVerboseParser
}
let space = [' ' '\t']
let lowerletter = ['a'-'z']
let upperletter = ['A'-'Z']
let letter = lowerletter | upperletter
let underscore = '_'
let minus = '-'
let dot = '.'
let hash = '#'
let dollar = '$'
let equal = '='
let tilde = '~'
let digit = ['0'-'9']
let number = digit* | digit+ '.' digit* | digit* '.' digit+
let char = letter | digit
let dir_sep = '/'
let dot_java = dot "java"
let dot_class = dot "class"
let path = (char | dir_sep | underscore | minus | dot | hash | dollar | equal | tilde)+
let source_basename = path dot_java
let class_basename = path dot_class
let source_filename = (dir_sep path | path)* source_basename
let class_filename = (dir_sep path | path)* class_basename
let search_path = "search path for class files"
rule token = parse
| [' ' '\t'] { token lexbuf } (* skip blanks *)
| ['\n'] { EOL }
| '[' { LEFT_SQUARE_BRACKET }
| ']' { RIGHT_SQUARE_BRACKET }
| ':' { COLON }
| ',' { COMMA }
| "\'\'" { EMPTY }
| "parsing" { PARSING }
| "started" { STARTED }
| "wrote" { WROTE }
| search_path { SEARCH_PATH }
| "RegularFileObject" { REGULARFILEOBJECT }
| "ZipFileIndexFileObject" { ZIPFILEINDEXFILEOBJECT }
| source_filename as p { SOURCE_FILENAME p }
| class_filename as p { CLASS_FILENAME p }
| path as p { PATH p }
| eof { EOF }

@@ -1,52 +0,0 @@
/*
* Copyright (c) 2009 - 2013 Monoidics ltd.
* Copyright (c) 2013 - present Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
%{
%}
%token EOL EOF
%token LEFT_SQUARE_BRACKET RIGHT_SQUARE_BRACKET
%token COLON COMMA
%token EMPTY
%token PARSING STARTED WROTE SEARCH_PATH
%token REGULARFILEOBJECT ZIPFILEINDEXFILEOBJECT
%token <string> CLASS_FILENAME
%token <string> SOURCE_FILENAME
%token <string> PATH
%start line
%type <JVerbose.parsed_data> line
%%
line:
| source_filename { $1 }
| class_filename { $1 }
| classpath { $1 }
;
source_filename:
LEFT_SQUARE_BRACKET PARSING STARTED REGULARFILEOBJECT LEFT_SQUARE_BRACKET SOURCE_FILENAME RIGHT_SQUARE_BRACKET RIGHT_SQUARE_BRACKET { JVerbose.Source $6 }
;
class_filename:
LEFT_SQUARE_BRACKET WROTE REGULARFILEOBJECT LEFT_SQUARE_BRACKET CLASS_FILENAME RIGHT_SQUARE_BRACKET RIGHT_SQUARE_BRACKET { JVerbose.Class $5 }
;
classpath:
LEFT_SQUARE_BRACKET SEARCH_PATH COLON classpath_parts RIGHT_SQUARE_BRACKET { JVerbose.Classpath $4 }
;
classpath_parts:
| EMPTY { [] }
| PATH { [$1] }
| EMPTY COMMA classpath_parts { $3 }
| PATH COMMA classpath_parts { $1 :: $3 }
;

@@ -1,4 +1,5 @@
 #!/usr/bin/env python2.7
+# -*- coding: utf-8 -*-
 
 # Copyright (c) 2015 - present Facebook, Inc.
 # All rights reserved.
 #
@@ -34,7 +35,7 @@ sys.path.insert(0,
                 os.path.join(SCRIPT_DIR,
                              os.pardir, os.pardir, 'lib', 'python'))
 
-from inferlib import issues, utils
+from inferlib import config, issues, utils
 
 ROOT_DIR = os.path.join(SCRIPT_DIR, os.pardir, os.pardir, os.pardir)
@@ -54,7 +55,17 @@ REPORT_FIELDS = [
 
 CODETOANALYZE_DIR = os.path.join(SCRIPT_DIR, 'codetoanalyze')
 EXPECTED_OUTPUTS_DIR = os.path.join(SCRIPT_DIR, 'expected_outputs')
 
-ALL_TESTS = ['ant', 'buck', 'gradle', 'make', 'locale', 'waf']
+ALL_TESTS = [
+    'ant',
+    'buck',
+    'cmake',
+    'gradle',
+    'javac',
+    'locale',
+    'make',
+    'utf8_in_pwd',
+    'waf',
+]
 
 to_test = ALL_TESTS
@@ -188,6 +199,13 @@ def do_test(errors, expected_errors_filename):
         check_results(errors, patterns)
 
 
+def make_paths_relative_in_report(root, errors):
+    for error in errors:
+        # remove "root/" from each file name
+        rel_fname = error[issues.JSON_INDEX_FILENAME][len(root) + 1:]
+        error[issues.JSON_INDEX_FILENAME] = rel_fname
+
+
 class BuildIntegrationTest(unittest.TestCase):
 
     def test_ant_integration(self):
@@ -205,14 +223,38 @@ class BuildIntegrationTest(unittest.TestCase):
         original = os.path.join(EXPECTED_OUTPUTS_DIR, 'ant_report.json')
         do_test(errors, original)
 
-    def test_gradle_integration(self):
-        if 'gradle' not in to_test:
+    def test_javac_integration(
+            self,
+            enabled=None,
+            root=os.path.join(ROOT_DIR, 'examples'),
+            report_name='javac_report.json'):
+        if enabled is None:
+            enabled = 'javac' in to_test
+        if not enabled:
+            print('\nSkipping javac integration test')
+            return
+
+        print('\nRunning javac integration test')
+        errors = run_analysis(
+            root,
+            [],
+            [['javac', 'Hello.java']],
+            INFER_EXECUTABLE)
+        original = os.path.join(EXPECTED_OUTPUTS_DIR, report_name)
+        do_test(errors, original)
+
+    def test_gradle_integration(
+            self,
+            enabled=None,
+            root=os.path.join(ROOT_DIR, 'examples', 'java_hello'),
+            report_name='gradle_report.json'):
+        if enabled is None:
+            enabled = 'gradle' in to_test
+        if not enabled:
             print('\nSkipping Gradle integration test')
             return
         print('\nRunning Gradle integration test using mock gradle')
-        root = os.path.join(ROOT_DIR, 'examples', 'java_hello')
         env = os.environ
         env['PATH'] = '{}:{}'.format(
             os.path.join(SCRIPT_DIR, 'mock'),
@@ -224,7 +266,7 @@ class BuildIntegrationTest(unittest.TestCase):
             [['gradle', 'build']],
             INFER_EXECUTABLE,
             env=env)
-        original = os.path.join(EXPECTED_OUTPUTS_DIR, 'gradle_report.json')
+        original = os.path.join(EXPECTED_OUTPUTS_DIR, report_name)
         do_test(errors, original)
 
     def test_buck_integration(self):
def test_buck_integration(self): def test_buck_integration(self):
@@ -242,19 +284,24 @@ class BuildIntegrationTest(unittest.TestCase):
         original = os.path.join(EXPECTED_OUTPUTS_DIR, 'buck_report.json')
         do_test(errors, original)
 
-    def test_make_integration(self):
-        if 'make' not in to_test:
+    def test_make_integration(
+            self,
+            enabled=None,
+            root=os.path.join(CODETOANALYZE_DIR, 'make'),
+            report_name='make_report.json'):
+        if enabled is None:
+            enabled = 'make' in to_test
+        if not enabled:
             print('\nSkipping make integration test')
             return
         print('\nRunning make integration test')
-        root = os.path.join(CODETOANALYZE_DIR, 'make')
         errors = run_analysis(
             root,
             [['make', 'clean']],
             [['make', 'all']],
             INFER_EXECUTABLE)
-        original = os.path.join(EXPECTED_OUTPUTS_DIR, 'make_report.json')
+        original = os.path.join(EXPECTED_OUTPUTS_DIR, report_name)
         do_test(errors, original)
 
     def test_wonky_locale_integration(self):
def test_wonky_locale_integration(self): def test_wonky_locale_integration(self):
@@ -293,14 +340,23 @@ class BuildIntegrationTest(unittest.TestCase):
         original = os.path.join(EXPECTED_OUTPUTS_DIR, 'waf_report.json')
         do_test(errors, original)
 
-    def test_cmake_integration(self):
-        if not ('cmake' in to_test and
+    def test_cmake_integration(
+            self,
+            enabled=None,
+            root=os.path.join(CODETOANALYZE_DIR, 'cmake'),
+            report_name='cmake_report.json'):
+        if enabled is None:
+            enabled = 'cmake' in to_test
+        if not (enabled and
                 is_tool_available(['cmake', '--version'])):
             print('\nSkipping cmake integration test')
             return
         print('\nRunning cmake integration test')
-        root = os.path.join(CODETOANALYZE_DIR, 'cmake', 'build')
+        orig_root = root
+        root = os.path.join(root, 'build')
+        # remove build/ directory just in case
+        shutil.rmtree(root, True)
         errors = run_analysis(
             root,
             [],
@@ -308,9 +364,42 @@ class BuildIntegrationTest(unittest.TestCase):
             INFER_EXECUTABLE)
         # remove build/ directory
         shutil.rmtree(root)
-        original = os.path.join(EXPECTED_OUTPUTS_DIR, 'cmake_report.json')
+        original = os.path.join(EXPECTED_OUTPUTS_DIR, report_name)
+        # cmake produces absolute paths using the real path
+        make_paths_relative_in_report(os.path.realpath(orig_root), errors)
         do_test(errors, original)
 
+    def test_utf8_in_pwd_integration(self):
+        if not 'utf8_in_pwd' in to_test:
+            print('\nSkipping utf8_in_pwd integration test')
+            return
+        print('\nRunning utf8_in_pwd integration test')
+
+        utf8_in_pwd_path = os.path.join(CODETOANALYZE_DIR, 'utf8_ιn_pwd')
+
+        # copy non-unicode dir to one with unicode in it
+        shutil.rmtree(utf8_in_pwd_path, True)  # remove just in case
+        shutil.copytree(os.path.join(CODETOANALYZE_DIR, 'utf8_in_pwd'),
+                        utf8_in_pwd_path)
+
+        self.test_cmake_integration(
+            enabled=True,
+            root=os.path.join(utf8_in_pwd_path, 'cmake'),
+            report_name='utf8_in_pwd_cmake_report.json')
+        self.test_gradle_integration(
+            enabled=True,
+            root=os.path.join(utf8_in_pwd_path, 'gradle'),
+            report_name='utf8_in_pwd_gradle_report.json')
+        self.test_javac_integration(
+            enabled=True,
+            root=os.path.join(utf8_in_pwd_path),
+            report_name='utf8_in_pwd_javac_report.json')
+        self.test_make_integration(
+            enabled=True,
+            root=os.path.join(utf8_in_pwd_path, 'make'),
+            report_name='utf8_in_pwd_make_report.json')
+        shutil.rmtree(utf8_in_pwd_path, True)  # remove copied dir
+
 
 if __name__ == '__main__':
     # hackish capturing of the arguments after '--'

@@ -1,16 +1,15 @@
-(*
- * Copyright (c) 2009 - 2013 Monoidics ltd.
- * Copyright (c) 2013 - present Facebook, Inc.
+/*
+ * Copyright (c) 2015 - present Facebook, Inc.
  * All rights reserved.
  *
  * This source code is licensed under the BSD style license found in the
  * LICENSE file in the root directory of this source tree. An additional grant
  * of patent rights can be found in the PATENTS file in the same directory.
- *)
-
-open! Utils
-
-type parsed_data =
-  | Source of string
-  | Class of string
-  | Classpath of string list
+ */
+
+class Hello {
+  int test() {
+    String s = null;
+    return s.length();
+  }
+}

@@ -0,0 +1 @@
../../../../../examples/java_hello/

@@ -1,7 +1,7 @@
 [
   {
     "bug_type": "NULL_DEREFERENCE",
-    "file": "/home/jul/infer/infer/tests/build_systems/codetoanalyze/cmake/hello.c",
+    "file": "hello.c",
     "procedure": "test"
   }
 ]

@@ -0,0 +1,7 @@
[
{
"bug_type": "NULL_DEREFERENCE",
"file": "Hello.java",
"procedure": "int Hello.test()"
}
]

@@ -0,0 +1,7 @@
[
{
"bug_type": "NULL_DEREFERENCE",
"file": "hello.c",
"procedure": "test"
}
]

@@ -0,0 +1,17 @@
[
{
"bug_type": "NULL_DEREFERENCE",
"file": "Hello.java",
"procedure": "void Hello.mayCauseNPE()"
},
{
"bug_type": "RESOURCE_LEAK",
"file": "Hello.java",
"procedure": "void Hello.mayLeakResource()"
},
{
"bug_type": "RESOURCE_LEAK",
"file": "Hello.java",
"procedure": "void Hello.twoResources()"
}
]

@@ -0,0 +1,7 @@
[
{
"bug_type": "NULL_DEREFERENCE",
"file": "Hello.java",
"procedure": "int Hello.test()"
}
]

@@ -0,0 +1,7 @@
[
{
"bug_type": "NULL_DEREFERENCE",
"file": "utf8_in_function_names.c",
"procedure": "test_\uc131\uacf5"
}
]