Remove outdated stats reporting

Reviewed By: jeremydubreil

Differential Revision: D4364525

fbshipit-source-id: d87d914
branch: master
Josh Berdine authored 8 years ago, committed by Facebook GitHub Bot
parent 5289ccfb23
commit 8d143d87df

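For orientation, here is a minimal sketch (not part of this commit) of the stats dictionary the removed code assembled, reconstructed from the init_stats() and save_stats() bodies shown in the diff below; the key names come from the removed code, while the sample values are illustrative only.

# Sketch: shape of the stats dict the deleted code built and dumped to
# infer-out/stats.json via utils.dump_json_to_path. Sample values invented.
stats = {
    'int': {                        # integer counters: cores, files, lines,
        'cores': 8,                 # per-bug-type counts, proc_stats entries
        'files': 120,
    },
    'float': {                      # per-phase timings, in seconds
        'capture_time': 3.2,
        'makefile_generation_time': 0.0,
        'analysis_time': 41.7,
        'reporting_time': 0.4,
    },
    'normal': {                     # string-valued metadata
        'analyzer': 'infer',        # illustrative value
        'infer_version': 'v0.9.0',  # illustrative value
    },
}
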
@@ -168,9 +168,6 @@ class AnalyzerWrapper(object):
if not os.path.isdir(self.args.infer_out):
raise e
self.stats = {'int': {}}
self.timing = {}
def clean_exit(self):
if os.path.isdir(self.args.infer_out):
utils.stdout('removing {}'.format(self.args.infer_out))
@@ -190,7 +187,6 @@ class AnalyzerWrapper(object):
self.javac.original_arguments if self.javac is not None else []
if self.args.multicore == 1:
analysis_start_time = time.time()
analyze_cmd = infer_analyze
exit_status = run_command(
analyze_cmd,
@@ -199,10 +195,6 @@ class AnalyzerWrapper(object):
'analysis',
self.args.analyzer
)
elapsed = utils.elapsed_time(analysis_start_time)
self.timing['analysis'] = elapsed
self.timing['makefile_generation'] = 0
else:
multicore_dir = os.path.join(self.args.infer_out, 'multicore')
pwd = os.getcwd()
@@ -211,7 +203,6 @@ class AnalyzerWrapper(object):
os.mkdir(multicore_dir)
os.chdir(multicore_dir)
analyze_cmd = infer_analyze + ['-makefile', 'Makefile']
makefile_generation_start_time = time.time()
makefile_status = run_command(
analyze_cmd,
self.args.debug,
@@ -219,8 +210,6 @@ class AnalyzerWrapper(object):
'create_makefile',
self.args.analyzer
)
elapsed = utils.elapsed_time(makefile_generation_start_time)
self.timing['makefile_generation'] = elapsed
exit_status += makefile_status
if makefile_status == os.EX_OK:
make_cmd = ['make', '-k']
@@ -229,7 +218,6 @@ class AnalyzerWrapper(object):
make_cmd += ['-l', str(self.args.load_average)]
if not self.args.debug:
make_cmd += ['-s']
analysis_start_time = time.time()
make_status = run_command(
make_cmd,
self.args.debug,
@@ -237,8 +225,6 @@ class AnalyzerWrapper(object):
'run_makefile',
self.args.analyzer
)
elapsed = utils.elapsed_time(analysis_start_time)
self.timing['analysis'] = elapsed
os.chdir(pwd)
exit_status += make_status
@@ -247,14 +233,6 @@ class AnalyzerWrapper(object):
return exit_status
def update_stats_with_warnings(self, json_report):
with open(json_report, 'r') as file_in:
entries = json.load(file_in)
for entry in entries:
key = entry[issues.JSON_INDEX_TYPE]
previous_value = self.stats['int'].get(key, 0)
self.stats['int'][key] = previous_value + 1
def create_report(self):
"""Report statistics about the computation and create a CSV file
containing the list of errors found during the analysis"""
@@ -280,47 +258,11 @@ class AnalyzerWrapper(object):
logging.error(
'Error with InferPrint with the command: {}'.format(
infer_print_cmd))
else:
self.update_stats_with_warnings(json_report)
return exit_status
def read_proc_stats(self):
proc_stats_path = os.path.join(
self.args.infer_out,
config.PROC_STATS_FILENAME)
# capture and compile modes do not create proc_stats.json
if os.path.isfile(proc_stats_path):
proc_stats = utils.load_json_from_path(proc_stats_path)
self.stats['int'].update(proc_stats)
def save_stats(self):
"""Print timing information to infer_out/stats.json"""
self.stats['float'] = {
'capture_time': self.timing.get('capture', 0.0),
'makefile_generation_time': self.timing.get(
'makefile_generation', 0.0),
'analysis_time': self.timing.get('analysis', 0.0),
'reporting_time': self.timing.get('reporting', 0.0),
}
self.stats['normal'] = {
'analyzer': self.args.analyzer,
'infer_version': utils.infer_version()
}
stats_path = os.path.join(self.args.infer_out, config.STATS_FILENAME)
utils.dump_json_to_path(self.stats, stats_path)
def report_proc_stats(self):
self.read_proc_stats()
self.print_analysis_stats()
def report(self):
reporting_start_time = time.time()
report_status = self.create_report()
elapsed = utils.elapsed_time(reporting_start_time)
self.timing['reporting'] = elapsed
if report_status == os.EX_OK and not self.args.buck:
infer_out = self.args.infer_out
json_report = os.path.join(infer_out, config.JSON_REPORT_FILENAME)
@@ -335,10 +277,4 @@ class AnalyzerWrapper(object):
if self.args.analyzer == config.ANALYZER_LINTERS:
self.report()
elif self.analyze() == os.EX_OK:
self.report_proc_stats()
self.report()
def print_analysis_stats(self):
files_total = self.stats['int']['files']
files_str = utils.get_plural('file', files_total)
print('Analyzed {}'.format(files_str))

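The hunks above all delete the same inlined pattern: record a start time, run a phase, and store the elapsed seconds in self.timing under the phase name. Below is a minimal sketch of that pattern, assuming utils.elapsed_time(t) behaves like time.time() - t (the helper itself is not shown in this diff); timed_phase and phase_fn are hypothetical names.

import time

def timed_phase(timing, name, phase_fn):
    # Run one analysis phase and record its duration, mirroring the deleted
    # code around run_command for the 'analysis', 'makefile_generation', and
    # 'reporting' phases.
    start = time.time()
    status = phase_fn()
    timing[name] = time.time() - start  # stand-in for utils.elapsed_time(start)
    return status
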
@@ -39,7 +39,6 @@ DEFAULT_BUCK_OUT_GEN = os.path.join(DEFAULT_BUCK_OUT, 'gen')
INFER_JSON_REPORT = os.path.join(config.BUCK_INFER_OUT,
config.JSON_REPORT_FILENAME)
INFER_STATS = os.path.join(config.BUCK_INFER_OUT, config.STATS_FILENAME)
INFER_SCRIPT = """\
#!/usr/bin/env {python_executable}
@@ -138,136 +137,10 @@ def get_normalized_targets(targets):
raise e
def init_stats(args, start_time):
"""Returns dictionary with target independent statistics.
"""
return {
'float': {},
'int': {
'cores': multiprocessing.cpu_count(),
'time': int(time.time()),
'start_time': int(round(start_time)),
},
'normal': {
'debug': str(args.debug),
'analyzer': args.analyzer,
'machine': platform.machine(),
'node': platform.node(),
'project': utils.decode(os.path.basename(os.getcwd())),
'revision': utils.vcs_revision(),
'branch': utils.vcs_branch(),
'system': platform.system(),
'infer_version': utils.infer_version(),
'infer_branch': utils.infer_branch(),
}
}
def store_performances_csv(infer_out, stats):
"""Stores the statistics about perfromances into a CSV file to be exported
to a database"""
perf_filename = os.path.join(infer_out, config.CSV_PERF_FILENAME)
with open(perf_filename, 'w') as csv_file_out:
csv_writer = csv.writer(csv_file_out)
keys = ['infer_version', 'project', 'revision', 'files', 'lines',
'cores', 'system', 'machine', 'node', 'total_time',
'capture_time', 'analysis_time', 'reporting_time', 'time']
int_stats = list(stats['int'].items())
normal_stats = list(stats['normal'].items())
flat_stats = dict(int_stats + normal_stats)
values = []
for key in keys:
if key in flat_stats:
values.append(flat_stats[key])
csv_writer.writerow(keys)
csv_writer.writerow(values)
csv_file_out.flush()
def get_harness_code():
all_harness_code = '\nGenerated harness code:\n'
for filename in os.listdir(DEFAULT_BUCK_OUT_GEN):
if 'InferGeneratedHarness' in filename:
all_harness_code += '\n' + filename + ':\n'
with open(os.path.join(DEFAULT_BUCK_OUT_GEN,
filename), 'r') as file_in:
all_harness_code += file_in.read()
return all_harness_code + '\n'
def get_basic_stats(stats):
files_analyzed = '{0} files ({1} lines) analyzed in {2}s\n\n'.format(
stats['int'].get('files', 0),
stats['int'].get('lines', 0),
stats['int']['total_time'],
)
phase_times = 'Capture time: {0}s\nAnalysis time: {1}s\n\n'.format(
stats['int'].get('capture_time', 0),
stats['int'].get('analysis_time', 0),
)
to_skip = {
'files',
'procedures',
'lines',
'cores',
'time',
'start_time',
'capture_time',
'analysis_time',
'reporting_time',
'total_time',
'makefile_generation_time'
}
bugs_found = 'Errors found:\n\n'
for key, value in sorted(stats['int'].items()):
if key not in to_skip:
bugs_found += ' {0:>8} {1}\n'.format(value, key)
basic_stats_message = files_analyzed + phase_times + bugs_found + '\n'
return basic_stats_message
def get_buck_stats():
trace_filename = os.path.join(
DEFAULT_BUCK_OUT,
'log',
'traces',
'build.trace'
)
ARGS = 'args'
SUCCESS_STATUS = 'success_type'
buck_stats = {}
try:
trace = utils.load_json_from_path(trace_filename)
for t in trace:
if SUCCESS_STATUS in t[ARGS]:
status = t[ARGS][SUCCESS_STATUS]
count = buck_stats.get(status, 0)
buck_stats[status] = count + 1
buck_stats_message = 'Buck build statistics:\n\n'
for key, value in sorted(buck_stats.items()):
buck_stats_message += ' {0:>8} {1}\n'.format(value, key)
return buck_stats_message
except IOError as e:
logging.error('Caught %s: %s' % (e.__class__.__name__, str(e)))
logging.error(traceback.format_exc())
return ''
class NotFoundInJar(Exception):
pass
def load_stats(opened_jar):
try:
return json.loads(opened_jar.read(INFER_STATS).decode())
except KeyError:
raise NotFoundInJar
def load_json_report(opened_jar):
try:
return json.loads(opened_jar.read(INFER_JSON_REPORT).decode())
@@ -289,13 +162,7 @@ def collect_results(args, start_time, targets):
"""Walks through buck-gen, collects results for the different buck targets
and stores them in args.infer_out/results.csv.
"""
buck_stats = get_buck_stats()
logging.info(buck_stats)
with open(os.path.join(args.infer_out, ANALYSIS_SUMMARY_OUTPUT), 'w') as f:
f.write(buck_stats)
all_json_rows = set()
stats = init_stats(args, start_time)
accumulation_whitelist = list(map(re.compile, [
'^cores$',
@@ -304,36 +171,12 @@ def collect_results(args, start_time, targets):
'.*_pc',
]))
expected_analyzer = stats['normal']['analyzer']
expected_version = stats['normal']['infer_version']
for path in get_output_jars(targets):
try:
with zipfile.ZipFile(path) as jar:
# Accumulate integers and float values
target_stats = load_stats(jar)
found_analyzer = target_stats['normal']['analyzer']
found_version = target_stats['normal']['infer_version']
if found_analyzer != expected_analyzer \
or found_version != expected_version:
continue
else:
for type_k in ['int', 'float']:
items = target_stats.get(type_k, {}).items()
for key, value in items:
if not any(map(lambda r: r.match(key),
accumulation_whitelist)):
old_value = stats[type_k].get(key, 0)
stats[type_k][key] = old_value + value
json_rows = load_json_report(jar)
for row in json_rows:
all_json_rows.add(json.dumps(row))
# Override normals
stats['normal'].update(target_stats.get('normal', {}))
except NotFoundInJar:
pass
except zipfile.BadZipfile:
@@ -341,13 +184,6 @@ def collect_results(args, start_time, targets):
json_report = os.path.join(args.infer_out, config.JSON_REPORT_FILENAME)
# Convert all float values to integer values
for key, value in stats.get('float', {}).items():
stats['int'][key] = int(round(value))
# Delete the float entries before exporting the results
del(stats['float'])
with open(json_report, 'w') as report:
json_string = '['
json_string += ','.join(all_json_rows)
@@ -355,29 +191,10 @@ def collect_results(args, start_time, targets):
report.write(json_string)
report.flush()
print('\n')
json_report = os.path.join(args.infer_out, config.JSON_REPORT_FILENAME)
bugs_out = os.path.join(args.infer_out, config.BUGS_FILENAME)
issues.print_and_save_errors(args.infer_out, args.project_root,
json_report, bugs_out, args.pmd_xml)
stats['int']['total_time'] = int(round(utils.elapsed_time(start_time)))
store_performances_csv(args.infer_out, stats)
stats_filename = os.path.join(args.infer_out, config.STATS_FILENAME)
utils.dump_json_to_path(stats, stats_filename)
basic_stats = get_basic_stats(stats)
if args.print_harness:
harness_code = get_harness_code()
basic_stats += harness_code
logging.info(basic_stats)
with open(os.path.join(args.infer_out, ANALYSIS_SUMMARY_OUTPUT), 'a') as f:
f.write(basic_stats)
shutil.copy(bugs_out, os.path.join(args.infer_out, ANALYSIS_SUMMARY_OUTPUT))
def cleanup(temp_files):

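The deleted collect_results() logic above merged each buck target's stats into one global dict: 'int' and 'float' counters were summed unless the key matched the accumulation whitelist, while string-valued 'normal' entries were overridden by the last target seen. A minimal standalone sketch of that merge follows; merge_target_stats is a hypothetical name, and only two of the whitelist patterns are visible in this diff.

import re

accumulation_whitelist = [re.compile(p) for p in ['^cores$', '.*_pc']]

def merge_target_stats(stats, target_stats):
    # Sum numeric counters across targets, skipping whitelisted keys such as
    # 'cores' that should not be added up per target.
    for type_k in ('int', 'float'):
        for key, value in target_stats.get(type_k, {}).items():
            if not any(r.match(key) for r in accumulation_whitelist):
                stats[type_k][key] = stats[type_k].get(key, 0) + value
    # String metadata is overridden, not accumulated.
    stats['normal'].update(target_stats.get('normal', {}))
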
@@ -38,9 +38,6 @@ WRAPPERS_DIRECTORY = os.path.join(LIB_DIRECTORY, 'wrappers')
XCODE_WRAPPERS_DIRECTORY = os.path.join(LIB_DIRECTORY, 'xcode_wrappers')
DEFAULT_INFER_OUT = os.path.join(os.getcwd().decode(CODESET), 'infer-out')
CSV_PERF_FILENAME = 'performances.csv'
STATS_FILENAME = 'stats.json'
PROC_STATS_FILENAME = 'proc_stats.json'
JSON_REPORT_FILENAME = 'report.json'
INFER_BUCK_DEPS_FILENAME = 'infer-deps.txt'

@@ -138,23 +138,18 @@ class AnalyzerWithFrontendWrapper(analyze.AnalyzerWrapper):
raise Exception('No javac command detected')
def start(self):
start_time = time.time()
self._compile()
if self.args.analyzer == config.ANALYZER_COMPILE:
return os.EX_OK
self._run_infer_frontend()
self.timing['capture'] = utils.elapsed_time(start_time)
if self.args.analyzer == config.ANALYZER_CAPTURE:
return os.EX_OK
self.analyze_and_report()
self._close()
self.timing['total'] = utils.elapsed_time(start_time)
self.save_stats()
return self.stats
return os.EX_OK
def _run_infer_frontend(self):
infer_cmd = [utils.get_cmd_in_bin_dir('InferJava')]
