cleanup print_errors()

Summary: public
This function is useful to understand how infer prints reports, so it's worth
cleaning it up a notch.

Reviewed By: jeremydubreil

Differential Revision: D2646878

fb-gh-sync-id: 2ecd894
Branch: master
Author: Jules Villard (committed by facebook-github-bot-1)
Parent: bf408a1d03
Commit: 10db97be1e

@@ -448,7 +448,7 @@ def collect_results(args, start_time):
         report.flush()

     print('\n')
-    issues.print_errors(json_report, bugs_out)
+    issues.print_and_save_errors(json_report, bugs_out)

     stats['int']['total_time'] = int(round(utils.elapsed_time(start_time)))

@@ -544,7 +544,7 @@ class Infer:
                                    config.JSON_REPORT_FILENAME)
         bugs_out = os.path.join(self.args.infer_out,
                                 config.BUGS_FILENAME)
-        issues.print_errors(json_report, bugs_out)
+        issues.print_and_save_errors(json_report, bugs_out)

     def print_analysis_stats(self):
         procs_total = self.stats['int']['procedures']

@@ -141,7 +141,6 @@ class BuckAnalyzer:
                 config.JSON_REPORT_FILENAME)
         with open(merged_results_path, 'w') as file_out:
             json.dump(all_results, file_out, indent=2)
-        # TODO: adapt issues.print_errors to support json and print on screen
         print('Results saved in {results_path}'.format(
             results_path=merged_results_path))
         return os.EX_OK
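For context on the Buck path above: BuckAnalyzer appears to combine per-target results into a single report before saving it to merged_results_path, and the removed TODO notes that the on-screen printer did not yet handle this merged JSON file. A minimal sketch of such a merge step, assuming report_paths is a list of per-target report files each holding a JSON list of issues (the collection logic itself is not shown in this diff):

import json

def merge_reports(report_paths, merged_results_path):
    # Hypothetical helper: concatenate per-target JSON issue lists.
    all_results = []
    for path in report_paths:
        with open(path, 'r') as file_in:
            all_results.extend(json.load(file_in))
    # Mirror of the json.dump call shown in the hunk above.
    with open(merged_results_path, 'w') as file_out:
        json.dump(all_results, file_out, indent=2)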

@@ -140,48 +140,48 @@ def clean_json(args, json_report):
     shutil.move(temporary_file, json_report)


-def print_errors(json_report, bugs_out):
-    errors = []
-    with codecs.open(json_report, 'r', encoding=config.LOCALE) as file_in:
-        errors = json.load(file_in)
-    errors = filter(lambda row: row[JSON_INDEX_KIND] in
-                    [ISSUE_KIND_ERROR, ISSUE_KIND_WARNING],
-                    errors)
-    with codecs.open(bugs_out, 'w', encoding=config.LOCALE) as file_out:
-        text_errors_list = []
-        for row in errors:
-            filename = row[JSON_INDEX_FILENAME]
-            if os.path.isfile(filename):
-                kind = row[JSON_INDEX_KIND]
-                line = row[JSON_INDEX_LINE]
-                error_type = row[JSON_INDEX_TYPE]
-                msg = row[JSON_INDEX_QUALIFIER]
-                indenter = source.Indenter()
-                indenter.indent_push()
-                indenter.add(
-                    source.build_source_context(filename,
-                                                source.TERMINAL_FORMATTER,
-                                                int(line)))
-                source_context = unicode(indenter)
-                text_errors_list.append(
-                    u'{0}:{1}: {2}: {3}\n {4}\n{5}'.format(
-                        filename,
-                        line,
-                        kind.lower(),
-                        error_type,
-                        msg,
-                        source_context,
-                    )
-                )
-        n_issues = len(text_errors_list)
-        if n_issues == 0:
-            _print_and_write(file_out, 'No issues found')
-        else:
-            msg = '\nFound %s\n' % utils.get_plural('issue', n_issues)
-            _print_and_write(file_out, msg)
-            text_errors = '\n\n'.join(text_errors_list)
-            _print_and_write(file_out, text_errors)
+def print_and_save_errors(json_report, bugs_out):
+    with codecs.open(json_report, 'r', encoding=config.LOCALE) as file_in:
+        errors = filter(lambda row: row[JSON_INDEX_KIND] in
+                        [ISSUE_KIND_ERROR, ISSUE_KIND_WARNING],
+                        json.load(file_in))
+
+    text_errors_list = []
+    for row in errors:
+        filename = row[JSON_INDEX_FILENAME]
+        if not os.path.isfile(filename):
+            continue
+
+        kind = row[JSON_INDEX_KIND]
+        line = row[JSON_INDEX_LINE]
+        error_type = row[JSON_INDEX_TYPE]
+        msg = row[JSON_INDEX_QUALIFIER]
+        source_context = source.build_source_context(filename,
+                                                     source.TERMINAL_FORMATTER,
+                                                     int(line))
+        indenter = source.Indenter() \
+            .indent_push() \
+            .add(source_context)
+        source_context = unicode(indenter)
+        text_errors_list.append(u'%s:%d: %s: %s\n %s\n%s' % (
+            filename,
+            line,
+            kind.lower(),
+            error_type,
+            msg,
+            source_context,
+        ))
+
+    n_issues = len(text_errors_list)
+    with codecs.open(bugs_out, 'w', encoding=config.LOCALE) as file_out:
+        if n_issues == 0:
+            _print_and_write(file_out, 'No issues found')
+        else:
+            msg = '\nFound %s\n' % utils.get_plural('issue', n_issues)
+            _print_and_write(file_out, msg)
+            text_errors = '\n\n'.join(text_errors_list)
+            _print_and_write(file_out, text_errors)


 def _compare_issues(filename_1, line_1, filename_2, line_2):
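The rewritten function now reads top to bottom: load the report, keep only error and warning kinds, skip issues whose source file is no longer on disk, format each remaining row, and only then open bugs_out once to print and save everything. A self-contained sketch of that filter-and-format flow, using hypothetical literal keys ('kind', 'file', 'line', 'bug_type') in place of the JSON_INDEX_* constants and omitting the source-context rendering:

import json
import os

ISSUE_KIND_ERROR = 'ERROR'
ISSUE_KIND_WARNING = 'WARNING'

def load_printable_issues(json_report):
    # Load the report and keep only errors and warnings, as the diff does
    # with filter() over JSON_INDEX_KIND.
    with open(json_report, 'r') as file_in:
        rows = json.load(file_in)
    errors = [row for row in rows
              if row['kind'] in (ISSUE_KIND_ERROR, ISSUE_KIND_WARNING)]

    text_errors_list = []
    for row in errors:
        filename = row['file']
        if not os.path.isfile(filename):
            # Same early continue as the rewritten loop: ignore issues
            # whose file is not present on disk.
            continue
        text_errors_list.append(u'%s:%d: %s: %s' % (
            filename,
            row['line'],
            row['kind'].lower(),
            row['bug_type'],
        ))
    return text_errors_list

Moving the codecs.open(bugs_out, ...) after the loop means the output file is only held open for the final _print_and_write calls instead of wrapping the entire formatting pass.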

@@ -44,12 +44,15 @@ class Indenter(str):
     def indent_push(self, n=1):
         self.indent.append(n * BASE_INDENT * ' ')
+        return self

     def indent_pop(self):
         return self.indent.pop()
+        return self

     def newline(self):
         self.text += '\n'
+        return self

     def add(self, x):
         if type(x) != unicode:
@@ -58,6 +61,7 @@ class Indenter(str):
         indent = self.indent_get()
         lines = [indent + l for l in lines]
         self.text += '\n'.join(lines)
+        return self

     def __unicode__(self):
         return self.text
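The return self additions above are what let the new print_and_save_errors build its source context in one chained expression, source.Indenter().indent_push().add(source_context), instead of three separate statements. A stripped-down illustration of the same fluent pattern (a toy class, not the real Indenter):

BASE_INDENT = 2

class TinyIndenter(object):
    """Minimal stand-in for source.Indenter, kept only to show chaining."""

    def __init__(self):
        self.text = ''
        self.indent = []

    def indent_push(self, n=1):
        self.indent.append(n * BASE_INDENT * ' ')
        return self  # returning self is what makes .indent_push().add() legal

    def add(self, x):
        prefix = ''.join(self.indent)
        self.text += '\n'.join(prefix + line for line in x.splitlines())
        return self

    def __str__(self):
        return self.text

# Before the change this took three statements; now it chains:
print(TinyIndenter().indent_push().add('line 1\nline 2'))

Returning self from mutating methods costs nothing at call sites that ignore the return value, so existing uses of Indenter keep working unchanged.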
