cleanup print_errors()

Summary: public
This function is useful for understanding how infer prints reports, so it's
worth cleaning it up a notch.

Reviewed By: jeremydubreil

Differential Revision: D2646878

fb-gh-sync-id: 2ecd894
master
Jules Villard 9 years ago committed by facebook-github-bot-1
parent bf408a1d03
commit 10db97be1e

@@ -448,7 +448,7 @@ def collect_results(args, start_time):
report.flush()
print('\n')
issues.print_errors(json_report, bugs_out)
issues.print_and_save_errors(json_report, bugs_out)
stats['int']['total_time'] = int(round(utils.elapsed_time(start_time)))

@@ -544,7 +544,7 @@ class Infer:
config.JSON_REPORT_FILENAME)
bugs_out = os.path.join(self.args.infer_out,
config.BUGS_FILENAME)
issues.print_errors(json_report, bugs_out)
issues.print_and_save_errors(json_report, bugs_out)
def print_analysis_stats(self):
procs_total = self.stats['int']['procedures']

@@ -141,7 +141,6 @@ class BuckAnalyzer:
config.JSON_REPORT_FILENAME)
with open(merged_results_path, 'w') as file_out:
json.dump(all_results, file_out, indent=2)
# TODO: adapt issues.print_errors to support json and print on screen
print('Results saved in {results_path}'.format(
results_path=merged_results_path))
return os.EX_OK

@@ -140,48 +140,48 @@ def clean_json(args, json_report):
shutil.move(temporary_file, json_report)
def print_errors(json_report, bugs_out):
def print_and_save_errors(json_report, bugs_out):
errors = []
with codecs.open(json_report, 'r', encoding=config.LOCALE) as file_in:
errors = json.load(file_in)
errors = filter(lambda row: row[JSON_INDEX_KIND] in
[ISSUE_KIND_ERROR, ISSUE_KIND_WARNING],
errors)
with codecs.open(bugs_out, 'w', encoding=config.LOCALE) as file_out:
text_errors_list = []
for row in errors:
filename = row[JSON_INDEX_FILENAME]
if os.path.isfile(filename):
kind = row[JSON_INDEX_KIND]
line = row[JSON_INDEX_LINE]
error_type = row[JSON_INDEX_TYPE]
msg = row[JSON_INDEX_QUALIFIER]
indenter = source.Indenter()
indenter.indent_push()
indenter.add(
source.build_source_context(filename,
source.TERMINAL_FORMATTER,
int(line)))
source_context = unicode(indenter)
text_errors_list.append(
u'{0}:{1}: {2}: {3}\n {4}\n{5}'.format(
filename,
line,
kind.lower(),
error_type,
msg,
source_context,
)
)
n_issues = len(text_errors_list)
if n_issues == 0:
_print_and_write(file_out, 'No issues found')
else:
msg = '\nFound %s\n' % utils.get_plural('issue', n_issues)
_print_and_write(file_out, msg)
text_errors = '\n\n'.join(text_errors_list)
_print_and_write(file_out, text_errors)
json.load(file_in))
text_errors_list = []
for row in errors:
filename = row[JSON_INDEX_FILENAME]
if not os.path.isfile(filename):
continue
kind = row[JSON_INDEX_KIND]
line = row[JSON_INDEX_LINE]
error_type = row[JSON_INDEX_TYPE]
msg = row[JSON_INDEX_QUALIFIER]
source_context = source.build_source_context(filename,
source.TERMINAL_FORMATTER,
int(line))
indenter = source.Indenter() \
.indent_push() \
.add(source_context)
source_context = unicode(indenter)
text_errors_list.append(u'%s:%d: %s: %s\n %s\n%s' % (
filename,
line,
kind.lower(),
error_type,
msg,
source_context,
))
n_issues = len(text_errors_list)
with codecs.open(bugs_out, 'w', encoding=config.LOCALE) as file_out:
if n_issues == 0:
_print_and_write(file_out, 'No issues found')
else:
msg = '\nFound %s\n' % utils.get_plural('issue', n_issues)
_print_and_write(file_out, msg)
text_errors = '\n\n'.join(text_errors_list)
_print_and_write(file_out, text_errors)
def _compare_issues(filename_1, line_1, filename_2, line_2):

@@ -44,12 +44,15 @@ class Indenter(str):
def indent_push(self, n=1):
self.indent.append(n * BASE_INDENT * ' ')
return self
def indent_pop(self):
return self.indent.pop()
return self
def newline(self):
self.text += '\n'
return self
def add(self, x):
if type(x) != unicode:
@@ -58,6 +61,7 @@ class Indenter(str):
indent = self.indent_get()
lines = [indent + l for l in lines]
self.text += '\n'.join(lines)
return self
def __unicode__(self):
return self.text

Loading…
Cancel
Save