refactor dumping of json

Summary: public
Small refactoring: factor the repeated "open a file, json.dump with indent=2" boilerplate into a single utils.dump_json_to_path helper and use it at every call site.
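
For illustration, the pattern being consolidated looks roughly like this (a minimal sketch; the real call sites and variable names are in the diff below):

    # Before: every call site opens its own file handle and picks its own options.
    with open(stats_filename, 'w') as stats_out:
        json.dump(stats, stats_out, indent=2)

    # After: a single helper owns file handling, indentation, and encoding.
    utils.dump_json_to_path(stats, stats_filename)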

Reviewed By: jeremydubreil

Differential Revision: D2679580

fb-gh-sync-id: b1e6061
Branch: master
Authored by Jules Villard (9 years ago), committed by facebook-github-bot-1
Parent: 04e4546a91
Commit: fc2879ca8f

@@ -408,8 +408,7 @@ def collect_results(args, start_time):
     store_performances_csv(args.infer_out, stats)
     stats_filename = os.path.join(args.infer_out, config.STATS_FILENAME)
-    with open(stats_filename, 'w') as stats_out:
-        json.dump(stats, stats_out, indent=2)
+    utils.dump_json_to_path(stats, stats_filename)
     basic_stats = get_basic_stats(stats)

@@ -535,9 +535,7 @@ class Infer:
         }
         stats_path = os.path.join(self.args.infer_out, config.STATS_FILENAME)
-        with codecs.open(stats_path, 'w',
-                         encoding=config.LOCALE) as stats_file:
-            json.dump(self.stats, stats_file, indent=2)
+        utils.dump_json_to_path(self.stats, stats_path)
 
     def close(self):

@@ -139,8 +139,7 @@ class BuckAnalyzer:
         all_results = utils.merge_json_arrays_from_files(result_files)
         merged_results_path = os.path.join(self.args.infer_out,
                                            config.JSON_REPORT_FILENAME)
-        with open(merged_results_path, 'w') as file_out:
-            json.dump(all_results, file_out, indent=2)
+        utils.dump_json_to_path(all_results, merged_results_path)
         print('Results saved in {results_path}'.format(
             results_path=merged_results_path))
         return os.EX_OK

@@ -134,9 +134,7 @@ def clean_json(args, json_report):
         collected_rows,
         cmp=_compare_json_rows)
     temporary_file = tempfile.mktemp()
-    with open(temporary_file, 'w') as file_out:
-        json.dump(collected_rows, file_out, indent=2)
-        file_out.flush()
+    utils.dump_json_to_path(collected_rows, temporary_file)
     shutil.move(temporary_file, json_report)

@@ -128,6 +128,20 @@ def run_command(cmd, debug_mode, infer_out, message, env=os.environ):
         raise e
 
 
+def dump_json_to_path(
+        data, path,
+        skipkeys=False, ensure_ascii=True, check_circular=True, allow_nan=True,
+        cls=None,
+        indent=2,  # customized
+        separators=None,
+        encoding=config.LOCALE,  # customized
+        default=None, sort_keys=False, **kw):
+    with codecs.open(path, 'w', encoding=config.LOCALE) as file_out:
+        json.dump(data, file_out,
+                  skipkeys, ensure_ascii, check_circular, allow_nan, cls,
+                  indent, separators, encoding, default, sort_keys, **kw)
+
+
 def merge_json_arrays_from_files(report_paths):
     # TODO: use streams instead of loading the entire json in memory
     json_data = []
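
Read on its own, the new helper only needs codecs, json, and the configured output encoding. A self-contained sketch, assuming config.LOCALE names an encoding such as 'utf-8' and keeping only the two customized defaults:

    import codecs
    import json

    LOCALE = 'utf-8'  # stand-in for config.LOCALE in the diff

    def dump_json_to_path(data, path, indent=2, **kw):
        # Open the file with an explicit encoding and delegate to json.dump,
        # so every call site serializes JSON the same way.
        with codecs.open(path, 'w', encoding=LOCALE) as file_out:
            json.dump(data, file_out, indent=indent, **kw)

    # Usage mirroring the call sites in the diff:
    dump_json_to_path({'analysis_time': 3.14}, '/tmp/stats.json')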
