Stats aggregator for frontend/backend/reporting

Summary:
This change introduces a new binary, `InferStatsAggregator`, which, once invoked, aggregates
all the stats generated by the individual invocations of frontend/backend/reporting, so that
they can be used for performance measurements.

Reviewed By: jvillard

Differential Revision: D3317000

fbshipit-source-id: 61ec615
Branch: master
Author: Martino Luca (committed by Facebook Github Bot 7)
parent 83a14a3541
commit f89e66dc60
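For orientation, here is a minimal OCaml sketch of the idea (not the shipped code): each
frontend/backend/reporting invocation writes a perf-stats JSON file, and the aggregator loads
them all and reduces every numeric field to summary statistics. The helper and file names
below are made up for illustration; the real implementation is in PerfStats,
StatisticsToolbox and StatsAggregator further down.

(* hypothetical helper: mean of the "rtime" field across several perf-stats files *)
let average_rtime files =
  let open Yojson.Basic.Util in
  let rtimes =
    List.map (fun f -> Yojson.Basic.from_file f |> member "rtime" |> to_float) files in
  List.fold_left ( +. ) 0.0 rtimes /. float_of_int (List.length rtimes)

(* usage, assuming the files exist:
   average_rtime ["run1/perf_stats.json"; "run2/perf_stats.json"] *)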
.gitignore

@@ -73,6 +73,7 @@ buck-out/
/infer/bin/InferJava
/infer/bin/InferLLVM
/infer/bin/InferPrint
/infer/bin/InferStatsAggregator
/infer/bin/InferUnit
/infer/bin/Typeprop
/infer/bin/infer

@@ -106,7 +106,6 @@ let pname_is_cpp_model callee_pname =>
| None => false
};
let is_whitelisted_cpp_method method_name =>
IList.exists
(
@@ -120,21 +119,57 @@ let is_whitelisted_cpp_method method_name =>
)
Config.whitelisted_cpp_methods;
let stats () => {
let stats = Procname.Hash.stats attr_tbl;
let {Hashtbl.num_bindings: num_bindings, num_buckets, max_bucket_length} = stats;
let serialized_size = lazy (Marshal.data_size (Marshal.to_bytes attr_tbl []) 0 / 1024);
(
"AttributesTable.attr_tbl",
type t = {
num_bindings: int,
num_buckets: int,
max_bucket_length: int,
serialized_size_kb: option int
};
let to_json at => {
let extra_field =
switch at.serialized_size_kb {
| Some v => [("serialized_size_kb", `Int v)]
| None => []
};
`Assoc (
[
("num_bindings", `Int num_bindings),
("num_buckets", `Int num_buckets),
("max_bucket_length", `Int max_bucket_length)
("num_bindings", `Int at.num_bindings),
("num_buckets", `Int at.num_buckets),
("max_bucket_length", `Int at.num_buckets)
]
@ (
Config.developer_mode ? [("serialized_size_kb", `Int (Lazy.force serialized_size))] : []
)
)
@ extra_field
)
};
let from_json json => {
let open! Yojson.Basic.Util;
{
num_bindings: json |> member "num_bindings" |> to_int,
num_buckets: json |> member "num_buckets" |> to_int,
max_bucket_length: json |> member "max_bucket_length" |> to_int,
serialized_size_kb: json |> member "serialized_size_kb" |> to_option to_int
}
};
let aggregate s => {
let all_num_bindings = IList.map (fun stats => float_of_int stats.num_bindings) s;
let all_num_buckets = IList.map (fun stats => float_of_int stats.num_buckets) s;
let all_max_bucket_length = IList.map (fun stats => float_of_int stats.max_bucket_length) s;
let aggr_num_bindings = StatisticsToolbox.compute_statistics all_num_bindings;
let aggr_num_buckets = StatisticsToolbox.compute_statistics all_num_buckets;
let aggr_max_bucket_length = StatisticsToolbox.compute_statistics all_max_bucket_length;
`Assoc [
("num_bindings", StatisticsToolbox.to_json aggr_num_bindings),
("num_buckets", StatisticsToolbox.to_json aggr_num_buckets),
("max_bucket_length", StatisticsToolbox.to_json aggr_max_bucket_length)
]
};
let stats () => {
let stats = Procname.Hash.stats attr_tbl;
let {Hashtbl.num_bindings: num_bindings, num_buckets, max_bucket_length} = stats;
let serialized_size_kb =
Config.developer_mode ? Some (Marshal.data_size (Marshal.to_bytes attr_tbl []) 0 / 1024) : None;
{num_bindings, num_buckets, max_bucket_length, serialized_size_kb}
};
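For reference, the three integers serialized above come straight from the standard library's
hash-table statistics: Procname.Hash appears to be a Hashtbl.Make instance here, since the
code destructures the Hashtbl.statistics record. A minimal OCaml sketch, with a hypothetical
string-keyed table standing in for attr_tbl:

(* Hashtbl.stats returns { num_bindings; num_buckets; max_bucket_length; bucket_histogram } *)
let describe (tbl : (string, int) Hashtbl.t) =
  let s = Hashtbl.stats tbl in
  Printf.printf "bindings=%d buckets=%d longest bucket=%d\n"
    s.Hashtbl.num_bindings s.Hashtbl.num_buckets s.Hashtbl.max_bucket_length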

@@ -38,4 +38,12 @@ let pname_is_cpp_model: Procname.t => bool;
let is_whitelisted_cpp_method: string => bool;
let stats: unit => (string, Yojson.json);
type t;
let stats: unit => t;
let to_json: t => Yojson.Basic.json;
let from_json: Yojson.Basic.json => t;
let aggregate: list t => Yojson.Basic.json;

@@ -117,6 +117,9 @@ SCRIPT_SOURCES = scripts
CHECKCOPYRIGHT_BIN = $(SCRIPT_DIR)/checkCopyright
CHECKCOPYRIGHT_MAIN = $(SCRIPT_SOURCES)/checkCopyright
STATSAGGREGATOR_BIN = $(BIN_DIR)/InferStatsAggregator
STATSAGGREGATOR_MAIN = $(SCRIPT_SOURCES)/StatsAggregator
#### End of declarations ####
ifeq (@IS_FACEBOOK_TREE@,yes)
@@ -139,6 +142,7 @@ INFER_BASE_TARGETS = \
$(INFERPRINT_MAIN).native \
$(INFERUNIT_MAIN).native \
$(CHECKCOPYRIGHT_MAIN).native \
$(STATSAGGREGATOR_MAIN).native \
$(INFERUNIT_MAIN).native
INFER_ALL_TARGETS = $(INFER_BASE_TARGETS) \
@@ -173,6 +177,7 @@ infer: init $(INFERPRINT_ATDGEN_STUBS)
$(COPY) $(INFER_BUILD_DIR)/$(INFERANALYZE_MAIN).native $(INFERANALYZE_BIN)
$(COPY) $(INFER_BUILD_DIR)/$(INFERPRINT_MAIN).native $(INFERPRINT_BIN)
$(COPY) $(INFER_BUILD_DIR)/$(CHECKCOPYRIGHT_MAIN).native $(CHECKCOPYRIGHT_BIN)
$(COPY) $(INFER_BUILD_DIR)/$(STATSAGGREGATOR_MAIN).native $(STATSAGGREGATOR_BIN)
$(COPY) $(INFER_BUILD_DIR)/$(INFERUNIT_MAIN).native $(INFERUNIT_BIN)
ifeq (@BUILD_LLVM_ANALYZERS@,yes)
$(COPY) $(INFER_BUILD_DIR)/$(INFERLLVM_MAIN).native $(INFERLLVM_BIN)
@@ -320,7 +325,7 @@ endif
$(REMOVE) backend/version.ml.tmp.*
$(REMOVE) backend/jsonbug_{j,t}.ml{,i}
$(REMOVE) $(INFERJAVA_BIN) $(INFERCLANG_BIN) $(INFERLLVM_BIN) $(INFERUNIT_BIN)
$(REMOVE) $(INFERANALYZE_BIN) $(INFERPRINT_BIN) $(CHECKCOPYRIGHT_BIN)
$(REMOVE) $(INFERANALYZE_BIN) $(INFERPRINT_BIN) $(CHECKCOPYRIGHT_BIN) $(STATSAGGREGATOR_BIN)
$(REMOVE) $(CLANG_ATDGEN_STUBS)
$(REMOVE) $(INFER_CLANG_FCP_MIRRORED_FILES)
$(REMOVE) mod_dep.dot

@@ -17,7 +17,7 @@ module YBU = Yojson.Basic.Util
(* Each command line option may appear in the --help list of any executable; these tags are
used to specify for which executables an option will be documented. *)
type exe = A | C | J | L | P | T
type exe = A | C | J | L | P | StatsAggregator | T
let current_exe =
match Filename.basename Sys.executable_name with
@@ -26,6 +26,7 @@ let current_exe =
| "InferJava" -> J
| "InferLLVM" -> L
| "InferPrint" -> P
| "InferStatsAggregator" -> StatsAggregator
| _ -> T
@@ -115,7 +116,15 @@ let check_no_duplicates desc_list =
let full_desc_list = ref []
let exe_desc_lists = [(A, ref []); (C, ref []); (J, ref []); (L, ref []); (P, ref []); (T, ref [])]
let exe_desc_lists = [
(A, ref []);
(C, ref []);
(J, ref []);
(L, ref []);
(P, ref []);
(StatsAggregator, ref []);
(T, ref []);
]
(* add desc to all desc_lists for the purposes of parsing, include desc in --help only for exes *)
let add exes desc =

@@ -11,7 +11,7 @@
open! Utils
type exe = A | C | J | L | P | T
type exe = A | C | J | L | P | StatsAggregator | T
val current_exe : exe

@@ -11,44 +11,147 @@
open! Utils
let words_to_kb n = n *. float_of_int (Sys.word_size / 8) /. 1024.
let words_to_mb n = words_to_kb n /. 1024.
let words_to_gb n = words_to_mb n /. 1024.
type perf_stats = {
rtime : float;
utime : float;
stime : float;
cutime : float;
cstime : float;
minor_gb : float;
promoted_gb : float;
major_gb : float;
allocated_gb : float;
minor_collections : int;
major_collections : int;
compactions : int;
top_heap_gb : float;
stack_kb : float;
minor_heap_kb : float;
attributes_table : AttributesTable.t;
}
let register_report_at_exit file =
Pervasives.at_exit (fun () ->
try
let to_json ps =
`Assoc [
("rtime", `Float ps.rtime);
("utime", `Float ps.utime);
("stime", `Float ps.stime);
("cutime", `Float ps.cutime);
("cstime", `Float ps.cstime);
("minor_gb", `Float ps.minor_gb);
("promoted_gb", `Float ps.promoted_gb);
("major_gb", `Float ps.major_gb);
("allocated_gb", `Float ps.allocated_gb);
("minor_collections", `Int ps.minor_collections);
("major_collections", `Int ps.major_collections);
("compactions", `Int ps.compactions);
("top_heap_gb", `Float ps.top_heap_gb);
("stack_kb", `Float ps.stack_kb);
("minor_heap_kb", `Float ps.minor_heap_kb);
("attributes_table", AttributesTable.to_json attributes_table);
]
let from_json json =
let open Yojson.Basic.Util in
{
rtime = json |> member "rtime" |> to_float;
utime = json |> member "utime" |> to_float;
stime = json |> member "stime" |> to_float;
cutime = json |> member "cutime" |> to_float;
cstime = json |> member "cstime" |> to_float;
minor_gb = json |> member "minor_gb" |> to_float;
promoted_gb = json |> member "promoted_gb" |> to_float;
major_gb = json |> member "major_gb" |> to_float;
allocated_gb = json |> member "allocated_gb" |> to_float;
minor_collections = json |> member "minor_collections" |> to_int;
major_collections = json |> member "major_collections" |> to_int;
compactions = json |> member "compactions" |> to_int;
top_heap_gb = json |> member "top_heap_gb" |> to_float;
stack_kb = json |> member "stack_kb" |> to_float;
minor_heap_kb = json |> member "minor_heap_kb" |> to_float;
attributes_table =
json |> member "attributes_table" |> AttributesTable.from_json;
}
let aggregate s =
let mk_stats f = StatisticsToolbox.compute_statistics (IList.map f s) in
let aggr_rtime = mk_stats (fun stats -> stats.rtime) in
let aggr_utime = mk_stats (fun stats -> stats.utime) in
let aggr_stime = mk_stats (fun stats -> stats.stime) in
let aggr_cutime = mk_stats (fun stats -> stats.cutime) in
let aggr_cstime = mk_stats (fun stats -> stats.cstime) in
let aggr_minor_gb = mk_stats (fun stats -> stats.minor_gb) in
let aggr_promoted_gb = mk_stats (fun stats -> stats.promoted_gb) in
let aggr_major_gb = mk_stats (fun stats -> stats.major_gb) in
let aggr_allocated_gb = mk_stats (fun stats -> stats.allocated_gb) in
let aggr_minor_collections = mk_stats (fun stats -> float_of_int stats.minor_collections) in
let aggr_major_collections = mk_stats (fun stats -> float_of_int stats.major_collections) in
let aggr_compactions = mk_stats (fun stats -> float_of_int stats.compactions) in
let aggr_top_heap_gb = mk_stats (fun stats -> stats.top_heap_gb) in
let aggr_stack_kb = mk_stats (fun stats -> stats.stack_kb) in
let aggr_minor_heap_kb = mk_stats (fun stats -> stats.minor_heap_kb) in
let aggr_attributes_table =
AttributesTable.aggregate (IList.map (fun stats -> stats.attributes_table) s) in
`Assoc [
("rtime", StatisticsToolbox.to_json aggr_rtime);
("utime", StatisticsToolbox.to_json aggr_utime);
("stime", StatisticsToolbox.to_json aggr_stime);
("cutime", StatisticsToolbox.to_json aggr_cutime);
("cstime", StatisticsToolbox.to_json aggr_cstime);
("minor_gb", StatisticsToolbox.to_json aggr_minor_gb);
("promoted_gb", StatisticsToolbox.to_json aggr_promoted_gb);
("major_gb", StatisticsToolbox.to_json aggr_major_gb);
("allocated_gb", StatisticsToolbox.to_json aggr_allocated_gb);
("minor_collections", StatisticsToolbox.to_json aggr_minor_collections);
("major_collections", StatisticsToolbox.to_json aggr_major_collections);
("compactions", StatisticsToolbox.to_json aggr_compactions);
("top_heap_gb", StatisticsToolbox.to_json aggr_top_heap_gb);
("stack_kb", StatisticsToolbox.to_json aggr_stack_kb);
("minor_heap_kb", StatisticsToolbox.to_json aggr_minor_heap_kb);
("attributes_table", aggr_attributes_table);
]
let stats () =
let words_to_kb n = n *. float_of_int (Sys.word_size / 8) /. 1024. in
let words_to_mb n = words_to_kb n /. 1024. in
let words_to_gb n = words_to_mb n /. 1024. in
let gc_stats = Gc.quick_stat () in
let allocated_words =
gc_stats.minor_words +. gc_stats.major_words -. gc_stats.promoted_words in
let gc_ctrl = Gc.get () in
let exit_timeofday = Unix.gettimeofday () in
let exit_times = Unix.times () in
let stats =
`Assoc ([
("rtime", `Float (exit_timeofday -. initial_timeofday)) ;
("utime", `Float (exit_times.tms_utime -. initial_times.tms_utime)) ;
("stime", `Float (exit_times.tms_stime -. initial_times.tms_stime)) ;
("cutime", `Float (exit_times.tms_cutime -. initial_times.tms_cutime)) ;
("cstime", `Float (exit_times.tms_cstime -. initial_times.tms_cstime)) ;
("minor_gb", `Float (words_to_gb gc_stats.minor_words)) ;
("promoted_gb", `Float (words_to_gb gc_stats.promoted_words)) ;
("major_gb", `Float (words_to_gb gc_stats.major_words)) ;
("allocated_gb", `Float (words_to_gb allocated_words)) ;
("minor_collections", `Int gc_stats.minor_collections) ;
("major_collections", `Int gc_stats.major_collections) ;
("compactions", `Int gc_stats.compactions) ;
("top_heap_gb", `Float (words_to_gb (float_of_int gc_stats.top_heap_words))) ;
("stack_kb", `Float (words_to_kb (float_of_int gc_stats.stack_size))) ;
("minor_heap_kb", `Float (words_to_kb (float_of_int gc_ctrl.minor_heap_size)))
] @ [AttributesTable.stats ()]) in
let at = AttributesTable.stats () in
{
rtime = exit_timeofday -. initial_timeofday;
utime = exit_times.tms_utime -. initial_times.tms_utime;
stime = exit_times.tms_stime -. initial_times.tms_stime;
cutime = exit_times.tms_cutime -. initial_times.tms_cutime;
cstime = exit_times.tms_cstime -. initial_times.tms_cstime;
minor_gb = words_to_gb gc_stats.minor_words;
promoted_gb = words_to_gb gc_stats.promoted_words;
major_gb = words_to_gb gc_stats.major_words;
allocated_gb = words_to_gb allocated_words;
minor_collections = gc_stats.minor_collections;
major_collections = gc_stats.major_collections;
compactions = gc_stats.compactions;
top_heap_gb = words_to_gb (float_of_int gc_stats.top_heap_words);
stack_kb = words_to_kb (float_of_int gc_stats.stack_size);
minor_heap_kb = words_to_kb (float_of_int gc_ctrl.minor_heap_size);
attributes_table = at
}
let register_report_at_exit file =
Pervasives.at_exit (fun () ->
try
let json_stats = to_json (stats ()) in
try
let stats_oc = open_out file in
Yojson.pretty_to_channel stats_oc stats ;
Yojson.Basic.pretty_to_channel stats_oc json_stats ;
close_out stats_oc
with exc ->
Format.eprintf "Info: failed to write stats to %s@\n%s@\n%s@\n%s@."
file (Printexc.to_string exc) (Yojson.pretty_to_string stats)
file (Printexc.to_string exc) (Yojson.Basic.pretty_to_string json_stats)
(Printexc.get_backtrace ())
with exc ->
Format.eprintf "Info: failed to compute stats for %s@\n%s@\n%s@."

@@ -11,4 +11,10 @@
open! Utils
type perf_stats
val from_json : Yojson.Basic.json -> perf_stats
val aggregate : perf_stats list -> Yojson.Basic.json
val register_report_at_exit : string -> unit

@@ -0,0 +1,88 @@
/*
* vim: set ft=rust:
* vim: set ft=reason:
*
* Copyright (c) 2016 - present Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
type t = {
sum: float,
avg: float,
min: float,
p10: float,
median: float,
p75: float,
max: float,
count: int
};
let to_json s =>
`Assoc [
("sum", `Float s.sum),
("avg", `Float s.avg),
("min", `Float s.min),
("p10", `Float s.p10),
("median", `Float s.median),
("p75", `Float s.p75),
("max", `Float s.max),
("count", `Int s.count)
];
let from_json json => {
let open! Yojson.Basic.Util;
{
sum: json |> member "sum" |> to_float,
avg: json |> member "avg" |> to_float,
min: json |> member "min" |> to_float,
p10: json |> member "p10" |> to_float,
median: json |> member "median" |> to_float,
p75: json |> member "p75" |> to_float,
max: json |> member "max" |> to_float,
count: json |> member "count" |> to_int
}
};
let compute_statistics values => {
let num_elements = IList.length values;
let sum = IList.fold_left (fun acc v => acc +. v) 0.0 values;
let average = sum /. float_of_int num_elements;
let values_arr = Array.of_list values;
Array.sort
(
fun a b =>
if (a == b) {
0
} else if (a -. b < 0.0) {
(-1)
} else {
1
}
)
values_arr;
let percentile pct => {
assert (pct >= 0.0 && pct <= 1.0);
assert (num_elements > 0);
let max_index = num_elements - 1;
let pct_index = float_of_int max_index *. pct;
let low_index = int_of_float (Pervasives.floor pct_index);
let high_index = int_of_float (Pervasives.ceil pct_index);
let low = values_arr.(low_index);
let high = values_arr.(high_index);
(low +. high) /. 2.0
};
{
sum,
avg: average,
min: percentile 0.0,
p10: percentile 0.10,
median: percentile 0.50,
p75: percentile 0.75,
max: percentile 1.0,
count: num_elements
}
};
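The percentile helper above interpolates between the two samples that surround the requested
rank and averages them, so for an even-sized sample the median is the mean of the two middle
values. A self-contained OCaml sketch of the same rule, on made-up numbers:

let percentile sorted pct =
  let n = Array.length sorted in
  let idx = float_of_int (n - 1) *. pct in
  let low = sorted.(int_of_float (floor idx)) in
  let high = sorted.(int_of_float (ceil idx)) in
  (low +. high) /. 2.0

let () =
  let v = [| 1.0; 2.0; 3.0; 4.0 |] in
  (* median of an even-sized sample: (2.0 +. 3.0) /. 2.0 = 2.5 *)
  Printf.printf "median = %.1f\n" (percentile v 0.5)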

@@ -0,0 +1,19 @@
/*
* vim: set ft=rust:
* vim: set ft=reason:
*
* Copyright (c) 2016 - present Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
type t;
let to_json: t => Yojson.Basic.json;
let from_json: Yojson.Basic.json => t;
let compute_statistics: list float => t;

@@ -66,6 +66,8 @@ let backend_stats_dir_name = "backend_stats"
continues *)
let bound_error_allowed_in_procedure_call = true
let buck_infer_deps_file_name = "infer-deps.txt"
let captured_dir_name = "captured"
let checks_disabled_by_default = [
@@ -450,6 +452,10 @@ and ast_file =
CLOpt.mk_string_opt ~long:"ast-file" ~short:"ast"
~meta:"file" "AST file for the translation"
and buck_out =
CLOpt.mk_string_opt ~long:"buck-out"
~exes:CLOpt.[StatsAggregator] ~meta:"dir" "Specify the root directory of buck-out"
(** Outfile to save bugs stats in csv format *)
and bugs_csv =
CLOpt.mk_option ~deprecated:["bugs"] ~long:"bugs-csv" ~f:create_outfile
@@ -746,7 +752,7 @@ and reports_include_ml_loc =
and results_dir =
CLOpt.mk_string ~deprecated:["results_dir"] ~long:"results-dir"
~default:(Filename.concat (Sys.getcwd ()) "infer-out")
~exes:CLOpt.[A;C;J;L;P] ~meta:"dir" "Specify the project results directory"
~exes:CLOpt.[A;C;J;L;P;StatsAggregator] ~meta:"dir" "Specify the project results directory"
(** name of the file to load save results to *)
and save_results =
@@ -1046,6 +1052,9 @@ let exe_usage (exe : CLOpt.exe) =
To process all the .specs in the current directory, pass . as only parameter \
To process all the .specs in the results directory, use option --results-dir \
Each spec is printed to standard output unless option -q is used."
| StatsAggregator ->
"Usage: InferStatsAggregator --results-dir <dir> --buck-out <dir>\n \
Aggregates all the perf stats generated by Buck on each target"
| T ->
version_string
@@ -1168,6 +1177,7 @@ and angelic_execution = !angelic_execution
and arc_mode = objc_arc
and array_level = !array_level
and ast_file = !ast_file
and buck_out = !buck_out
and bugs_csv = !bugs_csv
and bugs_json = !bugs_json
and bugs_txt = !bugs_txt

@@ -48,6 +48,7 @@ val assign : string
val attributes_dir_name : string
val backend_stats_dir_name : string
val bound_error_allowed_in_procedure_call : bool
val buck_infer_deps_file_name : string
val checks_disabled_by_default : string list
val captured_dir_name : string
val default_failure_name : string
@@ -124,6 +125,7 @@ val analyzer : analyzer option
val angelic_execution : bool
val array_level : int
val ast_file : string option
val buck_out : string option
val bugs_csv : outfile option
val bugs_json : outfile option
val bugs_txt : outfile option

@@ -25,7 +25,7 @@ let buck_out () =
| None ->
Filename.concat (Filename.dirname Config.results_dir) "buck-out"
let infer_deps () = Filename.concat Config.results_dir "infer-deps.txt"
let infer_deps () = Filename.concat Config.results_dir Config.buck_infer_deps_file_name
let modified_targets = ref StringSet.empty

@@ -0,0 +1,149 @@
/*
* vim: set ft=rust:
* vim: set ft=reason:
*
* Copyright (c) 2016 - present Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
open! Utils;
let filename = "aggregated_stats.json";
let dir_exists dir =>
try (Sys.is_directory dir) {
| Sys_error _ => false
};
let find_json_files_in_dir dir => {
let is_valid_json_file path => {
let s = Unix.lstat path;
let json_regex = Str.regexp_case_fold ".*\\.json$";
not (Str.string_match (Str.regexp (".*" ^ Str.quote filename ^ "$")) path 0) &&
Str.string_match json_regex path 0 && s.st_kind == Unix.S_REG
};
dir_exists dir ?
{
let content = Array.to_list (Sys.readdir dir);
let content_with_path = IList.map (fun p => Filename.concat dir p) content;
IList.filter is_valid_json_file content_with_path
} :
[]
};
let find_stats_files_in_dir dir => {
let frontend_stats_files = find_json_files_in_dir (
Filename.concat dir Config.frontend_stats_dir_name
);
let backend_stats_files = find_json_files_in_dir (
Filename.concat dir Config.backend_stats_dir_name
);
let reporting_stats_files = find_json_files_in_dir (
Filename.concat dir Config.reporting_stats_dir_name
);
(frontend_stats_files, backend_stats_files, reporting_stats_files)
};
let load_data_from_infer_deps file => {
let extract_path line =>
switch (Str.split_delim (Str.regexp (Str.quote "\t")) line) {
| [target, _, path, ..._] =>
if (dir_exists path) {
(target, path)
} else {
raise (Failure ("path '" ^ path ^ "' is not a valid directory"))
}
| _ => raise (Failure "malformed input")
};
let lines = Option.get (Utils.read_file file);
try (Ok (IList.map extract_path lines)) {
| Failure msg => Error msg
}
};
let find_all_stats_files () => {
let accumulate_paths acc paths => {
let (f, b, r) = acc;
let (f', b', r') = paths;
(f @ f', b @ b', r @ r')
};
let concatenate_paths p1 p2 =>
if (Filename.is_relative p2) {
Filename.concat p1 p2
} else {
p2
};
let infer_out = Config.results_dir;
let result =
switch Config.buck_out {
| Some p =>
if (dir_exists p) {
let data = load_data_from_infer_deps (
Filename.concat infer_out Config.buck_infer_deps_file_name
);
switch data {
| Ok r =>
let paths = IList.map (fun (_, path) => path) r;
Ok (Filename.concat p Filename.parent_dir_name, paths)
| Error _ as e => e
}
} else {
Error ("buck-out path '" ^ p ^ "' not found")
}
| None => Ok (infer_out, [infer_out])
};
switch result {
| Ok (base_path, paths_to_explore) =>
Ok (
IList.fold_left
(
fun acc path =>
accumulate_paths acc (find_stats_files_in_dir (concatenate_paths base_path path))
)
([], [], [])
paths_to_explore
)
| Error _ as e => e
}
};
let open_json_file file => Yojson.Basic.from_file file;
let write_to_json_file destfile json => {
let stats_oc = open_out destfile;
Yojson.Basic.pretty_to_channel stats_oc json;
close_out stats_oc
};
let aggregate_stats_to_file paths destfile => {
let load_stats paths => IList.map (fun path => PerfStats.from_json (open_json_file path)) paths;
let all_perf_stats = load_stats paths;
switch all_perf_stats {
| [] => Printf.eprintf "No stats to aggregate into %s\n" destfile
| _ =>
let aggr_stats = PerfStats.aggregate all_perf_stats;
write_to_json_file destfile aggr_stats
}
};
let () = {
let infer_out = Config.results_dir;
let result = find_all_stats_files ();
switch result {
| Ok (f, b, r) =>
let aggregated_frontend_stats_dir = Filename.concat infer_out Config.frontend_stats_dir_name;
let aggregated_backend_stats_dir = Filename.concat infer_out Config.backend_stats_dir_name;
let aggregated_reporting_stats_dir = Filename.concat infer_out Config.reporting_stats_dir_name;
DB.create_dir aggregated_frontend_stats_dir;
DB.create_dir aggregated_backend_stats_dir;
DB.create_dir aggregated_reporting_stats_dir;
aggregate_stats_to_file f (Filename.concat aggregated_frontend_stats_dir filename);
aggregate_stats_to_file b (Filename.concat aggregated_backend_stats_dir filename);
aggregate_stats_to_file r (Filename.concat aggregated_reporting_stats_dir filename)
| Error msg => failwith msg
}
};
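load_data_from_infer_deps above expects one tab-separated record per line, keeping the first
and third columns (target and output path) and ignoring the second. A hedged OCaml sketch of
that parsing step on a made-up line (the real code uses Str and additionally checks that the
path is an existing directory):

let parse_infer_deps_line line =
  match String.split_on_char '\t' line with
  | target :: _ :: path :: _ -> Some (target, path)
  | _ -> None

let () =
  match parse_infer_deps_line "//foo:bar\tx\tbuck-out/gen/foo/bar/infer-out" with
  | Some (target, path) -> Printf.printf "target=%s path=%s\n" target path
  | None -> prerr_endline "malformed line"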

@@ -0,0 +1,13 @@
/*
* vim: set ft=rust:
* vim: set ft=reason:
*
* Copyright (c) 2016 - present Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
open! Utils;