Summary:
Warning: This might be a bit brutal. PerfStats and EventLogger are pretty much subsumed by `ScubaLogging`. It seems no one has been looking at the data they generate recently. Let's delete them! If we need to re-implement some parts later on, let's do that using `ScubaLogging`, which is better (e.g., it still produces data when infer crashes).

Things we lose:
- errors in the clang frontend due to missing decl translation, etc.
- errors in biabduction due to timeouts, functions not found, etc.

We could also re-implement these using BackendStats and ScubaLogging instead of brutally deleting everything.

Reviewed By: ngorogiannis

Differential Revision: D20343087

fbshipit-source-id: 90a3121ca

master
parent 6e1cca731c
commit 957337bba2
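If some of these signals are needed again, a re-implementation on top of ScubaLogging could start from something like the sketch below. This is a hypothetical sketch only: the `log_count` entry point and the label names are assumed for illustration, not taken from this diff.

(* Hypothetical sketch: re-emit the frontend translation summary through
   ScubaLogging instead of EventLogger. [ScubaLogging.log_count] is an
   assumed counter-style entry point; check the real interface first. *)
let log_frontend_translation_summary ~total ~failed =
  ScubaLogging.log_count ~label:"frontend.procedures_translated_total" ~value:total ;
  ScubaLogging.log_count ~label:"frontend.procedures_translated_failed" ~value:failed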
@@ -1,344 +0,0 @@
(*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 *)

(** Performance Statistics gathering and reporting *)

open! IStd
module F = Format
module L = Logging

type mem_perf =
  { minor_gb: float
  ; promoted_gb: float
  ; major_gb: float
  ; allocated_gb: float
  ; minor_collections: int
  ; major_collections: int
  ; compactions: int
  ; top_heap_gb: float
  ; stack_kb: float
  ; minor_heap_kb: float }

type time_perf = {rtime: float; utime: float; stime: float; cutime: float; cstime: float}

type perf_stats = {mem: mem_perf option; time: time_perf option}

type stats_kind = Time of Mtime_clock.counter * Unix.process_times | Memory | TimeAndMemory

type stats_type =
  | ClangLinters of SourceFile.t
  | ClangFrontend of SourceFile.t
  | ClangProcessAST of SourceFile.t
  | JavaFrontend of SourceFile.t
  | TotalFrontend
  | Backend of SourceFile.t
  | TotalBackend
  | Reporting
  | Driver

let source_file_of_stats_type = function
  | ClangLinters source_file
  | ClangFrontend source_file
  | ClangProcessAST source_file
  | JavaFrontend source_file
  | Backend source_file ->
      Some source_file
  | _ ->
      None


let relative_path_of_stats_type stats_type =
  let abbrev_source_file =
    Option.map ~f:DB.source_file_encoding (source_file_of_stats_type stats_type)
  in
  let filename =
    match abbrev_source_file with
    | Some abbrev ->
        F.sprintf "%s_%s.json" Config.perf_stats_prefix abbrev
    | None ->
        F.sprintf "%s.json" Config.perf_stats_prefix
  in
  let dirname =
    match stats_type with
    | ClangLinters _ | ClangFrontend _ | ClangProcessAST _ | JavaFrontend _ | TotalFrontend ->
        Config.frontend_stats_dir_name
    | Backend _ | TotalBackend ->
        Config.backend_stats_dir_name
    | Reporting ->
        Config.reporting_stats_dir_name
    | Driver ->
        Config.driver_stats_dir_name
  in
  Filename.concat dirname filename


let string_of_stats_type = function
  | ClangLinters _ ->
      "linters"
  | ClangFrontend _ ->
      "clang_frontend"
  | ClangProcessAST _ ->
      "clang_process_ast"
  | JavaFrontend _ ->
      "java_frontend"
  | TotalFrontend ->
      "total_frontend"
  | Backend _ ->
      "backend"
  | TotalBackend ->
      "total_backend"
  | Reporting ->
      "reporting"
  | Driver ->
      "driver"


let to_json ps =
  let time =
    Option.value_map ~default:[] ps.time ~f:(fun time_perf ->
        [ ( "time"
          , `Assoc
              [ ("rtime", `Float time_perf.rtime)
              ; ("utime", `Float time_perf.utime)
              ; ("stime", `Float time_perf.stime)
              ; ("cutime", `Float time_perf.cutime)
              ; ("cstime", `Float time_perf.cstime) ] ) ] )
  in
  let mem =
    Option.value_map ~default:[] ps.mem ~f:(fun mem_perf ->
        [ ( "mem"
          , `Assoc
              [ ("minor_gb", `Float mem_perf.minor_gb)
              ; ("promoted_gb", `Float mem_perf.promoted_gb)
              ; ("major_gb", `Float mem_perf.major_gb)
              ; ("allocated_gb", `Float mem_perf.allocated_gb)
              ; ("minor_collections", `Int mem_perf.minor_collections)
              ; ("major_collections", `Int mem_perf.major_collections)
              ; ("compactions", `Int mem_perf.compactions)
              ; ("top_heap_gb", `Float mem_perf.top_heap_gb)
              ; ("stack_kb", `Float mem_perf.stack_kb)
              ; ("minor_heap_kb", `Float mem_perf.minor_heap_kb) ] ) ] )
  in
  `Assoc (time @ mem)


let from_json json =
  let open! Yojson.Basic.Util in
  let mem =
    json |> member "mem"
    |> to_option (fun mem_perf ->
           { minor_gb= mem_perf |> member "minor_gb" |> to_float
           ; promoted_gb= mem_perf |> member "promoted_gb" |> to_float
           ; major_gb= mem_perf |> member "major_gb" |> to_float
           ; allocated_gb= mem_perf |> member "allocated_gb" |> to_float
           ; minor_collections= mem_perf |> member "minor_collections" |> to_int
           ; major_collections= mem_perf |> member "major_collections" |> to_int
           ; compactions= mem_perf |> member "compactions" |> to_int
           ; top_heap_gb= mem_perf |> member "top_heap_gb" |> to_float
           ; stack_kb= mem_perf |> member "stack_kb" |> to_float
           ; minor_heap_kb= mem_perf |> member "minor_heap_kb" |> to_float } )
  in
  let time =
    json |> member "time"
    |> to_option (fun time_perf ->
           { rtime= time_perf |> member "rtime" |> to_float
           ; utime= time_perf |> member "utime" |> to_float
           ; stime= time_perf |> member "stime" |> to_float
           ; cutime= time_perf |> member "cutime" |> to_float
           ; cstime= time_perf |> member "cstime" |> to_float } )
  in
  {mem; time}


let aggregate_mem_stats s =
  let mk_stats f =
    StatisticsToolbox.compute_statistics
      (List.filter_map s ~f:(fun stats -> Option.map stats.mem ~f))
  in
  let aggr_minor_gb = mk_stats (fun mem_perf -> mem_perf.minor_gb) in
  let aggr_promoted_gb = mk_stats (fun mem_perf -> mem_perf.promoted_gb) in
  let aggr_major_gb = mk_stats (fun mem_perf -> mem_perf.major_gb) in
  let aggr_allocated_gb = mk_stats (fun mem_perf -> mem_perf.allocated_gb) in
  let aggr_minor_collections = mk_stats (fun mem -> float_of_int mem.minor_collections) in
  let aggr_major_collections = mk_stats (fun mem -> float_of_int mem.major_collections) in
  let aggr_compactions = mk_stats (fun mem -> float_of_int mem.compactions) in
  let aggr_top_heap_gb = mk_stats (fun mem -> mem.top_heap_gb) in
  let aggr_stack_kb = mk_stats (fun mem -> mem.stack_kb) in
  let aggr_minor_heap_kb = mk_stats (fun mem -> mem.minor_heap_kb) in
  [ ("minor_gb", aggr_minor_gb)
  ; ("promoted_gb", aggr_promoted_gb)
  ; ("major_gb", aggr_major_gb)
  ; ("allocated_gb", aggr_allocated_gb)
  ; ("minor_collections", aggr_minor_collections)
  ; ("major_collections", aggr_major_collections)
  ; ("compactions", aggr_compactions)
  ; ("top_heap_gb", aggr_top_heap_gb)
  ; ("stack_kb", aggr_stack_kb)
  ; ("minor_heap_kb", aggr_minor_heap_kb) ]


let aggregate_time_stats s =
  let mk_stats f =
    StatisticsToolbox.compute_statistics
      (List.filter_map s ~f:(fun stats -> Option.map stats.time ~f))
  in
  let aggr_rtime = mk_stats (fun time -> time.rtime) in
  let aggr_utime = mk_stats (fun time -> time.utime) in
  let aggr_stime = mk_stats (fun time -> time.stime) in
  let aggr_cutime = mk_stats (fun time -> time.cutime) in
  let aggr_cstime = mk_stats (fun time -> time.cstime) in
  [ ("rtime", aggr_rtime)
  ; ("utime", aggr_utime)
  ; ("stime", aggr_stime)
  ; ("cutime", aggr_cutime)
  ; ("cstime", aggr_cstime) ]


let aggregate s =
  let build_json_list =
    List.fold_right ~init:[] ~f:(fun (key, stats) l ->
        match stats with Some stats -> (key, StatisticsToolbox.to_json stats) :: l | None -> l )
  in
  let mem_stats = build_json_list (aggregate_mem_stats s) in
  let time_stats = build_json_list (aggregate_time_stats s) in
  let mem_json = if List.is_empty mem_stats then [] else [("mem", `Assoc mem_stats)] in
  let time_json = if List.is_empty time_stats then [] else [("time", `Assoc time_stats)] in
  `Assoc (time_json @ mem_json)


let compute_mem_stats () =
  let bytes_per_word = Sys.word_size / 8 in
  let words_to_bytes n = n *. float_of_int bytes_per_word in
  let words_to_kb n = words_to_bytes n /. 1024. in
  let words_to_mb n = words_to_kb n /. 1024. in
  let words_to_gb n = words_to_mb n /. 1024. in
  let gc_stats = Gc.quick_stat () in
  let allocated_words = gc_stats.minor_words +. gc_stats.major_words -. gc_stats.promoted_words in
  let gc_ctrl = Gc.get () in
  let stats =
    Some
      { minor_gb= words_to_gb gc_stats.minor_words
      ; promoted_gb= words_to_gb gc_stats.promoted_words
      ; major_gb= words_to_gb gc_stats.major_words
      ; allocated_gb= words_to_gb allocated_words
      ; minor_collections= gc_stats.minor_collections
      ; major_collections= gc_stats.major_collections
      ; compactions= gc_stats.compactions
      ; top_heap_gb= words_to_gb (float_of_int gc_stats.top_heap_words)
      ; stack_kb= words_to_kb (float_of_int gc_stats.stack_size)
      ; minor_heap_kb= words_to_kb (float_of_int gc_ctrl.minor_heap_size) }
  in
  (* We log number of bytes instead of a larger unit in EventLogger so the EventLogger output can
     display in whatever format fits best *)
  let mem =
    Some
      { EventLogger.minor_heap_mem= words_to_bytes gc_stats.minor_words
      ; promoted_minor_heap_mem= words_to_bytes gc_stats.promoted_words
      ; major_heap_mem= words_to_bytes gc_stats.major_words
      ; total_allocated_mem= words_to_bytes allocated_words
      ; minor_collections= gc_stats.minor_collections
      ; major_collections= gc_stats.major_collections
      ; heap_compactions= gc_stats.compactions
      ; top_heap_size= gc_stats.top_heap_words * bytes_per_word
      ; stack_size= gc_stats.stack_size * bytes_per_word
      ; minor_heap_size= gc_ctrl.minor_heap_size * bytes_per_word }
  in
  (stats, mem)


let compute_time_stats ?rtime_counter (initial_times : Unix.process_times) =
  let exit_times = Unix.times () in
  let rtime_span = Mtime_clock.elapsed () in
  let rtime =
    Option.value_map ~default:rtime_span ~f:Mtime_clock.count rtime_counter |> Mtime.Span.to_s
  in
  let utime = exit_times.tms_utime -. initial_times.tms_utime in
  let stime = exit_times.tms_stime -. initial_times.tms_stime in
  let cutime = exit_times.tms_cutime -. initial_times.tms_cutime in
  let cstime = exit_times.tms_cstime -. initial_times.tms_cstime in
  let stats = Some {rtime; utime; stime; cutime; cstime} in
  let time =
    Some
      { EventLogger.real_time= rtime
      ; user_time= utime
      ; sys_time= stime
      ; children_user_time= cutime
      ; children_sys_time= cstime }
  in
  (stats, time)


let compute_stats stats_kind stats_type =
  let (mem, mem_perf), (time, time_perf) =
    match stats_kind with
    | Time (rtime_counter, initial_times) ->
        ((None, None), compute_time_stats ~rtime_counter initial_times)
    | Memory ->
        (compute_mem_stats (), (None, None))
    | TimeAndMemory ->
        (compute_mem_stats (), compute_time_stats Utils.initial_times)
  in
  let stats = {mem; time} in
  let stats_event =
    EventLogger.PerformanceStats
      { lang= Language.to_explicit_string !Language.curr_language
      ; source_file= source_file_of_stats_type stats_type
      ; stats_type= string_of_stats_type stats_type
      ; mem_perf
      ; time_perf }
  in
  (stats, stats_event)


let report stats_kind file stats_type () =
  try
    let stats, stats_event = compute_stats stats_kind stats_type in
    let json_stats = to_json stats in
    EventLogger.log stats_event ;
    (* We always log to EventLogger, but json files are unnecessary to log outside of developer mode *)
    if Config.developer_mode then
      try
        Unix.mkdir_p (Filename.dirname file) ;
        (* the same report may be registered across different infer processes *)
        Utils.with_intermediate_temp_file_out file ~f:(fun stats_oc ->
            Yojson.Basic.pretty_to_channel stats_oc json_stats )
      with exc ->
        L.internal_error "Info: failed to write stats to %s@\n%s@\n%s@\n%s@." file
          (Exn.to_string exc)
          (Yojson.Basic.pretty_to_string json_stats)
          (Printexc.get_backtrace ())
  with exc ->
    L.internal_error "Info: failed to compute stats for %s@\n%s@\n%s@." file (Exn.to_string exc)
      (Printexc.get_backtrace ())


let registered = String.Table.create ~size:4 ()

let register_report stats_kind stats_type =
  let relative_path = relative_path_of_stats_type stats_type in
  let absolute_path = Filename.concat Config.results_dir relative_path in
  let f = report stats_kind absolute_path stats_type in
  (* make sure to not double register the same perf stat report *)
  match String.Table.add registered ~key:relative_path ~data:f with
  | `Ok ->
      ()
  | `Duplicate ->
      L.d_warning "Attempting to register same perf stats report multiple times"


let dummy_reporter () = ()

let get_reporter stats_type =
  let relative_path = relative_path_of_stats_type stats_type in
  String.Table.find registered relative_path |> Option.value ~default:dummy_reporter


let register_report_at_exit stats_type =
  let relative_path = relative_path_of_stats_type stats_type in
  register_report TimeAndMemory stats_type ;
  Epilogues.register ~f:(get_reporter stats_type)
    ~description:("stats reporting in " ^ relative_path)
@@ -1,38 +0,0 @@
(*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 *)

(** Performance Statistics gathering and reporting *)

open! IStd

type perf_stats

type stats_kind = Time of Mtime_clock.counter * Unix.process_times | Memory | TimeAndMemory

type stats_type =
  | ClangLinters of SourceFile.t
  | ClangFrontend of SourceFile.t
  | ClangProcessAST of SourceFile.t
  | JavaFrontend of SourceFile.t
  | TotalFrontend
  | Backend of SourceFile.t
  | TotalBackend
  | Reporting
  | Driver

val from_json : Yojson.Basic.t -> perf_stats

val aggregate : perf_stats list -> Yojson.Basic.t

val register_report : stats_kind -> stats_type -> unit
(** Register performance reporting function *)

val get_reporter : stats_type -> unit -> unit
(** Get reporting function that can be called at any time to create a performance report *)

val register_report_at_exit : stats_type -> unit
(** Create performance report when the current process terminates *)
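For context, the interface above was typically driven by registering a report once and letting `Epilogues` fire it at process exit; a minimal sketch of such a call site (the choice of `Driver` is illustrative):

(* Register a time+memory perf report for the whole process; it is written
   out by the Epilogues machinery when the process terminates. *)
let () = PerfStats.register_report_at_exit PerfStats.Driver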
@@ -1,183 +0,0 @@
(*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 *)
open! IStd
open PolyVariantEqual
module L = Logging

let aggregated_stats_filename = "aggregated_stats.json"

let aggregated_stats_by_target_filename = "aggregated_stats_by_target.json"

let json_files_to_ignore_regex =
  Str.regexp
    ( ".*\\("
    ^ Str.quote aggregated_stats_filename
    ^ "\\|"
    ^ Str.quote aggregated_stats_by_target_filename
    ^ "\\)$" )


let dir_exists dir = Sys.is_directory dir = `Yes

let find_json_files_in_dir dir =
  let is_valid_json_file path =
    let s = Unix.lstat path in
    let json_regex = Str.regexp_case_fold ".*\\.json$" in
    (not (Str.string_match json_files_to_ignore_regex path 0))
    && Str.string_match json_regex path 0
    && Poly.equal s.st_kind Unix.S_REG
  in
  match dir_exists dir with
  | true ->
      let content = Array.to_list (Sys.readdir dir) in
      let content_with_path = List.map ~f:(fun p -> Filename.concat dir p) content in
      List.filter ~f:is_valid_json_file content_with_path
  | false ->
      []


type stats_paths =
  {frontend_paths: string list; backend_paths: string list; reporting_paths: string list}

type origin = Buck_out of (string * stats_paths) list | Infer_out of stats_paths

let find_stats_files_in_dir dir =
  let frontend_paths =
    find_json_files_in_dir (Filename.concat dir Config.frontend_stats_dir_name)
  in
  let backend_paths = find_json_files_in_dir (Filename.concat dir Config.backend_stats_dir_name) in
  let reporting_paths =
    find_json_files_in_dir (Filename.concat dir Config.reporting_stats_dir_name)
  in
  {frontend_paths; backend_paths; reporting_paths}


let load_data_from_infer_deps file =
  let error msg = Printf.sprintf ("Error reading '%s': " ^^ msg) file in
  let extract_target_and_path line =
    match String.split ~on:'\t' line with
    | target :: _ :: path :: _ ->
        if dir_exists path then Ok (target, path)
        else Error (error "path '%s' is not a valid directory" path)
    | _ ->
        Error (error "malformed input")
  in
  let parse_lines lines = List.map lines ~f:extract_target_and_path |> Result.all in
  Utils.read_file file
  |> Result.map_error ~f:(fun msg -> error "%s" msg)
  |> Result.bind ~f:parse_lines


let collect_all_stats_files () =
  let infer_out = Config.results_dir in
  let concatenate_paths p1 p2 = if Filename.is_relative p2 then Filename.concat p1 p2 else p2 in
  match Config.buck_out with
  | Some p ->
      if dir_exists p then (
        let data =
          load_data_from_infer_deps (Filename.concat infer_out Config.buck_infer_deps_file_name)
        in
        match data with
        | Ok r ->
            let buck_out_parent = Filename.concat p Filename.parent_dir_name in
            let targets_files =
              List.map
                ~f:(fun (t, p) -> (t, find_stats_files_in_dir (concatenate_paths buck_out_parent p)))
                r
            in
            Ok (Buck_out targets_files)
        | Error _ as e ->
            e )
      else Error ("buck-out path '" ^ p ^ "' not found")
  | None ->
      Ok (Infer_out (find_stats_files_in_dir infer_out))


let aggregate_stats_files paths =
  let open_json_file file = Yojson.Basic.from_file file in
  let load_stats paths =
    List.map ~f:(fun path -> PerfStats.from_json (open_json_file path)) paths
  in
  let all_perf_stats = load_stats paths in
  match all_perf_stats with [] -> None | _ -> Some (PerfStats.aggregate all_perf_stats)


type json_aggregated_stats =
  { frontend_json_data: Yojson.Basic.t option
  ; backend_json_data: Yojson.Basic.t option
  ; reporting_json_data: Yojson.Basic.t option }

let aggregate_all_stats origin =
  let accumulate_paths acc paths =
    { frontend_paths= paths.frontend_paths @ acc.frontend_paths
    ; backend_paths= paths.backend_paths @ acc.backend_paths
    ; reporting_paths= paths.reporting_paths @ acc.reporting_paths }
  in
  let empty_stats_paths = {frontend_paths= []; backend_paths= []; reporting_paths= []} in
  let stats_paths =
    match origin with
    | Buck_out tf ->
        List.fold ~f:(fun acc (_, paths) -> accumulate_paths acc paths) ~init:empty_stats_paths tf
    | Infer_out paths ->
        paths
  in
  { frontend_json_data= aggregate_stats_files stats_paths.frontend_paths
  ; backend_json_data= aggregate_stats_files stats_paths.backend_paths
  ; reporting_json_data= aggregate_stats_files stats_paths.reporting_paths }


let aggregate_stats_by_target tp =
  let to_json f aggr_stats =
    let collect_valid_stats acc t p = match p with Some v -> (t, v) :: acc | None -> acc in
    let l = List.fold ~f:(fun acc (t, p) -> collect_valid_stats acc t (f p)) ~init:[] aggr_stats in
    match l with [] -> None | _ as v -> Some (`Assoc v)
  in
  let frontend_json_data = to_json (fun p -> aggregate_stats_files p.frontend_paths) tp in
  let backend_json_data = to_json (fun p -> aggregate_stats_files p.backend_paths) tp in
  let reporting_json_data = to_json (fun p -> aggregate_stats_files p.reporting_paths) tp in
  {frontend_json_data; backend_json_data; reporting_json_data}


let generate_files () =
  let infer_out = Config.results_dir in
  let stats_files = collect_all_stats_files () in
  let origin =
    match stats_files with Ok origin -> origin | Error e -> L.(die InternalError) "%s" e
  in
  let aggregated_frontend_stats_dir = Filename.concat infer_out Config.frontend_stats_dir_name in
  let aggregated_backend_stats_dir = Filename.concat infer_out Config.backend_stats_dir_name in
  let aggregated_reporting_stats_dir = Filename.concat infer_out Config.reporting_stats_dir_name in
  Utils.create_dir aggregated_frontend_stats_dir ;
  Utils.create_dir aggregated_backend_stats_dir ;
  Utils.create_dir aggregated_reporting_stats_dir ;
  let write_to_json_file_opt destfile json =
    match json with Some j -> Utils.write_json_to_file destfile j | None -> ()
  in
  ( match origin with
  | Buck_out tp ->
      let j = aggregate_stats_by_target tp in
      write_to_json_file_opt
        (Filename.concat aggregated_frontend_stats_dir aggregated_stats_by_target_filename)
        j.frontend_json_data ;
      write_to_json_file_opt
        (Filename.concat aggregated_backend_stats_dir aggregated_stats_by_target_filename)
        j.backend_json_data ;
      write_to_json_file_opt
        (Filename.concat aggregated_reporting_stats_dir aggregated_stats_by_target_filename)
        j.reporting_json_data
  | Infer_out _ ->
      () ) ;
  let j = aggregate_all_stats origin in
  write_to_json_file_opt
    (Filename.concat aggregated_frontend_stats_dir aggregated_stats_filename)
    j.frontend_json_data ;
  write_to_json_file_opt
    (Filename.concat aggregated_backend_stats_dir aggregated_stats_filename)
    j.backend_json_data ;
  write_to_json_file_opt
    (Filename.concat aggregated_reporting_stats_dir aggregated_stats_filename)
    j.reporting_json_data
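Note that `load_data_from_infer_deps` above only assumes that each line has at least three tab-separated fields, of which the first (a buck target) and third (a path) are used; a self-contained illustration with a made-up line:

(* Standalone illustration of the accepted line shape; the sample line and
   its field contents are made up. Uses Core's String.split, as the file
   above does via IStd. *)
open Core

let () =
  match String.split ~on:'\t' "//some:target\tignored\t/tmp/infer-out" with
  | target :: _ :: path :: _ ->
      Printf.printf "target=%s path=%s\n" target path
  | _ ->
      print_endline "malformed input"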
@@ -1,13 +0,0 @@
(*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 *)

open! IStd

(** Generate files containing statistics aggregated from individual statistics files under
    Config.results_dir *)

val generate_files : unit -> unit
@@ -1,411 +0,0 @@
(*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 *)

open! IStd
module CLOpt = CommandLineOption

let hostname = Unix.gethostname ()

module IO = struct
  let log_file_extension = ".log"

  let events_dir = Config.toplevel_results_dir ^/ Config.events_dir_name

  let out_chan = ref None

  let close () =
    match !out_chan with
    | None ->
        ()
    | Some chan ->
        Out_channel.close chan ;
        out_chan := None


  let prepare () =
    close () ;
    let fname = events_dir ^/ (Unix.getpid () |> Pid.to_string) ^ log_file_extension in
    let oc = Stdlib.open_out_gen [Open_append; Open_creat] 0o666 fname in
    out_chan := Some oc


  let write fmt =
    match !out_chan with Some oc -> Printf.fprintf oc fmt | _ -> Printf.ifprintf stdout fmt


  let write_skipped_pname pname =
    let fname = events_dir ^/ "skipped_functions" ^ log_file_extension in
    let oc = Stdlib.open_out_gen [Open_append; Open_creat] 0o666 fname in
    Out_channel.output_string oc pname ;
    Out_channel.output_char oc '\n' ;
    Out_channel.close oc


  let dump () =
    let dump_file_to_stdout fname =
      let ic = In_channel.create fname in
      In_channel.iter_lines ic ~f:print_endline
    in
    let log_files = Utils.find_files ~path:events_dir ~extension:log_file_extension in
    List.iter log_files ~f:dump_file_to_stdout


  let () = Epilogues.register_late ~f:close ~description:"closing EventLogger file"
end

module Random_id : sig
  val get : unit -> string
end = struct
  let () = Random.self_init ()

  let generate () = Random.int64 1_000_000_000_000L |> Int64.to_string

  let infer_run_identifier_env_var = "INFER_RUN_IDENTIFIER"

  let get () =
    match Sys.getenv infer_run_identifier_env_var with
    | Some id ->
        id
    | None ->
        let new_id = generate () in
        Unix.putenv ~key:infer_run_identifier_env_var ~data:new_id ;
        new_id
end

type analysis_issue =
  { bug_kind: string
  ; bug_type: string
  ; clang_method_kind: string option
  ; exception_triggered_location: Logging.ocaml_pos option
  ; lang: string
  ; procedure_name: string
  ; source_location: Location.t }

let create_analysis_issue_row base record =
  let open JsonBuilder in
  base
  |> add_string ~key:"bug_kind" ~data:record.bug_kind
  |> add_string ~key:"bug_type" ~data:record.bug_type
  |> add_string_opt ~key:"clang_method_kind" ~data:record.clang_method_kind
  |> add_string_opt ~key:"exception_triggered_location"
       ~data:(Option.map ~f:Logging.ocaml_pos_to_string record.exception_triggered_location)
  |> add_string ~key:"lang" ~data:record.lang
  |> add_string ~key:"procedure_name" ~data:record.procedure_name
  |> add_string ~key:"source_location"
       ~data:
         (String.concat
            [ string_of_int record.source_location.line
            ; ":"
            ; string_of_int record.source_location.col ])
  |> add_string ~key:"source_file" ~data:(SourceFile.to_rel_path record.source_location.file)


type analysis_stats =
  { analysis_nodes_visited: int
  ; analysis_status: SymOp.failure_kind option
  ; analysis_total_nodes: int
  ; clang_method_kind: string option
  ; lang: string
  ; method_location: Location.t
  ; method_name: string
  ; num_preposts: int
  ; symops: int }

let create_analysis_stats_row base record =
  let open JsonBuilder in
  base
  |> add_int ~key:"analysis_nodes_visited" ~data:record.analysis_nodes_visited
  |> add_string ~key:"analysis_status"
       ~data:
         (Option.value_map record.analysis_status ~default:"OK" ~f:(fun stats_failure ->
              SymOp.failure_kind_to_string stats_failure ))
  |> add_int ~key:"analysis_total_nodes" ~data:record.analysis_total_nodes
  |> add_string_opt ~key:"clang_method_kind" ~data:record.clang_method_kind
  |> add_string ~key:"lang" ~data:record.lang
  |> add_string ~key:"method_location"
       ~data:
         (String.concat
            [ string_of_int record.method_location.line
            ; ":"
            ; string_of_int record.method_location.col ])
  |> add_string ~key:"source_file" ~data:(SourceFile.to_rel_path record.method_location.file)
  |> add_string ~key:"method_name" ~data:record.method_name
  |> add_int ~key:"num_preposts" ~data:record.num_preposts
  |> add_int ~key:"symops" ~data:record.symops


type dynamic_dispatch =
  | Dynamic_dispatch_successful
  | Dynamic_dispatch_parameters_arguments_mismatch
  | Dynamic_dispatch_model_specialization_failure

let string_of_dynamic_dispatch_opt dd =
  match dd with
  | Some Dynamic_dispatch_successful ->
      "dynamic dispatch successful"
  | Some Dynamic_dispatch_parameters_arguments_mismatch ->
      "dynamic dispatch failed with arguments mismatch"
  | Some Dynamic_dispatch_model_specialization_failure ->
      "dynamic dispatch model specialized failed"
  | None ->
      "no dynamic dispatch"


type call_trace =
  { call_location: Location.t
  ; call_result: string
  ; callee_clang_method_kind: string option
  ; callee_source_file: SourceFile.t option
  ; callee_name: string
  ; caller_name: string
  ; lang: string
  ; reason: string option
  ; dynamic_dispatch: dynamic_dispatch option }

let create_call_trace_row base record =
  let open JsonBuilder in
  base
  |> add_string ~key:"call_location"
       ~data:
         (String.concat
            [string_of_int record.call_location.line; ":"; string_of_int record.call_location.col])
  |> add_string ~key:"source_file" ~data:(SourceFile.to_rel_path record.call_location.file)
  |> add_string ~key:"call_result" ~data:record.call_result
  |> add_string_opt ~key:"callee_clang_method_kind" ~data:record.callee_clang_method_kind
  |> add_string_opt ~key:"callee_source_file"
       ~data:(Option.map ~f:SourceFile.to_rel_path record.callee_source_file)
  |> add_string ~key:"callee_name" ~data:record.callee_name
  |> add_string ~key:"caller_name" ~data:record.caller_name
  |> add_string ~key:"lang" ~data:record.lang
  |> add_string_opt ~key:"reason" ~data:record.reason
  |> add_string ~key:"dynamic_dispatch"
       ~data:(string_of_dynamic_dispatch_opt record.dynamic_dispatch)


type frontend_exception =
  { ast_node: string option
  ; exception_triggered_location: Logging.ocaml_pos
  ; exception_type: string
  ; lang: string
  ; source_location_start: Location.t
  ; source_location_end: Location.t }

let create_frontend_exception_row base record =
  let open JsonBuilder in
  base
  |> add_string_opt ~key:"ast_node" ~data:record.ast_node
  |> add_string ~key:"exception_triggered_location"
       ~data:(Logging.ocaml_pos_to_string record.exception_triggered_location)
  |> add_string ~key:"exception_type" ~data:record.exception_type
  |> add_string ~key:"lang" ~data:record.lang
  |> add_string ~key:"source_location_start_file"
       ~data:(SourceFile.to_rel_path record.source_location_start.file)
  |> add_string ~key:"source_location_start_pos"
       ~data:
         (String.concat
            [ string_of_int record.source_location_start.line
            ; ":"
            ; string_of_int record.source_location_start.col ])
  |> add_string ~key:"source_location_end_file"
       ~data:(SourceFile.to_rel_path record.source_location_end.file)
  |> add_string ~key:"source_location_end_pos"
       ~data:
         (String.concat
            [ string_of_int record.source_location_end.line
            ; ":"
            ; string_of_int record.source_location_end.col ])


type mem_perf =
  { minor_heap_mem: float
  ; promoted_minor_heap_mem: float
  ; major_heap_mem: float
  ; total_allocated_mem: float
  ; minor_collections: int
  ; major_collections: int
  ; heap_compactions: int
  ; top_heap_size: int
  ; stack_size: int
  ; minor_heap_size: int }

type time_perf =
  { real_time: float
  ; user_time: float
  ; sys_time: float
  ; children_user_time: float
  ; children_sys_time: float }

type performance_stats =
  { lang: string
  ; source_file: SourceFile.t option
  ; stats_type: string
  ; mem_perf: mem_perf option
  ; time_perf: time_perf option }

let create_performance_stats_row base record =
  let open JsonBuilder in
  let add_mem_perf t =
    Option.value_map ~default:t record.mem_perf ~f:(fun mem_perf ->
        t
        |> add_float ~key:"minor_heap_mem" ~data:mem_perf.minor_heap_mem
        |> add_float ~key:"promoted_minor_heap_mem" ~data:mem_perf.promoted_minor_heap_mem
        |> add_float ~key:"major_heap_mem" ~data:mem_perf.major_heap_mem
        |> add_float ~key:"total_allocated_mem" ~data:mem_perf.total_allocated_mem
        |> add_int ~key:"minor_collections" ~data:mem_perf.minor_collections
        |> add_int ~key:"major_collections" ~data:mem_perf.major_collections
        |> add_int ~key:"heap_compactions" ~data:mem_perf.heap_compactions
        |> add_int ~key:"top_heap_size" ~data:mem_perf.top_heap_size
        |> add_int ~key:"stack_size" ~data:mem_perf.stack_size
        |> add_int ~key:"minor_heap_size" ~data:mem_perf.minor_heap_size )
  in
  let add_time_perf t =
    Option.value_map ~default:t record.time_perf ~f:(fun time_perf ->
        t
        |> add_float ~key:"real_time" ~data:time_perf.real_time
        |> add_float ~key:"user_time" ~data:time_perf.user_time
        |> add_float ~key:"sys_time" ~data:time_perf.sys_time
        |> add_float ~key:"children_user_time" ~data:time_perf.children_user_time
        |> add_float ~key:"children_sys_time" ~data:time_perf.children_sys_time )
  in
  base
  |> add_string ~key:"lang" ~data:record.lang
  |> add_string_opt ~key:"source_file"
       ~data:(Option.map ~f:SourceFile.to_rel_path record.source_file)
  |> add_string ~key:"stats_type" ~data:record.stats_type
  |> add_mem_perf |> add_time_perf


type procedures_translated =
  { lang: string
  ; procedures_translated_failed: int
  ; procedures_translated_total: int
  ; source_file: SourceFile.t }

let create_procedures_translated_row base record =
  let open JsonBuilder in
  base
  |> add_string ~key:"lang" ~data:record.lang
  |> add_int ~key:"procedures_translated_failed" ~data:record.procedures_translated_failed
  |> add_int ~key:"procedures_translated_total" ~data:record.procedures_translated_total
  |> add_string ~key:"source_file" ~data:(SourceFile.to_rel_path record.source_file)


type event =
  | AnalysisIssue of analysis_issue
  | AnalysisStats of analysis_stats
  | CallTrace of call_trace
  | FrontendException of frontend_exception
  | PerformanceStats of performance_stats
  | ProceduresTranslatedSummary of procedures_translated
  | UncaughtException of exn * int

let string_of_event event =
  match event with
  | AnalysisIssue _ ->
      "AnalysisIssue"
  | AnalysisStats _ ->
      "AnalysisStats"
  | CallTrace _ ->
      "CallTrace"
  | FrontendException _ ->
      "FrontendException"
  | PerformanceStats _ ->
      "PerformanceStats"
  | ProceduresTranslatedSummary _ ->
      "ProceduresTranslatedSummary"
  | UncaughtException _ ->
      "UncaughtException"


let sequence_ctr = ref 0

let pid () = Pid.to_int (Unix.getpid ())

let sysname =
  try
    Utils.with_process_in "uname 2>/dev/null" (fun chan ->
        Scanf.bscanf (Scanf.Scanning.from_channel chan) "%s" (fun n -> n) )
    |> fst
  with _ -> "Unknown"


module type S = sig
  val get_log_identifier : unit -> string

  val prepare : unit -> unit

  val log : event -> unit

  val log_skipped_pname : string -> unit

  val dump : unit -> unit
end

module LoggerImpl : S = struct
  let get_log_identifier () = Random_id.get ()

  let create_row event =
    incr sequence_ctr ;
    let open JsonBuilder in
    let base =
      empty
      |> add_string ~key:"command" ~data:(InferCommand.to_string Config.command)
      |> add_string ~key:"event_tag" ~data:(string_of_event event)
      |> add_string ~key:"hostname" ~data:hostname
      |> add_string ~key:"infer_commit" ~data:Version.commit
      |> add_int ~key:"is_originator" ~data:(if CLOpt.is_originator then 1 else 0)
      |> add_string_opt ~key:"job_id" ~data:Config.job_id
      |> add_int ~key:"pid" ~data:(pid ())
      |> add_string ~key:"run_identifier" ~data:(get_log_identifier ())
      |> add_int ~key:"sequence" ~data:(!sequence_ctr - 1)
      |> add_string ~key:"sysname" ~data:sysname
      |> add_int ~key:"time" ~data:(int_of_float (Unix.time ()))
    in
    ( match event with
    | AnalysisIssue record ->
        create_analysis_issue_row base record
    | AnalysisStats record ->
        create_analysis_stats_row base record
    | CallTrace record ->
        create_call_trace_row base record
    | FrontendException record ->
        create_frontend_exception_row base record
    | PerformanceStats record ->
        create_performance_stats_row base record
    | ProceduresTranslatedSummary record ->
        create_procedures_translated_row base record
    | UncaughtException (exn, exitcode) ->
        base
        |> add_string ~key:"exception" ~data:(Caml.Printexc.exn_slot_name exn)
        |> add_string ~key:"exception_info" ~data:(Exn.to_string exn)
        |> add_int ~key:"exitcode" ~data:exitcode )
    |> JsonBuilder.to_json


  let prepare = IO.prepare

  let log event = IO.write "%s\n" (create_row event)

  let dump = IO.dump

  let log_skipped_pname pname = if Config.log_skipped then IO.write_skipped_pname pname else ()
end

module DummyLogger : S = struct
  let get_log_identifier () = ""

  let prepare () = ()

  let log _ = ()

  let dump _ = ()

  let log_skipped_pname _ = ()
end

(* use real logger if logging is enabled, dummy logger otherwise *)
include (val if Config.log_events then (module LoggerImpl : S) else (module DummyLogger : S))
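The last line of the implementation above uses OCaml's first-class modules to pick the logger at module-initialization time; the same idiom in a self-contained form:

(* Self-contained sketch of [include (val ...)]: choose between two
   implementations of a signature at startup and re-export the winner. *)
module type S = sig
  val log : string -> unit
end

module Real : S = struct
  let log = print_endline
end

module Dummy : S = struct
  let log _ = ()
end

(* stand-in for Config.log_events *)
let enabled = false

include (val if enabled then (module Real : S) else (module Dummy : S))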
@@ -1,103 +0,0 @@
(*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 *)

open! IStd

type analysis_issue =
  { bug_kind: string
  ; bug_type: string
  ; clang_method_kind: string option
  ; exception_triggered_location: Logging.ocaml_pos option
  ; lang: string
  ; procedure_name: string
  ; source_location: Location.t }

type analysis_stats =
  { analysis_nodes_visited: int
  ; analysis_status: SymOp.failure_kind option
  ; analysis_total_nodes: int
  ; clang_method_kind: string option
  ; lang: string
  ; method_location: Location.t
  ; method_name: string
  ; num_preposts: int
  ; symops: int }

type dynamic_dispatch =
  | Dynamic_dispatch_successful
  | Dynamic_dispatch_parameters_arguments_mismatch
  | Dynamic_dispatch_model_specialization_failure

type call_trace =
  { call_location: Location.t
  ; call_result: string
  ; callee_clang_method_kind: string option
  ; callee_source_file: SourceFile.t option
  ; callee_name: string
  ; caller_name: string
  ; lang: string
  ; reason: string option
  ; dynamic_dispatch: dynamic_dispatch option }

type frontend_exception =
  { ast_node: string option
  ; exception_triggered_location: Logging.ocaml_pos
  ; exception_type: string
  ; lang: string
  ; source_location_start: Location.t
  ; source_location_end: Location.t }

type mem_perf =
  { minor_heap_mem: float
  ; promoted_minor_heap_mem: float
  ; major_heap_mem: float
  ; total_allocated_mem: float
  ; minor_collections: int
  ; major_collections: int
  ; heap_compactions: int
  ; top_heap_size: int
  ; stack_size: int
  ; minor_heap_size: int }

type time_perf =
  { real_time: float
  ; user_time: float
  ; sys_time: float
  ; children_user_time: float
  ; children_sys_time: float }

type performance_stats =
  { lang: string
  ; source_file: SourceFile.t option
  ; stats_type: string
  ; mem_perf: mem_perf option
  ; time_perf: time_perf option }

type procedures_translated =
  { lang: string
  ; procedures_translated_failed: int
  ; procedures_translated_total: int
  ; source_file: SourceFile.t }

type event =
  | AnalysisIssue of analysis_issue
  | AnalysisStats of analysis_stats
  | CallTrace of call_trace
  | FrontendException of frontend_exception
  | PerformanceStats of performance_stats
  | ProceduresTranslatedSummary of procedures_translated
  | UncaughtException of exn * int  (** exception, exitcode *)

val get_log_identifier : unit -> string

val prepare : unit -> unit

val log : event -> unit

val log_skipped_pname : string -> unit

val dump : unit -> unit
@@ -1,42 +0,0 @@
(*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 *)

open! IStd

type t = {integers: int String.Map.t; floats: float String.Map.t; strings: string String.Map.t}

let empty = {integers= String.Map.empty; floats= String.Map.empty; strings= String.Map.empty}

let add_int ({integers} as t) ~key ~data = {t with integers= String.Map.set integers ~key ~data}

let add_float ({floats} as t) ~key ~data = {t with floats= String.Map.set floats ~key ~data}

let add_string ({strings} as t) ~key ~data = {t with strings= String.Map.set strings ~key ~data}

let add_string_opt t ~key ~data = match data with Some data -> add_string t ~key ~data | None -> t

let yojson_of_integers integers =
  let f ~key ~data acc = (key, `Int data) :: acc in
  `Assoc (String.Map.fold integers ~init:[] ~f)


let yojson_of_floats floats =
  let f ~key ~data acc = (key, `Float data) :: acc in
  `Assoc (String.Map.fold floats ~init:[] ~f)


let yojson_of_strings strings =
  let f ~key ~data acc = (key, `String data) :: acc in
  `Assoc (String.Map.fold strings ~init:[] ~f)


let to_json {integers; floats; strings} =
  `Assoc
    [ ("int", yojson_of_integers integers)
    ; ("double", yojson_of_floats floats)
    ; ("normal", yojson_of_strings strings) ]
  |> Yojson.Basic.to_string
@@ -1,22 +0,0 @@
(*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 *)

open! IStd

type t

val empty : t

val add_int : t -> key:string -> data:int -> t

val add_float : t -> key:string -> data:float -> t

val add_string : t -> key:string -> data:string -> t

val add_string_opt : t -> key:string -> data:string option -> t

val to_json : t -> string
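A usage sketch for the builder above: values are bucketed by type, so the rendered JSON groups them under the "int", "double" and "normal" keys that the implementation emits.

(* Build one row and render it; [row] evaluates to
   {"int":{"pid":42},"double":{},"normal":{"lang":"java"}} *)
let row =
  JsonBuilder.(
    empty |> add_string ~key:"lang" ~data:"java" |> add_int ~key:"pid" ~data:42 |> to_json)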
@@ -1,34 +0,0 @@
(*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 *)
open! IStd

let log_frontend_exception (trans_unit_ctx : CFrontend_config.translation_unit_context)
    exception_type exception_triggered_location (source_location_start, source_location_end)
    ast_node =
  let frontend_exception =
    EventLogger.FrontendException
      { exception_type
      ; source_location_start=
          CLocation.clang_to_sil_location trans_unit_ctx.source_file source_location_start
      ; source_location_end=
          CLocation.clang_to_sil_location trans_unit_ctx.source_file source_location_end
      ; exception_triggered_location
      ; ast_node
      ; lang= CFrontend_config.string_of_clang_lang trans_unit_ctx.lang }
  in
  EventLogger.log frontend_exception


let log_caught_exception trans_unit_ctx exception_type exception_triggered_location source_range
    ast_node =
  log_frontend_exception trans_unit_ctx exception_type exception_triggered_location source_range
    ast_node


let log_unexpected_decl trans_unit_ctx exception_triggered_location source_range ast_node =
  log_frontend_exception trans_unit_ctx "Skipped declaration inside a class"
    exception_triggered_location source_range ast_node
@@ -1,23 +0,0 @@
(*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 *)

open! IStd

val log_caught_exception :
     CFrontend_config.translation_unit_context
  -> string
  -> Logging.ocaml_pos
  -> Clang_ast_t.source_location * Clang_ast_t.source_location
  -> string option
  -> unit

val log_unexpected_decl :
     CFrontend_config.translation_unit_context
  -> Logging.ocaml_pos
  -> Clang_ast_t.source_location * Clang_ast_t.source_location
  -> string option
  -> unit
@@ -1,56 +0,0 @@
(*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 *)
open! IStd

type t =
  {sum: float; avg: float; min: float; p10: float; median: float; p75: float; max: float; count: int}

let to_json s =
  `Assoc
    [ ("sum", `Float s.sum)
    ; ("avg", `Float s.avg)
    ; ("min", `Float s.min)
    ; ("p10", `Float s.p10)
    ; ("median", `Float s.median)
    ; ("p75", `Float s.p75)
    ; ("max", `Float s.max)
    ; ("count", `Int s.count) ]


let compute_statistics values =
  match values with
  | [] ->
      None
  | _ :: _ as values ->
      let open Float in
      let num_elements = List.length values in
      let sum = List.fold ~f:(fun acc v -> acc +. v) ~init:0.0 values in
      let average = sum /. float_of_int num_elements in
      let values_arr = Array.of_list values in
      Array.sort
        ~compare:(fun a b -> if equal a b then 0 else if a -. b < 0.0 then -1 else 1)
        values_arr ;
      let percentile pct =
        assert (pct >= 0.0 && pct <= 1.0) ;
        assert (Int.(num_elements > 0)) ;
        let max_index = Int.(num_elements - 1) in
        let pct_index = float_of_int max_index *. pct in
        let low_index = int_of_float (Stdlib.floor pct_index) in
        let high_index = int_of_float (Stdlib.ceil pct_index) in
        let low = values_arr.(low_index) in
        let high = values_arr.(high_index) in
        (low +. high) /. 2.0
      in
      Some
        { sum
        ; avg= average
        ; min= percentile 0.0
        ; p10= percentile 0.10
        ; median= percentile 0.50
        ; p75= percentile 0.75
        ; max= percentile 1.0
        ; count= num_elements }
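The `percentile` helper above interpolates between the two neighbouring ranks by averaging them; a worked case on four sorted values:

(* For [|1.; 2.; 3.; 4.|] and pct = 0.75: max_index = 3, pct_index = 2.25,
   low_index = 2, high_index = 3, so the result is (3. +. 4.) /. 2. = 3.5.
   Likewise the median is (2. +. 3.) /. 2. = 2.5. *)
let () =
  match StatisticsToolbox.compute_statistics [1.; 2.; 3.; 4.] with
  | Some _stats ->
      () (* p75 = 3.5 and median = 2.5, per the arithmetic above *)
  | None ->
      assert false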
@@ -1,14 +0,0 @@
(*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 *)

open! IStd

type t

val to_json : t -> Yojson.Basic.t

val compute_statistics : float list -> t option