[log] delete PerfStats and EventLogger frameworks

Summary:
Warning: This might be a bit brutal.

PerfStats and EventLogger are pretty much subsumed by `ScubaLogging`.
It seems no one has been looking at the data they generate recently.
Let's delete them! If we need to re-implement some parts later on, let's
do that using `ScubaLogging`, which is better (e.g., it still produces data
when infer crashes).

Things we lose:
- errors in the clang frontend due to missing decl translation, etc.
- errors in biabduction due to timeouts, functions not found, etc.

We could also re-implement these using BackendStats and ScubaLogging
instead of brutally deleting everything; a rough sketch is included below.
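
For illustration only (not part of this diff): if the per-procedure failure
logging ever needs to come back, it could be rebuilt on top of the Scuba-based
stats mentioned above. The OCaml sketch below assumes a ScubaLogging-style
counter helper; the log_count name and signature, the label scheme, and the
log_skipped_function wrapper are all assumptions, not code from this commit.

    (* Hypothetical sketch: count biabduction/frontend failures through a
       Scuba-style counter instead of EventLogger. Everything except
       Logging.debug and Procname.pp is an assumption. *)
    let log_skipped_function ~reason proc_name =
      (* one counter per failure reason, e.g. "timeout" or "function_not_found" *)
      ScubaLogging.log_count ~label:("backend_failure." ^ reason) ~value:1 ;
      Logging.debug Analysis Verbose "skipped %a: %s@\n" Procname.pp proc_name reason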

Reviewed By: ngorogiannis

Differential Revision: D20343087

fbshipit-source-id: 90a3121ca
Branch: master
Author: Jules Villard (5 years ago), committed by Facebook Github Bot
Parent: 6e1cca731c
Commit: 957337bba2

@@ -145,10 +145,6 @@ BUCK OPTIONS
 successful capture. Only valid for --buck-clang. (Conversely:
 --no-buck-merge-all-deps)
---buck-out dir
-Specify the root directory of buck-out. Only valid for
---buck-java.
 --buck-targets-blacklist +regex
 Skip capture of buck targets matched by the specified regular
 expression. Only valid for --buck-compilation-database.

@@ -167,10 +167,6 @@ OPTIONS
 successful capture. Only valid for --buck-clang. (Conversely:
 --no-buck-merge-all-deps) See also infer-capture(1).
---buck-out dir
-Specify the root directory of buck-out. Only valid for
---buck-java. See also infer-capture(1).
 --buck-targets-blacklist +regex
 Skip capture of buck targets matched by the specified regular
 expression. Only valid for --buck-compilation-database.
@@ -291,8 +287,7 @@ OPTIONS
 --reports-include-ml-loc, --no-only-cheap-debug, --trace-error,
 --write-dotty, --write-html) (Conversely: --no-debug | -G)
 See also infer-analyze(1), infer-capture(1), infer-compile(1),
-infer-events(1), infer-report(1), infer-reportdiff(1), and
-infer-run(1).
+infer-report(1), infer-reportdiff(1), and infer-run(1).
 --debug-level level
 Debug level (sets --bo-debug level, --debug-level-analysis level,
@@ -301,26 +296,22 @@ OPTIONS
 - 1: verbose debugging enabled
 - 2: very verbose debugging enabled
 See also infer-analyze(1), infer-capture(1), infer-compile(1),
-infer-events(1), infer-report(1), infer-reportdiff(1), and
-infer-run(1).
+infer-report(1), infer-reportdiff(1), and infer-run(1).
 --debug-level-analysis int
 Debug level for the analysis. See --debug-level for accepted
 values. See also infer-analyze(1), infer-capture(1), infer-compile(1),
-infer-events(1), infer-report(1), infer-reportdiff(1), and
-infer-run(1).
+infer-report(1), infer-reportdiff(1), and infer-run(1).
 --debug-level-capture int
 Debug level for the capture. See --debug-level for accepted
 values. See also infer-analyze(1), infer-capture(1), infer-compile(1),
-infer-events(1), infer-report(1), infer-reportdiff(1), and
-infer-run(1).
+infer-report(1), infer-reportdiff(1), and infer-run(1).
 --debug-level-linters int
 Debug level for the linters. See --debug-level for accepted
 values. See also infer-analyze(1), infer-capture(1), infer-compile(1),
-infer-events(1), infer-report(1), infer-reportdiff(1), and
-infer-run(1).
+infer-report(1), infer-reportdiff(1), and infer-run(1).
 --no-deduplicate
 Deactivates: Apply issue-specific deduplication during analysis
@@ -603,21 +594,21 @@ OPTIONS
 --help
 Show this manual See also infer-analyze(1), infer-capture(1), infer-compile(1),
-infer-events(1), infer-explore(1), infer-report(1),
-infer-reportdiff(1), and infer-run(1).
+infer-explore(1), infer-report(1), infer-reportdiff(1), and
+infer-run(1).
 --help-format { auto | groff | pager | plain }
 Show this help in the specified format. auto sets the format to
 plain if the environment variable TERM is "dumb" or undefined, and
 to pager otherwise. See also infer-analyze(1), infer-capture(1), infer-compile(1),
-infer-events(1), infer-explore(1), infer-report(1),
-infer-reportdiff(1), and infer-run(1).
+infer-explore(1), infer-report(1), infer-reportdiff(1), and
+infer-run(1).
 --help-full
 Show this manual with all internal options in the INTERNAL OPTIONS
 section See also infer-analyze(1), infer-capture(1), infer-compile(1),
-infer-events(1), infer-explore(1), infer-report(1),
-infer-reportdiff(1), and infer-run(1).
+infer-explore(1), infer-report(1), infer-reportdiff(1), and
+infer-run(1).
 --no-hoisting-report-only-expensive
 Deactivates: [Hoisting] Report loop-invariant calls only when the
@@ -764,16 +755,6 @@ OPTIONS
 Do not start new parallel jobs if the load average is greater than
 that specified (Buck and make only) See also infer-capture(1).
---log-events
-Activates: Turn on the feature that logs events in a
-machine-readable format (Conversely: --no-log-events)
-See also infer-run(1).
---log-skipped
-Activates: Turn on the feature that logs skipped functions (one
-per file) in a machine-readable format (Conversely:
---no-log-skipped) See also infer-run(1).
 --loop-hoisting
 Activates: checker for loop-hoisting (Conversely:
 --no-loop-hoisting) See also infer-analyze(1).
@@ -814,11 +795,6 @@ OPTIONS
 Activates: Print the active checkers before starting the analysis
 (Conversely: --no-print-active-checkers) See also infer-analyze(1).
---print-log-identifier
-Activates: Print the unique identifier that is common to all
-logged events (Conversely: --no-print-log-identifier)
-See also infer-run(1).
 --print-logs
 Activates: Also log messages to stdout and stderr (Conversely:
 --no-print-logs) See also infer-analyze(1), infer-capture(1), infer-report(1), and
@@ -1212,9 +1188,6 @@ INTERNAL OPTIONS
 --buck-compilation-database-reset
 Cancel the effect of --buck-compilation-database.
---buck-out-reset
-Cancel the effect of --buck-out.
 --buck-targets-blacklist-reset
 Set --buck-targets-blacklist to the empty list.
@@ -1936,7 +1909,7 @@ FILES
 SEE ALSO
-infer-analyze(1), infer-capture(1), infer-compile(1), infer-events(1),
+infer-analyze(1), infer-capture(1), infer-compile(1),
 infer-explore(1), infer-report(1), infer-reportdiff(1), infer-run(1)

@@ -88,23 +88,10 @@ OPTIONS
 Show this manual with all internal options in the INTERNAL OPTIONS
 section
---log-events
-Activates: Turn on the feature that logs events in a
-machine-readable format (Conversely: --no-log-events)
---log-skipped
-Activates: Turn on the feature that logs skipped functions (one
-per file) in a machine-readable format (Conversely:
---no-log-skipped)
 --pmd-xml
 Activates: Output issues in (PMD) XML format (Conversely:
 --no-pmd-xml)
---print-log-identifier
-Activates: Print the unique identifier that is common to all
-logged events (Conversely: --no-print-log-identifier)
 --print-logs
 Activates: Also log messages to stdout and stderr (Conversely:
 --no-print-logs)

@@ -167,10 +167,6 @@ OPTIONS
 successful capture. Only valid for --buck-clang. (Conversely:
 --no-buck-merge-all-deps) See also infer-capture(1).
---buck-out dir
-Specify the root directory of buck-out. Only valid for
---buck-java. See also infer-capture(1).
 --buck-targets-blacklist +regex
 Skip capture of buck targets matched by the specified regular
 expression. Only valid for --buck-compilation-database.
@@ -291,8 +287,7 @@ OPTIONS
 --reports-include-ml-loc, --no-only-cheap-debug, --trace-error,
 --write-dotty, --write-html) (Conversely: --no-debug | -G)
 See also infer-analyze(1), infer-capture(1), infer-compile(1),
-infer-events(1), infer-report(1), infer-reportdiff(1), and
-infer-run(1).
+infer-report(1), infer-reportdiff(1), and infer-run(1).
 --debug-level level
 Debug level (sets --bo-debug level, --debug-level-analysis level,
@@ -301,26 +296,22 @@ OPTIONS
 - 1: verbose debugging enabled
 - 2: very verbose debugging enabled
 See also infer-analyze(1), infer-capture(1), infer-compile(1),
-infer-events(1), infer-report(1), infer-reportdiff(1), and
-infer-run(1).
+infer-report(1), infer-reportdiff(1), and infer-run(1).
 --debug-level-analysis int
 Debug level for the analysis. See --debug-level for accepted
 values. See also infer-analyze(1), infer-capture(1), infer-compile(1),
-infer-events(1), infer-report(1), infer-reportdiff(1), and
-infer-run(1).
+infer-report(1), infer-reportdiff(1), and infer-run(1).
 --debug-level-capture int
 Debug level for the capture. See --debug-level for accepted
 values. See also infer-analyze(1), infer-capture(1), infer-compile(1),
-infer-events(1), infer-report(1), infer-reportdiff(1), and
-infer-run(1).
+infer-report(1), infer-reportdiff(1), and infer-run(1).
 --debug-level-linters int
 Debug level for the linters. See --debug-level for accepted
 values. See also infer-analyze(1), infer-capture(1), infer-compile(1),
-infer-events(1), infer-report(1), infer-reportdiff(1), and
-infer-run(1).
+infer-report(1), infer-reportdiff(1), and infer-run(1).
 --no-deduplicate
 Deactivates: Apply issue-specific deduplication during analysis
@@ -603,21 +594,21 @@ OPTIONS
 --help
 Show this manual See also infer-analyze(1), infer-capture(1), infer-compile(1),
-infer-events(1), infer-explore(1), infer-report(1),
-infer-reportdiff(1), and infer-run(1).
+infer-explore(1), infer-report(1), infer-reportdiff(1), and
+infer-run(1).
 --help-format { auto | groff | pager | plain }
 Show this help in the specified format. auto sets the format to
 plain if the environment variable TERM is "dumb" or undefined, and
 to pager otherwise. See also infer-analyze(1), infer-capture(1), infer-compile(1),
-infer-events(1), infer-explore(1), infer-report(1),
-infer-reportdiff(1), and infer-run(1).
+infer-explore(1), infer-report(1), infer-reportdiff(1), and
+infer-run(1).
 --help-full
 Show this manual with all internal options in the INTERNAL OPTIONS
 section See also infer-analyze(1), infer-capture(1), infer-compile(1),
-infer-events(1), infer-explore(1), infer-report(1),
-infer-reportdiff(1), and infer-run(1).
+infer-explore(1), infer-report(1), infer-reportdiff(1), and
+infer-run(1).
 --no-hoisting-report-only-expensive
 Deactivates: [Hoisting] Report loop-invariant calls only when the
@@ -764,16 +755,6 @@ OPTIONS
 Do not start new parallel jobs if the load average is greater than
 that specified (Buck and make only) See also infer-capture(1).
---log-events
-Activates: Turn on the feature that logs events in a
-machine-readable format (Conversely: --no-log-events)
-See also infer-run(1).
---log-skipped
-Activates: Turn on the feature that logs skipped functions (one
-per file) in a machine-readable format (Conversely:
---no-log-skipped) See also infer-run(1).
 --loop-hoisting
 Activates: checker for loop-hoisting (Conversely:
 --no-loop-hoisting) See also infer-analyze(1).
@@ -814,11 +795,6 @@ OPTIONS
 Activates: Print the active checkers before starting the analysis
 (Conversely: --no-print-active-checkers) See also infer-analyze(1).
---print-log-identifier
-Activates: Print the unique identifier that is common to all
-logged events (Conversely: --no-print-log-identifier)
-See also infer-run(1).
 --print-logs
 Activates: Also log messages to stdout and stderr (Conversely:
 --no-print-logs) See also infer-analyze(1), infer-capture(1), infer-report(1), and
@@ -1194,7 +1170,7 @@ FILES
 SEE ALSO
-infer-analyze(1), infer-capture(1), infer-compile(1), infer-events(1),
+infer-analyze(1), infer-capture(1), infer-compile(1),
 infer-explore(1), infer-report(1), infer-reportdiff(1), infer-run(1)

@@ -223,8 +223,8 @@ let update errlog_old errlog_new =
 ErrLogHash.iter (fun err_key l -> ignore (add_issue errlog_old err_key l)) errlog_new
-let log_issue procname ~clang_method_kind severity err_log ~loc ~node ~session ~ltr
-~linters_def_file ~doc_url ~access ~extras exn =
+let log_issue severity err_log ~loc ~node ~session ~ltr ~linters_def_file ~doc_url ~access ~extras
+exn =
 let error = Exceptions.recognize_exception exn in
 let severity = Option.value error.severity ~default:severity in
 let hide_java_loc_zero =
@@ -249,26 +249,6 @@ let log_issue procname ~clang_method_kind severity err_log ~loc ~node ~session ~
 Exceptions.equal_visibility error.visibility Exceptions.Exn_user
 || (Config.developer_mode && exn_developer)
 in
-( if exn_developer then
-let issue =
-let lang = Procname.get_language procname in
-let clang_method_kind =
-match lang with
-| Language.Clang ->
-Option.map ~f:ClangMethodKind.to_string clang_method_kind
-| _ ->
-None
-in
-EventLogger.AnalysisIssue
-{ bug_type= error.name.IssueType.unique_id
-; bug_kind= Exceptions.severity_string severity
-; clang_method_kind
-; exception_triggered_location= error.ocaml_pos
-; lang= Language.to_explicit_string lang
-; procedure_name= Procname.to_string procname
-; source_location= loc }
-in
-EventLogger.log issue ) ;
 if should_report && (not hide_java_loc_zero) && not hide_memory_error then
 let added =
 let node_id, node_key =

@@ -106,9 +106,7 @@ val update : t -> t -> unit
 (** Update an old error log with a new one *)
 val log_issue :
-Procname.t
--> clang_method_kind:ClangMethodKind.t option
--> Exceptions.severity
+Exceptions.severity
 -> t
 -> loc:Location.t
 -> node:node

@@ -500,8 +500,6 @@ let get_access pdesc = pdesc.attributes.access
 let get_nodes pdesc = pdesc.nodes
-let get_nodes_num pdesc = pdesc.nodes_num
 (** Return the return type of the procedure *)
 let get_ret_type pdesc = pdesc.attributes.ret_type

@@ -241,8 +241,6 @@ val get_locals : t -> ProcAttributes.var_data list
 val get_nodes : t -> Node.t list
-val get_nodes_num : t -> int
 val get_proc_name : t -> Procname.t
 val get_ret_type : t -> Typ.t

@@ -497,8 +497,8 @@ let log_frontend_issue method_decl_opt (node : Ctl_parser_types.ast_node)
 CAst_utils.generate_key_stmt st
 in
 let node_key = Procdesc.NodeKey.of_frontend_node_key key_str in
-Reporting.log_frontend_issue procname issue_desc.severity errlog exn ~loc:issue_desc.loc
-~ltr:trace ~node_key
+Reporting.log_frontend_issue issue_desc.severity errlog exn ~loc:issue_desc.loc ~ltr:trace
+~node_key
 let fill_issue_desc_info_and_log context ~witness ~current_node (issue_desc : CIssue.issue_desc) loc
@@ -510,10 +510,7 @@ let fill_issue_desc_info_and_log context ~witness ~current_node (issue_desc : CI
 let suggestion = Option.map ~f:process_message issue_desc.suggestion in
 let issue_desc' = {issue_desc with description; loc; suggestion} in
 try log_frontend_issue context.CLintersContext.current_method witness issue_desc'
-with CFrontend_errors.IncorrectAssumption e ->
-let trans_unit_ctx = context.CLintersContext.translation_unit_context in
-ClangLogging.log_caught_exception trans_unit_ctx "IncorrectAssumption" e.position e.source_range
-e.ast_node
+with CFrontend_errors.IncorrectAssumption _ -> ()
 (* Calls the set of hard coded checkers (if any) *)

@@ -785,11 +785,7 @@ and eval_AndWithWitnesses an lcxt f1 f2 pred_name_ args =
 match eval_AndWithW an lcxt f1 f2 with
 | Some (witness1, witness2) -> (
 try if eval_Atomic_with_witness pred_name_ args witness1 witness2 lcxt then Some an else None
-with CFrontend_errors.IncorrectAssumption e ->
-let trans_unit_ctx = lcxt.CLintersContext.translation_unit_context in
-ClangLogging.log_caught_exception trans_unit_ctx "IncorrectAssumption" e.position
-e.source_range e.ast_node ;
-None )
+with CFrontend_errors.IncorrectAssumption _ -> None )
 | None ->
 None
@@ -966,11 +962,7 @@ and eval_formula ?keep_witness f an lcxt : Ctl_parser_types.ast_node option =
 None
 | Atomic (name, params) -> (
 try if eval_Atomic name params an lcxt then Some an else None
-with CFrontend_errors.IncorrectAssumption e ->
-let trans_unit_ctx = lcxt.CLintersContext.translation_unit_context in
-ClangLogging.log_caught_exception trans_unit_ctx "IncorrectAssumption" e.position
-e.source_range e.ast_node ;
-None )
+with CFrontend_errors.IncorrectAssumption _ -> None )
 | InNode (node_type_list, f1) ->
 in_node node_type_list f1 an lcxt
 | Not f1 -> (

@@ -160,11 +160,7 @@ let mutable_local_vars_advice context an =
 | _ ->
 None
 else None
-with CFrontend_errors.IncorrectAssumption e ->
-let trans_unit_ctx = context.CLintersContext.translation_unit_context in
-ClangLogging.log_caught_exception trans_unit_ctx "IncorrectAssumption" e.position e.source_range
-e.ast_node ;
-None
+with CFrontend_errors.IncorrectAssumption _ -> None
 (* Should only be called with a VarDecl *)

@@ -7,8 +7,7 @@
 open Core
 (* NOTE: All variants must be also added to `all_commands` below *)
-type t = Analyze | Capture | Compile | Events | Explore | Report | ReportDiff | Run
-[@@deriving compare]
+type t = Analyze | Capture | Compile | Explore | Report | ReportDiff | Run [@@deriving compare]
 let equal = [%compare.equal: t]
@@ -16,7 +15,6 @@ let command_to_string =
 [ (Analyze, "analyze")
 ; (Capture, "capture")
 ; (Compile, "compile")
-; (Events, "events")
 ; (Explore, "explore")
 ; (Report, "report")
 ; (ReportDiff, "reportdiff")

@@ -13,7 +13,6 @@ type t =
 | Compile
 (** set up the infer environment then run the compilation commands without capturing the
 source files *)
-| Events (** dump logged events into stdout *)
 | Explore (** explore infer reports *)
 | Report (** post-process infer results and reports *)
 | ReportDiff (** compute the difference of two infer reports *)

@@ -6,7 +6,6 @@
 * LICENSE file in the root directory of this source tree.
 *)
 open! IStd
-module Hashtbl = Caml.Hashtbl
 module L = Logging
 module F = Format
@@ -373,63 +372,6 @@ let pp_text_of_report fmt report =
 List.iter ~f:pp_row report ; F.fprintf fmt "@?"
-module Stats = struct
-type t =
-{ files: (SourceFile.t, unit) Hashtbl.t
-; mutable nchecked: int
-; mutable ndefective: int
-; mutable nerrors: int
-; mutable ninfos: int
-; mutable nadvice: int
-; mutable nlikes: int
-; mutable nprocs: int
-; mutable nspecs: int
-; mutable ntimeouts: int
-; mutable nverified: int
-; mutable nwarnings: int
-; mutable saved_errors: string list }
-let create () =
-{ files= Hashtbl.create 3
-; nchecked= 0
-; ndefective= 0
-; nerrors= 0
-; ninfos= 0
-; nadvice= 0
-; nlikes= 0
-; nprocs= 0
-; nspecs= 0
-; ntimeouts= 0
-; nverified= 0
-; nwarnings= 0
-; saved_errors= [] }
-end
-module StatsLogs = struct
-let process _ (summary : Summary.t) _ _ =
-let num_preposts =
-match summary.payloads.biabduction with Some {preposts} -> List.length preposts | None -> 0
-in
-let clang_method_kind =
-ClangMethodKind.to_string (Summary.get_attributes summary).clang_method_kind
-in
-let proc_name = Summary.get_proc_name summary in
-let lang = Procname.get_language proc_name in
-let stats =
-EventLogger.AnalysisStats
-{ analysis_nodes_visited= Summary.Stats.nb_visited summary.stats
-; analysis_status= Summary.Stats.failure_kind summary.stats
-; analysis_total_nodes= Summary.get_proc_desc summary |> Procdesc.get_nodes_num
-; clang_method_kind= (match lang with Language.Clang -> Some clang_method_kind | _ -> None)
-; lang= Language.to_explicit_string lang
-; method_location= Summary.get_loc summary
-; method_name= Procname.to_string proc_name
-; num_preposts
-; symops= Summary.Stats.symops summary.stats }
-in
-EventLogger.log stats
-end
 (** Categorize the preconditions of specs and print stats *)
 module PreconditionStats = struct
 let nr_nopres = ref 0
@@ -635,31 +577,9 @@ let error_filter filters proc_name file error_name =
 && filters.Inferconfig.proc_filter proc_name
-type report_kind = Costs | Issues | Stats | Summary [@@deriving compare]
+type report_kind = Costs | Issues | Summary [@@deriving compare]
-let _string_of_report_kind = function
-| Costs ->
-"Costs"
-| Issues ->
-"Issues"
-| Stats ->
-"Stats"
-| Summary ->
-"Summary"
-type bug_format_kind = Json | Logs | Tests | Text [@@deriving compare]
-let _string_of_bug_format_kind = function
-| Json ->
-"Json"
-| Logs ->
-"Logs"
-| Tests ->
-"Tests"
-| Text ->
-"Text"
+type bug_format_kind = Json | Tests | Text [@@deriving compare]
 let get_outfile outfile =
 match outfile with
@@ -677,9 +597,7 @@ let pp_issue_in_format (format_kind, (outfile_opt : Utils.outfile option)) error
 IssuesJson.pp outf.fmt
 {error_filter; proc_name; proc_loc_opt= Some proc_location; err_key; err_data}
 | Tests ->
-L.(die InternalError) "Printing issues as tests is not implemented"
+L.die InternalError "Printing issues as tests is not implemented"
-| Logs ->
-L.(die InternalError) "Printing issues as logs is not implemented"
 | Text ->
 let outf = get_outfile outfile_opt in
 IssuesTxt.pp_issue outf.fmt error_filter (Some proc_location) err_key err_data
@@ -691,22 +609,12 @@ let pp_issues_in_format (format_kind, (outfile_opt : Utils.outfile option)) =
 let outf = get_outfile outfile_opt in
 IssuesJson.pp_issues_of_error_log outf.fmt
 | Tests ->
-L.(die InternalError) "Printing issues as tests is not implemented"
+L.die InternalError "Printing issues as tests is not implemented"
-| Logs ->
-L.(die InternalError) "Printing issues as logs is not implemented"
 | Text ->
 let outf = get_outfile outfile_opt in
 IssuesTxt.pp_issues_of_error_log outf.fmt
-let pp_stats_in_format (format_kind, _) =
-match format_kind with
-| Logs ->
-StatsLogs.process
-| Json | Tests | Text ->
-L.(die InternalError) "Printing stats in json/tests/text is not implemented"
 let pp_issues_of_error_log error_filter linereader proc_loc_opt procname err_log bug_format_list =
 let pp_issues_in_format format =
 pp_issues_in_format format error_filter linereader proc_loc_opt procname err_log
@@ -723,14 +631,6 @@ let collect_issues summary issues_acc =
 err_log issues_acc
-let pp_stats error_filter linereader summary stats stats_format_list =
-let pp_stats_in_format format =
-let pp_stats = pp_stats_in_format format in
-pp_stats error_filter summary linereader stats
-in
-List.iter ~f:pp_stats_in_format stats_format_list
 let pp_summary summary =
 L.result "Procedure: %a@\n%a@." Procname.pp (Summary.get_proc_name summary) Summary.pp_text
 summary
@@ -741,7 +641,7 @@ let pp_costs_in_format (format_kind, (outfile_opt : Utils.outfile option)) =
 | Json ->
 let outf = get_outfile outfile_opt in
 JsonCostsPrinter.pp outf.fmt
-| Tests | Text | Logs ->
+| Tests | Text ->
 L.(die InternalError) "Printing costs in tests/text/logs is not implemented"
@@ -755,14 +655,11 @@ let pp_costs summary costs_format_list =
 List.iter ~f:pp costs_format_list
-let pp_summary_by_report_kind formats_by_report_kind summary error_filter linereader stats file
-issues_acc =
+let pp_summary_by_report_kind formats_by_report_kind summary issues_acc =
 let pp_summary_by_report_kind (report_kind, format_list) =
 match (report_kind, format_list) with
 | Costs, _ ->
 pp_costs summary format_list
-| Stats, _ :: _ ->
-pp_stats (error_filter file) linereader summary stats format_list
 | Summary, _ when InferCommand.equal Config.command Report && not Config.quiet ->
 pp_summary summary
 | _ ->
@@ -785,9 +682,7 @@ let pp_json_report_by_report_kind formats_by_report_kind fname =
 let outf = get_outfile outfile_opt in
 pp_text_of_report outf.fmt report
 | Json ->
-L.(die InternalError) "Printing issues from json does not support json output"
+L.die InternalError "Printing issues from json does not support json output"
-| Logs ->
-L.(die InternalError) "Printing issues from json does not support logs output"
 in
 List.iter ~f:pp_json_issue format_list
 in
@@ -826,14 +721,9 @@ let pp_lint_issues filters formats_by_report_kind linereader procname error_log
 (** Process a summary *)
-let process_summary filters formats_by_report_kind linereader stats summary issues_acc =
-let file = (Summary.get_loc summary).Location.file in
+let process_summary formats_by_report_kind summary issues_acc =
 let proc_name = Summary.get_proc_name summary in
-let error_filter = error_filter filters proc_name in
-let issues_acc' =
-pp_summary_by_report_kind formats_by_report_kind summary error_filter linereader stats file
-issues_acc
-in
+let issues_acc' = pp_summary_by_report_kind formats_by_report_kind summary issues_acc in
 if Config.precondition_stats then PreconditionStats.do_summary proc_name summary ;
 if Config.summary_stats then SummaryStats.do_summary proc_name summary ;
 issues_acc'
@@ -854,24 +744,17 @@ let init_issues_format_list report_json =
 json_format @ tests_format @ txt_format
-let init_stats_format_list () =
-let logs_format = if Config.log_events then [(Logs, None)] else [] in
-logs_format
 let init_files format_list_by_kind =
 let init_files_of_report_kind (report_kind, format_list) =
 let init_files_of_format (format_kind, (outfile_opt : Utils.outfile option)) =
 match (format_kind, report_kind) with
-| Logs, (Issues | Summary) ->
-L.(die InternalError) "Logging these reports is not implemented"
 | Json, Costs ->
 let outfile = get_outfile outfile_opt in
 JsonCostsPrinter.pp_open outfile.fmt ()
 | Json, Issues ->
 let outfile = get_outfile outfile_opt in
 IssuesJson.pp_open outfile.fmt ()
-| Logs, (Costs | Stats) | Json, (Stats | Summary) | Tests, _ | Text, _ ->
+| Json, Summary | Tests, _ | Text, _ ->
 ()
 in
 List.iter ~f:init_files_of_format format_list
@@ -883,15 +766,13 @@ let finalize_and_close_files format_list_by_kind =
 let close_files_of_report_kind (report_kind, format_list) =
 let close_files_of_format (format_kind, (outfile_opt : Utils.outfile option)) =
 ( match (format_kind, report_kind) with
-| Logs, (Issues | Summary) ->
-L.(die InternalError) "Logging these reports is not implemented"
 | Json, Costs ->
 let outfile = get_outfile outfile_opt in
 JsonCostsPrinter.pp_close outfile.fmt ()
 | Json, Issues ->
 let outfile = get_outfile outfile_opt in
 IssuesJson.pp_close outfile.fmt ()
-| Logs, (Costs | Stats) | Json, (Stats | Summary) | Tests, _ | Text, _ ->
+| Json, Summary | Tests, _ | Text, _ ->
 () ) ;
 match outfile_opt with Some outfile -> Utils.close_outf outfile | None -> ()
 in
@@ -902,13 +783,11 @@ let finalize_and_close_files format_list_by_kind =
 let pp_summary_and_issues formats_by_report_kind issue_formats =
-let stats = Stats.create () in
 let linereader = Printer.LineReader.create () in
 let filters = Inferconfig.create_filters () in
 let all_issues = ref [] in
 SpecsFiles.iter_from_config ~f:(fun summary ->
-all_issues :=
-process_summary filters formats_by_report_kind linereader stats summary !all_issues ) ;
+all_issues := process_summary formats_by_report_kind summary !all_issues ) ;
 all_issues := Issue.sort_filter_issues !all_issues ;
 if Config.is_checker_enabled QuandaryBO then all_issues := QuandaryBO.update_issues !all_issues ;
 List.iter
@@ -928,11 +807,6 @@ let pp_summary_and_issues formats_by_report_kind issue_formats =
 finalize_and_close_files formats_by_report_kind
-let register_perf_stats_report () =
-let rtime_span, initial_times = (Mtime_clock.counter (), Unix.times ()) in
-PerfStats.register_report (PerfStats.Time (rtime_span, initial_times)) PerfStats.Reporting
 let main ~report_json =
 let issue_formats = init_issues_format_list report_json in
 let formats_by_report_kind =
@@ -944,10 +818,8 @@ let main ~report_json =
 | None ->
 []
 in
-costs_report_format_kind
-@ [(Issues, issue_formats); (Stats, init_stats_format_list ()); (Summary, [])]
+costs_report_format_kind @ [(Issues, issue_formats); (Summary, [])]
 in
-register_perf_stats_report () ;
 init_files formats_by_report_kind ;
 ( match Config.from_json_report with
 | Some fname ->
@@ -956,4 +828,4 @@ let main ~report_json =
 pp_summary_and_issues formats_by_report_kind issue_formats ) ;
 if Config.test_determinator && Config.process_clang_ast then
 TestDeterminator.merge_test_determinator_results () ;
-PerfStats.get_reporter PerfStats.Reporting ()
+()

@@ -1,344 +0,0 @@
(*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
(** Performance Statistics gathering and reporting *)
open! IStd
module F = Format
module L = Logging
type mem_perf =
{ minor_gb: float
; promoted_gb: float
; major_gb: float
; allocated_gb: float
; minor_collections: int
; major_collections: int
; compactions: int
; top_heap_gb: float
; stack_kb: float
; minor_heap_kb: float }
type time_perf = {rtime: float; utime: float; stime: float; cutime: float; cstime: float}
type perf_stats = {mem: mem_perf option; time: time_perf option}
type stats_kind = Time of Mtime_clock.counter * Unix.process_times | Memory | TimeAndMemory
type stats_type =
| ClangLinters of SourceFile.t
| ClangFrontend of SourceFile.t
| ClangProcessAST of SourceFile.t
| JavaFrontend of SourceFile.t
| TotalFrontend
| Backend of SourceFile.t
| TotalBackend
| Reporting
| Driver
let source_file_of_stats_type = function
| ClangLinters source_file
| ClangFrontend source_file
| ClangProcessAST source_file
| JavaFrontend source_file
| Backend source_file ->
Some source_file
| _ ->
None
let relative_path_of_stats_type stats_type =
let abbrev_source_file =
Option.map ~f:DB.source_file_encoding (source_file_of_stats_type stats_type)
in
let filename =
match abbrev_source_file with
| Some abbrev ->
F.sprintf "%s_%s.json" Config.perf_stats_prefix abbrev
| None ->
F.sprintf "%s.json" Config.perf_stats_prefix
in
let dirname =
match stats_type with
| ClangLinters _ | ClangFrontend _ | ClangProcessAST _ | JavaFrontend _ | TotalFrontend ->
Config.frontend_stats_dir_name
| Backend _ | TotalBackend ->
Config.backend_stats_dir_name
| Reporting ->
Config.reporting_stats_dir_name
| Driver ->
Config.driver_stats_dir_name
in
Filename.concat dirname filename
let string_of_stats_type = function
| ClangLinters _ ->
"linters"
| ClangFrontend _ ->
"clang_frontend"
| ClangProcessAST _ ->
"clang_process_ast"
| JavaFrontend _ ->
"java_frontend"
| TotalFrontend ->
"total_frontend"
| Backend _ ->
"backend"
| TotalBackend ->
"total_backend"
| Reporting ->
"reporting"
| Driver ->
"driver"
let to_json ps =
let time =
Option.value_map ~default:[] ps.time ~f:(fun time_perf ->
[ ( "time"
, `Assoc
[ ("rtime", `Float time_perf.rtime)
; ("utime", `Float time_perf.utime)
; ("stime", `Float time_perf.stime)
; ("cutime", `Float time_perf.cutime)
; ("cstime", `Float time_perf.cstime) ] ) ] )
in
let mem =
Option.value_map ~default:[] ps.mem ~f:(fun mem_perf ->
[ ( "mem"
, `Assoc
[ ("minor_gb", `Float mem_perf.minor_gb)
; ("promoted_gb", `Float mem_perf.promoted_gb)
; ("major_gb", `Float mem_perf.major_gb)
; ("allocated_gb", `Float mem_perf.allocated_gb)
; ("minor_collections", `Int mem_perf.minor_collections)
; ("major_collections", `Int mem_perf.major_collections)
; ("compactions", `Int mem_perf.compactions)
; ("top_heap_gb", `Float mem_perf.top_heap_gb)
; ("stack_kb", `Float mem_perf.stack_kb)
; ("minor_heap_kb", `Float mem_perf.minor_heap_kb) ] ) ] )
in
`Assoc (time @ mem)
let from_json json =
let open! Yojson.Basic.Util in
let mem =
json |> member "mem"
|> to_option (fun mem_perf ->
{ minor_gb= mem_perf |> member "minor_gb" |> to_float
; promoted_gb= mem_perf |> member "promoted_gb" |> to_float
; major_gb= mem_perf |> member "major_gb" |> to_float
; allocated_gb= mem_perf |> member "allocated_gb" |> to_float
; minor_collections= mem_perf |> member "minor_collections" |> to_int
; major_collections= mem_perf |> member "major_collections" |> to_int
; compactions= mem_perf |> member "compactions" |> to_int
; top_heap_gb= mem_perf |> member "top_heap_gb" |> to_float
; stack_kb= mem_perf |> member "stack_kb" |> to_float
; minor_heap_kb= mem_perf |> member "minor_heap_kb" |> to_float } )
in
let time =
json |> member "time"
|> to_option (fun time_perf ->
{ rtime= time_perf |> member "rtime" |> to_float
; utime= time_perf |> member "utime" |> to_float
; stime= time_perf |> member "stime" |> to_float
; cutime= time_perf |> member "cutime" |> to_float
; cstime= time_perf |> member "cstime" |> to_float } )
in
{mem; time}
let aggregate_mem_stats s =
let mk_stats f =
StatisticsToolbox.compute_statistics
(List.filter_map s ~f:(fun stats -> Option.map stats.mem ~f))
in
let aggr_minor_gb = mk_stats (fun mem_perf -> mem_perf.minor_gb) in
let aggr_promoted_gb = mk_stats (fun mem_perf -> mem_perf.promoted_gb) in
let aggr_major_gb = mk_stats (fun mem_perf -> mem_perf.major_gb) in
let aggr_allocated_gb = mk_stats (fun mem_perf -> mem_perf.allocated_gb) in
let aggr_minor_collections = mk_stats (fun mem -> float_of_int mem.minor_collections) in
let aggr_major_collections = mk_stats (fun mem -> float_of_int mem.major_collections) in
let aggr_compactions = mk_stats (fun mem -> float_of_int mem.compactions) in
let aggr_top_heap_gb = mk_stats (fun mem -> mem.top_heap_gb) in
let aggr_stack_kb = mk_stats (fun mem -> mem.stack_kb) in
let aggr_minor_heap_kb = mk_stats (fun mem -> mem.minor_heap_kb) in
[ ("minor_gb", aggr_minor_gb)
; ("promoted_gb", aggr_promoted_gb)
; ("major_gb", aggr_major_gb)
; ("allocated_gb", aggr_allocated_gb)
; ("minor_collections", aggr_minor_collections)
; ("major_collections", aggr_major_collections)
; ("compactions", aggr_compactions)
; ("top_heap_gb", aggr_top_heap_gb)
; ("stack_kb", aggr_stack_kb)
; ("minor_heap_kb", aggr_minor_heap_kb) ]
let aggregate_time_stats s =
let mk_stats f =
StatisticsToolbox.compute_statistics
(List.filter_map s ~f:(fun stats -> Option.map stats.time ~f))
in
let aggr_rtime = mk_stats (fun time -> time.rtime) in
let aggr_utime = mk_stats (fun time -> time.utime) in
let aggr_stime = mk_stats (fun time -> time.stime) in
let aggr_cutime = mk_stats (fun time -> time.cutime) in
let aggr_cstime = mk_stats (fun time -> time.cstime) in
[ ("rtime", aggr_rtime)
; ("utime", aggr_utime)
; ("stime", aggr_stime)
; ("cutime", aggr_cutime)
; ("cstime", aggr_cstime) ]
let aggregate s =
let build_json_list =
List.fold_right ~init:[] ~f:(fun (key, stats) l ->
match stats with Some stats -> (key, StatisticsToolbox.to_json stats) :: l | None -> l )
in
let mem_stats = build_json_list (aggregate_mem_stats s) in
let time_stats = build_json_list (aggregate_time_stats s) in
let mem_json = if List.is_empty mem_stats then [] else [("mem", `Assoc mem_stats)] in
let time_json = if List.is_empty time_stats then [] else [("time", `Assoc time_stats)] in
`Assoc (time_json @ mem_json)
let compute_mem_stats () =
let bytes_per_word = Sys.word_size / 8 in
let words_to_bytes n = n *. float_of_int bytes_per_word in
let words_to_kb n = words_to_bytes n /. 1024. in
let words_to_mb n = words_to_kb n /. 1024. in
let words_to_gb n = words_to_mb n /. 1024. in
let gc_stats = Gc.quick_stat () in
let allocated_words = gc_stats.minor_words +. gc_stats.major_words -. gc_stats.promoted_words in
let gc_ctrl = Gc.get () in
let stats =
Some
{ minor_gb= words_to_gb gc_stats.minor_words
; promoted_gb= words_to_gb gc_stats.promoted_words
; major_gb= words_to_gb gc_stats.major_words
; allocated_gb= words_to_gb allocated_words
; minor_collections= gc_stats.minor_collections
; major_collections= gc_stats.major_collections
; compactions= gc_stats.compactions
; top_heap_gb= words_to_gb (float_of_int gc_stats.top_heap_words)
; stack_kb= words_to_kb (float_of_int gc_stats.stack_size)
; minor_heap_kb= words_to_kb (float_of_int gc_ctrl.minor_heap_size) }
in
(* We log number of bytes instead of a larger unit in EventLogger so the EventLogger output can
display in whatever format fits best *)
let mem =
Some
{ EventLogger.minor_heap_mem= words_to_bytes gc_stats.minor_words
; promoted_minor_heap_mem= words_to_bytes gc_stats.promoted_words
; major_heap_mem= words_to_bytes gc_stats.major_words
; total_allocated_mem= words_to_bytes allocated_words
; minor_collections= gc_stats.minor_collections
; major_collections= gc_stats.major_collections
; heap_compactions= gc_stats.compactions
; top_heap_size= gc_stats.top_heap_words * bytes_per_word
; stack_size= gc_stats.stack_size * bytes_per_word
; minor_heap_size= gc_ctrl.minor_heap_size * bytes_per_word }
in
(stats, mem)
let compute_time_stats ?rtime_counter (initial_times : Unix.process_times) =
let exit_times = Unix.times () in
let rtime_span = Mtime_clock.elapsed () in
let rtime =
Option.value_map ~default:rtime_span ~f:Mtime_clock.count rtime_counter |> Mtime.Span.to_s
in
let utime = exit_times.tms_utime -. initial_times.tms_utime in
let stime = exit_times.tms_stime -. initial_times.tms_stime in
let cutime = exit_times.tms_cutime -. initial_times.tms_cutime in
let cstime = exit_times.tms_cstime -. initial_times.tms_cstime in
let stats = Some {rtime; utime; stime; cutime; cstime} in
let time =
Some
{ EventLogger.real_time= rtime
; user_time= utime
; sys_time= stime
; children_user_time= cutime
; children_sys_time= cstime }
in
(stats, time)
let compute_stats stats_kind stats_type =
let (mem, mem_perf), (time, time_perf) =
match stats_kind with
| Time (rtime_counter, initial_times) ->
((None, None), compute_time_stats ~rtime_counter initial_times)
| Memory ->
(compute_mem_stats (), (None, None))
| TimeAndMemory ->
(compute_mem_stats (), compute_time_stats Utils.initial_times)
in
let stats = {mem; time} in
let stats_event =
EventLogger.PerformanceStats
{ lang= Language.to_explicit_string !Language.curr_language
; source_file= source_file_of_stats_type stats_type
; stats_type= string_of_stats_type stats_type
; mem_perf
; time_perf }
in
(stats, stats_event)
let report stats_kind file stats_type () =
try
let stats, stats_event = compute_stats stats_kind stats_type in
let json_stats = to_json stats in
EventLogger.log stats_event ;
(* We always log to EventLogger, but json files are unnecessary to log outside of developer mode *)
if Config.developer_mode then
try
Unix.mkdir_p (Filename.dirname file) ;
(* the same report may be registered across different infer processes *)
Utils.with_intermediate_temp_file_out file ~f:(fun stats_oc ->
Yojson.Basic.pretty_to_channel stats_oc json_stats )
with exc ->
L.internal_error "Info: failed to write stats to %s@\n%s@\n%s@\n%s@." file
(Exn.to_string exc)
(Yojson.Basic.pretty_to_string json_stats)
(Printexc.get_backtrace ())
with exc ->
L.internal_error "Info: failed to compute stats for %s@\n%s@\n%s@." file (Exn.to_string exc)
(Printexc.get_backtrace ())
let registered = String.Table.create ~size:4 ()
let register_report stats_kind stats_type =
let relative_path = relative_path_of_stats_type stats_type in
let absolute_path = Filename.concat Config.results_dir relative_path in
let f = report stats_kind absolute_path stats_type in
(* make sure to not double register the same perf stat report *)
match String.Table.add registered ~key:relative_path ~data:f with
| `Ok ->
()
| `Duplicate ->
L.d_warning "Attempting to register same perf stats report multiple times"
let dummy_reporter () = ()
let get_reporter stats_type =
let relative_path = relative_path_of_stats_type stats_type in
String.Table.find registered relative_path |> Option.value ~default:dummy_reporter
let register_report_at_exit stats_type =
let relative_path = relative_path_of_stats_type stats_type in
register_report TimeAndMemory stats_type ;
Epilogues.register ~f:(get_reporter stats_type)
~description:("stats reporting in " ^ relative_path)

@@ -1,38 +0,0 @@
(*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
(** Performance Statistics gathering and reporting *)
open! IStd
type perf_stats
type stats_kind = Time of Mtime_clock.counter * Unix.process_times | Memory | TimeAndMemory
type stats_type =
| ClangLinters of SourceFile.t
| ClangFrontend of SourceFile.t
| ClangProcessAST of SourceFile.t
| JavaFrontend of SourceFile.t
| TotalFrontend
| Backend of SourceFile.t
| TotalBackend
| Reporting
| Driver
val from_json : Yojson.Basic.t -> perf_stats
val aggregate : perf_stats list -> Yojson.Basic.t
val register_report : stats_kind -> stats_type -> unit
(** Register performance reporting function *)
val get_reporter : stats_type -> unit -> unit
(** Get reporting function that can be called at any time to create a performance report *)
val register_report_at_exit : stats_type -> unit
(** Create performance report when the current process terminates *)

@@ -1,183 +0,0 @@
(*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! IStd
open PolyVariantEqual
module L = Logging
let aggregated_stats_filename = "aggregated_stats.json"
let aggregated_stats_by_target_filename = "aggregated_stats_by_target.json"
let json_files_to_ignore_regex =
Str.regexp
( ".*\\("
^ Str.quote aggregated_stats_filename
^ "\\|"
^ Str.quote aggregated_stats_by_target_filename
^ "\\)$" )
let dir_exists dir = Sys.is_directory dir = `Yes
let find_json_files_in_dir dir =
let is_valid_json_file path =
let s = Unix.lstat path in
let json_regex = Str.regexp_case_fold ".*\\.json$" in
(not (Str.string_match json_files_to_ignore_regex path 0))
&& Str.string_match json_regex path 0
&& Poly.equal s.st_kind Unix.S_REG
in
match dir_exists dir with
| true ->
let content = Array.to_list (Sys.readdir dir) in
let content_with_path = List.map ~f:(fun p -> Filename.concat dir p) content in
List.filter ~f:is_valid_json_file content_with_path
| false ->
[]
type stats_paths =
{frontend_paths: string list; backend_paths: string list; reporting_paths: string list}
type origin = Buck_out of (string * stats_paths) list | Infer_out of stats_paths
let find_stats_files_in_dir dir =
let frontend_paths =
find_json_files_in_dir (Filename.concat dir Config.frontend_stats_dir_name)
in
let backend_paths = find_json_files_in_dir (Filename.concat dir Config.backend_stats_dir_name) in
let reporting_paths =
find_json_files_in_dir (Filename.concat dir Config.reporting_stats_dir_name)
in
{frontend_paths; backend_paths; reporting_paths}
let load_data_from_infer_deps file =
let error msg = Printf.sprintf ("Error reading '%s': " ^^ msg) file in
let extract_target_and_path line =
match String.split ~on:'\t' line with
| target :: _ :: path :: _ ->
if dir_exists path then Ok (target, path)
else Error (error "path '%s' is not a valid directory" path)
| _ ->
Error (error "malformed input")
in
let parse_lines lines = List.map lines ~f:extract_target_and_path |> Result.all in
Utils.read_file file
|> Result.map_error ~f:(fun msg -> error "%s" msg)
|> Result.bind ~f:parse_lines
let collect_all_stats_files () =
let infer_out = Config.results_dir in
let concatenate_paths p1 p2 = if Filename.is_relative p2 then Filename.concat p1 p2 else p2 in
match Config.buck_out with
| Some p ->
if dir_exists p then
let data =
load_data_from_infer_deps (Filename.concat infer_out Config.buck_infer_deps_file_name)
in
match data with
| Ok r ->
let buck_out_parent = Filename.concat p Filename.parent_dir_name in
let targets_files =
List.map
~f:(fun (t, p) -> (t, find_stats_files_in_dir (concatenate_paths buck_out_parent p)))
r
in
Ok (Buck_out targets_files)
| Error _ as e ->
e
else Error ("buck-out path '" ^ p ^ "' not found")
| None ->
Ok (Infer_out (find_stats_files_in_dir infer_out))
let aggregate_stats_files paths =
let open_json_file file = Yojson.Basic.from_file file in
let load_stats paths =
List.map ~f:(fun path -> PerfStats.from_json (open_json_file path)) paths
in
let all_perf_stats = load_stats paths in
match all_perf_stats with [] -> None | _ -> Some (PerfStats.aggregate all_perf_stats)
type json_aggregated_stats =
{ frontend_json_data: Yojson.Basic.t option
; backend_json_data: Yojson.Basic.t option
; reporting_json_data: Yojson.Basic.t option }
let aggregate_all_stats origin =
let accumulate_paths acc paths =
{ frontend_paths= paths.frontend_paths @ acc.frontend_paths
; backend_paths= paths.backend_paths @ acc.backend_paths
; reporting_paths= paths.reporting_paths @ acc.reporting_paths }
in
let empty_stats_paths = {frontend_paths= []; backend_paths= []; reporting_paths= []} in
let stats_paths =
match origin with
| Buck_out tf ->
List.fold ~f:(fun acc (_, paths) -> accumulate_paths acc paths) ~init:empty_stats_paths tf
| Infer_out paths ->
paths
in
{ frontend_json_data= aggregate_stats_files stats_paths.frontend_paths
; backend_json_data= aggregate_stats_files stats_paths.backend_paths
; reporting_json_data= aggregate_stats_files stats_paths.reporting_paths }
let aggregate_stats_by_target tp =
let to_json f aggr_stats =
let collect_valid_stats acc t p = match p with Some v -> (t, v) :: acc | None -> acc in
let l = List.fold ~f:(fun acc (t, p) -> collect_valid_stats acc t (f p)) ~init:[] aggr_stats in
match l with [] -> None | _ as v -> Some (`Assoc v)
in
let frontend_json_data = to_json (fun p -> aggregate_stats_files p.frontend_paths) tp in
let backend_json_data = to_json (fun p -> aggregate_stats_files p.backend_paths) tp in
let reporting_json_data = to_json (fun p -> aggregate_stats_files p.reporting_paths) tp in
{frontend_json_data; backend_json_data; reporting_json_data}
let generate_files () =
let infer_out = Config.results_dir in
let stats_files = collect_all_stats_files () in
let origin =
match stats_files with Ok origin -> origin | Error e -> L.(die InternalError) "%s" e
in
let aggregated_frontend_stats_dir = Filename.concat infer_out Config.frontend_stats_dir_name in
let aggregated_backend_stats_dir = Filename.concat infer_out Config.backend_stats_dir_name in
let aggregated_reporting_stats_dir = Filename.concat infer_out Config.reporting_stats_dir_name in
Utils.create_dir aggregated_frontend_stats_dir ;
Utils.create_dir aggregated_backend_stats_dir ;
Utils.create_dir aggregated_reporting_stats_dir ;
let write_to_json_file_opt destfile json =
match json with Some j -> Utils.write_json_to_file destfile j | None -> ()
in
( match origin with
| Buck_out tp ->
let j = aggregate_stats_by_target tp in
write_to_json_file_opt
(Filename.concat aggregated_frontend_stats_dir aggregated_stats_by_target_filename)
j.frontend_json_data ;
write_to_json_file_opt
(Filename.concat aggregated_backend_stats_dir aggregated_stats_by_target_filename)
j.backend_json_data ;
write_to_json_file_opt
(Filename.concat aggregated_reporting_stats_dir aggregated_stats_by_target_filename)
j.reporting_json_data
| Infer_out _ ->
() ) ;
let j = aggregate_all_stats origin in
write_to_json_file_opt
(Filename.concat aggregated_frontend_stats_dir aggregated_stats_filename)
j.frontend_json_data ;
write_to_json_file_opt
(Filename.concat aggregated_backend_stats_dir aggregated_stats_filename)
j.backend_json_data ;
write_to_json_file_opt
(Filename.concat aggregated_reporting_stats_dir aggregated_stats_filename)
j.reporting_json_data
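(For reference, the aggregation above boils down to folding a set of per-phase JSON stats files into one summary object. Below is a minimal standalone sketch of that idea, using only Yojson.Basic and the OCaml standard library; [aggregate] is an illustrative stand-in for the deleted PerfStats.aggregate, not its actual implementation.)

(* Load every stats file, then sum the top-level integer fields of the
   resulting JSON objects into one object; None when there is no input. *)
let load_stats_files (paths : string list) : Yojson.Basic.t list =
  List.map Yojson.Basic.from_file paths

let aggregate (stats : Yojson.Basic.t list) : Yojson.Basic.t option =
  let add_object acc = function
    | `Assoc fields ->
        List.fold_left
          (fun acc (key, value) ->
            match (value, List.assoc_opt key acc) with
            | `Int n, Some (`Int m) -> (key, `Int (n + m)) :: List.remove_assoc key acc
            | `Int n, None -> (key, `Int n) :: acc
            | _ -> acc)
          acc fields
    | _ -> acc
  in
  match stats with [] -> None | _ -> Some (`Assoc (List.fold_left add_object [] stats))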

@@ -1,13 +0,0 @@
(*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! IStd
(** Generate files containing statistics aggregated from individual statistics files under
Config.results_dir *)
val generate_files : unit -> unit

@@ -22,18 +22,12 @@ module Stats = struct
   let add_visited stats node_id = stats.nodes_visited <- IntSet.add node_id stats.nodes_visited
-  let nb_visited {nodes_visited} = IntSet.cardinal nodes_visited
   let update ?(add_symops = 0) ?failure_kind stats =
     let symops = stats.symops + add_symops in
     let failure_kind = match failure_kind with None -> stats.failure_kind | some -> some in
     {stats with symops; failure_kind}
-  let failure_kind {failure_kind} = failure_kind
-  let symops {symops} = symops
   let pp_failure_kind_opt fmt failure_kind_opt =
     match failure_kind_opt with
     | Some failure_kind ->

@@ -19,13 +19,7 @@ module Stats : sig
   val is_visited : t -> int -> bool
-  val nb_visited : t -> int
   val update : ?add_symops:int -> ?failure_kind:SymOp.failure_kind -> t -> t
-  val failure_kind : t -> SymOp.failure_kind option
-  val symops : t -> int
 end
 module Status : sig

@@ -286,11 +286,6 @@ let dump_duplicate_procs source_file procs =
   if not (List.is_empty duplicate_procs) then output_to_file duplicate_procs
-let create_perf_stats_report source_file =
-  PerfStats.register_report PerfStats.TimeAndMemory (PerfStats.Backend source_file) ;
-  PerfStats.get_reporter (PerfStats.Backend source_file) ()
 let register_callee ?caller_summary callee_pname =
   Option.iter
     ~f:(fun (summary : Summary.t) ->
@@ -351,9 +346,7 @@ let analyze_callee ?caller_summary callee =
           Some callee_summary
       | None ->
          Summary.OnDisk.get callee_pname
-      else (
-        EventLogger.log_skipped_pname (F.asprintf "%a" Procname.pp callee_pname) ;
-        Summary.OnDisk.get callee_pname )
+      else Summary.OnDisk.get callee_pname
    in
    LocalCache.add callee_pname summ_opt ;
    summ_opt
@@ -381,8 +374,7 @@ let analyze_procedures exe_env procs_to_analyze source_file_opt =
   Option.iter source_file_opt ~f:(fun source_file ->
       if Config.dump_duplicate_symbols then dump_duplicate_procs source_file procs_to_analyze ) ;
   Option.iter source_file_opt ~f:(fun source_file ->
-      Callbacks.iterate_file_callbacks_and_store_issues procs_to_analyze exe_env source_file ;
-      create_perf_stats_report source_file ) ;
+      Callbacks.iterate_file_callbacks_and_store_issues procs_to_analyze exe_env source_file ) ;
   unset_exe_env () ;
   Language.curr_language := saved_language

@@ -10,20 +10,18 @@ module L = Logging
 type log_t = ?ltr:Errlog.loc_trace -> ?extras:Jsonbug_t.extra -> IssueType.t -> string -> unit
-let log_issue_from_errlog procname ~clang_method_kind severity err_log ~loc ~node ~session ~ltr
-    ~access ~extras exn =
+let log_issue_from_errlog severity err_log ~loc ~node ~session ~ltr ~access ~extras exn =
   let issue_type = (Exceptions.recognize_exception exn).name in
   if (not Config.filtering) (* no-filtering takes priority *) || issue_type.IssueType.enabled then
     let doc_url = issue_type.doc_url in
     let linters_def_file = issue_type.linters_def_file in
-    Errlog.log_issue procname ~clang_method_kind severity err_log ~loc ~node ~session ~ltr
-      ~linters_def_file ~doc_url ~access ~extras exn
+    Errlog.log_issue severity err_log ~loc ~node ~session ~ltr ~linters_def_file ~doc_url ~access
+      ~extras exn
-let log_frontend_issue procname severity errlog ~loc ~node_key ~ltr exn =
+let log_frontend_issue severity errlog ~loc ~node_key ~ltr exn =
   let node = Errlog.FrontendNode {node_key} in
-  log_issue_from_errlog procname ~clang_method_kind:None severity errlog ~loc ~node ~session:0 ~ltr
-    ~access:None ~extras:None exn
+  log_issue_from_errlog severity errlog ~loc ~node ~session:0 ~ltr ~access:None ~extras:None exn
 let log_issue_from_summary severity summary ~node ~session ~loc ~ltr ?extras exn =
@@ -52,9 +50,7 @@ let log_issue_from_summary severity summary ~node ~session ~loc ~ltr ?extras exn
     (* Skip the reporting *)
   else
     let err_log = Summary.get_err_log summary in
-    let clang_method_kind = Some attrs.clang_method_kind in
-    log_issue_from_errlog procname ~clang_method_kind severity err_log ~loc ~node ~session ~ltr
-      ~access:None ~extras exn
+    log_issue_from_errlog severity err_log ~loc ~node ~session ~ltr ~access:None ~extras exn
 let log_issue_deprecated_using_state severity proc_name ?node ?loc ?ltr exn =
@@ -94,8 +90,7 @@ let log_issue_external procname ~issue_log severity ~loc ~ltr ?access issue_type
   let exn = checker_exception issue_type error_message in
   let issue_log, errlog = IssueLog.get_or_add issue_log ~proc:procname in
   let node = Errlog.UnknownNode in
-  log_issue_from_errlog procname ~clang_method_kind:None severity errlog ~loc ~node ~session:0 ~ltr
-    ~access ~extras:None exn ;
+  log_issue_from_errlog severity errlog ~loc ~node ~session:0 ~ltr ~access ~extras:None exn ;
   issue_log

@@ -23,8 +23,7 @@ val log_issue_deprecated_using_state :
     DEPRECATED as it can create race conditions between checkers. Use log_error/warning instead *)
 val log_frontend_issue :
-     Procname.t
-  -> Exceptions.severity
+     Exceptions.severity
   -> Errlog.t
   -> loc:Location.t
   -> node_key:Procdesc.NodeKey.t

@@ -272,17 +272,6 @@ let reportdiff =
     ~see_also:InferCommand.[Report]
-let events =
-  mk_command_doc ~title:"Infer Events"
-    ~short_description:"dump all the logged events in machine readable format"
-    ~synopsis:{|$(b,infer) $(b,events)|}
-    ~description:
-      [ `P
-          "Emit to stdout one JSON object per line, each describing a logged event happened during \
-           the execution of Infer" ]
-    ~see_also:InferCommand.[Report; Run]
 let run =
   mk_command_doc ~title:"Infer Analysis of a Project"
     ~short_description:"capture source files, analyze, and report"
@@ -308,7 +297,6 @@ let command_to_data =
   [ mk Analyze analyze
   ; mk Capture capture
   ; mk Compile compile
-  ; mk Events events
   ; mk Explore explore
   ; mk Report report
   ; mk ReportDiff reportdiff

@@ -149,12 +149,8 @@ let default_in_zip_results_dir = "infer"
 (** Dotty output filename **)
 let dotty_frontend_output = "proc_cfgs_frontend.dot"
-let driver_stats_dir_name = "driver_stats"
 let duplicates_filename = "duplicates.txt"
-let events_dir_name = "events"
 let trace_events_file = "perf_events.json"
 (** exit code to use for the --fail-on-issue option *)
@@ -221,8 +217,6 @@ let meet_level = 1
 let nsnotification_center_checker_backend = false
-let perf_stats_prefix = "perf_stats"
 let proc_stats_filename = "proc_stats.json"
 let procnames_locks_dir_name = "procnames_locks"
@@ -557,7 +551,7 @@ let () =
     match cmd with
     | Report ->
        `Add
-    | Analyze | Capture | Compile | Events | Explore | ReportDiff | Run ->
+    | Analyze | Capture | Compile | Explore | ReportDiff | Run ->
        `Reject
   in
 (* make sure we generate doc for all the commands we know about *)
@@ -812,10 +806,9 @@ and buck_mode =
   buck_mode
-and buck_out =
-  CLOpt.mk_path_opt ~long:"buck-out"
-    ~in_help:InferCommand.[(Capture, manual_buck)]
-    ~meta:"dir" "Specify the root directory of buck-out. Only valid for $(b,--buck-java)."
+and _buck_out =
+  CLOpt.mk_path_opt ~deprecated:["-buck-out"] ~long:"" ~meta:"dir"
+    "[DOES NOTHING] Specify the root directory of buck-out. Only valid for $(b,--buck-java)."
 and buck_targets_blacklist =
@@ -1512,10 +1505,9 @@ and liveness_dangerous_classes =
     by the program."
-and log_events =
-  CLOpt.mk_bool ~long:"log-events"
-    ~in_help:InferCommand.[(Run, manual_generic)]
-    "Turn on the feature that logs events in a machine-readable format"
+and _log_events =
+  CLOpt.mk_bool ~long:"" ~deprecated:["-log-events"] ~deprecated_no:["-no-log-events"]
+    "[DOES NOTHING] Turn on the feature that logs events in a machine-readable format"
 and log_file =
@@ -1523,10 +1515,10 @@ and log_file =
     ~default:"logs" "Specify the file to use for logging"
-and log_skipped =
-  CLOpt.mk_bool ~long:"log-skipped"
-    ~in_help:InferCommand.[(Run, manual_generic)]
-    "Turn on the feature that logs skipped functions (one per file) in a machine-readable format"
+and _log_skipped =
+  CLOpt.mk_bool ~long:"" ~deprecated:["-log-skipped"] ~deprecated_no:["-no-log-skipped"]
+    "[DOES NOTHING] Turn on the feature that logs skipped functions (one per file) in a \
+     machine-readable format"
 and perf_profiler_data_file =
@@ -1754,10 +1746,10 @@ and print_builtins =
     "Print the builtin functions and exit"
-and print_log_identifier =
-  CLOpt.mk_bool ~long:"print-log-identifier"
-    ~in_help:InferCommand.[(Run, manual_generic)]
-    "Print the unique identifier that is common to all logged events"
+and _print_log_identifier =
+  CLOpt.mk_bool ~long:"" ~deprecated:["-print-log-identifier"]
+    ~deprecated_no:["-no-print-log-identifier"]
+    "[DOES NOTHING] Print the unique identifier that is common to all logged events"
 and print_using_diff =
@@ -2663,8 +2655,6 @@ and buck_mode : BuckMode.t option =
       Some (ClangCompilationDB (DepsUpToDepth depth))
-and buck_out = !buck_out
 and buck_targets_blacklist = !buck_targets_blacklist
 and call_graph_schedule = !call_graph_schedule
@@ -2846,12 +2836,8 @@ and load_average =
   match !load_average with None when !buck -> Some (float_of_int ncpu) | _ -> !load_average
-and log_events = !log_events
 and log_file = !log_file
-and log_skipped = !log_skipped
 and perf_profiler_data_file = !perf_profiler_data_file
 and max_nesting = !max_nesting
@@ -2914,8 +2900,6 @@ and print_active_checkers = !print_active_checkers
 and print_builtins = !print_builtins
-and print_log_identifier = !print_log_identifier
 and print_logs = !print_logs
 and print_types = !print_types

@@ -79,14 +79,10 @@ val default_in_zip_results_dir : string
 val dotty_frontend_output : string
-val driver_stats_dir_name : string
 val duplicates_filename : string
 val etc_dir : string
-val events_dir_name : string
 val fail_on_issue_exit_code : int
 val fcp_dir : string
@@ -134,8 +130,6 @@ val patterns_skip_implementation : string * Yojson.Basic.t
 val patterns_skip_translation : string * Yojson.Basic.t
-val perf_stats_prefix : string
 val pp_version : Format.formatter -> unit -> unit
 val proc_stats_filename : string
@@ -248,8 +242,6 @@ val buck_merge_all_deps : bool
 val buck_mode : BuckMode.t option
-val buck_out : string option
 val buck_out_gen : string
 val buck_targets_blacklist : string list
@@ -444,12 +436,8 @@ val linters_validate_syntax_only : bool
 val liveness_dangerous_classes : Yojson.Basic.t
-val log_events : bool
 val log_file : string
-val log_skipped : bool
 val max_nesting : int option
 val merge : bool
@@ -497,8 +485,6 @@ val print_active_checkers : bool
 val print_builtins : bool
-val print_log_identifier : bool
 val print_logs : bool
 val print_types : bool

@@ -1,411 +0,0 @@
(*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! IStd
module CLOpt = CommandLineOption
let hostname = Unix.gethostname ()
module IO = struct
let log_file_extension = ".log"
let events_dir = Config.toplevel_results_dir ^/ Config.events_dir_name
let out_chan = ref None
let close () =
match !out_chan with
| None ->
()
| Some chan ->
Out_channel.close chan ;
out_chan := None
let prepare () =
close () ;
let fname = events_dir ^/ (Unix.getpid () |> Pid.to_string) ^ log_file_extension in
let oc = Stdlib.open_out_gen [Open_append; Open_creat] 0o666 fname in
out_chan := Some oc
let write fmt =
match !out_chan with Some oc -> Printf.fprintf oc fmt | _ -> Printf.ifprintf stdout fmt
let write_skipped_pname pname =
let fname = events_dir ^/ "skipped_functions" ^ log_file_extension in
let oc = Stdlib.open_out_gen [Open_append; Open_creat] 0o666 fname in
Out_channel.output_string oc pname ;
Out_channel.output_char oc '\n' ;
Out_channel.close oc
let dump () =
let dump_file_to_stdout fname =
let ic = In_channel.create fname in
In_channel.iter_lines ic ~f:print_endline
in
let log_files = Utils.find_files ~path:events_dir ~extension:log_file_extension in
List.iter log_files ~f:dump_file_to_stdout
let () = Epilogues.register_late ~f:close ~description:"closing EventLogger file"
end
module Random_id : sig
val get : unit -> string
end = struct
let () = Random.self_init ()
let generate () = Random.int64 1_000_000_000_000L |> Int64.to_string
let infer_run_identifier_env_var = "INFER_RUN_IDENTIFIER"
let get () =
match Sys.getenv infer_run_identifier_env_var with
| Some id ->
id
| None ->
let new_id = generate () in
Unix.putenv ~key:infer_run_identifier_env_var ~data:new_id ;
new_id
end
type analysis_issue =
{ bug_kind: string
; bug_type: string
; clang_method_kind: string option
; exception_triggered_location: Logging.ocaml_pos option
; lang: string
; procedure_name: string
; source_location: Location.t }
let create_analysis_issue_row base record =
let open JsonBuilder in
base
|> add_string ~key:"bug_kind" ~data:record.bug_kind
|> add_string ~key:"bug_type" ~data:record.bug_type
|> add_string_opt ~key:"clang_method_kind" ~data:record.clang_method_kind
|> add_string_opt ~key:"exception_triggered_location"
~data:(Option.map ~f:Logging.ocaml_pos_to_string record.exception_triggered_location)
|> add_string ~key:"lang" ~data:record.lang
|> add_string ~key:"procedure_name" ~data:record.procedure_name
|> add_string ~key:"source_location"
~data:
(String.concat
[ string_of_int record.source_location.line
; ":"
; string_of_int record.source_location.col ])
|> add_string ~key:"source_file" ~data:(SourceFile.to_rel_path record.source_location.file)
type analysis_stats =
{ analysis_nodes_visited: int
; analysis_status: SymOp.failure_kind option
; analysis_total_nodes: int
; clang_method_kind: string option
; lang: string
; method_location: Location.t
; method_name: string
; num_preposts: int
; symops: int }
let create_analysis_stats_row base record =
let open JsonBuilder in
base
|> add_int ~key:"analysis_nodes_visited" ~data:record.analysis_nodes_visited
|> add_string ~key:"analysis_status"
~data:
(Option.value_map record.analysis_status ~default:"OK" ~f:(fun stats_failure ->
SymOp.failure_kind_to_string stats_failure ))
|> add_int ~key:"analysis_total_nodes" ~data:record.analysis_total_nodes
|> add_string_opt ~key:"clang_method_kind" ~data:record.clang_method_kind
|> add_string ~key:"lang" ~data:record.lang
|> add_string ~key:"method_location"
~data:
(String.concat
[ string_of_int record.method_location.line
; ":"
; string_of_int record.method_location.col ])
|> add_string ~key:"source_file" ~data:(SourceFile.to_rel_path record.method_location.file)
|> add_string ~key:"method_name" ~data:record.method_name
|> add_int ~key:"num_preposts" ~data:record.num_preposts
|> add_int ~key:"symops" ~data:record.symops
type dynamic_dispatch =
| Dynamic_dispatch_successful
| Dynamic_dispatch_parameters_arguments_mismatch
| Dynamic_dispatch_model_specialization_failure
let string_of_dynamic_dispatch_opt dd =
match dd with
| Some Dynamic_dispatch_successful ->
"dynamic dispatch successful"
| Some Dynamic_dispatch_parameters_arguments_mismatch ->
"dynamic dispatch failed with arguments mismatch"
| Some Dynamic_dispatch_model_specialization_failure ->
"dynamic dispatch model specialized failed"
| None ->
"no dynamic dispatch"
type call_trace =
{ call_location: Location.t
; call_result: string
; callee_clang_method_kind: string option
; callee_source_file: SourceFile.t option
; callee_name: string
; caller_name: string
; lang: string
; reason: string option
; dynamic_dispatch: dynamic_dispatch option }
let create_call_trace_row base record =
let open JsonBuilder in
base
|> add_string ~key:"call_location"
~data:
(String.concat
[string_of_int record.call_location.line; ":"; string_of_int record.call_location.col])
|> add_string ~key:"source_file" ~data:(SourceFile.to_rel_path record.call_location.file)
|> add_string ~key:"call_result" ~data:record.call_result
|> add_string_opt ~key:"callee_clang_method_kind" ~data:record.callee_clang_method_kind
|> add_string_opt ~key:"callee_source_file"
~data:(Option.map ~f:SourceFile.to_rel_path record.callee_source_file)
|> add_string ~key:"callee_name" ~data:record.callee_name
|> add_string ~key:"caller_name" ~data:record.caller_name
|> add_string ~key:"lang" ~data:record.lang
|> add_string_opt ~key:"reason" ~data:record.reason
|> add_string ~key:"dynamic_dispatch"
~data:(string_of_dynamic_dispatch_opt record.dynamic_dispatch)
type frontend_exception =
{ ast_node: string option
; exception_triggered_location: Logging.ocaml_pos
; exception_type: string
; lang: string
; source_location_start: Location.t
; source_location_end: Location.t }
let create_frontend_exception_row base record =
let open JsonBuilder in
base
|> add_string_opt ~key:"ast_node" ~data:record.ast_node
|> add_string ~key:"exception_triggered_location"
~data:(Logging.ocaml_pos_to_string record.exception_triggered_location)
|> add_string ~key:"exception_type" ~data:record.exception_type
|> add_string ~key:"lang" ~data:record.lang
|> add_string ~key:"source_location_start_file"
~data:(SourceFile.to_rel_path record.source_location_start.file)
|> add_string ~key:"source_location_start_pos"
~data:
(String.concat
[ string_of_int record.source_location_start.line
; ":"
; string_of_int record.source_location_start.col ])
|> add_string ~key:"source_location_end_file"
~data:(SourceFile.to_rel_path record.source_location_end.file)
|> add_string ~key:"source_location_end_pos"
~data:
(String.concat
[ string_of_int record.source_location_end.line
; ":"
; string_of_int record.source_location_end.col ])
type mem_perf =
{ minor_heap_mem: float
; promoted_minor_heap_mem: float
; major_heap_mem: float
; total_allocated_mem: float
; minor_collections: int
; major_collections: int
; heap_compactions: int
; top_heap_size: int
; stack_size: int
; minor_heap_size: int }
type time_perf =
{ real_time: float
; user_time: float
; sys_time: float
; children_user_time: float
; children_sys_time: float }
type performance_stats =
{ lang: string
; source_file: SourceFile.t option
; stats_type: string
; mem_perf: mem_perf option
; time_perf: time_perf option }
let create_performance_stats_row base record =
let open JsonBuilder in
let add_mem_perf t =
Option.value_map ~default:t record.mem_perf ~f:(fun mem_perf ->
t
|> add_float ~key:"minor_heap_mem" ~data:mem_perf.minor_heap_mem
|> add_float ~key:"promoted_minor_heap_mem" ~data:mem_perf.promoted_minor_heap_mem
|> add_float ~key:"major_heap_mem" ~data:mem_perf.major_heap_mem
|> add_float ~key:"total_allocated_mem" ~data:mem_perf.total_allocated_mem
|> add_int ~key:"minor_collections" ~data:mem_perf.minor_collections
|> add_int ~key:"major_collections" ~data:mem_perf.major_collections
|> add_int ~key:"heap_compactions" ~data:mem_perf.heap_compactions
|> add_int ~key:"top_heap_size" ~data:mem_perf.top_heap_size
|> add_int ~key:"stack_size" ~data:mem_perf.stack_size
|> add_int ~key:"minor_heap_size" ~data:mem_perf.minor_heap_size )
in
let add_time_perf t =
Option.value_map ~default:t record.time_perf ~f:(fun time_perf ->
t
|> add_float ~key:"real_time" ~data:time_perf.real_time
|> add_float ~key:"user_time" ~data:time_perf.user_time
|> add_float ~key:"sys_time" ~data:time_perf.sys_time
|> add_float ~key:"children_user_time" ~data:time_perf.children_user_time
|> add_float ~key:"children_sys_time" ~data:time_perf.children_sys_time )
in
base
|> add_string ~key:"lang" ~data:record.lang
|> add_string_opt ~key:"source_file"
~data:(Option.map ~f:SourceFile.to_rel_path record.source_file)
|> add_string ~key:"stats_type" ~data:record.stats_type
|> add_mem_perf |> add_time_perf
type procedures_translated =
{ lang: string
; procedures_translated_failed: int
; procedures_translated_total: int
; source_file: SourceFile.t }
let create_procedures_translated_row base record =
let open JsonBuilder in
base
|> add_string ~key:"lang" ~data:record.lang
|> add_int ~key:"procedures_translated_failed" ~data:record.procedures_translated_failed
|> add_int ~key:"procedures_translated_total" ~data:record.procedures_translated_total
|> add_string ~key:"source_file" ~data:(SourceFile.to_rel_path record.source_file)
type event =
| AnalysisIssue of analysis_issue
| AnalysisStats of analysis_stats
| CallTrace of call_trace
| FrontendException of frontend_exception
| PerformanceStats of performance_stats
| ProceduresTranslatedSummary of procedures_translated
| UncaughtException of exn * int
let string_of_event event =
match event with
| AnalysisIssue _ ->
"AnalysisIssue"
| AnalysisStats _ ->
"AnalysisStats"
| CallTrace _ ->
"CallTrace"
| FrontendException _ ->
"FrontendException"
| PerformanceStats _ ->
"PerformanceStats"
| ProceduresTranslatedSummary _ ->
"ProceduresTranslatedSummary"
| UncaughtException _ ->
"UncaughtException"
let sequence_ctr = ref 0
let pid () = Pid.to_int (Unix.getpid ())
let sysname =
try
Utils.with_process_in "uname 2>/dev/null" (fun chan ->
Scanf.bscanf (Scanf.Scanning.from_channel chan) "%s" (fun n -> n) )
|> fst
with _ -> "Unknown"
module type S = sig
val get_log_identifier : unit -> string
val prepare : unit -> unit
val log : event -> unit
val log_skipped_pname : string -> unit
val dump : unit -> unit
end
module LoggerImpl : S = struct
let get_log_identifier () = Random_id.get ()
let create_row event =
incr sequence_ctr ;
let open JsonBuilder in
let base =
empty
|> add_string ~key:"command" ~data:(InferCommand.to_string Config.command)
|> add_string ~key:"event_tag" ~data:(string_of_event event)
|> add_string ~key:"hostname" ~data:hostname
|> add_string ~key:"infer_commit" ~data:Version.commit
|> add_int ~key:"is_originator" ~data:(if CLOpt.is_originator then 1 else 0)
|> add_string_opt ~key:"job_id" ~data:Config.job_id
|> add_int ~key:"pid" ~data:(pid ())
|> add_string ~key:"run_identifier" ~data:(get_log_identifier ())
|> add_int ~key:"sequence" ~data:(!sequence_ctr - 1)
|> add_string ~key:"sysname" ~data:sysname
|> add_int ~key:"time" ~data:(int_of_float (Unix.time ()))
in
( match event with
| AnalysisIssue record ->
create_analysis_issue_row base record
| AnalysisStats record ->
create_analysis_stats_row base record
| CallTrace record ->
create_call_trace_row base record
| FrontendException record ->
create_frontend_exception_row base record
| PerformanceStats record ->
create_performance_stats_row base record
| ProceduresTranslatedSummary record ->
create_procedures_translated_row base record
| UncaughtException (exn, exitcode) ->
base
|> add_string ~key:"exception" ~data:(Caml.Printexc.exn_slot_name exn)
|> add_string ~key:"exception_info" ~data:(Exn.to_string exn)
|> add_int ~key:"exitcode" ~data:exitcode )
|> JsonBuilder.to_json
let prepare = IO.prepare
let log event = IO.write "%s\n" (create_row event)
let dump = IO.dump
let log_skipped_pname pname = if Config.log_skipped then IO.write_skipped_pname pname else ()
end
module DummyLogger : S = struct
let get_log_identifier () = ""
let prepare () = ()
let log _ = ()
let dump _ = ()
let log_skipped_pname _ = ()
end
(* use real logger if logging is enabled, dummy logger otherwise *)
include (val if Config.log_events then (module LoggerImpl : S) else (module DummyLogger : S))
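(The essential write path of this logger was small: serialise one event per line and append it to a per-process file under an events directory. A minimal sketch of that pattern using the OCaml standard library plus Unix; the [events_dir] parameter stands in for the Config-provided directory, which no longer exists.)

(* Append one serialised event per line to a log file named after the pid. *)
let log_event ~events_dir (row : string) =
  let fname = Filename.concat events_dir (string_of_int (Unix.getpid ()) ^ ".log") in
  let oc = open_out_gen [Open_append; Open_creat] 0o666 fname in
  output_string oc row ;
  output_char oc '\n' ;
  close_out oc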

@@ -1,103 +0,0 @@
(*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! IStd
type analysis_issue =
{ bug_kind: string
; bug_type: string
; clang_method_kind: string option
; exception_triggered_location: Logging.ocaml_pos option
; lang: string
; procedure_name: string
; source_location: Location.t }
type analysis_stats =
{ analysis_nodes_visited: int
; analysis_status: SymOp.failure_kind option
; analysis_total_nodes: int
; clang_method_kind: string option
; lang: string
; method_location: Location.t
; method_name: string
; num_preposts: int
; symops: int }
type dynamic_dispatch =
| Dynamic_dispatch_successful
| Dynamic_dispatch_parameters_arguments_mismatch
| Dynamic_dispatch_model_specialization_failure
type call_trace =
{ call_location: Location.t
; call_result: string
; callee_clang_method_kind: string option
; callee_source_file: SourceFile.t option
; callee_name: string
; caller_name: string
; lang: string
; reason: string option
; dynamic_dispatch: dynamic_dispatch option }
type frontend_exception =
{ ast_node: string option
; exception_triggered_location: Logging.ocaml_pos
; exception_type: string
; lang: string
; source_location_start: Location.t
; source_location_end: Location.t }
type mem_perf =
{ minor_heap_mem: float
; promoted_minor_heap_mem: float
; major_heap_mem: float
; total_allocated_mem: float
; minor_collections: int
; major_collections: int
; heap_compactions: int
; top_heap_size: int
; stack_size: int
; minor_heap_size: int }
type time_perf =
{ real_time: float
; user_time: float
; sys_time: float
; children_user_time: float
; children_sys_time: float }
type performance_stats =
{ lang: string
; source_file: SourceFile.t option
; stats_type: string
; mem_perf: mem_perf option
; time_perf: time_perf option }
type procedures_translated =
{ lang: string
; procedures_translated_failed: int
; procedures_translated_total: int
; source_file: SourceFile.t }
type event =
| AnalysisIssue of analysis_issue
| AnalysisStats of analysis_stats
| CallTrace of call_trace
| FrontendException of frontend_exception
| PerformanceStats of performance_stats
| ProceduresTranslatedSummary of procedures_translated
| UncaughtException of exn * int (** exception, exitcode *)
val get_log_identifier : unit -> string
val prepare : unit -> unit
val log : event -> unit
val log_skipped_pname : string -> unit
val dump : unit -> unit

@@ -10,7 +10,6 @@ module L = Logging
 let protect ~f x =
   Epilogues.reset () ;
-  EventLogger.prepare () ;
   L.reset_formatters () ;
   ResultsDatabase.new_database_connection () ;
   (* get different streams of random numbers in each fork, in particular to lessen contention in

@@ -1,42 +0,0 @@
(*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! IStd
type t = {integers: int String.Map.t; floats: float String.Map.t; strings: string String.Map.t}
let empty = {integers= String.Map.empty; floats= String.Map.empty; strings= String.Map.empty}
let add_int ({integers} as t) ~key ~data = {t with integers= String.Map.set integers ~key ~data}
let add_float ({floats} as t) ~key ~data = {t with floats= String.Map.set floats ~key ~data}
let add_string ({strings} as t) ~key ~data = {t with strings= String.Map.set strings ~key ~data}
let add_string_opt t ~key ~data = match data with Some data -> add_string t ~key ~data | None -> t
let yojson_of_integers integers =
let f ~key ~data acc = (key, `Int data) :: acc in
`Assoc (String.Map.fold integers ~init:[] ~f)
let yojson_of_floats floats =
let f ~key ~data acc = (key, `Float data) :: acc in
`Assoc (String.Map.fold floats ~init:[] ~f)
let yojson_of_strings strings =
let f ~key ~data acc = (key, `String data) :: acc in
`Assoc (String.Map.fold strings ~init:[] ~f)
let to_json {integers; floats; strings} =
`Assoc
[ ("int", yojson_of_integers integers)
; ("double", yojson_of_floats floats)
; ("normal", yojson_of_strings strings) ]
|> Yojson.Basic.to_string
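(As the deleted EventLogger code above shows, rows were assembled by piping through the add_* combinators and then serialised with to_json. An illustrative call against the interface below, with made-up keys and values:)

let example_row () =
  JsonBuilder.empty
  |> JsonBuilder.add_string ~key:"event_tag" ~data:"AnalysisStats"
  |> JsonBuilder.add_int ~key:"symops" ~data:42
  |> JsonBuilder.add_string_opt ~key:"reason" ~data:None
  |> JsonBuilder.to_json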

@@ -1,22 +0,0 @@
(*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! IStd
type t
val empty : t
val add_int : t -> key:string -> data:int -> t
val add_float : t -> key:string -> data:float -> t
val add_string : t -> key:string -> data:string -> t
val add_string_opt : t -> key:string -> data:string option -> t
val to_json : t -> string

@@ -14,8 +14,6 @@ let language_to_string = [(Clang, "C/C++/ObjC"); (Java, "Java")]
 let to_string lang = List.Assoc.find_exn language_to_string ~equal lang
-let to_explicit_string = function Clang -> "Clang" | Java -> "Java"
 let of_string s = List.Assoc.find (List.Assoc.inverse language_to_string) ~equal:String.equal s
 (** Current language *)

@@ -13,7 +13,6 @@ val equal : t -> t -> bool
 val to_string : t -> string
-val to_explicit_string : t -> string
 val of_string : string -> t option

@@ -66,7 +66,6 @@ let remove_results_dir () =
 let prepare_logging_and_db () =
   L.setup_log_file () ;
   PerfEvent.init () ;
-  EventLogger.prepare () ;
   if Sys.is_file ResultsDatabase.database_fullpath <> `Yes then ResultsDatabase.create_db () ;
   ResultsDatabase.new_database_connection ()
@@ -83,7 +82,6 @@ let create_results_dir () =
       L.die UserError "ERROR: %s@\nPlease remove '%s' and try again" error Config.results_dir
   ) ;
   Unix.mkdir_p Config.results_dir ;
-  Unix.mkdir_p (Config.results_dir ^/ Config.events_dir_name) ;
   List.iter ~f:Unix.mkdir_p results_dir_dir_markers ;
   prepare_logging_and_db () ;
   ()

@@ -51,8 +51,6 @@ let pp_failure_kind fmt = function
       F.fprintf fmt "CRASH (%s)" msg
-let failure_kind_to_string failure_kind = Format.asprintf "%a" pp_failure_kind failure_kind
 (** Count the number of symbolic operations *)
 (** Timeout in seconds for each function *)

@@ -72,5 +72,3 @@ val try_finally : f:(unit -> 'a) -> finally:(unit -> unit) -> 'a
     so [finally ()] should return reasonably quickly. *)
 val pp_failure_kind : Format.formatter -> failure_kind -> unit
-val failure_kind_to_string : failure_kind -> string

@@ -11,9 +11,6 @@ module F = Format
 module Hashtbl = Caml.Hashtbl
 module L = Die
-(** initial process times *)
-let initial_times = Unix.times ()
 (** recursively traverse a path for files ending with a given extension *)
 let find_files ~path ~extension =
   let rec traverse_dir_aux init dir_path =

@@ -8,9 +8,6 @@
 open! IStd
-val initial_times : Unix.process_times
-(** initial process times *)
 val find_files : path:string -> extension:string -> string list
 (** recursively traverse a path for files ending with a given extension *)

@@ -683,8 +683,7 @@ let resolve_args prop args =
 type resolve_and_analyze_result =
   { resolved_pname: Procname.t
   ; resolved_procdesc_opt: Procdesc.t option
-  ; resolved_summary_opt: Summary.t option
-  ; dynamic_dispatch_status: EventLogger.dynamic_dispatch option }
+  ; resolved_summary_opt: Summary.t option }
 (** Resolve the procedure name and run the analysis of the resolved procedure if not already
     analyzed *)
@@ -721,12 +720,8 @@ let resolve_and_analyze tenv ~caller_summary ?(has_clang_model = false) prop arg
       ~caller_pdesc:(Summary.get_proc_desc caller_summary)
       tenv prop args callee_proc_name call_flags
   in
-  let dynamic_dispatch_status =
-    if Procname.equal callee_proc_name resolved_pname then None
-    else Some EventLogger.Dynamic_dispatch_successful
-  in
   let resolved_procdesc_opt, resolved_summary_opt = analyze_ondemand resolved_pname in
-  {resolved_pname; resolved_procdesc_opt; resolved_summary_opt; dynamic_dispatch_status}
+  {resolved_pname; resolved_procdesc_opt; resolved_summary_opt}
 (** recognize calls to the constructor java.net.URL and splits the argument string to be only the
@@ -1101,8 +1096,7 @@ let resolve_and_analyze_no_dynamic_dispatch current_summary tenv prop_r n_actual
   in
   { resolved_pname
   ; resolved_procdesc_opt= Ondemand.get_proc_desc resolved_pname
-  ; resolved_summary_opt
-  ; dynamic_dispatch_status= None }
+  ; resolved_summary_opt }
 let resolve_and_analyze_clang current_summary tenv prop_r n_actual_params callee_pname call_flags =
@@ -1132,20 +1126,12 @@ let resolve_and_analyze_clang current_summary tenv prop_r n_actual_params callee
        false
      in
      if clang_model_specialized_failure then
-        let result =
-          resolve_and_analyze_no_dynamic_dispatch current_summary tenv prop_r n_actual_params
-            callee_pname call_flags
-        in
-        { result with
-          dynamic_dispatch_status= Some EventLogger.Dynamic_dispatch_model_specialization_failure }
+        resolve_and_analyze_no_dynamic_dispatch current_summary tenv prop_r n_actual_params
+          callee_pname call_flags
      else resolve_and_analyze_result
    with SpecializeProcdesc.UnmatchedParameters ->
-      let result =
-        resolve_and_analyze_no_dynamic_dispatch current_summary tenv prop_r n_actual_params
-          callee_pname call_flags
-      in
-      { result with
-        dynamic_dispatch_status= Some EventLogger.Dynamic_dispatch_parameters_arguments_mismatch }
+      resolve_and_analyze_no_dynamic_dispatch current_summary tenv prop_r n_actual_params
+        callee_pname call_flags
  else
    resolve_and_analyze_no_dynamic_dispatch current_summary tenv prop_r n_actual_params callee_pname
      call_flags
@@ -1216,14 +1202,12 @@ let rec sym_exec exe_env tenv current_summary instr_ (prop_ : Prop.normal Prop.t
    | _ ->
        instr_
  in
-  let skip_call ?(is_objc_instance_method = false) ?(callee_attributes = None) ~reason prop path
-      callee_pname ret_annots loc ret_id_typ ret_typ actual_args =
+  let skip_call ?(is_objc_instance_method = false) ~reason prop path callee_pname ret_annots loc
+      ret_id_typ ret_typ actual_args =
    let skip_res () =
      let exn = Exceptions.Skip_function (Localise.desc_skip_function callee_pname) in
      Reporting.log_issue_deprecated_using_state Exceptions.Info current_pname exn ;
      L.d_printfln "Skipping function '%a': %s" Procname.pp callee_pname reason ;
-      Tabulation.log_call_trace ~caller_name:current_pname ~callee_name:callee_pname
-        ?callee_attributes ~reason loc Tabulation.CR_skip ;
      unknown_or_scan_call ~is_scan:false ~reason ret_typ ret_annots
        Builtin.
          { summary= current_summary
@@ -1381,7 +1365,6 @@ let rec sym_exec exe_env tenv current_summary instr_ (prop_ : Prop.normal Prop.t
          let resolved_pname = resolve_and_analyze_result.resolved_pname in
          let resolved_pdesc_opt = resolve_and_analyze_result.resolved_procdesc_opt in
          let resolved_summary_opt = resolve_and_analyze_result.resolved_summary_opt in
-          let dynamic_dispatch_status = resolve_and_analyze_result.dynamic_dispatch_status in
          Logging.d_printfln "Original callee %s" (Procname.to_unique_id callee_pname) ;
          Logging.d_printfln "Resolved callee %s" (Procname.to_unique_id resolved_pname) ;
          let sentinel_result =
@@ -1448,14 +1431,13 @@ let rec sym_exec exe_env tenv current_summary instr_ (prop_ : Prop.normal Prop.t
                        ClangMethodKind.equal attrs.ProcAttributes.clang_method_kind
                          ClangMethodKind.OBJC_INSTANCE
                      in
-                      skip_call ~is_objc_instance_method ~callee_attributes:(Some attrs)
-                        ~reason prop path resolved_pname ret_annots loc ret_id_typ ret_type
-                        n_actual_params )
+                      skip_call ~is_objc_instance_method ~reason prop path resolved_pname
+                        ret_annots loc ret_id_typ ret_type n_actual_params )
                  | None ->
                      skip_call ~reason prop path resolved_pname ret_annots loc ret_id_typ
                        (snd ret_id_typ) n_actual_params )
              | None ->
-                  proc_call ?dynamic_dispatch:dynamic_dispatch_status exe_env
+                  proc_call exe_env
                    (Option.value_exn resolved_summary_opt)
                    (call_args prop resolved_pname n_actual_params ret_id_typ loc)
          in
@@ -1851,7 +1833,7 @@ and sym_exec_free_model exe_env ret_id_typ args tenv summary loc prop path : Bui
 (** Perform symbolic execution for a function call *)
-and proc_call ?dynamic_dispatch exe_env callee_summary
+and proc_call exe_env callee_summary
    {Builtin.summary; tenv; prop_= pre; path; ret_id_typ; args= actual_pars; loc} =
  let caller_pname = Summary.get_proc_name summary in
  let callee_attrs = Summary.get_attributes callee_summary in
@@ -1899,11 +1881,11 @@ and proc_call ?dynamic_dispatch exe_env callee_summary
  | Language.Clang, ClangMethodKind.OBJC_INSTANCE ->
      handle_objc_instance_method_call actual_pars actual_params pre tenv (fst ret_id_typ) pdesc
        callee_pname loc path
-        (Tabulation.exe_function_call ?dynamic_dispatch exe_env callee_summary)
+        (Tabulation.exe_function_call exe_env callee_summary)
  | _ ->
      (* non-objective-c method call. Standard tabulation *)
-      Tabulation.exe_function_call ?dynamic_dispatch exe_env callee_summary tenv (fst ret_id_typ)
-        pdesc callee_pname loc actual_params pre path
+      Tabulation.exe_function_call exe_env callee_summary tenv (fst ret_id_typ) pdesc callee_pname
+        loc actual_params pre path
 (** perform symbolic execution for a single prop, and check for junk *)

@@ -38,8 +38,7 @@ val instrs :
 val diverge : Prop.normal Prop.t -> Paths.Path.t -> (Prop.normal Prop.t * Paths.Path.t) list
 (** Symbolic execution of the divergent pure computation. *)
-val proc_call :
-  ?dynamic_dispatch:EventLogger.dynamic_dispatch -> Exe_env.t -> Summary.t -> Builtin.t
+val proc_call : Exe_env.t -> Summary.t -> Builtin.t
 val unknown_or_scan_call : is_scan:bool -> reason:string -> Typ.t -> Annot.Item.t -> Builtin.t

@@ -24,24 +24,6 @@ type splitting =
   ; frame_typ: (Exp.t * Exp.t) list
   ; missing_typ: (Exp.t * Exp.t) list }
-(** kind of result of a procedure call *)
-type call_result =
-  | CR_success  (** successful call *)
-  | CR_not_met  (** precondition not met *)
-  | CR_not_found  (** the callee has no specs *)
-  | CR_skip  (** the callee was skipped *)
-let string_of_call_result = function
-  | CR_success ->
-      "OK"
-  | CR_not_met ->
-      "NotMet"
-  | CR_not_found ->
-      "NotFound"
-  | CR_skip ->
-      "Skip"
 type deref_error =
   | Deref_freed of PredSymb.res_action  (** dereference a freed pointer *)
   | Deref_minusone  (** dereference -1 *)
@@ -82,40 +64,6 @@ let print_results tenv actual_pre results =
   L.d_strln "***** END RESULTS FUNCTION CALL *******"
-let log_call_trace ~caller_name ~callee_name ?callee_attributes ?reason ?dynamic_dispatch loc res =
-  if !BiabductionConfig.footprint then
-    let get_valid_source_file loc =
-      let file = loc.Location.file in
-      if SourceFile.is_invalid file then None else Some file
-    in
-    let callee_clang_method_kind, callee_source_file =
-      match callee_attributes with
-      | Some attributes when Language.curr_language_is Language.Clang ->
-          let callee_clang_method_kind =
-            ClangMethodKind.to_string attributes.ProcAttributes.clang_method_kind
-          in
-          let callee_source_file = get_valid_source_file attributes.ProcAttributes.loc in
-          (Some callee_clang_method_kind, callee_source_file)
-      | Some attributes ->
-          (None, get_valid_source_file attributes.ProcAttributes.loc)
-      | None ->
-          (None, None)
-    in
-    let call_trace =
-      EventLogger.CallTrace
-        { call_location= loc
-        ; call_result= string_of_call_result res
-        ; callee_clang_method_kind
-        ; callee_source_file
-        ; callee_name= Procname.to_string callee_name
-        ; caller_name= Procname.to_string caller_name
-        ; lang= Procname.get_language caller_name |> Language.to_explicit_string
-        ; reason
-        ; dynamic_dispatch }
-    in
-    EventLogger.log call_trace
 (***************)
 let get_specs_from_payload summary =
@@ -156,8 +104,7 @@ let spec_rename_vars pname spec =
 (** Find and number the specs for [proc_name],
     after renaming their vars, and also return the parameters *)
-let spec_find_rename trace_call summary :
-    (int * Prop.exposed BiabductionSummary.spec) list * Pvar.t list =
+let spec_find_rename summary : (int * Prop.exposed BiabductionSummary.spec) list * Pvar.t list =
   let proc_name = Summary.get_proc_name summary in
   try
     let count = ref 0 in
@@ -167,11 +114,10 @@ let spec_find_rename trace_call summary :
    in
    let specs = get_specs_from_payload summary in
    let formals = Summary.get_formals summary in
-    if List.is_empty specs then (
-      trace_call CR_not_found ;
+    if List.is_empty specs then
      raise
        (Exceptions.Precondition_not_found
-           (Localise.verbatim_desc (Procname.to_string proc_name), __POS__)) ) ;
+           (Localise.verbatim_desc (Procname.to_string proc_name), __POS__)) ;
    let formal_parameters = List.map ~f:(fun (x, _) -> Pvar.mk_callee x proc_name) formals in
    (List.map ~f:rename_vars specs, formal_parameters)
  with Caml.Not_found ->
@@ -1297,7 +1243,7 @@ let prop_pure_to_footprint tenv (p : 'a Prop.t) : Prop.normal Prop.t =
 (** post-process the raw result of a function call *)
-let exe_call_postprocess tenv ret_id trace_call callee_pname callee_attrs loc results =
+let exe_call_postprocess tenv ret_id callee_pname callee_attrs loc results =
   let filter_valid_res = function Invalid_res _ -> false | Valid_res _ -> true in
   let valid_res0, invalid_res0 = List.partition_tf ~f:filter_valid_res results in
   let valid_res =
@@ -1319,7 +1265,7 @@ let exe_call_postprocess tenv ret_id trace_call callee_pname callee_attrs loc re
   let call_desc kind_opt = Localise.desc_precondition_not_met kind_opt callee_pname loc in
   let res_with_path_idents =
     if !BiabductionConfig.footprint then
-      if List.is_empty valid_res_cons_pre_missing then (
+      if List.is_empty valid_res_cons_pre_missing then
        (* no valid results where actual pre and missing are consistent *)
        match deref_errors with
        | error :: _ -> (
@@ -1338,15 +1284,12 @@ let exe_call_postprocess tenv ret_id trace_call callee_pname callee_attrs loc re
            in
            match error with
            | Dereference_error (Deref_minusone, desc, path_opt) ->
-                trace_call CR_not_met ;
                extend_path path_opt None ;
                raise (Exceptions.Dangling_pointer_dereference (true, desc, __POS__))
            | Dereference_error (Deref_undef_exp, desc, path_opt) ->
-                trace_call CR_not_met ;
                extend_path path_opt None ;
                raise (Exceptions.Dangling_pointer_dereference (true, desc, __POS__))
            | Dereference_error (Deref_null pos, desc, path_opt) ->
-                trace_call CR_not_met ;
                extend_path path_opt (Some pos) ;
                if Localise.is_parameter_not_null_checked_desc desc then
                  raise (Exceptions.Parameter_not_null_checked (desc, __POS__))
@@ -1356,15 +1299,12 @@ let exe_call_postprocess tenv ret_id trace_call callee_pname callee_attrs loc re
                  raise (Exceptions.Empty_vector_access (desc, __POS__))
                else raise (Exceptions.Null_dereference (desc, __POS__))
            | Dereference_error (Deref_freed _, desc, path_opt) ->
-                trace_call CR_not_met ;
                extend_path path_opt None ;
                raise (Exceptions.Biabd_use_after_free (desc, __POS__))
            | Dereference_error (Deref_undef (_, _, pos), desc, path_opt) ->
-                trace_call CR_not_met ;
                extend_path path_opt (Some pos) ;
                raise (Exceptions.Skip_pointer_dereference (desc, __POS__))
            | Prover_checks _ | Cannot_combine | Missing_sigma_not_empty | Missing_fld_not_empty ->
-                trace_call CR_not_met ;
                assert false )
        | [] ->
            (* no dereference error detected *)
@@ -1375,7 +1315,6 @@ let exe_call_postprocess tenv ret_id trace_call callee_pname callee_attrs loc re
                List.exists
                  ~f:(function
                    | Prover_checks (check :: _) ->
-                        trace_call CR_not_met ;
                        let exn = get_check_exn tenv check callee_pname loc __POS__ in
                        raise exn
                    | _ ->
@@ -1384,8 +1323,7 @@ let exe_call_postprocess tenv ret_id trace_call callee_pname callee_attrs loc re
              then call_desc (Some Localise.Pnm_bounds)
              else call_desc None
            in
-            trace_call CR_not_met ;
-            raise (Exceptions.Precondition_not_met (desc, __POS__)) )
+            raise (Exceptions.Precondition_not_met (desc, __POS__))
    else
      (* combine the valid results, and store diverging states *)
      let process_valid_res vr =
@@ -1416,14 +1354,12 @@ let exe_call_postprocess tenv ret_id trace_call callee_pname callee_attrs loc re
            (List.map ~f:(fun vr -> (vr.vr_pi, vr.vr_cons_res)) valid_res_miss_pi)
        with
        | None ->
-            trace_call CR_not_met ;
            raise (Exceptions.Precondition_not_met (call_desc None, __POS__))
        | Some cover ->
            L.d_strln "Found minimum cover" ;
            List.iter ~f:print_pi (List.map ~f:fst cover) ;
            List.concat_map ~f:snd cover )
    in
-  trace_call CR_success ;
   let res =
     List.map
       ~f:(fun (p, path) -> (quantify_path_idents_remove_constant_strings tenv p, path))
@@ -1453,14 +1389,10 @@ let exe_call_postprocess tenv ret_id trace_call callee_pname callee_attrs loc re
 (** Execute the function call and return the list of results with return value *)
-let exe_function_call ?dynamic_dispatch exe_env callee_summary tenv ret_id caller_pdesc callee_pname
-    loc actual_params prop path =
+let exe_function_call exe_env callee_summary tenv ret_id caller_pdesc callee_pname loc actual_params
+    prop path =
   let callee_attributes = Summary.get_attributes callee_summary in
-  let caller_name = Procdesc.get_proc_name caller_pdesc in
-  let trace_call =
-    log_call_trace ~caller_name ~callee_name:callee_pname ~callee_attributes ?dynamic_dispatch loc
-  in
-  let spec_list, formal_params = spec_find_rename trace_call callee_summary in
+  let spec_list, formal_params = spec_find_rename callee_summary in
   let nspecs = List.length spec_list in
   L.d_printfln "Found %d specs for function %s" nspecs (Procname.to_unique_id callee_pname) ;
   L.d_printfln "START EXECUTING SPECS FOR %s from state" (Procname.to_unique_id callee_pname) ;
@@ -1471,4 +1403,4 @@ let exe_function_call ?dynamic_dispatch exe_env callee_summary tenv ret_id calle
       actual_params formal_params callee_summary
   in
   let results = List.map ~f:exe_one_spec spec_list in
-  exe_call_postprocess tenv ret_id trace_call callee_pname callee_attributes loc results
+  exe_call_postprocess tenv ret_id callee_pname callee_attributes loc results

@@ -8,23 +8,6 @@
 open! IStd
-(** kind of result of a procedure call *)
-type call_result =
-  | CR_success  (** successful call *)
-  | CR_not_met  (** precondition not met *)
-  | CR_not_found  (** the callee has no specs *)
-  | CR_skip  (** the callee was skipped *)
-val log_call_trace :
-     caller_name:Procname.t
-  -> callee_name:Procname.t
-  -> ?callee_attributes:ProcAttributes.t
-  -> ?reason:string
-  -> ?dynamic_dispatch:EventLogger.dynamic_dispatch
-  -> Location.t
-  -> call_result
-  -> unit
 (** Interprocedural footprint analysis *)
 val remove_constant_string_class : Tenv.t -> 'a Prop.t -> Prop.normal Prop.t
@@ -53,8 +36,7 @@ val lookup_custom_errors : 'a Prop.t -> string option
 (** search in prop contains an error state *)
 val exe_function_call :
-     ?dynamic_dispatch:EventLogger.dynamic_dispatch
-  -> Exe_env.t
+     Exe_env.t
  -> Summary.t
  -> Tenv.t
  -> Ident.t

@@ -22,24 +22,9 @@ let validate_decl_from_channel chan =
   Clang_ast_b.read_decl chan
-(**FIXME(T54413835): Make the perf stats in the frontend work when one runs more than one frontend
-   action *)
-let register_perf_stats_report source_file =
-  let stats_type =
-    if Config.capture then PerfStats.ClangFrontend source_file
-    else if Config.is_checker_enabled Linters then PerfStats.ClangLinters source_file
-    else if Config.process_clang_ast then PerfStats.ClangProcessAST source_file
-    else
-      Logging.(die UserError)
-        "Clang frontend should be run in capture, linters or process AST mode."
-  in
-  PerfStats.register_report_at_exit stats_type
 let init_global_state_for_capture_and_linters source_file =
   L.(debug Capture Medium) "Processing %s" (Filename.basename (SourceFile.to_abs_path source_file)) ;
   Language.curr_language := Language.Clang ;
-  register_perf_stats_report source_file ;
   if Config.capture then DB.Results_dir.init source_file ;
   CFrontend_config.reset_global_state ()

@@ -1,34 +0,0 @@
-(*
-* Copyright (c) Facebook, Inc. and its affiliates.
-*
-* This source code is licensed under the MIT license found in the
-* LICENSE file in the root directory of this source tree.
-*)
-open! IStd
-let log_frontend_exception (trans_unit_ctx : CFrontend_config.translation_unit_context)
-exception_type exception_triggered_location (source_location_start, source_location_end)
-ast_node =
-let frontend_exception =
-EventLogger.FrontendException
-{ exception_type
-; source_location_start=
-CLocation.clang_to_sil_location trans_unit_ctx.source_file source_location_start
-; source_location_end=
-CLocation.clang_to_sil_location trans_unit_ctx.source_file source_location_end
-; exception_triggered_location
-; ast_node
-; lang= CFrontend_config.string_of_clang_lang trans_unit_ctx.lang }
-in
-EventLogger.log frontend_exception
-let log_caught_exception trans_unit_ctx exception_type exception_triggered_location source_range
-ast_node =
-log_frontend_exception trans_unit_ctx exception_type exception_triggered_location source_range
-ast_node
-let log_unexpected_decl trans_unit_ctx exception_triggered_location source_range ast_node =
-log_frontend_exception trans_unit_ctx "Skipped declaration inside a class"
-exception_triggered_location source_range ast_node

@@ -1,23 +0,0 @@
-(*
-* Copyright (c) Facebook, Inc. and its affiliates.
-*
-* This source code is licensed under the MIT license found in the
-* LICENSE file in the root directory of this source tree.
-*)
-open! IStd
-val log_caught_exception :
-CFrontend_config.translation_unit_context
--> string
--> Logging.ocaml_pos
--> Clang_ast_t.source_location * Clang_ast_t.source_location
--> string option
--> unit
-val log_unexpected_decl :
-CFrontend_config.translation_unit_context
--> Logging.ocaml_pos
--> Clang_ast_t.source_location * Clang_ast_t.source_location
--> string option
--> unit

@@ -24,7 +24,7 @@ let process_ast trans_unit_ctx ast_decl =
 if Config.test_determinator then export_tests_to_run source_file ast_decl
 in
 let call_f () =
-CFrontend_errors.protect trans_unit_ctx
+CFrontend_errors.protect
 ~recover:(fun () -> ())
 ~pp_context:(fun f () -> F.fprintf f "Error when processing %a" SourceFile.pp source_file)
 ~f

@@ -60,12 +60,4 @@ let do_source_file (translation_unit_context : CFrontend_config.translation_unit
 || Option.is_some Config.icfg_dotty_outfile
 then DotCfg.emit_frontend_cfg source_file cfg ;
 L.debug Capture Verbose "Stored on disk:@[<v>%a@]@." Cfg.pp_proc_signatures cfg ;
-let procedures_translated_summary =
-EventLogger.ProceduresTranslatedSummary
-{ procedures_translated_total= !CFrontend_config.procedures_attempted
-; procedures_translated_failed= !CFrontend_config.procedures_failed
-; lang= CFrontend_config.string_of_clang_lang translation_unit_context.lang
-; source_file= translation_unit_context.source_file }
-in
-EventLogger.log procedures_translated_summary ;
 ()

@@ -11,10 +11,6 @@ open! IStd
 type clang_lang = C | CPP | ObjC | ObjCPP [@@deriving compare]
-let string_of_clang_lang (lang : clang_lang) : string =
-match lang with C -> "C" | CPP -> "CPP" | ObjC -> "ObjC" | ObjCPP -> "ObjCPP"
 let equal_clang_lang = [%compare.equal: clang_lang]
 type translation_unit_context =

@@ -11,8 +11,6 @@ open! IStd
 type clang_lang = C | CPP | ObjC | ObjCPP [@@deriving compare]
-val string_of_clang_lang : clang_lang -> string
 val equal_clang_lang : clang_lang -> clang_lang -> bool
 type translation_unit_context =

@@ -53,7 +53,7 @@ module CFrontend_decl_funct (T : CModule_type.CTranslation) : CModule_type.CFron
 | _ ->
 ()
 in
-CFrontend_errors.protect ~f ~recover ~pp_context trans_unit_ctx
+CFrontend_errors.protect ~f ~recover ~pp_context
 let function_decl trans_unit_ctx tenv cfg func_decl block_data_opt =
@@ -86,9 +86,7 @@ module CFrontend_decl_funct (T : CModule_type.CTranslation) : CModule_type.CFron
 return_param_typ_opt outer_context_opt extra_instrs
 | None ->
 ()
-with CFrontend_errors.IncorrectAssumption e ->
-ClangLogging.log_caught_exception trans_unit_ctx "IncorrectAssumption" e.position
-e.source_range e.ast_node
+with CFrontend_errors.IncorrectAssumption _ -> ()
 let process_method_decl ?(set_objc_accessor_attr = false) ?(is_destructor = false) trans_unit_ctx
@@ -134,9 +132,7 @@ module CFrontend_decl_funct (T : CModule_type.CTranslation) : CModule_type.CFron
 ignore
 (CMethod_trans.create_local_procdesc ~set_objc_accessor_attr trans_unit_ctx cfg tenv
 ms [] [])
-with CFrontend_errors.IncorrectAssumption e ->
-ClangLogging.log_caught_exception trans_unit_ctx "IncorrectAssumption" e.position
-e.source_range e.ast_node
+with CFrontend_errors.IncorrectAssumption _ -> ()
 let process_property_implementation trans_unit_ctx tenv cfg curr_class
@@ -239,10 +235,8 @@ module CFrontend_decl_funct (T : CModule_type.CTranslation) : CModule_type.CFron
 | PragmaDetectMismatchDecl _
 | StaticAssertDecl _
 | TranslationUnitDecl _ ->
-let decl_info = Clang_ast_proj.get_decl_tuple dec in
-ClangLogging.log_unexpected_decl trans_unit_ctx __POS__
-decl_info.Clang_ast_t.di_source_range
-(Some (Clang_ast_proj.get_decl_kind_string dec))
+(* TODO: some form of logging *)
+()
 let process_methods trans_unit_ctx tenv cfg curr_class decl_list =
@@ -422,8 +416,7 @@ module CFrontend_decl_funct (T : CModule_type.CTranslation) : CModule_type.CFron
 ~pp_context:(fun fmt () ->
 F.fprintf fmt "Error adding types from decl '%a'"
 (Pp.of_string ~f:Clang_ast_j.string_of_decl)
-dec )
-trans_unit_ctx ;
+dec ) ;
 List.iter ~f:translate method_decls
 | _ ->
 () ) ;

@@ -29,7 +29,7 @@ let incorrect_assumption position source_range ?ast_node fmt =
 F.kasprintf (fun msg -> raise (IncorrectAssumption {msg; position; source_range; ast_node})) fmt
-let protect ~f ~recover ~pp_context (trans_unit_ctx : CFrontend_config.translation_unit_context) =
+let protect ~f ~recover ~pp_context =
 let log_and_recover ~print fmt =
 recover () ;
 (if print then L.internal_error else L.(debug Capture Quiet)) ("%a@\n" ^^ fmt) pp_context ()
@@ -39,13 +39,9 @@ let protect ~f ~recover ~pp_context (trans_unit_ctx : CFrontend_config.translati
 catching the exception) unless `--keep-going` was passed. Print errors we should fix
 (t21762295) to the console. *)
 | Unimplemented e ->
-ClangLogging.log_caught_exception trans_unit_ctx "Unimplemented" e.position e.source_range
-e.ast_node ;
 log_and_recover ~print:false "Unimplemented feature:@\n %s@\n" e.msg
 | IncorrectAssumption e ->
 (* FIXME(t21762295): we do not expect this to happen but it does *)
-ClangLogging.log_caught_exception trans_unit_ctx "IncorrectAssumption" e.position
-e.source_range e.ast_node ;
 log_and_recover ~print:true "Known incorrect assumption in the frontend: %s@\n" e.msg
 | exn ->
 let trace = Backtrace.get () in

@@ -41,7 +41,6 @@ val protect :
 f:(unit -> unit)
 -> recover:(unit -> unit)
 -> pp_context:(Format.formatter -> unit -> unit)
--> CFrontend_config.translation_unit_context
 -> unit
 (** Catch frontend errors in [f] to avoid crashing due to bugs in the frontend. Upon error [recover]
 is run and [pp_context] is used to provide more info to the user. *)

@@ -9,8 +9,6 @@ open! IStd
 (** Module for function to retrieve the location (file, line, etc) of instructions *)
-val clang_to_sil_location : SourceFile.t -> Clang_ast_t.source_location -> Location.t
 val should_translate_lib :
 SourceFile.t
 -> Clang_ast_t.source_range

@@ -58,9 +58,7 @@ let setup () =
 && not (Driver.is_analyze_mode driver_mode)
 then ( db_start () ; SourceFiles.mark_all_stale () )
 | Explore ->
-ResultsDir.assert_results_dir "please run an infer analysis first"
-| Events ->
-ResultsDir.assert_results_dir "have you run infer before?" ) ;
+ResultsDir.assert_results_dir "please run an infer analysis first" ) ;
 db_start () ;
 NullsafeInit.init () ;
 if CLOpt.is_originator then (RunState.add_run_to_sequence () ; RunState.store ()) ;
@@ -115,21 +113,6 @@ let log_environment_info () =
 print_active_checkers () ; print_scheduler () ; print_cores_used ()
-let prepare_events_logging () =
-(* there's no point in logging data from the events command. To fetch them we'd need to run events again... *)
-if InferCommand.equal Config.command Events then ()
-else
-let log_identifier_msg =
-Printf.sprintf "Infer log identifier is %s\n" (EventLogger.get_log_identifier ())
-in
-L.environment_info "%s" log_identifier_msg ;
-if CLOpt.is_originator && Config.print_log_identifier then L.progress "%s" log_identifier_msg ;
-let log_uncaught_exn exn ~exitcode =
-EventLogger.log (EventLogger.UncaughtException (exn, exitcode))
-in
-L.set_log_uncaught_exception_callback log_uncaught_exn
 let () =
 (* We specifically want to collect samples only from the main process until
 we figure out what other entries and how we want to collect *)
@@ -151,7 +134,6 @@ let () =
 if Config.print_builtins then Builtin.print_and_exit () ;
 setup () ;
 log_environment_info () ;
-prepare_events_logging () ;
 if Config.debug_mode && CLOpt.is_originator then (
 L.progress "Logs in %s@." (Config.results_dir ^/ Config.log_file) ;
 L.progress "Execution ID %Ld@." Config.execution_id ) ;
@@ -238,8 +220,6 @@ let () =
 "** Error running the reporting script:@\n** %s %s@\n** See error above@." prog
 (String.concat ~sep:" " args)
 | true, true ->
-L.user_error "Options --procedures and --source-files cannot be used together.@\n" )
-| Events ->
-EventLogger.dump () ) ;
+L.user_error "Options --procedures and --source-files cannot be used together.@\n" ) ) ;
 (* to make sure the exitcode=0 case is logged, explicitly invoke exit *)
 L.exit 0

@@ -72,11 +72,6 @@ let clean_compilation_command mode =
 None
-let register_perf_stats_report stats_type =
-let rtime_span, initial_times = (Mtime_clock.counter (), Unix.times ()) in
-PerfStats.register_report (PerfStats.Time (rtime_span, initial_times)) stats_type
 (** Clean up the results dir to select only what's relevant to go in the Buck cache. In particular,
 get rid of non-deterministic outputs.*)
 let clean_results_dir () =
@@ -194,7 +189,6 @@ let capture_with_compilation_database db_files =
 let buck_capture build_cmd =
-register_perf_stats_report PerfStats.TotalFrontend ;
 let prog_build_cmd_opt =
 let prog, buck_args = (List.hd_exn build_cmd, List.tl_exn build_cmd) in
 match Config.buck_mode with
@@ -227,12 +221,10 @@ let buck_capture build_cmd =
 L.progress "Capturing in buck mode...@." ;
 if Option.exists ~f:BuckMode.is_clang_flavors Config.buck_mode then (
 RunState.set_merge_capture true ; RunState.store () ) ;
-Buck.clang_flavor_capture ~prog ~buck_build_cmd ) ;
-PerfStats.get_reporter PerfStats.TotalFrontend ()
+Buck.clang_flavor_capture ~prog ~buck_build_cmd )
 let python_capture build_system build_cmd =
-register_perf_stats_report PerfStats.TotalFrontend ;
 L.progress "Capturing in %s mode...@." (Config.string_of_build_system build_system) ;
 let infer_py = Config.lib_dir ^/ "python" ^/ "infer.py" in
 let args =
@@ -265,8 +257,7 @@ let python_capture build_system build_cmd =
 Config.print_usage_exit ()
 | status ->
 command_error_handling ~always_die:true ~prog:infer_py ~args status )
-() ;
-PerfStats.get_reporter PerfStats.TotalFrontend ()
+()
 let capture ~changed_files = function
@@ -322,16 +313,9 @@ let capture ~changed_files mode =
 ScubaLogging.execute_with_time_logging "capture" (fun () -> capture ~changed_files mode)
-let execute_analyze ~changed_files =
-register_perf_stats_report PerfStats.TotalBackend ;
-InferAnalyze.main ~changed_files ;
-PerfStats.get_reporter PerfStats.TotalBackend ()
-(* shadowed for tracing *)
 let execute_analyze ~changed_files =
 PerfEvent.(log (fun logger -> log_begin_event logger ~name:"analyze" ())) ;
-execute_analyze ~changed_files ;
+InferAnalyze.main ~changed_files ;
 PerfEvent.(log (fun logger -> log_end_event logger ()))
@@ -399,7 +383,7 @@ let analyze_and_report ?suppress_console_report ~changed_files mode =
 | _ when Config.infer_is_clang || Config.infer_is_javac ->
 (* Called from another integration to do capture only. *)
 (false, false)
-| (Capture | Compile | Events | Explore | Report | ReportDiff), _ ->
+| (Capture | Compile | Explore | Report | ReportDiff), _ ->
 (false, false)
 | (Analyze | Run), _ ->
 (true, true)
@@ -582,9 +566,7 @@ let mode_from_command_line =
 let run_prologue mode =
-if CLOpt.is_originator then (
-L.environment_info "%a@\n" Config.pp_version () ;
-PerfStats.register_report_at_exit PerfStats.Driver ) ;
+if CLOpt.is_originator then L.environment_info "%a@\n" Config.pp_version () ;
 if Config.debug_mode then L.environment_info "Driver mode:@\n%a@." pp_mode mode ;
 if CLOpt.is_originator then (
 if Config.dump_duplicate_symbols then reset_duplicates_file () ;
@@ -599,7 +581,6 @@ let run_prologue mode =
 let run_epilogue () =
 if CLOpt.is_originator then (
-if Config.developer_mode then StatsAggregator.generate_files () ;
 if Config.fail_on_bug then fail_on_issue_epilogue () ;
 () ) ;
 if Config.buck_cache_mode then clean_results_dir () ;

@@ -1,56 +0,0 @@
-(*
-* Copyright (c) Facebook, Inc. and its affiliates.
-*
-* This source code is licensed under the MIT license found in the
-* LICENSE file in the root directory of this source tree.
-*)
-open! IStd
-type t =
-{sum: float; avg: float; min: float; p10: float; median: float; p75: float; max: float; count: int}
-let to_json s =
-`Assoc
-[ ("sum", `Float s.sum)
-; ("avg", `Float s.avg)
-; ("min", `Float s.min)
-; ("p10", `Float s.p10)
-; ("median", `Float s.median)
-; ("p75", `Float s.p75)
-; ("max", `Float s.max)
-; ("count", `Int s.count) ]
-let compute_statistics values =
-match values with
-| [] ->
-None
-| _ :: _ as values ->
-let open Float in
-let num_elements = List.length values in
-let sum = List.fold ~f:(fun acc v -> acc +. v) ~init:0.0 values in
-let average = sum /. float_of_int num_elements in
-let values_arr = Array.of_list values in
-Array.sort
-~compare:(fun a b -> if equal a b then 0 else if a -. b < 0.0 then -1 else 1)
-values_arr ;
-let percentile pct =
-assert (pct >= 0.0 && pct <= 1.0) ;
-assert (Int.(num_elements > 0)) ;
-let max_index = Int.(num_elements - 1) in
-let pct_index = float_of_int max_index *. pct in
-let low_index = int_of_float (Stdlib.floor pct_index) in
-let high_index = int_of_float (Stdlib.ceil pct_index) in
-let low = values_arr.(low_index) in
-let high = values_arr.(high_index) in
-(low +. high) /. 2.0
-in
-Some
-{ sum
-; avg= average
-; min= percentile 0.0
-; p10= percentile 0.10
-; median= percentile 0.50
-; p75= percentile 0.75
-; max= percentile 1.0
-; count= num_elements }

@@ -1,14 +0,0 @@
-(*
-* Copyright (c) Facebook, Inc. and its affiliates.
-*
-* This source code is licensed under the MIT license found in the
-* LICENSE file in the root directory of this source tree.
-*)
-open! IStd
-type t
-val to_json : t -> Yojson.Basic.t
-val compute_statistics : float list -> t option

@@ -13,7 +13,6 @@ module L = Logging
 let init_global_state source_file =
 Language.curr_language := Language.Java ;
-PerfStats.register_report_at_exit (PerfStats.JavaFrontend source_file) ;
 DB.Results_dir.init source_file ;
 Ident.NameGenerator.reset () ;
 JContext.reset_exn_node_table ()
