[infer-out][9/9] migrate everything else

Summary:
The full inventory of everything in infer-out/. The main change is
around "issues directories": instead of registering them dynamically
they are now all declared statically (well, they kind of were already in
Config.ml).

Reviewed By: ngorogiannis

Differential Revision: D20894305

fbshipit-source-id: 1a06ec09d
master
Jules Villard 5 years ago committed by Facebook GitHub Bot
parent 8f74e4a3d8
commit 89fd5db0ae

@ -29,10 +29,10 @@ let issues_serializer : Errlog.t Procname.Map.t Serialization.serializer =
let iter ~f m = Procname.Map.iter f m
let store ~dir ~file m =
let store ~entry ~file m =
if not (Procname.Map.is_empty m) then (
let abbrev_source_file = DB.source_file_encoding file in
let issues_dir = Config.results_dir ^/ dir in
let issues_dir = ResultsDir.get_path entry in
Utils.create_dir issues_dir ;
let filename =
DB.filename_from_string (Filename.concat issues_dir (abbrev_source_file ^ ".issue"))
@ -45,8 +45,8 @@ let store ~dir ~file m =
let load_issues issues_file = Serialization.read_from_file issues_serializer issues_file
(** Load all the issues in the given dir and update the issues map *)
let load dir =
let issues_dir = Filename.concat Config.results_dir dir in
let load entry =
let issues_dir = ResultsDir.get_path entry in
let load_issues_to_map init issues_file =
let file = DB.filename_from_string (Filename.concat issues_dir issues_file) in
load_issues file

@ -20,9 +20,9 @@ val get_or_add : proc:Procname.t -> t -> t * Errlog.t
(** Get the error log for a given procname. If there is none, add an empty one to the map. Return
the resulting map together with the errlog. *)
val store : dir:string -> file:SourceFile.t -> t -> unit
(** If there are any issues in the log, [store ~dir ~file] stores map to [infer-out/dir/file].
val store : entry:ResultsDirEntryName.id -> file:SourceFile.t -> t -> unit
(** If there are any issues in the log, [store ~entry ~file] stores map to [infer-out/entry/file].
Otherwise, no file is written. *)
val load : string -> t
(** [load directory] walks [infer-out/directory], merging maps stored in files into one map. *)
val load : ResultsDirEntryName.id -> t
(** [load entry] walks [infer-out/entry], merging maps stored in files into one map. *)

@ -130,7 +130,7 @@ let tenv_serializer : t Serialization.serializer =
let global_tenv : t option ref = ref None
let global_tenv_path = Config.(results_dir ^/ global_tenv_filename) |> DB.filename_from_string
let global_tenv_path = ResultsDir.get_path JavaGlobalTypeEnvironment |> DB.filename_from_string
let read path = Serialization.read_from_file tenv_serializer path

@ -371,7 +371,7 @@ let do_frontend_checks (trans_unit_ctx : CFrontend_config.translation_unit_conte
(* We analyze the top level and then all the allowed declarations *)
ALIssues.invoke_set_of_checkers_on_node parsed_linters context (Ctl_parser_types.Decl ast) ;
List.iter ~f:(do_frontend_checks_decl parsed_linters context) allowed_decls ;
IssueLog.store !ALIssues.issue_log ~dir:Config.lint_issues_dir_name ~file:source_file ;
IssueLog.store !ALIssues.issue_log ~entry:LintIssues ~file:source_file ;
L.(debug Linters Medium) "End linting file %a@\n" SourceFile.pp source_file ;
CTL.save_dotty_when_in_debug_mode trans_unit_ctx.CFrontend_config.source_file
(*if CFrontend_config.tableaux_evaluation then (

@ -113,7 +113,7 @@ let debug_eval_end result =
let save_dotty_when_in_debug_mode source_file =
match !ctl_evaluation_tracker with
| Some tracker ->
let dotty_dir = Config.results_dir ^/ Config.lint_dotty_dir_name in
let dotty_dir = ResultsDir.get_path LintDotty in
Utils.create_dir dotty_dir ;
let source_file_basename = Filename.basename (SourceFile.to_abs_path source_file) in
let file = dotty_dir ^/ source_file_basename ^ ".dot" in

@ -18,7 +18,7 @@ let record_time_of ~f ~log_f =
log_f execution_duration ; result
let locks_dir = Config.procnames_locks_dir
let locks_dir = ResultsDir.get_path ProcnamesLocks
let locks_target = locks_dir ^/ "locks_target"

@ -24,7 +24,7 @@ type file_callback =
{ checker_name: string
; language: Language.t
; callback: file_callback_t
; issue_dir: string
; issue_dir: ResultsDirEntryName.id
(** Place for storing issues generated at file-level analysis stage (additionally to ones
generated by procedure-level callbacks which are stored in summaries) *) }
@ -39,8 +39,7 @@ let register_procedure_callback ~checker_name ?(dynamic_dispatch = false) langua
let register_file_callback ~checker_name language (callback : file_callback_t) ~issue_dir =
file_callbacks := {checker_name; language; callback; issue_dir} :: !file_callbacks ;
FileLevelAnalysisIssueDirs.register_dir_name issue_dir
file_callbacks := {checker_name; language; callback; issue_dir} :: !file_callbacks
let iterate_procedure_callbacks exe_env summary =
@ -89,5 +88,5 @@ let iterate_file_callbacks_and_store_issues procedures exe_env source_file =
if language_matches language then (
Language.curr_language := language ;
let issue_log = callback environment in
IssueLog.store ~file:source_file ~dir:issue_dir issue_log ) )
IssueLog.store ~file:source_file ~entry:issue_dir issue_log ) )
!file_callbacks

@ -45,8 +45,8 @@ val register_procedure_callback :
(** Register a procedure callback (see details above) *)
val register_file_callback :
checker_name:string -> Language.t -> file_callback_t -> issue_dir:string -> unit
(** Register a file callback (see details above). [issue_dir] must be unique for this type of
checker_name:string -> Language.t -> file_callback_t -> issue_dir:ResultsDirEntryName.id -> unit
(** Register a file callback (see details above). [issue_dir] must be unique for this type of
checker. *)
val iterate_procedure_callbacks : Exe_env.t -> Summary.t -> Summary.t

@ -17,7 +17,8 @@ let merge_global_tenvs infer_deps_file =
let global_tenv = Tenv.create () in
let merge infer_out_src =
let global_tenv_path =
infer_out_src ^/ Config.global_tenv_filename |> DB.filename_from_string
ResultsDirEntryName.get_path ~results_dir:infer_out_src JavaGlobalTypeEnvironment
|> DB.filename_from_string
in
Tenv.read global_tenv_path |> Option.iter ~f:(fun tenv -> Tenv.merge ~src:tenv ~dst:global_tenv)
in
@ -43,7 +44,7 @@ let merge_json_results infer_out_src json_entry =
let merge_all_json_results merge_results results_json_str =
L.progress "Merging %s files...@." results_json_str ;
let infer_deps_file = Config.(results_dir ^/ buck_infer_deps_file_name) in
let infer_deps_file = ResultsDir.get_path BuckDependencies in
Utils.iter_infer_deps ~project_root:Config.project_root ~f:merge_results infer_deps_file ;
L.progress "Done merging %s files@." results_json_str
@ -58,7 +59,7 @@ let merge_changed_functions () =
let merge_captured_targets () =
let time0 = Mtime_clock.counter () in
L.progress "Merging captured Buck targets...@\n%!" ;
let infer_deps_file = Config.(results_dir ^/ buck_infer_deps_file_name) in
let infer_deps_file = ResultsDir.get_path BuckDependencies in
DBWriter.merge ~infer_deps_file ;
ScubaLogging.execute_with_time_logging "merge_captured_tenvs" (fun () ->
merge_global_tenvs infer_deps_file ) ;

@ -280,8 +280,8 @@ let dump_duplicate_procs source_file procs =
None )
in
let output_to_file duplicate_procs =
Out_channel.with_file (Config.results_dir ^/ Config.duplicates_filename)
~append:true ~perm:0o666 ~f:(fun outc ->
Out_channel.with_file (ResultsDir.get_path DuplicateFunctions) ~append:true ~perm:0o666
~f:(fun outc ->
let fmt = F.formatter_of_out_channel outc in
List.iter duplicate_procs ~f:(fun (pname, source_captured) ->
F.fprintf fmt "DUPLICATE_SYMBOLS source:%a source_captured:%a pname:%a@\n" SourceFile.pp

@ -106,36 +106,22 @@ let assign = "<\"Assign\">"
with a direct array access where an error is produced and the analysis continues *)
let bound_error_allowed_in_procedure_call = true
let buck_infer_deps_file_name = "infer-deps.txt"
let buck_out_gen = "buck-out" ^/ "gen"
let buck_results_dir_name = "infer"
let captured_dir_name = "captured"
let clang_initializer_prefix = "__infer_globals_initializer_"
let clang_inner_destructor_prefix = "__infer_inner_destructor_"
let classnames_dir_name = "classnames"
let costs_report_json = "costs-report.json"
let default_failure_name = "ASSERTION_FAILURE"
(** Dotty output filename **)
let dotty_frontend_output = "proc_cfgs_frontend.dot"
let duplicates_filename = "duplicates.txt"
let trace_events_file = "perf_events.json"
(** exit code to use for the --fail-on-issue option *)
let fail_on_issue_exit_code = 2
let global_tenv_filename = ".global.tenv"
(** If true, treat calls to no-arg getters as idempotent w.r.t non-nullness *)
let idempotent_getters = true
@ -143,10 +129,6 @@ let ivar_attributes = "ivar_attributes"
let java_lambda_marker_infix = "$Lambda$"
let lint_dotty_dir_name = "lint_dotty"
let lint_issues_dir_name = "lint_issues"
let manual_biabduction = "BIABDUCTION CHECKER OPTIONS"
let manual_buck = "BUCK OPTIONS"
@ -190,28 +172,14 @@ let meet_level = 1
let nsnotification_center_checker_backend = false
let nullsafe_file_level_issues_dir_name = "nullsafe_file_level"
let procnames_locks_dir_name = "procnames_locks"
let property_attributes = "property_attributes"
let racerd_issues_dir_name = "racerd"
let report_condition_always_true_in_clang = false
let report_html_dir = "report.html"
let report_json = "report.json"
(** If true, sanity-check inferred preconditions against Nullable annotations and report
inconsistencies *)
let report_nullable_inconsistency = true
let report_txt = "report.txt"
let retain_cycle_dotty_dir = "retain_cycle_dotty"
(** If true, compact summaries before saving *)
let save_compact_summaries = true
@ -222,10 +190,6 @@ let source_file_extentions = [".java"; ".m"; ".mm"; ".c"; ".cc"; ".cpp"; ".h"]
let specs_files_suffix = ".specs"
let starvation_issues_dir_name = "starvation_issues"
let test_determinator_results = "test_determinator_results"
(** Enable detailed tracing information during array abstraction *)
let trace_absarray = false
@ -2055,9 +2019,8 @@ and source_files_cfg =
CLOpt.mk_bool ~long:"source-files-cfg"
~in_help:InferCommand.[(Explore, manual_explore_source_files)]
(Printf.sprintf
"Output a dotty file in infer-out/%s for each source file in the output of \
$(b,--source-files)"
captured_dir_name)
"Output a dotty file in %s for each source file in the output of $(b,--source-files)"
(ResultsDirEntryName.get_path ~results_dir:"infer-out" Debug))
and source_files_filter =
@ -2216,7 +2179,8 @@ and threadsafe_aliases =
and trace_events =
CLOpt.mk_bool ~long:"trace-events"
(Printf.sprintf "Emit Chrome performance trace events in infer-out/%s" trace_events_file)
(Printf.sprintf "Emit Chrome performance trace events in %s"
(ResultsDirEntryName.get_path ~results_dir:"infer-out" PerfEvents))
and trace_join =
@ -2721,7 +2685,10 @@ and force_delete_results_dir = !force_delete_results_dir
and force_integration = !force_integration
and from_json_report = Option.value !from_json_report ~default:(!results_dir ^/ report_json)
and from_json_report =
Option.value !from_json_report
~default:(ResultsDirEntryName.get_path ~results_dir:!results_dir ReportJson)
and frontend_stats = !frontend_stats
@ -3077,8 +3044,6 @@ let is_checker_enabled c =
if Checker.equal checker c then Some enabled else None )
let captured_dir = results_dir ^/ captured_dir_name
let clang_frontend_action_string =
let text = if capture then ["translating"] else [] in
let text = if is_checker_enabled Linters then "linting" :: text else text in
@ -3092,8 +3057,6 @@ let clang_frontend_action_string =
String.concat ~sep:", " text
let procnames_locks_dir = results_dir ^/ procnames_locks_dir_name
(* Specify treatment of dynamic dispatch in Java code: false 'none' treats dynamic dispatch as
a call to unknown code and true triggers lazy dynamic dispatch. The latter mode follows the
JVM semantics and creates procedure descriptions during symbolic execution using the type

@ -53,10 +53,6 @@ val bin_dir : string
val bound_error_allowed_in_procedure_call : bool
val buck_infer_deps_file_name : string
val captured_dir_name : string
val clang_exe_aliases : string list
val clang_initializer_prefix : string
@ -65,26 +61,18 @@ val clang_inner_destructor_prefix : string
val clang_plugin_path : string
val classnames_dir_name : string
val classpath : string option
val costs_report_json : string
val default_failure_name : string
val dotty_frontend_output : string
val duplicates_filename : string
val etc_dir : string
val fail_on_issue_exit_code : int
val fcp_dir : string
val global_tenv_filename : string
val idempotent_getters : bool
val initial_analysis_time : float
@ -96,10 +84,6 @@ val java_lambda_marker_infix : string
val lib_dir : string
val lint_dotty_dir_name : string
val lint_issues_dir_name : string
val load_average : float option
val max_narrows : int
@ -126,8 +110,6 @@ val pp_version : Format.formatter -> unit -> unit
val property_attributes : string
val racerd_issues_dir_name : string
val relative_path_backtrack : int
val report : bool
@ -138,17 +120,8 @@ val report_custom_error : bool
val report_force_relative_path : bool
val report_html_dir : string
val report_json : string
val report_nullable_inconsistency : bool
val report_txt : string
(** name of the file inside infer-out/ containing the issues as human-readable text *)
val retain_cycle_dotty_dir : string
val save_compact_summaries : bool
val smt_output : bool
@ -161,14 +134,8 @@ val sources : string list
val specs_files_suffix : string
val starvation_issues_dir_name : string
val test_determinator_results : string
val trace_absarray : bool
val trace_events_file : string
val unsafe_unret : string
val use_cost_threshold : bool
@ -428,8 +395,6 @@ val no_translate_libs : bool
val nullable_annotation : string option
val nullsafe_file_level_issues_dir_name : string
val nullsafe_disable_field_not_initialized_in_nonstrict_classes : bool
val nullsafe_optimistic_third_party_params_in_non_strict : bool
@ -646,11 +611,6 @@ val xcpretty : bool
val dynamic_dispatch : bool
val captured_dir : string
(** directory where the results of the capture phase are stored *)
val procnames_locks_dir : string
val toplevel_results_dir : string
(** In some integrations, eg Buck, infer subprocesses started by the build system (started by the
toplevel infer process) will have their own results directory; this points to the results

@ -63,7 +63,7 @@ let source_dir_get_internal_file source_dir extension =
(** get the source directory corresponding to a source file *)
let source_dir_from_source_file source_file =
Filename.concat Config.captured_dir (source_file_encoding source_file)
ResultsDir.get_path Debug ^/ source_file_encoding source_file
(** {2 Filename} *)
@ -132,7 +132,7 @@ module Results_dir = struct
let init ?(debug = false) source =
if SourceFile.is_invalid source then L.(die InternalError) "Invalid source file passed" ;
if debug || Config.html || Config.debug_mode || Config.frontend_tests then (
Utils.create_dir (path_to_filename Abs_root [Config.captured_dir_name]) ;
Utils.create_dir (ResultsDir.get_path Debug) ;
Utils.create_dir (path_to_filename (Abs_source_dir source) []) )

@ -142,7 +142,7 @@ module Implementation = struct
let merge_db infer_out_src =
let db_file = infer_out_src ^/ ResultsDatabase.database_filename in
let db_file = ResultsDirEntryName.get_path ~results_dir:infer_out_src CaptureDB in
let main_db = ResultsDatabase.get_database () in
Sqlite3.exec main_db (Printf.sprintf "ATTACH '%s' AS attached" db_file)
|> SqliteUtils.check_result_code main_db ~log:(Printf.sprintf "attaching database '%s'" db_file) ;

@ -1,15 +0,0 @@
(*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! IStd
let dir_names = ref []
let get_registered_dir_names () = !dir_names
let register_dir_name name =
if not (List.exists !dir_names ~f:(String.equal name)) then dir_names := name :: !dir_names

@ -1,16 +0,0 @@
(*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! IStd
val get_registered_dir_names : unit -> string list
(** Directory names responsible for storing checker-specific issues generated at file-level analysis
phase. (Those are additional issues on top of ones stored in summary after procedure analysis
phase). *)
val register_dir_name : string -> unit
(** Add directory name. No-op if was already added *)

@ -225,7 +225,7 @@ let logger =
lazy
(let log_file =
(* if invoked in a sub-dir (e.g., in Buck integrations), log inside the original log file *)
Config.toplevel_results_dir ^/ Config.trace_events_file
ResultsDirEntryName.get_path ~results_dir:Config.toplevel_results_dir PerfEvents
in
let is_toplevel_process = CommandLineOption.is_originator && not !ProcessPoolState.in_child in
( if is_toplevel_process then

@ -8,9 +8,8 @@
open! IStd
module L = Logging
let database_filename = "results.db"
let database_fullpath = Config.results_dir ^/ database_filename
(** cannot use {!ResultsDir.get_path} due to circular dependency so re-implement it *)
let results_dir_get_path entry = ResultsDirEntryName.get_path ~results_dir:Config.results_dir entry
let procedures_schema prefix =
Printf.sprintf
@ -55,7 +54,7 @@ let create_tables ?(prefix = "") db =
let create_db () =
let temp_db = Filename.temp_file ~in_dir:Config.results_dir database_filename ".tmp" in
let temp_db = Filename.temp_file ~in_dir:(results_dir_get_path Temporary) "results.db" ".tmp" in
let db = Sqlite3.db_open ~mutex:`FULL temp_db in
SqliteUtils.exec db ~log:"sqlite page size"
~stmt:(Printf.sprintf "PRAGMA page_size=%d" Config.sqlite_page_size) ;
@ -70,7 +69,7 @@ let create_db () =
(* Can't use WAL with custom VFS *)
() ) ;
SqliteUtils.db_close db ;
try Sys.rename temp_db database_fullpath
try Sys.rename temp_db (results_dir_get_path CaptureDB)
with Sys_error _ -> (* lost the race, doesn't matter *) ()
@ -154,7 +153,7 @@ end = struct
db_close () ;
let db =
Sqlite3.db_open ~mode:`NO_CREATE ~cache:`PRIVATE ~mutex:`FULL ?vfs:Config.sqlite_vfs
database_fullpath
(results_dir_get_path CaptureDB)
in
Sqlite3.busy_timeout db Config.sqlite_lock_timeout ;
SqliteUtils.exec db ~log:"synchronous=OFF" ~stmt:"PRAGMA synchronous=OFF" ;

@ -7,12 +7,6 @@
open! IStd
val database_filename : string
(** the relative path to the database from the results directory *)
val database_fullpath : string
(** the absolute path to the database file *)
val schema_hum : string
(** some human-readable string describing the tables *)

@ -17,16 +17,15 @@ module RunState = struct
let open Runstate_t in
{ run_sequence= []
; results_dir_format=
Printf.sprintf "db_filename: %s\ndb_schema: %s" ResultsDatabase.database_filename
Printf.sprintf "db_filename: %s\ndb_schema: %s"
(ResultsDirEntryName.get_path ~results_dir:"infer-out" CaptureDB)
ResultsDatabase.schema_hum
; should_merge_capture= false }
let state : Runstate_t.t ref = ref state0
let state_filename = ".infer_runstate.json"
let state_file_path = Config.results_dir ^/ state_filename
let state_file_path = get_path RunState
let store () =
Utils.with_file_out state_file_path ~f:(fun oc ->
@ -85,6 +84,7 @@ let results_dir_dir_markers = [get_path Specs]
let is_results_dir ~check_correct_version () =
let not_found = ref "" in
let capture_db_path = get_path CaptureDB in
let has_all_markers =
List.for_all results_dir_dir_markers ~f:(fun d ->
Sys.is_directory d = `Yes
@ -92,9 +92,9 @@ let is_results_dir ~check_correct_version () =
( not_found := d ^ "/" ;
false ) )
&& ( (not check_correct_version)
|| Sys.is_file ResultsDatabase.database_fullpath = `Yes
|| Sys.is_file capture_db_path = `Yes
||
( not_found := ResultsDatabase.database_fullpath ;
( not_found := capture_db_path ;
false ) )
in
Result.ok_if_true has_all_markers ~error:(Printf.sprintf "'%s' not found" !not_found)
@ -121,7 +121,7 @@ let remove_results_dir () =
let prepare_logging_and_db () =
L.setup_log_file () ;
PerfEvent.init () ;
if Sys.is_file ResultsDatabase.database_fullpath <> `Yes then ResultsDatabase.create_db () ;
if Sys.is_file (get_path CaptureDB) <> `Yes then ResultsDatabase.create_db () ;
ResultsDatabase.new_database_connection ()
@ -156,12 +156,6 @@ let assert_results_dir advice =
let scrub_for_incremental () =
DBWriter.reset_capture_tables () ;
let dirs_to_delete =
List.map
~f:(Filename.concat Config.results_dir)
(Config.[classnames_dir_name] @ FileLevelAnalysisIssueDirs.get_registered_dir_names ())
in
List.iter ~f:Utils.rmtree dirs_to_delete ;
List.iter ~f:Utils.rmtree
(ResultsDirEntryName.to_delete_before_incremental_capture_and_analysis
~results_dir:Config.results_dir) ;
@ -175,62 +169,7 @@ let scrub_for_caching () =
if cache_capture then DBWriter.canonicalize () ;
(* make sure we are done with the database *)
ResultsDatabase.db_close () ;
let should_delete_dir =
let dirs_to_delete =
Config.
[captured_dir_name (* debug only *); classnames_dir_name (* a cache for the Java frontend *)]
@ (* temporarily needed to build report.json, safe to delete *)
FileLevelAnalysisIssueDirs.get_registered_dir_names ()
in
List.mem ~equal:String.equal dirs_to_delete
in
let should_delete_file =
let files_to_delete =
(if cache_capture then [] else [ResultsDatabase.database_filename])
@ [ (* some versions of sqlite do not clean up after themselves *)
ResultsDatabase.database_filename ^ "-shm"
; ResultsDatabase.database_filename ^ "-wal" ]
in
let suffixes_to_delete = [".txt"; ".json"] in
fun name ->
(* Keep the JSON report and the JSON costs report *)
(not
(List.exists
~f:(String.equal (Filename.basename name))
Config.
[ report_json
; costs_report_json
; (* TestDeterminatorReport; TODO: delete, see next entry *)
"test_determinator.json"
; (* ChangedFunctions; TODO: this hard-coded string will be deleted in a next diff
when the logic for scrubbing will be entirely in {!ResultsDirEntryName}. *)
"changed_functions.json" ]))
&& ( List.mem ~equal:String.equal files_to_delete (Filename.basename name)
|| List.exists ~f:(Filename.check_suffix name) suffixes_to_delete )
in
let rec delete_temp_results name =
let rec cleandir dir =
match Unix.readdir_opt dir with
| Some entry ->
if should_delete_dir entry then Utils.rmtree (name ^/ entry)
else if
not
( String.equal entry Filename.current_dir_name
|| String.equal entry Filename.parent_dir_name )
then delete_temp_results (name ^/ entry) ;
cleandir dir (* next entry *)
| None ->
Unix.closedir dir
in
match Unix.opendir name with
| dir ->
cleandir dir
| exception Unix.Unix_error (Unix.ENOTDIR, _, _) ->
if should_delete_file name then Unix.unlink name ;
()
| exception Unix.Unix_error (Unix.ENOENT, _, _) ->
()
in
delete_temp_results Config.results_dir ;
List.iter ~f:Utils.rmtree
(ResultsDirEntryName.to_delete_before_caching_capture ~results_dir:Config.results_dir)
( (* some versions of sqlite do not clean up after themselves *) (get_path CaptureDB ^ "-shm")
:: (get_path CaptureDB ^ "-wal")
:: ResultsDirEntryName.to_delete_before_caching_capture ~results_dir:Config.results_dir )

@ -4,14 +4,43 @@
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! IStd
type id = ChangedFunctions | Logs | Specs | Temporary | TestDeterminatorReport
let buck_infer_deps_file_name = "infer-deps.txt"
type id =
| BuckDependencies
| CaptureDB
| ChangedFunctions
| Debug
| Differential
| DuplicateFunctions
| JavaClassnamesCache
| JavaGlobalTypeEnvironment
| LintDotty
| LintIssues
| Logs
| NullsafeFileIssues
| PerfEvents
| ProcnamesLocks
| RacerDIssues
| ReportCostsJson
| ReportHtml
| ReportJson
| ReportText
| RetainCycles
| RunState
| Specs
| StarvationIssues
| Temporary
| TestDeterminatorReport
| TestDeterminatorTempResults
[@@deriving enumerate]
type cleanup_action = Delete | Keep [@@deriving equal]
type entry_kind = Directory | File
type entry_kind = Directory | File | IssuesDirectory [@@deriving equal]
type t =
{ rel_path: string (** path inside infer-out/ *)
@ -23,21 +52,121 @@ type t =
e.g., a distributed Buck cache. *) }
let of_id = function
| BuckDependencies ->
{ rel_path= buck_infer_deps_file_name
; kind= File
; before_incremental_analysis= Delete
; before_caching_capture= Delete }
| CaptureDB ->
{ rel_path= "results.db"
; kind= File
; before_incremental_analysis= Keep
; before_caching_capture= Keep }
| ChangedFunctions ->
{ rel_path= "changed_functions.json"
; kind= File
; before_incremental_analysis= Delete
; before_caching_capture= Keep }
| Debug ->
{ rel_path= "captured"
; kind= Directory
; before_incremental_analysis= Keep
; before_caching_capture= Delete }
| Differential ->
{ rel_path= "differential"
; kind= Directory
; before_incremental_analysis= Delete
; before_caching_capture= Delete }
| DuplicateFunctions ->
{ rel_path= "duplicates.txt"
; kind= File
; before_incremental_analysis= Keep
; before_caching_capture= Delete }
| JavaClassnamesCache ->
{ rel_path= "classnames"
; kind= Directory
; before_incremental_analysis= Delete
; before_caching_capture= Delete }
| JavaGlobalTypeEnvironment ->
{ rel_path= ".global.tenv"
; kind= File
; before_incremental_analysis= Keep
; before_caching_capture= Keep }
| LintDotty ->
{ rel_path= "lint_dotty"
; kind= Directory
; before_incremental_analysis= Keep
; before_caching_capture= Delete }
| LintIssues ->
{ rel_path= "lint_issues"
; kind= IssuesDirectory
; before_incremental_analysis= Delete
; before_caching_capture= Delete }
| Logs ->
{ rel_path= "logs"
; kind= File
; before_incremental_analysis= Keep
; before_caching_capture= Delete }
| NullsafeFileIssues ->
{ rel_path= "nullsafe_file_level"
; kind= IssuesDirectory
; before_incremental_analysis= Delete
; before_caching_capture= Delete }
| PerfEvents ->
{ rel_path= "perf_events.json"
; kind= File
; before_incremental_analysis= Delete
; before_caching_capture= Delete }
| ProcnamesLocks ->
{ rel_path= "procnames_locks"
; kind= Directory
; before_incremental_analysis= Delete
; before_caching_capture= Delete }
| RacerDIssues ->
{ rel_path= "racerd"
; kind= IssuesDirectory
; before_incremental_analysis= Delete
; before_caching_capture= Delete }
| ReportCostsJson ->
{ rel_path= "costs-report.json"
; kind= File
; before_incremental_analysis= Delete
; before_caching_capture= Delete }
| ReportHtml ->
{ rel_path= "report.html"
; kind= File
; before_incremental_analysis= Delete
; before_caching_capture= Delete }
| ReportJson ->
{ rel_path= "report.json"
; kind= File
; before_incremental_analysis= Delete
; before_caching_capture= Delete }
| ReportText ->
{ rel_path= "report.txt"
; kind= File
; before_incremental_analysis= Delete
; before_caching_capture= Delete }
| RetainCycles ->
{ rel_path= "retain_cycle_dotty"
; kind= Directory
; before_incremental_analysis= Delete
; before_caching_capture= Delete }
| RunState ->
{ rel_path= ".infer_runstate.json"
; kind= File
; before_incremental_analysis= Keep
; before_caching_capture= Delete }
| Specs ->
{ rel_path= "specs"
; kind= Directory
; before_incremental_analysis= Keep
; before_caching_capture= Delete }
| StarvationIssues ->
{ rel_path= "starvation_issues"
; kind= IssuesDirectory
; before_incremental_analysis= Delete
; before_caching_capture= Delete }
| Temporary ->
{ rel_path= "tmp"
; kind= Directory
@ -48,6 +177,11 @@ let of_id = function
; kind= File
; before_incremental_analysis= Delete
; before_caching_capture= Keep }
| TestDeterminatorTempResults ->
{ rel_path= "test_determinator_results"
; kind= Directory
; before_incremental_analysis= Delete
; before_caching_capture= Delete }
let path_of_entry ~results_dir {rel_path; _} = results_dir ^/ rel_path
@ -68,3 +202,9 @@ let to_delete_before_incremental_capture_and_analysis ~results_dir =
let to_delete_before_caching_capture ~results_dir =
get_filtered_paths ~results_dir ~f:(fun {before_caching_capture; _} ->
equal_cleanup_action before_caching_capture Delete )
let get_issues_directories () =
List.filter all_of_id ~f:(fun id ->
let entry = of_id id in
equal_entry_kind entry.kind IssuesDirectory )

@ -10,17 +10,47 @@ open! IStd
directory you probably want to use {!ResultsDir.Entry} instead of this module. *)
type id =
| BuckDependencies (** list of Buck directories with infer-out/ directories *)
| CaptureDB (** the capture database *)
| ChangedFunctions (** results of the clang test determinator *)
| Debug (** directory containing debug data *)
| Differential (** contains the results of [infer reportdiff] *)
| DuplicateFunctions (** list of duplicated functions *)
| JavaClassnamesCache (** used when capturing Java jar dependencies *)
| JavaGlobalTypeEnvironment (** internal {!Tenv.t} object corresponding to the whole project *)
| LintDotty (** directory of linters' dotty debug output for CTL evaluation *)
| LintIssues (** directory of linters' issues *)
| Logs (** log file *)
| NullsafeFileIssues (** file-wide issues of the nullsafe analysis *)
| PerfEvents (** file containing events for performance profiling *)
| ProcnamesLocks
(** directory of per-{!Procname.t} file locks, used by the analysis scheduler in certain modes *)
| RacerDIssues (** directory of issues reported by the RacerD analysis *)
| ReportCostsJson (** reports of the costs analysis *)
| ReportHtml (** directory of the HTML report *)
| ReportJson (** the main product of the analysis: [report.json] *)
| ReportText (** a human-readable textual version of [report.json] *)
| RetainCycles (** directory of retain cycles dotty files *)
| RunState (** internal data about the last infer run *)
| Specs (** directory containing summaries as .specs files *)
| StarvationIssues (** directory of issues reported by the starvation analysis *)
| Temporary (** directory containing temp files *)
| TestDeterminatorReport (** the report produced by the test determinator capture mode *)
| TestDeterminatorTempResults (** a directory *)
val get_path : results_dir:string -> id -> string
(** the absolute path for the given entry *)
val get_issues_directories : unit -> id list
(** all the entries that correspond to directories containing temporary issue logs for certain
analyses *)
val to_delete_before_incremental_capture_and_analysis : results_dir:string -> string list
(** utility for {!ResultsDir.scrub_for_incremental}, you probably want to use that instead *)
val to_delete_before_caching_capture : results_dir:string -> string list
(** utility for {!ResultsDir.scrub_for_caching}, you probably want to use that instead *)
val buck_infer_deps_file_name : string
(** sad that we have to have this here but some code path is looking for all files with that name in
buck-out/ *)

@ -226,7 +226,7 @@ let exn_retain_cycle tenv cycle =
let retain_cycle = desc_retain_cycle tenv cycle in
let cycle_dotty = Format.asprintf "%a" RetainCyclesType.pp_dotty cycle in
if Config.debug_mode then (
let rc_dotty_dir = Filename.concat Config.results_dir Config.retain_cycle_dotty_dir in
let rc_dotty_dir = ResultsDir.get_path RetainCycles in
Utils.create_dir rc_dotty_dir ;
let rc_dotty_file = Filename.temp_file ~in_dir:rc_dotty_dir "rc" ".dot" in
RetainCyclesType.write_dotty_to_file rc_dotty_file cycle ) ;

@ -21,7 +21,7 @@ let () =
type callback_fun =
| Procedure of Callbacks.proc_callback_t
| DynamicDispatch of Callbacks.proc_callback_t
| File of {callback: Callbacks.file_callback_t; issue_dir: string}
| File of {callback: Callbacks.file_callback_t; issue_dir: ResultsDirEntryName.id}
type callback = callback_fun * Language.t
@ -57,10 +57,8 @@ let all_checkers =
; active= Config.is_checker_enabled Eradicate
; callbacks=
[ (Procedure Eradicate.proc_callback, Language.Java)
; ( File
{ callback= Eradicate.file_callback
; issue_dir= Config.nullsafe_file_level_issues_dir_name }
, Language.Java ) ] }
; (File {callback= Eradicate.file_callback; issue_dir= NullsafeFileIssues}, Language.Java)
] }
; { name= "fragment retains view"
; active= Config.is_checker_enabled FragmentRetainsView
; callbacks=
@ -99,10 +97,8 @@ let all_checkers =
; callbacks=
[ (Procedure RacerD.analyze_procedure, Language.Clang)
; (Procedure RacerD.analyze_procedure, Language.Java)
; ( File {callback= RacerD.file_analysis; issue_dir= Config.racerd_issues_dir_name}
, Language.Clang )
; ( File {callback= RacerD.file_analysis; issue_dir= Config.racerd_issues_dir_name}
, Language.Java ) ] }
; (File {callback= RacerD.file_analysis; issue_dir= RacerDIssues}, Language.Clang)
; (File {callback= RacerD.file_analysis; issue_dir= RacerDIssues}, Language.Java) ] }
(* toy resource analysis to use in the infer lab, see the lab/ directory *)
; { name= "resource leak"
; active= Config.is_checker_enabled ResourceLeak
@ -136,11 +132,9 @@ let all_checkers =
; active= Config.is_checker_enabled Starvation
; callbacks=
[ (Procedure Starvation.analyze_procedure, Language.Java)
; ( File {callback= Starvation.reporting; issue_dir= Config.starvation_issues_dir_name}
, Language.Java )
; (File {callback= Starvation.reporting; issue_dir= StarvationIssues}, Language.Java)
; (Procedure Starvation.analyze_procedure, Language.Clang)
; ( File {callback= Starvation.reporting; issue_dir= Config.starvation_issues_dir_name}
, Language.Clang ) ] }
; (File {callback= Starvation.reporting; issue_dir= StarvationIssues}, Language.Clang) ] }
; { name= "purity"
; active= Config.(is_checker_enabled Purity || is_checker_enabled LoopHoisting)
; callbacks=

@ -549,8 +549,7 @@ end = struct
loc_map SourceFile.Map.empty
in
SourceFile.Map.iter
(fun file loc_map ->
issue_log_of loc_map |> IssueLog.store ~dir:Config.starvation_issues_dir_name ~file )
(fun file loc_map -> issue_log_of loc_map |> IssueLog.store ~entry:StarvationIssues ~file)
source_map
end

@ -205,20 +205,18 @@ let () =
|> Option.iter ~f:(fun cfg -> Procname.Hash.add cfgs proc_name cfg) ) ;
(* emit the dot file in captured/... *)
DotCfg.emit_frontend_cfg source_file cfgs ) ;
L.result "CFGs written in %s/*/%s@." Config.captured_dir Config.dotty_frontend_output )
L.result "CFGs written in %s/*/%s@." (ResultsDir.get_path Debug)
Config.dotty_frontend_output )
| false, false ->
(* explore bug traces *)
if Config.html then
TraceBugs.gen_html_report
~report_json:Config.(results_dir ^/ report_json)
TraceBugs.gen_html_report ~report_json:(ResultsDir.get_path ReportJson)
~show_source_context:Config.source_preview ~max_nested_level:Config.max_nesting
~report_html_dir:Config.(results_dir ^/ report_html_dir)
~report_html_dir:(ResultsDir.get_path ReportHtml)
else
TraceBugs.explore ~selector_limit:None
~report_json:Config.(results_dir ^/ report_json)
~report_txt:Config.(results_dir ^/ report_txt)
~selected:Config.select ~show_source_context:Config.source_preview
~max_nested_level:Config.max_nesting
TraceBugs.explore ~selector_limit:None ~report_json:(ResultsDir.get_path ReportJson)
~report_txt:(ResultsDir.get_path ReportText) ~selected:Config.select
~show_source_context:Config.source_preview ~max_nested_level:Config.max_nesting
| true, true ->
L.user_error "Options --procedures and --source-files cannot be used together.@\n" ) ) ;
(* to make sure the exitcode=0 case is logged, explicitly invoke exit *)

@ -409,7 +409,11 @@ let run_buck_build prog buck_build_args =
L.debug Capture Verbose "BUCK OUT: %s@." line ;
match String.split ~on:' ' line with
| [_; target_path] ->
let filename = Config.project_root ^/ target_path ^/ Config.buck_infer_deps_file_name in
let filename =
ResultsDirEntryName.get_path
~results_dir:(Config.project_root ^/ target_path)
BuckDependencies
in
if PolyVariantEqual.(Sys.file_exists filename = `Yes) then filename :: acc else acc
| _ ->
L.internal_error "Couldn't parse buck target output: %s" line ;
@ -430,20 +434,23 @@ let run_buck_build prog buck_build_args =
let merge_deps_files depsfiles =
let buck_out = Config.project_root ^/ Config.buck_out_gen in
let depslines, depsfiles =
match (depsfiles, Config.keep_going, Config.buck_merge_all_deps) with
| [], true, _ ->
match depsfiles with
| [] when Config.keep_going || Config.buck_merge_all_deps ->
let infouts =
Utils.fold_folders ~init:[] ~path:buck_out ~f:(fun acc dir ->
if
String.is_substring dir ~substring:"infer-out"
&& PolyVariantEqual.(
Sys.file_exists @@ dir ^/ ResultsDatabase.database_filename = `Yes)
Sys.file_exists (ResultsDirEntryName.get_path ~results_dir:dir CaptureDB)
= `Yes)
then Printf.sprintf "\t\t%s" dir :: acc
else acc )
in
(infouts, depsfiles)
| [], _, true ->
let files = Utils.find_files ~path:buck_out ~extension:Config.buck_infer_deps_file_name in
(infouts, [])
| [] when Config.buck_merge_all_deps ->
let files =
Utils.find_files ~path:buck_out ~extension:ResultsDirEntryName.buck_infer_deps_file_name
in
([], files)
| _ ->
([], depsfiles)
@ -459,7 +466,7 @@ let clang_flavor_capture ~prog ~buck_build_cmd =
Process.create_process_and_wait ~prog ~args:["clean"] ;
let depsfiles = run_buck_build prog (buck_build_cmd @ capture_buck_args) in
let deplines = merge_deps_files depsfiles in
let infer_out_depsfile = Config.(results_dir ^/ buck_infer_deps_file_name) in
let infer_out_depsfile = ResultsDir.get_path BuckDependencies in
Utils.with_file_out infer_out_depsfile ~f:(fun out_chan ->
Out_channel.output_lines out_chan deplines ) ;
()

@ -89,7 +89,7 @@ let infer_deps_of_build_report build_report =
let out_line out_channel (target, target_output_path) =
Printf.fprintf out_channel "%s\t-\t%s\n" target (Config.project_root ^/ target_output_path)
in
let infer_deps = Config.(results_dir ^/ buck_infer_deps_file_name) in
let infer_deps = ResultsDir.get_path BuckDependencies in
Utils.with_file_out infer_deps ~f:(fun out_channel ->
List.iter target_path_list ~f:(out_line out_channel) )

@ -77,7 +77,7 @@ let clean_compilation_command mode =
let reset_duplicates_file () =
let start = Config.results_dir ^/ Config.duplicates_filename in
let start = ResultsDir.get_path DuplicateFunctions in
let delete () = Unix.unlink start in
let create () =
Unix.close (Unix.openfile ~perm:0o0666 ~mode:[Unix.O_CREAT; Unix.O_WRONLY] start)
@ -214,8 +214,8 @@ let execute_analyze ~changed_files =
let report ?(suppress_console = false) () =
let issues_json = Config.(results_dir ^/ report_json) in
JsonReports.write_reports ~issues_json ~costs_json:Config.(results_dir ^/ costs_report_json) ;
let issues_json = ResultsDir.get_path ReportJson in
JsonReports.write_reports ~issues_json ~costs_json:(ResultsDir.get_path ReportCostsJson) ;
(* Post-process the report according to the user config. By default, calls report.py to create a
human-readable report.
@ -226,8 +226,7 @@ let report ?(suppress_console = false) () =
Out_channel.output_string outc "The contents of this file have moved to report.txt.\n" ) ;
TextReport.create_from_json
~quiet:(Config.quiet || suppress_console)
~console_limit:Config.report_console_limit
~report_txt:Config.(results_dir ^/ report_txt)
~console_limit:Config.report_console_limit ~report_txt:(ResultsDir.get_path ReportText)
~report_json:issues_json ) ;
if Config.(test_determinator && process_clang_ast) then
TestDeterminator.merge_test_determinator_results () ;
@ -306,9 +305,7 @@ let analyze_and_report ?suppress_console_report ~changed_files mode =
(** as the Config.fail_on_bug flag mandates, exit with error when an issue is reported *)
let fail_on_issue_epilogue () =
let issues_json =
DB.Results_dir.(path_to_filename Abs_root [Config.report_json]) |> DB.filename_to_string
in
let issues_json = ResultsDir.get_path ReportJson in
match Utils.read_file issues_json with
| Ok lines ->
let issues = Jsonbug_j.report_of_string @@ String.concat ~sep:"" lines in

@ -325,8 +325,7 @@ let process_all_summaries_and_issues ~issues_outf ~costs_outf =
{error_filter; proc_name; proc_loc_opt= Some proc_location; err_key; err_data} )
!all_issues ;
(* Issues that are generated and stored outside of summaries by linter and checkers *)
List.iter (Config.lint_issues_dir_name :: FileLevelAnalysisIssueDirs.get_registered_dir_names ())
~f:(fun dir_name ->
List.iter (ResultsDirEntryName.get_issues_directories ()) ~f:(fun dir_name ->
IssueLog.load dir_name |> IssueLog.iter ~f:(write_lint_issues filters issues_outf linereader)
) ;
()

@ -42,6 +42,6 @@ let reportdiff ~current_report:current_report_fname ~previous_report:previous_re
~skip_duplicated_types:Config.skip_duplicated_types ~interesting_paths
else unfiltered_diff
in
let out_path = Config.results_dir ^/ "differential" in
let out_path = ResultsDir.get_path Differential in
Unix.mkdir_p out_path ;
Differential.to_files diff out_path

@ -101,7 +101,7 @@ let add_cmethod source_file program icfg cm proc_name =
let path_of_cached_classname cn =
let root_path = Config.(results_dir ^/ classnames_dir_name) in
let root_path = ResultsDir.get_path JavaClassnamesCache in
let package_path = List.fold ~f:Filename.concat ~init:root_path (JBasics.cn_package cn) in
Filename.concat package_path (JBasics.cn_simple_name cn ^ ".java")

@ -55,8 +55,8 @@ let load_tenv () =
| None ->
Tenv.create ()
| Some _ when Config.biabduction_models_mode ->
L.(die InternalError)
"Unexpected global tenv file found in '%s' while generating the models" Config.captured_dir
L.die InternalError "Unexpected global tenv file found in '%s' while generating the models"
(ResultsDir.get_path JavaGlobalTypeEnvironment)
| Some tenv ->
tenv

@ -317,7 +317,7 @@ let emit_tests_to_run_clang source_file relevant_tests =
if not (List.is_empty relevant_tests) then (
let json = `List (List.map ~f:(fun t -> `String t) relevant_tests) in
let abbrev_source_file = DB.source_file_encoding source_file in
let test_determinator_results_path = Config.results_dir ^/ Config.test_determinator_results in
let test_determinator_results_path = ResultsDir.get_path TestDeterminatorTempResults in
let outpath = test_determinator_results_path ^/ abbrev_source_file ^ ".json" in
Utils.create_dir test_determinator_results_path ;
Utils.write_json_to_file outpath json )
@ -341,7 +341,7 @@ let merge_test_determinator_results () =
in
main_results_list := List.append changed_json !main_results_list
in
let test_determinator_results_path = Config.results_dir ^/ Config.test_determinator_results in
let test_determinator_results_path = ResultsDir.get_path TestDeterminatorTempResults in
Utils.directory_iter merge_json_results test_determinator_results_path ;
let main_results_list_sorted =
List.dedup_and_sort

Loading…
Cancel
Save