[infer][backend] no longer rely on make to drive the Infer internal parallelism

Summary: It seems that we don't need to rely on `make` anymore as the internal process manager is working fine on Buck Java projects.

Reviewed By: jvillard

Differential Revision: D7903639

fbshipit-source-id: 9b32f05
master
Jeremy Dubreil 7 years ago committed by Facebook Github Bot
parent afe511b53d
commit 8ce79a0613

@ -13,7 +13,7 @@ open! IStd
module L = Logging
(** Create tasks to analyze an execution environment *)
let analyze_exe_env_tasks cluster exe_env : Tasks.t =
let create_exe_env_tasks source_file exe_env : Tasks.t =
L.progressbar_file () ;
Specs.clear_spec_tbl () ;
Typ.Procname.SQLite.clear_cache () ;
@ -21,18 +21,17 @@ let analyze_exe_env_tasks cluster exe_env : Tasks.t =
Tasks.create
[ (fun () ->
Callbacks.iterate_callbacks exe_env ;
if Config.write_html then Printer.write_all_html_files cluster ) ]
if Config.write_html then Printer.write_all_html_files source_file ) ]
(** Create tasks to analyze a source file *)
let analyze_cluster_tasks cluster_num (cluster: Cluster.t) : Tasks.t =
let exe_env = Exe_env.mk cluster in
L.(debug Analysis Medium)
"@\nProcessing cluster '%a' #%d@." SourceFile.pp cluster (cluster_num + 1) ;
analyze_exe_env_tasks cluster exe_env
let create_source_file_tasks (source_file: SourceFile.t) : Tasks.t =
let exe_env = Exe_env.mk source_file in
L.(debug Analysis Medium) "@\nProcessing '%a'@." SourceFile.pp source_file ;
create_exe_env_tasks source_file exe_env
let analyze_cluster cluster_num cluster = Tasks.run (analyze_cluster_tasks cluster_num cluster)
let analyze_source_file source_file = Tasks.run (create_source_file_tasks source_file)
let output_json_makefile_stats clusters =
let num_files = List.length clusters in
@ -47,15 +46,6 @@ let output_json_makefile_stats clusters =
Yojson.Basic.pretty_to_channel f file_stats
(** Load the cluster serialized in the .cluster file [fname] (written for the
    make-driven parallel analysis) and analyze it. *)
let process_cluster_cmdline fname =
match Cluster.load_from_file (DB.filename_from_string fname) with
| None ->
(* with [--keep-going] a missing cluster file is logged instead of fatal *)
(if Config.keep_going then L.internal_error else L.die InternalError)
"Cannot find cluster file %s@." fname
| Some (nr, cluster) ->
(* cluster numbers stored in .cluster files are 1-based (see
   ClusterMakefile.create_cluster_makefile); convert to a 0-based index *)
analyze_cluster (nr - 1) cluster
let print_legend () =
L.progress "Starting analysis...@\n" ;
L.progress "@\n" ;
@ -73,12 +63,12 @@ let print_legend () =
L.progress "@\n@?"
let cluster_should_be_analyzed ~changed_files cluster =
let source_file_should_be_analyzed ~changed_files source_file =
(* whether [fname] is one of the [changed_files] *)
let is_changed_file = Option.map changed_files ~f:(SourceFile.Set.mem cluster) in
let is_changed_file = Option.map changed_files ~f:(SourceFile.Set.mem source_file) in
let check_modified () =
let modified = SourceFiles.is_freshly_captured cluster in
if modified then L.debug Analysis Medium "Modified: %a@\n" SourceFile.pp cluster ;
let modified = SourceFiles.is_freshly_captured source_file in
if modified then L.debug Analysis Medium "Modified: %a@\n" SourceFile.pp source_file ;
modified
in
match is_changed_file with
@ -98,56 +88,40 @@ let register_active_checkers () =
()
let main ~changed_files ~makefile =
let main ~changed_files =
( match Config.modified_targets with
| Some file ->
MergeCapture.record_modified_targets_from_file file
| None ->
() ) ;
register_active_checkers () ;
match Config.cluster_cmdline with
| Some fname ->
process_cluster_cmdline fname
| None ->
(* delete all specs when doing a full analysis so that we do not report on procedures that do
(* delete all specs when doing a full analysis so that we do not report on procedures that do
not exist anymore *)
if not Config.reactive_mode then DB.Results_dir.clean_specs_dir () ;
let all_clusters = SourceFiles.get_all () in
let clusters_to_analyze =
List.filter ~f:(cluster_should_be_analyzed ~changed_files) all_clusters
in
let n_clusters_to_analyze = List.length clusters_to_analyze in
L.progress "Found %d%s source file%s to analyze in %s@." n_clusters_to_analyze
( if Config.reactive_mode || Option.is_some changed_files then
" (out of " ^ string_of_int (List.length all_clusters) ^ ")"
else "" )
(if Int.equal n_clusters_to_analyze 1 then "" else "s")
Config.results_dir ;
let is_java =
lazy
(List.exists
~f:(fun cl -> Filename.check_suffix ".java" (SourceFile.to_string cl))
all_clusters)
in
print_legend () ;
if not (Lazy.force is_java) then (
(* Java uses ZipLib which is incompatible with forking *)
(* per-procedure parallelism *)
L.environment_info "Per-procedure parallelism jobs: %d@." Config.jobs ;
if makefile <> "" then ClusterMakefile.create_cluster_makefile [] makefile ;
(* Prepare tasks one cluster at a time while executing in parallel *)
let runner = Tasks.Runner.create ~jobs:Config.jobs in
let cluster_start_tasks i cluster =
let tasks = analyze_cluster_tasks i cluster in
let aggregate_tasks = Tasks.aggregate ~size:Config.procedures_per_process tasks in
Tasks.Runner.start runner ~tasks:aggregate_tasks
in
List.iteri ~f:cluster_start_tasks clusters_to_analyze ;
Tasks.Runner.complete runner )
else if makefile <> "" then
ClusterMakefile.create_cluster_makefile clusters_to_analyze makefile
else (
(* This branch is reached when -j 1 is used *)
List.iteri ~f:analyze_cluster clusters_to_analyze ;
L.progress "@\nAnalysis finished in %as@." Pp.elapsed_time () ) ;
output_json_makefile_stats clusters_to_analyze
if not Config.reactive_mode then DB.Results_dir.clean_specs_dir () ;
let all_source_files = SourceFiles.get_all () in
let source_files_to_analyze =
List.filter ~f:(source_file_should_be_analyzed ~changed_files) all_source_files
in
let n_source_files = List.length source_files_to_analyze in
L.progress "Found %d%s source file%s to analyze in %s@." n_source_files
( if Config.reactive_mode || Option.is_some changed_files then
" (out of " ^ string_of_int (List.length all_source_files) ^ ")"
else "" )
(if Int.equal n_source_files 1 then "" else "s")
Config.results_dir ;
print_legend () ;
if Int.equal Config.jobs 1 then (
List.iter ~f:analyze_source_file source_files_to_analyze ;
L.progress "@\nAnalysis finished in %as@." Pp.elapsed_time () )
else (
L.environment_info "Parallel jobs: %d@." Config.jobs ;
(* Prepare tasks one cluster at a time while executing in parallel *)
let runner = Tasks.Runner.create ~jobs:Config.jobs in
let analyze source_file =
let tasks = create_source_file_tasks source_file in
let aggregate_tasks = Tasks.aggregate ~size:Config.procedures_per_process tasks in
Tasks.Runner.start runner ~tasks:aggregate_tasks
in
List.iter ~f:analyze source_files_to_analyze ;
Tasks.Runner.complete runner ) ;
output_json_makefile_stats source_files_to_analyze

@ -12,5 +12,5 @@ open! IStd
(** Main module for the analysis after the capture phase *)
val main : changed_files:SourceFile.Set.t option -> makefile:string -> unit
val main : changed_files:SourceFile.Set.t option -> unit
(** Analyze the captured code *)

@ -1,50 +0,0 @@
(*
* Copyright (c) 2015 - present Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*)
open! IStd
module F = Format
(** Module to process clusters of procedures: (de)serialization of clusters and
    printing of the corresponding makefile rules. *)

(** a cluster is a file *)
type t = SourceFile.t

(** type stored in .cluster file: (n,cl) indicates cl is cluster n *)
type serializer_t = int * t

(** Serializer for clusters *)
let serializer : serializer_t Serialization.serializer =
Serialization.create_serializer Serialization.Key.cluster

(** Load a cluster from a file *)
let load_from_file (filename: DB.filename) : serializer_t option =
Serialization.read_from_file serializer filename

(** Save a cluster into a file *)
let store_to_file (filename: DB.filename) (data: serializer_t) =
Serialization.write_to_file serializer filename ~data

(* makefile target name of cluster [n], e.g. "cl3" *)
let cl_name n = "cl" ^ string_of_int n

(* on-disk filename of the serialized cluster [n], e.g. "xcl3.cluster" *)
let cl_file n = "x" ^ cl_name n ^ ".cluster"

(* print the makefile target name of cluster [n] *)
let pp_cluster_name fmt n = Format.pp_print_string fmt (cl_name n)

(** Print the makefile rule for cluster [(nr, cluster)]. As a side effect,
    serializes the cluster to the .cluster file that the rule's command
    ([$(INFERANALYZE) --cluster ...]) will load back. Note: [%@] escapes a
    literal '@' in the emitted make recipe lines. *)
let pp_cluster fmt (nr, cluster) =
let fname = Config.results_dir ^/ Config.multicore_dir_name ^/ cl_file nr in
let pp_cl fmt n = Format.pp_print_string fmt (cl_name n) in
store_to_file (DB.filename_from_string fname) (nr, cluster) ;
F.fprintf fmt "%a: @\n" pp_cl nr ;
F.fprintf fmt "\t%@$(INFERANALYZE) --cluster '%s'@\n" fname ;
(* touch the target of the rule to let `make` know that the job has been done *)
F.fprintf fmt "\t%@touch $%@@\n" ;
F.fprintf fmt "@\n"

@ -1,28 +0,0 @@
(*
* Copyright (c) 2016 - present Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*)
open! IStd
module F = Format
(** Module to process clusters of procedures. *)
(** a cluster is a file *)
type t = SourceFile.t
(** type stored in .cluster file: (n,cl) indicates cl is cluster n *)
type serializer_t = int * t
val load_from_file : DB.filename -> serializer_t option
(** Load a cluster from a file *)
val pp_cluster : F.formatter -> serializer_t -> unit
(** Print a cluster *)
val pp_cluster_name : F.formatter -> int -> unit
(** Print a cluster name *)

@ -1,49 +0,0 @@
(*
* Copyright (c) 2015 - present Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*)
open! IStd
module F = Format
(** Module to create a makefile with dependencies between clusters *)

(** Print the makefile preamble: the [INFERANALYZE] command used to re-invoke
    infer on a single cluster, the [CLUSTERS] variable listing every target,
    and the [default]/[all]/[test] rules. *)
let pp_prolog fmt clusters =
(* '#' starts a comment in make syntax, so it must be escaped in paths *)
let escape = Escape.escape_map (fun c -> if Char.equal c '#' then Some "\\#" else None) in
let infer_flag_of_compilation_db = function
| `Escaped f ->
F.sprintf "--compilation-database-escaped '%s'" f
| `Raw f ->
F.sprintf "--compilation-database '%s'" f
in
(* forward the clang compilation databases to the per-cluster invocations *)
let compilation_dbs_cmd =
List.map ~f:infer_flag_of_compilation_db !Config.clang_compilation_dbs
|> String.concat ~sep:" " |> escape
in
F.fprintf fmt "INFERANALYZE = '%s' --no-report --results-dir '%s' %s@\n@\n"
(Config.bin_dir ^/ InferCommand.(to_exe_name Analyze))
(escape Config.results_dir) compilation_dbs_cmd ;
F.pp_print_string fmt "CLUSTERS=" ;
(* cluster numbers are 1-based in the makefile *)
List.iteri ~f:(fun i _ -> F.fprintf fmt "%a " Cluster.pp_cluster_name (i + 1)) clusters ;
F.fprintf fmt "@\n@\ndefault: test@\n@\nall: test@\n@\n" ;
F.fprintf fmt "test: $(CLUSTERS)@\n" ;
if Config.show_progress_bar then F.fprintf fmt "\t%@echo@\n@."

(* "make clean" removes the per-cluster stamp files *)
let pp_epilog fmt () = F.fprintf fmt "@.clean:@.\trm -f $(CLUSTERS)@."

(** Write to [fname] a makefile with one analysis target per cluster in
    [clusters], suitable for running the analysis in parallel via `make -j`. *)
let create_cluster_makefile (clusters: Cluster.t list) (fname: string) =
let outc = Out_channel.create fname in
let fmt = Format.formatter_of_out_channel outc in
let do_cluster cluster_nr cluster =
(* emit the source file name as a make comment above each rule *)
F.fprintf fmt "#%a@\n" SourceFile.pp cluster ;
Cluster.pp_cluster fmt (cluster_nr + 1, cluster)
in
pp_prolog fmt clusters ;
List.iteri ~f:do_cluster clusters ;
pp_epilog fmt () ;
Out_channel.close outc

@ -42,5 +42,5 @@ val node_start_session : pp_name:(Format.formatter -> unit) -> Procdesc.Node.t -
val write_proc_html : Procdesc.t -> unit
(** Write html file for the procedure. *)
val write_all_html_files : Cluster.t -> unit
val write_all_html_files : SourceFile.t -> unit
(** Create filename.ext.html for the given source file. *)

@ -924,11 +924,6 @@ and clang_ignore_regex =
and classpath = CLOpt.mk_string_opt ~long:"classpath" "Specify the Java classpath"
and cluster =
CLOpt.mk_path_opt ~deprecated:["cluster"] ~long:"cluster" ~meta:"file"
"Specify a .cluster file to be analyzed"
and compilation_database =
CLOpt.mk_path_list ~long:"compilation-database" ~deprecated:["-clang-compilation-db-files"]
~in_help:InferCommand.[(Capture, manual_clang)]
@ -2474,8 +2469,6 @@ and clang_include_to_override_regex = !clang_include_to_override_regex
and classpath = !classpath
and cluster_cmdline = !cluster
and compute_analytics = !compute_analytics
and continue_capture = !continue

@ -312,8 +312,6 @@ val clang_ignore_regex : string option
val clang_include_to_override_regex : string option
val cluster_cmdline : string option
val command : InferCommand.t
val compute_analytics : bool

@ -23,7 +23,7 @@ module Key = struct
(** Current keys for various serializable objects. The keys are computed using the [generate_keys]
function below *)
let tenv, summary, cluster, issues = (425184201, 160179325, 579094948, 852343110)
let tenv, summary, issues = (425184201, 160179325, 852343110)
end
(** version of the binary files, to be incremented for each change *)

@ -16,9 +16,6 @@ module Key : sig
(** Serialization key, used to distinguish versions of serializers and avoid assert faults *)
type t
val cluster : t
(** current key for a cluster *)
val issues : t
(** current key for lint issues *)

@ -14,7 +14,6 @@ open! IStd
module CLOpt = CommandLineOption
module L = Logging
module F = Format
let run driver_mode =
let open Driver in
@ -123,13 +122,6 @@ let () =
L.progress "Logs in %s@." (Config.results_dir ^/ Config.log_file) ;
( match Config.command with
| Analyze ->
let pp_cluster_opt fmt = function
| None ->
F.pp_print_string fmt "(no cluster)"
| Some cluster ->
F.fprintf fmt "of cluster %s" (Filename.basename cluster)
in
L.progress "Starting analysis %a" pp_cluster_opt Config.cluster_cmdline ;
run Driver.Analyze
| Capture | Compile | Run ->
run (Lazy.force Driver.mode_from_command_line)

@ -300,30 +300,9 @@ let capture ~changed_files mode =
capture_with_compilation_database ~changed_files json_cdb
(* Drive the parallel analysis through `make`: generate a makefile of
   per-cluster targets in a fresh multicore directory, then run `make` on it
   with the configured number of jobs. *)
let run_parallel_analysis ~changed_files : unit =
let multicore_dir = Config.results_dir ^/ Config.multicore_dir_name in
(* start from a clean directory: stale makefiles/.cluster files from a
   previous run would be picked up otherwise *)
Utils.rmtree multicore_dir ;
Unix.mkdir_p multicore_dir ;
(* writes the makefile (and the .cluster files its rules reference) *)
InferAnalyze.main ~changed_files ~makefile:(multicore_dir ^/ "Makefile") ;
run_command ~prog:"make"
~args:
( "--directory"
:: multicore_dir
:: (if Config.keep_going then "--keep-going" else "--no-keep-going")
:: "--jobs"
:: string_of_int Config.jobs
:: Option.value_map
~f:(fun l -> ["--load-average"; string_of_float l])
~default:[] Config.load_average
(* keep make quiet unless infer itself is in debug mode *)
@ if Config.debug_mode then [] else ["--silent"] )
()
let execute_analyze ~changed_files =
register_perf_stats_report PerfStats.TotalBackend ;
if Int.equal Config.jobs 1 || Config.cluster_cmdline <> None then
InferAnalyze.main ~changed_files ~makefile:""
else run_parallel_analysis ~changed_files ;
InferAnalyze.main ~changed_files ;
PerfStats.get_reporter PerfStats.TotalBackend ()

@ -12,9 +12,6 @@ open! IStd
(** Escape a string for use in a CSV or XML file: replace reserved
characters with escape sequences *)
val escape_map : (char -> string option) -> string -> string
(** escape a string specifying the per character escaping function *)
val escape_dotty : string -> string
(** escape a string to be used in a dotty file *)

Loading…
Cancel
Save