[sql] add proc attributes DB

Summary:
Use an SQLite database to store proc attributes, instead of files on disk.

Wrap SQLite operations in two layers:
1. `SqliteUtils` provides helper functions to make sure DB operations succeed
2. `KeyValue` provides a functor to expose a simple and type-safe key/value store backed by the SQLite DB.
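
To make the intent concrete, here is a minimal usage sketch of the functor (illustrative only, not part of this diff; `MyTable` and its `my_table` table are hypothetical, and the table is assumed to have been created in the results DB alongside the attributes table):

open! IStd

(* describe one key/value table; keys and values can be any marshalable types *)
module MyTable = struct
  type key = string

  type value = int list

  let table = "my_table" (* hypothetical; assumed to exist in results.db *)
end

module MyStore = KeyValue.Make (MyTable)

let _demo () =
  (* typed operations; keys and values are marshaled to SQLite BLOBs internally *)
  MyStore.replace "answer" [4; 2] ;
  (match MyStore.find "answer" with Some [4; 2] -> () | _ -> assert false) ;
  MyStore.delete "answer"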

Reviewed By: jberdine

Differential Revision: D5640053

fbshipit-source-id: 31050e5
Branch: master
Author: Jules Villard, committed by Facebook GitHub Bot
parent 03a727143a
commit 2adf654950

@@ -232,8 +232,9 @@ endif
.PHONY: ocaml_unit_test
ocaml_unit_test: test_build
$(QUIET)$(REMOVE_DIR) infer-out-unit-tests
$(QUIET)$(call silent_on_success,Running OCaml unit tests,\
$(BUILD_DIR)/test/inferunit.bc)
INFER_ARGS=--results-dir^infer-out-unit-tests $(BUILD_DIR)/test/inferunit.bc)
define silence_make
($(1) 2> >(grep -v "warning: \(ignoring old\|overriding\) \(commands\|recipe\) for target") \

@@ -11,6 +11,7 @@ PKG parmap
PKG ppx_compare
PKG ptrees
PKG sawja
PKG sqlite3
PKG str
PKG unix
PKG xmlm

@@ -0,0 +1,97 @@
(*
* Copyright (c) 2017 - present Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*)
open! IStd
module L = Logging
type attributes_kind = ProcUndefined | ProcObjCAccessor | ProcDefined [@@deriving compare]
let least_relevant_up_to_proc_kind_exclusive = function
| ProcUndefined
-> []
| ProcObjCAccessor
-> [ProcUndefined]
| ProcDefined
-> [ProcUndefined; ProcObjCAccessor]
let most_relevant_down_to_proc_kind_inclusive = function
| ProcUndefined
-> [ProcDefined; ProcObjCAccessor; ProcUndefined]
| ProcObjCAccessor
-> [ProcDefined; ProcObjCAccessor]
| ProcDefined
-> [ProcDefined]
let proc_kind_of_attr (proc_attributes: ProcAttributes.t) =
if proc_attributes.is_defined then ProcDefined
else if Option.is_some proc_attributes.objc_accessor then ProcObjCAccessor
else ProcUndefined
let should_override_attr attr1 attr2 =
(* use the source file to be more deterministic in case the same procedure name is defined in several files *)
[%compare : attributes_kind * SourceFile.t]
(proc_kind_of_attr attr1, attr1.ProcAttributes.loc.file)
(proc_kind_of_attr attr2, attr2.ProcAttributes.loc.file)
> 0
module Table = struct
type key = Typ.Procname.t * attributes_kind
type value = ProcAttributes.t
let table = ResultsDir.attributes_table
end
module Store = KeyValue.Make (Table)
let load_aux ?(min_kind= ProcUndefined) pname =
List.find_map (most_relevant_down_to_proc_kind_inclusive min_kind) ~f:(fun pkind ->
Store.find (pname, pkind) )
let load pname : ProcAttributes.t option = load_aux pname
let store (attr: ProcAttributes.t) =
let pkind = proc_kind_of_attr attr in
let key = (attr.proc_name, pkind) in
if load attr.proc_name |> Option.value_map ~default:true ~f:(should_override_attr attr) then (
(* NOTE: We need to do this dance of adding the proc_kind to the key because there's a race condition between the time we load the attributes from the db and the time we write possibly better ones. We could avoid this by making the db schema richer than just key/value and turning the SELECT + REPLACE into an atomic transaction. *)
Store.replace key attr ;
least_relevant_up_to_proc_kind_exclusive pkind
|> List.iter ~f:(fun k -> Store.delete (attr.proc_name, k)) )
let load_defined pname = load_aux ~min_kind:ProcDefined pname
let get_correct_type_from_objc_class_name type_name =
(* ToDo: this function should return a type that includes a reference to the tenv computed by:
let class_method = Typ.Procname.get_default_objc_class_method (Typ.Name.name type_name);
switch (find_tenv_from_class_of_proc class_method) {
| Some tenv =>
*)
Some (Typ.mk (Tstruct type_name))
let find_file_capturing_procedure pname =
match load pname with
| None
-> None
| Some proc_attributes
-> let source_file = proc_attributes.ProcAttributes.source_file_captured in
let source_dir = DB.source_dir_from_source_file source_file in
let origin =
(* Procedure coming from include files if it has different location
than the file where it was captured. *)
match SourceFile.compare source_file proc_attributes.ProcAttributes.loc.file <> 0 with
| true
-> `Include
| false
-> `Source
in
let cfg_fname = DB.source_dir_get_internal_file source_dir ".cfg" in
let cfg_fname_exists =
PVariant.( = ) `Yes (Sys.file_exists (DB.filename_to_string cfg_fname))
in
if cfg_fname_exists then Some (source_file, origin) else None
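
A sketch (not part of this diff) of how `store` interacts with the kind ranking above, assuming a live database connection; `demo_pname` is a hypothetical procedure name:

let _demo demo_pname =
  let undefined_attr = ProcAttributes.default demo_pname Config.Clang in
  let defined_attr = {undefined_attr with ProcAttributes.is_defined= true} in
  Attributes.store defined_attr ;
  (* no-op: ProcDefined outranks ProcUndefined in [should_override_attr] *)
  Attributes.store undefined_attr ;
  assert (Option.is_some (Attributes.load_defined demo_pname))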

@@ -11,34 +11,28 @@
open! IStd
val store_attributes : ProcAttributes.t -> unit
type attributes_kind
module Table :
KeyValue.Table with type key = Typ.Procname.t * attributes_kind and type value = ProcAttributes.t
module Store : KeyValue.S with module Table = Table
val store : ProcAttributes.t -> unit
(** Save the attributes for the procedure into the attributes database. *)
val load_attributes : cache:bool -> Typ.Procname.t -> ProcAttributes.t option
(** Load the attributes for the procedure from the attributes database.
If cache is true, add the attribute to the global cache *)
val load : Typ.Procname.t -> ProcAttributes.t option
(** Load the attributes for the procedure from the attributes database. *)
val load_defined_attributes : cache_none:bool -> Typ.Procname.t -> ProcAttributes.t option
(** Load attrubutes for the procedure but only if is_defined is true *)
val load_defined : Typ.Procname.t -> ProcAttributes.t option
(** Load attributes for the procedure but only if is_defined is true *)
val get_correct_type_from_objc_class_name : Typ.Name.t -> Typ.t option
(** Given the name of an ObjC class, extract the type from the tenv where the class was defined. We
do this by adding a method that is unique to each class, and then finding the tenv that
corresponds to the class definition. *)
val find_file_capturing_procedure :
?cache:bool -> Typ.Procname.t -> (SourceFile.t * [`Include | `Source]) option
val find_file_capturing_procedure : Typ.Procname.t -> (SourceFile.t * [`Include | `Source]) option
(** Find the file where the procedure was captured, if a cfg for that file exists.
Return also a boolean indicating whether the procedure is defined in an
include file.
If cache is true, add the attribute to the global cache *)
type t
val stats : unit -> t
val to_json : t -> Yojson.Basic.json
val from_json : Yojson.Basic.json -> t
val aggregate : t list -> Yojson.Basic.json
include file. *)

@@ -1,216 +0,0 @@
(*
* Copyright (c) 2015 - present Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*)
open! IStd
open! PVariant
module Hashtbl = Caml.Hashtbl
module F = Format
module L = Logging
type attributes_kind = ProcUndefined | ProcObjCAccessor | ProcDefined [@@deriving compare]
let least_relevant_up_to_proc_kind_exclusive = function
| ProcUndefined
-> []
| ProcObjCAccessor
-> [ProcUndefined]
| ProcDefined
-> [ProcUndefined; ProcObjCAccessor]
let most_relevant_down_to_proc_kind_inclusive = function
| ProcUndefined
-> [ProcDefined; ProcObjCAccessor; ProcUndefined]
| ProcObjCAccessor
-> [ProcDefined; ProcObjCAccessor]
| ProcDefined
-> [ProcDefined]
let proc_kind_of_attr (proc_attributes: ProcAttributes.t) =
if proc_attributes.is_defined then ProcDefined
else if Option.is_some proc_attributes.objc_accessor then ProcObjCAccessor
else ProcUndefined
let should_override_attr attr1 attr2 =
(* use the source file to be more deterministic in case the same procedure name is defined in several files *)
[%compare : attributes_kind * SourceFile.t]
(proc_kind_of_attr attr1, attr1.ProcAttributes.loc.file)
(proc_kind_of_attr attr2, attr2.ProcAttributes.loc.file)
> 0
(** Module to manage the table of attributes. *)
let serializer : ProcAttributes.t Serialization.serializer =
Serialization.create_serializer Serialization.Key.attributes
let attributes_filename ~proc_kind pname_file =
let file_suffix =
match proc_kind with
| ProcDefined
-> ".attr"
| ProcObjCAccessor
-> ".objc_acc.attr"
| ProcUndefined
-> ".decl.attr"
in
pname_file ^ file_suffix
(** path to the .attr file for the given procedure in the current results directory *)
let res_dir_attr_filename ~create_dir ~proc_kind pname =
let pname_file = Typ.Procname.to_filename pname in
let attr_fname = attributes_filename ~proc_kind pname_file in
let bucket_dir =
let base = pname_file in
let len = String.length base in
if len < 2 then Filename.current_dir_name else String.sub base ~pos:(len - 2) ~len:2
in
let filename =
DB.Results_dir.path_to_filename DB.Results_dir.Abs_root
[Config.attributes_dir_name; bucket_dir; attr_fname]
in
if create_dir then DB.filename_create_dir filename ;
filename
(* Load the proc attribute for the defined filename if it exists,
otherwise try to load the declared filename. *)
let load_attr ~defined_only proc_name =
let attributes_file ~proc_kind proc_name =
Multilinks.resolve (res_dir_attr_filename ~create_dir:false ~proc_kind proc_name)
in
let min_kind = if defined_only then ProcDefined else ProcUndefined in
List.find_map (most_relevant_down_to_proc_kind_inclusive min_kind) ~f:(fun proc_kind ->
Serialization.read_from_file serializer (attributes_file ~proc_kind proc_name) )
(* Write a proc attributes to file.
If defined, delete the declared file if it exists. *)
let write_and_delete proc_name (proc_attributes: ProcAttributes.t) =
let proc_kind = proc_kind_of_attr proc_attributes in
let attributes_file proc_kind = res_dir_attr_filename ~create_dir:true ~proc_kind proc_name in
Serialization.write_to_file serializer (attributes_file proc_kind) ~data:proc_attributes ;
let upgrade_relevance less_relevant_proc_kind =
let fname_declared = DB.filename_to_string (attributes_file less_relevant_proc_kind) in
if Sys.file_exists fname_declared = `Yes then
try Unix.unlink fname_declared
with Unix.Unix_error _ -> ()
in
List.iter ~f:upgrade_relevance (least_relevant_up_to_proc_kind_exclusive proc_kind)
let store_attributes (proc_attributes: ProcAttributes.t) =
let proc_name = proc_attributes.proc_name in
let should_write =
match load_attr ~defined_only:false proc_name with
| None
-> true
| Some proc_attributes_on_disk
-> should_override_attr proc_attributes proc_attributes_on_disk
in
if should_write then write_and_delete proc_name proc_attributes
let attr_tbl = Typ.Procname.Hash.create 16
let defined_attr_tbl = Typ.Procname.Hash.create 16
let load_attributes ~cache proc_name =
try Typ.Procname.Hash.find attr_tbl proc_name
with Not_found ->
let proc_attributes = load_attr ~defined_only:false proc_name in
( match proc_attributes with
| Some attrs
-> if cache then (
Typ.Procname.Hash.add attr_tbl proc_name proc_attributes ;
if attrs.is_defined then Typ.Procname.Hash.add defined_attr_tbl proc_name proc_attributes )
| None
-> () ) ;
proc_attributes
let load_defined_attributes ~cache_none proc_name =
try Typ.Procname.Hash.find defined_attr_tbl proc_name
with Not_found ->
let proc_attributes = load_attr ~defined_only:true proc_name in
if proc_attributes <> None then (
(* procedure just got defined, replace attribute in attr_tbl with defined version *)
Typ.Procname.Hash.replace attr_tbl proc_name proc_attributes ;
Typ.Procname.Hash.add defined_attr_tbl proc_name proc_attributes )
else if cache_none then Typ.Procname.Hash.add defined_attr_tbl proc_name proc_attributes ;
proc_attributes
(** Given the name of an ObjC class, extract the type from the tenv where the class was defined. We
do this by adding a method that is unique to each class, and then finding the tenv that
corresponds to the class definition. *)
let get_correct_type_from_objc_class_name type_name =
(* ToDo: this function should return a type that includes a reference to the tenv computed by:
let class_method = Typ.Procname.get_default_objc_class_method (Typ.Name.name type_name);
switch (find_tenv_from_class_of_proc class_method) {
| Some tenv =>
*)
Some (Typ.mk (Tstruct type_name))
type t =
{num_bindings: int; num_buckets: int; max_bucket_length: int; serialized_size_kb: int option}
let to_json at =
let extra_field =
match at.serialized_size_kb with Some v -> [("serialized_size_kb", `Int v)] | None -> []
in
`Assoc
( [ ("num_bindings", `Int at.num_bindings)
; ("num_buckets", `Int at.num_buckets)
; ("max_bucket_length", `Int at.max_bucket_length) ]
@ extra_field )
let from_json json =
let open! Yojson.Basic.Util in
{ num_bindings= json |> member "num_bindings" |> to_int
; num_buckets= json |> member "num_buckets" |> to_int
; max_bucket_length= json |> member "max_bucket_length" |> to_int
; serialized_size_kb= json |> member "serialized_size_kb" |> to_option to_int }
let aggregate s =
let all_num_bindings = List.map ~f:(fun stats -> float_of_int stats.num_bindings) s in
let all_num_buckets = List.map ~f:(fun stats -> float_of_int stats.num_buckets) s in
let all_max_bucket_length = List.map ~f:(fun stats -> float_of_int stats.max_bucket_length) s in
let aggr_num_bindings = StatisticsToolbox.compute_statistics all_num_bindings in
let aggr_num_buckets = StatisticsToolbox.compute_statistics all_num_buckets in
let aggr_max_bucket_length = StatisticsToolbox.compute_statistics all_max_bucket_length in
`Assoc
[ ("num_bindings", StatisticsToolbox.to_json aggr_num_bindings)
; ("num_buckets", StatisticsToolbox.to_json aggr_num_buckets)
; ("max_bucket_length", StatisticsToolbox.to_json aggr_max_bucket_length) ]
let stats () =
let stats = Typ.Procname.Hash.stats attr_tbl in
let {Hashtbl.num_bindings; num_buckets; max_bucket_length} = stats in
let serialized_size_kb =
match Config.developer_mode with
| true
-> Some (Marshal.data_size (Marshal.to_bytes attr_tbl []) 0 / 1024)
| false
-> None
in
{num_bindings; num_buckets; max_bucket_length; serialized_size_kb}
(* Find the file where the procedure was captured, if a cfg for that file exists.
Return also a boolean indicating whether the procedure is defined in an
include file. *)
let find_file_capturing_procedure ?(cache= true) pname =
match load_attributes ~cache pname with
| None
-> None
| Some proc_attributes
-> let source_file = proc_attributes.ProcAttributes.source_file_captured in
let source_dir = DB.source_dir_from_source_file source_file in
let origin =
(* Procedure coming from include files if it has different location
than the file where it was captured. *)
match SourceFile.compare source_file proc_attributes.ProcAttributes.loc.file <> 0 with
| true
-> `Include
| false
-> `Source
in
let cfg_fname = DB.source_dir_get_internal_file source_dir ".cfg" in
let cfg_fname_exists = Sys.file_exists (DB.filename_to_string cfg_fname) = `Yes in
if cfg_fname_exists then Some (source_file, origin) else None

@@ -106,7 +106,7 @@ let save_attributes source_file cfg =
let loc' = if Location.equal loc Location.dummy then {loc with file= source_file} else loc in
{attributes with loc= loc'; source_file_captured= source_file}
in
AttributesTable.store_attributes attributes'
Attributes.store attributes'
in
List.iter ~f:save_proc (get_all_procs cfg)
@@ -419,7 +419,7 @@ let specialize_types callee_pdesc resolved_pname args =
let resolved_attributes =
{callee_attributes with formals= List.rev resolved_params; proc_name= resolved_pname}
in
AttributesTable.store_attributes resolved_attributes ;
Attributes.store resolved_attributes ;
let resolved_pdesc =
let tmp_cfg = create_cfg () in
create_proc_desc tmp_cfg resolved_attributes

@@ -767,7 +767,7 @@ let execute_alloc mk can_return_null {Builtin.pdesc; tenv; prop_; path; ret_id;
match args with
| [((Exp.Sizeof ({typ= {Typ.desc= Tstruct (ObjcClass _ as name)}} as sizeof_data) as e), _)]
-> let e' =
match AttributesTable.get_correct_type_from_objc_class_name name with
match Attributes.get_correct_type_from_objc_class_name name with
| Some struct_type
-> Exp.Sizeof {sizeof_data with typ= struct_type}
| None

@@ -16,9 +16,7 @@ let compilation_db = (lazy (CompilationDatabase.from_json_files !Config.clang_co
frontend to finish before returning *)
let try_capture (attributes: ProcAttributes.t) : ProcAttributes.t option =
let lazy cdb = compilation_db in
( if Option.is_none
(AttributesTable.load_defined_attributes ~cache_none:false attributes.proc_name)
then
( if Option.is_none (Attributes.load_defined attributes.proc_name) then
let decl_file = attributes.loc.file in
let definition_file_opt = SourceFile.of_header decl_file in
let try_compile definition_file =
@@ -35,9 +33,7 @@ let try_capture (attributes: ProcAttributes.t) : ProcAttributes.t option =
protect
~f:(fun () -> CaptureCompilationDatabase.capture_file_in_database cdb definition_file)
~finally:Timeout.resume_previous_timeout ;
if Config.debug_mode
&& Option.is_none
(AttributesTable.load_defined_attributes ~cache_none:false attributes.proc_name)
if Config.debug_mode && Option.is_none (Attributes.load_defined attributes.proc_name)
then
(* peek at the results to know if capture succeeded, but only in debug mode *)
L.(debug Capture Verbose)
@@ -62,11 +58,7 @@ let try_capture (attributes: ProcAttributes.t) : ProcAttributes.t option =
- there was a race and proc_name got captured by the time we checked whether
cfg_filename exists. In this case it's important to refetch attributes from disk because
contents may have changed (attributes file for proc_name may be there now)
- proc_name can't be captured (there is no definition we know of). In that case
result will stay None. At this point we know(?) we won't be able to find definition
for it ever so we can cache None.
Caveat: it's possible that procedure will be captured in some other unrelated file
later - infer may ignore it then.
It also relies on retry mechanism in deserialization code to deal with half-written
attributes files *)
AttributesTable.load_defined_attributes ~cache_none:true attributes.proc_name
later - infer may ignore it then. *)
Attributes.load_defined attributes.proc_name

@@ -27,11 +27,9 @@ type perf_stats =
; compactions: int
; top_heap_gb: float
; stack_kb: float
; minor_heap_kb: float
; attributes_table: AttributesTable.t }
; minor_heap_kb: float }
let to_json ps =
let attributes_table = AttributesTable.stats () in
`Assoc
[ ("rtime", `Float ps.rtime)
; ("utime", `Float ps.utime)
@@ -47,8 +45,7 @@ let to_json ps =
; ("compactions", `Int ps.compactions)
; ("top_heap_gb", `Float ps.top_heap_gb)
; ("stack_kb", `Float ps.stack_kb)
; ("minor_heap_kb", `Float ps.minor_heap_kb)
; ("attributes_table", AttributesTable.to_json attributes_table) ]
; ("minor_heap_kb", `Float ps.minor_heap_kb) ]
let from_json json =
let open! Yojson.Basic.Util in
@@ -66,8 +63,7 @@ let from_json json =
; compactions= json |> member "compactions" |> to_int
; top_heap_gb= json |> member "top_heap_gb" |> to_float
; stack_kb= json |> member "stack_kb" |> to_float
; minor_heap_kb= json |> member "minor_heap_kb" |> to_float
; attributes_table= json |> member "attributes_table" |> AttributesTable.from_json }
; minor_heap_kb= json |> member "minor_heap_kb" |> to_float }
let aggregate s =
let mk_stats f = StatisticsToolbox.compute_statistics (List.map ~f s) in
@@ -86,9 +82,6 @@ let aggregate s =
let aggr_top_heap_gb = mk_stats (fun stats -> stats.top_heap_gb) in
let aggr_stack_kb = mk_stats (fun stats -> stats.stack_kb) in
let aggr_minor_heap_kb = mk_stats (fun stats -> stats.minor_heap_kb) in
let aggr_attributes_table =
AttributesTable.aggregate (List.map ~f:(fun stats -> stats.attributes_table) s)
in
`Assoc
[ ("rtime", StatisticsToolbox.to_json aggr_rtime)
; ("utime", StatisticsToolbox.to_json aggr_utime)
@@ -104,8 +97,7 @@ let aggregate s =
; ("compactions", StatisticsToolbox.to_json aggr_compactions)
; ("top_heap_gb", StatisticsToolbox.to_json aggr_top_heap_gb)
; ("stack_kb", StatisticsToolbox.to_json aggr_stack_kb)
; ("minor_heap_kb", StatisticsToolbox.to_json aggr_minor_heap_kb)
; ("attributes_table", aggr_attributes_table) ]
; ("minor_heap_kb", StatisticsToolbox.to_json aggr_minor_heap_kb) ]
let stats () =
let words_to_kb n = n *. float_of_int (Sys.word_size / 8) /. 1024. in
@@ -116,7 +108,6 @@ let stats () =
let gc_ctrl = Gc.get () in
let exit_timeofday = Unix.gettimeofday () in
let exit_times = Unix.times () in
let at = AttributesTable.stats () in
{ rtime= exit_timeofday -. Utils.initial_timeofday
; utime= exit_times.tms_utime -. Utils.initial_times.tms_utime
; stime= exit_times.tms_stime -. Utils.initial_times.tms_stime
@@ -131,8 +122,7 @@ let stats () =
; compactions= gc_stats.compactions
; top_heap_gb= words_to_gb (float_of_int gc_stats.top_heap_words)
; stack_kb= words_to_kb (float_of_int gc_stats.stack_size)
; minor_heap_kb= words_to_kb (float_of_int gc_ctrl.minor_heap_size)
; attributes_table= at }
; minor_heap_kb= words_to_kb (float_of_int gc_ctrl.minor_heap_size) }
let report_at_exit file () =
try

@@ -39,6 +39,8 @@ let run t =
List.iter ~f:(fun f -> f ()) t.closures ;
Queue.iter ~f:(fun closure -> closure ()) t.continuations
let fork_protect ~f x = L.reset_formatters () ; ResultsDir.new_database_connection () ; f x
module Runner = struct
type runner = {pool: ProcessPool.t; all_continuations: closure Queue.t}
@@ -48,7 +50,7 @@
let pool = runner.pool in
Queue.enqueue_all runner.all_continuations (Queue.to_list tasks.continuations) ;
List.iter
~f:(fun x -> ProcessPool.start_child ~f:(fun f -> L.reset_formatters () ; f ()) ~pool x)
~f:(fun x -> ProcessPool.start_child ~f:(fun f -> fork_protect ~f ()) ~pool x)
tasks.closures
let complete runner =

@@ -32,6 +32,9 @@ val empty : t
val run : t -> unit
(** Run the closures and continuation *)
val fork_protect : f:('a -> 'b) -> 'a -> 'b
(** does the bookkeeping necessary to safely execute an infer function [f] after a call to fork(2) *)
module Runner : sig
(** A runner accepts new tasks repeatedly for parallel execution *)
type runner

@@ -90,7 +90,7 @@ let add_cg (exe_env: t) (source_dir: DB.source_dir) =
let defined_procs = Cg.get_defined_nodes cg in
let duplicate_procs_to_print =
List.filter_map defined_procs ~f:(fun pname ->
match AttributesTable.find_file_capturing_procedure ~cache:false pname with
match Attributes.find_file_capturing_procedure pname with
| None
-> None
| Some (source_captured, origin)
@@ -115,7 +115,7 @@ let get_file_data exe_env pname =
try Some (Typ.Procname.Hash.find exe_env.proc_map pname)
with Not_found ->
let source_file_opt =
match AttributesTable.load_attributes ~cache:true pname with
match Attributes.load pname with
| None
-> L.(debug Analysis Medium) "can't find tenv_cfg_object for %a@." Typ.Procname.pp pname ;
None

@@ -8,7 +8,6 @@
*)
open! IStd
open! PVariant
(** Top-level driver that orchestrates build system integration, frontends, backend, and
reporting *)
@@ -25,58 +24,24 @@ let run driver_mode =
analyze_and_report driver_mode ~changed_files ;
run_epilogue driver_mode
let results_dir_dir_markers =
List.map ~f:(Filename.concat Config.results_dir)
[Config.attributes_dir_name; Config.captured_dir_name; Config.specs_dir_name]
let is_results_dir () =
let not_found = ref "" in
let has_all_markers =
List.for_all results_dir_dir_markers ~f:(fun d ->
Sys.is_directory d = `Yes
||
(not_found := d ;
false) )
in
Result.ok_if_true has_all_markers ~error:(Printf.sprintf "'%s/' not found" !not_found)
let create_results_dir () = List.iter ~f:Unix.mkdir_p results_dir_dir_markers ; L.setup_log_file ()
let assert_results_dir advice =
Result.iter_error (is_results_dir ()) ~f:(fun err ->
L.(die UserError)
"ERROR: No results directory at '%s': %s@\nERROR: %s@." Config.results_dir err advice ) ;
L.setup_log_file ()
let remove_results_dir () =
(* Look if file exists, it may not be a directory but that will be caught by the call to [is_results_dir]. If it's an empty directory, leave it alone. This allows users to create a temporary directory for the infer results without infer removing it to recreate it, which could be racy. *)
if Sys.file_exists Config.results_dir = `Yes && not (Utils.directory_is_empty Config.results_dir)
then (
if not Config.force_delete_results_dir then
Result.iter_error (is_results_dir ()) ~f:(fun err ->
L.(die UserError)
"ERROR: '%s' exists but does not seem to be an infer results directory: %s@\nERROR: Please delete '%s' and try again@."
Config.results_dir err Config.results_dir ) ;
Utils.rmtree Config.results_dir )
let setup_results_dir () =
let setup () =
match Config.command with
| Analyze
-> assert_results_dir "have you run capture before?"
-> ResultsDir.assert_results_dir "have you run capture before?"
| Report | ReportDiff
-> create_results_dir ()
-> ResultsDir.create_results_dir ()
| Diff
-> remove_results_dir () ; create_results_dir ()
-> ResultsDir.remove_results_dir () ; ResultsDir.create_results_dir ()
| Capture | Compile | Run
-> let driver_mode = Lazy.force Driver.mode_from_command_line in
if not
( Driver.(equal_mode driver_mode Analyze)
||
Config.(buck || continue_capture || infer_is_clang || infer_is_javac || reactive_mode) )
then remove_results_dir () ;
create_results_dir ()
then ResultsDir.remove_results_dir () ;
ResultsDir.create_results_dir ()
| Explore
-> assert_results_dir "please run an infer analysis first"
-> ResultsDir.assert_results_dir "please run an infer analysis first"
let log_environment_info () =
L.environment_info "CWD = %s@\n" (Sys.getcwd ()) ;
@@ -96,9 +61,10 @@ let () =
| Error e
-> print_endline e ; L.exit 3 ) ;
if Config.print_builtins then Builtin.print_and_exit () ;
setup_results_dir () ;
setup () ;
log_environment_info () ;
if Config.debug_mode then L.progress "Logs in %s@." (Config.results_dir ^/ Config.log_file) ;
if Config.debug_mode && CLOpt.is_originator then
L.progress "Logs in %s@." (Config.results_dir ^/ Config.log_file) ;
match Config.command with
| Analyze
-> let pp_cluster_opt fmt = function

@@ -20,8 +20,6 @@ let check_timestamp_of_symlinks = true
let buck_out () = Filename.concat Config.project_root "buck-out"
let infer_deps () = Filename.concat Config.results_dir Config.buck_infer_deps_file_name
let modified_targets = ref String.Set.empty
let record_modified_targets_from_file file =
@@ -31,10 +29,9 @@ let record_modified_targets_from_file file =
| Error error
-> L.user_error "Failed to read modified targets file '%s': %s@." file error ; ()
type stats =
{mutable files_linked: int; mutable files_multilinked: int; mutable targets_merged: int}
type stats = {mutable files_linked: int; mutable targets_merged: int}
let empty_stats () = {files_linked= 0; files_multilinked= 0; targets_merged= 0}
let empty_stats () = {files_linked= 0; targets_merged= 0}
let link_exists s =
try
@@ -42,31 +39,6 @@ let link_exists s =
true
with Unix.Unix_error _ -> false
(* Table mapping directories to multilinks.
Used for the hashed directories where attribute files are stored. *)
let multilinks_dir_table = String.Table.create ~size:16 ()
(* Add a multilink for attributes to the internal per-directory table.
The files will be created by create_multilinks. *)
let add_multilink_attr ~stats src dst =
let attr_dir = Filename.dirname dst in
let attr_dir_name = Filename.basename attr_dir in
let multilinks =
try String.Table.find_exn multilinks_dir_table attr_dir_name
with Not_found ->
let multilinks =
match Multilinks.read ~dir:attr_dir with
| Some multilinks
-> (* incremental merge: start from the existing file on disk *)
multilinks
| None
-> Multilinks.create ()
in
String.Table.set multilinks_dir_table ~key:attr_dir_name ~data:multilinks ; multilinks
in
Multilinks.add multilinks src ;
stats.files_multilinked <- stats.files_multilinked + 1
let create_link ~stats src dst =
if link_exists dst then Unix.unlink dst ;
Unix.symlink ~src ~dst ;
@@ -78,15 +50,6 @@ let create_link ~stats src dst =
Unix.utimes src ~access:near_past ~modif:near_past ;
stats.files_linked <- stats.files_linked + 1
let create_multilinks () =
let do_dir ~key:dir ~data:multilinks =
let attributes_dir =
Filename.concat (Filename.concat Config.results_dir Config.attributes_dir_name) dir
in
Multilinks.write multilinks ~dir:attributes_dir
in
String.Table.iteri ~f:do_dir multilinks_dir_table
(** Create symbolic links recursively from the destination to the source.
Replicate the structure of the source directory in the destination,
with files replaced by links to the source. *)
@@ -101,7 +64,6 @@ let rec slink ~stats ~skiplevels src dst =
(Filename.concat dst item))
items )
else if skiplevels > 0 then ()
else if Filename.check_suffix dst ".attr" then add_multilink_attr ~stats src dst
else create_link ~stats src dst
(** Determine if the destination should link to the source.
@@ -177,13 +139,13 @@ let process_merge_file deps_file =
-> List.iter ~f:process_line lines
| Error error
-> L.internal_error "Couldn't read deps file '%s': %s" deps_file error ) ;
create_multilinks () ;
L.progress "Targets merged: %d@\n" stats.targets_merged ;
L.progress "Files linked: %d@\n" stats.files_linked ;
L.progress "Files multilinked: %d@\n" stats.files_multilinked
L.progress "Files linked: %d@\n" stats.files_linked
let merge_captured_targets () =
let time0 = Unix.gettimeofday () in
L.progress "Merging captured Buck targets...@\n%!" ;
process_merge_file (infer_deps ()) ;
let infer_deps_file = Config.(results_dir ^/ buck_infer_deps_file_name) in
MergeResults.merge_buck_flavors_results infer_deps_file ;
process_merge_file infer_deps_file ;
L.progress "Merging captured Buck targets took %.03fs@\n%!" (Unix.gettimeofday () -. time0)

@@ -407,7 +407,7 @@ let write_html_proc source proof_cover table_nodes_at_linenum global_err_log pro
let process_proc =
Procdesc.is_defined proc_desc && SourceFile.equal proc_loc.Location.file source
&&
match AttributesTable.find_file_capturing_procedure ~cache:true proc_name with
match Attributes.find_file_capturing_procedure proc_name with
| None
-> true
| Some (source_captured, _)

@@ -629,7 +629,7 @@ let proc_is_library proc_attributes =
If no attributes can be found, return None.
*)
let proc_resolve_attributes proc_name =
let from_attributes_table () = AttributesTable.load_attributes ~cache:true proc_name in
let from_attributes_table () = Attributes.load proc_name in
let from_specs () =
match get_summary proc_name with Some summary -> Some summary.attributes | None -> None
in

@@ -63,7 +63,7 @@ let check_block_retain_cycle tenv caller_pname prop block_nullified =
let mblock = Pvar.get_name block_nullified in
let block_pname = Typ.Procname.mangled_objc_block (Mangled.to_string mblock) in
let block_captured =
match AttributesTable.load_attributes ~cache:true block_pname with
match Attributes.load block_pname with
| Some attributes
-> fst (List.unzip attributes.ProcAttributes.captured)
| None
@@ -530,7 +530,7 @@ let method_exists right_proc_name methods =
(* ObjC/C++ case : The attribute map will only exist when we have code for the method or
the method has been called directly somewhere. It can still be that this is not the
case but we have a model for the method. *)
match AttributesTable.load_attributes ~cache:true right_proc_name with
match Attributes.load right_proc_name with
| Some attrs
-> attrs.ProcAttributes.is_defined
| None
@@ -1029,7 +1029,7 @@ let execute_load ?(report_deref_errors= true) pname pdesc tenv id rhs_exp typ lo
[Prop.conjoin_eq tenv (Exp.Var id) undef prop_]
let load_ret_annots pname =
match AttributesTable.load_attributes ~cache:true pname with
match Attributes.load pname with
| Some attrs
-> let ret_annots, _ = attrs.ProcAttributes.method_annotation in
ret_annots
@@ -1265,7 +1265,7 @@ let rec sym_exec tenv current_pdesc _instr (prop_: Prop.normal Prop.t) path
| Some attrs, Typ.Procname.ObjC_Cpp _
-> Some attrs
| None, Typ.Procname.ObjC_Cpp _
-> AttributesTable.load_attributes ~cache:true resolved_pname
-> Attributes.load resolved_pname
| _
-> None
in
@@ -1483,7 +1483,7 @@ and add_constraints_on_actuals_by_ref tenv prop actuals_by_ref callee_pname call
in
let non_const_actuals_by_ref =
let is_not_const (e, _, i) =
match AttributesTable.load_attributes ~cache:true callee_pname with
match Attributes.load callee_pname with
| Some attrs
-> let is_const = List.mem ~equal:Int.equal attrs.ProcAttributes.const_formals i in
if is_const then (

@@ -95,8 +95,6 @@ let anonymous_block_prefix = "__objc_anonymous_block_"
let assign = "<\"Assign\">"
let attributes_dir_name = "attributes"
let backend_stats_dir_name = "backend_stats"
(** If true, a procedure call succeeds even when there is a bound error this mimics what

@@ -85,8 +85,6 @@ val append_buck_flavors : string list
val assign : string
val attributes_dir_name : string
val backend_stats_dir_name : string
val bin_dir : string

@@ -220,7 +220,6 @@ module Results_dir = struct
if SourceFile.is_invalid source then L.(die InternalError) "Invalid source file passed" ;
Utils.create_dir Config.results_dir ;
Utils.create_dir specs_dir ;
Utils.create_dir (path_to_filename Abs_root [Config.attributes_dir_name]) ;
Utils.create_dir (path_to_filename Abs_root [Config.captured_dir_name]) ;
Utils.create_dir (path_to_filename (Abs_source_dir source) [])

@@ -0,0 +1,99 @@
(*
* Copyright (c) 2017 - present Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*)
open! IStd
module type Table = sig
type key
type value
val table : string
end
module type Blob = sig
module Table : Table
val blob_of_key : Table.key -> Sqlite3.Data.t
val blob_of_value : Table.value -> Sqlite3.Data.t
val key_of_blob : Sqlite3.Data.t -> Table.key option
val value_of_blob : Sqlite3.Data.t -> Table.value option
end
module type S = sig
include Blob
val replace : Table.key -> Table.value -> unit
val find : Table.key -> Table.value option
val delete : Table.key -> unit
end
(* The functor is mostly here to provide a modicum of type safety around blobing/unblobing *)
module Make (Table : Table) : S with module Table = Table = struct
module Unsafe : Blob with module Table = Table = struct
module Table = Table
let blob x = Sqlite3.Data.BLOB (Marshal.to_string x [])
let unblob = function
| Sqlite3.Data.BLOB b
-> Some (Marshal.from_string b 0)
| Sqlite3.Data.NULL
-> None
| _
-> assert false
let blob_of_key = blob
let blob_of_value = blob
let key_of_blob = unblob
let value_of_blob = unblob
end
(* cannot mix, e.g., blob_key and blob_value now *)
include Unsafe
let replace =
let replace_statement =
Printf.sprintf "REPLACE INTO %s(key, value) VALUES(:k, :v)" Table.table
in
fun key value ->
let db = ResultsDir.get_database () in
let replace_stmt = Sqlite3.prepare db replace_statement in
SqliteUtils.check_sqlite_error ~log:"replace bind key"
(Sqlite3.bind replace_stmt 1 (blob_of_key key)) ;
SqliteUtils.check_sqlite_error ~log:"replace bind value"
(Sqlite3.bind replace_stmt 2 (blob_of_value value)) ;
SqliteUtils.sqlite_unit_step ~log:"KeyValue.replace" replace_stmt
let find =
let select_statement = Printf.sprintf "SELECT value FROM %s WHERE key = :k" Table.table in
fun key ->
let db = ResultsDir.get_database () in
let select_stmt = Sqlite3.prepare db select_statement in
SqliteUtils.check_sqlite_error ~log:"insert bind key"
(Sqlite3.bind select_stmt 1 (blob_of_key key)) ;
Option.bind ~f:value_of_blob
(SqliteUtils.sqlite_result_step ~log:"KeyValue.find" select_stmt)
let delete =
let delete_statement = Printf.sprintf "DELETE FROM %s WHERE key = :k" Table.table in
fun key ->
let db = ResultsDir.get_database () in
let delete_stmt = Sqlite3.prepare db delete_statement in
SqliteUtils.check_sqlite_error ~log:"delete bind key"
(Sqlite3.bind delete_stmt 1 (blob_of_key key)) ;
SqliteUtils.sqlite_unit_step ~log:"KeyValue.delete" delete_stmt
end

@@ -0,0 +1,44 @@
(*
* Copyright (c) 2017 - present Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*)
open! IStd
(** key/value database information *)
module type Table = sig
type key
type value
val table : string
end
(** Key/value store backed by the ResultsDir database *)
module type S = sig
module Table : Table
val blob_of_key : Table.key -> Sqlite3.Data.t
(** turn a key into a [Sqlite3.Data.BLOB] *)
val blob_of_value : Table.value -> Sqlite3.Data.t
(** turn a value into a [Sqlite3.Data.BLOB] *)
val key_of_blob : Sqlite3.Data.t -> Table.key option
(** turn a [Sqlite3.Data.BLOB] (or [Sqlite3.Data.NULL]) back into a key *)
val value_of_blob : Sqlite3.Data.t -> Table.value option
(** turn a [Sqlite3.Data.BLOB] (or [Sqlite3.Data.NULL]) back into a value *)
val replace : Table.key -> Table.value -> unit
val find : Table.key -> Table.value option
val delete : Table.key -> unit
end
module Make (Table : Table) : S with module Table = Table
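
As a sanity check of the [Blob] half of this interface (illustrative only; `IntStore` and its `int_store` table are hypothetical):

module IntStore = KeyValue.Make (struct
  type key = string

  type value = int

  let table = "int_store"
end)

let _check_roundtrip v =
  (* a value marshaled to a [Sqlite3.Data.BLOB] unmarshals back to an equal value *)
  match IntStore.value_of_blob (IntStore.blob_of_value v) with
  | Some v'
   -> assert (Int.equal v v')
  | None
   -> assert false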

@@ -0,0 +1,52 @@
(*
* Copyright (c) 2017 - present Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*)
open! IStd
module L = Logging
let all_attributes ~into ~db_name =
let select_stmt =
Sqlite3.prepare into
(Printf.sprintf "SELECT value FROM %s.%s" db_name ResultsDir.attributes_table)
in
List.filter_map ~f:(Option.bind ~f:Attributes.Store.value_of_blob)
(SqliteUtils.sqlite_result_rev_list_step ~log:"select" select_stmt)
let merge_attributes_table ~into ~db_name =
let rows = all_attributes ~into ~db_name in
(* no need to wrap this in a single transaction because we open the DB with synchronous=OFF *)
List.iter rows ~f:Attributes.store
let merge ~into db =
let db_name = "db" in
SqliteUtils.check_sqlite_error ~fatal:true ~log:"attaching db"
(Sqlite3.exec into (Printf.sprintf "ATTACH '%s' AS %s" db db_name)) ;
merge_attributes_table ~into ~db_name ;
SqliteUtils.check_sqlite_error ~fatal:true ~log:"detaching db"
(Sqlite3.exec into (Printf.sprintf "DETACH %s" db_name)) ;
()
let merge_buck_flavors_results infer_deps_file =
let into = ResultsDir.get_database () in
let one_line line =
match String.split ~on:'\t' line with
| [_; _; target_results_dir]
-> let infer_out_src =
if Filename.is_relative target_results_dir then
Filename.dirname (Config.project_root ^/ "buck-out") ^/ target_results_dir
else target_results_dir
in
merge ~into (infer_out_src ^/ ResultsDir.database_filename)
| _
-> assert false
in
match Utils.read_file infer_deps_file with
| Ok lines
-> List.iter ~f:one_line lines
| Error error
-> L.internal_error "Couldn't read deps file '%s': %s" infer_deps_file error

@@ -0,0 +1,13 @@
(*
* Copyright (c) 2017 - present Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*)
open! IStd
val merge_buck_flavors_results : string -> unit
(** Merge the results from sub-invocations of infer inside buck-out/. Takes as argument the infer_deps file. *)
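
For reference, a sketch of the infer-deps line format that `merge_buck_flavors_results` consumes (the target and hash below are made up):

let _example () =
  (* three tab-separated fields: target, target hash, target results directory *)
  let line = "//foo:bar\t0123abcd\tbuck-out/gen/foo/bar#infer/infer-out" in
  match String.split ~on:'\t' line with
  | [_target; _hash; target_results_dir]
   -> (* [merge] then attaches [target_results_dir ^/ "results.db"] and copies
         its attributes into the main database *)
      ignore (Filename.is_relative target_results_dir)
  | _
   -> assert false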

@@ -0,0 +1,112 @@
(*
* Copyright (c) 2017 - present Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*)
open! IStd
open! PVariant
module L = Logging
let database : Sqlite3.db option ref = ref None
let () =
Epilogues.register "closing results database" ~f:(fun () ->
Option.iter !database ~f:SqliteUtils.db_close )
let database_filename = "results.db"
let database_fullpath = Config.results_dir ^/ database_filename
let attributes_table = "attributes"
let results_dir_dir_markers =
List.map ~f:(Filename.concat Config.results_dir) [Config.captured_dir_name; Config.specs_dir_name]
let is_results_dir ~check_correct_version () =
let not_found = ref "" in
let has_all_markers =
List.for_all results_dir_dir_markers ~f:(fun d ->
Sys.is_directory d = `Yes
||
(not_found := d ^ "/" ;
false) )
&& ( not check_correct_version || Sys.is_file database_fullpath = `Yes
||
(not_found := database_fullpath ;
false) )
in
Result.ok_if_true has_all_markers ~error:(Printf.sprintf "'%s' not found" !not_found)
let remove_results_dir () =
(* Look if file exists, it may not be a directory but that will be caught by the call to [is_results_dir]. If it's an empty directory, leave it alone. This allows users to create a temporary directory for the infer results without infer removing it to recreate it, which could be racy. *)
if Sys.file_exists Config.results_dir = `Yes && not (Utils.directory_is_empty Config.results_dir)
then (
if not Config.force_delete_results_dir then
Result.iter_error (is_results_dir ~check_correct_version:false ()) ~f:(fun err ->
L.(die UserError)
"ERROR: '%s' exists but does not seem to be an infer results directory: %s@\nERROR: Please delete '%s' and try again@."
Config.results_dir err Config.results_dir ) ;
Utils.rmtree Config.results_dir )
let create_attributes_table db =
SqliteUtils.exec db ~log:"initializing results DB"
~stmt:
(Printf.sprintf "CREATE TABLE IF NOT EXISTS %s (key BLOB PRIMARY KEY ,value BLOB)"
attributes_table)
let create_db () =
let temp_db = Filename.temp_file ~in_dir:Config.results_dir database_filename ".tmp" in
let db = Sqlite3.db_open ~mutex:`FULL temp_db in
create_attributes_table db ;
(* This should be the default but better be sure, otherwise we cannot access the database concurrently. This has to happen before setting WAL mode. *)
SqliteUtils.exec db ~log:"locking mode=NORMAL" ~stmt:"PRAGMA locking_mode=NORMAL" ;
(* Write-ahead log is much faster than other journalling modes. *)
SqliteUtils.exec db ~log:"journal_mode=WAL" ~stmt:"PRAGMA journal_mode=WAL" ;
SqliteUtils.db_close db ;
try Sys.rename temp_db database_fullpath
with Sys_error _ -> (* lost the race, doesn't matter *) ()
let new_database_connection () =
Option.iter !database ~f:SqliteUtils.db_close ;
let db = Sqlite3.db_open ~mode:`NO_CREATE ~cache:`PRIVATE ~mutex:`FULL database_fullpath in
Sqlite3.busy_timeout db 1000 ;
(* Higher level of "synchronous" are only useful to guarantee that the db will not be corrupted if the machine crashes for some reason before the data has been actually written to disk. We do not need this kind of guarantee for infer results as one can always rerun infer if interrupted. *)
SqliteUtils.exec db ~log:"synchronous=OFF" ~stmt:"PRAGMA synchronous=OFF" ;
database := Some db
let create_results_dir () =
Unix.mkdir_p Config.results_dir ;
L.setup_log_file () ;
if Sys.file_exists database_fullpath <> `Yes then (
Logging.progress "creating results database@." ; create_db () ) ;
new_database_connection () ;
List.iter ~f:Unix.mkdir_p results_dir_dir_markers
let assert_results_dir advice =
Result.iter_error (is_results_dir ~check_correct_version:true ()) ~f:(fun err ->
L.(die UserError)
"ERROR: No results directory at '%s': %s@\nERROR: %s@." Config.results_dir err advice ) ;
L.setup_log_file () ;
new_database_connection ()
let get_database () = Option.value_exn !database
let reset_attributes_table () =
let db = get_database () in
SqliteUtils.exec db ~log:"drop attributes table"
~stmt:(Printf.sprintf "DROP TABLE %s" attributes_table) ;
create_attributes_table db
let delete_capture_and_analysis_data () =
reset_attributes_table () ;
let dirs_to_delete =
List.map ~f:(Filename.concat Config.results_dir) Config.([captured_dir_name; specs_dir_name])
in
List.iter ~f:Utils.rmtree dirs_to_delete ; List.iter ~f:Unix.mkdir_p dirs_to_delete ; ()
let canonicalize_db () =
let db = get_database () in
SqliteUtils.exec db ~log:"running VACUUM" ~stmt:"VACUUM"
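
The connection settings above, distilled into a standalone sketch (illustrative; "/tmp/scratch.db" is a throwaway path):

let _sketch () =
  let db = Sqlite3.db_open ~mutex:`FULL "/tmp/scratch.db" in
  (* write-ahead logging: the fastest journaling mode for this workload *)
  SqliteUtils.exec db ~log:"journal_mode=WAL" ~stmt:"PRAGMA journal_mode=WAL" ;
  (* trade crash-durability for speed; infer results can always be regenerated *)
  SqliteUtils.exec db ~log:"synchronous=OFF" ~stmt:"PRAGMA synchronous=OFF" ;
  (* wait up to 1s on a locked database instead of failing immediately *)
  Sqlite3.busy_timeout db 1000 ;
  SqliteUtils.db_close db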

@@ -0,0 +1,40 @@
(*
* Copyright (c) 2017 - present Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*)
open! IStd
val database_filename : string
(** the relative path to the database from the results directory *)
val get_database : unit -> Sqlite3.db
(** The results database. You should always use this function to access the database, as the connection to it may change during the execution (see [new_database_connection]). *)
val attributes_table : string
(** the name of the table of proc names to their proc attributes *)
val reset_attributes_table : unit -> unit
(** zero out the attributes table *)
val assert_results_dir : string -> unit
(** Check that the results dir exists and set up logging, the database, etc. *)
val remove_results_dir : unit -> unit
(** Recursively delete the results directory. *)
val create_results_dir : unit -> unit
(** Create the results dir and set up logging, the database, etc. *)
val new_database_connection : unit -> unit
(** Closes the previous connection to the database (if any), and opens a new one. Needed after calls to fork(2). *)
val delete_capture_and_analysis_data : unit -> unit
(** delete all results from the capture and the analysis *)
val canonicalize_db : unit -> unit
(** put the database on disk in deterministic form *)
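
Taken together, the expected lifecycle (an illustrative sketch, not prescribed by this diff):

(* process that sets up the results dir: creates results.db if needed and opens a connection *)
let () = ResultsDir.create_results_dir ()

(* processes that require an existing results dir: *)
let () = ResultsDir.assert_results_dir "have you run capture before?"

(* children must re-open their own connection after fork(2): *)
let () = ResultsDir.new_database_connection ()

(* before archiving the results dir, make the database deterministic: *)
let () = ResultsDir.canonicalize_db ()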

@@ -0,0 +1,72 @@
(*
* Copyright (c) 2017 - present Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*)
open! IStd
module L = Logging
exception Error of string
let error ~fatal fmt =
(if fatal then Format.kasprintf (fun err -> raise (Error err)) else L.internal_error) fmt
let check_sqlite_error ?(fatal= false) ~log rc =
match (rc : Sqlite3.Rc.t) with
| OK | ROW
-> ()
| _ as err
-> error ~fatal "%s: %s" log (Sqlite3.Rc.to_string err)
let exec db ~log ~stmt =
(* Call [check_sqlite_error] with [fatal:true] and catch exceptions to rewrite the error message. This avoids allocating the error string when not needed. *)
try check_sqlite_error ~fatal:true ~log (Sqlite3.exec db stmt)
with Error err -> error ~fatal:true "exec: %s" err
let finalize ~log stmt =
try check_sqlite_error ~fatal:true ~log (Sqlite3.finalize stmt) with
| Error err
-> error ~fatal:true "finalize: %s" err
| Sqlite3.Error err
-> error ~fatal:true "finalize: %s: %s" log err
let sqlite_result_rev_list_step ~log stmt =
let rec aux rev_results =
match Sqlite3.step stmt with
| Sqlite3.Rc.ROW
-> (* the operation returned a result, get it *)
let value = Some (Sqlite3.column stmt 0) in
aux (value :: rev_results)
| DONE
-> rev_results
| err
-> L.die InternalError "%s: %s" log (Sqlite3.Rc.to_string err)
in
protect ~finally:(fun () -> finalize ~log stmt) ~f:(fun () -> aux [])
let sqlite_result_step ~log stmt =
match sqlite_result_rev_list_step ~log stmt with
| []
-> None
| [x]
-> x
| l
-> L.die InternalError "%s: zero or one result expected, got %d instead" log (List.length l)
let sqlite_unit_step ~log stmt =
match sqlite_result_rev_list_step ~log stmt with
| []
-> ()
| l
-> L.die InternalError "%s: exactly zero result expected, got %d instead" log (List.length l)
let db_close db =
if not (Sqlite3.db_close db) then
raise
(Error
(Printf.sprintf "closing: %s (%s)"
(Sqlite3.errcode db |> Sqlite3.Rc.to_string)
(Sqlite3.errmsg db)))

@@ -0,0 +1,35 @@
(*
* Copyright (c) 2017 - present Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*)
open! IStd
(** The functions in this module tend to raise more often than their counterparts in [Sqlite3]. In particular, they may raise if the [Sqlite3.Rc.t] result of certain operations is unexpected. *)
exception Error of string
val check_sqlite_error : ?fatal:bool -> log:string -> Sqlite3.Rc.t -> unit
(** Assert that the result is [Sqlite3.Rc.OK] or [Sqlite3.Rc.ROW]. If it is not, raise [Error] when [fatal] is set, otherwise log the error and proceed. *)
val exec : Sqlite3.db -> log:string -> stmt:string -> unit
(** Execute the given Sqlite [stmt] and assert that it resulted in [Sqlite3.Rc.OK]. Otherwise, fail similarly to [check_sqlite_error ~fatal:true]. *)
val finalize : log:string -> Sqlite3.stmt -> unit
(** Finalize the given [stmt]. Raises [Error] on failure. *)
val sqlite_result_rev_list_step : log:string -> Sqlite3.stmt -> Sqlite3.Data.t option list
(** Return a reversed list of results obtained by repeatedly stepping through [stmt] and saving only column 0 of each returned row (all that's been needed so far). *)
val sqlite_result_step : log:string -> Sqlite3.stmt -> Sqlite3.Data.t option
(** Same as [sqlite_result_rev_list_step] but asserts that at most one result is returned. *)
val sqlite_unit_step : log:string -> Sqlite3.stmt -> unit
(** Same as [sqlite_result_rev_list_step] but asserts that no result is returned. *)
val db_close : Sqlite3.db -> unit
(** Close the given database and assert that closing was effective, raising [Error] if not. *)
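
A small usage sketch of these helpers (illustrative; assumes the connection and attributes table set up by ResultsDir):

let _count_attributes () =
  let db = ResultsDir.get_database () in
  let stmt = Sqlite3.prepare db "SELECT COUNT(*) FROM attributes" in
  (* COUNT(*) yields exactly one row; the helper also finalizes [stmt] *)
  match SqliteUtils.sqlite_result_step ~log:"count attributes" stmt with
  | Some (Sqlite3.Data.INT n)
   -> n
  | _
   -> 0L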

@@ -658,7 +658,7 @@ let add_default_method_for_class trans_unit_ctx class_name decl_info =
in
let proc_name = Typ.Procname.get_default_objc_class_method class_name in
let attrs = {(ProcAttributes.default proc_name Config.Clang) with loc} in
AttributesTable.store_attributes attrs
Attributes.store attrs
let get_procname_from_cpp_lambda context dec =
match dec with

@@ -56,13 +56,6 @@ let gen_previous_driver_mode script =
(String.concat ~sep:" " command) ;
Driver.mode_of_build_command command
let delete_capture_and_analysis_artifacts () =
let dirs_to_delete =
List.map ~f:(Filename.concat Config.results_dir)
Config.([attributes_dir_name; captured_dir_name; specs_dir_name])
in
List.iter ~f:Utils.rmtree dirs_to_delete ; List.iter ~f:Unix.mkdir_p dirs_to_delete ; ()
let diff driver_mode =
Driver.run_prologue driver_mode ;
let changed_files = Driver.read_config_changed_files () in
@@ -70,7 +63,7 @@ let diff driver_mode =
Driver.analyze_and_report ~suppress_console_report:true driver_mode ~changed_files ;
let current_report = Some (save_report Current) in
(* Some files in the current checkout may be deleted in the old checkout. If we kept the results of the previous capture and analysis around, we would report issues on these files again in the previous checkout, which is wrong. Do not do anything too smart for now and just delete all results from the analysis of the current checkout. *)
delete_capture_and_analysis_artifacts () ;
ResultsDir.delete_capture_and_analysis_data () ;
(* TODO(t15553258) bail if nothing to analyze (configurable, some people might care about bugs
fixed more than about time to analyze) *)
checkout Previous ;

@@ -118,11 +118,11 @@ let clean_results_dir () =
let should_delete_dir =
let dirs_to_delete =
let open Config in
backend_stats_dir_name
:: classnames_dir_name
:: frontend_stats_dir_name
:: multicore_dir_name
:: reporting_stats_dir_name :: (if Config.flavors then [] else [attributes_dir_name])
[ backend_stats_dir_name
; classnames_dir_name
; frontend_stats_dir_name
; multicore_dir_name
; reporting_stats_dir_name ]
in
List.mem ~equal:String.equal dirs_to_delete
in
@@ -137,7 +137,7 @@ let clean_results_dir () =
&& ( List.mem ~equal:String.equal files_to_delete (Filename.basename name)
|| List.exists ~f:(Filename.check_suffix name) suffixes_to_delete )
in
let rec clean name =
let rec delete_temp_results name =
let rec cleandir dir =
match Unix.readdir_opt dir with
| Some entry
@@ -145,7 +145,7 @@
else if not
( String.equal entry Filename.current_dir_name
|| String.equal entry Filename.parent_dir_name )
then clean (name ^/ entry) ;
then delete_temp_results (name ^/ entry) ;
cleandir dir
(* next entry *)
| None
@@ -160,7 +160,11 @@ let clean_results_dir () =
| exception Unix.Unix_error (Unix.ENOENT, _, _)
-> ()
in
clean Config.results_dir
delete_temp_results Config.results_dir ;
if not Config.flavors then
(* we do not need to keep the capture data in Buck/Java mode *)
ResultsDir.reset_attributes_table () ;
ResultsDir.canonicalize_db ()
let check_captured_empty mode =
let clean_command_opt = clean_compilation_command mode in

@@ -12,7 +12,24 @@ open! IStd
(** entry points for top-level functionalities such as capture under various build systems,
analysis, and reporting *)
type build_system
type build_system =
| BAnalyze
| BAnt
| BBuck
| BClang
| BGradle
| BJava
| BJavac
| BMake
| BMvn
| BNdk
| BPython
| BXcode
[@@deriving compare]
val equal_build_system : build_system -> build_system -> bool
val string_of_build_system : build_system -> string
(** based on the build_system and options passed to infer, we run in different driver modes *)
type mode =
@@ -30,6 +47,8 @@ type mode =
val equal_mode : mode -> mode -> bool
val pp_mode : Format.formatter -> mode -> unit
val mode_from_command_line : mode Lazy.t
(** driver mode computed from the command-line arguments and settings in Config *)

@@ -59,6 +59,7 @@ let common_libraries =
; "extlib"
; "oUnit"
; "parmap"
; "sqlite3"
; "str"
; "unix"
; "xmlm"

@@ -11,10 +11,23 @@ open! IStd
(** module for running OCaml unit tests *)
let rec mk_test_fork_proof test =
let open OUnitTest in
match test with
| TestCase (length, f)
-> TestCase (length, Tasks.fork_protect ~f)
| TestList l
-> TestList (List.map ~f:mk_test_fork_proof l)
| TestLabel (label, test)
-> TestLabel (label, mk_test_fork_proof test)
let () =
ResultsDir.create_results_dir () ;
let open OUnit2 in
let tests =
[ AbstractInterpreterTests.tests
(* OUnit runs tests in parallel using fork(2) *)
List.map ~f:mk_test_fork_proof
( [ AbstractInterpreterTests.tests
; AccessPathTests.tests
; AccessTreeTests.tests
; AddressTakenTests.tests
@@ -27,7 +40,7 @@ let () =
; StacktraceTests.tests
; TaintTests.tests
; TraceTests.tests ]
@ ClangTests.tests
@ ClangTests.tests )
in
let test_suite = "all" >::: tests in
OUnit2.run_test_tt_main test_suite

@@ -40,6 +40,7 @@ depends: [
"parmap" {>="1.0-rc8"}
"ppx_deriving" {>="4.1"}
"sawja" {>="1.5.2"}
"sqlite3"
"xmlm" {>="1.2.0"}
]
depexts: [

@@ -73,6 +73,7 @@ result = 1.2
sawja = 1.5.2
sexplib = v0.9.2
spawn = v0.9.0
sqlite3 = 4.2.0
stdio = v0.9.0
topkg = 0.9.0
typerep = v0.9.0

@@ -8,6 +8,7 @@
#require "ctypes";;
#require "ctypes.stubs";;
#require "sawja";;
#require "sqlite3";;
#require "atdgen";;
#require "xmlm";;
#require "ANSITerminal";;
