Summary:
Use an SQLite database to store proc attributes, instead of files on disk.

Wrap SQLite operations in two layers:
1. `SqliteUtils` provides helper functions to make sure DB operations succeed
2. `KeyValue` provides a functor to expose a simple and type-safe key/value store backed by the SQLite DB.

Reviewed By: jberdine

Differential Revision: D5640053

fbshipit-source-id: 31050e5

parent 03a727143a
commit 2adf654950
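
To illustrate the second layer, here is a hedged sketch (not part of this diff) of how a client is expected to instantiate the `KeyValue` functor added below. The table name "demo_table" and the key/value types are made up for the example, and the table is assumed to already exist in the results database with the same (key BLOB PRIMARY KEY, value BLOB) schema used for the attributes table in `ResultsDir`.

open! IStd

(* Hypothetical client of the KeyValue functor: a table mapping strings to
   string lists. Keys and values are marshalled to SQLite BLOBs by the functor. *)
module DemoTable = struct
  type key = string

  type value = string list

  (* assumed to have been created in results.db beforehand *)
  let table = "demo_table"
end

module DemoStore = KeyValue.Make (DemoTable)

let demo () =
  DemoStore.replace "greeting" ["hello"; "world"] ;
  (match DemoStore.find "greeting" with Some _ -> () | None -> assert false) ;
  DemoStore.delete "greeting"

The attributes module below is exactly this kind of client: its key is a procedure name paired with an attributes kind, and its value is a `ProcAttributes.t`.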
@@ -0,0 +1,97 @@
(*
 * Copyright (c) 2017 - present Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 *)
open! IStd
module L = Logging

type attributes_kind = ProcUndefined | ProcObjCAccessor | ProcDefined [@@deriving compare]

let least_relevant_up_to_proc_kind_exclusive = function
  | ProcUndefined
   -> []
  | ProcObjCAccessor
   -> [ProcUndefined]
  | ProcDefined
   -> [ProcUndefined; ProcObjCAccessor]

let most_relevant_down_to_proc_kind_inclusive = function
  | ProcUndefined
   -> [ProcDefined; ProcObjCAccessor; ProcUndefined]
  | ProcObjCAccessor
   -> [ProcDefined; ProcObjCAccessor]
  | ProcDefined
   -> [ProcDefined]

let proc_kind_of_attr (proc_attributes: ProcAttributes.t) =
  if proc_attributes.is_defined then ProcDefined
  else if Option.is_some proc_attributes.objc_accessor then ProcObjCAccessor
  else ProcUndefined

let should_override_attr attr1 attr2 =
  (* use the source file to be more deterministic in case the same procedure name is defined in several files *)
  [%compare : attributes_kind * SourceFile.t]
    (proc_kind_of_attr attr1, attr1.ProcAttributes.loc.file)
    (proc_kind_of_attr attr2, attr2.ProcAttributes.loc.file)
  > 0

module Table = struct
  type key = Typ.Procname.t * attributes_kind

  type value = ProcAttributes.t

  let table = ResultsDir.attributes_table
end

module Store = KeyValue.Make (Table)

let load_aux ?(min_kind= ProcUndefined) pname =
  List.find_map (most_relevant_down_to_proc_kind_inclusive min_kind) ~f:(fun pkind ->
      Store.find (pname, pkind) )

let load pname : ProcAttributes.t option = load_aux pname

let store (attr: ProcAttributes.t) =
  let pkind = proc_kind_of_attr attr in
  let key = (attr.proc_name, pkind) in
  if load attr.proc_name |> Option.value_map ~default:true ~f:(should_override_attr attr) then (
    (* NOTE: We need to do this dance of adding the proc_kind to the key because there's a race
       condition between the time we load the attributes from the db and the time we write
       possibly better ones. We could avoid this by making the db schema richer than just
       key/value and turning the SELECT + REPLACE into an atomic transaction. *)
    Store.replace key attr ;
    least_relevant_up_to_proc_kind_exclusive pkind
    |> List.iter ~f:(fun k -> Store.delete (attr.proc_name, k)) )

let load_defined pname = load_aux ~min_kind:ProcDefined pname

let get_correct_type_from_objc_class_name type_name =
  (* ToDo: this function should return a type that includes a reference to the tenv computed by:
     let class_method = Typ.Procname.get_default_objc_class_method (Typ.Name.name type_name);
     switch (find_tenv_from_class_of_proc class_method) {
     | Some tenv =>
  *)
  Some (Typ.mk (Tstruct type_name))

let find_file_capturing_procedure pname =
  match load pname with
  | None
   -> None
  | Some proc_attributes
   -> let source_file = proc_attributes.ProcAttributes.source_file_captured in
      let source_dir = DB.source_dir_from_source_file source_file in
      let origin =
        (* Procedure coming from include files if it has different location
           than the file where it was captured. *)
        match SourceFile.compare source_file proc_attributes.ProcAttributes.loc.file <> 0 with
        | true
         -> `Include
        | false
         -> `Source
      in
      let cfg_fname = DB.source_dir_get_internal_file source_dir ".cfg" in
      let cfg_fname_exists =
        PVariant.( = ) `Yes (Sys.file_exists (DB.filename_to_string cfg_fname))
      in
      if cfg_fname_exists then Some (source_file, origin) else None
@@ -1,216 +0,0 @@
(*
 * Copyright (c) 2015 - present Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 *)
open! IStd
open! PVariant
module Hashtbl = Caml.Hashtbl
module F = Format
module L = Logging

type attributes_kind = ProcUndefined | ProcObjCAccessor | ProcDefined [@@deriving compare]

let least_relevant_up_to_proc_kind_exclusive = function
  | ProcUndefined
   -> []
  | ProcObjCAccessor
   -> [ProcUndefined]
  | ProcDefined
   -> [ProcUndefined; ProcObjCAccessor]

let most_relevant_down_to_proc_kind_inclusive = function
  | ProcUndefined
   -> [ProcDefined; ProcObjCAccessor; ProcUndefined]
  | ProcObjCAccessor
   -> [ProcDefined; ProcObjCAccessor]
  | ProcDefined
   -> [ProcDefined]

let proc_kind_of_attr (proc_attributes: ProcAttributes.t) =
  if proc_attributes.is_defined then ProcDefined
  else if Option.is_some proc_attributes.objc_accessor then ProcObjCAccessor
  else ProcUndefined

let should_override_attr attr1 attr2 =
  (* use the source file to be more deterministic in case the same procedure name is defined in several files *)
  [%compare : attributes_kind * SourceFile.t]
    (proc_kind_of_attr attr1, attr1.ProcAttributes.loc.file)
    (proc_kind_of_attr attr2, attr2.ProcAttributes.loc.file)
  > 0

(** Module to manage the table of attributes. *)
let serializer : ProcAttributes.t Serialization.serializer =
  Serialization.create_serializer Serialization.Key.attributes

let attributes_filename ~proc_kind pname_file =
  let file_suffix =
    match proc_kind with
    | ProcDefined
     -> ".attr"
    | ProcObjCAccessor
     -> ".objc_acc.attr"
    | ProcUndefined
     -> ".decl.attr"
  in
  pname_file ^ file_suffix

(** path to the .attr file for the given procedure in the current results directory *)
let res_dir_attr_filename ~create_dir ~proc_kind pname =
  let pname_file = Typ.Procname.to_filename pname in
  let attr_fname = attributes_filename ~proc_kind pname_file in
  let bucket_dir =
    let base = pname_file in
    let len = String.length base in
    if len < 2 then Filename.current_dir_name else String.sub base ~pos:(len - 2) ~len:2
  in
  let filename =
    DB.Results_dir.path_to_filename DB.Results_dir.Abs_root
      [Config.attributes_dir_name; bucket_dir; attr_fname]
  in
  if create_dir then DB.filename_create_dir filename ;
  filename

(* Load the proc attribute for the defined filename if it exists,
   otherwise try to load the declared filename. *)
let load_attr ~defined_only proc_name =
  let attributes_file ~proc_kind proc_name =
    Multilinks.resolve (res_dir_attr_filename ~create_dir:false ~proc_kind proc_name)
  in
  let min_kind = if defined_only then ProcDefined else ProcUndefined in
  List.find_map (most_relevant_down_to_proc_kind_inclusive min_kind) ~f:(fun proc_kind ->
      Serialization.read_from_file serializer (attributes_file ~proc_kind proc_name) )

(* Write a proc attributes to file.
   If defined, delete the declared file if it exists. *)
let write_and_delete proc_name (proc_attributes: ProcAttributes.t) =
  let proc_kind = proc_kind_of_attr proc_attributes in
  let attributes_file proc_kind = res_dir_attr_filename ~create_dir:true ~proc_kind proc_name in
  Serialization.write_to_file serializer (attributes_file proc_kind) ~data:proc_attributes ;
  let upgrade_relevance less_relevant_proc_kind =
    let fname_declared = DB.filename_to_string (attributes_file less_relevant_proc_kind) in
    if Sys.file_exists fname_declared = `Yes then
      try Unix.unlink fname_declared
      with Unix.Unix_error _ -> ()
  in
  List.iter ~f:upgrade_relevance (least_relevant_up_to_proc_kind_exclusive proc_kind)

let store_attributes (proc_attributes: ProcAttributes.t) =
  let proc_name = proc_attributes.proc_name in
  let should_write =
    match load_attr ~defined_only:false proc_name with
    | None
     -> true
    | Some proc_attributes_on_disk
     -> should_override_attr proc_attributes proc_attributes_on_disk
  in
  if should_write then write_and_delete proc_name proc_attributes

let attr_tbl = Typ.Procname.Hash.create 16

let defined_attr_tbl = Typ.Procname.Hash.create 16

let load_attributes ~cache proc_name =
  try Typ.Procname.Hash.find attr_tbl proc_name
  with Not_found ->
    let proc_attributes = load_attr ~defined_only:false proc_name in
    ( match proc_attributes with
    | Some attrs
     -> if cache then (
          Typ.Procname.Hash.add attr_tbl proc_name proc_attributes ;
          if attrs.is_defined then Typ.Procname.Hash.add defined_attr_tbl proc_name proc_attributes )
    | None
     -> () ) ;
    proc_attributes

let load_defined_attributes ~cache_none proc_name =
  try Typ.Procname.Hash.find defined_attr_tbl proc_name
  with Not_found ->
    let proc_attributes = load_attr ~defined_only:true proc_name in
    if proc_attributes <> None then (
      (* procedure just got defined, replace attribute in attr_tbl with defined version *)
      Typ.Procname.Hash.replace attr_tbl proc_name proc_attributes ;
      Typ.Procname.Hash.add defined_attr_tbl proc_name proc_attributes )
    else if cache_none then Typ.Procname.Hash.add defined_attr_tbl proc_name proc_attributes ;
    proc_attributes

(** Given the name of an ObjC class, extract the type from the tenv where the class was defined. We
    do this by adding a method that is unique to each class, and then finding the tenv that
    corresponds to the class definition. *)
let get_correct_type_from_objc_class_name type_name =
  (* ToDo: this function should return a type that includes a reference to the tenv computed by:
     let class_method = Typ.Procname.get_default_objc_class_method (Typ.Name.name type_name);
     switch (find_tenv_from_class_of_proc class_method) {
     | Some tenv =>
  *)
  Some (Typ.mk (Tstruct type_name))

type t =
  {num_bindings: int; num_buckets: int; max_bucket_length: int; serialized_size_kb: int option}

let to_json at =
  let extra_field =
    match at.serialized_size_kb with Some v -> [("serialized_size_kb", `Int v)] | None -> []
  in
  `Assoc
    ( [ ("num_bindings", `Int at.num_bindings)
      ; ("num_buckets", `Int at.num_buckets)
      ; ("max_bucket_length", `Int at.max_bucket_length) ]
    @ extra_field )

let from_json json =
  let open! Yojson.Basic.Util in
  { num_bindings= json |> member "num_bindings" |> to_int
  ; num_buckets= json |> member "num_buckets" |> to_int
  ; max_bucket_length= json |> member "max_bucket_length" |> to_int
  ; serialized_size_kb= json |> member "serialized_size_kb" |> to_option to_int }

let aggregate s =
  let all_num_bindings = List.map ~f:(fun stats -> float_of_int stats.num_bindings) s in
  let all_num_buckets = List.map ~f:(fun stats -> float_of_int stats.num_buckets) s in
  let all_max_bucket_length = List.map ~f:(fun stats -> float_of_int stats.max_bucket_length) s in
  let aggr_num_bindings = StatisticsToolbox.compute_statistics all_num_bindings in
  let aggr_num_buckets = StatisticsToolbox.compute_statistics all_num_buckets in
  let aggr_max_bucket_length = StatisticsToolbox.compute_statistics all_max_bucket_length in
  `Assoc
    [ ("num_bindings", StatisticsToolbox.to_json aggr_num_bindings)
    ; ("num_buckets", StatisticsToolbox.to_json aggr_num_buckets)
    ; ("max_bucket_length", StatisticsToolbox.to_json aggr_max_bucket_length) ]

let stats () =
  let stats = Typ.Procname.Hash.stats attr_tbl in
  let {Hashtbl.num_bindings; num_buckets; max_bucket_length} = stats in
  let serialized_size_kb =
    match Config.developer_mode with
    | true
     -> Some (Marshal.data_size (Marshal.to_bytes attr_tbl []) 0 / 1024)
    | false
     -> None
  in
  {num_bindings; num_buckets; max_bucket_length; serialized_size_kb}

(* Find the file where the procedure was captured, if a cfg for that file exists.
   Return also a boolean indicating whether the procedure is defined in an
   include file. *)
let find_file_capturing_procedure ?(cache= true) pname =
  match load_attributes ~cache pname with
  | None
   -> None
  | Some proc_attributes
   -> let source_file = proc_attributes.ProcAttributes.source_file_captured in
      let source_dir = DB.source_dir_from_source_file source_file in
      let origin =
        (* Procedure coming from include files if it has different location
           than the file where it was captured. *)
        match SourceFile.compare source_file proc_attributes.ProcAttributes.loc.file <> 0 with
        | true
         -> `Include
        | false
         -> `Source
      in
      let cfg_fname = DB.source_dir_get_internal_file source_dir ".cfg" in
      let cfg_fname_exists = Sys.file_exists (DB.filename_to_string cfg_fname) = `Yes in
      if cfg_fname_exists then Some (source_file, origin) else None
@@ -0,0 +1,99 @@
(*
 * Copyright (c) 2017 - present Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 *)
open! IStd

module type Table = sig
  type key

  type value

  val table : string
end

module type Blob = sig
  module Table : Table

  val blob_of_key : Table.key -> Sqlite3.Data.t

  val blob_of_value : Table.value -> Sqlite3.Data.t

  val key_of_blob : Sqlite3.Data.t -> Table.key option

  val value_of_blob : Sqlite3.Data.t -> Table.value option
end

module type S = sig
  include Blob

  val replace : Table.key -> Table.value -> unit

  val find : Table.key -> Table.value option

  val delete : Table.key -> unit
end

(* The functor is mostly here to provide a modicum of type safety around blobing/unblobing *)
module Make (Table : Table) : S with module Table = Table = struct
  module Unsafe : Blob with module Table = Table = struct
    module Table = Table

    let blob x = Sqlite3.Data.BLOB (Marshal.to_string x [])

    let unblob = function
      | Sqlite3.Data.BLOB b
       -> Some (Marshal.from_string b 0)
      | Sqlite3.Data.NULL
       -> None
      | _
       -> assert false

    let blob_of_key = blob

    let blob_of_value = blob

    let key_of_blob = unblob

    let value_of_blob = unblob
  end

  (* cannot mix, e.g., blob_key and blob_value now *)
  include Unsafe

  let replace =
    let replace_statement =
      Printf.sprintf "REPLACE INTO %s(key, value) VALUES(:k, :v)" Table.table
    in
    fun key value ->
      let db = ResultsDir.get_database () in
      let replace_stmt = Sqlite3.prepare db replace_statement in
      SqliteUtils.check_sqlite_error ~log:"replace bind key"
        (Sqlite3.bind replace_stmt 1 (blob_of_key key)) ;
      SqliteUtils.check_sqlite_error ~log:"replace bind value"
        (Sqlite3.bind replace_stmt 2 (blob_of_value value)) ;
      SqliteUtils.sqlite_unit_step ~log:"KeyValue.replace" replace_stmt

  let find =
    let select_statement = Printf.sprintf "SELECT value FROM %s WHERE key = :k" Table.table in
    fun key ->
      let db = ResultsDir.get_database () in
      let select_stmt = Sqlite3.prepare db select_statement in
      SqliteUtils.check_sqlite_error ~log:"insert bind key"
        (Sqlite3.bind select_stmt 1 (blob_of_key key)) ;
      Option.bind ~f:value_of_blob
        (SqliteUtils.sqlite_result_step ~log:"KeyValue.find" select_stmt)

  let delete =
    let delete_statement = Printf.sprintf "DELETE FROM %s WHERE key = :k" Table.table in
    fun key ->
      let db = ResultsDir.get_database () in
      let delete_stmt = Sqlite3.prepare db delete_statement in
      SqliteUtils.check_sqlite_error ~log:"delete bind key"
        (Sqlite3.bind delete_stmt 1 (blob_of_key key)) ;
      SqliteUtils.sqlite_unit_step ~log:"KeyValue.delete" delete_stmt
end
@@ -0,0 +1,44 @@
(*
 * Copyright (c) 2017 - present Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 *)

open! IStd

(** key/value database information *)
module type Table = sig
  type key

  type value

  val table : string
end

(** Key/value store backed by the ResultsDir database *)
module type S = sig
  module Table : Table

  val blob_of_key : Table.key -> Sqlite3.Data.t
  (** turn a key into a [Sqlite3.Data.BLOB] *)

  val blob_of_value : Table.value -> Sqlite3.Data.t
  (** turn a value into a [Sqlite3.Data.BLOB] *)

  val key_of_blob : Sqlite3.Data.t -> Table.key option
  (** turn a [Sqlite3.Data.BLOB] (or [Sqlite3.Data.NULL]) back into a key *)

  val value_of_blob : Sqlite3.Data.t -> Table.value option
  (** turn a [Sqlite3.Data.BLOB] (or [Sqlite3.Data.NULL]) back into a value *)

  val replace : Table.key -> Table.value -> unit

  val find : Table.key -> Table.value option

  val delete : Table.key -> unit
end

module Make (Table : Table) : S with module Table = Table
@@ -0,0 +1,52 @@
(*
 * Copyright (c) 2017 - present Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 *)
open! IStd
module L = Logging

let all_attributes ~into ~db_name =
  let select_stmt =
    Sqlite3.prepare into
      (Printf.sprintf "SELECT value FROM %s.%s" db_name ResultsDir.attributes_table)
  in
  List.filter_map ~f:(Option.bind ~f:Attributes.Store.value_of_blob)
    (SqliteUtils.sqlite_result_rev_list_step ~log:"select" select_stmt)

let merge_attributes_table ~into ~db_name =
  let rows = all_attributes ~into ~db_name in
  (* no need to wrap this in a single transaction because we open the table with synchronous=OFF *)
  List.iter rows ~f:Attributes.store

let merge ~into db =
  let db_name = "db" in
  SqliteUtils.check_sqlite_error ~fatal:true ~log:"attaching db"
    (Sqlite3.exec into (Printf.sprintf "ATTACH '%s' AS %s" db db_name)) ;
  merge_attributes_table ~into ~db_name ;
  SqliteUtils.check_sqlite_error ~fatal:true ~log:"detaching db"
    (Sqlite3.exec into (Printf.sprintf "DETACH %s" db_name)) ;
  ()

let merge_buck_flavors_results infer_deps_file =
  let into = ResultsDir.get_database () in
  let one_line line =
    match String.split ~on:'\t' line with
    | [_; _; target_results_dir]
     -> let infer_out_src =
          if Filename.is_relative target_results_dir then
            Filename.dirname (Config.project_root ^/ "buck-out") ^/ target_results_dir
          else target_results_dir
        in
        merge ~into (infer_out_src ^/ ResultsDir.database_filename)
    | _
     -> assert false
  in
  match Utils.read_file infer_deps_file with
  | Ok lines
   -> List.iter ~f:one_line lines
  | Error error
   -> L.internal_error "Couldn't read deps file '%s': %s" infer_deps_file error
@@ -0,0 +1,13 @@
(*
 * Copyright (c) 2017 - present Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 *)

open! IStd

val merge_buck_flavors_results : string -> unit
(** Merge the results from sub-invocations of infer inside buck-out/. Takes as argument the infer_deps file. *)
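
For illustration, a hedged sketch of the input accepted by merge_buck_flavors_results, derived from the parsing code in MergeResults.ml above: lines are tab-separated and only the third column (the target's results directory) is read. The concrete target name and paths below are made up.

(* Sketch only: one hypothetical line of an infer_deps file. The first two
   columns are ignored; the third is resolved relative to buck-out when it is
   a relative path, then its results.db is merged into the main database. *)
let example_infer_deps_line =
  String.concat ~sep:"\t"
    ["//app:main"; "0123456789abcdef"; "buck-out/gen/app/main#infer/infer-out"]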
@@ -0,0 +1,112 @@
(*
 * Copyright (c) 2017 - present Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 *)
open! IStd
open! PVariant
module L = Logging

let database : Sqlite3.db option ref = ref None

let () =
  Epilogues.register "closing results database" ~f:(fun () ->
      Option.iter !database ~f:SqliteUtils.db_close )

let database_filename = "results.db"

let database_fullpath = Config.results_dir ^/ database_filename

let attributes_table = "attributes"

let results_dir_dir_markers =
  List.map ~f:(Filename.concat Config.results_dir) [Config.captured_dir_name; Config.specs_dir_name]

let is_results_dir ~check_correct_version () =
  let not_found = ref "" in
  let has_all_markers =
    List.for_all results_dir_dir_markers ~f:(fun d ->
        Sys.is_directory d = `Yes
        ||
        (not_found := d ^ "/" ;
         false) )
    && ( not check_correct_version || Sys.is_file database_fullpath = `Yes
       ||
       (not_found := database_fullpath ;
        false) )
  in
  Result.ok_if_true has_all_markers ~error:(Printf.sprintf "'%s' not found" !not_found)

let remove_results_dir () =
  (* Check whether the file exists; it may not be a directory but that will be caught by the call
     to [is_results_dir]. If it's an empty directory, leave it alone. This allows users to create
     a temporary directory for the infer results without infer removing it to recreate it, which
     could be racy. *)
  if Sys.file_exists Config.results_dir = `Yes && not (Utils.directory_is_empty Config.results_dir)
  then (
    if not Config.force_delete_results_dir then
      Result.iter_error (is_results_dir ~check_correct_version:false ()) ~f:(fun err ->
          L.(die UserError)
            "ERROR: '%s' exists but does not seem to be an infer results directory: %s@\nERROR: Please delete '%s' and try again@."
            Config.results_dir err Config.results_dir ) ;
    Utils.rmtree Config.results_dir )

let create_attributes_table db =
  SqliteUtils.exec db ~log:"initializing results DB"
    ~stmt:
      (Printf.sprintf "CREATE TABLE IF NOT EXISTS %s (key BLOB PRIMARY KEY ,value BLOB)"
         attributes_table)

let create_db () =
  let temp_db = Filename.temp_file ~in_dir:Config.results_dir database_filename ".tmp" in
  let db = Sqlite3.db_open ~mutex:`FULL temp_db in
  create_attributes_table db ;
  (* This should be the default but better be sure, otherwise we cannot access the database
     concurrently. This has to happen before setting WAL mode. *)
  SqliteUtils.exec db ~log:"locking mode=NORMAL" ~stmt:"PRAGMA locking_mode=NORMAL" ;
  (* Write-ahead log is much faster than other journalling modes. *)
  SqliteUtils.exec db ~log:"journal_mode=WAL" ~stmt:"PRAGMA journal_mode=WAL" ;
  SqliteUtils.db_close db ;
  try Sys.rename temp_db database_fullpath
  with Sys_error _ -> (* lost the race, doesn't matter *) ()

let new_database_connection () =
  Option.iter !database ~f:SqliteUtils.db_close ;
  let db = Sqlite3.db_open ~mode:`NO_CREATE ~cache:`PRIVATE ~mutex:`FULL database_fullpath in
  Sqlite3.busy_timeout db 1000 ;
  (* Higher levels of "synchronous" are only useful to guarantee that the db will not be corrupted
     if the machine crashes for some reason before the data has actually been written to disk. We
     do not need this kind of guarantee for infer results as one can always rerun infer if
     interrupted. *)
  SqliteUtils.exec db ~log:"synchronous=OFF" ~stmt:"PRAGMA synchronous=OFF" ;
  database := Some db

let create_results_dir () =
  Unix.mkdir_p Config.results_dir ;
  L.setup_log_file () ;
  if Sys.file_exists database_fullpath <> `Yes then (
    Logging.progress "creating results database@." ; create_db () ) ;
  new_database_connection () ;
  List.iter ~f:Unix.mkdir_p results_dir_dir_markers

let assert_results_dir advice =
  Result.iter_error (is_results_dir ~check_correct_version:true ()) ~f:(fun err ->
      L.(die UserError)
        "ERROR: No results directory at '%s': %s@\nERROR: %s@." Config.results_dir err advice ) ;
  L.setup_log_file () ;
  new_database_connection ()

let get_database () = Option.value_exn !database

let reset_attributes_table () =
  let db = get_database () in
  SqliteUtils.exec db ~log:"drop attributes table"
    ~stmt:(Printf.sprintf "DROP TABLE %s" attributes_table) ;
  create_attributes_table db

let delete_capture_and_analysis_data () =
  reset_attributes_table () ;
  let dirs_to_delete =
    List.map ~f:(Filename.concat Config.results_dir) Config.([captured_dir_name; specs_dir_name])
  in
  List.iter ~f:Utils.rmtree dirs_to_delete ; List.iter ~f:Unix.mkdir_p dirs_to_delete ; ()

let canonicalize_db () =
  let db = get_database () in
  SqliteUtils.exec db ~log:"running VACUUM" ~stmt:"VACUUM"
@@ -0,0 +1,40 @@
(*
 * Copyright (c) 2017 - present Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 *)

open! IStd

val database_filename : string
(** the relative path to the database from the results directory *)

val get_database : unit -> Sqlite3.db
(** The results database. You should always use this function to access the database, as the
    connection to it may change during the execution (see [new_database_connection]). *)

val attributes_table : string
(** the name of the table of proc names to their proc attributes *)

val reset_attributes_table : unit -> unit
(** zero out the attributes table *)

val assert_results_dir : string -> unit
(** Check that the results dir exists and set up logging, the database, etc. *)

val remove_results_dir : unit -> unit
(** Recursively delete the results directory. *)

val create_results_dir : unit -> unit
(** Create the results dir and set up logging, the database, etc. *)

val new_database_connection : unit -> unit
(** Closes the previous connection to the database (if any), and opens a new one. Needed after
    calls to fork(2). *)

val delete_capture_and_analysis_data : unit -> unit
(** delete all results from the capture and the analysis *)

val canonicalize_db : unit -> unit
(** put the database on disk in deterministic form *)
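
A hedged sketch of how these entry points are meant to be sequenced, based only on the signatures and comments above; the is_capture_phase flag and the advice string are hypothetical, not part of the interface.

open! IStd

(* Sketch: initialize the results directory, then grab the database handle.
   Which branch runs depends on the (hypothetical) phase flag. *)
let setup ~is_capture_phase =
  if is_capture_phase then ResultsDir.create_results_dir ()
  else ResultsDir.assert_results_dir "run the capture phase first" ;
  (* the connection is now open; call [new_database_connection] again after fork(2) *)
  let _db : Sqlite3.db = ResultsDir.get_database () in
  ()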
@@ -0,0 +1,72 @@
(*
 * Copyright (c) 2017 - present Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 *)
open! IStd
module L = Logging

exception Error of string

let error ~fatal fmt =
  (if fatal then Format.kasprintf (fun err -> raise (Error err)) else L.internal_error) fmt

let check_sqlite_error ?(fatal= false) ~log rc =
  match (rc : Sqlite3.Rc.t) with
  | OK | ROW
   -> ()
  | _ as err
   -> error ~fatal "%s: %s" log (Sqlite3.Rc.to_string err)

let exec db ~log ~stmt =
  (* Call [check_sqlite_error] with [fatal:true] and catch exceptions to rewrite the error
     message. This avoids allocating the error string when not needed. *)
  try check_sqlite_error ~fatal:true ~log (Sqlite3.exec db stmt)
  with Error err -> error ~fatal:true "exec: %s" err

let finalize ~log stmt =
  try check_sqlite_error ~fatal:true ~log (Sqlite3.finalize stmt) with
  | Error err
   -> error ~fatal:true "finalize: %s" err
  | Sqlite3.Error err
   -> error ~fatal:true "finalize: %s: %s" log err

let sqlite_result_rev_list_step ~log stmt =
  let rec aux rev_results =
    match Sqlite3.step stmt with
    | Sqlite3.Rc.ROW
     -> (* the operation returned a result, get it *)
        let value = Some (Sqlite3.column stmt 0) in
        aux (value :: rev_results)
    | DONE
     -> rev_results
    | err
     -> L.die InternalError "%s: %s" log (Sqlite3.Rc.to_string err)
  in
  protect ~finally:(fun () -> finalize ~log stmt) ~f:(fun () -> aux [])

let sqlite_result_step ~log stmt =
  match sqlite_result_rev_list_step ~log stmt with
  | []
   -> None
  | [x]
   -> x
  | l
   -> L.die InternalError "%s: zero or one result expected, got %d instead" log (List.length l)

let sqlite_unit_step ~log stmt =
  match sqlite_result_rev_list_step ~log stmt with
  | []
   -> ()
  | l
   -> L.die InternalError "%s: exactly zero result expected, got %d instead" log (List.length l)

let db_close db =
  if not (Sqlite3.db_close db) then
    raise
      (Error
         (Printf.sprintf "closing: %s (%s)"
            (Sqlite3.errcode db |> Sqlite3.Rc.to_string)
            (Sqlite3.errmsg db)))
@@ -0,0 +1,35 @@
(*
 * Copyright (c) 2017 - present Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 *)

open! IStd

exception Error of string
(** The functions in this module tend to raise more often than their counterparts in [Sqlite3]. In
    particular, they may raise if the [Sqlite3.Rc.t] result of certain operations is unexpected. *)

val check_sqlite_error : ?fatal:bool -> log:string -> Sqlite3.Rc.t -> unit
(** Assert that the result is either [Sqlite3.Rc.OK] or [Sqlite3.Rc.ROW]. If the result is not
    valid, then if [fatal] is set raise [Error], otherwise log the error and proceed. *)

val exec : Sqlite3.db -> log:string -> stmt:string -> unit
(** Execute the given Sqlite [stmt] and assert that it resulted in [Sqlite3.Rc.OK]. Otherwise, fail
    similarly to [check_sqlite_error ~fatal:true]. *)

val finalize : log:string -> Sqlite3.stmt -> unit
(** Finalize the given [stmt]. Raises [Error] on failure. *)

val sqlite_result_rev_list_step : log:string -> Sqlite3.stmt -> Sqlite3.Data.t option list
(** Return a reversed list of results obtained by repeatedly stepping through [stmt] and saving
    only column 0 of each returned row (all that's been needed so far). *)

val sqlite_result_step : log:string -> Sqlite3.stmt -> Sqlite3.Data.t option
(** Same as [sqlite_result_rev_list_step] but asserts that at most one result is returned. *)

val sqlite_unit_step : log:string -> Sqlite3.stmt -> unit
(** Same as [sqlite_result_rev_list_step] but asserts that no result is returned. *)

val db_close : Sqlite3.db -> unit
(** Close the given database and assert that it was effective. Raises [Error] if not. *)
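
For illustration, a hedged sketch (not part of this diff) of how these helpers compose around a hand-written query. The query text and log label are made up; the statement is assumed to run against the results database opened by ResultsDir.

open! IStd

(* Count rows in the attributes table. [sqlite_result_step] steps the statement,
   finalizes it, and asserts that at most one row is returned. *)
let count_attributes () =
  let db = ResultsDir.get_database () in
  let stmt =
    Sqlite3.prepare db
      (Printf.sprintf "SELECT COUNT(*) FROM %s" ResultsDir.attributes_table)
  in
  match SqliteUtils.sqlite_result_step ~log:"count attributes" stmt with
  | Some (Sqlite3.Data.INT n)
   -> n
  | _
   -> 0L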