[infer-out][6/9] kill --log-file option

Summary:
This option allowed one to customise the name of the log file, but the
log file lives in infer-out/, so that flexibility is not needed and is
even undesirable: we want entries in infer-out/ to be predictable.

Reviewed By: skcho

Differential Revision: D20894304

fbshipit-source-id: 760d91df3
master
Jules Villard 5 years ago committed by Facebook GitHub Bot
parent cae160cccf
commit 932d3a3f92

@ -1461,9 +1461,6 @@ INTERNAL OPTIONS
--load-average-reset
Cancel the effect of --load-average.
--log-file file
Specify the file to use for logging
--margin int
Set right margin for the pretty printing functions

@ -1489,11 +1489,6 @@ and _log_events =
"[DOES NOTHING] Turn on the feature that logs events in a machine-readable format"
and log_file =
CLOpt.mk_string ~deprecated:["out_file"; "-out-file"] ~long:"log-file" ~meta:"file"
~default:"logs" "Specify the file to use for logging"
and _log_skipped =
CLOpt.mk_bool ~long:"" ~deprecated:["-log-skipped"] ~deprecated_no:["-no-log-skipped"]
"[DOES NOTHING] Turn on the feature that logs skipped functions (one per file) in a \
@ -2800,8 +2795,6 @@ and load_average =
match !load_average with None when !buck -> Some (float_of_int ncpu) | _ -> !load_average
and log_file = !log_file
and perf_profiler_data_file = !perf_profiler_data_file
and max_nesting = !max_nesting

@ -409,8 +409,6 @@ val linters_validate_syntax_only : bool
val liveness_dangerous_classes : Yojson.Basic.t
val log_file : string
val max_nesting : int option
val merge : bool

@ -322,7 +322,9 @@ let setup_log_file () =
let fmt, chan, preexisting_logfile =
(* if invoked in a sub-dir (e.g., in Buck integrations), log inside the original log file *)
(* assumes the results dir exists already *)
let logfile_path = Config.toplevel_results_dir ^/ Config.log_file in
let logfile_path =
ResultsDirEntryName.get_path ~results_dir:Config.toplevel_results_dir Logs
in
let preexisting_logfile = PolyVariantEqual.( = ) (Sys.file_exists logfile_path) `Yes in
let chan = Stdlib.open_out_gen [Open_append; Open_creat] 0o666 logfile_path in
let file_fmt =

@ -187,8 +187,7 @@ let scrub_for_caching () =
let should_delete_file =
let files_to_delete =
(if cache_capture then [] else [ResultsDatabase.database_filename])
@ [ Config.log_file
; (* some versions of sqlite do not clean up after themselves *)
@ [ (* some versions of sqlite do not clean up after themselves *)
ResultsDatabase.database_filename ^ "-shm"
; ResultsDatabase.database_filename ^ "-wal" ]
in

@ -6,15 +6,15 @@
*)
open! IStd
type id = Specs | Temporary [@@deriving enumerate]
type id = Logs | Specs | Temporary [@@deriving enumerate]
type cleanup_action = Delete | Keep [@@deriving equal]
type entry_kind = Directory
type entry_kind = Directory | File
type t =
{ rel_path: string (** path inside infer-out/ *)
; kind: entry_kind
; kind: entry_kind (** unused for now, useful for documentation *)
; before_incremental_analysis: cleanup_action
(** whether this should be deleted before an incremental analysis *)
; before_caching_capture: cleanup_action
@ -22,6 +22,11 @@ type t =
e.g., a distributed Buck cache. *) }
let of_id = function
| Logs ->
{ rel_path= "logs"
; kind= File
; before_incremental_analysis= Keep
; before_caching_capture= Delete }
| Specs ->
{ rel_path= "specs"
; kind= Directory

@ -10,6 +10,7 @@ open! IStd
directory you probably want to use {!ResultsDir.Entry} instead of this module. *)
type id =
| Logs (** log file *)
| Specs (** directory containing summaries as .specs files *)
| Temporary (** directory containing temp files *)

@ -135,7 +135,7 @@ let () =
setup () ;
log_environment_info () ;
if Config.debug_mode && CLOpt.is_originator then (
L.progress "Logs in %s@." (Config.results_dir ^/ Config.log_file) ;
L.progress "Logs in %s@." (ResultsDir.get_path Logs) ;
L.progress "Execution ID %Ld@." Config.execution_id ) ;
( if Config.test_determinator && not Config.process_clang_ast then
TestDeterminator.compute_and_emit_test_to_run ()

Loading…
Cancel
Save