diff --git a/infer/src/concurrency/starvation.ml b/infer/src/concurrency/starvation.ml index 045e30a81..c145c03eb 100644 --- a/infer/src/concurrency/starvation.ml +++ b/infer/src/concurrency/starvation.ml @@ -11,8 +11,6 @@ module MF = MarkupFormatter let pname_pp = MF.wrap_monospaced Typ.Procname.pp -let debug fmt = L.(debug Analysis Verbose fmt) - let attrs_of_pname = Summary.OnDisk.proc_resolve_attributes let is_ui_thread_model pn = @@ -401,20 +399,15 @@ let report_lockless_violations (tenv, summary) StarvationDomain.{critical_pairs} inner class but this is no longer obvious in the path, because of nested-class path normalisation. The net effect of the above issues is that we will only see these locks in conflicting pairs once, as opposed to twice with all other deadlock pairs. *) -let report_deadlocks env StarvationDomain.{critical_pairs; thread} report_map' = +let report_deadlocks env StarvationDomain.{critical_pairs} report_map' = let open StarvationDomain in let _, current_summary = env in let current_pname = Summary.get_proc_name current_summary in let report_endpoint_elem current_elem endpoint_pname elem report_map = if - not - ( CriticalPair.may_deadlock current_elem elem - && should_report_deadlock_on_current_proc current_elem elem ) - then report_map - else - let () = - debug "Possible deadlock:@.%a@.%a@." CriticalPair.pp current_elem CriticalPair.pp elem - in + CriticalPair.may_deadlock current_elem elem + && should_report_deadlock_on_current_proc current_elem elem + then match (current_elem.CriticalPair.elem.event, elem.CriticalPair.elem.event) with | LockAcquire lock1, LockAcquire lock2 -> let error_message = @@ -435,6 +428,7 @@ let report_deadlocks env StarvationDomain.{critical_pairs; thread} report_map' = ReportMap.add_deadlock current_pname loc ltr error_message report_map | _, _ -> report_map + else report_map in let report_on_current_elem elem report_map = match elem.CriticalPair.elem.event with @@ -456,82 +450,85 @@ let report_deadlocks env StarvationDomain.{critical_pairs; thread} report_map' = and retrieve all the summaries of the methods of that class *) (* for each summary related to the endpoint, analyse and report on its pairs *) fold_reportable_summaries env endpoint_class ~init:report_map - ~f:(fun acc - (endpoint_pname, {critical_pairs= endp_critical_pairs; thread= endp_thread}) - -> - if ThreadDomain.can_run_in_parallel thread endp_thread then - CriticalPairs.fold - (report_endpoint_elem elem endpoint_pname) - endp_critical_pairs acc - else acc ) ) + ~f:(fun acc (endpoint_pname, {critical_pairs= endp_critical_pairs}) -> + CriticalPairs.fold + (report_endpoint_elem elem endpoint_pname) + endp_critical_pairs acc ) ) in CriticalPairs.fold report_on_current_elem critical_pairs report_map' -let report_starvation env {StarvationDomain.critical_pairs; thread} report_map' = +(** report blocking call chains on the UI thread, or, a call chain on the UI thread taking a lock, + which is held in another call chain making a blocking call *) +let report_starvation env {StarvationDomain.critical_pairs} report_map' = let open StarvationDomain in let _, current_summary = env in let current_pname = Summary.get_proc_name current_summary in let report_remote_block event current_lock endpoint_pname endpoint_elem report_map = - let acquisitions = endpoint_elem.CriticalPair.elem.acquisitions in - match endpoint_elem.CriticalPair.elem.event with - | MayBlock (block_descr, sev) when Acquisitions.lock_is_held current_lock acquisitions -> - let error_message = - Format.asprintf - 
"Method %a runs on UI thread and%a, which may be held by another thread which %s." - pname_pp current_pname Lock.pp_locks current_lock block_descr - in - let first_trace = CriticalPair.make_trace ~header:"[Trace 1] " current_pname event in - let second_trace = - CriticalPair.make_trace ~header:"[Trace 2] " endpoint_pname endpoint_elem - in - let ltr = first_trace @ second_trace in - let loc = CriticalPair.get_earliest_lock_or_call_loc ~procname:current_pname event in - ReportMap.add_starvation sev current_pname loc ltr error_message report_map - | _ -> - report_map + (* only consider methods that can run in parallel to the ui thread *) + if CriticalPair.can_run_in_parallel event endpoint_elem then + let acquisitions = endpoint_elem.CriticalPair.elem.acquisitions in + match endpoint_elem.CriticalPair.elem.event with + | MayBlock (block_descr, sev) when Acquisitions.lock_is_held current_lock acquisitions -> + let error_message = + Format.asprintf + "Method %a runs on UI thread and%a, which may be held by another thread which %s." + pname_pp current_pname Lock.pp_locks current_lock block_descr + in + let first_trace = CriticalPair.make_trace ~header:"[Trace 1] " current_pname event in + let second_trace = + CriticalPair.make_trace ~header:"[Trace 2] " endpoint_pname endpoint_elem + in + let ltr = first_trace @ second_trace in + let loc = CriticalPair.get_earliest_lock_or_call_loc ~procname:current_pname event in + ReportMap.add_starvation sev current_pname loc ltr error_message report_map + | _ -> + report_map + else report_map in let report_on_current_elem (critical_pair : CriticalPair.t) report_map = - let event = critical_pair.elem.event in - match event with - | MayBlock (_, sev) -> - let error_message = - Format.asprintf "Method %a runs on UI thread and may block; %a." pname_pp current_pname - Event.describe event - in - let loc = CriticalPair.get_loc critical_pair in - let ltr = - CriticalPair.make_trace ~include_acquisitions:false current_pname critical_pair - in - ReportMap.add_starvation sev current_pname loc ltr error_message report_map - | StrictModeCall _ -> - let error_message = - Format.asprintf "Method %a runs on UI thread and may violate Strict Mode; %a." pname_pp - current_pname Event.describe event - in - let loc = CriticalPair.get_loc critical_pair in - let ltr = - CriticalPair.make_trace ~include_acquisitions:false current_pname critical_pair - in - ReportMap.add_strict_mode_violation current_pname loc ltr error_message report_map - | LockAcquire endpoint_lock -> - Lock.owner_class endpoint_lock - |> Option.value_map ~default:report_map ~f:(fun endpoint_class -> - (* get the class of the root variable of the lock in the endpoint elem + (* we must be on the UI thread, otherwise there is no reason to report *) + if CriticalPair.is_uithread critical_pair then + let event = critical_pair.elem.event in + match event with + | MayBlock (_, sev) -> + let error_message = + Format.asprintf "Method %a runs on UI thread and may block; %a." pname_pp current_pname + Event.describe event + in + let loc = CriticalPair.get_loc critical_pair in + let ltr = + CriticalPair.make_trace ~include_acquisitions:false current_pname critical_pair + in + ReportMap.add_starvation sev current_pname loc ltr error_message report_map + | StrictModeCall _ -> + let error_message = + Format.asprintf "Method %a runs on UI thread and may violate Strict Mode; %a." 
pname_pp + current_pname Event.describe event + in + let loc = CriticalPair.get_loc critical_pair in + let ltr = + CriticalPair.make_trace ~include_acquisitions:false current_pname critical_pair + in + ReportMap.add_strict_mode_violation current_pname loc ltr error_message report_map + | LockAcquire endpoint_lock -> + Lock.owner_class endpoint_lock + |> Option.value_map ~default:report_map ~f:(fun endpoint_class -> + (* get the class of the root variable of the lock in the endpoint elem and retrieve all the summaries of the methods of that class *) - (* for each summary related to the endpoint, analyse and report on its pairs *) - fold_reportable_summaries env endpoint_class ~init:report_map - ~f:(fun acc (endpoint_pname, {critical_pairs; thread}) -> - (* skip methods on ui thread, as they cannot run in parallel to us *) - if ThreadDomain.is_uithread thread then acc - else - CriticalPairs.fold - (report_remote_block critical_pair endpoint_lock endpoint_pname) - critical_pairs acc ) ) + (* for each summary related to the endpoint, analyse and report on its pairs *) + fold_reportable_summaries env endpoint_class ~init:report_map + ~f:(fun acc (endpoint_pname, {critical_pairs; thread}) -> + (* perf optimisation: skip if element and other method are both on UI thread *) + if ThreadDomain.can_run_in_parallel critical_pair.elem.thread thread then + CriticalPairs.fold + (report_remote_block critical_pair endpoint_lock endpoint_pname) + critical_pairs acc + else acc ) ) + else report_map in (* do not report starvation/strict mode warnings on constructors, keep that for callers *) - if Typ.Procname.is_constructor current_pname || not (ThreadDomain.is_uithread thread) then - report_map' + if Typ.Procname.is_constructor current_pname then report_map' else CriticalPairs.fold report_on_current_elem critical_pairs report_map' diff --git a/infer/src/concurrency/starvationDomain.ml b/infer/src/concurrency/starvationDomain.ml index d20efca39..03a2935b0 100644 --- a/infer/src/concurrency/starvationDomain.ml +++ b/infer/src/concurrency/starvationDomain.ml @@ -11,6 +11,37 @@ module MF = MarkupFormatter let pname_pp = MF.wrap_monospaced Typ.Procname.pp +module ThreadDomain = struct + type t = UIThread | AnyThread [@@deriving compare] + + let top = AnyThread + + let is_top = function AnyThread -> true | UIThread -> false + + let join st1 st2 = + match (st1, st2) with AnyThread, _ | _, AnyThread -> AnyThread | _, _ -> UIThread + + + let ( <= ) ~lhs ~rhs = match (lhs, rhs) with AnyThread, UIThread -> false | _, _ -> true + + let widen ~prev ~next ~num_iters:_ = join prev next + + let pp fmt st = + (match st with UIThread -> "UIThread" | AnyThread -> "AnyThread") |> F.pp_print_string fmt + + + (* There is only one UI thread, so (UIThread || UIThread) is impossible. *) + let can_run_in_parallel st1 st2 = + match (st1, st2) with UIThread, UIThread -> false | _, _ -> true + + + let is_uithread = function UIThread -> true | _ -> false + + (* If we know that either the caller or the callee is on UIThread, keep it that way. 
*) + let integrate_summary ~caller ~callee = + match (caller, callee) with UIThread, _ | _, UIThread -> UIThread | _, _ -> AnyThread +end + module Lock = struct (* TODO (T37174859): change to [HilExp.t] *) type t = AccessPath.t @@ -220,7 +251,8 @@ end = struct end module CriticalPairElement = struct - type t = {acquisitions: Acquisitions.t; event: Event.t} [@@deriving compare] + type t = {acquisitions: Acquisitions.t; event: Event.t; thread: ThreadDomain.t} + [@@deriving compare] let pp fmt {acquisitions; event} = F.fprintf fmt "{acquisitions= %a; event= %a}" Acquisitions.pp acquisitions Event.pp event @@ -232,7 +264,7 @@ end module CriticalPair = struct include ExplicitTrace.MakeTraceElem (CriticalPairElement) (ExplicitTrace.DefaultCallPrinter) - let make ~loc acquisitions event = make {acquisitions; event} loc + let make ~loc acquisitions event thread = make {acquisitions; event; thread} loc let is_blocking_call {elem= {event}} = match event with LockAcquire _ -> true | _ -> false @@ -241,17 +273,21 @@ module CriticalPair = struct let may_deadlock ({elem= pair1} as t1 : t) ({elem= pair2} as t2 : t) = - Option.both (get_final_acquire t1) (get_final_acquire t2) - |> Option.exists ~f:(fun (lock1, lock2) -> - (not (Lock.equal lock1 lock2)) - && Acquisitions.lock_is_held lock2 pair1.acquisitions - && Acquisitions.lock_is_held lock1 pair2.acquisitions - && Acquisitions.inter pair1.acquisitions pair2.acquisitions |> Acquisitions.is_empty ) - - - let with_callsite t existing_acquisitions call_site = - let f ({acquisitions} as elem : CriticalPairElement.t) = - {elem with acquisitions= Acquisitions.union existing_acquisitions acquisitions} + ThreadDomain.can_run_in_parallel pair1.thread pair2.thread + && Option.both (get_final_acquire t1) (get_final_acquire t2) + |> Option.exists ~f:(fun (lock1, lock2) -> + (not (Lock.equal lock1 lock2)) + && Acquisitions.lock_is_held lock2 pair1.acquisitions + && Acquisitions.lock_is_held lock1 pair2.acquisitions + && Acquisitions.inter pair1.acquisitions pair2.acquisitions |> Acquisitions.is_empty + ) + + + let with_callsite t existing_acquisitions call_site thread = + let f (elem : CriticalPairElement.t) = + { elem with + acquisitions= Acquisitions.union existing_acquisitions elem.acquisitions + ; thread= ThreadDomain.integrate_summary ~caller:thread ~callee:elem.thread } in let new_t = map ~f t in with_callsite new_t call_site @@ -314,6 +350,11 @@ module CriticalPair = struct Errlog.make_trace_element 0 loc endpoint_descr [] in List.concat (([header_step] :: call_stack) @ [[endpoint_step]]) + + + let is_uithread t = ThreadDomain.is_uithread t.elem.thread + + let can_run_in_parallel t1 t2 = ThreadDomain.can_run_in_parallel t1.elem.thread t2.elem.thread end let is_recursive_lock event tenv = @@ -345,50 +386,19 @@ let should_skip tenv_opt event lock_state = module CriticalPairs = struct include CriticalPair.FiniteSet - let with_callsite astate tenv_opt lock_state call_site = + let with_callsite astate tenv lock_state call_site thread = let existing_acquisitions = LockState.get_acquisitions lock_state in fold (fun ({elem= {event}} as critical_pair : CriticalPair.t) acc -> - if should_skip tenv_opt event lock_state then acc + if should_skip (Some tenv) event lock_state then acc else let new_pair = - CriticalPair.with_callsite critical_pair existing_acquisitions call_site + CriticalPair.with_callsite critical_pair existing_acquisitions call_site thread in add new_pair acc ) astate empty end -module ThreadDomain = struct - type t = UIThread | AnyThread [@@deriving 
equal] - - let top = AnyThread - - let is_top = function AnyThread -> true | UIThread -> false - - let join st1 st2 = - match (st1, st2) with AnyThread, _ | _, AnyThread -> AnyThread | _, _ -> UIThread - - - let ( <= ) ~lhs ~rhs = match (lhs, rhs) with AnyThread, UIThread -> false | _, _ -> true - - let widen ~prev ~next ~num_iters:_ = join prev next - - let pp fmt st = - (match st with UIThread -> "UIThread" | AnyThread -> "AnyThread") |> F.pp_print_string fmt - - - (* There is only one UI thread, so (UIThread || UIThread) is impossible. *) - let can_run_in_parallel st1 st2 = - match (st1, st2) with UIThread, UIThread -> false | _, _ -> true - - - let is_uithread = function UIThread -> true | _ -> false - - (* If we know that either the caller or the callee is on UIThread, keep it that way. *) - let integrate_summary ~caller ~callee = - match (caller, callee) with UIThread, _ | _, UIThread -> UIThread | _, _ -> AnyThread -end - module FlatLock = AbstractDomain.Flat (Lock) module GuardToLockMap = struct @@ -439,11 +449,11 @@ let ( <= ) ~lhs ~rhs = && ThreadDomain.( <= ) ~lhs:lhs.thread ~rhs:rhs.thread -let add_critical_pair tenv_opt lock_state event ~loc acc = +let add_critical_pair tenv_opt lock_state event thread ~loc acc = if should_skip tenv_opt event lock_state then acc else let acquisitions = LockState.get_acquisitions lock_state in - let critical_pair = CriticalPair.make ~loc acquisitions event in + let critical_pair = CriticalPair.make ~loc acquisitions event thread in CriticalPairs.add critical_pair acc @@ -452,26 +462,27 @@ let acquire tenv ({lock_state; critical_pairs} as astate) ~procname ~loc locks = critical_pairs= List.fold locks ~init:critical_pairs ~f:(fun acc lock -> let event = Event.make_acquire lock in - add_critical_pair (Some tenv) lock_state event ~loc acc ) + add_critical_pair (Some tenv) lock_state event astate.thread ~loc acc ) ; lock_state= List.fold locks ~init:lock_state ~f:(fun acc lock -> LockState.acquire ~procname ~loc lock acc ) } -let make_call_with_event tenv_opt new_event ~loc astate = +let make_call_with_event new_event ~loc astate = { astate with critical_pairs= - add_critical_pair tenv_opt astate.lock_state new_event ~loc astate.critical_pairs } + add_critical_pair None astate.lock_state new_event astate.thread ~loc astate.critical_pairs + } let blocking_call ~callee sev ~loc astate = let new_event = Event.make_blocking_call callee sev in - make_call_with_event None new_event ~loc astate + make_call_with_event new_event ~loc astate let strict_mode_call ~callee ~loc astate = let new_event = Event.make_strict_mode_call callee in - make_call_with_event None new_event ~loc astate + make_call_with_event new_event ~loc astate let release ({lock_state} as astate) locks = @@ -483,7 +494,7 @@ let integrate_summary tenv ~caller_summary:({lock_state; critical_pairs; thread} ~loc ~callee_summary = let callsite = CallSite.make callee loc in let critical_pairs' = - CriticalPairs.with_callsite callee_summary.critical_pairs (Some tenv) lock_state callsite + CriticalPairs.with_callsite callee_summary.critical_pairs tenv lock_state callsite thread in { astate with critical_pairs= CriticalPairs.join critical_pairs critical_pairs' diff --git a/infer/src/concurrency/starvationDomain.mli b/infer/src/concurrency/starvationDomain.mli index ec6d1861e..1653c881a 100644 --- a/infer/src/concurrency/starvationDomain.mli +++ b/infer/src/concurrency/starvationDomain.mli @@ -8,6 +8,27 @@ open! IStd module F = Format +(** Domain for thread-type. 
The main goals are + - Track code paths that are explicitly on UI thread (via annotations, or assertions). + - Maintain UI-thread-ness through the call stack (if a callee is on UI thread then the + trace any call site must be on the UI thread too). + - If we are not on the UI thread we assume we are on a background thread. + - Traces with "UI-thread" status cannot interleave but all other combinations can. + - We do not track other annotations (eg WorkerThread or AnyThread) as they can be + erroneously applied -- other checkers should catch those errors (annotation reachability). + - Top is AnyThread, and is used as the initial state for analysis. +*) +module ThreadDomain : sig + type t = UIThread | AnyThread + + include AbstractDomain.WithTop with type t := t + + val can_run_in_parallel : t -> t -> bool + (** Can two thread statuses occur in parallel? Only [UIThread, UIThread] is forbidden. + In addition, this is monotonic wrt the lattice (increasing either argument cannot + transition from true to false). *) +end + (** Abstraction of a path that represents a lock, special-casing comparison to work over type, base variable modulo this and access list *) module Lock : sig @@ -43,7 +64,7 @@ end (** An event and the currently-held locks at the time it occurred. *) module CriticalPairElement : sig - type t = private {acquisitions: Acquisitions.t; event: Event.t} + type t = private {acquisitions: Acquisitions.t; event: Event.t; thread: ThreadDomain.t} end (** A [CriticalPairElement] equipped with a call stack. @@ -64,33 +85,20 @@ module CriticalPair : sig (** outermost callsite location OR lock acquisition *) val may_deadlock : t -> t -> bool + (** two pairs can run in parallel and satisfy the conditions for deadlock *) val make_trace : ?header:string -> ?include_acquisitions:bool -> Typ.Procname.t -> t -> Errlog.loc_trace -end - -module CriticalPairs : AbstractDomain.FiniteSetS with type elt = CriticalPair.t - -(** Domain for thread-type. The main goals are - - Track code paths that are explicitly on UI thread (via annotations, or assertions). - - Maintain UI-thread-ness through the call stack (if a callee is on UI thread then the - trace any call site must be on the UI thread too). - - If we are not on the UI thread we assume we are on a background thread. - - Traces with "UI-thread" status cannot interleave but all other combinations can. - - We do not track other annotations (eg WorkerThread or AnyThread) as they can be - erroneously applied -- other checkers should catch those errors (annotation reachability). - - Top is AnyThread, and is used as the initial state for analysis. 
-*) -module ThreadDomain : sig - type t = UIThread | AnyThread - include AbstractDomain.WithTop with type t := t + val is_uithread : t -> bool + (** is pair about an event on the UI thread *) val can_run_in_parallel : t -> t -> bool - - val is_uithread : t -> bool + (** can two pairs describe events on two threads that can run in parallel *) end +module CriticalPairs : AbstractDomain.FiniteSetS with type elt = CriticalPair.t + module GuardToLockMap : AbstractDomain.WithTop type t = diff --git a/infer/tests/codetoanalyze/java/starvation/Binders.java b/infer/tests/codetoanalyze/java/starvation/Binders.java index d55b9b9f6..dbf5158a2 100644 --- a/infer/tests/codetoanalyze/java/starvation/Binders.java +++ b/infer/tests/codetoanalyze/java/starvation/Binders.java @@ -20,11 +20,17 @@ class Binders { b.transact(0, null, null, 1); } - void interBad() throws RemoteException { + // assert happens after bad call so thread status is still unknown + void FN_interBad() throws RemoteException { b.transact(0, null, null, 0); forceMainThread(); } + void interBad() throws RemoteException { + forceMainThread(); + b.transact(0, null, null, 0); + } + void intraBad() throws RemoteException { OurThreadUtils.assertMainThread(); doTransact(); diff --git a/infer/tests/codetoanalyze/java/starvation/issues.exp b/infer/tests/codetoanalyze/java/starvation/issues.exp index a8e289cba..d2f8f1ca8 100644 --- a/infer/tests/codetoanalyze/java/starvation/issues.exp +++ b/infer/tests/codetoanalyze/java/starvation/issues.exp @@ -1,8 +1,8 @@ codetoanalyze/java/starvation/AsyncTaskGet.java, AsyncTaskGet.lockOnUiThreadBad():void, 31, STARVATION, no_bucket, ERROR, [[Trace 1] `void AsyncTaskGet.lockOnUiThreadBad()`, locks `this.lock` in `class AsyncTaskGet`,[Trace 2] `void AsyncTaskGet.taskGetUnderLock()`, locks `this.lock` in `class AsyncTaskGet`,calls `Object AsyncTask.get()`] codetoanalyze/java/starvation/AsyncTaskGet.java, AsyncTaskGet.taskGetOnUiThreadBad():void, 20, STARVATION, no_bucket, ERROR, [`void AsyncTaskGet.taskGetOnUiThreadBad()`,calls `Object AsyncTask.get()`] -codetoanalyze/java/starvation/Binders.java, Binders.annotationBad():void, 35, STARVATION, no_bucket, ERROR, [`void Binders.annotationBad()`,Method call: `void Binders.doTransact()`,calls `boolean Binder.transact(int,Parcel,Parcel,int)`] -codetoanalyze/java/starvation/Binders.java, Binders.interBad():void, 24, STARVATION, no_bucket, ERROR, [`void Binders.interBad()`,calls `boolean Binder.transact(int,Parcel,Parcel,int)`] -codetoanalyze/java/starvation/Binders.java, Binders.intraBad():void, 30, STARVATION, no_bucket, ERROR, [`void Binders.intraBad()`,Method call: `void Binders.doTransact()`,calls `boolean Binder.transact(int,Parcel,Parcel,int)`] +codetoanalyze/java/starvation/Binders.java, Binders.annotationBad():void, 41, STARVATION, no_bucket, ERROR, [`void Binders.annotationBad()`,Method call: `void Binders.doTransact()`,calls `boolean Binder.transact(int,Parcel,Parcel,int)`] +codetoanalyze/java/starvation/Binders.java, Binders.interBad():void, 31, STARVATION, no_bucket, ERROR, [`void Binders.interBad()`,calls `boolean Binder.transact(int,Parcel,Parcel,int)`] +codetoanalyze/java/starvation/Binders.java, Binders.intraBad():void, 36, STARVATION, no_bucket, ERROR, [`void Binders.intraBad()`,Method call: `void Binders.doTransact()`,calls `boolean Binder.transact(int,Parcel,Parcel,int)`] codetoanalyze/java/starvation/Countdwn.java, Countdwn.awaitOnMainByAnnotBad():void, 21, STARVATION, no_bucket, ERROR, [`void Countdwn.awaitOnMainByAnnotBad()`,calls `void 
CountDownLatch.await()`] codetoanalyze/java/starvation/Countdwn.java, Countdwn.awaitOnMainByCallBad():void, 16, STARVATION, no_bucket, ERROR, [`void Countdwn.awaitOnMainByCallBad()`,calls `void CountDownLatch.await()`] codetoanalyze/java/starvation/FutureGet.java, FutureGet.getDirectBad():void, 21, STARVATION, no_bucket, ERROR, [`void FutureGet.getDirectBad()`,calls `Object Future.get()`]
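
For reference, the thread-status lattice this diff introduces can be exercised on its own. The sketch below mirrors the ThreadDomain definitions from starvationDomain.ml as plain OCaml, dropping the AbstractDomain plumbing and the surrounding Infer modules; it is an illustration only, not part of the patch. It checks the two facts the reporting code now relies on: two UIThread statuses can never interleave, and integrating a callee summary preserves UI-thread-ness from either side.

(* Standalone sketch of the ThreadDomain lattice; illustration only, not Infer code. *)
type thread = UIThread | AnyThread

(* AnyThread is top of the two-point lattice. *)
let join st1 st2 =
  match (st1, st2) with AnyThread, _ | _, AnyThread -> AnyThread | _, _ -> UIThread

(* There is only one UI thread, so two UIThread events cannot run in parallel. *)
let can_run_in_parallel st1 st2 =
  match (st1, st2) with UIThread, UIThread -> false | _, _ -> true

(* If either the caller or the callee is known to be on the UI thread,
   the integrated status is UIThread; otherwise stay at top. *)
let integrate_summary ~caller ~callee =
  match (caller, callee) with UIThread, _ | _, UIThread -> UIThread | _, _ -> AnyThread

let () =
  (* only the UIThread/UIThread combination is excluded *)
  assert (not (can_run_in_parallel UIThread UIThread)) ;
  assert (can_run_in_parallel UIThread AnyThread) ;
  assert (can_run_in_parallel AnyThread AnyThread) ;
  (* raising an argument towards top never turns a parallel pair into a forbidden one *)
  assert (can_run_in_parallel (join UIThread AnyThread) AnyThread) ;
  (* UI-thread-ness is preserved through summary integration, from either side *)
  assert (integrate_summary ~caller:AnyThread ~callee:UIThread = UIThread) ;
  assert (integrate_summary ~caller:UIThread ~callee:AnyThread = UIThread) ;
  assert (integrate_summary ~caller:AnyThread ~callee:AnyThread = AnyThread)

This is why report_starvation and may_deadlock above can prune pairs with can_run_in_parallel: a UIThread/UIThread pair describes events that must occur sequentially on the single UI thread, so no deadlock or starvation report is warranted for it.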