[starvation] introduce a proper thread domain

Summary:
Domain for thread-type. The main goals are
- Track code paths that are explicitly on the UI thread (via annotations or assertions).
- Maintain UI-thread-ness through the call stack (if a callee is on the UI thread then the
  trace through any call site must be on the UI thread too).
- If we are not on the UI thread we assume we are on a background thread.
- Traces with "UI-thread" status cannot interleave, but all other combinations can.
- We do not track other annotations (e.g. WorkerThread or AnyThread) as they can be
  erroneously applied -- other checkers should catch those errors (annotation reachability).
- Top is AnyThread, and is used as the initial state for analysis.

Interestingly, choosing the right strategy for the initial state and for applying callee summaries gets rid of some false negatives in the tests, even though we have not introduced any path sensitivity yet.
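
To make the lattice concrete, here is a minimal standalone OCaml sketch of the semantics described above. The function bodies mirror the ThreadDomain module added in this diff; the type name `thread` and the final assertions are illustrative only.

(* Standalone sketch of the thread lattice; mirrors ThreadDomain below. *)
type thread = UIThread | AnyThread

(* AnyThread is top, so joining with it loses UI-thread-ness. *)
let join t1 t2 =
  match (t1, t2) with AnyThread, _ | _, AnyThread -> AnyThread | _, _ -> UIThread

(* There is only one UI thread, so two UI-thread traces can never interleave. *)
let can_run_in_parallel t1 t2 =
  match (t1, t2) with UIThread, UIThread -> false | _, _ -> true

(* UI-thread-ness propagates both ways across a call site. *)
let integrate_summary ~caller ~callee =
  match (caller, callee) with UIThread, _ | _, UIThread -> UIThread | _, _ -> AnyThread

let () =
  assert (join UIThread AnyThread = AnyThread) ;
  assert (not (can_run_in_parallel UIThread UIThread)) ;
  assert (integrate_summary ~caller:AnyThread ~callee:UIThread = UIThread)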

Reviewed By: jvillard

Differential Revision: D17929390

fbshipit-source-id: d72871034
Author: Nikos Gorogiannis (committed by Facebook GitHub Bot)
parent 6d78026e8e
commit 734ae60cb7

@@ -119,7 +119,7 @@ module TransferFunctions (CFG : ProcCfg.S) = struct
         let locks = List.hd actuals |> Option.to_list in
         do_lock locks loc astate |> do_unlock locks
     | NoEffect when is_java && is_ui_thread_model callee ->
-        Domain.set_on_ui_thread astate ()
+        Domain.set_on_ui_thread astate
     | NoEffect when is_java && is_strict_mode_violation tenv callee actuals ->
         Domain.strict_mode_call ~callee ~loc astate
     | NoEffect when is_java -> (
@@ -163,7 +163,7 @@ let analyze_procedure {Callbacks.exe_env; summary} =
   in
   let initial =
     if ConcurrencyModels.runs_on_ui_thread ~attrs_of_pname tenv procname then
-      StarvationDomain.set_on_ui_thread initial ()
+      StarvationDomain.set_on_ui_thread initial
     else initial
   in
   let filter_blocks =
@@ -401,7 +401,7 @@ let report_lockless_violations (tenv, summary) StarvationDomain.{critical_pairs}
    inner class but this is no longer obvious in the path, because of nested-class path normalisation.
    The net effect of the above issues is that we will only see these locks in conflicting pairs
    once, as opposed to twice with all other deadlock pairs. *)
-let report_deadlocks env StarvationDomain.{critical_pairs; ui} report_map' =
+let report_deadlocks env StarvationDomain.{critical_pairs; thread} report_map' =
   let open StarvationDomain in
   let _, current_summary = env in
   let current_pname = Summary.get_proc_name current_summary in
@@ -457,9 +457,9 @@ let report_deadlocks env StarvationDomain.{critical_pairs; ui} report_map' =
           (* for each summary related to the endpoint, analyse and report on its pairs *)
           fold_reportable_summaries env endpoint_class ~init:report_map
             ~f:(fun acc
-               (endpoint_pname, {critical_pairs= endp_critical_pairs; ui= endp_ui})
+               (endpoint_pname, {critical_pairs= endp_critical_pairs; thread= endp_thread})
                ->
-              if UIThreadDomain.is_bottom ui || UIThreadDomain.is_bottom endp_ui then
+              if ThreadDomain.can_run_in_parallel thread endp_thread then
                 CriticalPairs.fold
                   (report_endpoint_elem elem endpoint_pname)
                   endp_critical_pairs acc
@@ -468,7 +468,7 @@ let report_deadlocks env StarvationDomain.{critical_pairs; ui} report_map' =
   CriticalPairs.fold report_on_current_elem critical_pairs report_map'

-let report_starvation env {StarvationDomain.critical_pairs; ui} report_map' =
+let report_starvation env {StarvationDomain.critical_pairs; thread} report_map' =
   let open StarvationDomain in
   let _, current_summary = env in
   let current_pname = Summary.get_proc_name current_summary in
@@ -521,17 +521,17 @@ let report_starvation env {StarvationDomain.critical_pairs; ui} report_map' =
            and retrieve all the summaries of the methods of that class *)
         (* for each summary related to the endpoint, analyse and report on its pairs *)
         fold_reportable_summaries env endpoint_class ~init:report_map
-          ~f:(fun acc (endpoint_pname, {critical_pairs; ui}) ->
+          ~f:(fun acc (endpoint_pname, {critical_pairs; thread}) ->
             (* skip methods on ui thread, as they cannot run in parallel to us *)
-            if UIThreadDomain.is_bottom ui then
+            if ThreadDomain.is_uithread thread then acc
+            else
               CriticalPairs.fold
                 (report_remote_block critical_pair endpoint_lock endpoint_pname)
-                critical_pairs acc
-            else acc ) )
+                critical_pairs acc ) )
   in
   (* do not report starvation/strict mode warnings on constructors, keep that for callers *)
-  if Typ.Procname.is_constructor current_pname then report_map'
-  else if UIThreadDomain.is_bottom ui then report_map'
+  if Typ.Procname.is_constructor current_pname || not (ThreadDomain.is_uithread thread) then
+    report_map'
   else CriticalPairs.fold report_on_current_elem critical_pairs report_map'

@@ -358,7 +358,37 @@ module CriticalPairs = struct
       astate empty
 end

-module UIThreadDomain = AbstractDomain.BooleanOr
+module ThreadDomain = struct
+  type t = UIThread | AnyThread [@@deriving equal]
+
+  let top = AnyThread
+
+  let is_top = function AnyThread -> true | UIThread -> false
+
+  let join st1 st2 =
+    match (st1, st2) with AnyThread, _ | _, AnyThread -> AnyThread | _, _ -> UIThread
+
+  let ( <= ) ~lhs ~rhs = match (lhs, rhs) with AnyThread, UIThread -> false | _, _ -> true
+
+  let widen ~prev ~next ~num_iters:_ = join prev next
+
+  let pp fmt st =
+    (match st with UIThread -> "UIThread" | AnyThread -> "AnyThread") |> F.pp_print_string fmt
+
+  (* There is only one UI thread, so (UIThread || UIThread) is impossible. *)
+  let can_run_in_parallel st1 st2 =
+    match (st1, st2) with UIThread, UIThread -> false | _, _ -> true
+
+  let is_uithread = function UIThread -> true | _ -> false
+
+  (* If we know that either the caller or the callee is on UIThread, keep it that way. *)
+  let integrate_summary ~caller ~callee =
+    match (caller, callee) with UIThread, _ | _, UIThread -> UIThread | _, _ -> AnyThread
+end

 module FlatLock = AbstractDomain.Flat (Lock)

 module GuardToLockMap = struct
@@ -373,31 +403,31 @@ type t =
   { guard_map: GuardToLockMap.t
   ; lock_state: LockState.t
   ; critical_pairs: CriticalPairs.t
-  ; ui: UIThreadDomain.t }
+  ; thread: ThreadDomain.t }

 let bottom =
   { guard_map= GuardToLockMap.empty
   ; lock_state= LockState.top
   ; critical_pairs= CriticalPairs.empty
-  ; ui= UIThreadDomain.bottom }
+  ; thread= ThreadDomain.top }

-let is_bottom {guard_map; lock_state; critical_pairs; ui} =
+let is_bottom {guard_map; lock_state; critical_pairs; thread} =
   GuardToLockMap.is_empty guard_map && LockState.is_top lock_state
   && CriticalPairs.is_empty critical_pairs
-  && UIThreadDomain.is_bottom ui
+  && ThreadDomain.is_top thread

-let pp fmt {guard_map; lock_state; critical_pairs; ui} =
-  F.fprintf fmt "{guard_map= %a; lock_state= %a; critical_pairs= %a; ui= %a}" GuardToLockMap.pp
-    guard_map LockState.pp lock_state CriticalPairs.pp critical_pairs UIThreadDomain.pp ui
+let pp fmt {guard_map; lock_state; critical_pairs; thread} =
+  F.fprintf fmt "{guard_map= %a; lock_state= %a; critical_pairs= %a; thread= %a}" GuardToLockMap.pp
+    guard_map LockState.pp lock_state CriticalPairs.pp critical_pairs ThreadDomain.pp thread

 let join lhs rhs =
   { guard_map= GuardToLockMap.join lhs.guard_map rhs.guard_map
   ; lock_state= LockState.join lhs.lock_state rhs.lock_state
   ; critical_pairs= CriticalPairs.join lhs.critical_pairs rhs.critical_pairs
-  ; ui= UIThreadDomain.join lhs.ui rhs.ui }
+  ; thread= ThreadDomain.join lhs.thread rhs.thread }

 let widen ~prev ~next ~num_iters:_ = join prev next
@@ -406,7 +436,7 @@ let ( <= ) ~lhs ~rhs =
   GuardToLockMap.( <= ) ~lhs:lhs.guard_map ~rhs:rhs.guard_map
   && LockState.( <= ) ~lhs:lhs.lock_state ~rhs:rhs.lock_state
   && CriticalPairs.( <= ) ~lhs:lhs.critical_pairs ~rhs:rhs.critical_pairs
-  && UIThreadDomain.( <= ) ~lhs:lhs.ui ~rhs:rhs.ui
+  && ThreadDomain.( <= ) ~lhs:lhs.thread ~rhs:rhs.thread

 let add_critical_pair tenv_opt lock_state event ~loc acc =
@@ -449,7 +479,7 @@ let release ({lock_state} as astate) locks =
     lock_state= List.fold locks ~init:lock_state ~f:(fun acc l -> LockState.release l acc) }

-let integrate_summary tenv ~caller_summary:({lock_state; critical_pairs; ui} as astate) ~callee
+let integrate_summary tenv ~caller_summary:({lock_state; critical_pairs; thread} as astate) ~callee
     ~loc ~callee_summary =
   let callsite = CallSite.make callee loc in
   let critical_pairs' =
@@ -457,13 +487,10 @@ let integrate_summary tenv ~caller_summary:({lock_state; critical_pairs; ui} as
   in
   { astate with
     critical_pairs= CriticalPairs.join critical_pairs critical_pairs'
-  ; ui= UIThreadDomain.join ui callee_summary.ui }
+  ; thread= ThreadDomain.integrate_summary ~caller:thread ~callee:callee_summary.thread }

-let set_on_ui_thread ({ui} as astate) () =
-  let ui = UIThreadDomain.join ui true in
-  {astate with ui}
+let set_on_ui_thread astate = {astate with thread= ThreadDomain.UIThread}

 let add_guard ~acquire_now ~procname ~loc tenv astate guard lock =
   let astate = {astate with guard_map= GuardToLockMap.add_guard ~guard ~lock astate.guard_map} in

@@ -71,7 +71,25 @@ end
 module CriticalPairs : AbstractDomain.FiniteSetS with type elt = CriticalPair.t

-module UIThreadDomain : AbstractDomain.WithBottom
+(** Domain for thread-type. The main goals are
+    - Track code paths that are explicitly on the UI thread (via annotations or assertions).
+    - Maintain UI-thread-ness through the call stack (if a callee is on the UI thread then the
+      trace through any call site must be on the UI thread too).
+    - If we are not on the UI thread we assume we are on a background thread.
+    - Traces with "UI-thread" status cannot interleave, but all other combinations can.
+    - We do not track other annotations (e.g. WorkerThread or AnyThread) as they can be
+      erroneously applied -- other checkers should catch those errors (annotation reachability).
+    - Top is AnyThread, and is used as the initial state for analysis. *)
+module ThreadDomain : sig
+  type t = UIThread | AnyThread
+
+  include AbstractDomain.WithTop with type t := t
+
+  val can_run_in_parallel : t -> t -> bool
+
+  val is_uithread : t -> bool
+end

 module GuardToLockMap : AbstractDomain.WithTop
@@ -79,7 +97,7 @@ type t =
   { guard_map: GuardToLockMap.t
   ; lock_state: LockState.t
   ; critical_pairs: CriticalPairs.t
-  ; ui: UIThreadDomain.t }
+  ; thread: ThreadDomain.t }

 include AbstractDomain.WithBottom with type t := t
@@ -93,8 +111,8 @@ val blocking_call : callee:Typ.Procname.t -> StarvationModels.severity -> loc:Lo
 val strict_mode_call : callee:Typ.Procname.t -> loc:Location.t -> t -> t

-val set_on_ui_thread : t -> unit -> t
-(** set the property "runs on UI thread" to true *)
+val set_on_ui_thread : t -> t
+(** signal that the procedure is running on UI thread *)

 val add_guard :
      acquire_now:bool

@@ -9,7 +9,7 @@
 class ThreadSensitivity {
   Object monitorA, monitorB;

-  void FN_conditionalAssertMainThread_Bad(boolean b) {
+  void conditionalAssertMainThread_Bad(boolean b) {
     if (b) {
       // this branch asserts on Main thread
       OurThreadUtils.assertMainThread();
@@ -66,7 +66,7 @@ class ThreadSensitivity {
   Object monitorG, monitorH;

-  public void FN_confusedAssertBad(boolean b, boolean c) {
+  public void confusedAssertBad(boolean b, boolean c) {
     if (b) {
       OurThreadUtils.assertOnBackgroundThread();
     } else {
@@ -86,4 +86,27 @@ class ThreadSensitivity {
       }
     }
   }
+
+  Object monitorI, monitorJ;
+
+  public void FP_confusedAssertOk(boolean b) {
+    if (b) {
+      OurThreadUtils.assertOnBackgroundThread();
+    }
+
+    // b determines if running on UI thread, should NOT report
+    if (b) {
+      synchronized (monitorI) {
+        synchronized (monitorJ) {
+        }
+      }
+    }
+
+    if (b) {
+      synchronized (monitorJ) {
+        synchronized (monitorI) {
+        }
+      }
+    }
+  }
 }

@@ -75,8 +75,14 @@ codetoanalyze/java/starvation/ThreadDeadlock.java, ThreadDeadlock.notAnnotatedBB
 codetoanalyze/java/starvation/ThreadDeadlock.java, ThreadDeadlock.notAnnotatedBadA():void, 71, DEADLOCK, no_bucket, ERROR, [[Trace 1] `void ThreadDeadlock.notAnnotatedBadA()`, locks `this` in `class ThreadDeadlock`, locks `this.lockD` in `class ThreadDeadlock`,[Trace 2] `void ThreadDeadlock.notAnnotatedBBad()`, locks `this.lockD` in `class ThreadDeadlock`, locks `this` in `class ThreadDeadlock`]
 codetoanalyze/java/starvation/ThreadSensitivity.java, ThreadSensitivity.FP_conditionalIsMainThread_Ok():void, 35, DEADLOCK, no_bucket, ERROR, [[Trace 1] `void ThreadSensitivity.FP_conditionalIsMainThread_Ok()`, locks `this.monitorC` in `class ThreadSensitivity`, locks `this.monitorD` in `class ThreadSensitivity`,[Trace 2] `void ThreadSensitivity.FP_conditionalIsUiThread_Ok()`, locks `this.monitorD` in `class ThreadSensitivity`, locks `this.monitorC` in `class ThreadSensitivity`]
 codetoanalyze/java/starvation/ThreadSensitivity.java, ThreadSensitivity.FP_conditionalIsUiThread_Ok():void, 44, DEADLOCK, no_bucket, ERROR, [[Trace 1] `void ThreadSensitivity.FP_conditionalIsUiThread_Ok()`, locks `this.monitorD` in `class ThreadSensitivity`, locks `this.monitorC` in `class ThreadSensitivity`,[Trace 2] `void ThreadSensitivity.FP_conditionalIsMainThread_Ok()`, locks `this.monitorC` in `class ThreadSensitivity`, locks `this.monitorD` in `class ThreadSensitivity`]
+codetoanalyze/java/starvation/ThreadSensitivity.java, ThreadSensitivity.FP_confusedAssertOk(boolean):void, 99, DEADLOCK, no_bucket, ERROR, [[Trace 1] `void ThreadSensitivity.FP_confusedAssertOk(boolean)`, locks `this.monitorI` in `class ThreadSensitivity`, locks `this.monitorJ` in `class ThreadSensitivity`,[Trace 2] `void ThreadSensitivity.FP_confusedAssertOk(boolean)`, locks `this.monitorJ` in `class ThreadSensitivity`, locks `this.monitorI` in `class ThreadSensitivity`]
+codetoanalyze/java/starvation/ThreadSensitivity.java, ThreadSensitivity.FP_confusedAssertOk(boolean):void, 106, DEADLOCK, no_bucket, ERROR, [[Trace 1] `void ThreadSensitivity.FP_confusedAssertOk(boolean)`, locks `this.monitorJ` in `class ThreadSensitivity`, locks `this.monitorI` in `class ThreadSensitivity`,[Trace 2] `void ThreadSensitivity.FP_confusedAssertOk(boolean)`, locks `this.monitorI` in `class ThreadSensitivity`, locks `this.monitorJ` in `class ThreadSensitivity`]
+codetoanalyze/java/starvation/ThreadSensitivity.java, ThreadSensitivity.conditionalAssertMainThread_Bad(boolean):void, 16, DEADLOCK, no_bucket, ERROR, [[Trace 1] `void ThreadSensitivity.conditionalAssertMainThread_Bad(boolean)`, locks `this.monitorA` in `class ThreadSensitivity`, locks `this.monitorB` in `class ThreadSensitivity`,[Trace 2] `void ThreadSensitivity.conditionalAssertMainThread_Bad(boolean)`, locks `this.monitorB` in `class ThreadSensitivity`, locks `this.monitorA` in `class ThreadSensitivity`]
+codetoanalyze/java/starvation/ThreadSensitivity.java, ThreadSensitivity.conditionalAssertMainThread_Bad(boolean):void, 22, DEADLOCK, no_bucket, ERROR, [[Trace 1] `void ThreadSensitivity.conditionalAssertMainThread_Bad(boolean)`, locks `this.monitorB` in `class ThreadSensitivity`, locks `this.monitorA` in `class ThreadSensitivity`,[Trace 2] `void ThreadSensitivity.conditionalAssertMainThread_Bad(boolean)`, locks `this.monitorA` in `class ThreadSensitivity`, locks `this.monitorB` in `class ThreadSensitivity`]
 codetoanalyze/java/starvation/ThreadSensitivity.java, ThreadSensitivity.conditionalNegatedIsMainThread_Bad():void, 55, DEADLOCK, no_bucket, ERROR, [[Trace 1] `void ThreadSensitivity.conditionalNegatedIsMainThread_Bad()`, locks `this.monitorE` in `class ThreadSensitivity`, locks `this.monitorF` in `class ThreadSensitivity`,[Trace 2] `void ThreadSensitivity.conditionalNegatedIsMainThread_Bad()`, locks `this.monitorF` in `class ThreadSensitivity`, locks `this.monitorE` in `class ThreadSensitivity`]
 codetoanalyze/java/starvation/ThreadSensitivity.java, ThreadSensitivity.conditionalNegatedIsMainThread_Bad():void, 60, DEADLOCK, no_bucket, ERROR, [[Trace 1] `void ThreadSensitivity.conditionalNegatedIsMainThread_Bad()`, locks `this.monitorF` in `class ThreadSensitivity`, locks `this.monitorE` in `class ThreadSensitivity`,[Trace 2] `void ThreadSensitivity.conditionalNegatedIsMainThread_Bad()`, locks `this.monitorE` in `class ThreadSensitivity`, locks `this.monitorF` in `class ThreadSensitivity`]
+codetoanalyze/java/starvation/ThreadSensitivity.java, ThreadSensitivity.confusedAssertBad(boolean,boolean):void, 78, DEADLOCK, no_bucket, ERROR, [[Trace 1] `void ThreadSensitivity.confusedAssertBad(boolean,boolean)`, locks `this.monitorG` in `class ThreadSensitivity`, locks `this.monitorH` in `class ThreadSensitivity`,[Trace 2] `void ThreadSensitivity.confusedAssertBad(boolean,boolean)`, locks `this.monitorH` in `class ThreadSensitivity`, locks `this.monitorG` in `class ThreadSensitivity`]
+codetoanalyze/java/starvation/ThreadSensitivity.java, ThreadSensitivity.confusedAssertBad(boolean,boolean):void, 83, DEADLOCK, no_bucket, ERROR, [[Trace 1] `void ThreadSensitivity.confusedAssertBad(boolean,boolean)`, locks `this.monitorH` in `class ThreadSensitivity`, locks `this.monitorG` in `class ThreadSensitivity`,[Trace 2] `void ThreadSensitivity.confusedAssertBad(boolean,boolean)`, locks `this.monitorG` in `class ThreadSensitivity`, locks `this.monitorH` in `class ThreadSensitivity`]
 codetoanalyze/java/starvation/ThreadSleep.java, ThreadSleep.indirectSleepOnUIThreadBad():void, 24, STARVATION, no_bucket, ERROR, [[Trace 1] `void ThreadSleep.indirectSleepOnUIThreadBad()`, locks `this.lock` in `class ThreadSleep`,[Trace 2] `void ThreadSleep.lockAndSleepOnNonUIThread()`, locks `this.lock` in `class ThreadSleep`,Method call: `void ThreadSleep.sleepOnAnyThreadOk()`,calls `void Thread.sleep(long)`]
 codetoanalyze/java/starvation/ThreadSleep.java, ThreadSleep.sleepOnUIThreadBad():void, 17, STARVATION, no_bucket, ERROR, [`void ThreadSleep.sleepOnUIThreadBad()`,calls `void Thread.sleep(long)`]
 codetoanalyze/java/starvation/Workers.java, Workers.uiThreadBad():void, 28, STARVATION, no_bucket, ERROR, [`void Workers.uiThreadBad()`,Method call: `void Workers.workerOk()`,Method call: `void Workers.doTransact()`,calls `boolean Binder.transact(int,Parcel,Parcel,int)`]
