When looking up and adding a chain (i.e., in lookup_chain_cache_add()
and only in it), explicitly specify the depth of the held lock stack.
For now, the depth passed is simply curr->lockdep_depth.

No functional change.

Signed-off-by: Yuyang Du <duyuy...@gmail.com>
---
 kernel/locking/lockdep.c | 48 ++++++++++++++++++++++++++----------------------
 1 file changed, 26 insertions(+), 22 deletions(-)

diff --git a/kernel/locking/lockdep.c b/kernel/locking/lockdep.c
index 095e532..5d19dc6 100644
--- a/kernel/locking/lockdep.c
+++ b/kernel/locking/lockdep.c
@@ -2527,12 +2527,12 @@ struct lock_class *lock_chain_get_class(struct lock_chain *chain, int i)
  * Returns the index of the first held_lock of the current chain
  */
 static inline int get_first_held_lock(struct task_struct *curr,
-                                       struct held_lock *hlock)
+                                     struct held_lock *hlock, int depth)
 {
        int i;
        struct held_lock *hlock_curr;
 
-       for (i = curr->lockdep_depth - 1; i >= 0; i--) {
+       for (i = depth - 1; i >= 0; i--) {
                hlock_curr = curr->held_locks + i;
                if (hlock_curr->irq_context != hlock->irq_context)
                        break;
@@ -2557,12 +2557,12 @@ static u64 print_chain_key_iteration(int class_idx, u64 chain_key)
 }
 
 static void
-print_chain_keys_held_locks(struct task_struct *curr, struct held_lock *hlock_next)
+print_chain_keys_held_locks(struct task_struct *curr,
+                           struct held_lock *hlock_next, int depth)
 {
        struct held_lock *hlock;
        u64 chain_key = INITIAL_CHAIN_KEY;
-       int depth = curr->lockdep_depth;
-       int i = get_first_held_lock(curr, hlock_next);
+       int i = get_first_held_lock(curr, hlock_next, depth);
 
        printk("depth: %u (irq_context %u)\n", depth - i + 1,
                hlock_next->irq_context);
@@ -2594,8 +2594,8 @@ static void print_chain_keys_chain(struct lock_chain *chain)
 }
 
 static void print_collision(struct task_struct *curr,
-                       struct held_lock *hlock_next,
-                       struct lock_chain *chain)
+                           struct held_lock *hlock_next,
+                           struct lock_chain *chain, int depth)
 {
        pr_warn("\n");
        pr_warn("============================\n");
@@ -2606,7 +2606,7 @@ static void print_collision(struct task_struct *curr,
        pr_warn("Hash chain already cached but the contents don't match!\n");
 
        pr_warn("Held locks:");
-       print_chain_keys_held_locks(curr, hlock_next);
+       print_chain_keys_held_locks(curr, hlock_next, depth);
 
        pr_warn("Locks in cached chain:");
        print_chain_keys_chain(chain);
@@ -2622,17 +2622,16 @@ static void print_collision(struct task_struct *curr,
  * that there was a collision during the calculation of the chain_key.
  * Returns: 0 not passed, 1 passed
  */
-static int check_no_collision(struct task_struct *curr,
-                       struct held_lock *hlock,
-                       struct lock_chain *chain)
+static int check_no_collision(struct task_struct *curr, struct held_lock *hlock,
+                             struct lock_chain *chain, int depth)
 {
 #ifdef CONFIG_DEBUG_LOCKDEP
        int i, j, id;
 
-       i = get_first_held_lock(curr, hlock);
+       i = get_first_held_lock(curr, hlock, depth);
 
-       if (DEBUG_LOCKS_WARN_ON(chain->depth != curr->lockdep_depth - (i - 1))) {
-               print_collision(curr, hlock, chain);
+       if (DEBUG_LOCKS_WARN_ON(chain->depth != depth - (i - 1))) {
+               print_collision(curr, hlock, chain, depth);
                return 0;
        }
 
@@ -2640,7 +2639,7 @@ static int check_no_collision(struct task_struct *curr,
                id = curr->held_locks[i].class_idx;
 
                if (DEBUG_LOCKS_WARN_ON(chain_hlocks[chain->base + j] != id)) {
-                       print_collision(curr, hlock, chain);
+                       print_collision(curr, hlock, chain, depth);
                        return 0;
                }
        }
@@ -2684,7 +2683,7 @@ static struct lock_chain *alloc_lock_chain(void)
  */
 static inline struct lock_chain *add_chain_cache(struct task_struct *curr,
                                                 struct held_lock *hlock,
-                                                u64 chain_key)
+                                                u64 chain_key, int depth)
 {
        struct lock_class *class = hlock_class(hlock);
        struct hlist_head *hash_head = chainhashentry(chain_key);
@@ -2710,8 +2709,8 @@ static inline struct lock_chain *add_chain_cache(struct task_struct *curr,
        }
        chain->chain_key = chain_key;
        chain->irq_context = hlock->irq_context;
-       i = get_first_held_lock(curr, hlock);
-       chain->depth = curr->lockdep_depth + 1 - i;
+       i = get_first_held_lock(curr, hlock, depth);
+       chain->depth = depth + 1 - i;
 
        BUILD_BUG_ON((1UL << 24) <= ARRAY_SIZE(chain_hlocks));
        BUILD_BUG_ON((1UL << 6)  <= ARRAY_SIZE(curr->held_locks));
@@ -2764,17 +2763,21 @@ static inline struct lock_chain *lookup_chain_cache(u64 chain_key)
  * add it and return the chain - in this case the new dependency
  * chain will be validated. If the key is already hashed, return
  * NULL. (On return with the new chain graph_lock is held.)
+ *
+ * If the key is not hashed, the new chain is composed of @hlock
+ * and @depth worth of the current held lock stack, of which the
+ * held locks are in the same context as @hlock.
  */
 static inline struct lock_chain *
 lookup_chain_cache_add(struct task_struct *curr, struct held_lock *hlock,
-                      u64 chain_key)
+                      u64 chain_key, int depth)
 {
        struct lock_class *class = hlock_class(hlock);
        struct lock_chain *chain = lookup_chain_cache(chain_key);
 
        if (chain) {
 cache_hit:
-               if (!check_no_collision(curr, hlock, chain))
+               if (!check_no_collision(curr, hlock, chain, depth))
                        return NULL;
 
                if (very_verbose(class)) {
@@ -2804,7 +2807,7 @@ static inline struct lock_chain *lookup_chain_cache(u64 chain_key)
                goto cache_hit;
        }
 
-       return add_chain_cache(curr, hlock, chain_key);
+       return add_chain_cache(curr, hlock, chain_key, depth);
 }
 
 static int validate_chain(struct task_struct *curr, struct held_lock *hlock,
@@ -2822,7 +2825,8 @@ static int validate_chain(struct task_struct *curr, struct held_lock *hlock,
         * graph_lock for us)
         */
        if (!hlock->trylock && hlock->check &&
-           (chain = lookup_chain_cache_add(curr, hlock, chain_key))) {
+           (chain = lookup_chain_cache_add(curr, hlock, chain_key,
+                                           curr->lockdep_depth))) {
                /*
                 * Check whether last held lock:
                 *
-- 
1.8.3.1

Reply via email to