stack_trace_hash  471 kernel/locking/lockdep.c  static struct hlist_head stack_trace_hash[STACK_TRACE_HASH_SIZE];
stack_trace_hash  508 kernel/locking/lockdep.c  hash_head = stack_trace_hash + (hash & (STACK_TRACE_HASH_SIZE - 1));
stack_trace_hash  526 kernel/locking/lockdep.c  for (i = 0; i < ARRAY_SIZE(stack_trace_hash); i++) {
stack_trace_hash  527 kernel/locking/lockdep.c  hlist_for_each_entry(trace, &stack_trace_hash[i], hash_entry) {
stack_trace_hash  541 kernel/locking/lockdep.c  for (i = 0; i < ARRAY_SIZE(stack_trace_hash); i++)
stack_trace_hash  542 kernel/locking/lockdep.c  if (!hlist_empty(&stack_trace_hash[i]))
stack_trace_hash 5240 kernel/locking/lockdep.c  (sizeof(stack_trace) + sizeof(stack_trace_hash)) / 1024
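
The usage sites above show the familiar open-hashing pattern: a statically sized array of bucket heads, a bucket chosen with "hash & (SIZE - 1)" (so the table size must be a power of two), and chain walks over each bucket. The following is a minimal userspace sketch of that pattern only; the node layout, hash function, and names (trace_entry, save_trace, TRACE_HASH_SIZE) are simplified stand-ins, not the kernel's hlist_head or lockdep code.

    /*
     * Sketch of the bucket-array + "hash & (SIZE - 1)" pattern seen in
     * stack_trace_hash. Types and helpers are illustrative only.
     */
    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    #define TRACE_HASH_SIZE 16384          /* must stay a power of two */

    struct trace_entry {
        struct trace_entry *next;          /* bucket chain */
        unsigned long hash;                /* full hash, checked before memcmp */
        size_t nr_entries;
        unsigned long entries[8];          /* truncated stack for the sketch */
    };

    static struct trace_entry *trace_hash[TRACE_HASH_SIZE];

    /* FNV-1a over the captured addresses; any stable hash would do here. */
    static unsigned long hash_trace(const unsigned long *entries, size_t n)
    {
        unsigned long h = 0xcbf29ce484222325UL;

        for (size_t i = 0; i < n; i++) {
            h ^= entries[i];
            h *= 0x100000001b3UL;
        }
        return h;
    }

    /* Return the existing entry for this trace, or insert and return a new one. */
    static struct trace_entry *save_trace(const unsigned long *entries, size_t n)
    {
        unsigned long hash = hash_trace(entries, n);
        struct trace_entry **head = &trace_hash[hash & (TRACE_HASH_SIZE - 1)];

        for (struct trace_entry *t = *head; t; t = t->next) {
            if (t->hash == hash && t->nr_entries == n &&
                !memcmp(t->entries, entries, n * sizeof(*entries)))
                return t;                  /* already saved: deduplicate */
        }

        struct trace_entry *t = calloc(1, sizeof(*t));
        if (!t)
            return NULL;
        t->hash = hash;
        t->nr_entries = n;
        memcpy(t->entries, entries, n * sizeof(*entries));
        t->next = *head;                   /* link in at the bucket head */
        *head = t;
        return t;
    }

    int main(void)
    {
        unsigned long trace[] = { 0xffffffff81000010UL, 0xffffffff81000200UL };

        struct trace_entry *a = save_trace(trace, 2);
        struct trace_entry *b = save_trace(trace, 2);
        printf("deduplicated: %s\n", a == b ? "yes" : "no");
        return 0;
    }

Saving the same trace twice returns the first entry, which mirrors why lockdep hashes traces at all: identical stack traces are stored once and shared.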