1 #include <linux/kernel.h>
2 #include <linux/kthread.h>
3 #include <linux/spinlock.h>
9 // How far up the stack to track the caller
12 // 2 => caller of v3_lock..
// Record return addresses at stack depths FIRST..LAST inclusive for each event.
14 #define STEP_BACK_DEPTH_FIRST 1
15 #define STEP_BACK_DEPTH_LAST 4
// Number of return-address slots stored per backtrace (LAST-FIRST+1 = 4 here).
16 #define STEP_BACK_DEPTH (STEP_BACK_DEPTH_LAST-STEP_BACK_DEPTH_FIRST+1)
18 // show when multiple locks are held simultaneously
19 // This is the minimum number
// Emit a warning dump once this many locks (or irq-saves) are held at once.
20 #define WARN_MULTIPLE_THRESHOLD 3
// Per-lock tracking record kept in the global state[] table.
// NOTE(review): this is the interior of a typedef'd struct (lockcheck_state_t,
// per its uses below); the "typedef struct {" header and closing "} ...;"
// lines are outside the visible chunk.
23 int inuse; // nonzero if this is in use
24 void *lock; // the lock
25 void *allocator[STEP_BACK_DEPTH]; // backtrace of the code path that allocated the lock
27 int lockcount; // how many times it's been locked/unlocked (lock=+1, unlock=-1)
28 int irqcount; // how many times interrupts have been turned off (+1/-1)
29 void *lastlocker[STEP_BACK_DEPTH]; // backtrace of the most recent plain lock
31 void *lastunlocker[STEP_BACK_DEPTH]; // backtrace of the most recent plain unlock
33 void *lastirqlocker[STEP_BACK_DEPTH]; // backtrace of the most recent irqsave lock
35 unsigned long lastlockflags; // their flags
// NOTE(review): the terminating ';' of this declarator is split onto the next
// line — legal C, but easy to misread; worth rejoining in a cleanup pass.
36 void *lastirqunlocker[STEP_BACK_DEPTH]
37 ; // who last unlocked
38 unsigned long lastunlockflags; // their flags
// Spinlock guarding the tracking table itself (taken with irqsave in
// get_lock_entry below).
42 static spinlock_t lock;
// Fixed-size table of tracking records, one slot per checked lock.
44 static lockcheck_state_t state[NUM_LOCKS];
// Claim a free slot from state[] under the table spinlock (irqs disabled).
// Presumably returns the claimed entry, or NULL when the table is full —
// the return statements are in lines not visible in this chunk; confirm.
46 static lockcheck_state_t *get_lock_entry(void)
52 spin_lock_irqsave(&lock,f);
// Linear scan over all slots looking for one that is free.
54 for (i=0;i<NUM_LOCKS;i++) {
62 spin_unlock_irqrestore(&lock,f);
// Look up the tracking entry for a given lock pointer by linear scan.
// NOTE(review): no locking visible here, unlike get_lock_entry — the scan
// appears to run without holding the table spinlock; verify against the
// hidden body lines.
72 static lockcheck_state_t *find_lock_entry(void *lock)
77 for (i=0;i<NUM_LOCKS;i++) {
// Match only slots that are both in use and track this exact lock pointer.
79 if (l->inuse && l->lock == lock) {
// Return a tracking slot to the free pool (body not visible in this chunk;
// presumably clears l->inuse — confirm).
87 static void free_lock_entry(lockcheck_state_t *l)
// One-time initialization: zero the tracking table and init its spinlock.
// NOTE(review): empty parens declare an unspecified-parameter function in C;
// "(void)" would be the stricter prototype form.
94 void palacios_lockcheck_init()
96 memset(state,0,sizeof(lockcheck_state_t)*NUM_LOCKS);
97 spin_lock_init(&lock);
98 DEBUG("LOCKCHECK: LOCK CHECKING INITED\n");
102 // This needs to be defined explictly since the intrinsic does not take a var
// Capture return addresses at depths FIRST..FIRST+3 into t[0..3].
// NOTE(review): multi-statement macro not wrapped in do { } while (0) — unsafe
// if ever used as the body of an unbraced if/else (current call sites are
// plain statements, so it works today).
// NOTE(review): t[0..3] hard-codes STEP_BACK_DEPTH == 4; changing the
// STEP_BACK_DEPTH_* macros without editing this macro would silently break it.
// NOTE(review): __builtin_return_address with a nonzero level is only reliable
// on some targets/frame setups (see GCC docs) — worth confirming for this port.
104 #define backtrace(t) \
105 t[0]=__builtin_return_address(STEP_BACK_DEPTH_FIRST); \
106 t[1]=__builtin_return_address(STEP_BACK_DEPTH_FIRST+1); \
107 t[2]=__builtin_return_address(STEP_BACK_DEPTH_FIRST+2); \
108 t[3]=__builtin_return_address(STEP_BACK_DEPTH_FIRST+3);
111 // For printing a backtrace
// %pS is the kernel printk extension that resolves an address to symbol+offset.
// NOTE(review): both macros also hard-code 4 slots, mirroring backtrace() above.
114 #define backtrace_format "%pS << %pS << %pS << %pS"
// Expands a 4-entry trace array into the matching four printk arguments.
115 #define backtrace_expand(t) ((t)[0]),((t)[1]),((t)[2]),((t)[3])
// Reset all STEP_BACK_DEPTH slots of a stored backtrace (loop body hidden
// in this chunk; presumably sets each slot to 0/NULL — confirm).
118 static void clear_trace(void **trace)
122 for (i=0;i<STEP_BACK_DEPTH;i++) {
// Dump one tracking entry: the lock pointer plus every recorded backtrace
// (allocator, last lock/unlock, last irq lock/unlock) and the saved flags,
// prefixed with a caller-supplied tag string.
128 static void printlock(char *prefix, lockcheck_state_t *l)
131 DEBUG("LOCKCHECK: %s: lock 0x%p, allocator="
133 ", lockcount=%d, lastlocker="
137 ", irqcount=%d, lastirqlocker="
139 ", lastlockflags=%lu, lastirqunlocker="
141 ", lastunlockflags=%lu\n",
143 backtrace_expand(l->allocator),
145 backtrace_expand(l->lastlocker),
146 backtrace_expand(l->lastunlocker),
148 backtrace_expand(l->lastirqlocker),
150 backtrace_expand(l->lastirqunlocker),
// Two-pass check: first count how many tracked locks are currently held
// (lockcount>0); if that count reaches WARN_MULTIPLE_THRESHOLD, rescan and
// print every held lock tagged with the total.
157 static void find_multiple_locks_held(void)
161 lockcheck_state_t *l;
// Pass 1: count held locks.
164 for (i=0;i<NUM_LOCKS;i++) {
166 if (l->inuse && l->lockcount>0) {
168 if (have>=WARN_MULTIPLE_THRESHOLD) {
// Pass 2: dump each held lock's state.
174 if (have>=WARN_MULTIPLE_THRESHOLD) {
176 for (i=0;i<NUM_LOCKS;i++) {
178 if (l->inuse && l->lockcount>0) {
// NOTE(review): buf is declared in lines not visible here — assumed to be
// at least 64 bytes to match this snprintf bound; confirm.
179 snprintf(buf,64,"MULTIPLE LOCKS HELD (%d)",have);
// Mirror of find_multiple_locks_held, but for irqsave-style acquisitions:
// counts entries with irqcount>0 and dumps them all once the count reaches
// WARN_MULTIPLE_THRESHOLD.
188 static void find_multiple_irqs_held(void)
192 lockcheck_state_t *l;
// Pass 1: count locks held with irqs saved.
195 for (i=0;i<NUM_LOCKS;i++) {
197 if (l->inuse && l->irqcount>0) {
199 if (have>=WARN_MULTIPLE_THRESHOLD) {
// Pass 2: dump each offender.
205 if (have>=WARN_MULTIPLE_THRESHOLD) {
207 for (i=0;i<NUM_LOCKS;i++) {
209 if (l->inuse && l->irqcount>0) {
// NOTE(review): buf declared in hidden lines — assumed >= 64 bytes; confirm.
210 snprintf(buf,64,"MULTIPLE IRQS HELD (%d)",have);
// Teardown audit: walk every tracking slot and report locks that are still
// allocated, still held (nonzero lockcount), or have a dangling irqcount.
// NOTE(review): non-prototype "()" parameter list, as with init above.
220 void palacios_lockcheck_deinit()
223 lockcheck_state_t *l;
225 for (i=0;i<NUM_LOCKS;i++) {
228 printlock("ALLOCATED LOCK AT DEINIT",l);
// A nonzero lockcount at deinit means a lock/unlock imbalance leaked through.
229 if ((l->lockcount)) {
230 printlock("BAD LOCK COUNT AT DEINIT",l);
233 printlock("BAD IRQ COUNT AT DEINIT",l);
237 INFO("LOCKCHECK: DEINITED\n");
// Begin tracking a newly created lock: claim a table slot, record the
// allocator's backtrace, and zero all counters and historical traces.
241 void palacios_lockcheck_alloc(void *lock)
243 lockcheck_state_t *l=get_lock_entry();
// get_lock_entry returns NULL when the table is exhausted; the lock then
// simply goes untracked (the check on l is in a hidden line).
246 DEBUG("LOCKCHECK: UNABLE TO ALLOCATE TRACKING DATA FOR LOCK 0x%p\n",lock);
// Remember who allocated this lock for later diagnostics.
249 backtrace(l->allocator);
250 l->lockcount=l->irqcount=0;
251 clear_trace(l->lastlocker);
252 clear_trace(l->lastunlocker);
253 clear_trace(l->lastirqlocker);
254 clear_trace(l->lastirqunlocker);
255 //INFO("LOCKCHECK: LOCK ALLOCATE 0x%p\n",lock);
256 printlock("NEW LOCK", l);
// Stop tracking a lock being destroyed; complain if it is freed while still
// held (nonzero lockcount) or with a dangling irqcount.
260 void palacios_lockcheck_free(void *lock)
262 lockcheck_state_t *l=find_lock_entry(lock);
// Unknown lock pointer: it was never (successfully) registered via alloc.
265 DEBUG("LOCKCHECK: FREEING UNTRACKED LOCK 0x%p\n",lock);
269 if ((l->lockcount)) {
270 printlock("BAD LOCK COUNT AT FREE",l);
274 printlock("BAD IRQ COUNT AT FREE",l);
// Record a plain (non-irqsave) lock acquisition: verify the lock was not
// already held, capture the locker's backtrace, and check for too many
// simultaneously held locks.
279 void palacios_lockcheck_lock(void *lock)
281 lockcheck_state_t *l=find_lock_entry(lock);
284 DEBUG("LOCKCHECK: LOCKING UNTRACKED LOCK 0x%p\n",lock);
// Expect lockcount==0 before a plain lock — anything else is a double lock
// or a lock/unlock imbalance.
288 if (l->lockcount!=0) {
289 printlock("BAD LOCKCOUNT AT LOCK",l);
// A plain lock should not be taken while this lock's irqsave count is live.
291 if (l->irqcount!=0) {
292 printlock("BAD IRQCOUNT AT LOCK",l);
296 backtrace(l->lastlocker);
298 find_multiple_locks_held();
// Record a plain unlock: verify exactly one outstanding lock (lockcount==1)
// and no live irqsave count, then capture the unlocker's backtrace.
301 void palacios_lockcheck_unlock(void *lock)
303 lockcheck_state_t *l=find_lock_entry(lock);
306 DEBUG("LOCKCHECK: UNLOCKING UNTRACKED LOCK 0x%p\n",lock);
// Unlocking a lock that is not currently held exactly once is an imbalance.
310 if (l->lockcount!=1) {
311 printlock("LOCKCHECK: BAD LOCKCOUNT AT UNLOCK",l);
313 if (l->irqcount!=0) {
314 printlock("LOCKCHECK: BAD IRQCOUNT AT UNLOCK",l);
318 backtrace(l->lastunlocker);
// Record an irqsave-style acquisition: same double-lock checks as the plain
// variant, plus save the caller's irq flags and check for too many
// simultaneous irqsave holders.
321 void palacios_lockcheck_lock_irqsave(void *lock,unsigned long flags)
323 lockcheck_state_t *l=find_lock_entry(lock);
326 DEBUG("LOCKCHECK: IRQ LOCKING UNTRACKED LOCK 0x%p\n",lock);
330 if (l->lockcount!=0) {
331 printlock("BAD LOCKCOUNT AT IRQ LOCK",l);
333 if (l->irqcount!=0) {
334 printlock("BAD IRQCOUNT AT IRQ LOCK",l);
// Keep the flags so a mismatched restore can be diagnosed later.
338 l->lastlockflags=flags;
339 backtrace(l->lastirqlocker);
342 find_multiple_irqs_held();
// Record an irqrestore-style release: expects no plain lockcount outstanding
// and exactly one irqsave acquisition (irqcount==1); stores the restore
// flags and the unlocker's backtrace.
// NOTE(review): this function continues past the last visible line of this
// chunk (e.g. any flags-mismatch check and the closing brace are not shown).
346 void palacios_lockcheck_unlock_irqrestore(void *lock,unsigned long flags)
348 lockcheck_state_t *l=find_lock_entry(lock);
351 DEBUG("LOCKCHECK: IRQ UNLOCKING UNTRACKED LOCK 0x%p\n",lock);
355 if (l->lockcount!=0) {
356 printlock("LOCKCHECK: BAD LOCKCOUNT AT IRQ UNLOCK",l);
// irqcount must be exactly 1: one prior lock_irqsave with no restore yet.
358 if (l->irqcount!=1) {
359 printlock("LOCKCHECK: BAD IRQCOUNT AT IRQ UNLOCK",l);
363 l->lastunlockflags = flags;
364 backtrace(l->lastirqunlocker);