/*
 * linux/arch/xtensa/kernel/irq.c
 *
 * Xtensa built-in interrupt controller and some generic functions copied
 * from i386.
 *
 * Copyright (C) 2002 - 2006 Tensilica, Inc.
 * Copyright (C) 1992, 1998 Linus Torvalds, Ingo Molnar
 *
 *
 * Chris Zankel <chris@zankel.net>
 * Kevin Chea
 *
 */
|  | 15 |  | 
|  | 16 | #include <linux/module.h> | 
|  | 17 | #include <linux/seq_file.h> | 
|  | 18 | #include <linux/interrupt.h> | 
|  | 19 | #include <linux/irq.h> | 
|  | 20 | #include <linux/kernel_stat.h> | 
|  | 21 |  | 
|  | 22 | #include <asm/uaccess.h> | 
|  | 23 | #include <asm/platform.h> | 
|  | 24 |  | 
/* Software shadow of the INTENABLE special register; updated by the
 * mask/unmask callbacks below and written back with set_sr(). */
static unsigned int cached_irq_mask;

/* Interrupt error counter, reported by arch_show_interrupts(). */
atomic_t irq_err_count;
|  | 28 |  | 
|  | 29 | /* | 
| Chris Zankel | 5a0015d | 2005-06-23 22:01:16 -0700 | [diff] [blame] | 30 | * do_IRQ handles all normal device IRQ's (the special | 
|  | 31 | * SMP cross-CPU interrupts have their own specific | 
|  | 32 | * handlers). | 
|  | 33 | */ | 
|  | 34 |  | 
| Chris Zankel | fd43fe1 | 2006-12-10 02:18:47 -0800 | [diff] [blame] | 35 | asmlinkage void do_IRQ(int irq, struct pt_regs *regs) | 
| Chris Zankel | 5a0015d | 2005-06-23 22:01:16 -0700 | [diff] [blame] | 36 | { | 
| Chris Zankel | fd43fe1 | 2006-12-10 02:18:47 -0800 | [diff] [blame] | 37 | struct pt_regs *old_regs = set_irq_regs(regs); | 
| Chris Zankel | fd43fe1 | 2006-12-10 02:18:47 -0800 | [diff] [blame] | 38 |  | 
|  | 39 | if (irq >= NR_IRQS) { | 
|  | 40 | printk(KERN_EMERG "%s: cannot handle IRQ %d\n", | 
| Harvey Harrison | 1b532c6 | 2008-07-30 12:48:54 -0700 | [diff] [blame] | 41 | __func__, irq); | 
| Chris Zankel | fd43fe1 | 2006-12-10 02:18:47 -0800 | [diff] [blame] | 42 | } | 
|  | 43 |  | 
| Chris Zankel | 5a0015d | 2005-06-23 22:01:16 -0700 | [diff] [blame] | 44 | irq_enter(); | 
|  | 45 |  | 
|  | 46 | #ifdef CONFIG_DEBUG_STACKOVERFLOW | 
|  | 47 | /* Debugging check for stack overflow: is there less than 1KB free? */ | 
|  | 48 | { | 
|  | 49 | unsigned long sp; | 
|  | 50 |  | 
|  | 51 | __asm__ __volatile__ ("mov %0, a1\n" : "=a" (sp)); | 
|  | 52 | sp &= THREAD_SIZE - 1; | 
|  | 53 |  | 
|  | 54 | if (unlikely(sp < (sizeof(thread_info) + 1024))) | 
|  | 55 | printk("Stack overflow in do_IRQ: %ld\n", | 
|  | 56 | sp - sizeof(struct thread_info)); | 
|  | 57 | } | 
|  | 58 | #endif | 
| Thomas Gleixner | 495e0c7 | 2011-02-06 22:10:52 +0100 | [diff] [blame] | 59 | generic_handle_irq(irq); | 
| Chris Zankel | 5a0015d | 2005-06-23 22:01:16 -0700 | [diff] [blame] | 60 |  | 
|  | 61 | irq_exit(); | 
| Chris Zankel | fd43fe1 | 2006-12-10 02:18:47 -0800 | [diff] [blame] | 62 | set_irq_regs(old_regs); | 
| Chris Zankel | 5a0015d | 2005-06-23 22:01:16 -0700 | [diff] [blame] | 63 | } | 
|  | 64 |  | 
| Thomas Gleixner | 47a5d9d | 2011-03-24 18:28:40 +0100 | [diff] [blame] | 65 | int arch_show_interrupts(struct seq_file *p, int prec) | 
| Chris Zankel | 5a0015d | 2005-06-23 22:01:16 -0700 | [diff] [blame] | 66 | { | 
| Thomas Gleixner | 47a5d9d | 2011-03-24 18:28:40 +0100 | [diff] [blame] | 67 | seq_printf(p, "%*s: ", prec, "ERR"); | 
|  | 68 | seq_printf(p, "%10u\n", atomic_read(&irq_err_count)); | 
| Chris Zankel | 5a0015d | 2005-06-23 22:01:16 -0700 | [diff] [blame] | 69 | return 0; | 
|  | 70 | } | 
| Chris Zankel | 5a0015d | 2005-06-23 22:01:16 -0700 | [diff] [blame] | 71 |  | 
| Thomas Gleixner | 2ea4db6 | 2011-04-19 22:52:58 +0200 | [diff] [blame] | 72 | static void xtensa_irq_mask(struct irq_data *d) | 
| Chris Zankel | 5a0015d | 2005-06-23 22:01:16 -0700 | [diff] [blame] | 73 | { | 
| Thomas Gleixner | 495e0c7 | 2011-02-06 22:10:52 +0100 | [diff] [blame] | 74 | cached_irq_mask &= ~(1 << d->irq); | 
| Chris Zankel | 5a0015d | 2005-06-23 22:01:16 -0700 | [diff] [blame] | 75 | set_sr (cached_irq_mask, INTENABLE); | 
|  | 76 | } | 
|  | 77 |  | 
| Thomas Gleixner | 2ea4db6 | 2011-04-19 22:52:58 +0200 | [diff] [blame] | 78 | static void xtensa_irq_unmask(struct irq_data *d) | 
| Chris Zankel | 5a0015d | 2005-06-23 22:01:16 -0700 | [diff] [blame] | 79 | { | 
| Thomas Gleixner | 495e0c7 | 2011-02-06 22:10:52 +0100 | [diff] [blame] | 80 | cached_irq_mask |= 1 << d->irq; | 
| Chris Zankel | 5a0015d | 2005-06-23 22:01:16 -0700 | [diff] [blame] | 81 | set_sr (cached_irq_mask, INTENABLE); | 
|  | 82 | } | 
|  | 83 |  | 
| Thomas Gleixner | 2ea4db6 | 2011-04-19 22:52:58 +0200 | [diff] [blame] | 84 | static void xtensa_irq_enable(struct irq_data *d) | 
| Johannes Weiner | 4c0d214 | 2009-03-04 16:21:31 +0100 | [diff] [blame] | 85 | { | 
| Thomas Gleixner | 495e0c7 | 2011-02-06 22:10:52 +0100 | [diff] [blame] | 86 | variant_irq_enable(d->irq); | 
|  | 87 | xtensa_irq_unmask(d->irq); | 
| Johannes Weiner | 4c0d214 | 2009-03-04 16:21:31 +0100 | [diff] [blame] | 88 | } | 
|  | 89 |  | 
| Thomas Gleixner | 2ea4db6 | 2011-04-19 22:52:58 +0200 | [diff] [blame] | 90 | static void xtensa_irq_disable(struct irq_data *d) | 
| Johannes Weiner | 4c0d214 | 2009-03-04 16:21:31 +0100 | [diff] [blame] | 91 | { | 
| Thomas Gleixner | 495e0c7 | 2011-02-06 22:10:52 +0100 | [diff] [blame] | 92 | xtensa_irq_mask(d->irq); | 
|  | 93 | variant_irq_disable(d->irq); | 
| Johannes Weiner | 4c0d214 | 2009-03-04 16:21:31 +0100 | [diff] [blame] | 94 | } | 
|  | 95 |  | 
| Thomas Gleixner | 2ea4db6 | 2011-04-19 22:52:58 +0200 | [diff] [blame] | 96 | static void xtensa_irq_ack(struct irq_data *d) | 
| Chris Zankel | 5a0015d | 2005-06-23 22:01:16 -0700 | [diff] [blame] | 97 | { | 
| Thomas Gleixner | 495e0c7 | 2011-02-06 22:10:52 +0100 | [diff] [blame] | 98 | set_sr(1 << d->irq, INTCLEAR); | 
| Chris Zankel | 5a0015d | 2005-06-23 22:01:16 -0700 | [diff] [blame] | 99 | } | 
|  | 100 |  | 
| Thomas Gleixner | 2ea4db6 | 2011-04-19 22:52:58 +0200 | [diff] [blame] | 101 | static int xtensa_irq_retrigger(struct irq_data *d) | 
| Chris Zankel | 5a0015d | 2005-06-23 22:01:16 -0700 | [diff] [blame] | 102 | { | 
| Thomas Gleixner | 495e0c7 | 2011-02-06 22:10:52 +0100 | [diff] [blame] | 103 | set_sr (1 << d->irq, INTSET); | 
| Chris Zankel | fd43fe1 | 2006-12-10 02:18:47 -0800 | [diff] [blame] | 104 | return 1; | 
| Chris Zankel | 5a0015d | 2005-06-23 22:01:16 -0700 | [diff] [blame] | 105 | } | 
|  | 106 |  | 

/* irq_chip for the Xtensa built-in interrupt controller; attached to
 * every interrupt line in init_IRQ() below. */
static struct irq_chip xtensa_irq_chip = {
	.name		= "xtensa",
	.irq_enable	= xtensa_irq_enable,
	.irq_disable	= xtensa_irq_disable,
	.irq_mask	= xtensa_irq_mask,
	.irq_unmask	= xtensa_irq_unmask,
	.irq_ack	= xtensa_irq_ack,
	.irq_retrigger	= xtensa_irq_retrigger,
};
| Chris Zankel | 5a0015d | 2005-06-23 22:01:16 -0700 | [diff] [blame] | 117 |  | 
|  | 118 | void __init init_IRQ(void) | 
|  | 119 | { | 
| Chris Zankel | fd43fe1 | 2006-12-10 02:18:47 -0800 | [diff] [blame] | 120 | int index; | 
| Chris Zankel | 5a0015d | 2005-06-23 22:01:16 -0700 | [diff] [blame] | 121 |  | 
| Chris Zankel | fd43fe1 | 2006-12-10 02:18:47 -0800 | [diff] [blame] | 122 | for (index = 0; index < XTENSA_NR_IRQS; index++) { | 
|  | 123 | int mask = 1 << index; | 
|  | 124 |  | 
|  | 125 | if (mask & XCHAL_INTTYPE_MASK_SOFTWARE) | 
| Thomas Gleixner | 610e175 | 2011-03-24 14:58:43 +0100 | [diff] [blame] | 126 | irq_set_chip_and_handler(index, &xtensa_irq_chip, | 
| Chris Zankel | fd43fe1 | 2006-12-10 02:18:47 -0800 | [diff] [blame] | 127 | handle_simple_irq); | 
|  | 128 |  | 
|  | 129 | else if (mask & XCHAL_INTTYPE_MASK_EXTERN_EDGE) | 
| Thomas Gleixner | 610e175 | 2011-03-24 14:58:43 +0100 | [diff] [blame] | 130 | irq_set_chip_and_handler(index, &xtensa_irq_chip, | 
| Chris Zankel | fd43fe1 | 2006-12-10 02:18:47 -0800 | [diff] [blame] | 131 | handle_edge_irq); | 
|  | 132 |  | 
|  | 133 | else if (mask & XCHAL_INTTYPE_MASK_EXTERN_LEVEL) | 
| Thomas Gleixner | 610e175 | 2011-03-24 14:58:43 +0100 | [diff] [blame] | 134 | irq_set_chip_and_handler(index, &xtensa_irq_chip, | 
| Chris Zankel | fd43fe1 | 2006-12-10 02:18:47 -0800 | [diff] [blame] | 135 | handle_level_irq); | 
|  | 136 |  | 
|  | 137 | else if (mask & XCHAL_INTTYPE_MASK_TIMER) | 
| Thomas Gleixner | 610e175 | 2011-03-24 14:58:43 +0100 | [diff] [blame] | 138 | irq_set_chip_and_handler(index, &xtensa_irq_chip, | 
| Chris Zankel | fd43fe1 | 2006-12-10 02:18:47 -0800 | [diff] [blame] | 139 | handle_edge_irq); | 
|  | 140 |  | 
|  | 141 | else	/* XCHAL_INTTYPE_MASK_WRITE_ERROR */ | 
|  | 142 | /* XCHAL_INTTYPE_MASK_NMI */ | 
|  | 143 |  | 
| Thomas Gleixner | 610e175 | 2011-03-24 14:58:43 +0100 | [diff] [blame] | 144 | irq_set_chip_and_handler(index, &xtensa_irq_chip, | 
| Chris Zankel | fd43fe1 | 2006-12-10 02:18:47 -0800 | [diff] [blame] | 145 | handle_level_irq); | 
|  | 146 | } | 
| Chris Zankel | 5a0015d | 2005-06-23 22:01:16 -0700 | [diff] [blame] | 147 |  | 
|  | 148 | cached_irq_mask = 0; | 
| Daniel Glöckner | 1beee21 | 2009-05-05 15:03:21 +0000 | [diff] [blame] | 149 |  | 
|  | 150 | variant_init_irq(); | 
| Chris Zankel | 5a0015d | 2005-06-23 22:01:16 -0700 | [diff] [blame] | 151 | } |