#include <linux/irq.h>
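
/*
 * set_pending_irq - request that @irq be migrated to the CPUs in @mask
 *
 * The migration is deferred: this only marks the descriptor with
 * IRQ_MOVE_PENDING and records the requested mask, under desc->lock.
 * The actual reprogramming happens later, via move_native_irq() or
 * move_masked_irq(), from a context where the line is masked.
 */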
void set_pending_irq(unsigned int irq, cpumask_t mask)
{
	struct irq_desc *desc = irq_desc + irq;
	unsigned long flags;

	spin_lock_irqsave(&desc->lock, flags);
	desc->status |= IRQ_MOVE_PENDING;
	irq_desc[irq].pending_mask = mask;
	spin_unlock_irqrestore(&desc->lock, flags);
}
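
/*
 * move_masked_irq - carry out a pending affinity change for @irq
 *
 * Must be called with desc->lock held and the irq line masked by the
 * caller.  Reprograms the affinity to the pending mask, restricted to
 * CPUs that are still online, and clears the pending state.
 */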
void move_masked_irq(int irq)
{
	struct irq_desc *desc = irq_desc + irq;
	cpumask_t tmp;

	if (likely(!(desc->status & IRQ_MOVE_PENDING)))
		return;

	/*
	 * Paranoia: cpu-local interrupts shouldn't be calling in here anyway.
	 */
	if (CHECK_IRQ_PER_CPU(desc->status)) {
		WARN_ON(1);
		return;
	}

	desc->status &= ~IRQ_MOVE_PENDING;

	if (unlikely(cpus_empty(irq_desc[irq].pending_mask)))
		return;

	if (!desc->chip->set_affinity)
		return;

	assert_spin_locked(&desc->lock);
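
	/*
	 * Drop any CPUs from the pending mask that have gone offline
	 * since the move was requested; tmp may end up empty.
	 */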
	cpus_and(tmp, irq_desc[irq].pending_mask, cpu_online_map);

	/*
	 * If there is a valid mask to work with, do the disable,
	 * re-program, enable sequence.  This is *not* particularly
	 * important for level-triggered interrupts, but in an
	 * edge-triggered case we might be writing the RTE while an
	 * active trigger is coming in, which could cause some ioapics
	 * to malfunction.  Being paranoid, I guess!
	 *
	 * For correct operation this depends on the caller masking
	 * the irqs.
	 */
	if (likely(!cpus_empty(tmp))) {
		desc->chip->set_affinity(irq, tmp);
	}
	cpus_clear(irq_desc[irq].pending_mask);
}
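
/*
 * move_native_irq - carry out a pending affinity change from unmasked
 * context
 *
 * Unlike move_masked_irq(), callers need not have masked the line:
 * it is masked here around the reprogramming and unmasked afterwards.
 * If the irq is disabled we bail out; IRQ_MOVE_PENDING stays set, so
 * the move is retried on a later call.
 */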
void move_native_irq(int irq)
{
	struct irq_desc *desc = irq_desc + irq;

	if (likely(!(desc->status & IRQ_MOVE_PENDING)))
		return;

	if (unlikely(desc->status & IRQ_DISABLED))
		return;
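
	/*
	 * move_masked_irq() requires the line to be masked, so mask it
	 * across the reprogramming and unmask it again afterwards.
	 */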
	desc->chip->mask(irq);
	move_masked_irq(irq);
	desc->chip->unmask(irq);
}