/******************************************************************************
 *
 * Copyright(c) 2007 - 2009 Intel Corporation. All rights reserved.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of version 2 of the GNU General Public License as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110, USA
 *
 * The full GNU General Public License is included in this distribution in the
 * file called LICENSE.
 *
 * Contact Information:
 * Intel Corporation, 5200 N.E. Elam Young Parkway, Hillsboro, OR 97124-6497
 *
 *****************************************************************************/

#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/init.h>
#include <linux/pci.h>
#include <linux/dma-mapping.h>
#include <linux/delay.h>
#include <linux/skbuff.h>
#include <linux/netdevice.h>
#include <linux/wireless.h>
#include <net/mac80211.h>
#include <linux/etherdevice.h>
#include <asm/unaligned.h>

#include "iwl-eeprom.h"
#include "iwl-dev.h"
#include "iwl-core.h"
#include "iwl-io.h"
#include "iwl-sta.h"
#include "iwl-helpers.h"
#include "iwl-5000-hw.h"
#include "iwl-6000-hw.h"

/* Highest firmware API version supported */
#define IWL5000_UCODE_API_MAX 1
#define IWL5150_UCODE_API_MAX 2

/* Lowest firmware API version supported */
#define IWL5000_UCODE_API_MIN 1
#define IWL5150_UCODE_API_MIN 1

#define IWL5000_FW_PRE "iwlwifi-5000-"
#define _IWL5000_MODULE_FIRMWARE(api) IWL5000_FW_PRE #api ".ucode"
#define IWL5000_MODULE_FIRMWARE(api) _IWL5000_MODULE_FIRMWARE(api)

#define IWL5150_FW_PRE "iwlwifi-5150-"
#define _IWL5150_MODULE_FIRMWARE(api) IWL5150_FW_PRE #api ".ucode"
#define IWL5150_MODULE_FIRMWARE(api) _IWL5150_MODULE_FIRMWARE(api)

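/*
 * Default TX queue to TX FIFO mapping: entry i is the FIFO fed by TX
 * queue i.  The four EDCA queues come first, entry 4 is the command
 * FIFO, and the last two entries map to the HCCA FIFOs.
 */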
static const u16 iwl5000_default_queue_to_tx_fifo[] = {
	IWL_TX_FIFO_AC3,
	IWL_TX_FIFO_AC2,
	IWL_TX_FIFO_AC1,
	IWL_TX_FIFO_AC0,
	IWL50_CMD_FIFO_NUM,
	IWL_TX_FIFO_HCCA_1,
	IWL_TX_FIFO_HCCA_2
};

/* FIXME: same implementation as 4965 */
static int iwl5000_apm_stop_master(struct iwl_priv *priv)
{
	unsigned long flags;

	spin_lock_irqsave(&priv->lock, flags);

	/* set stop master bit */
	iwl_set_bit(priv, CSR_RESET, CSR_RESET_REG_FLAG_STOP_MASTER);

	iwl_poll_direct_bit(priv, CSR_RESET,
			    CSR_RESET_REG_FLAG_MASTER_DISABLED, 100);

	spin_unlock_irqrestore(&priv->lock, flags);
	IWL_DEBUG_INFO(priv, "stop master\n");

	return 0;
}

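/*
 * Bring up the adapter's power management (APM) hardware: tune the
 * L0s/L1 behaviour, request the "init done" (D0U* -> D0A*) transition,
 * wait for the MAC clock to stabilize and enable the DMA clock.
 */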
static int iwl5000_apm_init(struct iwl_priv *priv)
{
	int ret = 0;

	iwl_set_bit(priv, CSR_GIO_CHICKEN_BITS,
		    CSR_GIO_CHICKEN_BITS_REG_BIT_DIS_L0S_EXIT_TIMER);

	/* disable L0s without affecting L1 (don't wait for ICH L0s bug W/A) */
	iwl_set_bit(priv, CSR_GIO_CHICKEN_BITS,
		    CSR_GIO_CHICKEN_BITS_REG_BIT_L1A_NO_L0S_RX);

	/* Set FH wait threshold to maximum (HW error during stress W/A) */
	iwl_set_bit(priv, CSR_DBG_HPET_MEM_REG, CSR_DBG_HPET_MEM_REG_VAL);

	/* enable HAP INTA to move device L1a -> L0s */
	iwl_set_bit(priv, CSR_HW_IF_CONFIG_REG,
		    CSR_HW_IF_CONFIG_REG_BIT_HAP_WAKE_L1A);

	if (priv->cfg->need_pll_cfg)
		iwl_set_bit(priv, CSR_ANA_PLL_CFG, CSR50_ANA_PLL_CFG_VAL);

	/* set "initialization complete" bit to move adapter
	 * D0U* --> D0A* state */
	iwl_set_bit(priv, CSR_GP_CNTRL, CSR_GP_CNTRL_REG_FLAG_INIT_DONE);

	/* wait for clock stabilization */
	ret = iwl_poll_direct_bit(priv, CSR_GP_CNTRL,
				  CSR_GP_CNTRL_REG_FLAG_MAC_CLOCK_READY, 25000);
	if (ret < 0) {
		IWL_DEBUG_INFO(priv, "Failed to init the card\n");
		return ret;
	}

	ret = iwl_grab_nic_access(priv);
	if (ret)
		return ret;

	/* enable DMA */
	iwl_write_prph(priv, APMG_CLK_EN_REG, APMG_CLK_VAL_DMA_CLK_RQT);

	udelay(20);

	/* disable L1-Active */
	iwl_set_bits_prph(priv, APMG_PCIDEV_STT_REG,
			  APMG_PCIDEV_STT_VAL_L1_ACT_DIS);

	iwl_release_nic_access(priv);

	return ret;
}

/* FIXME: this is identical to 4965 */
static void iwl5000_apm_stop(struct iwl_priv *priv)
{
	unsigned long flags;

	iwl5000_apm_stop_master(priv);

	spin_lock_irqsave(&priv->lock, flags);

	iwl_set_bit(priv, CSR_RESET, CSR_RESET_REG_FLAG_SW_RESET);

	udelay(10);

	/* clear "init complete" bit to move adapter D0A* --> D0U* state */
	iwl_clear_bit(priv, CSR_GP_CNTRL, CSR_GP_CNTRL_REG_FLAG_INIT_DONE);

	spin_unlock_irqrestore(&priv->lock, flags);
}

static int iwl5000_apm_reset(struct iwl_priv *priv)
{
	int ret = 0;
	unsigned long flags;

	iwl5000_apm_stop_master(priv);

	spin_lock_irqsave(&priv->lock, flags);

	iwl_set_bit(priv, CSR_RESET, CSR_RESET_REG_FLAG_SW_RESET);

	udelay(10);

	/* FIXME: put here L1A -L0S w/a */

	if (priv->cfg->need_pll_cfg)
		iwl_set_bit(priv, CSR_ANA_PLL_CFG, CSR50_ANA_PLL_CFG_VAL);

	/* set "initialization complete" bit to move adapter
	 * D0U* --> D0A* state */
	iwl_set_bit(priv, CSR_GP_CNTRL, CSR_GP_CNTRL_REG_FLAG_INIT_DONE);

	/* wait for clock stabilization */
	ret = iwl_poll_direct_bit(priv, CSR_GP_CNTRL,
				  CSR_GP_CNTRL_REG_FLAG_MAC_CLOCK_READY, 25000);
	if (ret < 0) {
		IWL_DEBUG_INFO(priv, "Failed to init the card\n");
		goto out;
	}

	ret = iwl_grab_nic_access(priv);
	if (ret)
		goto out;

	/* enable DMA */
	iwl_write_prph(priv, APMG_CLK_EN_REG, APMG_CLK_VAL_DMA_CLK_RQT);

	udelay(20);

	/* disable L1-Active */
	iwl_set_bits_prph(priv, APMG_PCIDEV_STT_REG,
			  APMG_PCIDEV_STT_VAL_L1_ACT_DIS);

	iwl_release_nic_access(priv);

out:
	spin_unlock_irqrestore(&priv->lock, flags);

	return ret;
}

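/*
 * Basic NIC configuration: select L0s vs. L1 from the PCIe link control
 * word set up by the BIOS, copy the radio configuration from EEPROM into
 * CSR_HW_IF_CONFIG_REG, and apply the early-PCIe-power-off workaround.
 */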
static void iwl5000_nic_config(struct iwl_priv *priv)
{
	unsigned long flags;
	u16 radio_cfg;
	u16 lctl;

	spin_lock_irqsave(&priv->lock, flags);

	lctl = iwl_pcie_link_ctl(priv);

	/* HW bug W/A: choose L0s vs. L1 based on what the BIOS enabled */
	if ((lctl & PCI_CFG_LINK_CTRL_VAL_L1_EN) == PCI_CFG_LINK_CTRL_VAL_L1_EN)
		/* L1-ASPM enabled: disable L0S */
		iwl_set_bit(priv, CSR_GIO_REG, CSR_GIO_REG_VAL_L0S_ENABLED);
	else
		/* L1-ASPM disabled: enable L0S */
		iwl_clear_bit(priv, CSR_GIO_REG, CSR_GIO_REG_VAL_L0S_ENABLED);

	radio_cfg = iwl_eeprom_query16(priv, EEPROM_RADIO_CONFIG);

	/* write radio config values to register */
	if (EEPROM_RF_CFG_TYPE_MSK(radio_cfg) < EEPROM_5000_RF_CFG_TYPE_MAX)
		iwl_set_bit(priv, CSR_HW_IF_CONFIG_REG,
			    EEPROM_RF_CFG_TYPE_MSK(radio_cfg) |
			    EEPROM_RF_CFG_STEP_MSK(radio_cfg) |
			    EEPROM_RF_CFG_DASH_MSK(radio_cfg));

	/* set CSR_HW_CONFIG_REG for uCode use */
	iwl_set_bit(priv, CSR_HW_IF_CONFIG_REG,
		    CSR_HW_IF_CONFIG_REG_BIT_RADIO_SI |
		    CSR_HW_IF_CONFIG_REG_BIT_MAC_SI);

	/* W/A : NIC is stuck in a reset state after Early PCIe power off
	 * (PCIe power is lost before PERST# is asserted), causing ME FW
	 * to lose ownership and not being able to obtain it back.
	 */
	iwl_grab_nic_access(priv);
	iwl_set_bits_mask_prph(priv, APMG_PS_CTRL_REG,
			       APMG_PS_CTRL_EARLY_PWR_OFF_RESET_DIS,
			       ~APMG_PS_CTRL_EARLY_PWR_OFF_RESET_DIS);
	iwl_release_nic_access(priv);

	spin_unlock_irqrestore(&priv->lock, flags);
}

/*
 * EEPROM
 */
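/*
 * Some EEPROM offsets are "indirect": the address encodes a section type,
 * and the real location is the offset plus that section's link word, which
 * is itself stored in the EEPROM.  Link words count 16-bit units, hence
 * the final shift from words to bytes.
 */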
static u32 eeprom_indirect_address(const struct iwl_priv *priv, u32 address)
{
	u16 offset = 0;

	if ((address & INDIRECT_ADDRESS) == 0)
		return address;

	switch (address & INDIRECT_TYPE_MSK) {
	case INDIRECT_HOST:
		offset = iwl_eeprom_query16(priv, EEPROM_5000_LINK_HOST);
		break;
	case INDIRECT_GENERAL:
		offset = iwl_eeprom_query16(priv, EEPROM_5000_LINK_GENERAL);
		break;
	case INDIRECT_REGULATORY:
		offset = iwl_eeprom_query16(priv, EEPROM_5000_LINK_REGULATORY);
		break;
	case INDIRECT_CALIBRATION:
		offset = iwl_eeprom_query16(priv, EEPROM_5000_LINK_CALIBRATION);
		break;
	case INDIRECT_PROCESS_ADJST:
		offset = iwl_eeprom_query16(priv, EEPROM_5000_LINK_PROCESS_ADJST);
		break;
	case INDIRECT_OTHERS:
		offset = iwl_eeprom_query16(priv, EEPROM_5000_LINK_OTHERS);
		break;
	default:
		IWL_ERR(priv, "illegal indirect type: 0x%X\n",
			address & INDIRECT_TYPE_MSK);
		break;
	}

	/* translate the offset from words to bytes */
	return (address & ADDRESS_MSK) + (offset << 1);
}

static u16 iwl5000_eeprom_calib_version(struct iwl_priv *priv)
{
	struct iwl_eeprom_calib_hdr {
		u8 version;
		u8 pa_type;
		u16 voltage;
	} *hdr;

	hdr = (struct iwl_eeprom_calib_hdr *)iwl_eeprom_query_addr(priv,
							EEPROM_5000_CALIB_ALL);
	return hdr->version;
}

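/*
 * Chain-noise gain computation: derive delta gain codes for antennas B
 * and C relative to antenna A from the accumulated noise averages, then
 * hand the result to the uCode once (guarded by data->radio_write) via
 * the chain-noise gain calibration command.
 */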
static void iwl5000_gain_computation(struct iwl_priv *priv,
		u32 average_noise[NUM_RX_CHAINS],
		u16 min_average_noise_antenna_i,
		u32 min_average_noise)
{
	int i;
	s32 delta_g;
	struct iwl_chain_noise_data *data = &priv->chain_noise_data;

	/* Find Gain Code for the antennas B and C */
	for (i = 1; i < NUM_RX_CHAINS; i++) {
		if ((data->disconn_array[i])) {
			data->delta_gain_code[i] = 0;
			continue;
		}
		delta_g = (1000 * ((s32)average_noise[0] -
				   (s32)average_noise[i])) / 1500;
		/* bound gain by 2 bits value max, 3rd bit is sign */
		data->delta_gain_code[i] =
			min(abs(delta_g), CHAIN_NOISE_MAX_DELTA_GAIN_CODE);

		if (delta_g < 0)
			/* set negative sign */
			data->delta_gain_code[i] |= (1 << 2);
	}

	IWL_DEBUG_CALIB(priv, "Delta gains: ANT_B = %d  ANT_C = %d\n",
			data->delta_gain_code[1], data->delta_gain_code[2]);

	if (!data->radio_write) {
		struct iwl_calib_chain_noise_gain_cmd cmd;

		memset(&cmd, 0, sizeof(cmd));

		cmd.hdr.op_code = IWL_PHY_CALIBRATE_CHAIN_NOISE_GAIN_CMD;
		cmd.hdr.first_group = 0;
		cmd.hdr.groups_num = 1;
		cmd.hdr.data_valid = 1;
		cmd.delta_gain_1 = data->delta_gain_code[1];
		cmd.delta_gain_2 = data->delta_gain_code[2];
		iwl_send_cmd_pdu_async(priv, REPLY_PHY_CALIBRATION_CMD,
				       sizeof(cmd), &cmd, NULL);

		data->radio_write = 1;
		data->state = IWL_CHAIN_NOISE_CALIBRATED;
	}

	data->chain_noise_a = 0;
	data->chain_noise_b = 0;
	data->chain_noise_c = 0;
	data->chain_signal_a = 0;
	data->chain_signal_b = 0;
	data->chain_signal_c = 0;
	data->beacon_count = 0;
}

static void iwl5000_chain_noise_reset(struct iwl_priv *priv)
{
	struct iwl_chain_noise_data *data = &priv->chain_noise_data;
	int ret;

	if ((data->state == IWL_CHAIN_NOISE_ALIVE) && iwl_is_associated(priv)) {
		struct iwl_calib_chain_noise_reset_cmd cmd;

		memset(&cmd, 0, sizeof(cmd));

		cmd.hdr.op_code = IWL_PHY_CALIBRATE_CHAIN_NOISE_RESET_CMD;
		cmd.hdr.first_group = 0;
		cmd.hdr.groups_num = 1;
		cmd.hdr.data_valid = 1;
		ret = iwl_send_cmd_pdu(priv, REPLY_PHY_CALIBRATION_CMD,
				       sizeof(cmd), &cmd);
		if (ret)
			IWL_ERR(priv,
				"Could not send REPLY_PHY_CALIBRATION_CMD\n");
		data->state = IWL_CHAIN_NOISE_ACCUMULATE;
		IWL_DEBUG_CALIB(priv, "Run chain_noise_calibrate\n");
	}
}

void iwl5000_rts_tx_cmd_flag(struct ieee80211_tx_info *info,
			     __le32 *tx_flags)
{
	if ((info->control.rates[0].flags & IEEE80211_TX_RC_USE_RTS_CTS) ||
	    (info->control.rates[0].flags & IEEE80211_TX_RC_USE_CTS_PROTECT))
		*tx_flags |= TX_CMD_FLG_RTS_CTS_MSK;
	else
		*tx_flags &= ~TX_CMD_FLG_RTS_CTS_MSK;
}

static struct iwl_sensitivity_ranges iwl5000_sensitivity = {
	.min_nrg_cck = 95,
	.max_nrg_cck = 0,
	.auto_corr_min_ofdm = 90,
	.auto_corr_min_ofdm_mrc = 170,
	.auto_corr_min_ofdm_x1 = 120,
	.auto_corr_min_ofdm_mrc_x1 = 240,

	.auto_corr_max_ofdm = 120,
	.auto_corr_max_ofdm_mrc = 210,
	.auto_corr_max_ofdm_x1 = 155,
	.auto_corr_max_ofdm_mrc_x1 = 290,

	.auto_corr_min_cck = 125,
	.auto_corr_max_cck = 200,
	.auto_corr_min_cck_mrc = 170,
	.auto_corr_max_cck_mrc = 400,
	.nrg_th_cck = 95,
	.nrg_th_ofdm = 95,
};

static const u8 *iwl5000_eeprom_query_addr(const struct iwl_priv *priv,
					   size_t offset)
{
	u32 address = eeprom_indirect_address(priv, offset);
	BUG_ON(address >= priv->cfg->eeprom_size);
	return &priv->eeprom[address];
}

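/*
 * The 5150 does not take its CT-kill threshold in degrees Celsius;
 * convert CT_KILL_THRESHOLD using the temperature and voltage calibration
 * words stored in EEPROM.
 */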
static s32 iwl5150_get_ct_threshold(struct iwl_priv *priv)
{
	const s32 volt2temp_coef = -5;
	u16 *temp_calib = (u16 *)iwl_eeprom_query_addr(priv,
						       EEPROM_5000_TEMPERATURE);
	/* offset = temperature - voltage / coef */
	s32 offset = temp_calib[0] - temp_calib[1] / volt2temp_coef;
	s32 threshold = (s32)CELSIUS_TO_KELVIN(CT_KILL_THRESHOLD) - offset;
	return threshold * volt2temp_coef;
}

/*
 * Calibration
 */
static int iwl5000_set_Xtal_calib(struct iwl_priv *priv)
{
	struct iwl_calib_xtal_freq_cmd cmd;
	u16 *xtal_calib = (u16 *)iwl_eeprom_query_addr(priv, EEPROM_5000_XTAL);

	cmd.hdr.op_code = IWL_PHY_CALIBRATE_CRYSTAL_FRQ_CMD;
	cmd.hdr.first_group = 0;
	cmd.hdr.groups_num = 1;
	cmd.hdr.data_valid = 1;
	cmd.cap_pin1 = (u8)xtal_calib[0];
	cmd.cap_pin2 = (u8)xtal_calib[1];
	return iwl_calib_set(&priv->calib_results[IWL_CALIB_XTAL],
			     (u8 *)&cmd, sizeof(cmd));
}

static int iwl5000_send_calib_cfg(struct iwl_priv *priv)
{
	struct iwl_calib_cfg_cmd calib_cfg_cmd;
	struct iwl_host_cmd cmd = {
		.id = CALIBRATION_CFG_CMD,
		.len = sizeof(struct iwl_calib_cfg_cmd),
		.data = &calib_cfg_cmd,
	};

	memset(&calib_cfg_cmd, 0, sizeof(calib_cfg_cmd));
	calib_cfg_cmd.ucd_calib_cfg.once.is_enable = IWL_CALIB_INIT_CFG_ALL;
	calib_cfg_cmd.ucd_calib_cfg.once.start = IWL_CALIB_INIT_CFG_ALL;
	calib_cfg_cmd.ucd_calib_cfg.once.send_res = IWL_CALIB_INIT_CFG_ALL;
	calib_cfg_cmd.ucd_calib_cfg.flags = IWL_CALIB_INIT_CFG_ALL;

	return iwl_send_cmd(priv, &cmd);
}

static void iwl5000_rx_calib_result(struct iwl_priv *priv,
				    struct iwl_rx_mem_buffer *rxb)
{
	struct iwl_rx_packet *pkt = (void *)rxb->skb->data;
	struct iwl_calib_hdr *hdr = (struct iwl_calib_hdr *)pkt->u.raw;
	int len = le32_to_cpu(pkt->len) & FH_RSCSR_FRAME_SIZE_MSK;
	int index;

	/* reduce the size of the length field itself */
	len -= 4;

	/* Define the order in which the results will be sent to the runtime
	 * uCode. iwl_send_calib_results sends them in a row according to
	 * their index. We sort them here */
	switch (hdr->op_code) {
	case IWL_PHY_CALIBRATE_DC_CMD:
		index = IWL_CALIB_DC;
		break;
	case IWL_PHY_CALIBRATE_LO_CMD:
		index = IWL_CALIB_LO;
		break;
	case IWL_PHY_CALIBRATE_TX_IQ_CMD:
		index = IWL_CALIB_TX_IQ;
		break;
	case IWL_PHY_CALIBRATE_TX_IQ_PERD_CMD:
		index = IWL_CALIB_TX_IQ_PERD;
		break;
	case IWL_PHY_CALIBRATE_BASE_BAND_CMD:
		index = IWL_CALIB_BASE_BAND;
		break;
	default:
		IWL_ERR(priv, "Unknown calibration notification %d\n",
			hdr->op_code);
		return;
	}
	iwl_calib_set(&priv->calib_results[index], pkt->u.raw, len);
}

static void iwl5000_rx_calib_complete(struct iwl_priv *priv,
				      struct iwl_rx_mem_buffer *rxb)
{
	IWL_DEBUG_INFO(priv, "Init. calibration is completed, restarting fw.\n");
	queue_work(priv->workqueue, &priv->restart);
}

/*
 * ucode
 */
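/*
 * Copy one uCode section (instructions or data) from host memory into its
 * SRAM destination by programming a single transfer on the FH service DMA
 * channel.
 */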
static int iwl5000_load_section(struct iwl_priv *priv,
				struct fw_desc *image,
				u32 dst_addr)
{
	int ret = 0;
	unsigned long flags;

	dma_addr_t phy_addr = image->p_addr;
	u32 byte_cnt = image->len;

	spin_lock_irqsave(&priv->lock, flags);
	ret = iwl_grab_nic_access(priv);
	if (ret) {
		spin_unlock_irqrestore(&priv->lock, flags);
		return ret;
	}

	iwl_write_direct32(priv,
		FH_TCSR_CHNL_TX_CONFIG_REG(FH_SRVC_CHNL),
		FH_TCSR_TX_CONFIG_REG_VAL_DMA_CHNL_PAUSE);

	iwl_write_direct32(priv,
		FH_SRVC_CHNL_SRAM_ADDR_REG(FH_SRVC_CHNL), dst_addr);

	iwl_write_direct32(priv,
		FH_TFDIB_CTRL0_REG(FH_SRVC_CHNL),
		phy_addr & FH_MEM_TFDIB_DRAM_ADDR_LSB_MSK);

	iwl_write_direct32(priv,
		FH_TFDIB_CTRL1_REG(FH_SRVC_CHNL),
		(iwl_get_dma_hi_addr(phy_addr)
			<< FH_MEM_TFDIB_REG1_ADDR_BITSHIFT) | byte_cnt);

	iwl_write_direct32(priv,
		FH_TCSR_CHNL_TX_BUF_STS_REG(FH_SRVC_CHNL),
		1 << FH_TCSR_CHNL_TX_BUF_STS_REG_POS_TB_NUM |
		1 << FH_TCSR_CHNL_TX_BUF_STS_REG_POS_TB_IDX |
		FH_TCSR_CHNL_TX_BUF_STS_REG_VAL_TFDB_VALID);

	iwl_write_direct32(priv,
		FH_TCSR_CHNL_TX_CONFIG_REG(FH_SRVC_CHNL),
		FH_TCSR_TX_CONFIG_REG_VAL_DMA_CHNL_ENABLE	|
		FH_TCSR_TX_CONFIG_REG_VAL_DMA_CREDIT_DISABLE	|
		FH_TCSR_TX_CONFIG_REG_VAL_CIRQ_HOST_ENDTFD);

	iwl_release_nic_access(priv);
	spin_unlock_irqrestore(&priv->lock, flags);
	return 0;
}

static int iwl5000_load_given_ucode(struct iwl_priv *priv,
				    struct fw_desc *inst_image,
				    struct fw_desc *data_image)
{
	int ret = 0;

	ret = iwl5000_load_section(priv, inst_image,
				   IWL50_RTC_INST_LOWER_BOUND);
	if (ret)
		return ret;

	IWL_DEBUG_INFO(priv, "INST uCode section being loaded...\n");
	ret = wait_event_interruptible_timeout(priv->wait_command_queue,
					priv->ucode_write_complete, 5 * HZ);
	if (ret == -ERESTARTSYS) {
		IWL_ERR(priv, "Could not load the INST uCode section due "
			"to interrupt\n");
		return ret;
	}
	if (!ret) {
		IWL_ERR(priv, "Could not load the INST uCode section\n");
		return -ETIMEDOUT;
	}

	priv->ucode_write_complete = 0;

	ret = iwl5000_load_section(priv, data_image,
				   IWL50_RTC_DATA_LOWER_BOUND);
	if (ret)
		return ret;

	IWL_DEBUG_INFO(priv, "DATA uCode section being loaded...\n");

	ret = wait_event_interruptible_timeout(priv->wait_command_queue,
					priv->ucode_write_complete, 5 * HZ);
	if (ret == -ERESTARTSYS) {
		IWL_ERR(priv, "Could not load the DATA uCode section due "
			"to interrupt\n");
		return ret;
	} else if (!ret) {
		IWL_ERR(priv, "Could not load the DATA uCode section\n");
		return -ETIMEDOUT;
	} else
		ret = 0;

	priv->ucode_write_complete = 0;

	return ret;
}

static int iwl5000_load_ucode(struct iwl_priv *priv)
{
	int ret = 0;

	/* check whether init ucode should be loaded, or rather runtime ucode */
	if (priv->ucode_init.len && (priv->ucode_type == UCODE_NONE)) {
		IWL_DEBUG_INFO(priv, "Init ucode found. Loading init ucode...\n");
		ret = iwl5000_load_given_ucode(priv,
			&priv->ucode_init, &priv->ucode_init_data);
		if (!ret) {
			IWL_DEBUG_INFO(priv, "Init ucode load complete.\n");
			priv->ucode_type = UCODE_INIT;
		}
	} else {
		IWL_DEBUG_INFO(priv, "Init ucode not found, or already loaded. "
			"Loading runtime ucode...\n");
		ret = iwl5000_load_given_ucode(priv,
			&priv->ucode_code, &priv->ucode_data);
		if (!ret) {
			IWL_DEBUG_INFO(priv, "Runtime ucode load complete.\n");
			priv->ucode_type = UCODE_RT;
		}
	}

	return ret;
}

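/*
 * Called when the "initialize" uCode reports ALIVE: verify the loaded
 * image, run the alive_notify setup, and ask the uCode to start the
 * initial calibrations.  Any failure schedules a full restart.
 */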
static void iwl5000_init_alive_start(struct iwl_priv *priv)
{
	int ret = 0;

	/* Check alive response for "valid" sign from uCode */
	if (priv->card_alive_init.is_valid != UCODE_VALID_OK) {
		/* We had an error bringing up the hardware, so take it
		 * all the way back down so we can try again */
		IWL_DEBUG_INFO(priv, "Initialize Alive failed.\n");
		goto restart;
	}

	/* initialize uCode was loaded... verify inst image.
	 * This is a paranoid check, because we would not have gotten the
	 * "initialize" alive if code weren't properly loaded.  */
	if (iwl_verify_ucode(priv)) {
		/* Runtime instruction load was bad;
		 * take it all the way back down so we can try again */
		IWL_DEBUG_INFO(priv, "Bad \"initialize\" uCode load.\n");
		goto restart;
	}

	iwl_clear_stations_table(priv);
	ret = priv->cfg->ops->lib->alive_notify(priv);
	if (ret) {
		IWL_WARN(priv,
			 "Could not complete ALIVE transition: %d\n", ret);
		goto restart;
	}

	iwl5000_send_calib_cfg(priv);
	return;

restart:
	/* real restart (first load init_ucode) */
	queue_work(priv->workqueue, &priv->restart);
}

static void iwl5000_set_wr_ptrs(struct iwl_priv *priv,
				int txq_id, u32 index)
{
	iwl_write_direct32(priv, HBUS_TARG_WRPTR,
			   (index & 0xff) | (txq_id << 8));
	iwl_write_prph(priv, IWL50_SCD_QUEUE_RDPTR(txq_id), index);
}

static void iwl5000_tx_queue_set_status(struct iwl_priv *priv,
					struct iwl_tx_queue *txq,
					int tx_fifo_id, int scd_retry)
{
	int txq_id = txq->q.id;
	int active = test_bit(txq_id, &priv->txq_ctx_active_msk) ? 1 : 0;

	iwl_write_prph(priv, IWL50_SCD_QUEUE_STATUS_BITS(txq_id),
		       (active << IWL50_SCD_QUEUE_STTS_REG_POS_ACTIVE) |
		       (tx_fifo_id << IWL50_SCD_QUEUE_STTS_REG_POS_TXF) |
		       (1 << IWL50_SCD_QUEUE_STTS_REG_POS_WSL) |
		       IWL50_SCD_QUEUE_STTS_REG_MSK);

	txq->sched_retry = scd_retry;

	IWL_DEBUG_INFO(priv, "%s %s Queue %d on AC %d\n",
		       active ? "Activate" : "Deactivate",
		       scd_retry ? "BA" : "AC", txq_id, tx_fifo_id);
}

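/* Send a zeroed WiMAX coexistence priority table to the uCode. */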
static int iwl5000_send_wimax_coex(struct iwl_priv *priv)
{
	struct iwl_wimax_coex_cmd coex_cmd;

	memset(&coex_cmd, 0, sizeof(coex_cmd));

	return iwl_send_cmd_pdu(priv, COEX_PRIORITY_TABLE_CMD,
				sizeof(coex_cmd), &coex_cmd);
}

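/*
 * Runtime ALIVE handling: program the scheduler (SCD) base and byte-count
 * table addresses, clear the SCD context area, enable the FH TX DMA
 * channels, activate the TX queues with their FIFO mapping, and finally
 * send the WiMAX coexistence command and the stored calibration results.
 */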
static int iwl5000_alive_notify(struct iwl_priv *priv)
{
	u32 a;
	unsigned long flags;
	int ret;
	int i, chan;
	u32 reg_val;

	spin_lock_irqsave(&priv->lock, flags);

	ret = iwl_grab_nic_access(priv);
	if (ret) {
		spin_unlock_irqrestore(&priv->lock, flags);
		return ret;
	}

	priv->scd_base_addr = iwl_read_prph(priv, IWL50_SCD_SRAM_BASE_ADDR);
	a = priv->scd_base_addr + IWL50_SCD_CONTEXT_DATA_OFFSET;
	for (; a < priv->scd_base_addr + IWL50_SCD_TX_STTS_BITMAP_OFFSET;
	     a += 4)
		iwl_write_targ_mem(priv, a, 0);
	for (; a < priv->scd_base_addr + IWL50_SCD_TRANSLATE_TBL_OFFSET;
	     a += 4)
		iwl_write_targ_mem(priv, a, 0);
	for (; a < sizeof(u16) * priv->hw_params.max_txq_num; a += 4)
		iwl_write_targ_mem(priv, a, 0);

	iwl_write_prph(priv, IWL50_SCD_DRAM_BASE_ADDR,
		       priv->scd_bc_tbls.dma >> 10);

	/* Enable DMA channel */
	for (chan = 0; chan < FH50_TCSR_CHNL_NUM; chan++)
		iwl_write_direct32(priv, FH_TCSR_CHNL_TX_CONFIG_REG(chan),
				FH_TCSR_TX_CONFIG_REG_VAL_DMA_CHNL_ENABLE |
				FH_TCSR_TX_CONFIG_REG_VAL_DMA_CREDIT_ENABLE);

	/* Update FH chicken bits */
	reg_val = iwl_read_direct32(priv, FH_TX_CHICKEN_BITS_REG);
	iwl_write_direct32(priv, FH_TX_CHICKEN_BITS_REG,
			   reg_val | FH_TX_CHICKEN_BITS_SCD_AUTO_RETRY_EN);

	iwl_write_prph(priv, IWL50_SCD_QUEUECHAIN_SEL,
		IWL50_SCD_QUEUECHAIN_SEL_ALL(priv->hw_params.max_txq_num));
	iwl_write_prph(priv, IWL50_SCD_AGGR_SEL, 0);

	/* initiate the queues */
	for (i = 0; i < priv->hw_params.max_txq_num; i++) {
		iwl_write_prph(priv, IWL50_SCD_QUEUE_RDPTR(i), 0);
		iwl_write_direct32(priv, HBUS_TARG_WRPTR, 0 | (i << 8));
		iwl_write_targ_mem(priv, priv->scd_base_addr +
				IWL50_SCD_CONTEXT_QUEUE_OFFSET(i), 0);
		iwl_write_targ_mem(priv, priv->scd_base_addr +
				IWL50_SCD_CONTEXT_QUEUE_OFFSET(i) +
				sizeof(u32),
				((SCD_WIN_SIZE <<
				IWL50_SCD_QUEUE_CTX_REG2_WIN_SIZE_POS) &
				IWL50_SCD_QUEUE_CTX_REG2_WIN_SIZE_MSK) |
				((SCD_FRAME_LIMIT <<
				IWL50_SCD_QUEUE_CTX_REG2_FRAME_LIMIT_POS) &
				IWL50_SCD_QUEUE_CTX_REG2_FRAME_LIMIT_MSK));
	}

	iwl_write_prph(priv, IWL50_SCD_INTERRUPT_MASK,
		       IWL_MASK(0, priv->hw_params.max_txq_num));

	/* Activate all Tx DMA/FIFO channels */
	priv->cfg->ops->lib->txq_set_sched(priv, IWL_MASK(0, 7));

	iwl5000_set_wr_ptrs(priv, IWL_CMD_QUEUE_NUM, 0);

	/* map qos queues to fifos one-to-one */
	for (i = 0; i < ARRAY_SIZE(iwl5000_default_queue_to_tx_fifo); i++) {
		int ac = iwl5000_default_queue_to_tx_fifo[i];
		iwl_txq_ctx_activate(priv, i);
		iwl5000_tx_queue_set_status(priv, &priv->txq[i], ac, 0);
	}
	/* TODO - need to initialize those FIFOs inside the loop above,
	 * not only mark them as active */
	iwl_txq_ctx_activate(priv, 4);
	iwl_txq_ctx_activate(priv, 7);
	iwl_txq_ctx_activate(priv, 8);
	iwl_txq_ctx_activate(priv, 9);

	iwl_release_nic_access(priv);
	spin_unlock_irqrestore(&priv->lock, flags);

	iwl5000_send_wimax_coex(priv);

	iwl5000_set_Xtal_calib(priv);
	iwl_send_calib_results(priv);

	return 0;
}

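/*
 * Fill priv->hw_params from the device configuration and hardware
 * revision: queue counts, SRAM sizes, antenna setup, CT-kill threshold
 * and the initial calibration set.
 */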
static int iwl5000_hw_set_hw_params(struct iwl_priv *priv)
{
	if ((priv->cfg->mod_params->num_of_queues > IWL50_NUM_QUEUES) ||
	    (priv->cfg->mod_params->num_of_queues < IWL_MIN_NUM_QUEUES)) {
		IWL_ERR(priv,
			"invalid queues_num, should be between %d and %d\n",
			IWL_MIN_NUM_QUEUES, IWL50_NUM_QUEUES);
		return -EINVAL;
	}

	priv->hw_params.max_txq_num = priv->cfg->mod_params->num_of_queues;
	priv->hw_params.dma_chnl_num = FH50_TCSR_CHNL_NUM;
	priv->hw_params.scd_bc_tbls_size =
			IWL50_NUM_QUEUES * sizeof(struct iwl5000_scd_bc_tbl);
	priv->hw_params.tfd_size = sizeof(struct iwl_tfd);
	priv->hw_params.max_stations = IWL5000_STATION_COUNT;
	priv->hw_params.bcast_sta_id = IWL5000_BROADCAST_ID;

	switch (priv->hw_rev & CSR_HW_REV_TYPE_MSK) {
	case CSR_HW_REV_TYPE_6x00:
	case CSR_HW_REV_TYPE_6x50:
		priv->hw_params.max_data_size = IWL60_RTC_DATA_SIZE;
		priv->hw_params.max_inst_size = IWL60_RTC_INST_SIZE;
		break;
	default:
		priv->hw_params.max_data_size = IWL50_RTC_DATA_SIZE;
		priv->hw_params.max_inst_size = IWL50_RTC_INST_SIZE;
	}

	priv->hw_params.max_bsm_size = 0;
	priv->hw_params.fat_channel = BIT(IEEE80211_BAND_2GHZ) |
				      BIT(IEEE80211_BAND_5GHZ);
	priv->hw_params.rx_wrt_ptr_reg = FH_RSCSR_CHNL0_WPTR;

	priv->hw_params.sens = &iwl5000_sensitivity;

	priv->hw_params.tx_chains_num = num_of_ant(priv->cfg->valid_tx_ant);
	priv->hw_params.rx_chains_num = num_of_ant(priv->cfg->valid_rx_ant);
	priv->hw_params.valid_tx_ant = priv->cfg->valid_tx_ant;
	priv->hw_params.valid_rx_ant = priv->cfg->valid_rx_ant;

	switch (priv->hw_rev & CSR_HW_REV_TYPE_MSK) {
	case CSR_HW_REV_TYPE_5150:
		/* 5150 wants the threshold in Kelvin */
		priv->hw_params.ct_kill_threshold =
				iwl5150_get_ct_threshold(priv);
		break;
	default:
		/* all others want Celsius */
		priv->hw_params.ct_kill_threshold = CT_KILL_THRESHOLD;
		break;
	}

	/* Set initial calibration set */
	switch (priv->hw_rev & CSR_HW_REV_TYPE_MSK) {
	case CSR_HW_REV_TYPE_5150:
		priv->hw_params.calib_init_cfg =
			BIT(IWL_CALIB_DC)		|
			BIT(IWL_CALIB_LO)		|
			BIT(IWL_CALIB_TX_IQ)		|
			BIT(IWL_CALIB_BASE_BAND);
		break;
	default:
		priv->hw_params.calib_init_cfg =
			BIT(IWL_CALIB_XTAL)		|
			BIT(IWL_CALIB_LO)		|
			BIT(IWL_CALIB_TX_IQ)		|
			BIT(IWL_CALIB_TX_IQ_PERD)	|
			BIT(IWL_CALIB_BASE_BAND);
		break;
	}

	return 0;
}

/**
 * iwl5000_txq_update_byte_cnt_tbl - Set up entry in Tx byte-count array
 */
static void iwl5000_txq_update_byte_cnt_tbl(struct iwl_priv *priv,
					    struct iwl_tx_queue *txq,
					    u16 byte_cnt)
{
	struct iwl5000_scd_bc_tbl *scd_bc_tbl = priv->scd_bc_tbls.addr;
	int write_ptr = txq->q.write_ptr;
	int txq_id = txq->q.id;
	u8 sec_ctl = 0;
	u8 sta_id = 0;
	u16 len = byte_cnt + IWL_TX_CRC_SIZE + IWL_TX_DELIMITER_SIZE;
	__le16 bc_ent;

	WARN_ON(len > 0xFFF || write_ptr >= TFD_QUEUE_SIZE_MAX);

	if (txq_id != IWL_CMD_QUEUE_NUM) {
		sta_id = txq->cmd[txq->q.write_ptr]->cmd.tx.sta_id;
		sec_ctl = txq->cmd[txq->q.write_ptr]->cmd.tx.sec_ctl;

		switch (sec_ctl & TX_CMD_SEC_MSK) {
		case TX_CMD_SEC_CCM:
			len += CCMP_MIC_LEN;
			break;
		case TX_CMD_SEC_TKIP:
			len += TKIP_ICV_LEN;
			break;
		case TX_CMD_SEC_WEP:
			len += WEP_IV_LEN + WEP_ICV_LEN;
			break;
		}
	}

	bc_ent = cpu_to_le16((len & 0xFFF) | (sta_id << 12));

	scd_bc_tbl[txq_id].tfd_offset[write_ptr] = bc_ent;

	if (txq->q.write_ptr < TFD_QUEUE_SIZE_BC_DUP)
		scd_bc_tbl[txq_id].tfd_offset[TFD_QUEUE_SIZE_MAX + write_ptr] =
			bc_ent;
}

| Tomas Winkler | 972cf44 | 2008-05-29 16:35:13 +0800 | [diff] [blame] | 950 | static void iwl5000_txq_inval_byte_cnt_tbl(struct iwl_priv *priv, | 
|  | 951 | struct iwl_tx_queue *txq) | 
|  | 952 | { | 
| Tomas Winkler | 4ddbb7d | 2008-11-07 09:58:40 -0800 | [diff] [blame] | 953 | struct iwl5000_scd_bc_tbl *scd_bc_tbl = priv->scd_bc_tbls.addr; | 
| Tomas Winkler | 127901a | 2008-10-23 23:48:55 -0700 | [diff] [blame] | 954 | int txq_id = txq->q.id; | 
|  | 955 | int read_ptr = txq->q.read_ptr; | 
|  | 956 | u8 sta_id = 0; | 
|  | 957 | __le16 bc_ent; | 
|  | 958 |  | 
|  | 959 | WARN_ON(read_ptr >= TFD_QUEUE_SIZE_MAX); | 
| Tomas Winkler | 972cf44 | 2008-05-29 16:35:13 +0800 | [diff] [blame] | 960 |  | 
|  | 961 | if (txq_id != IWL_CMD_QUEUE_NUM) | 
| Tomas Winkler | 127901a | 2008-10-23 23:48:55 -0700 | [diff] [blame] | 962 | sta_id = txq->cmd[read_ptr]->cmd.tx.sta_id; | 
| Tomas Winkler | 972cf44 | 2008-05-29 16:35:13 +0800 | [diff] [blame] | 963 |  | 
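|  |  | /* Overwrite the reclaimed slot with a minimal byte count (1) so the | 
|  |  | * stale value is no longer used. */ | 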
| Tomas Winkler | 127901a | 2008-10-23 23:48:55 -0700 | [diff] [blame] | 964 | bc_ent = cpu_to_le16(1 | (sta_id << 12)); | 
| Tomas Winkler | 4ddbb7d | 2008-11-07 09:58:40 -0800 | [diff] [blame] | 965 | scd_bc_tbl[txq_id].tfd_offset[read_ptr] = bc_ent; | 
| Tomas Winkler | 972cf44 | 2008-05-29 16:35:13 +0800 | [diff] [blame] | 966 |  | 
| Tomas Winkler | 127901a | 2008-10-23 23:48:55 -0700 | [diff] [blame] | 967 | if (txq->q.write_ptr < TFD_QUEUE_SIZE_BC_DUP) | 
| Tomas Winkler | 4ddbb7d | 2008-11-07 09:58:40 -0800 | [diff] [blame] | 968 | scd_bc_tbl[txq_id]. | 
| Tomas Winkler | 127901a | 2008-10-23 23:48:55 -0700 | [diff] [blame] | 969 | tfd_offset[TFD_QUEUE_SIZE_MAX + read_ptr] = bc_ent; | 
| Tomas Winkler | 972cf44 | 2008-05-29 16:35:13 +0800 | [diff] [blame] | 970 | } | 
|  | 971 |  | 
| Tomas Winkler | e26e47d | 2008-06-12 09:46:56 +0800 | [diff] [blame] | 972 | static int iwl5000_tx_queue_set_q2ratid(struct iwl_priv *priv, u16 ra_tid, | 
|  | 973 | u16 txq_id) | 
|  | 974 | { | 
|  | 975 | u32 tbl_dw_addr; | 
|  | 976 | u32 tbl_dw; | 
|  | 977 | u16 scd_q2ratid; | 
|  | 978 |  | 
|  | 979 | scd_q2ratid = ra_tid & IWL_SCD_QUEUE_RA_TID_MAP_RATID_MSK; | 
|  | 980 |  | 
|  | 981 | tbl_dw_addr = priv->scd_base_addr + | 
|  | 982 | IWL50_SCD_TRANSLATE_TBL_OFFSET_QUEUE(txq_id); | 
|  | 983 |  | 
|  | 984 | tbl_dw = iwl_read_targ_mem(priv, tbl_dw_addr); | 
|  | 985 |  | 
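|  |  | /* Each translation-table dword holds two 16-bit queue-to-RA/TID entries: | 
|  |  | * even-numbered queues use the low half-word, odd-numbered ones the high. */ | 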
|  | 986 | if (txq_id & 0x1) | 
|  | 987 | tbl_dw = (scd_q2ratid << 16) | (tbl_dw & 0x0000FFFF); | 
|  | 988 | else | 
|  | 989 | tbl_dw = scd_q2ratid | (tbl_dw & 0xFFFF0000); | 
|  | 990 |  | 
|  | 991 | iwl_write_targ_mem(priv, tbl_dw_addr, tbl_dw); | 
|  | 992 |  | 
|  | 993 | return 0; | 
|  | 994 | } | 
|  |  |  | 
|  | 995 | static void iwl5000_tx_queue_stop_scheduler(struct iwl_priv *priv, u16 txq_id) | 
|  | 996 | { | 
|  | 997 | /* Simply stop the queue, but don't change any configuration; | 
|  | 998 | * the SCD_ACT_EN bit is the write-enable mask for the ACTIVE bit. */ | 
|  | 999 | iwl_write_prph(priv, | 
|  | 1000 | IWL50_SCD_QUEUE_STATUS_BITS(txq_id), | 
|  | 1001 | (0 << IWL50_SCD_QUEUE_STTS_REG_POS_ACTIVE)| | 
|  | 1002 | (1 << IWL50_SCD_QUEUE_STTS_REG_POS_SCD_ACT_EN)); | 
|  | 1003 | } | 
|  | 1004 |  | 
|  | 1005 | static int iwl5000_txq_agg_enable(struct iwl_priv *priv, int txq_id, | 
|  | 1006 | int tx_fifo, int sta_id, int tid, u16 ssn_idx) | 
|  | 1007 | { | 
|  | 1008 | unsigned long flags; | 
|  | 1009 | int ret; | 
|  | 1010 | u16 ra_tid; | 
|  | 1011 |  | 
| Tomas Winkler | 9f17b31 | 2008-07-11 11:53:35 +0800 | [diff] [blame] | 1012 | if ((IWL50_FIRST_AMPDU_QUEUE > txq_id) || | 
|  | 1013 | (IWL50_FIRST_AMPDU_QUEUE + IWL50_NUM_AMPDU_QUEUES <= txq_id)) { | 
| Winkler, Tomas | 39aadf8 | 2008-12-19 10:37:32 +0800 | [diff] [blame] | 1014 | IWL_WARN(priv, | 
|  | 1015 | "queue number out of range: %d, must be %d to %d\n", | 
| Tomas Winkler | 9f17b31 | 2008-07-11 11:53:35 +0800 | [diff] [blame] | 1016 | txq_id, IWL50_FIRST_AMPDU_QUEUE, | 
|  | 1017 | IWL50_FIRST_AMPDU_QUEUE + IWL50_NUM_AMPDU_QUEUES - 1); | 
|  | 1018 | return -EINVAL; | 
|  | 1019 | } | 
| Tomas Winkler | e26e47d | 2008-06-12 09:46:56 +0800 | [diff] [blame] | 1020 |  | 
|  | 1021 | ra_tid = BUILD_RAxTID(sta_id, tid); | 
|  | 1022 |  | 
|  | 1023 | /* Modify device's station table to Tx this TID */ | 
| Tomas Winkler | 9f58671 | 2008-11-12 13:14:05 -0800 | [diff] [blame] | 1024 | iwl_sta_tx_modify_enable_tid(priv, sta_id, tid); | 
| Tomas Winkler | e26e47d | 2008-06-12 09:46:56 +0800 | [diff] [blame] | 1025 |  | 
|  | 1026 | spin_lock_irqsave(&priv->lock, flags); | 
|  | 1027 | ret = iwl_grab_nic_access(priv); | 
|  | 1028 | if (ret) { | 
|  | 1029 | spin_unlock_irqrestore(&priv->lock, flags); | 
|  | 1030 | return ret; | 
|  | 1031 | } | 
|  | 1032 |  | 
|  | 1033 | /* Stop this Tx queue before configuring it */ | 
|  | 1034 | iwl5000_tx_queue_stop_scheduler(priv, txq_id); | 
|  | 1035 |  | 
|  | 1036 | /* Map receiver-address / traffic-ID to this queue */ | 
|  | 1037 | iwl5000_tx_queue_set_q2ratid(priv, ra_tid, txq_id); | 
|  | 1038 |  | 
|  | 1039 | /* Set this queue as a chain-building queue */ | 
|  | 1040 | iwl_set_bits_prph(priv, IWL50_SCD_QUEUECHAIN_SEL, (1<<txq_id)); | 
|  | 1041 |  | 
|  | 1042 | /* enable aggregations for the queue */ | 
|  | 1043 | iwl_set_bits_prph(priv, IWL50_SCD_AGGR_SEL, (1<<txq_id)); | 
|  | 1044 |  | 
|  | 1045 | /* Place first TFD at index corresponding to start sequence number. | 
|  | 1046 | * Assumes that ssn_idx is valid (!= 0xFFF) */ | 
|  | 1047 | priv->txq[txq_id].q.read_ptr = (ssn_idx & 0xff); | 
|  | 1048 | priv->txq[txq_id].q.write_ptr = (ssn_idx & 0xff); | 
|  | 1049 | iwl5000_set_wr_ptrs(priv, txq_id, ssn_idx); | 
|  | 1050 |  | 
|  | 1051 | /* Set up Tx window size and frame limit for this queue */ | 
|  | 1052 | iwl_write_targ_mem(priv, priv->scd_base_addr + | 
|  | 1053 | IWL50_SCD_CONTEXT_QUEUE_OFFSET(txq_id) + | 
|  | 1054 | sizeof(u32), | 
|  | 1055 | ((SCD_WIN_SIZE << | 
|  | 1056 | IWL50_SCD_QUEUE_CTX_REG2_WIN_SIZE_POS) & | 
|  | 1057 | IWL50_SCD_QUEUE_CTX_REG2_WIN_SIZE_MSK) | | 
|  | 1058 | ((SCD_FRAME_LIMIT << | 
|  | 1059 | IWL50_SCD_QUEUE_CTX_REG2_FRAME_LIMIT_POS) & | 
|  | 1060 | IWL50_SCD_QUEUE_CTX_REG2_FRAME_LIMIT_MSK)); | 
|  | 1061 |  | 
|  | 1062 | iwl_set_bits_prph(priv, IWL50_SCD_INTERRUPT_MASK, (1 << txq_id)); | 
|  | 1063 |  | 
|  | 1064 | /* Set up Status area in SRAM, map to Tx DMA/FIFO, activate the queue */ | 
|  | 1065 | iwl5000_tx_queue_set_status(priv, &priv->txq[txq_id], tx_fifo, 1); | 
|  | 1066 |  | 
|  | 1067 | iwl_release_nic_access(priv); | 
|  | 1068 | spin_unlock_irqrestore(&priv->lock, flags); | 
|  | 1069 |  | 
|  | 1070 | return 0; | 
|  | 1071 | } | 
|  | 1072 |  | 
|  | 1073 | static int iwl5000_txq_agg_disable(struct iwl_priv *priv, u16 txq_id, | 
|  | 1074 | u16 ssn_idx, u8 tx_fifo) | 
|  | 1075 | { | 
|  | 1076 | int ret; | 
|  | 1077 |  | 
| Tomas Winkler | 9f17b31 | 2008-07-11 11:53:35 +0800 | [diff] [blame] | 1078 | if ((IWL50_FIRST_AMPDU_QUEUE > txq_id) || | 
|  | 1079 | (IWL50_FIRST_AMPDU_QUEUE + IWL50_NUM_AMPDU_QUEUES <= txq_id)) { | 
| Wey-Yi Guy | a2f1cbe | 2009-03-17 21:51:52 -0700 | [diff] [blame] | 1080 | IWL_ERR(priv, | 
| Winkler, Tomas | 39aadf8 | 2008-12-19 10:37:32 +0800 | [diff] [blame] | 1081 | "queue number out of range: %d, must be %d to %d\n", | 
| Tomas Winkler | 9f17b31 | 2008-07-11 11:53:35 +0800 | [diff] [blame] | 1082 | txq_id, IWL50_FIRST_AMPDU_QUEUE, | 
|  | 1083 | IWL50_FIRST_AMPDU_QUEUE + IWL50_NUM_AMPDU_QUEUES - 1); | 
| Tomas Winkler | e26e47d | 2008-06-12 09:46:56 +0800 | [diff] [blame] | 1084 | return -EINVAL; | 
|  | 1085 | } | 
|  | 1086 |  | 
|  | 1087 | ret = iwl_grab_nic_access(priv); | 
|  | 1088 | if (ret) | 
|  | 1089 | return ret; | 
|  | 1090 |  | 
|  | 1091 | iwl5000_tx_queue_stop_scheduler(priv, txq_id); | 
|  | 1092 |  | 
|  | 1093 | iwl_clear_bits_prph(priv, IWL50_SCD_AGGR_SEL, (1 << txq_id)); | 
|  | 1094 |  | 
|  | 1095 | priv->txq[txq_id].q.read_ptr = (ssn_idx & 0xff); | 
|  | 1096 | priv->txq[txq_id].q.write_ptr = (ssn_idx & 0xff); | 
|  | 1097 | /* assumes that ssn_idx is valid (!= 0xFFF) */ | 
|  | 1098 | iwl5000_set_wr_ptrs(priv, txq_id, ssn_idx); | 
|  | 1099 |  | 
|  | 1100 | iwl_clear_bits_prph(priv, IWL50_SCD_INTERRUPT_MASK, (1 << txq_id)); | 
|  | 1101 | iwl_txq_ctx_deactivate(priv, txq_id); | 
|  | 1102 | iwl5000_tx_queue_set_status(priv, &priv->txq[txq_id], tx_fifo, 0); | 
|  | 1103 |  | 
|  | 1104 | iwl_release_nic_access(priv); | 
|  | 1105 |  | 
|  | 1106 | return 0; | 
|  | 1107 | } | 
|  | 1108 |  | 
| Jay Sternberg | e8c00dc | 2009-01-29 11:09:15 -0800 | [diff] [blame] | 1109 | u16 iwl5000_build_addsta_hcmd(const struct iwl_addsta_cmd *cmd, u8 *data) | 
| Tomas Winkler | 2469bf2 | 2008-05-05 10:22:35 +0800 | [diff] [blame] | 1110 | { | 
|  | 1111 | u16 size = (u16)sizeof(struct iwl_addsta_cmd); | 
|  | 1112 | memcpy(data, cmd, size); | 
|  | 1113 | return size; | 
|  | 1114 | } | 
|  | 1115 |  | 
|  | 1116 |  | 
| Tomas Winkler | da1bc45 | 2008-05-29 16:35:00 +0800 | [diff] [blame] | 1117 | /* | 
| Tomas Winkler | a96a27f | 2008-10-23 23:48:56 -0700 | [diff] [blame] | 1118 | * Activate/Deactivate Tx DMA/FIFO channels according to the Tx FIFO mask; | 
| Tomas Winkler | da1bc45 | 2008-05-29 16:35:00 +0800 | [diff] [blame] | 1119 | * must be called while holding priv->lock and NIC access | 
|  | 1120 | */ | 
|  | 1121 | static void iwl5000_txq_set_sched(struct iwl_priv *priv, u32 mask) | 
| Ron Rindjunsky | 5a676bb | 2008-05-05 10:22:42 +0800 | [diff] [blame] | 1122 | { | 
| Tomas Winkler | da1bc45 | 2008-05-29 16:35:00 +0800 | [diff] [blame] | 1123 | iwl_write_prph(priv, IWL50_SCD_TXFACT, mask); | 
| Ron Rindjunsky | 5a676bb | 2008-05-05 10:22:42 +0800 | [diff] [blame] | 1124 | } | 
|  | 1125 |  | 
| Ron Rindjunsky | e532fa0 | 2008-05-29 16:35:09 +0800 | [diff] [blame] | 1126 |  | 
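|  |  | /* The scheduler SSN sits right after the frame_count agg_tx_status entries | 
|  |  | * in the Tx response, hence the pointer arithmetic below. */ | 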
|  | 1127 | static inline u32 iwl5000_get_scd_ssn(struct iwl5000_tx_resp *tx_resp) | 
|  | 1128 | { | 
| Tomas Winkler | 3ac7f14 | 2008-07-21 02:40:14 +0300 | [diff] [blame] | 1129 | return le32_to_cpup((__le32 *)&tx_resp->status + | 
| Tomas Winkler | 25a6572 | 2008-06-12 09:47:07 +0800 | [diff] [blame] | 1130 | tx_resp->frame_count) & MAX_SN; | 
| Ron Rindjunsky | e532fa0 | 2008-05-29 16:35:09 +0800 | [diff] [blame] | 1131 | } | 
|  | 1132 |  | 
|  | 1133 | static int iwl5000_tx_status_reply_tx(struct iwl_priv *priv, | 
|  | 1134 | struct iwl_ht_agg *agg, | 
|  | 1135 | struct iwl5000_tx_resp *tx_resp, | 
| Tomas Winkler | 25a6572 | 2008-06-12 09:47:07 +0800 | [diff] [blame] | 1136 | int txq_id, u16 start_idx) | 
| Ron Rindjunsky | e532fa0 | 2008-05-29 16:35:09 +0800 | [diff] [blame] | 1137 | { | 
|  | 1138 | u16 status; | 
|  | 1139 | struct agg_tx_status *frame_status = &tx_resp->status; | 
|  | 1140 | struct ieee80211_tx_info *info = NULL; | 
|  | 1141 | struct ieee80211_hdr *hdr = NULL; | 
| Tomas Winkler | e7d326a | 2008-06-12 09:47:11 +0800 | [diff] [blame] | 1142 | u32 rate_n_flags = le32_to_cpu(tx_resp->rate_n_flags); | 
| Tomas Winkler | 25a6572 | 2008-06-12 09:47:07 +0800 | [diff] [blame] | 1143 | int i, sh, idx; | 
| Ron Rindjunsky | e532fa0 | 2008-05-29 16:35:09 +0800 | [diff] [blame] | 1144 | u16 seq; | 
|  | 1145 |  | 
|  | 1146 | if (agg->wait_for_ba) | 
| Tomas Winkler | e162344 | 2009-01-27 14:27:56 -0800 | [diff] [blame] | 1147 | IWL_DEBUG_TX_REPLY(priv, "got tx response w/o block-ack\n"); | 
| Ron Rindjunsky | e532fa0 | 2008-05-29 16:35:09 +0800 | [diff] [blame] | 1148 |  | 
|  | 1149 | agg->frame_count = tx_resp->frame_count; | 
|  | 1150 | agg->start_idx = start_idx; | 
| Tomas Winkler | e7d326a | 2008-06-12 09:47:11 +0800 | [diff] [blame] | 1151 | agg->rate_n_flags = rate_n_flags; | 
| Ron Rindjunsky | e532fa0 | 2008-05-29 16:35:09 +0800 | [diff] [blame] | 1152 | agg->bitmap = 0; | 
|  | 1153 |  | 
|  | 1154 | /* # frames attempted by Tx command */ | 
|  | 1155 | if (agg->frame_count == 1) { | 
|  | 1156 | /* Only one frame was attempted; no block-ack will arrive */ | 
|  | 1157 | status = le16_to_cpu(frame_status[0].status); | 
| Tomas Winkler | 25a6572 | 2008-06-12 09:47:07 +0800 | [diff] [blame] | 1158 | idx = start_idx; | 
| Ron Rindjunsky | e532fa0 | 2008-05-29 16:35:09 +0800 | [diff] [blame] | 1159 |  | 
|  | 1160 | /* FIXME: code repetition */ | 
| Tomas Winkler | e162344 | 2009-01-27 14:27:56 -0800 | [diff] [blame] | 1161 | IWL_DEBUG_TX_REPLY(priv, "FrameCnt = %d, StartIdx=%d idx=%d\n", | 
| Ron Rindjunsky | e532fa0 | 2008-05-29 16:35:09 +0800 | [diff] [blame] | 1162 | agg->frame_count, agg->start_idx, idx); | 
|  | 1163 |  | 
|  | 1164 | info = IEEE80211_SKB_CB(priv->txq[txq_id].txb[idx].skb[0]); | 
| Johannes Berg | e6a9854 | 2008-10-21 12:40:02 +0200 | [diff] [blame] | 1165 | info->status.rates[0].count = tx_resp->failure_frame + 1; | 
| Ron Rindjunsky | e532fa0 | 2008-05-29 16:35:09 +0800 | [diff] [blame] | 1166 | info->flags &= ~IEEE80211_TX_CTL_AMPDU; | 
| Abhijeet Kolekar | c305606 | 2008-11-12 13:14:08 -0800 | [diff] [blame] | 1167 | info->flags |= iwl_is_tx_success(status) ? | 
| Tomas Winkler | 3fd07a1 | 2008-10-23 23:48:49 -0700 | [diff] [blame] | 1168 | IEEE80211_TX_STAT_ACK : 0; | 
| Tomas Winkler | e7d326a | 2008-06-12 09:47:11 +0800 | [diff] [blame] | 1169 | iwl_hwrate_to_tx_control(priv, rate_n_flags, info); | 
|  | 1170 |  | 
| Ron Rindjunsky | e532fa0 | 2008-05-29 16:35:09 +0800 | [diff] [blame] | 1171 | /* FIXME: code repetition end */ | 
|  | 1172 |  | 
| Tomas Winkler | e162344 | 2009-01-27 14:27:56 -0800 | [diff] [blame] | 1173 | IWL_DEBUG_TX_REPLY(priv, "1 Frame 0x%x failure :%d\n", | 
| Ron Rindjunsky | e532fa0 | 2008-05-29 16:35:09 +0800 | [diff] [blame] | 1174 | status & 0xff, tx_resp->failure_frame); | 
| Tomas Winkler | e162344 | 2009-01-27 14:27:56 -0800 | [diff] [blame] | 1175 | IWL_DEBUG_TX_REPLY(priv, "Rate Info rate_n_flags=%x\n", rate_n_flags); | 
| Ron Rindjunsky | e532fa0 | 2008-05-29 16:35:09 +0800 | [diff] [blame] | 1176 |  | 
|  | 1177 | agg->wait_for_ba = 0; | 
|  | 1178 | } else { | 
|  | 1179 | /* Two or more frames were attempted; expect block-ack */ | 
|  | 1180 | u64 bitmap = 0; | 
|  | 1181 | int start = agg->start_idx; | 
|  | 1182 |  | 
|  | 1183 | /* Construct bit-map of pending frames within Tx window */ | 
|  | 1184 | for (i = 0; i < agg->frame_count; i++) { | 
|  | 1185 | u16 sc; | 
|  | 1186 | status = le16_to_cpu(frame_status[i].status); | 
|  | 1187 | seq  = le16_to_cpu(frame_status[i].sequence); | 
|  | 1188 | idx = SEQ_TO_INDEX(seq); | 
|  | 1189 | txq_id = SEQ_TO_QUEUE(seq); | 
|  | 1190 |  | 
|  | 1191 | if (status & (AGG_TX_STATE_FEW_BYTES_MSK | | 
|  | 1192 | AGG_TX_STATE_ABORT_MSK)) | 
|  | 1193 | continue; | 
|  | 1194 |  | 
| Tomas Winkler | e162344 | 2009-01-27 14:27:56 -0800 | [diff] [blame] | 1195 | IWL_DEBUG_TX_REPLY(priv, "FrameCnt = %d, txq_id=%d idx=%d\n", | 
| Ron Rindjunsky | e532fa0 | 2008-05-29 16:35:09 +0800 | [diff] [blame] | 1196 | agg->frame_count, txq_id, idx); | 
|  | 1197 |  | 
|  | 1198 | hdr = iwl_tx_queue_get_hdr(priv, txq_id, idx); | 
|  | 1199 |  | 
|  | 1200 | sc = le16_to_cpu(hdr->seq_ctrl); | 
|  | 1201 | if (idx != (SEQ_TO_SN(sc) & 0xff)) { | 
| Winkler, Tomas | 15b1687 | 2008-12-19 10:37:33 +0800 | [diff] [blame] | 1202 | IWL_ERR(priv, | 
|  | 1203 | "BUG_ON idx doesn't match seq control" | 
|  | 1204 | " idx=%d, seq_idx=%d, seq=%d\n", | 
| Ron Rindjunsky | e532fa0 | 2008-05-29 16:35:09 +0800 | [diff] [blame] | 1205 | idx, SEQ_TO_SN(sc), | 
|  | 1206 | hdr->seq_ctrl); | 
|  | 1207 | return -1; | 
|  | 1208 | } | 
|  | 1209 |  | 
| Tomas Winkler | e162344 | 2009-01-27 14:27:56 -0800 | [diff] [blame] | 1210 | IWL_DEBUG_TX_REPLY(priv, "AGG Frame i=%d idx %d seq=%d\n", | 
| Ron Rindjunsky | e532fa0 | 2008-05-29 16:35:09 +0800 | [diff] [blame] | 1211 | i, idx, SEQ_TO_SN(sc)); | 
|  | 1212 |  | 
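|  |  | /* Keep the bitmap window anchored at the lowest outstanding index: if | 
|  |  | * this frame falls outside the current 64-frame window (including | 
|  |  | * sequence-number wrap), shift the bitmap and re-anchor at idx instead | 
|  |  | * of setting an out-of-range bit. */ | 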
|  | 1213 | sh = idx - start; | 
|  | 1214 | if (sh > 64) { | 
|  | 1215 | sh = (start - idx) + 0xff; | 
|  | 1216 | bitmap = bitmap << sh; | 
|  | 1217 | sh = 0; | 
|  | 1218 | start = idx; | 
|  | 1219 | } else if (sh < -64) { | 
|  | 1220 | sh = 0xff - (start - idx); | 
|  | 1221 | } else if (sh < 0) { | 
|  | 1222 | sh = start - idx; | 
|  | 1223 | start = idx; | 
|  | 1224 | bitmap = bitmap << sh; | 
|  | 1225 | sh = 0; | 
|  | 1226 | } | 
| Emmanuel Grumbach | 4aa41f1 | 2008-07-18 13:53:09 +0800 | [diff] [blame] | 1227 | bitmap |= 1ULL << sh; | 
| Tomas Winkler | e162344 | 2009-01-27 14:27:56 -0800 | [diff] [blame] | 1228 | IWL_DEBUG_TX_REPLY(priv, "start=%d bitmap=0x%llx\n", | 
| Emmanuel Grumbach | 4aa41f1 | 2008-07-18 13:53:09 +0800 | [diff] [blame] | 1229 | start, (unsigned long long)bitmap); | 
| Ron Rindjunsky | e532fa0 | 2008-05-29 16:35:09 +0800 | [diff] [blame] | 1230 | } | 
|  | 1231 |  | 
|  | 1232 | agg->bitmap = bitmap; | 
|  | 1233 | agg->start_idx = start; | 
| Tomas Winkler | e162344 | 2009-01-27 14:27:56 -0800 | [diff] [blame] | 1234 | IWL_DEBUG_TX_REPLY(priv, "Frames %d start_idx=%d bitmap=0x%llx\n", | 
| Ron Rindjunsky | e532fa0 | 2008-05-29 16:35:09 +0800 | [diff] [blame] | 1235 | agg->frame_count, agg->start_idx, | 
|  | 1236 | (unsigned long long)agg->bitmap); | 
|  | 1237 |  | 
|  | 1238 | if (bitmap) | 
|  | 1239 | agg->wait_for_ba = 1; | 
|  | 1240 | } | 
|  | 1241 | return 0; | 
|  | 1242 | } | 
|  | 1243 |  | 
|  | 1244 | static void iwl5000_rx_reply_tx(struct iwl_priv *priv, | 
|  | 1245 | struct iwl_rx_mem_buffer *rxb) | 
|  | 1246 | { | 
|  | 1247 | struct iwl_rx_packet *pkt = (struct iwl_rx_packet *)rxb->skb->data; | 
|  | 1248 | u16 sequence = le16_to_cpu(pkt->hdr.sequence); | 
|  | 1249 | int txq_id = SEQ_TO_QUEUE(sequence); | 
|  | 1250 | int index = SEQ_TO_INDEX(sequence); | 
|  | 1251 | struct iwl_tx_queue *txq = &priv->txq[txq_id]; | 
|  | 1252 | struct ieee80211_tx_info *info; | 
|  | 1253 | struct iwl5000_tx_resp *tx_resp = (void *)&pkt->u.raw[0]; | 
|  | 1254 | u32  status = le16_to_cpu(tx_resp->status.status); | 
| Tomas Winkler | 3fd07a1 | 2008-10-23 23:48:49 -0700 | [diff] [blame] | 1255 | int tid; | 
|  | 1256 | int sta_id; | 
|  | 1257 | int freed; | 
| Ron Rindjunsky | e532fa0 | 2008-05-29 16:35:09 +0800 | [diff] [blame] | 1258 |  | 
|  | 1259 | if ((index >= txq->q.n_bd) || (iwl_queue_used(&txq->q, index) == 0)) { | 
| Winkler, Tomas | 15b1687 | 2008-12-19 10:37:33 +0800 | [diff] [blame] | 1260 | IWL_ERR(priv, "Read index for DMA queue txq_id (%d) index %d " | 
| Ron Rindjunsky | e532fa0 | 2008-05-29 16:35:09 +0800 | [diff] [blame] | 1261 | "is out of range [0-%d] %d %d\n", txq_id, | 
|  | 1262 | index, txq->q.n_bd, txq->q.write_ptr, | 
|  | 1263 | txq->q.read_ptr); | 
|  | 1264 | return; | 
|  | 1265 | } | 
|  | 1266 |  | 
|  | 1267 | info = IEEE80211_SKB_CB(txq->txb[txq->q.read_ptr].skb[0]); | 
|  | 1268 | memset(&info->status, 0, sizeof(info->status)); | 
|  | 1269 |  | 
| Tomas Winkler | 3fd07a1 | 2008-10-23 23:48:49 -0700 | [diff] [blame] | 1270 | tid = (tx_resp->ra_tid & IWL50_TX_RES_TID_MSK) >> IWL50_TX_RES_TID_POS; | 
|  | 1271 | sta_id = (tx_resp->ra_tid & IWL50_TX_RES_RA_MSK) >> IWL50_TX_RES_RA_POS; | 
| Ron Rindjunsky | e532fa0 | 2008-05-29 16:35:09 +0800 | [diff] [blame] | 1272 |  | 
|  | 1273 | if (txq->sched_retry) { | 
|  | 1274 | const u32 scd_ssn = iwl5000_get_scd_ssn(tx_resp); | 
|  | 1275 | struct iwl_ht_agg *agg = NULL; | 
|  | 1276 |  | 
| Ron Rindjunsky | e532fa0 | 2008-05-29 16:35:09 +0800 | [diff] [blame] | 1277 | agg = &priv->stations[sta_id].tid[tid].agg; | 
|  | 1278 |  | 
| Tomas Winkler | 25a6572 | 2008-06-12 09:47:07 +0800 | [diff] [blame] | 1279 | iwl5000_tx_status_reply_tx(priv, agg, tx_resp, txq_id, index); | 
| Ron Rindjunsky | e532fa0 | 2008-05-29 16:35:09 +0800 | [diff] [blame] | 1280 |  | 
| Ron Rindjunsky | 3235427 | 2008-07-01 10:44:51 +0300 | [diff] [blame] | 1281 | /* check if BAR is needed */ | 
|  | 1282 | if ((tx_resp->frame_count == 1) && !iwl_is_tx_success(status)) | 
|  | 1283 | info->flags |= IEEE80211_TX_STAT_AMPDU_NO_BACK; | 
| Ron Rindjunsky | e532fa0 | 2008-05-29 16:35:09 +0800 | [diff] [blame] | 1284 |  | 
|  | 1285 | if (txq->q.read_ptr != (scd_ssn & 0xff)) { | 
| Ron Rindjunsky | e532fa0 | 2008-05-29 16:35:09 +0800 | [diff] [blame] | 1286 | index = iwl_queue_dec_wrap(scd_ssn & 0xff, txq->q.n_bd); | 
| Tomas Winkler | e162344 | 2009-01-27 14:27:56 -0800 | [diff] [blame] | 1287 | IWL_DEBUG_TX_REPLY(priv, "Retry scheduler reclaim " | 
| Tomas Winkler | 3fd07a1 | 2008-10-23 23:48:49 -0700 | [diff] [blame] | 1288 | "scd_ssn=%d idx=%d txq=%d swq=%d\n", | 
|  | 1289 | scd_ssn, index, txq_id, txq->swq_id); | 
|  | 1290 |  | 
| Tomas Winkler | 17b8892 | 2008-05-29 16:35:12 +0800 | [diff] [blame] | 1291 | freed = iwl_tx_queue_reclaim(priv, txq_id, index); | 
| Ron Rindjunsky | e532fa0 | 2008-05-29 16:35:09 +0800 | [diff] [blame] | 1292 | priv->stations[sta_id].tid[tid].tfds_in_queue -= freed; | 
|  | 1293 |  | 
| Tomas Winkler | 3fd07a1 | 2008-10-23 23:48:49 -0700 | [diff] [blame] | 1294 | if (priv->mac80211_registered && | 
|  | 1295 | (iwl_queue_space(&txq->q) > txq->q.low_mark) && | 
|  | 1296 | (agg->state != IWL_EMPTYING_HW_QUEUE_DELBA)) { | 
| Ron Rindjunsky | e532fa0 | 2008-05-29 16:35:09 +0800 | [diff] [blame] | 1297 | if (agg->state == IWL_AGG_OFF) | 
| Johannes Berg | e4e72fb | 2009-03-23 17:28:42 +0100 | [diff] [blame] | 1298 | iwl_wake_queue(priv, txq_id); | 
| Ron Rindjunsky | e532fa0 | 2008-05-29 16:35:09 +0800 | [diff] [blame] | 1299 | else | 
| Johannes Berg | e4e72fb | 2009-03-23 17:28:42 +0100 | [diff] [blame] | 1300 | iwl_wake_queue(priv, txq->swq_id); | 
| Ron Rindjunsky | e532fa0 | 2008-05-29 16:35:09 +0800 | [diff] [blame] | 1301 | } | 
| Ron Rindjunsky | e532fa0 | 2008-05-29 16:35:09 +0800 | [diff] [blame] | 1302 | } | 
|  | 1303 | } else { | 
| Tomas Winkler | 3fd07a1 | 2008-10-23 23:48:49 -0700 | [diff] [blame] | 1304 | BUG_ON(txq_id != txq->swq_id); | 
|  | 1305 |  | 
| Johannes Berg | e6a9854 | 2008-10-21 12:40:02 +0200 | [diff] [blame] | 1306 | info->status.rates[0].count = tx_resp->failure_frame + 1; | 
| Tomas Winkler | 3fd07a1 | 2008-10-23 23:48:49 -0700 | [diff] [blame] | 1307 | info->flags |= iwl_is_tx_success(status) ? | 
|  | 1308 | IEEE80211_TX_STAT_ACK : 0; | 
| Tomas Winkler | e7d326a | 2008-06-12 09:47:11 +0800 | [diff] [blame] | 1309 | iwl_hwrate_to_tx_control(priv, | 
| Ron Rindjunsky | 4f85f5b | 2008-06-09 22:54:35 +0300 | [diff] [blame] | 1310 | le32_to_cpu(tx_resp->rate_n_flags), | 
|  | 1311 | info); | 
| Ron Rindjunsky | e532fa0 | 2008-05-29 16:35:09 +0800 | [diff] [blame] | 1312 |  | 
| Tomas Winkler | e162344 | 2009-01-27 14:27:56 -0800 | [diff] [blame] | 1313 | IWL_DEBUG_TX_REPLY(priv, "TXQ %d status %s (0x%08x) rate_n_flags " | 
| Tomas Winkler | 3fd07a1 | 2008-10-23 23:48:49 -0700 | [diff] [blame] | 1314 | "0x%x retries %d\n", | 
|  | 1315 | txq_id, | 
|  | 1316 | iwl_get_tx_fail_reason(status), status, | 
|  | 1317 | le32_to_cpu(tx_resp->rate_n_flags), | 
|  | 1318 | tx_resp->failure_frame); | 
| Ron Rindjunsky | e532fa0 | 2008-05-29 16:35:09 +0800 | [diff] [blame] | 1319 |  | 
| Tomas Winkler | 3fd07a1 | 2008-10-23 23:48:49 -0700 | [diff] [blame] | 1320 | freed = iwl_tx_queue_reclaim(priv, txq_id, index); | 
|  | 1321 | if (ieee80211_is_data_qos(tx_resp->frame_ctrl)) | 
| Ron Rindjunsky | e532fa0 | 2008-05-29 16:35:09 +0800 | [diff] [blame] | 1322 | priv->stations[sta_id].tid[tid].tfds_in_queue -= freed; | 
| Tomas Winkler | 3fd07a1 | 2008-10-23 23:48:49 -0700 | [diff] [blame] | 1323 |  | 
|  | 1324 | if (priv->mac80211_registered && | 
|  | 1325 | (iwl_queue_space(&txq->q) > txq->q.low_mark)) | 
| Johannes Berg | e4e72fb | 2009-03-23 17:28:42 +0100 | [diff] [blame] | 1326 | iwl_wake_queue(priv, txq_id); | 
| Ron Rindjunsky | e532fa0 | 2008-05-29 16:35:09 +0800 | [diff] [blame] | 1327 | } | 
| Ron Rindjunsky | e532fa0 | 2008-05-29 16:35:09 +0800 | [diff] [blame] | 1328 |  | 
| Tomas Winkler | 3fd07a1 | 2008-10-23 23:48:49 -0700 | [diff] [blame] | 1329 | if (ieee80211_is_data_qos(tx_resp->frame_ctrl)) | 
|  | 1330 | iwl_txq_check_empty(priv, sta_id, tid, txq_id); | 
|  | 1331 |  | 
| Ron Rindjunsky | e532fa0 | 2008-05-29 16:35:09 +0800 | [diff] [blame] | 1332 | if (iwl_check_bits(status, TX_ABORT_REQUIRED_MSK)) | 
| Winkler, Tomas | 15b1687 | 2008-12-19 10:37:33 +0800 | [diff] [blame] | 1333 | IWL_ERR(priv, "TODO:  Implement Tx ABORT REQUIRED!!!\n"); | 
| Ron Rindjunsky | e532fa0 | 2008-05-29 16:35:09 +0800 | [diff] [blame] | 1334 | } | 
|  | 1335 |  | 
| Tomas Winkler | a96a27f | 2008-10-23 23:48:56 -0700 | [diff] [blame] | 1336 | /* Currently 5000 is the superset of everything */ | 
| Jay Sternberg | e8c00dc | 2009-01-29 11:09:15 -0800 | [diff] [blame] | 1337 | u16 iwl5000_get_hcmd_size(u8 cmd_id, u16 len) | 
| Gregory Greenman | c1adf9f | 2008-05-15 13:53:59 +0800 | [diff] [blame] | 1338 | { | 
|  | 1339 | return len; | 
|  | 1340 | } | 
|  | 1341 |  | 
| Emmanuel Grumbach | 203566f | 2008-06-12 09:46:54 +0800 | [diff] [blame] | 1342 | static void iwl5000_setup_deferred_work(struct iwl_priv *priv) | 
|  | 1343 | { | 
|  | 1344 | /* in 5000 the tx power calibration is done in uCode */ | 
|  | 1345 | priv->disable_tx_power_cal = 1; | 
|  | 1346 | } | 
|  | 1347 |  | 
| Ron Rindjunsky | b600e4e | 2008-05-15 13:54:11 +0800 | [diff] [blame] | 1348 | static void iwl5000_rx_handler_setup(struct iwl_priv *priv) | 
|  | 1349 | { | 
| Tomas Winkler | 7c616cb | 2008-05-29 16:35:05 +0800 | [diff] [blame] | 1350 | /* init calibration handlers */ | 
|  | 1351 | priv->rx_handlers[CALIBRATION_RES_NOTIFICATION] = | 
|  | 1352 | iwl5000_rx_calib_result; | 
|  | 1353 | priv->rx_handlers[CALIBRATION_COMPLETE_NOTIFICATION] = | 
|  | 1354 | iwl5000_rx_calib_complete; | 
| Ron Rindjunsky | e532fa0 | 2008-05-29 16:35:09 +0800 | [diff] [blame] | 1355 | priv->rx_handlers[REPLY_TX] = iwl5000_rx_reply_tx; | 
| Ron Rindjunsky | b600e4e | 2008-05-15 13:54:11 +0800 | [diff] [blame] | 1356 | } | 
|  | 1357 |  | 
| Tomas Winkler | 7c616cb | 2008-05-29 16:35:05 +0800 | [diff] [blame] | 1358 |  | 
| Ron Rindjunsky | 87283cc | 2008-05-29 16:34:47 +0800 | [diff] [blame] | 1359 | static int iwl5000_hw_valid_rtc_data_addr(u32 addr) | 
|  | 1360 | { | 
| Samuel Ortiz | 250bdd2 | 2008-12-19 10:37:11 +0800 | [diff] [blame] | 1361 | return (addr >= IWL50_RTC_DATA_LOWER_BOUND) && | 
| Ron Rindjunsky | 87283cc | 2008-05-29 16:34:47 +0800 | [diff] [blame] | 1362 | (addr < IWL50_RTC_DATA_UPPER_BOUND); | 
|  | 1363 | } | 
|  | 1364 |  | 
| Ron Rindjunsky | fe7a90c | 2008-05-29 16:35:14 +0800 | [diff] [blame] | 1365 | static int iwl5000_send_rxon_assoc(struct iwl_priv *priv) | 
|  | 1366 | { | 
|  | 1367 | int ret = 0; | 
|  | 1368 | struct iwl5000_rxon_assoc_cmd rxon_assoc; | 
|  | 1369 | const struct iwl_rxon_cmd *rxon1 = &priv->staging_rxon; | 
|  | 1370 | const struct iwl_rxon_cmd *rxon2 = &priv->active_rxon; | 
|  | 1371 |  | 
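|  |  | /* Compare only the fields carried by REPLY_RXON_ASSOC; if none of them | 
|  |  | * differ between the staging and active RXON, skip sending the command. */ | 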
|  | 1372 | if ((rxon1->flags == rxon2->flags) && | 
|  | 1373 | (rxon1->filter_flags == rxon2->filter_flags) && | 
|  | 1374 | (rxon1->cck_basic_rates == rxon2->cck_basic_rates) && | 
|  | 1375 | (rxon1->ofdm_ht_single_stream_basic_rates == | 
|  | 1376 | rxon2->ofdm_ht_single_stream_basic_rates) && | 
|  | 1377 | (rxon1->ofdm_ht_dual_stream_basic_rates == | 
|  | 1378 | rxon2->ofdm_ht_dual_stream_basic_rates) && | 
|  | 1379 | (rxon1->ofdm_ht_triple_stream_basic_rates == | 
|  | 1380 | rxon2->ofdm_ht_triple_stream_basic_rates) && | 
|  | 1381 | (rxon1->acquisition_data == rxon2->acquisition_data) && | 
|  | 1382 | (rxon1->rx_chain == rxon2->rx_chain) && | 
|  | 1383 | (rxon1->ofdm_basic_rates == rxon2->ofdm_basic_rates)) { | 
| Tomas Winkler | e162344 | 2009-01-27 14:27:56 -0800 | [diff] [blame] | 1384 | IWL_DEBUG_INFO(priv, "Using current RXON_ASSOC.  Not resending.\n"); | 
| Ron Rindjunsky | fe7a90c | 2008-05-29 16:35:14 +0800 | [diff] [blame] | 1385 | return 0; | 
|  | 1386 | } | 
|  | 1387 |  | 
|  | 1388 | rxon_assoc.flags = priv->staging_rxon.flags; | 
|  | 1389 | rxon_assoc.filter_flags = priv->staging_rxon.filter_flags; | 
|  | 1390 | rxon_assoc.ofdm_basic_rates = priv->staging_rxon.ofdm_basic_rates; | 
|  | 1391 | rxon_assoc.cck_basic_rates = priv->staging_rxon.cck_basic_rates; | 
|  | 1392 | rxon_assoc.reserved1 = 0; | 
|  | 1393 | rxon_assoc.reserved2 = 0; | 
|  | 1394 | rxon_assoc.reserved3 = 0; | 
|  | 1395 | rxon_assoc.ofdm_ht_single_stream_basic_rates = | 
|  | 1396 | priv->staging_rxon.ofdm_ht_single_stream_basic_rates; | 
|  | 1397 | rxon_assoc.ofdm_ht_dual_stream_basic_rates = | 
|  | 1398 | priv->staging_rxon.ofdm_ht_dual_stream_basic_rates; | 
|  | 1399 | rxon_assoc.rx_chain_select_flags = priv->staging_rxon.rx_chain; | 
|  | 1400 | rxon_assoc.ofdm_ht_triple_stream_basic_rates = | 
|  | 1401 | priv->staging_rxon.ofdm_ht_triple_stream_basic_rates; | 
|  | 1402 | rxon_assoc.acquisition_data = priv->staging_rxon.acquisition_data; | 
|  | 1403 |  | 
|  | 1404 | ret = iwl_send_cmd_pdu_async(priv, REPLY_RXON_ASSOC, | 
|  | 1405 | sizeof(rxon_assoc), &rxon_assoc, NULL); | 
|  | 1406 | if (ret) | 
|  | 1407 | return ret; | 
|  | 1408 |  | 
|  | 1409 | return ret; | 
|  | 1410 | } | 
|  |  |  | 
| Tomas Winkler | 630fe9b | 2008-06-12 09:47:08 +0800 | [diff] [blame] | 1411 | static int iwl5000_send_tx_power(struct iwl_priv *priv) | 
|  | 1412 | { | 
|  | 1413 | struct iwl5000_tx_power_dbm_cmd tx_power_cmd; | 
| Jay Sternberg | 76a2407 | 2009-01-29 11:09:14 -0800 | [diff] [blame] | 1414 | u8 tx_ant_cfg_cmd; | 
| Tomas Winkler | 630fe9b | 2008-06-12 09:47:08 +0800 | [diff] [blame] | 1415 |  | 
|  | 1416 | /* the command takes the limit in half-dBm units, so multiply by 2 */ | 
|  | 1417 | tx_power_cmd.global_lmt = (s8)(2 * priv->tx_power_user_lmt); | 
| Gregory Greenman | 853554a | 2008-06-30 17:23:01 +0800 | [diff] [blame] | 1418 | tx_power_cmd.flags = IWL50_TX_POWER_NO_CLOSED; | 
| Tomas Winkler | 630fe9b | 2008-06-12 09:47:08 +0800 | [diff] [blame] | 1419 | tx_power_cmd.srv_chan_lmt = IWL50_TX_POWER_AUTO; | 
| Jay Sternberg | 76a2407 | 2009-01-29 11:09:14 -0800 | [diff] [blame] | 1420 |  | 
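|  |  | /* uCode with API version 1 still expects the older V1 command ID; later | 
|  |  | * firmware uses REPLY_TX_POWER_DBM_CMD. */ | 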
|  | 1421 | if (IWL_UCODE_API(priv->ucode_ver) == 1) | 
|  | 1422 | tx_ant_cfg_cmd = REPLY_TX_POWER_DBM_CMD_V1; | 
|  | 1423 | else | 
|  | 1424 | tx_ant_cfg_cmd = REPLY_TX_POWER_DBM_CMD; | 
|  | 1425 |  | 
|  | 1426 | return  iwl_send_cmd_pdu_async(priv, tx_ant_cfg_cmd, | 
| Tomas Winkler | 630fe9b | 2008-06-12 09:47:08 +0800 | [diff] [blame] | 1427 | sizeof(tx_power_cmd), &tx_power_cmd, | 
|  | 1428 | NULL); | 
|  | 1429 | } | 
|  | 1430 |  | 
| Zhu Yi | 5225640 | 2008-06-30 17:23:31 +0800 | [diff] [blame] | 1431 | static void iwl5000_temperature(struct iwl_priv *priv) | 
| Emmanuel Grumbach | 8f91aec | 2008-06-30 17:23:07 +0800 | [diff] [blame] | 1432 | { | 
|  | 1433 | /* store temperature from statistics (in Celsius) */ | 
| Zhu Yi | 5225640 | 2008-06-30 17:23:31 +0800 | [diff] [blame] | 1434 | priv->temperature = le32_to_cpu(priv->statistics.general.temperature); | 
| Emmanuel Grumbach | 8f91aec | 2008-06-30 17:23:07 +0800 | [diff] [blame] | 1435 | } | 
| Ron Rindjunsky | fe7a90c | 2008-05-29 16:35:14 +0800 | [diff] [blame] | 1436 |  | 
| Tomas Winkler | caab8f1 | 2008-08-04 16:00:42 +0800 | [diff] [blame] | 1437 | /* Calc max signal level (dBm) among 3 possible receivers */ | 
| Jay Sternberg | e8c00dc | 2009-01-29 11:09:15 -0800 | [diff] [blame] | 1438 | int iwl5000_calc_rssi(struct iwl_priv *priv, | 
| Tomas Winkler | caab8f1 | 2008-08-04 16:00:42 +0800 | [diff] [blame] | 1439 | struct iwl_rx_phy_res *rx_resp) | 
|  | 1440 | { | 
|  | 1441 | /* data from PHY/DSP regarding signal strength, etc., | 
|  | 1442 | *   contents are always there, not configurable by host | 
|  | 1443 | */ | 
|  | 1444 | struct iwl5000_non_cfg_phy *ncphy = | 
|  | 1445 | (struct iwl5000_non_cfg_phy *)rx_resp->non_cfg_phy_buf; | 
|  | 1446 | u32 val, rssi_a, rssi_b, rssi_c, max_rssi; | 
|  | 1447 | u8 agc; | 
|  | 1448 |  | 
|  | 1449 | val  = le32_to_cpu(ncphy->non_cfg_phy[IWL50_RX_RES_AGC_IDX]); | 
|  | 1450 | agc = (val & IWL50_OFDM_AGC_MSK) >> IWL50_OFDM_AGC_BIT_POS; | 
|  | 1451 |  | 
|  | 1452 | /* Find max rssi among 3 possible receivers. | 
|  | 1453 | * These values are measured by the digital signal processor (DSP). | 
|  | 1454 | * They should stay fairly constant even as the signal strength varies, | 
|  | 1455 | *   if the radio's automatic gain control (AGC) is working right. | 
|  | 1456 | * AGC value (see below) will provide the "interesting" info. | 
|  | 1457 | */ | 
|  | 1458 | val = le32_to_cpu(ncphy->non_cfg_phy[IWL50_RX_RES_RSSI_AB_IDX]); | 
|  | 1459 | rssi_a = (val & IWL50_OFDM_RSSI_A_MSK) >> IWL50_OFDM_RSSI_A_BIT_POS; | 
|  | 1460 | rssi_b = (val & IWL50_OFDM_RSSI_B_MSK) >> IWL50_OFDM_RSSI_B_BIT_POS; | 
|  | 1461 | val = le32_to_cpu(ncphy->non_cfg_phy[IWL50_RX_RES_RSSI_C_IDX]); | 
|  | 1462 | rssi_c = (val & IWL50_OFDM_RSSI_C_MSK) >> IWL50_OFDM_RSSI_C_BIT_POS; | 
|  | 1463 |  | 
|  | 1464 | max_rssi = max_t(u32, rssi_a, rssi_b); | 
|  | 1465 | max_rssi = max_t(u32, max_rssi, rssi_c); | 
|  | 1466 |  | 
| Tomas Winkler | e162344 | 2009-01-27 14:27:56 -0800 | [diff] [blame] | 1467 | IWL_DEBUG_STATS(priv, "Rssi In A %d B %d C %d Max %d AGC dB %d\n", | 
| Tomas Winkler | caab8f1 | 2008-08-04 16:00:42 +0800 | [diff] [blame] | 1468 | rssi_a, rssi_b, rssi_c, max_rssi, agc); | 
|  | 1469 |  | 
|  | 1470 | /* dBm = max_rssi dB - agc dB - constant. | 
|  | 1471 | * Higher AGC (higher radio gain) means lower signal. */ | 
| Samuel Ortiz | 250bdd2 | 2008-12-19 10:37:11 +0800 | [diff] [blame] | 1472 | return max_rssi - agc - IWL49_RSSI_OFFSET; | 
| Tomas Winkler | caab8f1 | 2008-08-04 16:00:42 +0800 | [diff] [blame] | 1473 | } | 
|  | 1474 |  | 
| Jay Sternberg | e8c00dc | 2009-01-29 11:09:15 -0800 | [diff] [blame] | 1475 | struct iwl_hcmd_ops iwl5000_hcmd = { | 
| Ron Rindjunsky | fe7a90c | 2008-05-29 16:35:14 +0800 | [diff] [blame] | 1476 | .rxon_assoc = iwl5000_send_rxon_assoc, | 
| Tomas Winkler | da8dec2 | 2008-04-24 11:55:24 -0700 | [diff] [blame] | 1477 | }; | 
|  | 1478 |  | 
| Jay Sternberg | e8c00dc | 2009-01-29 11:09:15 -0800 | [diff] [blame] | 1479 | struct iwl_hcmd_utils_ops iwl5000_hcmd_utils = { | 
| Gregory Greenman | c1adf9f | 2008-05-15 13:53:59 +0800 | [diff] [blame] | 1480 | .get_hcmd_size = iwl5000_get_hcmd_size, | 
| Tomas Winkler | 2469bf2 | 2008-05-05 10:22:35 +0800 | [diff] [blame] | 1481 | .build_addsta_hcmd = iwl5000_build_addsta_hcmd, | 
| Emmanuel Grumbach | 33fd503 | 2008-04-24 11:55:30 -0700 | [diff] [blame] | 1482 | .gain_computation = iwl5000_gain_computation, | 
|  | 1483 | .chain_noise_reset = iwl5000_chain_noise_reset, | 
| Emmanuel Grumbach | a326a5d | 2008-07-11 11:53:31 +0800 | [diff] [blame] | 1484 | .rts_tx_cmd_flag = iwl5000_rts_tx_cmd_flag, | 
| Tomas Winkler | caab8f1 | 2008-08-04 16:00:42 +0800 | [diff] [blame] | 1485 | .calc_rssi = iwl5000_calc_rssi, | 
| Tomas Winkler | da8dec2 | 2008-04-24 11:55:24 -0700 | [diff] [blame] | 1486 | }; | 
|  | 1487 |  | 
| Jay Sternberg | e8c00dc | 2009-01-29 11:09:15 -0800 | [diff] [blame] | 1488 | struct iwl_lib_ops iwl5000_lib = { | 
| Tomas Winkler | fdd3e8a | 2008-04-24 11:55:28 -0700 | [diff] [blame] | 1489 | .set_hw_params = iwl5000_hw_set_hw_params, | 
| Emmanuel Grumbach | 7839fc0 | 2008-04-24 11:55:34 -0700 | [diff] [blame] | 1490 | .txq_update_byte_cnt_tbl = iwl5000_txq_update_byte_cnt_tbl, | 
| Tomas Winkler | 972cf44 | 2008-05-29 16:35:13 +0800 | [diff] [blame] | 1491 | .txq_inval_byte_cnt_tbl = iwl5000_txq_inval_byte_cnt_tbl, | 
| Tomas Winkler | da1bc45 | 2008-05-29 16:35:00 +0800 | [diff] [blame] | 1492 | .txq_set_sched = iwl5000_txq_set_sched, | 
| Tomas Winkler | e26e47d | 2008-06-12 09:46:56 +0800 | [diff] [blame] | 1493 | .txq_agg_enable = iwl5000_txq_agg_enable, | 
|  | 1494 | .txq_agg_disable = iwl5000_txq_agg_disable, | 
| Samuel Ortiz | 7aaa1d7 | 2009-01-19 15:30:26 -0800 | [diff] [blame] | 1495 | .txq_attach_buf_to_tfd = iwl_hw_txq_attach_buf_to_tfd, | 
|  | 1496 | .txq_free_tfd = iwl_hw_txq_free_tfd, | 
| Samuel Ortiz | a8e74e2 | 2009-01-23 13:45:14 -0800 | [diff] [blame] | 1497 | .txq_init = iwl_hw_tx_queue_init, | 
| Ron Rindjunsky | b600e4e | 2008-05-15 13:54:11 +0800 | [diff] [blame] | 1498 | .rx_handler_setup = iwl5000_rx_handler_setup, | 
| Emmanuel Grumbach | 203566f | 2008-06-12 09:46:54 +0800 | [diff] [blame] | 1499 | .setup_deferred_work = iwl5000_setup_deferred_work, | 
| Ron Rindjunsky | 87283cc | 2008-05-29 16:34:47 +0800 | [diff] [blame] | 1500 | .is_valid_rtc_data_addr = iwl5000_hw_valid_rtc_data_addr, | 
| Ron Rindjunsky | dbb983b | 2008-05-15 13:54:12 +0800 | [diff] [blame] | 1501 | .load_ucode = iwl5000_load_ucode, | 
| Ron Rindjunsky | 99da1b4 | 2008-05-15 13:54:13 +0800 | [diff] [blame] | 1502 | .init_alive_start = iwl5000_init_alive_start, | 
|  | 1503 | .alive_notify = iwl5000_alive_notify, | 
| Tomas Winkler | 630fe9b | 2008-06-12 09:47:08 +0800 | [diff] [blame] | 1504 | .send_tx_power = iwl5000_send_tx_power, | 
| Emmanuel Grumbach | 8f91aec | 2008-06-30 17:23:07 +0800 | [diff] [blame] | 1505 | .temperature = iwl5000_temperature, | 
| Emmanuel Grumbach | 5b9f8cd | 2008-10-29 14:05:46 -0700 | [diff] [blame] | 1506 | .update_chain_flags = iwl_update_chain_flags, | 
| Tomas Winkler | 30d5926 | 2008-04-24 11:55:25 -0700 | [diff] [blame] | 1507 | .apm_ops = { | 
|  | 1508 | .init =	iwl5000_apm_init, | 
| Tomas Winkler | 7f06610 | 2008-05-29 16:34:57 +0800 | [diff] [blame] | 1509 | .reset = iwl5000_apm_reset, | 
| Tomas Winkler | f118a91 | 2008-05-29 16:34:58 +0800 | [diff] [blame] | 1510 | .stop = iwl5000_apm_stop, | 
| Ron Rindjunsky | 5a83535 | 2008-05-05 10:22:29 +0800 | [diff] [blame] | 1511 | .config = iwl5000_nic_config, | 
| Emmanuel Grumbach | 5b9f8cd | 2008-10-29 14:05:46 -0700 | [diff] [blame] | 1512 | .set_pwr_src = iwl_set_pwr_src, | 
| Tomas Winkler | 30d5926 | 2008-04-24 11:55:25 -0700 | [diff] [blame] | 1513 | }, | 
| Tomas Winkler | da8dec2 | 2008-04-24 11:55:24 -0700 | [diff] [blame] | 1514 | .eeprom_ops = { | 
| Tomas Winkler | 25ae398 | 2008-04-24 11:55:27 -0700 | [diff] [blame] | 1515 | .regulatory_bands = { | 
|  | 1516 | EEPROM_5000_REG_BAND_1_CHANNELS, | 
|  | 1517 | EEPROM_5000_REG_BAND_2_CHANNELS, | 
|  | 1518 | EEPROM_5000_REG_BAND_3_CHANNELS, | 
|  | 1519 | EEPROM_5000_REG_BAND_4_CHANNELS, | 
|  | 1520 | EEPROM_5000_REG_BAND_5_CHANNELS, | 
|  | 1521 | EEPROM_5000_REG_BAND_24_FAT_CHANNELS, | 
|  | 1522 | EEPROM_5000_REG_BAND_52_FAT_CHANNELS | 
|  | 1523 | }, | 
| Tomas Winkler | da8dec2 | 2008-04-24 11:55:24 -0700 | [diff] [blame] | 1524 | .verify_signature  = iwlcore_eeprom_verify_signature, | 
|  | 1525 | .acquire_semaphore = iwlcore_eeprom_acquire_semaphore, | 
|  | 1526 | .release_semaphore = iwlcore_eeprom_release_semaphore, | 
| Tomas Winkler | 0ef2ca6 | 2008-10-23 23:48:51 -0700 | [diff] [blame] | 1527 | .calib_version	= iwl5000_eeprom_calib_version, | 
| Tomas Winkler | 25ae398 | 2008-04-24 11:55:27 -0700 | [diff] [blame] | 1528 | .query_addr = iwl5000_eeprom_query_addr, | 
| Tomas Winkler | da8dec2 | 2008-04-24 11:55:24 -0700 | [diff] [blame] | 1529 | }, | 
|  | 1530 | }; | 
|  | 1531 |  | 
| Jay Sternberg | cec2d3f | 2009-01-19 15:30:33 -0800 | [diff] [blame] | 1532 | struct iwl_ops iwl5000_ops = { | 
| Tomas Winkler | da8dec2 | 2008-04-24 11:55:24 -0700 | [diff] [blame] | 1533 | .lib = &iwl5000_lib, | 
|  | 1534 | .hcmd = &iwl5000_hcmd, | 
|  | 1535 | .utils = &iwl5000_hcmd_utils, | 
|  | 1536 | }; | 
|  | 1537 |  | 
| Jay Sternberg | cec2d3f | 2009-01-19 15:30:33 -0800 | [diff] [blame] | 1538 | struct iwl_mod_params iwl50_mod_params = { | 
| Tomas Winkler | 5a6a256 | 2008-04-24 11:55:23 -0700 | [diff] [blame] | 1539 | .num_of_queues = IWL50_NUM_QUEUES, | 
| Tomas Winkler | 9f17b31 | 2008-07-11 11:53:35 +0800 | [diff] [blame] | 1540 | .num_of_ampdu_queues = IWL50_NUM_AMPDU_QUEUES, | 
| Tomas Winkler | 5a6a256 | 2008-04-24 11:55:23 -0700 | [diff] [blame] | 1541 | .amsdu_size_8K = 1, | 
| Ester Kummer | 3a1081e | 2008-05-06 11:05:14 +0800 | [diff] [blame] | 1542 | .restart_fw = 1, | 
| Tomas Winkler | 5a6a256 | 2008-04-24 11:55:23 -0700 | [diff] [blame] | 1543 | /* the rest are 0 by default */ | 
|  | 1544 | }; | 
|  | 1545 |  | 
|  | 1547 | struct iwl_cfg iwl5300_agn_cfg = { | 
|  | 1548 | .name = "5300AGN", | 
| Reinette Chatre | a0987a8 | 2008-12-02 12:14:06 -0800 | [diff] [blame] | 1549 | .fw_name_pre = IWL5000_FW_PRE, | 
|  | 1550 | .ucode_api_max = IWL5000_UCODE_API_MAX, | 
|  | 1551 | .ucode_api_min = IWL5000_UCODE_API_MIN, | 
| Tomas Winkler | 5a6a256 | 2008-04-24 11:55:23 -0700 | [diff] [blame] | 1552 | .sku = IWL_SKU_A|IWL_SKU_G|IWL_SKU_N, | 
| Tomas Winkler | da8dec2 | 2008-04-24 11:55:24 -0700 | [diff] [blame] | 1553 | .ops = &iwl5000_ops, | 
| Tomas Winkler | 25ae398 | 2008-04-24 11:55:27 -0700 | [diff] [blame] | 1554 | .eeprom_size = IWL_5000_EEPROM_IMG_SIZE, | 
| Tomas Winkler | 0ef2ca6 | 2008-10-23 23:48:51 -0700 | [diff] [blame] | 1555 | .eeprom_ver = EEPROM_5000_EEPROM_VERSION, | 
|  | 1556 | .eeprom_calib_ver = EEPROM_5000_TX_POWER_VERSION, | 
| Tomas Winkler | 5a6a256 | 2008-04-24 11:55:23 -0700 | [diff] [blame] | 1557 | .mod_params = &iwl50_mod_params, | 
| Jay Sternberg | c0bac76 | 2009-02-02 16:21:14 -0800 | [diff] [blame] | 1558 | .valid_tx_ant = ANT_ABC, | 
|  | 1559 | .valid_rx_ant = ANT_ABC, | 
| Jay Sternberg | 050681b | 2009-01-29 11:09:13 -0800 | [diff] [blame] | 1560 | .need_pll_cfg = true, | 
| Tomas Winkler | 5a6a256 | 2008-04-24 11:55:23 -0700 | [diff] [blame] | 1561 | }; | 
|  | 1562 |  | 
| Esti Kummer | 4740863 | 2008-07-11 11:53:30 +0800 | [diff] [blame] | 1563 | struct iwl_cfg iwl5100_bg_cfg = { | 
|  | 1564 | .name = "5100BG", | 
| Reinette Chatre | a0987a8 | 2008-12-02 12:14:06 -0800 | [diff] [blame] | 1565 | .fw_name_pre = IWL5000_FW_PRE, | 
|  | 1566 | .ucode_api_max = IWL5000_UCODE_API_MAX, | 
|  | 1567 | .ucode_api_min = IWL5000_UCODE_API_MIN, | 
| Esti Kummer | 4740863 | 2008-07-11 11:53:30 +0800 | [diff] [blame] | 1568 | .sku = IWL_SKU_G, | 
|  | 1569 | .ops = &iwl5000_ops, | 
|  | 1570 | .eeprom_size = IWL_5000_EEPROM_IMG_SIZE, | 
| Tomas Winkler | 0ef2ca6 | 2008-10-23 23:48:51 -0700 | [diff] [blame] | 1571 | .eeprom_ver = EEPROM_5000_EEPROM_VERSION, | 
|  | 1572 | .eeprom_calib_ver = EEPROM_5000_TX_POWER_VERSION, | 
| Esti Kummer | 4740863 | 2008-07-11 11:53:30 +0800 | [diff] [blame] | 1573 | .mod_params = &iwl50_mod_params, | 
| Jay Sternberg | c0bac76 | 2009-02-02 16:21:14 -0800 | [diff] [blame] | 1574 | .valid_tx_ant = ANT_B, | 
|  | 1575 | .valid_rx_ant = ANT_AB, | 
| Jay Sternberg | 050681b | 2009-01-29 11:09:13 -0800 | [diff] [blame] | 1576 | .need_pll_cfg = true, | 
| Esti Kummer | 4740863 | 2008-07-11 11:53:30 +0800 | [diff] [blame] | 1577 | }; | 
|  | 1578 |  | 
|  | 1579 | struct iwl_cfg iwl5100_abg_cfg = { | 
|  | 1580 | .name = "5100ABG", | 
| Reinette Chatre | a0987a8 | 2008-12-02 12:14:06 -0800 | [diff] [blame] | 1581 | .fw_name_pre = IWL5000_FW_PRE, | 
|  | 1582 | .ucode_api_max = IWL5000_UCODE_API_MAX, | 
|  | 1583 | .ucode_api_min = IWL5000_UCODE_API_MIN, | 
| Esti Kummer | 4740863 | 2008-07-11 11:53:30 +0800 | [diff] [blame] | 1584 | .sku = IWL_SKU_A|IWL_SKU_G, | 
|  | 1585 | .ops = &iwl5000_ops, | 
|  | 1586 | .eeprom_size = IWL_5000_EEPROM_IMG_SIZE, | 
| Tomas Winkler | 0ef2ca6 | 2008-10-23 23:48:51 -0700 | [diff] [blame] | 1587 | .eeprom_ver = EEPROM_5000_EEPROM_VERSION, | 
|  | 1588 | .eeprom_calib_ver = EEPROM_5000_TX_POWER_VERSION, | 
| Esti Kummer | 4740863 | 2008-07-11 11:53:30 +0800 | [diff] [blame] | 1589 | .mod_params = &iwl50_mod_params, | 
| Jay Sternberg | c0bac76 | 2009-02-02 16:21:14 -0800 | [diff] [blame] | 1590 | .valid_tx_ant = ANT_B, | 
|  | 1591 | .valid_rx_ant = ANT_AB, | 
| Jay Sternberg | 050681b | 2009-01-29 11:09:13 -0800 | [diff] [blame] | 1592 | .need_pll_cfg = true, | 
| Esti Kummer | 4740863 | 2008-07-11 11:53:30 +0800 | [diff] [blame] | 1593 | }; | 
|  | 1594 |  | 
| Tomas Winkler | 5a6a256 | 2008-04-24 11:55:23 -0700 | [diff] [blame] | 1595 | struct iwl_cfg iwl5100_agn_cfg = { | 
|  | 1596 | .name = "5100AGN", | 
| Reinette Chatre | a0987a8 | 2008-12-02 12:14:06 -0800 | [diff] [blame] | 1597 | .fw_name_pre = IWL5000_FW_PRE, | 
|  | 1598 | .ucode_api_max = IWL5000_UCODE_API_MAX, | 
|  | 1599 | .ucode_api_min = IWL5000_UCODE_API_MIN, | 
| Tomas Winkler | 5a6a256 | 2008-04-24 11:55:23 -0700 | [diff] [blame] | 1600 | .sku = IWL_SKU_A|IWL_SKU_G|IWL_SKU_N, | 
| Tomas Winkler | da8dec2 | 2008-04-24 11:55:24 -0700 | [diff] [blame] | 1601 | .ops = &iwl5000_ops, | 
| Tomas Winkler | 25ae398 | 2008-04-24 11:55:27 -0700 | [diff] [blame] | 1602 | .eeprom_size = IWL_5000_EEPROM_IMG_SIZE, | 
| Tomas Winkler | 0ef2ca6 | 2008-10-23 23:48:51 -0700 | [diff] [blame] | 1603 | .eeprom_ver = EEPROM_5000_EEPROM_VERSION, | 
|  | 1604 | .eeprom_calib_ver = EEPROM_5000_TX_POWER_VERSION, | 
| Tomas Winkler | 5a6a256 | 2008-04-24 11:55:23 -0700 | [diff] [blame] | 1605 | .mod_params = &iwl50_mod_params, | 
| Jay Sternberg | c0bac76 | 2009-02-02 16:21:14 -0800 | [diff] [blame] | 1606 | .valid_tx_ant = ANT_B, | 
|  | 1607 | .valid_rx_ant = ANT_AB, | 
| Jay Sternberg | 050681b | 2009-01-29 11:09:13 -0800 | [diff] [blame] | 1608 | .need_pll_cfg = true, | 
| Tomas Winkler | 5a6a256 | 2008-04-24 11:55:23 -0700 | [diff] [blame] | 1609 | }; | 
|  | 1610 |  | 
|  | 1611 | struct iwl_cfg iwl5350_agn_cfg = { | 
|  | 1612 | .name = "5350AGN", | 
| Reinette Chatre | a0987a8 | 2008-12-02 12:14:06 -0800 | [diff] [blame] | 1613 | .fw_name_pre = IWL5000_FW_PRE, | 
|  | 1614 | .ucode_api_max = IWL5000_UCODE_API_MAX, | 
|  | 1615 | .ucode_api_min = IWL5000_UCODE_API_MIN, | 
| Tomas Winkler | 5a6a256 | 2008-04-24 11:55:23 -0700 | [diff] [blame] | 1616 | .sku = IWL_SKU_A|IWL_SKU_G|IWL_SKU_N, | 
| Tomas Winkler | da8dec2 | 2008-04-24 11:55:24 -0700 | [diff] [blame] | 1617 | .ops = &iwl5000_ops, | 
| Tomas Winkler | 25ae398 | 2008-04-24 11:55:27 -0700 | [diff] [blame] | 1618 | .eeprom_size = IWL_5000_EEPROM_IMG_SIZE, | 
| Tomas Winkler | 0ef2ca6 | 2008-10-23 23:48:51 -0700 | [diff] [blame] | 1619 | .eeprom_ver = EEPROM_5050_EEPROM_VERSION, | 
|  | 1620 | .eeprom_calib_ver = EEPROM_5050_TX_POWER_VERSION, | 
| Tomas Winkler | 5a6a256 | 2008-04-24 11:55:23 -0700 | [diff] [blame] | 1621 | .mod_params = &iwl50_mod_params, | 
| Jay Sternberg | c0bac76 | 2009-02-02 16:21:14 -0800 | [diff] [blame] | 1622 | .valid_tx_ant = ANT_ABC, | 
|  | 1623 | .valid_rx_ant = ANT_ABC, | 
| Jay Sternberg | 050681b | 2009-01-29 11:09:13 -0800 | [diff] [blame] | 1624 | .need_pll_cfg = true, | 
| Tomas Winkler | 5a6a256 | 2008-04-24 11:55:23 -0700 | [diff] [blame] | 1625 | }; | 
|  | 1626 |  | 
| Tomas Winkler | 7100e92 | 2008-12-01 16:32:18 -0800 | [diff] [blame] | 1627 | struct iwl_cfg iwl5150_agn_cfg = { | 
|  | 1628 | .name = "5150AGN", | 
| Reinette Chatre | a0987a8 | 2008-12-02 12:14:06 -0800 | [diff] [blame] | 1629 | .fw_name_pre = IWL5150_FW_PRE, | 
|  | 1630 | .ucode_api_max = IWL5150_UCODE_API_MAX, | 
|  | 1631 | .ucode_api_min = IWL5150_UCODE_API_MIN, | 
| Tomas Winkler | 7100e92 | 2008-12-01 16:32:18 -0800 | [diff] [blame] | 1632 | .sku = IWL_SKU_A|IWL_SKU_G|IWL_SKU_N, | 
|  | 1633 | .ops = &iwl5000_ops, | 
|  | 1634 | .eeprom_size = IWL_5000_EEPROM_IMG_SIZE, | 
| Tomas Winkler | fd63edb | 2008-12-01 16:32:21 -0800 | [diff] [blame] | 1635 | .eeprom_ver = EEPROM_5050_EEPROM_VERSION, | 
|  | 1636 | .eeprom_calib_ver = EEPROM_5050_TX_POWER_VERSION, | 
| Tomas Winkler | 7100e92 | 2008-12-01 16:32:18 -0800 | [diff] [blame] | 1637 | .mod_params = &iwl50_mod_params, | 
| Jay Sternberg | c0bac76 | 2009-02-02 16:21:14 -0800 | [diff] [blame] | 1638 | .valid_tx_ant = ANT_A, | 
|  | 1639 | .valid_rx_ant = ANT_AB, | 
| Jay Sternberg | 050681b | 2009-01-29 11:09:13 -0800 | [diff] [blame] | 1640 | .need_pll_cfg = true, | 
| Tomas Winkler | 7100e92 | 2008-12-01 16:32:18 -0800 | [diff] [blame] | 1641 | }; | 
|  | 1642 |  | 
| Reinette Chatre | a0987a8 | 2008-12-02 12:14:06 -0800 | [diff] [blame] | 1643 | MODULE_FIRMWARE(IWL5000_MODULE_FIRMWARE(IWL5000_UCODE_API_MAX)); | 
|  | 1644 | MODULE_FIRMWARE(IWL5150_MODULE_FIRMWARE(IWL5150_UCODE_API_MAX)); | 
| Tomas Winkler | c9f79ed | 2008-09-11 11:45:21 +0800 | [diff] [blame] | 1645 |  | 
| Tomas Winkler | 5a6a256 | 2008-04-24 11:55:23 -0700 | [diff] [blame] | 1646 | module_param_named(disable50, iwl50_mod_params.disable, int, 0444); | 
|  | 1647 | MODULE_PARM_DESC(disable50, | 
|  | 1648 | "manually disable the 50XX radio (default 0 [radio on])"); | 
|  | 1649 | module_param_named(swcrypto50, iwl50_mod_params.sw_crypto, bool, 0444); | 
|  | 1650 | MODULE_PARM_DESC(swcrypto50, | 
|  | 1651 | "using software crypto engine (default 0 [hardware])\n"); | 
| Wu, Fengguang | 95aa194 | 2008-12-17 16:52:30 +0800 | [diff] [blame] | 1652 | module_param_named(debug50, iwl50_mod_params.debug, uint, 0444); | 
| Tomas Winkler | 5a6a256 | 2008-04-24 11:55:23 -0700 | [diff] [blame] | 1653 | MODULE_PARM_DESC(debug50, "50XX debug output mask"); | 
|  | 1654 | module_param_named(queues_num50, iwl50_mod_params.num_of_queues, int, 0444); | 
|  | 1655 | MODULE_PARM_DESC(queues_num50, "number of hw queues in 50xx series"); | 
| Ron Rindjunsky | 4977929 | 2008-06-30 17:23:21 +0800 | [diff] [blame] | 1656 | module_param_named(11n_disable50, iwl50_mod_params.disable_11n, int, 0444); | 
|  | 1657 | MODULE_PARM_DESC(11n_disable50, "disable 50XX 11n functionality"); | 
| Tomas Winkler | 5a6a256 | 2008-04-24 11:55:23 -0700 | [diff] [blame] | 1658 | module_param_named(amsdu_size_8K50, iwl50_mod_params.amsdu_size_8K, int, 0444); | 
|  | 1659 | MODULE_PARM_DESC(amsdu_size_8K50, "enable 8K amsdu size in 50XX series"); | 
| Ester Kummer | 3a1081e | 2008-05-06 11:05:14 +0800 | [diff] [blame] | 1660 | module_param_named(fw_restart50, iwl50_mod_params.restart_fw, int, 0444); | 
|  | 1661 | MODULE_PARM_DESC(fw_restart50, "restart firmware in case of error"); |