2 * Copyright (c) 2008-2009 Atheros Communications Inc.
4 * Permission to use, copy, modify, and/or distribute this software for any
5 * purpose with or without fee is hereby granted, provided that the above
6 * copyright notice and this permission notice appear in all copies.
8 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
9 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
10 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
11 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
12 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
13 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
14 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
18 #include <asm/unaligned.h>
24 #define ATH9K_CLOCK_RATE_CCK 22
25 #define ATH9K_CLOCK_RATE_5GHZ_OFDM 40
26 #define ATH9K_CLOCK_RATE_2GHZ_OFDM 44
28 static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type);
29 static void ath9k_hw_set_regs(struct ath_hw *ah, struct ath9k_channel *chan);
30 static u32 ath9k_hw_ini_fixup(struct ath_hw *ah,
31 struct ar5416_eeprom_def *pEepData,
34 MODULE_AUTHOR("Atheros Communications");
35 MODULE_DESCRIPTION("Support for Atheros 802.11n wireless LAN cards.");
36 MODULE_SUPPORTED_DEVICE("Atheros 802.11n WLAN cards");
37 MODULE_LICENSE("Dual BSD/GPL");
39 static int __init ath9k_init(void)
43 module_init(ath9k_init);
45 static void __exit ath9k_exit(void)
49 module_exit(ath9k_exit);
51 /********************/
52 /* Helper Functions */
53 /********************/
55 static u32 ath9k_hw_mac_usec(struct ath_hw *ah, u32 clks)
57 struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;
59 if (!ah->curchan) /* should really check for CCK instead */
60 return clks / ATH9K_CLOCK_RATE_CCK;
61 if (conf->channel->band == IEEE80211_BAND_2GHZ)
62 return clks / ATH9K_CLOCK_RATE_2GHZ_OFDM;
64 return clks / ATH9K_CLOCK_RATE_5GHZ_OFDM;
67 static u32 ath9k_hw_mac_to_usec(struct ath_hw *ah, u32 clks)
69 struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;
71 if (conf_is_ht40(conf))
72 return ath9k_hw_mac_usec(ah, clks) / 2;
74 return ath9k_hw_mac_usec(ah, clks);
77 static u32 ath9k_hw_mac_clks(struct ath_hw *ah, u32 usecs)
79 struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;
81 if (!ah->curchan) /* should really check for CCK instead */
82 return usecs *ATH9K_CLOCK_RATE_CCK;
83 if (conf->channel->band == IEEE80211_BAND_2GHZ)
84 return usecs *ATH9K_CLOCK_RATE_2GHZ_OFDM;
85 return usecs *ATH9K_CLOCK_RATE_5GHZ_OFDM;
88 static u32 ath9k_hw_mac_to_clks(struct ath_hw *ah, u32 usecs)
90 struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;
92 if (conf_is_ht40(conf))
93 return ath9k_hw_mac_clks(ah, usecs) * 2;
95 return ath9k_hw_mac_clks(ah, usecs);
98 bool ath9k_hw_wait(struct ath_hw *ah, u32 reg, u32 mask, u32 val, u32 timeout)
102 BUG_ON(timeout < AH_TIME_QUANTUM);
104 for (i = 0; i < (timeout / AH_TIME_QUANTUM); i++) {
105 if ((REG_READ(ah, reg) & mask) == val)
108 udelay(AH_TIME_QUANTUM);
111 ath_print(ath9k_hw_common(ah), ATH_DBG_ANY,
112 "timeout (%d us) on reg 0x%x: 0x%08x & 0x%08x != 0x%08x\n",
113 timeout, reg, REG_READ(ah, reg), mask, val);
117 EXPORT_SYMBOL(ath9k_hw_wait);
119 u32 ath9k_hw_reverse_bits(u32 val, u32 n)
124 for (i = 0, retval = 0; i < n; i++) {
125 retval = (retval << 1) | (val & 1);
131 bool ath9k_get_channel_edges(struct ath_hw *ah,
135 struct ath9k_hw_capabilities *pCap = &ah->caps;
137 if (flags & CHANNEL_5GHZ) {
138 *low = pCap->low_5ghz_chan;
139 *high = pCap->high_5ghz_chan;
142 if ((flags & CHANNEL_2GHZ)) {
143 *low = pCap->low_2ghz_chan;
144 *high = pCap->high_2ghz_chan;
150 u16 ath9k_hw_computetxtime(struct ath_hw *ah,
151 const struct ath_rate_table *rates,
152 u32 frameLen, u16 rateix,
155 u32 bitsPerSymbol, numBits, numSymbols, phyTime, txTime;
158 kbps = rates->info[rateix].ratekbps;
163 switch (rates->info[rateix].phy) {
164 case WLAN_RC_PHY_CCK:
165 phyTime = CCK_PREAMBLE_BITS + CCK_PLCP_BITS;
166 if (shortPreamble && rates->info[rateix].short_preamble)
168 numBits = frameLen << 3;
169 txTime = CCK_SIFS_TIME + phyTime + ((numBits * 1000) / kbps);
171 case WLAN_RC_PHY_OFDM:
172 if (ah->curchan && IS_CHAN_QUARTER_RATE(ah->curchan)) {
173 bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME_QUARTER) / 1000;
174 numBits = OFDM_PLCP_BITS + (frameLen << 3);
175 numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
176 txTime = OFDM_SIFS_TIME_QUARTER
177 + OFDM_PREAMBLE_TIME_QUARTER
178 + (numSymbols * OFDM_SYMBOL_TIME_QUARTER);
179 } else if (ah->curchan &&
180 IS_CHAN_HALF_RATE(ah->curchan)) {
181 bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME_HALF) / 1000;
182 numBits = OFDM_PLCP_BITS + (frameLen << 3);
183 numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
184 txTime = OFDM_SIFS_TIME_HALF +
185 OFDM_PREAMBLE_TIME_HALF
186 + (numSymbols * OFDM_SYMBOL_TIME_HALF);
188 bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME) / 1000;
189 numBits = OFDM_PLCP_BITS + (frameLen << 3);
190 numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
191 txTime = OFDM_SIFS_TIME + OFDM_PREAMBLE_TIME
192 + (numSymbols * OFDM_SYMBOL_TIME);
196 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
197 "Unknown phy %u (rate ix %u)\n",
198 rates->info[rateix].phy, rateix);
205 EXPORT_SYMBOL(ath9k_hw_computetxtime);
207 void ath9k_hw_get_channel_centers(struct ath_hw *ah,
208 struct ath9k_channel *chan,
209 struct chan_centers *centers)
213 if (!IS_CHAN_HT40(chan)) {
214 centers->ctl_center = centers->ext_center =
215 centers->synth_center = chan->channel;
219 if ((chan->chanmode == CHANNEL_A_HT40PLUS) ||
220 (chan->chanmode == CHANNEL_G_HT40PLUS)) {
221 centers->synth_center =
222 chan->channel + HT40_CHANNEL_CENTER_SHIFT;
225 centers->synth_center =
226 chan->channel - HT40_CHANNEL_CENTER_SHIFT;
230 centers->ctl_center =
231 centers->synth_center - (extoff * HT40_CHANNEL_CENTER_SHIFT);
232 /* 25 MHz spacing is supported by hw but not on upper layers */
233 centers->ext_center =
234 centers->synth_center + (extoff * HT40_CHANNEL_CENTER_SHIFT);
241 static void ath9k_hw_read_revisions(struct ath_hw *ah)
245 val = REG_READ(ah, AR_SREV) & AR_SREV_ID;
248 val = REG_READ(ah, AR_SREV);
249 ah->hw_version.macVersion =
250 (val & AR_SREV_VERSION2) >> AR_SREV_TYPE2_S;
251 ah->hw_version.macRev = MS(val, AR_SREV_REVISION2);
252 ah->is_pciexpress = (val & AR_SREV_TYPE2_HOST_MODE) ? 0 : 1;
254 if (!AR_SREV_9100(ah))
255 ah->hw_version.macVersion = MS(val, AR_SREV_VERSION);
257 ah->hw_version.macRev = val & AR_SREV_REVISION;
259 if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCIE)
260 ah->is_pciexpress = true;
264 static int ath9k_hw_get_radiorev(struct ath_hw *ah)
269 REG_WRITE(ah, AR_PHY(0x36), 0x00007058);
271 for (i = 0; i < 8; i++)
272 REG_WRITE(ah, AR_PHY(0x20), 0x00010000);
273 val = (REG_READ(ah, AR_PHY(256)) >> 24) & 0xff;
274 val = ((val & 0xf0) >> 4) | ((val & 0x0f) << 4);
276 return ath9k_hw_reverse_bits(val, 8);
279 /************************************/
280 /* HW Attach, Detach, Init Routines */
281 /************************************/
283 static void ath9k_hw_disablepcie(struct ath_hw *ah)
285 if (AR_SREV_9100(ah))
288 REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fc00);
289 REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
290 REG_WRITE(ah, AR_PCIE_SERDES, 0x28000029);
291 REG_WRITE(ah, AR_PCIE_SERDES, 0x57160824);
292 REG_WRITE(ah, AR_PCIE_SERDES, 0x25980579);
293 REG_WRITE(ah, AR_PCIE_SERDES, 0x00000000);
294 REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
295 REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
296 REG_WRITE(ah, AR_PCIE_SERDES, 0x000e1007);
298 REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
301 static bool ath9k_hw_chip_test(struct ath_hw *ah)
303 struct ath_common *common = ath9k_hw_common(ah);
304 u32 regAddr[2] = { AR_STA_ID0, AR_PHY_BASE + (8 << 2) };
306 u32 patternData[4] = { 0x55555555,
312 for (i = 0; i < 2; i++) {
313 u32 addr = regAddr[i];
316 regHold[i] = REG_READ(ah, addr);
317 for (j = 0; j < 0x100; j++) {
318 wrData = (j << 16) | j;
319 REG_WRITE(ah, addr, wrData);
320 rdData = REG_READ(ah, addr);
321 if (rdData != wrData) {
322 ath_print(common, ATH_DBG_FATAL,
323 "address test failed "
324 "addr: 0x%08x - wr:0x%08x != "
326 addr, wrData, rdData);
330 for (j = 0; j < 4; j++) {
331 wrData = patternData[j];
332 REG_WRITE(ah, addr, wrData);
333 rdData = REG_READ(ah, addr);
334 if (wrData != rdData) {
335 ath_print(common, ATH_DBG_FATAL,
336 "address test failed "
337 "addr: 0x%08x - wr:0x%08x != "
339 addr, wrData, rdData);
343 REG_WRITE(ah, regAddr[i], regHold[i]);
350 static const char *ath9k_hw_devname(u16 devid)
353 case AR5416_DEVID_PCI:
354 return "Atheros 5416";
355 case AR5416_DEVID_PCIE:
356 return "Atheros 5418";
357 case AR9160_DEVID_PCI:
358 return "Atheros 9160";
359 case AR5416_AR9100_DEVID:
360 return "Atheros 9100";
361 case AR9280_DEVID_PCI:
362 case AR9280_DEVID_PCIE:
363 return "Atheros 9280";
364 case AR9285_DEVID_PCIE:
365 return "Atheros 9285";
366 case AR5416_DEVID_AR9287_PCI:
367 case AR5416_DEVID_AR9287_PCIE:
368 return "Atheros 9287";
374 static void ath9k_hw_init_config(struct ath_hw *ah)
378 ah->config.dma_beacon_response_time = 2;
379 ah->config.sw_beacon_response_time = 10;
380 ah->config.additional_swba_backoff = 0;
381 ah->config.ack_6mb = 0x0;
382 ah->config.cwm_ignore_extcca = 0;
383 ah->config.pcie_powersave_enable = 0;
384 ah->config.pcie_clock_req = 0;
385 ah->config.pcie_waen = 0;
386 ah->config.analog_shiftreg = 1;
387 ah->config.ht_enable = 1;
388 ah->config.ofdm_trig_low = 200;
389 ah->config.ofdm_trig_high = 500;
390 ah->config.cck_trig_high = 200;
391 ah->config.cck_trig_low = 100;
392 ah->config.enable_ani = 1;
393 ah->config.diversity_control = ATH9K_ANT_VARIABLE;
394 ah->config.antenna_switch_swap = 0;
396 for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
397 ah->config.spurchans[i][0] = AR_NO_SPUR;
398 ah->config.spurchans[i][1] = AR_NO_SPUR;
401 ah->config.intr_mitigation = true;
404 * We need this for PCI devices only (Cardbus, PCI, miniPCI)
405 * _and_ if on non-uniprocessor systems (Multiprocessor/HT).
406 * This means we use it for all AR5416 devices, and the few
407 * minor PCI AR9280 devices out there.
409 * Serialization is required because these devices do not handle
410 * well the case of two concurrent reads/writes due to the latency
411 * involved. During one read/write another read/write can be issued
412 * on another CPU while the previous read/write may still be working
413 * on our hardware, if we hit this case the hardware poops in a loop.
414 * We prevent this by serializing reads and writes.
416 * This issue is not present on PCI-Express devices or pre-AR5416
417 * devices (legacy, 802.11abg).
419 if (num_possible_cpus() > 1)
420 ah->config.serialize_regmode = SER_REG_MODE_AUTO;
422 EXPORT_SYMBOL(ath9k_hw_init);
424 static void ath9k_hw_init_defaults(struct ath_hw *ah)
426 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
428 regulatory->country_code = CTRY_DEFAULT;
429 regulatory->power_limit = MAX_RATE_POWER;
430 regulatory->tp_scale = ATH9K_TP_SCALE_MAX;
432 ah->hw_version.magic = AR5416_MAGIC;
433 ah->hw_version.subvendorid = 0;
436 if (ah->hw_version.devid == AR5416_AR9100_DEVID)
437 ah->hw_version.macVersion = AR_SREV_VERSION_9100;
438 if (!AR_SREV_9100(ah))
439 ah->ah_flags = AH_USE_EEPROM;
442 ah->sta_id1_defaults = AR_STA_ID1_CRPT_MIC_ENABLE;
443 ah->beacon_interval = 100;
444 ah->enable_32kHz_clock = DONT_USE_32KHZ;
445 ah->slottime = (u32) -1;
446 ah->acktimeout = (u32) -1;
447 ah->ctstimeout = (u32) -1;
448 ah->globaltxtimeout = (u32) -1;
450 ah->gbeacon_rate = 0;
452 ah->power_mode = ATH9K_PM_UNDEFINED;
455 static int ath9k_hw_rf_claim(struct ath_hw *ah)
459 REG_WRITE(ah, AR_PHY(0), 0x00000007);
461 val = ath9k_hw_get_radiorev(ah);
462 switch (val & AR_RADIO_SREV_MAJOR) {
464 val = AR_RAD5133_SREV_MAJOR;
466 case AR_RAD5133_SREV_MAJOR:
467 case AR_RAD5122_SREV_MAJOR:
468 case AR_RAD2133_SREV_MAJOR:
469 case AR_RAD2122_SREV_MAJOR:
472 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
473 "Radio Chip Rev 0x%02X not supported\n",
474 val & AR_RADIO_SREV_MAJOR);
478 ah->hw_version.analog5GhzRev = val;
483 static int ath9k_hw_init_macaddr(struct ath_hw *ah)
485 struct ath_common *common = ath9k_hw_common(ah);
491 for (i = 0; i < 3; i++) {
492 eeval = ah->eep_ops->get_eeprom(ah, AR_EEPROM_MAC(i));
494 common->macaddr[2 * i] = eeval >> 8;
495 common->macaddr[2 * i + 1] = eeval & 0xff;
497 if (sum == 0 || sum == 0xffff * 3)
498 return -EADDRNOTAVAIL;
503 static void ath9k_hw_init_rxgain_ini(struct ath_hw *ah)
507 if (ah->eep_ops->get_eeprom(ah, EEP_MINOR_REV) >= AR5416_EEP_MINOR_VER_17) {
508 rxgain_type = ah->eep_ops->get_eeprom(ah, EEP_RXGAIN_TYPE);
510 if (rxgain_type == AR5416_EEP_RXGAIN_13DB_BACKOFF)
511 INIT_INI_ARRAY(&ah->iniModesRxGain,
512 ar9280Modes_backoff_13db_rxgain_9280_2,
513 ARRAY_SIZE(ar9280Modes_backoff_13db_rxgain_9280_2), 6);
514 else if (rxgain_type == AR5416_EEP_RXGAIN_23DB_BACKOFF)
515 INIT_INI_ARRAY(&ah->iniModesRxGain,
516 ar9280Modes_backoff_23db_rxgain_9280_2,
517 ARRAY_SIZE(ar9280Modes_backoff_23db_rxgain_9280_2), 6);
519 INIT_INI_ARRAY(&ah->iniModesRxGain,
520 ar9280Modes_original_rxgain_9280_2,
521 ARRAY_SIZE(ar9280Modes_original_rxgain_9280_2), 6);
523 INIT_INI_ARRAY(&ah->iniModesRxGain,
524 ar9280Modes_original_rxgain_9280_2,
525 ARRAY_SIZE(ar9280Modes_original_rxgain_9280_2), 6);
529 static void ath9k_hw_init_txgain_ini(struct ath_hw *ah)
533 if (ah->eep_ops->get_eeprom(ah, EEP_MINOR_REV) >= AR5416_EEP_MINOR_VER_19) {
534 txgain_type = ah->eep_ops->get_eeprom(ah, EEP_TXGAIN_TYPE);
536 if (txgain_type == AR5416_EEP_TXGAIN_HIGH_POWER)
537 INIT_INI_ARRAY(&ah->iniModesTxGain,
538 ar9280Modes_high_power_tx_gain_9280_2,
539 ARRAY_SIZE(ar9280Modes_high_power_tx_gain_9280_2), 6);
541 INIT_INI_ARRAY(&ah->iniModesTxGain,
542 ar9280Modes_original_tx_gain_9280_2,
543 ARRAY_SIZE(ar9280Modes_original_tx_gain_9280_2), 6);
545 INIT_INI_ARRAY(&ah->iniModesTxGain,
546 ar9280Modes_original_tx_gain_9280_2,
547 ARRAY_SIZE(ar9280Modes_original_tx_gain_9280_2), 6);
551 static int ath9k_hw_post_init(struct ath_hw *ah)
555 if (!ath9k_hw_chip_test(ah))
558 ecode = ath9k_hw_rf_claim(ah);
562 ecode = ath9k_hw_eeprom_init(ah);
566 ath_print(ath9k_hw_common(ah), ATH_DBG_CONFIG,
567 "Eeprom VER: %d, REV: %d\n",
568 ah->eep_ops->get_eeprom_ver(ah),
569 ah->eep_ops->get_eeprom_rev(ah));
571 if (!AR_SREV_9280_10_OR_LATER(ah)) {
572 ecode = ath9k_hw_rf_alloc_ext_banks(ah);
574 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
575 "Failed allocating banks for "
581 if (!AR_SREV_9100(ah)) {
582 ath9k_hw_ani_setup(ah);
583 ath9k_hw_ani_init(ah);
589 static bool ath9k_hw_devid_supported(u16 devid)
592 case AR5416_DEVID_PCI:
593 case AR5416_DEVID_PCIE:
594 case AR5416_AR9100_DEVID:
595 case AR9160_DEVID_PCI:
596 case AR9280_DEVID_PCI:
597 case AR9280_DEVID_PCIE:
598 case AR9285_DEVID_PCIE:
599 case AR5416_DEVID_AR9287_PCI:
600 case AR5416_DEVID_AR9287_PCIE:
609 static bool ath9k_hw_macversion_supported(u32 macversion)
611 switch (macversion) {
612 case AR_SREV_VERSION_5416_PCI:
613 case AR_SREV_VERSION_5416_PCIE:
614 case AR_SREV_VERSION_9160:
615 case AR_SREV_VERSION_9100:
616 case AR_SREV_VERSION_9280:
617 case AR_SREV_VERSION_9285:
618 case AR_SREV_VERSION_9287:
619 case AR_SREV_VERSION_9271:
627 static void ath9k_hw_init_cal_settings(struct ath_hw *ah)
629 if (AR_SREV_9160_10_OR_LATER(ah)) {
630 if (AR_SREV_9280_10_OR_LATER(ah)) {
631 ah->iq_caldata.calData = &iq_cal_single_sample;
632 ah->adcgain_caldata.calData =
633 &adc_gain_cal_single_sample;
634 ah->adcdc_caldata.calData =
635 &adc_dc_cal_single_sample;
636 ah->adcdc_calinitdata.calData =
639 ah->iq_caldata.calData = &iq_cal_multi_sample;
640 ah->adcgain_caldata.calData =
641 &adc_gain_cal_multi_sample;
642 ah->adcdc_caldata.calData =
643 &adc_dc_cal_multi_sample;
644 ah->adcdc_calinitdata.calData =
647 ah->supp_cals = ADC_GAIN_CAL | ADC_DC_CAL | IQ_MISMATCH_CAL;
651 static void ath9k_hw_init_mode_regs(struct ath_hw *ah)
653 if (AR_SREV_9271(ah)) {
654 INIT_INI_ARRAY(&ah->iniModes, ar9271Modes_9271,
655 ARRAY_SIZE(ar9271Modes_9271), 6);
656 INIT_INI_ARRAY(&ah->iniCommon, ar9271Common_9271,
657 ARRAY_SIZE(ar9271Common_9271), 2);
658 INIT_INI_ARRAY(&ah->iniModes_9271_1_0_only,
659 ar9271Modes_9271_1_0_only,
660 ARRAY_SIZE(ar9271Modes_9271_1_0_only), 6);
664 if (AR_SREV_9287_11_OR_LATER(ah)) {
665 INIT_INI_ARRAY(&ah->iniModes, ar9287Modes_9287_1_1,
666 ARRAY_SIZE(ar9287Modes_9287_1_1), 6);
667 INIT_INI_ARRAY(&ah->iniCommon, ar9287Common_9287_1_1,
668 ARRAY_SIZE(ar9287Common_9287_1_1), 2);
669 if (ah->config.pcie_clock_req)
670 INIT_INI_ARRAY(&ah->iniPcieSerdes,
671 ar9287PciePhy_clkreq_off_L1_9287_1_1,
672 ARRAY_SIZE(ar9287PciePhy_clkreq_off_L1_9287_1_1), 2);
674 INIT_INI_ARRAY(&ah->iniPcieSerdes,
675 ar9287PciePhy_clkreq_always_on_L1_9287_1_1,
676 ARRAY_SIZE(ar9287PciePhy_clkreq_always_on_L1_9287_1_1),
678 } else if (AR_SREV_9287_10_OR_LATER(ah)) {
679 INIT_INI_ARRAY(&ah->iniModes, ar9287Modes_9287_1_0,
680 ARRAY_SIZE(ar9287Modes_9287_1_0), 6);
681 INIT_INI_ARRAY(&ah->iniCommon, ar9287Common_9287_1_0,
682 ARRAY_SIZE(ar9287Common_9287_1_0), 2);
684 if (ah->config.pcie_clock_req)
685 INIT_INI_ARRAY(&ah->iniPcieSerdes,
686 ar9287PciePhy_clkreq_off_L1_9287_1_0,
687 ARRAY_SIZE(ar9287PciePhy_clkreq_off_L1_9287_1_0), 2);
689 INIT_INI_ARRAY(&ah->iniPcieSerdes,
690 ar9287PciePhy_clkreq_always_on_L1_9287_1_0,
691 ARRAY_SIZE(ar9287PciePhy_clkreq_always_on_L1_9287_1_0),
693 } else if (AR_SREV_9285_12_OR_LATER(ah)) {
696 INIT_INI_ARRAY(&ah->iniModes, ar9285Modes_9285_1_2,
697 ARRAY_SIZE(ar9285Modes_9285_1_2), 6);
698 INIT_INI_ARRAY(&ah->iniCommon, ar9285Common_9285_1_2,
699 ARRAY_SIZE(ar9285Common_9285_1_2), 2);
701 if (ah->config.pcie_clock_req) {
702 INIT_INI_ARRAY(&ah->iniPcieSerdes,
703 ar9285PciePhy_clkreq_off_L1_9285_1_2,
704 ARRAY_SIZE(ar9285PciePhy_clkreq_off_L1_9285_1_2), 2);
706 INIT_INI_ARRAY(&ah->iniPcieSerdes,
707 ar9285PciePhy_clkreq_always_on_L1_9285_1_2,
708 ARRAY_SIZE(ar9285PciePhy_clkreq_always_on_L1_9285_1_2),
711 } else if (AR_SREV_9285_10_OR_LATER(ah)) {
712 INIT_INI_ARRAY(&ah->iniModes, ar9285Modes_9285,
713 ARRAY_SIZE(ar9285Modes_9285), 6);
714 INIT_INI_ARRAY(&ah->iniCommon, ar9285Common_9285,
715 ARRAY_SIZE(ar9285Common_9285), 2);
717 if (ah->config.pcie_clock_req) {
718 INIT_INI_ARRAY(&ah->iniPcieSerdes,
719 ar9285PciePhy_clkreq_off_L1_9285,
720 ARRAY_SIZE(ar9285PciePhy_clkreq_off_L1_9285), 2);
722 INIT_INI_ARRAY(&ah->iniPcieSerdes,
723 ar9285PciePhy_clkreq_always_on_L1_9285,
724 ARRAY_SIZE(ar9285PciePhy_clkreq_always_on_L1_9285), 2);
726 } else if (AR_SREV_9280_20_OR_LATER(ah)) {
727 INIT_INI_ARRAY(&ah->iniModes, ar9280Modes_9280_2,
728 ARRAY_SIZE(ar9280Modes_9280_2), 6);
729 INIT_INI_ARRAY(&ah->iniCommon, ar9280Common_9280_2,
730 ARRAY_SIZE(ar9280Common_9280_2), 2);
732 if (ah->config.pcie_clock_req) {
733 INIT_INI_ARRAY(&ah->iniPcieSerdes,
734 ar9280PciePhy_clkreq_off_L1_9280,
735 ARRAY_SIZE(ar9280PciePhy_clkreq_off_L1_9280),2);
737 INIT_INI_ARRAY(&ah->iniPcieSerdes,
738 ar9280PciePhy_clkreq_always_on_L1_9280,
739 ARRAY_SIZE(ar9280PciePhy_clkreq_always_on_L1_9280), 2);
741 INIT_INI_ARRAY(&ah->iniModesAdditional,
742 ar9280Modes_fast_clock_9280_2,
743 ARRAY_SIZE(ar9280Modes_fast_clock_9280_2), 3);
744 } else if (AR_SREV_9280_10_OR_LATER(ah)) {
745 INIT_INI_ARRAY(&ah->iniModes, ar9280Modes_9280,
746 ARRAY_SIZE(ar9280Modes_9280), 6);
747 INIT_INI_ARRAY(&ah->iniCommon, ar9280Common_9280,
748 ARRAY_SIZE(ar9280Common_9280), 2);
749 } else if (AR_SREV_9160_10_OR_LATER(ah)) {
750 INIT_INI_ARRAY(&ah->iniModes, ar5416Modes_9160,
751 ARRAY_SIZE(ar5416Modes_9160), 6);
752 INIT_INI_ARRAY(&ah->iniCommon, ar5416Common_9160,
753 ARRAY_SIZE(ar5416Common_9160), 2);
754 INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0_9160,
755 ARRAY_SIZE(ar5416Bank0_9160), 2);
756 INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain_9160,
757 ARRAY_SIZE(ar5416BB_RfGain_9160), 3);
758 INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1_9160,
759 ARRAY_SIZE(ar5416Bank1_9160), 2);
760 INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2_9160,
761 ARRAY_SIZE(ar5416Bank2_9160), 2);
762 INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3_9160,
763 ARRAY_SIZE(ar5416Bank3_9160), 3);
764 INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6_9160,
765 ARRAY_SIZE(ar5416Bank6_9160), 3);
766 INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC_9160,
767 ARRAY_SIZE(ar5416Bank6TPC_9160), 3);
768 INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7_9160,
769 ARRAY_SIZE(ar5416Bank7_9160), 2);
770 if (AR_SREV_9160_11(ah)) {
771 INIT_INI_ARRAY(&ah->iniAddac,
773 ARRAY_SIZE(ar5416Addac_91601_1), 2);
775 INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac_9160,
776 ARRAY_SIZE(ar5416Addac_9160), 2);
778 } else if (AR_SREV_9100_OR_LATER(ah)) {
779 INIT_INI_ARRAY(&ah->iniModes, ar5416Modes_9100,
780 ARRAY_SIZE(ar5416Modes_9100), 6);
781 INIT_INI_ARRAY(&ah->iniCommon, ar5416Common_9100,
782 ARRAY_SIZE(ar5416Common_9100), 2);
783 INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0_9100,
784 ARRAY_SIZE(ar5416Bank0_9100), 2);
785 INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain_9100,
786 ARRAY_SIZE(ar5416BB_RfGain_9100), 3);
787 INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1_9100,
788 ARRAY_SIZE(ar5416Bank1_9100), 2);
789 INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2_9100,
790 ARRAY_SIZE(ar5416Bank2_9100), 2);
791 INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3_9100,
792 ARRAY_SIZE(ar5416Bank3_9100), 3);
793 INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6_9100,
794 ARRAY_SIZE(ar5416Bank6_9100), 3);
795 INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC_9100,
796 ARRAY_SIZE(ar5416Bank6TPC_9100), 3);
797 INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7_9100,
798 ARRAY_SIZE(ar5416Bank7_9100), 2);
799 INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac_9100,
800 ARRAY_SIZE(ar5416Addac_9100), 2);
802 INIT_INI_ARRAY(&ah->iniModes, ar5416Modes,
803 ARRAY_SIZE(ar5416Modes), 6);
804 INIT_INI_ARRAY(&ah->iniCommon, ar5416Common,
805 ARRAY_SIZE(ar5416Common), 2);
806 INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0,
807 ARRAY_SIZE(ar5416Bank0), 2);
808 INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain,
809 ARRAY_SIZE(ar5416BB_RfGain), 3);
810 INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1,
811 ARRAY_SIZE(ar5416Bank1), 2);
812 INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2,
813 ARRAY_SIZE(ar5416Bank2), 2);
814 INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3,
815 ARRAY_SIZE(ar5416Bank3), 3);
816 INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6,
817 ARRAY_SIZE(ar5416Bank6), 3);
818 INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC,
819 ARRAY_SIZE(ar5416Bank6TPC), 3);
820 INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7,
821 ARRAY_SIZE(ar5416Bank7), 2);
822 INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac,
823 ARRAY_SIZE(ar5416Addac), 2);
827 static void ath9k_hw_init_mode_gain_regs(struct ath_hw *ah)
829 if (AR_SREV_9287_11_OR_LATER(ah))
830 INIT_INI_ARRAY(&ah->iniModesRxGain,
831 ar9287Modes_rx_gain_9287_1_1,
832 ARRAY_SIZE(ar9287Modes_rx_gain_9287_1_1), 6);
833 else if (AR_SREV_9287_10(ah))
834 INIT_INI_ARRAY(&ah->iniModesRxGain,
835 ar9287Modes_rx_gain_9287_1_0,
836 ARRAY_SIZE(ar9287Modes_rx_gain_9287_1_0), 6);
837 else if (AR_SREV_9280_20(ah))
838 ath9k_hw_init_rxgain_ini(ah);
840 if (AR_SREV_9287_11_OR_LATER(ah)) {
841 INIT_INI_ARRAY(&ah->iniModesTxGain,
842 ar9287Modes_tx_gain_9287_1_1,
843 ARRAY_SIZE(ar9287Modes_tx_gain_9287_1_1), 6);
844 } else if (AR_SREV_9287_10(ah)) {
845 INIT_INI_ARRAY(&ah->iniModesTxGain,
846 ar9287Modes_tx_gain_9287_1_0,
847 ARRAY_SIZE(ar9287Modes_tx_gain_9287_1_0), 6);
848 } else if (AR_SREV_9280_20(ah)) {
849 ath9k_hw_init_txgain_ini(ah);
850 } else if (AR_SREV_9285_12_OR_LATER(ah)) {
851 u32 txgain_type = ah->eep_ops->get_eeprom(ah, EEP_TXGAIN_TYPE);
854 if (txgain_type == AR5416_EEP_TXGAIN_HIGH_POWER) {
855 INIT_INI_ARRAY(&ah->iniModesTxGain,
856 ar9285Modes_high_power_tx_gain_9285_1_2,
857 ARRAY_SIZE(ar9285Modes_high_power_tx_gain_9285_1_2), 6);
859 INIT_INI_ARRAY(&ah->iniModesTxGain,
860 ar9285Modes_original_tx_gain_9285_1_2,
861 ARRAY_SIZE(ar9285Modes_original_tx_gain_9285_1_2), 6);
867 static void ath9k_hw_init_11a_eeprom_fix(struct ath_hw *ah)
871 if ((ah->hw_version.devid == AR9280_DEVID_PCI) &&
872 test_bit(ATH9K_MODE_11A, ah->caps.wireless_modes)) {
875 for (i = 0; i < ah->iniModes.ia_rows; i++) {
876 u32 reg = INI_RA(&ah->iniModes, i, 0);
878 for (j = 1; j < ah->iniModes.ia_columns; j++) {
879 u32 val = INI_RA(&ah->iniModes, i, j);
881 INI_RA(&ah->iniModes, i, j) =
882 ath9k_hw_ini_fixup(ah,
890 int ath9k_hw_init(struct ath_hw *ah)
892 struct ath_common *common = ath9k_hw_common(ah);
895 if (!ath9k_hw_devid_supported(ah->hw_version.devid)) {
896 ath_print(common, ATH_DBG_FATAL,
897 "Unsupported device ID: 0x%0x\n",
898 ah->hw_version.devid);
902 ath9k_hw_init_defaults(ah);
903 ath9k_hw_init_config(ah);
905 if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON)) {
906 ath_print(common, ATH_DBG_FATAL,
907 "Couldn't reset chip\n");
911 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE)) {
912 ath_print(common, ATH_DBG_FATAL, "Couldn't wakeup chip\n");
916 if (ah->config.serialize_regmode == SER_REG_MODE_AUTO) {
917 if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCI ||
918 (AR_SREV_9280(ah) && !ah->is_pciexpress)) {
919 ah->config.serialize_regmode =
922 ah->config.serialize_regmode =
927 ath_print(common, ATH_DBG_RESET, "serialize_regmode is %d\n",
928 ah->config.serialize_regmode);
930 if (!ath9k_hw_macversion_supported(ah->hw_version.macVersion)) {
931 ath_print(common, ATH_DBG_FATAL,
932 "Mac Chip Rev 0x%02x.%x is not supported by "
933 "this driver\n", ah->hw_version.macVersion,
934 ah->hw_version.macRev);
938 if (AR_SREV_9100(ah)) {
939 ah->iq_caldata.calData = &iq_cal_multi_sample;
940 ah->supp_cals = IQ_MISMATCH_CAL;
941 ah->is_pciexpress = false;
944 if (AR_SREV_9271(ah))
945 ah->is_pciexpress = false;
947 ah->hw_version.phyRev = REG_READ(ah, AR_PHY_CHIP_ID);
949 ath9k_hw_init_cal_settings(ah);
951 ah->ani_function = ATH9K_ANI_ALL;
952 if (AR_SREV_9280_10_OR_LATER(ah)) {
953 ah->ani_function &= ~ATH9K_ANI_NOISE_IMMUNITY_LEVEL;
954 ah->ath9k_hw_rf_set_freq = &ath9k_hw_ar9280_set_channel;
955 ah->ath9k_hw_spur_mitigate_freq = &ath9k_hw_9280_spur_mitigate;
957 ah->ath9k_hw_rf_set_freq = &ath9k_hw_set_channel;
958 ah->ath9k_hw_spur_mitigate_freq = &ath9k_hw_spur_mitigate;
961 ath9k_hw_init_mode_regs(ah);
963 if (ah->is_pciexpress)
964 ath9k_hw_configpcipowersave(ah, 0, 0);
966 ath9k_hw_disablepcie(ah);
968 /* Support for Japan ch.14 (2484) spread */
969 if (AR_SREV_9287_11_OR_LATER(ah)) {
970 INIT_INI_ARRAY(&ah->iniCckfirNormal,
971 ar9287Common_normal_cck_fir_coeff_92871_1,
972 ARRAY_SIZE(ar9287Common_normal_cck_fir_coeff_92871_1), 2);
973 INIT_INI_ARRAY(&ah->iniCckfirJapan2484,
974 ar9287Common_japan_2484_cck_fir_coeff_92871_1,
975 ARRAY_SIZE(ar9287Common_japan_2484_cck_fir_coeff_92871_1), 2);
978 r = ath9k_hw_post_init(ah);
982 ath9k_hw_init_mode_gain_regs(ah);
983 ath9k_hw_fill_cap_info(ah);
984 ath9k_hw_init_11a_eeprom_fix(ah);
986 r = ath9k_hw_init_macaddr(ah);
988 ath_print(common, ATH_DBG_FATAL,
989 "Failed to initialize MAC address\n");
993 if (AR_SREV_9285(ah) || AR_SREV_9271(ah))
994 ah->tx_trig_level = (AR_FTRIG_256B >> AR_FTRIG_S);
996 ah->tx_trig_level = (AR_FTRIG_512B >> AR_FTRIG_S);
998 ath9k_init_nfcal_hist_buffer(ah);
1000 common->state = ATH_HW_INITIALIZED;
1005 static void ath9k_hw_init_bb(struct ath_hw *ah,
1006 struct ath9k_channel *chan)
1010 synthDelay = REG_READ(ah, AR_PHY_RX_DELAY) & AR_PHY_RX_DELAY_DELAY;
1011 if (IS_CHAN_B(chan))
1012 synthDelay = (4 * synthDelay) / 22;
1016 REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);
1018 udelay(synthDelay + BASE_ACTIVATE_DELAY);
1021 static void ath9k_hw_init_qos(struct ath_hw *ah)
1023 REG_WRITE(ah, AR_MIC_QOS_CONTROL, 0x100aa);
1024 REG_WRITE(ah, AR_MIC_QOS_SELECT, 0x3210);
1026 REG_WRITE(ah, AR_QOS_NO_ACK,
1027 SM(2, AR_QOS_NO_ACK_TWO_BIT) |
1028 SM(5, AR_QOS_NO_ACK_BIT_OFF) |
1029 SM(0, AR_QOS_NO_ACK_BYTE_OFF));
1031 REG_WRITE(ah, AR_TXOP_X, AR_TXOP_X_VAL);
1032 REG_WRITE(ah, AR_TXOP_0_3, 0xFFFFFFFF);
1033 REG_WRITE(ah, AR_TXOP_4_7, 0xFFFFFFFF);
1034 REG_WRITE(ah, AR_TXOP_8_11, 0xFFFFFFFF);
1035 REG_WRITE(ah, AR_TXOP_12_15, 0xFFFFFFFF);
1038 static void ath9k_hw_change_target_baud(struct ath_hw *ah, u32 freq, u32 baud)
1041 u32 baud_divider = freq * 1000 * 1000 / 16 / baud;
1043 lcr = REG_READ(ah , 0x5100c);
1046 REG_WRITE(ah, 0x5100c, lcr);
1047 REG_WRITE(ah, 0x51004, (baud_divider >> 8));
1048 REG_WRITE(ah, 0x51000, (baud_divider & 0xff));
1051 REG_WRITE(ah, 0x5100c, lcr);
1054 static void ath9k_hw_init_pll(struct ath_hw *ah,
1055 struct ath9k_channel *chan)
1059 if (AR_SREV_9100(ah)) {
1060 if (chan && IS_CHAN_5GHZ(chan))
1065 if (AR_SREV_9280_10_OR_LATER(ah)) {
1066 pll = SM(0x5, AR_RTC_9160_PLL_REFDIV);
1068 if (chan && IS_CHAN_HALF_RATE(chan))
1069 pll |= SM(0x1, AR_RTC_9160_PLL_CLKSEL);
1070 else if (chan && IS_CHAN_QUARTER_RATE(chan))
1071 pll |= SM(0x2, AR_RTC_9160_PLL_CLKSEL);
1073 if (chan && IS_CHAN_5GHZ(chan)) {
1074 pll |= SM(0x28, AR_RTC_9160_PLL_DIV);
1077 if (AR_SREV_9280_20(ah)) {
1078 if (((chan->channel % 20) == 0)
1079 || ((chan->channel % 10) == 0))
1085 pll |= SM(0x2c, AR_RTC_9160_PLL_DIV);
1088 } else if (AR_SREV_9160_10_OR_LATER(ah)) {
1090 pll = SM(0x5, AR_RTC_9160_PLL_REFDIV);
1092 if (chan && IS_CHAN_HALF_RATE(chan))
1093 pll |= SM(0x1, AR_RTC_9160_PLL_CLKSEL);
1094 else if (chan && IS_CHAN_QUARTER_RATE(chan))
1095 pll |= SM(0x2, AR_RTC_9160_PLL_CLKSEL);
1097 if (chan && IS_CHAN_5GHZ(chan))
1098 pll |= SM(0x50, AR_RTC_9160_PLL_DIV);
1100 pll |= SM(0x58, AR_RTC_9160_PLL_DIV);
1102 pll = AR_RTC_PLL_REFDIV_5 | AR_RTC_PLL_DIV2;
1104 if (chan && IS_CHAN_HALF_RATE(chan))
1105 pll |= SM(0x1, AR_RTC_PLL_CLKSEL);
1106 else if (chan && IS_CHAN_QUARTER_RATE(chan))
1107 pll |= SM(0x2, AR_RTC_PLL_CLKSEL);
1109 if (chan && IS_CHAN_5GHZ(chan))
1110 pll |= SM(0xa, AR_RTC_PLL_DIV);
1112 pll |= SM(0xb, AR_RTC_PLL_DIV);
1115 REG_WRITE(ah, AR_RTC_PLL_CONTROL, pll);
1117 /* Switch the core clock for ar9271 to 117Mhz */
1118 if (AR_SREV_9271(ah)) {
1119 if ((pll == 0x142c) || (pll == 0x2850) ) {
1121 /* set CLKOBS to output AHB clock */
1122 REG_WRITE(ah, 0x7020, 0xe);
1124 * 0x304: 117Mhz, ahb_ratio: 1x1
1125 * 0x306: 40Mhz, ahb_ratio: 1x1
1127 REG_WRITE(ah, 0x50040, 0x304);
1129 * makes adjustments for the baud dividor to keep the
1130 * targetted baud rate based on the used core clock.
1132 ath9k_hw_change_target_baud(ah, AR9271_CORE_CLOCK,
1133 AR9271_TARGET_BAUD_RATE);
1137 udelay(RTC_PLL_SETTLE_DELAY);
1139 REG_WRITE(ah, AR_RTC_SLEEP_CLK, AR_RTC_FORCE_DERIVED_CLK);
1142 static void ath9k_hw_init_chain_masks(struct ath_hw *ah)
1144 int rx_chainmask, tx_chainmask;
1146 rx_chainmask = ah->rxchainmask;
1147 tx_chainmask = ah->txchainmask;
1149 switch (rx_chainmask) {
1151 REG_SET_BIT(ah, AR_PHY_ANALOG_SWAP,
1152 AR_PHY_SWAP_ALT_CHAIN);
1154 if (((ah)->hw_version.macVersion <= AR_SREV_VERSION_9160)) {
1155 REG_WRITE(ah, AR_PHY_RX_CHAINMASK, 0x7);
1156 REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, 0x7);
1162 REG_WRITE(ah, AR_PHY_RX_CHAINMASK, rx_chainmask);
1163 REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, rx_chainmask);
1169 REG_WRITE(ah, AR_SELFGEN_MASK, tx_chainmask);
1170 if (tx_chainmask == 0x5) {
1171 REG_SET_BIT(ah, AR_PHY_ANALOG_SWAP,
1172 AR_PHY_SWAP_ALT_CHAIN);
1174 if (AR_SREV_9100(ah))
1175 REG_WRITE(ah, AR_PHY_ANALOG_SWAP,
1176 REG_READ(ah, AR_PHY_ANALOG_SWAP) | 0x00000001);
1179 static void ath9k_hw_init_interrupt_masks(struct ath_hw *ah,
1180 enum nl80211_iftype opmode)
1182 ah->mask_reg = AR_IMR_TXERR |
1188 if (ah->config.intr_mitigation)
1189 ah->mask_reg |= AR_IMR_RXINTM | AR_IMR_RXMINTR;
1191 ah->mask_reg |= AR_IMR_RXOK;
1193 ah->mask_reg |= AR_IMR_TXOK;
1195 if (opmode == NL80211_IFTYPE_AP)
1196 ah->mask_reg |= AR_IMR_MIB;
1198 REG_WRITE(ah, AR_IMR, ah->mask_reg);
1199 REG_WRITE(ah, AR_IMR_S2, REG_READ(ah, AR_IMR_S2) | AR_IMR_S2_GTT);
1201 if (!AR_SREV_9100(ah)) {
1202 REG_WRITE(ah, AR_INTR_SYNC_CAUSE, 0xFFFFFFFF);
1203 REG_WRITE(ah, AR_INTR_SYNC_ENABLE, AR_INTR_SYNC_DEFAULT);
1204 REG_WRITE(ah, AR_INTR_SYNC_MASK, 0);
1208 static bool ath9k_hw_set_ack_timeout(struct ath_hw *ah, u32 us)
1210 if (us > ath9k_hw_mac_to_usec(ah, MS(0xffffffff, AR_TIME_OUT_ACK))) {
1211 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET,
1212 "bad ack timeout %u\n", us);
1213 ah->acktimeout = (u32) -1;
1216 REG_RMW_FIELD(ah, AR_TIME_OUT,
1217 AR_TIME_OUT_ACK, ath9k_hw_mac_to_clks(ah, us));
1218 ah->acktimeout = us;
1223 static bool ath9k_hw_set_cts_timeout(struct ath_hw *ah, u32 us)
1225 if (us > ath9k_hw_mac_to_usec(ah, MS(0xffffffff, AR_TIME_OUT_CTS))) {
1226 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET,
1227 "bad cts timeout %u\n", us);
1228 ah->ctstimeout = (u32) -1;
1231 REG_RMW_FIELD(ah, AR_TIME_OUT,
1232 AR_TIME_OUT_CTS, ath9k_hw_mac_to_clks(ah, us));
1233 ah->ctstimeout = us;
1238 static bool ath9k_hw_set_global_txtimeout(struct ath_hw *ah, u32 tu)
1241 ath_print(ath9k_hw_common(ah), ATH_DBG_XMIT,
1242 "bad global tx timeout %u\n", tu);
1243 ah->globaltxtimeout = (u32) -1;
1246 REG_RMW_FIELD(ah, AR_GTXTO, AR_GTXTO_TIMEOUT_LIMIT, tu);
1247 ah->globaltxtimeout = tu;
1252 static void ath9k_hw_init_user_settings(struct ath_hw *ah)
1254 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET, "ah->misc_mode 0x%x\n",
1257 if (ah->misc_mode != 0)
1258 REG_WRITE(ah, AR_PCU_MISC,
1259 REG_READ(ah, AR_PCU_MISC) | ah->misc_mode);
1260 if (ah->slottime != (u32) -1)
1261 ath9k_hw_setslottime(ah, ah->slottime);
1262 if (ah->acktimeout != (u32) -1)
1263 ath9k_hw_set_ack_timeout(ah, ah->acktimeout);
1264 if (ah->ctstimeout != (u32) -1)
1265 ath9k_hw_set_cts_timeout(ah, ah->ctstimeout);
1266 if (ah->globaltxtimeout != (u32) -1)
1267 ath9k_hw_set_global_txtimeout(ah, ah->globaltxtimeout);
1270 const char *ath9k_hw_probe(u16 vendorid, u16 devid)
1272 return vendorid == ATHEROS_VENDOR_ID ?
1273 ath9k_hw_devname(devid) : NULL;
/*
 * ath9k_hw_detach() - tear down the hardware abstraction.
 *
 * Disables ANI (not supported on AR9100), puts the chip into full
 * sleep, and releases the external RF banks on pre-AR9280 radios.
 *
 * NOTE(review): the early-exit path for the ATH_HW_INITIALIZED check
 * and the final resource release appear lost in extraction - compare
 * against upstream hw.c.
 */
void ath9k_hw_detach(struct ath_hw *ah)
struct ath_common *common = ath9k_hw_common(ah);
/* nothing to quiesce if the hardware never got past init */
if (common->state <= ATH_HW_INITIALIZED)
if (!AR_SREV_9100(ah))
ath9k_hw_ani_disable(ah);
ath9k_hw_setpower(ah, ATH9K_PM_FULL_SLEEP);
/* pre-AR9280 parts allocate external RF banks that must be freed */
if (!AR_SREV_9280_10_OR_LATER(ah))
ath9k_hw_rf_free_ext_banks(ah);
EXPORT_SYMBOL(ath9k_hw_detach);
/*
 * ath9k_hw_override_ini() - apply post-INI register overrides that fix
 * chip-revision-specific problems after the init tables are loaded.
 *
 * NOTE(review): the declaration of the local 'val' and several closing
 * braces appear lost in extraction - compare against upstream hw.c.
 */
static void ath9k_hw_override_ini(struct ath_hw *ah,
struct ath9k_channel *chan)
if (AR_SREV_9271(ah)) {
/*
 * Enable spectral scan to solution for issues with stuck
 * beacons on AR9271 1.0. The beacon stuck issue is not seeon on
 * (comment truncated in this copy)
 */
if (AR_SREV_9271_10(ah)) {
val = REG_READ(ah, AR_PHY_SPECTRAL_SCAN) |
AR_PHY_SPECTRAL_SCAN_ENABLE;
REG_WRITE(ah, AR_PHY_SPECTRAL_SCAN, val);
else if (AR_SREV_9271_11(ah))
/*
 * change AR_PHY_RF_CTL3 setting to fix MAC issue
 * present on AR9271 1.1
 */
REG_WRITE(ah, AR_PHY_RF_CTL3, 0x3a020001);
/*
 * Set the RX_ABORT and RX_DIS and clear if off only after
 * RXE is set for MAC. This prevents frames with corrupted
 * descriptor status.
 */
REG_SET_BIT(ah, AR_DIAG_SW, (AR_DIAG_RX_DIS | AR_DIAG_RX_ABORT));
if (AR_SREV_9280_10_OR_LATER(ah)) {
/* clear hardware-workaround bits that must be off on newer chips */
val = REG_READ(ah, AR_PCU_MISC_MODE2) &
(~AR_PCU_MISC_MODE2_HWWAR1);
if (AR_SREV_9287_10_OR_LATER(ah))
val = val & (~AR_PCU_MISC_MODE2_HWWAR2);
REG_WRITE(ah, AR_PCU_MISC_MODE2, val);
if (!AR_SREV_5416_20_OR_LATER(ah) ||
AR_SREV_9280_10_OR_LATER(ah))
/*
 * Disable BB clock gating
 * Necessary to avoid issues on AR5416 2.0
 */
REG_WRITE(ah, 0x9800 + (651 << 2), 0x11);
/*
 * ath9k_hw_def_ini_fixup() - patch a single INI register value using
 * EEPROM contents (default-sized EEPROM map).
 *
 * For AR9280 PCI cards, register 0x7894 (AR_AN_TOP2) has its PWDCLKIND
 * field replaced with the EEPROM's pwdclkind on sufficiently new
 * EEPROM versions (> 0x0a).
 *
 * NOTE(review): the 'reg'/'value' parameters, default return and
 * closing braces appear lost in extraction - compare upstream.
 */
static u32 ath9k_hw_def_ini_fixup(struct ath_hw *ah,
struct ar5416_eeprom_def *pEepData,
struct base_eep_header *pBase = &(pEepData->baseEepHeader);
struct ath_common *common = ath9k_hw_common(ah);
switch (ah->hw_version.devid) {
case AR9280_DEVID_PCI:
/* 0x7894 is AR_AN_TOP2; only this register is patched here */
if (reg == 0x7894) {
ath_print(common, ATH_DBG_EEPROM,
"ini VAL: %x EEPROM: %x\n", value,
(pBase->version & 0xff));
/* EEPROM minor version > 0x0a carries a valid pwdclkind */
if ((pBase->version & 0xff) > 0x0a) {
ath_print(common, ATH_DBG_EEPROM,
value &= ~AR_AN_TOP2_PWDCLKIND;
value |= AR_AN_TOP2_PWDCLKIND &
(pBase->pwdclkind << AR_AN_TOP2_PWDCLKIND_S);
ath_print(common, ATH_DBG_EEPROM,
"PWDCLKIND Earlier Rev\n");
ath_print(common, ATH_DBG_EEPROM,
"final ini VAL: %x\n", value);
1387 static u32 ath9k_hw_ini_fixup(struct ath_hw *ah,
1388 struct ar5416_eeprom_def *pEepData,
1391 if (ah->eep_map == EEP_MAP_4KBITS)
1394 return ath9k_hw_def_ini_fixup(ah, pEepData, reg, value);
/*
 * ath9k_olc_init() - initialize open-loop power control.
 *
 * On AR9287 1.0+ the temperature-sensor based TX power-control mode is
 * selected; otherwise the original PDADC gain values are snapshotted
 * from the TX gain table for later OLC temperature compensation.
 *
 * NOTE(review): the loop-index declaration, else branch and closing
 * braces appear lost in extraction - compare upstream.
 */
static void ath9k_olc_init(struct ath_hw *ah)
if (OLC_FOR_AR9287_10_LATER) {
REG_SET_BIT(ah, AR_PHY_TX_PWRCTRL9,
AR_PHY_TX_PWRCTRL9_RES_DC_REMOVAL);
/* switch the analog TX power control to temperature-sensor mode */
ath9k_hw_analog_shift_rmw(ah, AR9287_AN_TXPC0,
AR9287_AN_TXPC0_TXPCMODE,
AR9287_AN_TXPC0_TXPCMODE_S,
AR9287_AN_TXPC0_TXPCMODE_TEMPSENSE);
/* snapshot the per-entry gain values for later OLC adjustment */
for (i = 0; i < AR9280_TX_GAIN_TABLE_SIZE; i++)
ah->originalGain[i] =
MS(REG_READ(ah, AR_PHY_TX_GAIN_TBL1 + i * 4),
1418 static u32 ath9k_regd_get_ctl(struct ath_regulatory *reg,
1419 struct ath9k_channel *chan)
1421 u32 ctl = ath_regd_get_band_ctl(reg, chan->chan->band);
1423 if (IS_CHAN_B(chan))
1425 else if (IS_CHAN_G(chan))
/*
 * ath9k_hw_process_ini() - load the initialization (INI) register tables
 * for @chan into the chip: ADDAC table, mode-specific table, RX/TX gain
 * tables, common table, per-chip overrides, chain masks, and initial TX
 * power. Returns 0 on success, negative on RF programming failure.
 *
 * NOTE(review): the modesIndex/freqIndex assignments inside the switch,
 * the addacSize declaration, several braces and the error returns
 * appear lost in extraction - compare against upstream hw.c.
 */
static int ath9k_hw_process_ini(struct ath_hw *ah,
struct ath9k_channel *chan)
struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
int i, regWrites = 0;
struct ieee80211_channel *channel = chan->chan;
u32 modesIndex, freqIndex;
/* select the INI table column from the channel mode */
switch (chan->chanmode) {
case CHANNEL_A_HT20:
case CHANNEL_A_HT40PLUS:
case CHANNEL_A_HT40MINUS:
case CHANNEL_G_HT20:
case CHANNEL_G_HT40PLUS:
case CHANNEL_G_HT40MINUS:
/* put the PHY into a known state before table programming */
REG_WRITE(ah, AR_PHY(0), 0x00000007);
REG_WRITE(ah, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_EXTERNAL_RADIO);
ah->eep_ops->set_addac(ah, chan);
if (AR_SREV_5416_22_OR_LATER(ah)) {
REG_WRITE_ARRAY(&ah->iniAddac, 1, regWrites);
/* AR5416 < 2.2: patch a copy of the ADDAC table before writing */
struct ar5416IniArray temp;
sizeof(u32) * ah->iniAddac.ia_rows *
ah->iniAddac.ia_columns;
memcpy(ah->addac5416_21,
ah->iniAddac.ia_array, addacSize);
/* zero the entry at row 31, column 1 (chip workaround) */
(ah->addac5416_21)[31 * ah->iniAddac.ia_columns + 1] = 0;
temp.ia_array = ah->addac5416_21;
temp.ia_columns = ah->iniAddac.ia_columns;
temp.ia_rows = ah->iniAddac.ia_rows;
REG_WRITE_ARRAY(&temp, 1, regWrites);
REG_WRITE(ah, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_INTERNAL_ADDAC);
/* write the mode-specific register table */
for (i = 0; i < ah->iniModes.ia_rows; i++) {
u32 reg = INI_RA(&ah->iniModes, i, 0);
u32 val = INI_RA(&ah->iniModes, i, modesIndex);
REG_WRITE(ah, reg, val);
/* analog shift registers (0x7800-0x789c) need settling time */
if (reg >= 0x7800 && reg < 0x78a0
&& ah->config.analog_shiftreg) {
DO_DELAY(regWrites);
/* chip-specific RX and TX gain tables */
if (AR_SREV_9280(ah) || AR_SREV_9287_10_OR_LATER(ah))
REG_WRITE_ARRAY(&ah->iniModesRxGain, modesIndex, regWrites);
if (AR_SREV_9280(ah) || AR_SREV_9285_12_OR_LATER(ah) ||
AR_SREV_9287_10_OR_LATER(ah))
REG_WRITE_ARRAY(&ah->iniModesTxGain, modesIndex, regWrites);
/* write the mode-independent (common) register table */
for (i = 0; i < ah->iniCommon.ia_rows; i++) {
u32 reg = INI_RA(&ah->iniCommon, i, 0);
u32 val = INI_RA(&ah->iniCommon, i, 1);
REG_WRITE(ah, reg, val);
if (reg >= 0x7800 && reg < 0x78a0
&& ah->config.analog_shiftreg) {
DO_DELAY(regWrites);
ath9k_hw_write_regs(ah, freqIndex, regWrites);
/* AR9271 1.0 needs an extra mode-specific table */
if (AR_SREV_9271_10(ah))
REG_WRITE_ARRAY(&ah->iniModes_9271_1_0_only,
modesIndex, regWrites);
/* AR9280 2.0 quarter/half-rate channels need additional settings */
if (AR_SREV_9280_20(ah) && IS_CHAN_A_5MHZ_SPACED(chan)) {
REG_WRITE_ARRAY(&ah->iniModesAdditional, modesIndex,
ath9k_hw_override_ini(ah, chan);
ath9k_hw_set_regs(ah, chan);
ath9k_hw_init_chain_masks(ah);
if (OLC_FOR_AR9280_20_LATER)
/* initial TX power: capped by both regulatory and chip limits */
ah->eep_ops->set_txpower(ah, chan,
ath9k_regd_get_ctl(regulatory, chan),
channel->max_antenna_gain * 2,
channel->max_power * 2,
min((u32) MAX_RATE_POWER,
(u32) regulatory->power_limit));
if (!ath9k_hw_set_rf_regs(ah, chan, freqIndex)) {
ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
"ar5416SetRfRegs failed\n");
1562 /****************************************/
1563 /* Reset and Channel Switching Routines */
1564 /****************************************/
1566 static void ath9k_hw_set_rfmode(struct ath_hw *ah, struct ath9k_channel *chan)
1573 rfMode |= (IS_CHAN_B(chan) || IS_CHAN_G(chan))
1574 ? AR_PHY_MODE_DYNAMIC : AR_PHY_MODE_OFDM;
1576 if (!AR_SREV_9280_10_OR_LATER(ah))
1577 rfMode |= (IS_CHAN_5GHZ(chan)) ?
1578 AR_PHY_MODE_RF5GHZ : AR_PHY_MODE_RF2GHZ;
1580 if (AR_SREV_9280_20(ah) && IS_CHAN_A_5MHZ_SPACED(chan))
1581 rfMode |= (AR_PHY_MODE_DYNAMIC | AR_PHY_MODE_DYN_CCK_DISABLE);
1583 REG_WRITE(ah, AR_PHY_MODE, rfMode);
1586 static void ath9k_hw_mark_phy_inactive(struct ath_hw *ah)
1588 REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_DIS);
/*
 * ath9k_hw_set_dma() - configure MAC DMA behaviour after reset:
 * AHB prefetch, 128-byte read/write bursts, the restored TX trigger
 * level, the RX FIFO threshold, and the usable PCU TX buffer size.
 *
 * NOTE(review): the 'regval' declaration and some closing braces
 * appear lost in extraction - compare upstream.
 */
static inline void ath9k_hw_set_dma(struct ath_hw *ah)
/*
 * set AHB_MODE not to do cacheline prefetches
 */
regval = REG_READ(ah, AR_AHB_MODE);
REG_WRITE(ah, AR_AHB_MODE, regval | AR_AHB_PREFETCH_RD_EN);
/*
 * let mac dma reads be in 128 byte chunks
 */
regval = REG_READ(ah, AR_TXCFG) & ~AR_TXCFG_DMASZ_MASK;
REG_WRITE(ah, AR_TXCFG, regval | AR_TXCFG_DMASZ_128B);
/*
 * Restore TX Trigger Level to its pre-reset value.
 * The initial value depends on whether aggregation is enabled, and is
 * adjusted whenever underruns are detected.
 */
REG_RMW_FIELD(ah, AR_TXCFG, AR_FTRIG, ah->tx_trig_level);
/*
 * let mac dma writes be in 128 byte chunks
 */
regval = REG_READ(ah, AR_RXCFG) & ~AR_RXCFG_DMASZ_MASK;
REG_WRITE(ah, AR_RXCFG, regval | AR_RXCFG_DMASZ_128B);
/*
 * Setup receive FIFO threshold to hold off TX activities
 */
REG_WRITE(ah, AR_RXFIFO_CFG, 0x200);
/*
 * reduce the number of usable entries in PCU TXBUF to avoid
 * wrap around issues.
 */
if (AR_SREV_9285(ah)) {
/* For AR9285 the number of Fifos are reduced to half.
 * So set the usable tx buf size also to half to
 * avoid data/delimiter underruns
 */
REG_WRITE(ah, AR_PCU_TXBUF_CTRL,
AR_9285_PCU_TXBUF_CTRL_USABLE_SIZE);
} else if (!AR_SREV_9271(ah)) {
REG_WRITE(ah, AR_PCU_TXBUF_CTRL,
AR_PCU_TXBUF_CTRL_USABLE_SIZE);
1642 static void ath9k_hw_set_operating_mode(struct ath_hw *ah, int opmode)
1646 val = REG_READ(ah, AR_STA_ID1);
1647 val &= ~(AR_STA_ID1_STA_AP | AR_STA_ID1_ADHOC);
1649 case NL80211_IFTYPE_AP:
1650 REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_STA_AP
1651 | AR_STA_ID1_KSRCH_MODE);
1652 REG_CLR_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
1654 case NL80211_IFTYPE_ADHOC:
1655 case NL80211_IFTYPE_MESH_POINT:
1656 REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_ADHOC
1657 | AR_STA_ID1_KSRCH_MODE);
1658 REG_SET_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
1660 case NL80211_IFTYPE_STATION:
1661 case NL80211_IFTYPE_MONITOR:
1662 REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_KSRCH_MODE);
1667 static inline void ath9k_hw_get_delta_slope_vals(struct ath_hw *ah,
1672 u32 coef_exp, coef_man;
1674 for (coef_exp = 31; coef_exp > 0; coef_exp--)
1675 if ((coef_scaled >> coef_exp) & 0x1)
1678 coef_exp = 14 - (coef_exp - COEF_SCALE_S);
1680 coef_man = coef_scaled + (1 << (COEF_SCALE_S - coef_exp - 1));
1682 *coef_mantissa = coef_man >> (COEF_SCALE_S - coef_exp);
1683 *coef_exponent = coef_exp - 16;
1686 static void ath9k_hw_set_delta_slope(struct ath_hw *ah,
1687 struct ath9k_channel *chan)
1689 u32 coef_scaled, ds_coef_exp, ds_coef_man;
1690 u32 clockMhzScaled = 0x64000000;
1691 struct chan_centers centers;
1693 if (IS_CHAN_HALF_RATE(chan))
1694 clockMhzScaled = clockMhzScaled >> 1;
1695 else if (IS_CHAN_QUARTER_RATE(chan))
1696 clockMhzScaled = clockMhzScaled >> 2;
1698 ath9k_hw_get_channel_centers(ah, chan, ¢ers);
1699 coef_scaled = clockMhzScaled / centers.synth_center;
1701 ath9k_hw_get_delta_slope_vals(ah, coef_scaled, &ds_coef_man,
1704 REG_RMW_FIELD(ah, AR_PHY_TIMING3,
1705 AR_PHY_TIMING3_DSC_MAN, ds_coef_man);
1706 REG_RMW_FIELD(ah, AR_PHY_TIMING3,
1707 AR_PHY_TIMING3_DSC_EXP, ds_coef_exp);
1709 coef_scaled = (9 * coef_scaled) / 10;
1711 ath9k_hw_get_delta_slope_vals(ah, coef_scaled, &ds_coef_man,
1714 REG_RMW_FIELD(ah, AR_PHY_HALFGI,
1715 AR_PHY_HALFGI_DSC_MAN, ds_coef_man);
1716 REG_RMW_FIELD(ah, AR_PHY_HALFGI,
1717 AR_PHY_HALFGI_DSC_EXP, ds_coef_exp);
/*
 * ath9k_hw_set_reset() - perform a warm or cold MAC reset via the RTC
 * reset-control register, waiting for the RTC to come out of reset.
 * Returns false if the RTC stays stuck in reset.
 *
 * NOTE(review): local declarations (rst_flags, tmpReg), a udelay
 * between assert/deassert, and the return statements appear lost in
 * extraction - compare upstream.
 */
static bool ath9k_hw_set_reset(struct ath_hw *ah, int type)
if (AR_SREV_9100(ah)) {
/* AR9100: derive the sleep clock before resetting */
u32 val = REG_READ(ah, AR_RTC_DERIVED_CLK);
val &= ~AR_RTC_DERIVED_CLK_PERIOD;
val |= SM(1, AR_RTC_DERIVED_CLK_PERIOD);
REG_WRITE(ah, AR_RTC_DERIVED_CLK, val);
/* flush the write before proceeding */
(void)REG_READ(ah, AR_RTC_DERIVED_CLK);
REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN |
AR_RTC_FORCE_WAKE_ON_INT);
if (AR_SREV_9100(ah)) {
rst_flags = AR_RTC_RC_MAC_WARM | AR_RTC_RC_MAC_COLD |
AR_RTC_RC_COLD_RESET | AR_RTC_RC_WARM_RESET;
tmpReg = REG_READ(ah, AR_INTR_SYNC_CAUSE);
/* a stuck bus needs the host interface reset as well */
(AR_INTR_SYNC_LOCAL_TIMEOUT |
AR_INTR_SYNC_RADM_CPL_TIMEOUT)) {
REG_WRITE(ah, AR_INTR_SYNC_ENABLE, 0);
REG_WRITE(ah, AR_RC, AR_RC_AHB | AR_RC_HOSTIF);
REG_WRITE(ah, AR_RC, AR_RC_AHB);
rst_flags = AR_RTC_RC_MAC_WARM;
if (type == ATH9K_RESET_COLD)
rst_flags |= AR_RTC_RC_MAC_COLD;
REG_WRITE(ah, AR_RTC_RC, rst_flags);
/* deassert and wait for the RTC to report reset complete */
REG_WRITE(ah, AR_RTC_RC, 0);
if (!ath9k_hw_wait(ah, AR_RTC_RC, AR_RTC_RC_M, 0, AH_WAIT_TIMEOUT)) {
ath_print(ath9k_hw_common(ah), ATH_DBG_RESET,
"RTC stuck in MAC reset\n");
if (!AR_SREV_9100(ah))
REG_WRITE(ah, AR_RC, 0);
if (AR_SREV_9100(ah))
/*
 * ath9k_hw_set_reset_power_on() - full power-on reset: pulse the RTC
 * reset line, wait for the RTC to wake, re-read the chip revisions and
 * finish with a warm reset. Returns false if the RTC never wakes.
 *
 * NOTE(review): udelay calls and the ath9k_hw_wait() argument list are
 * truncated in this copy - compare upstream.
 */
static bool ath9k_hw_set_reset_power_on(struct ath_hw *ah)
REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN |
AR_RTC_FORCE_WAKE_ON_INT);
if (!AR_SREV_9100(ah))
REG_WRITE(ah, AR_RC, AR_RC_AHB);
/* assert RTC reset ... */
REG_WRITE(ah, AR_RTC_RESET, 0);
if (!AR_SREV_9100(ah))
REG_WRITE(ah, AR_RC, 0);
/* ... then release it and wait for the RTC to power up */
REG_WRITE(ah, AR_RTC_RESET, 1);
if (!ath9k_hw_wait(ah,
ath_print(ath9k_hw_common(ah), ATH_DBG_RESET,
"RTC not waking up\n");
/* revisions are only readable once the chip is awake */
ath9k_hw_read_revisions(ah);
return ath9k_hw_set_reset(ah, ATH9K_RESET_WARM);
1805 static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type)
1807 REG_WRITE(ah, AR_RTC_FORCE_WAKE,
1808 AR_RTC_FORCE_WAKE_EN | AR_RTC_FORCE_WAKE_ON_INT);
1811 case ATH9K_RESET_POWER_ON:
1812 return ath9k_hw_set_reset_power_on(ah);
1813 case ATH9K_RESET_WARM:
1814 case ATH9K_RESET_COLD:
1815 return ath9k_hw_set_reset(ah, type);
/*
 * ath9k_hw_set_regs() - program HT PHY mode (AR_PHY_TURBO) for @chan:
 * HT enable, short-GI-40, Walsh coding, DAC FIFO (AR9285+), and the
 * dynamic 20/40 configuration with primary-channel selection for HT40.
 * Also sets the global TX and carrier-sense timeout registers.
 *
 * NOTE(review): the 'u32 phymode;' declaration and some braces appear
 * lost in extraction - compare upstream.
 */
static void ath9k_hw_set_regs(struct ath_hw *ah, struct ath9k_channel *chan)
u32 enableDacFifo = 0;
/* DAC FIFO bit must be preserved from the current register value */
if (AR_SREV_9285_10_OR_LATER(ah))
enableDacFifo = (REG_READ(ah, AR_PHY_TURBO) &
AR_PHY_FC_ENABLE_DAC_FIFO);
phymode = AR_PHY_FC_HT_EN | AR_PHY_FC_SHORT_GI_40
| AR_PHY_FC_SINGLE_HT_LTF1 | AR_PHY_FC_WALSH | enableDacFifo;
if (IS_CHAN_HT40(chan)) {
phymode |= AR_PHY_FC_DYN2040_EN;
/* HT40+: primary channel is the lower of the pair */
if ((chan->chanmode == CHANNEL_A_HT40PLUS) ||
(chan->chanmode == CHANNEL_G_HT40PLUS))
phymode |= AR_PHY_FC_DYN2040_PRI_CH;
REG_WRITE(ah, AR_PHY_TURBO, phymode);
ath9k_hw_set11nmac2040(ah);
/* global transmit and carrier-sense timeouts */
REG_WRITE(ah, AR_GTXTO, 25 << AR_GTXTO_TIMEOUT_LIMIT_S);
REG_WRITE(ah, AR_CST, 0xF << AR_CST_TIMEOUT_LIMIT_S);
1849 static bool ath9k_hw_chip_reset(struct ath_hw *ah,
1850 struct ath9k_channel *chan)
1852 if (AR_SREV_9280(ah) && ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL)) {
1853 if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON))
1855 } else if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_WARM))
1858 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
1861 ah->chip_fullsleep = false;
1862 ath9k_hw_init_pll(ah, chan);
1863 ath9k_hw_set_rfmode(ah, chan);
/*
 * ath9k_hw_channel_change() - fast channel switch without a full chip
 * reset: refuses if TX is pending, parks the RF bus, reprograms the
 * PHY/synthesizer/TX power for @chan, then releases the bus.
 *
 * NOTE(review): the 'int r' declaration, several early 'return false'
 * statements and the final 'return true' appear lost in extraction -
 * compare upstream.
 */
static bool ath9k_hw_channel_change(struct ath_hw *ah,
struct ath9k_channel *chan)
struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
struct ath_common *common = ath9k_hw_common(ah);
struct ieee80211_channel *channel = chan->chan;
u32 synthDelay, qnum;
/* a fast switch is only safe when no queue has frames in flight */
for (qnum = 0; qnum < AR_NUM_QCU; qnum++) {
if (ath9k_hw_numtxpending(ah, qnum)) {
ath_print(common, ATH_DBG_QUEUE,
"Transmit frames pending on "
"queue %d\n", qnum);
/* request and wait for ownership of the RF bus */
REG_WRITE(ah, AR_PHY_RFBUS_REQ, AR_PHY_RFBUS_REQ_EN);
if (!ath9k_hw_wait(ah, AR_PHY_RFBUS_GRANT, AR_PHY_RFBUS_GRANT_EN,
AR_PHY_RFBUS_GRANT_EN, AH_WAIT_TIMEOUT)) {
ath_print(common, ATH_DBG_FATAL,
"Could not kill baseband RX\n");
ath9k_hw_set_regs(ah, chan);
r = ah->ath9k_hw_rf_set_freq(ah, chan);
ath_print(common, ATH_DBG_FATAL,
"Failed to set channel\n");
ah->eep_ops->set_txpower(ah, chan,
ath9k_regd_get_ctl(regulatory, chan),
channel->max_antenna_gain * 2,
channel->max_power * 2,
min((u32) MAX_RATE_POWER,
(u32) regulatory->power_limit));
/* wait for the synthesizer to settle before releasing the bus */
synthDelay = REG_READ(ah, AR_PHY_RX_DELAY) & AR_PHY_RX_DELAY_DELAY;
if (IS_CHAN_B(chan))
synthDelay = (4 * synthDelay) / 22;
udelay(synthDelay + BASE_ACTIVATE_DELAY);
REG_WRITE(ah, AR_PHY_RFBUS_REQ, 0);
if (IS_CHAN_OFDM(chan) || IS_CHAN_HT(chan))
ath9k_hw_set_delta_slope(ah, chan);
ah->ath9k_hw_spur_mitigate_freq(ah, chan);
if (!chan->oneTimeCalsDone)
chan->oneTimeCalsDone = true;
1931 static void ath9k_enable_rfkill(struct ath_hw *ah)
1933 REG_SET_BIT(ah, AR_GPIO_INPUT_EN_VAL,
1934 AR_GPIO_INPUT_EN_VAL_RFSILENT_BB);
1936 REG_CLR_BIT(ah, AR_GPIO_INPUT_MUX2,
1937 AR_GPIO_INPUT_MUX2_RFSILENT);
1939 ath9k_hw_cfg_gpio_input(ah, ah->rfkill_gpio);
1940 REG_SET_BIT(ah, AR_PHY_TEST, RFSILENT_BB);
/*
 * ath9k_hw_reset() - full hardware (re)initialization for @chan.
 *
 * Attempts a fast channel change when only the channel differs;
 * otherwise saves sticky state (default antenna, LED config, TSF on
 * OLC AR9280), resets the chip, reloads the INI tables, restores MAC
 * address/BSSID/queues/interrupts/keys-related PCU state, runs initial
 * calibration and re-enables optional features (rfkill, btcoex).
 * Returns 0 on success or a negative errno.
 *
 * NOTE(review): many declarations (tsf, saveDefAntenna, saveLedState,
 * macStaId1), early returns and closing braces are lost in extraction -
 * compare upstream before trusting control flow.
 */
int ath9k_hw_reset(struct ath_hw *ah, struct ath9k_channel *chan,
bool bChannelChange)
struct ath_common *common = ath9k_hw_common(ah);
struct ath9k_channel *curchan = ah->curchan;
int i, rx_chainmask, r;
ah->txchainmask = common->tx_chainmask;
ah->rxchainmask = common->rx_chainmask;
if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
/* save the current noise floor before retuning */
if (curchan && !ah->chip_fullsleep)
ath9k_hw_getnf(ah, curchan);
/* fast channel change: same flags, chip awake, not 5MHz-spaced */
if (bChannelChange &&
(ah->chip_fullsleep != true) &&
(ah->curchan != NULL) &&
(chan->channel != ah->curchan->channel) &&
((chan->channelFlags & CHANNEL_ALL) ==
(ah->curchan->channelFlags & CHANNEL_ALL)) &&
!(AR_SREV_9280(ah) || IS_CHAN_A_5MHZ_SPACED(chan) ||
IS_CHAN_A_5MHZ_SPACED(ah->curchan))) {
if (ath9k_hw_channel_change(ah, chan)) {
ath9k_hw_loadnf(ah, ah->curchan);
ath9k_hw_start_nfcal(ah);
saveDefAntenna = REG_READ(ah, AR_DEF_ANTENNA);
if (saveDefAntenna == 0)
macStaId1 = REG_READ(ah, AR_STA_ID1) & AR_STA_ID1_BASE_RATE_11B;
/* For chips on which RTC reset is done, save TSF before it gets cleared */
if (AR_SREV_9280(ah) && ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL))
tsf = ath9k_hw_gettsf64(ah);
saveLedState = REG_READ(ah, AR_CFG_LED) &
(AR_CFG_LED_ASSOC_CTL | AR_CFG_LED_MODE_SEL |
AR_CFG_LED_BLINK_THRESH_SEL | AR_CFG_LED_BLINK_SLOW);
ath9k_hw_mark_phy_inactive(ah);
/* AR9271 (USB target): reset the radio RF before the chip reset */
if (AR_SREV_9271(ah) && ah->htc_reset_init) {
AR9271_RESET_POWER_DOWN_CONTROL,
AR9271_RADIO_RF_RST);
if (!ath9k_hw_chip_reset(ah, chan)) {
ath_print(common, ATH_DBG_FATAL, "Chip reset failed\n");
if (AR_SREV_9271(ah) && ah->htc_reset_init) {
ah->htc_reset_init = false;
AR9271_RESET_POWER_DOWN_CONTROL,
AR9271_GATE_MAC_CTL);
/* restore the TSF saved above (cleared by the RTC reset) */
if (tsf && AR_SREV_9280(ah) && ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL))
ath9k_hw_settsf64(ah, tsf);
if (AR_SREV_9280_10_OR_LATER(ah))
REG_SET_BIT(ah, AR_GPIO_INPUT_EN_VAL, AR_GPIO_JTAG_DISABLE);
if (AR_SREV_9287_12_OR_LATER(ah)) {
/* Enable ASYNC FIFO */
REG_SET_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
AR_MAC_PCU_ASYNC_FIFO_REG3_DATAPATH_SEL);
REG_SET_BIT(ah, AR_PHY_MODE, AR_PHY_MODE_ASYNCFIFO);
REG_CLR_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
AR_MAC_PCU_ASYNC_FIFO_REG3_SOFT_RESET);
REG_SET_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
AR_MAC_PCU_ASYNC_FIFO_REG3_SOFT_RESET);
r = ath9k_hw_process_ini(ah, chan);
/* Setup MFP options for CCMP */
if (AR_SREV_9280_20_OR_LATER(ah)) {
/* Mask Retry(b11), PwrMgt(b12), MoreData(b13) to 0 in mgmt
 * frames when constructing CCMP AAD. */
REG_RMW_FIELD(ah, AR_AES_MUTE_MASK1, AR_AES_MUTE_MASK1_FC_MGMT,
ah->sw_mgmt_crypto = false;
} else if (AR_SREV_9160_10_OR_LATER(ah)) {
/* Disable hardware crypto for management frames */
REG_CLR_BIT(ah, AR_PCU_MISC_MODE2,
AR_PCU_MISC_MODE2_MGMT_CRYPTO_ENABLE);
REG_SET_BIT(ah, AR_PCU_MISC_MODE2,
AR_PCU_MISC_MODE2_NO_CRYPTO_FOR_NON_DATA_PKT);
ah->sw_mgmt_crypto = true;
ah->sw_mgmt_crypto = true;
if (IS_CHAN_OFDM(chan) || IS_CHAN_HT(chan))
ath9k_hw_set_delta_slope(ah, chan);
ah->ath9k_hw_spur_mitigate_freq(ah, chan);
ah->eep_ops->set_board_values(ah, chan);
ath9k_hw_decrease_chain_power(ah, chan);
/* restore MAC address and STA_ID1 defaults */
REG_WRITE(ah, AR_STA_ID0, get_unaligned_le32(common->macaddr));
REG_WRITE(ah, AR_STA_ID1, get_unaligned_le16(common->macaddr + 4)
| AR_STA_ID1_RTS_USE_DEF
ack_6mb ? AR_STA_ID1_ACKCTS_6MB : 0)
| ah->sta_id1_defaults);
ath9k_hw_set_operating_mode(ah, ah->opmode);
ath_hw_setbssidmask(common);
REG_WRITE(ah, AR_DEF_ANTENNA, saveDefAntenna);
ath9k_hw_write_associd(ah);
/* clear all pending interrupts */
REG_WRITE(ah, AR_ISR, ~0);
REG_WRITE(ah, AR_RSSI_THR, INIT_RSSI_THR);
r = ah->ath9k_hw_rf_set_freq(ah, chan);
/* one DCU per QCU by default */
for (i = 0; i < AR_NUM_DCU; i++)
REG_WRITE(ah, AR_DQCUMASK(i), 1 << i);
for (i = 0; i < ah->caps.total_queues; i++)
ath9k_hw_resettxqueue(ah, i);
ath9k_hw_init_interrupt_masks(ah, ah->opmode);
ath9k_hw_init_qos(ah);
if (ah->caps.hw_caps & ATH9K_HW_CAP_RFSILENT)
ath9k_enable_rfkill(ah);
ath9k_hw_init_user_settings(ah);
if (AR_SREV_9287_12_OR_LATER(ah)) {
/* ASYNC FIFO mode needs its own IFS/timeout durations */
REG_WRITE(ah, AR_D_GBL_IFS_SIFS,
AR_D_GBL_IFS_SIFS_ASYNC_FIFO_DUR);
REG_WRITE(ah, AR_D_GBL_IFS_SLOT,
AR_D_GBL_IFS_SLOT_ASYNC_FIFO_DUR);
REG_WRITE(ah, AR_D_GBL_IFS_EIFS,
AR_D_GBL_IFS_EIFS_ASYNC_FIFO_DUR);
REG_WRITE(ah, AR_TIME_OUT, AR_TIME_OUT_ACK_CTS_ASYNC_FIFO_DUR);
REG_WRITE(ah, AR_USEC, AR_USEC_ASYNC_FIFO_DUR);
REG_SET_BIT(ah, AR_MAC_PCU_LOGIC_ANALYZER,
AR_MAC_PCU_LOGIC_ANALYZER_DISBUG20768);
REG_RMW_FIELD(ah, AR_AHB_MODE, AR_AHB_CUSTOM_BURST_EN,
AR_AHB_CUSTOM_BURST_ASYNC_FIFO_VAL);
if (AR_SREV_9287_12_OR_LATER(ah)) {
REG_SET_BIT(ah, AR_PCU_MISC_MODE2,
AR_PCU_MISC_MODE2_ENABLE_AGGWEP);
REG_WRITE(ah, AR_STA_ID1,
REG_READ(ah, AR_STA_ID1) | AR_STA_ID1_PRESERVE_SEQNUM);
ath9k_hw_set_dma(ah);
REG_WRITE(ah, AR_OBS, 8);
if (ah->config.intr_mitigation) {
/* RX interrupt mitigation: last/first packet timers (usecs) */
REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_LAST, 500);
REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_FIRST, 2000);
ath9k_hw_init_bb(ah, chan);
if (!ath9k_hw_init_cal(ah, chan))
/* narrow the RX chainmask back down after calibration */
rx_chainmask = ah->rxchainmask;
if ((rx_chainmask == 0x5) || (rx_chainmask == 0x3)) {
REG_WRITE(ah, AR_PHY_RX_CHAINMASK, rx_chainmask);
REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, rx_chainmask);
REG_WRITE(ah, AR_CFG_LED, saveLedState | AR_CFG_SCLK_32KHZ);
/*
 * For big endian systems turn on swapping for descriptors
 */
if (AR_SREV_9100(ah)) {
mask = REG_READ(ah, AR_CFG);
if (mask & (AR_CFG_SWRB | AR_CFG_SWTB | AR_CFG_SWRG)) {
ath_print(common, ATH_DBG_RESET,
"CFG Byte Swap Set 0x%x\n", mask);
INIT_CONFIG_STATUS | AR_CFG_SWRB | AR_CFG_SWTB;
REG_WRITE(ah, AR_CFG, mask);
ath_print(common, ATH_DBG_RESET,
"Setting CFG 0x%x\n", REG_READ(ah, AR_CFG));
/* Configure AR9271 target WLAN */
if (AR_SREV_9271(ah))
REG_WRITE(ah, AR_CFG, AR_CFG_SWRB | AR_CFG_SWTB);
REG_WRITE(ah, AR_CFG, AR_CFG_SWTD | AR_CFG_SWRD);
if (ah->btcoex_hw.enabled)
ath9k_hw_btcoex_enable(ah);
EXPORT_SYMBOL(ath9k_hw_reset);
2177 /************************/
2178 /* Key Cache Management */
2179 /************************/
2181 bool ath9k_hw_keyreset(struct ath_hw *ah, u16 entry)
2185 if (entry >= ah->caps.keycache_size) {
2186 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
2187 "keychache entry %u out of range\n", entry);
2191 keyType = REG_READ(ah, AR_KEYTABLE_TYPE(entry));
2193 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), 0);
2194 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), 0);
2195 REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), 0);
2196 REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), 0);
2197 REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), 0);
2198 REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), AR_KEYTABLE_TYPE_CLR);
2199 REG_WRITE(ah, AR_KEYTABLE_MAC0(entry), 0);
2200 REG_WRITE(ah, AR_KEYTABLE_MAC1(entry), 0);
2202 if (keyType == AR_KEYTABLE_TYPE_TKIP && ATH9K_IS_MIC_ENABLED(ah)) {
2203 u16 micentry = entry + 64;
2205 REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), 0);
2206 REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), 0);
2207 REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), 0);
2208 REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), 0);
2214 EXPORT_SYMBOL(ath9k_hw_keyreset);
2216 bool ath9k_hw_keysetmac(struct ath_hw *ah, u16 entry, const u8 *mac)
2220 if (entry >= ah->caps.keycache_size) {
2221 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
2222 "keychache entry %u out of range\n", entry);
2227 macHi = (mac[5] << 8) | mac[4];
2228 macLo = (mac[3] << 24) |
2233 macLo |= (macHi & 1) << 31;
2238 REG_WRITE(ah, AR_KEYTABLE_MAC0(entry), macLo);
2239 REG_WRITE(ah, AR_KEYTABLE_MAC1(entry), macHi | AR_KEYTABLE_VALID);
2243 EXPORT_SYMBOL(ath9k_hw_keysetmac);
/*
 * ath9k_hw_set_keycache_entry() - install a key into the key cache.
 *
 * Validates the entry index and cipher, packs the key material into the
 * five key-cache words, and writes them in the hardware-mandated
 * register pairing order. TKIP keys additionally program a MIC entry
 * (combined or split layout, selected by AR_PCU_MIC_NEW_LOC_ENA).
 *
 * NOTE(review): several 'return false' statements, break statements and
 * the key4 masking for <=104-bit keys appear lost in extraction -
 * compare upstream.
 */
bool ath9k_hw_set_keycache_entry(struct ath_hw *ah, u16 entry,
const struct ath9k_keyval *k,
const struct ath9k_hw_capabilities *pCap = &ah->caps;
struct ath_common *common = ath9k_hw_common(ah);
u32 key0, key1, key2, key3, key4;
if (entry >= pCap->keycache_size) {
ath_print(common, ATH_DBG_FATAL,
"keycache entry %u out of range\n", entry);
/* map the driver cipher to a hardware key type */
switch (k->kv_type) {
case ATH9K_CIPHER_AES_OCB:
keyType = AR_KEYTABLE_TYPE_AES;
case ATH9K_CIPHER_AES_CCM:
if (!(pCap->hw_caps & ATH9K_HW_CAP_CIPHER_AESCCM)) {
ath_print(common, ATH_DBG_ANY,
"AES-CCM not supported by mac rev 0x%x\n",
ah->hw_version.macRev);
keyType = AR_KEYTABLE_TYPE_CCM;
case ATH9K_CIPHER_TKIP:
keyType = AR_KEYTABLE_TYPE_TKIP;
/* TKIP needs the paired MIC entry at +64 to exist too */
if (ATH9K_IS_MIC_ENABLED(ah)
&& entry + 64 >= pCap->keycache_size) {
ath_print(common, ATH_DBG_ANY,
"entry %u inappropriate for TKIP\n", entry);
case ATH9K_CIPHER_WEP:
if (k->kv_len < WLAN_KEY_LEN_WEP40) {
ath_print(common, ATH_DBG_ANY,
"WEP key length %u too small\n", k->kv_len);
if (k->kv_len <= WLAN_KEY_LEN_WEP40)
keyType = AR_KEYTABLE_TYPE_40;
else if (k->kv_len <= WLAN_KEY_LEN_WEP104)
keyType = AR_KEYTABLE_TYPE_104;
keyType = AR_KEYTABLE_TYPE_128;
case ATH9K_CIPHER_CLR:
keyType = AR_KEYTABLE_TYPE_CLR;
ath_print(common, ATH_DBG_FATAL,
"cipher %u not supported\n", k->kv_type);
/* pack the key bytes into the 32/16-bit key cache words */
key0 = get_unaligned_le32(k->kv_val + 0);
key1 = get_unaligned_le16(k->kv_val + 4);
key2 = get_unaligned_le32(k->kv_val + 6);
key3 = get_unaligned_le16(k->kv_val + 10);
key4 = get_unaligned_le32(k->kv_val + 12);
if (k->kv_len <= WLAN_KEY_LEN_WEP104)
/*
 * Note: Key cache registers access special memory area that requires
 * two 32-bit writes to actually update the values in the internal
 * memory. Consequently, the exact order and pairs used here must be
 * (comment truncated in this copy)
 */
if (keyType == AR_KEYTABLE_TYPE_TKIP && ATH9K_IS_MIC_ENABLED(ah)) {
u16 micentry = entry + 64;
/*
 * Write inverted key[47:0] first to avoid Michael MIC errors
 * on frames that could be sent or received at the same time.
 * The correct key will be written in the end once everything
 * (comment truncated in this copy)
 */
REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), ~key0);
REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), ~key1);
/* Write key[95:48] */
REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), key2);
REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), key3);
/* Write key[127:96] and key type */
REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), key4);
REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), keyType);
/* Write MAC address for the entry */
(void) ath9k_hw_keysetmac(ah, entry, mac);
if (ah->misc_mode & AR_PCU_MIC_NEW_LOC_ENA) {
/*
 * TKIP uses two key cache entries:
 * Michael MIC TX/RX keys in the same key cache entry
 * (idx = main index + 64):
 * key0 [31:0] = RX key [31:0]
 * key1 [15:0] = TX key [31:16]
 * key1 [31:16] = reserved
 * key2 [31:0] = RX key [63:32]
 * key3 [15:0] = TX key [15:0]
 * key3 [31:16] = reserved
 * key4 [31:0] = TX key [63:32]
 */
u32 mic0, mic1, mic2, mic3, mic4;
mic0 = get_unaligned_le32(k->kv_mic + 0);
mic2 = get_unaligned_le32(k->kv_mic + 4);
mic1 = get_unaligned_le16(k->kv_txmic + 2) & 0xffff;
mic3 = get_unaligned_le16(k->kv_txmic + 0) & 0xffff;
mic4 = get_unaligned_le32(k->kv_txmic + 4);
/* Write RX[31:0] and TX[31:16] */
REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), mic0);
REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), mic1);
/* Write RX[63:32] and TX[15:0] */
REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), mic2);
REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), mic3);
/* Write TX[63:32] and keyType(reserved) */
REG_WRITE(ah, AR_KEYTABLE_KEY4(micentry), mic4);
REG_WRITE(ah, AR_KEYTABLE_TYPE(micentry),
AR_KEYTABLE_TYPE_CLR);
/*
 * TKIP uses four key cache entries (two for group
 * (comment truncated in this copy)
 * Michael MIC TX/RX keys are in different key cache
 * entries (idx = main index + 64 for TX and
 * main index + 32 + 96 for RX):
 * key0 [31:0] = TX/RX MIC key [31:0]
 * key1 [31:0] = reserved
 * key2 [31:0] = TX/RX MIC key [63:32]
 * key3 [31:0] = reserved
 * key4 [31:0] = reserved
 *
 * Upper layer code will call this function separately
 * for TX and RX keys when these registers offsets are
 * (comment truncated in this copy)
 */
mic0 = get_unaligned_le32(k->kv_mic + 0);
mic2 = get_unaligned_le32(k->kv_mic + 4);
/* Write MIC key[31:0] */
REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), mic0);
REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), 0);
/* Write MIC key[63:32] */
REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), mic2);
REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), 0);
/* Write TX[63:32] and keyType(reserved) */
REG_WRITE(ah, AR_KEYTABLE_KEY4(micentry), 0);
REG_WRITE(ah, AR_KEYTABLE_TYPE(micentry),
AR_KEYTABLE_TYPE_CLR);
/* MAC address registers are reserved for the MIC entry */
REG_WRITE(ah, AR_KEYTABLE_MAC0(micentry), 0);
REG_WRITE(ah, AR_KEYTABLE_MAC1(micentry), 0);
/*
 * Write the correct (un-inverted) key[47:0] last to enable
 * TKIP now that all other registers are set with correct
 * (comment truncated in this copy)
 */
REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), key0);
REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), key1);
/* Write key[47:0] */
REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), key0);
REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), key1);
/* Write key[95:48] */
REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), key2);
REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), key3);
/* Write key[127:96] and key type */
REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), key4);
REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), keyType);
/* Write MAC address for the entry */
(void) ath9k_hw_keysetmac(ah, entry, mac);
EXPORT_SYMBOL(ath9k_hw_set_keycache_entry);
2444 bool ath9k_hw_keyisvalid(struct ath_hw *ah, u16 entry)
2446 if (entry < ah->caps.keycache_size) {
2447 u32 val = REG_READ(ah, AR_KEYTABLE_MAC1(entry));
2448 if (val & AR_KEYTABLE_VALID)
2453 EXPORT_SYMBOL(ath9k_hw_keyisvalid);
2455 /******************************/
2456 /* Power Management (Chipset) */
2457 /******************************/
/*
 * ath9k_set_power_sleep() - put the chip into full sleep.
 *
 * Sets the MAC power-save bit and, when @setChip is requested, drops
 * the RTC force-wake and resets the AHB/host interface. The RTC reset
 * bit is left alone on AR5416 (chip-specific workaround).
 *
 * NOTE(review): the 'if (setChip)' guard and some closing braces appear
 * lost in extraction - compare upstream.
 */
static void ath9k_set_power_sleep(struct ath_hw *ah, int setChip)
REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE,
AR_RTC_FORCE_WAKE_EN);
if (!AR_SREV_9100(ah))
REG_WRITE(ah, AR_RC, AR_RC_AHB | AR_RC_HOSTIF);
/* AR5416 must keep its RTC reset bit untouched */
if(!AR_SREV_5416(ah))
REG_CLR_BIT(ah, (AR_RTC_RESET),
/*
 * ath9k_set_power_network_sleep() - enter network sleep: MAC power-save
 * on, and (without auto-sleep capability) keep wake-on-interrupt armed
 * so the chip can receive beacons; with auto-sleep, simply drop the
 * force-wake enable.
 *
 * NOTE(review): the 'if (setChip)' guard and closing braces appear lost
 * in extraction - compare upstream.
 */
static void ath9k_set_power_network_sleep(struct ath_hw *ah, int setChip)
REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
struct ath9k_hw_capabilities *pCap = &ah->caps;
if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) {
/* no auto-sleep: stay wakeable on interrupts */
REG_WRITE(ah, AR_RTC_FORCE_WAKE,
AR_RTC_FORCE_WAKE_ON_INT);
REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE,
AR_RTC_FORCE_WAKE_EN);
/*
 * ath9k_hw_set_power_awake() - bring the chip fully awake.
 *
 * If the RTC reports SHUTDOWN, a full power-on reset is performed
 * first. Then force-wake is asserted repeatedly (polling up to
 * POWER_UP_TIME) until the RTC reports ON. Returns false on timeout.
 *
 * NOTE(review): local declarations (i, val), udelay calls between
 * polls, and the return statements appear lost in extraction.
 */
static bool ath9k_hw_set_power_awake(struct ath_hw *ah, int setChip)
if ((REG_READ(ah, AR_RTC_STATUS) &
AR_RTC_STATUS_M) == AR_RTC_STATUS_SHUTDOWN) {
/* chip is powered down - needs a power-on reset first */
if (ath9k_hw_set_reset_reg(ah,
ATH9K_RESET_POWER_ON) != true) {
ath9k_hw_init_pll(ah, NULL);
if (AR_SREV_9100(ah))
REG_SET_BIT(ah, AR_RTC_RESET,
REG_SET_BIT(ah, AR_RTC_FORCE_WAKE,
AR_RTC_FORCE_WAKE_EN);
/* poll the RTC status until it reports ON (or we time out) */
for (i = POWER_UP_TIME / 50; i > 0; i--) {
val = REG_READ(ah, AR_RTC_STATUS) & AR_RTC_STATUS_M;
if (val == AR_RTC_STATUS_ON)
REG_SET_BIT(ah, AR_RTC_FORCE_WAKE,
AR_RTC_FORCE_WAKE_EN);
ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
"Failed to wakeup in %uus\n",
POWER_UP_TIME / 20);
/* awake: clear the MAC power-save bit */
REG_CLR_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2533 bool ath9k_hw_setpower(struct ath_hw *ah, enum ath9k_power_mode mode)
2535 struct ath_common *common = ath9k_hw_common(ah);
2536 int status = true, setChip = true;
2537 static const char *modes[] = {
2544 if (ah->power_mode == mode)
2547 ath_print(common, ATH_DBG_RESET, "%s -> %s\n",
2548 modes[ah->power_mode], modes[mode]);
2551 case ATH9K_PM_AWAKE:
2552 status = ath9k_hw_set_power_awake(ah, setChip);
2554 case ATH9K_PM_FULL_SLEEP:
2555 ath9k_set_power_sleep(ah, setChip);
2556 ah->chip_fullsleep = true;
2558 case ATH9K_PM_NETWORK_SLEEP:
2559 ath9k_set_power_network_sleep(ah, setChip);
2562 ath_print(common, ATH_DBG_FATAL,
2563 "Unknown power mode %u\n", mode);
2566 ah->power_mode = mode;
2570 EXPORT_SYMBOL(ath9k_hw_setpower);
2573 * Helper for ASPM support.
2575 * Disable PLL when in L0s as well as receiver clock when in L1.
2576 * This power saving option must be enabled through the SerDes.
2578 * Programming the SerDes must go through the same 288 bit serial shift
2579 * register as the other analog registers. Hence the 9 writes.
/*
 * ath9k_hw_configpcipowersave() - program the PCIe SerDes and AR_WA
 * workaround bits for power save. @restore skips SerDes reprogramming;
 * @power_off selects between the D3 (power-off) and D0 (power-on)
 * AR_WA setups. Only meaningful on PCI-Express parts; a
 * pcie_powersave_enable value of 2 disables all SerDes writes.
 *
 * NOTE(review): this extraction has dropped lines (missing braces and
 * else-arms are visible); verify against canonical ath9k hw.c before
 * relying on this text.
 */
2581 void ath9k_hw_configpcipowersave(struct ath_hw *ah, int restore, int power_off)
2586 if (ah->is_pciexpress != true)
2589 /* Do not touch SerDes registers */
2590 if (ah->config.pcie_powersave_enable == 2)
2593 /* Nothing to do on restore for 11N */
2595 if (AR_SREV_9280_20_OR_LATER(ah)) {
2597 * AR9280 2.0 or later chips use SerDes values from the
2598 * initvals.h initialized depending on chipset during
2601 for (i = 0; i < ah->iniPcieSerdes.ia_rows; i++) {
2602 REG_WRITE(ah, INI_RA(&ah->iniPcieSerdes, i, 0),
2603 INI_RA(&ah->iniPcieSerdes, i, 1));
2605 } else if (AR_SREV_9280(ah) &&
2606 (ah->hw_version.macRev == AR_SREV_REVISION_9280_10)) {
2607 REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fd00);
2608 REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
2610 /* RX shut off when elecidle is asserted */
2611 REG_WRITE(ah, AR_PCIE_SERDES, 0xa8000019);
2612 REG_WRITE(ah, AR_PCIE_SERDES, 0x13160820);
2613 REG_WRITE(ah, AR_PCIE_SERDES, 0xe5980560);
2615 /* Shut off CLKREQ active in L1 */
2616 if (ah->config.pcie_clock_req)
2617 REG_WRITE(ah, AR_PCIE_SERDES, 0x401deffc);
2619 REG_WRITE(ah, AR_PCIE_SERDES, 0x401deffd);
2621 REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
2622 REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
2623 REG_WRITE(ah, AR_PCIE_SERDES, 0x00043007);
2625 /* Load the new settings */
2626 REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
/* Fallback SerDes programming for all other chip revisions */
2629 REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fc00);
2630 REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
2632 /* RX shut off when elecidle is asserted */
2633 REG_WRITE(ah, AR_PCIE_SERDES, 0x28000039);
2634 REG_WRITE(ah, AR_PCIE_SERDES, 0x53160824);
2635 REG_WRITE(ah, AR_PCIE_SERDES, 0xe5980579);
2638 * Ignore ah->ah_config.pcie_clock_req setting for
2641 REG_WRITE(ah, AR_PCIE_SERDES, 0x001defff);
2643 REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
2644 REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
2645 REG_WRITE(ah, AR_PCIE_SERDES, 0x000e3007);
2647 /* Load the new settings */
2648 REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
2653 /* set bit 19 to allow forcing of pcie core into L1 state */
2654 REG_SET_BIT(ah, AR_PCIE_PM_CTRL, AR_PCIE_PM_CTRL_ENA);
2656 /* Several PCIe massages to ensure proper behaviour */
2657 if (ah->config.pcie_waen) {
2658 val = ah->config.pcie_waen;
/* D0 (power on): D3_L1_DISABLE must be cleared here */
2660 val &= (~AR_WA_D3_L1_DISABLE);
2662 if (AR_SREV_9285(ah) || AR_SREV_9271(ah) ||
2664 val = AR9285_WA_DEFAULT;
2666 val &= (~AR_WA_D3_L1_DISABLE);
2667 } else if (AR_SREV_9280(ah)) {
2669 * On AR9280 chips bit 22 of 0x4004 needs to be
2670 * set otherwise card may disappear.
2672 val = AR9280_WA_DEFAULT;
2674 val &= (~AR_WA_D3_L1_DISABLE);
2676 val = AR_WA_DEFAULT;
2679 REG_WRITE(ah, AR_WA, val);
/* power_off path: set the disable-L1 workaround bit for D3 entry */
2684 * Set PCIe workaround bits
2685 * bit 14 in WA register (disable L1) should only
2686 * be set when device enters D3 and be cleared
2687 * when device comes back to D0.
2689 if (ah->config.pcie_waen) {
2690 if (ah->config.pcie_waen & AR_WA_D3_L1_DISABLE)
2691 REG_SET_BIT(ah, AR_WA, AR_WA_D3_L1_DISABLE);
2693 if (((AR_SREV_9285(ah) || AR_SREV_9271(ah) ||
2694 AR_SREV_9287(ah)) &&
2695 (AR9285_WA_DEFAULT & AR_WA_D3_L1_DISABLE)) ||
2696 (AR_SREV_9280(ah) &&
2697 (AR9280_WA_DEFAULT & AR_WA_D3_L1_DISABLE))) {
2698 REG_SET_BIT(ah, AR_WA, AR_WA_D3_L1_DISABLE);
2703 EXPORT_SYMBOL(ath9k_hw_configpcipowersave);
2705 /**********************/
2706 /* Interrupt Handling */
2707 /**********************/
2709 bool ath9k_hw_intrpend(struct ath_hw *ah)
2713 if (AR_SREV_9100(ah))
2716 host_isr = REG_READ(ah, AR_INTR_ASYNC_CAUSE);
2717 if ((host_isr & AR_INTR_MAC_IRQ) && (host_isr != AR_INTR_SPURIOUS))
2720 host_isr = REG_READ(ah, AR_INTR_SYNC_CAUSE);
2721 if ((host_isr & AR_INTR_SYNC_DEFAULT)
2722 && (host_isr != AR_INTR_SPURIOUS))
2727 EXPORT_SYMBOL(ath9k_hw_intrpend);
/*
 * ath9k_hw_getisr() - read and decode pending interrupt causes into the
 * driver's ATH9K_INT_* mask (*masked). Handles beacon-misc secondary
 * causes (ISR_S2), RX/TX causes (with optional interrupt mitigation),
 * per-queue TX status (S0_S/S1_S), TIM timer, generic timers (S5_S) and
 * fatal/sync PCI causes, clearing AR_INTR_SYNC_CAUSE at the end.
 *
 * NOTE(review): lines are missing from this extraction (dropped braces,
 * returns and a TX-cause condition); verify against canonical hw.c.
 */
2729 bool ath9k_hw_getisr(struct ath_hw *ah, enum ath9k_int *masked)
2733 struct ath9k_hw_capabilities *pCap = &ah->caps;
2735 bool fatal_int = false;
2736 struct ath_common *common = ath9k_hw_common(ah);
/* Only trust AR_ISR when the RTC says the chip is awake */
2738 if (!AR_SREV_9100(ah)) {
2739 if (REG_READ(ah, AR_INTR_ASYNC_CAUSE) & AR_INTR_MAC_IRQ) {
2740 if ((REG_READ(ah, AR_RTC_STATUS) & AR_RTC_STATUS_M)
2741 == AR_RTC_STATUS_ON) {
2742 isr = REG_READ(ah, AR_ISR);
2746 sync_cause = REG_READ(ah, AR_INTR_SYNC_CAUSE) &
2747 AR_INTR_SYNC_DEFAULT;
2751 if (!isr && !sync_cause)
2755 isr = REG_READ(ah, AR_ISR);
/* Secondary beacon-misc causes live in AR_ISR_S2 */
2759 if (isr & AR_ISR_BCNMISC) {
2761 isr2 = REG_READ(ah, AR_ISR_S2);
2762 if (isr2 & AR_ISR_S2_TIM)
2763 mask2 |= ATH9K_INT_TIM;
2764 if (isr2 & AR_ISR_S2_DTIM)
2765 mask2 |= ATH9K_INT_DTIM;
2766 if (isr2 & AR_ISR_S2_DTIMSYNC)
2767 mask2 |= ATH9K_INT_DTIMSYNC;
2768 if (isr2 & (AR_ISR_S2_CABEND))
2769 mask2 |= ATH9K_INT_CABEND;
2770 if (isr2 & AR_ISR_S2_GTT)
2771 mask2 |= ATH9K_INT_GTT;
2772 if (isr2 & AR_ISR_S2_CST)
2773 mask2 |= ATH9K_INT_CST;
2774 if (isr2 & AR_ISR_S2_TSFOOR)
2775 mask2 |= ATH9K_INT_TSFOOR;
/* Read-and-clear; 0xffffffff means the device is gone */
2778 isr = REG_READ(ah, AR_ISR_RAC);
2779 if (isr == 0xffffffff) {
2784 *masked = isr & ATH9K_INT_COMMON;
2786 if (ah->config.intr_mitigation) {
2787 if (isr & (AR_ISR_RXMINTR | AR_ISR_RXINTM))
2788 *masked |= ATH9K_INT_RX;
2791 if (isr & (AR_ISR_RXOK | AR_ISR_RXERR))
2792 *masked |= ATH9K_INT_RX;
2794 (AR_ISR_TXOK | AR_ISR_TXDESC | AR_ISR_TXERR |
2798 *masked |= ATH9K_INT_TX;
/* Record which queues completed/errored for the TX tasklet */
2800 s0_s = REG_READ(ah, AR_ISR_S0_S);
2801 ah->intr_txqs |= MS(s0_s, AR_ISR_S0_QCU_TXOK);
2802 ah->intr_txqs |= MS(s0_s, AR_ISR_S0_QCU_TXDESC);
2804 s1_s = REG_READ(ah, AR_ISR_S1_S);
2805 ah->intr_txqs |= MS(s1_s, AR_ISR_S1_QCU_TXERR);
2806 ah->intr_txqs |= MS(s1_s, AR_ISR_S1_QCU_TXEOL);
2809 if (isr & AR_ISR_RXORN) {
2810 ath_print(common, ATH_DBG_INTERRUPT,
2811 "receive FIFO overrun interrupt\n");
2814 if (!AR_SREV_9100(ah)) {
2815 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) {
2816 u32 isr5 = REG_READ(ah, AR_ISR_S5_S);
2817 if (isr5 & AR_ISR_S5_TIM_TIMER)
2818 *masked |= ATH9K_INT_TIM_TIMER;
2825 if (AR_SREV_9100(ah))
/* Generic (multiplexed) hardware timers: trigger/threshold in S5_S */
2828 if (isr & AR_ISR_GENTMR) {
2831 s5_s = REG_READ(ah, AR_ISR_S5_S);
2832 if (isr & AR_ISR_GENTMR) {
2833 ah->intr_gen_timer_trigger =
2834 MS(s5_s, AR_ISR_S5_GENTIMER_TRIG);
2836 ah->intr_gen_timer_thresh =
2837 MS(s5_s, AR_ISR_S5_GENTIMER_THRESH);
2839 if (ah->intr_gen_timer_trigger)
2840 *masked |= ATH9K_INT_GENTIMER;
2848 (AR_INTR_SYNC_HOST1_FATAL | AR_INTR_SYNC_HOST1_PERR))
2852 if (sync_cause & AR_INTR_SYNC_HOST1_FATAL) {
2853 ath_print(common, ATH_DBG_ANY,
2854 "received PCI FATAL interrupt\n");
2856 if (sync_cause & AR_INTR_SYNC_HOST1_PERR) {
2857 ath_print(common, ATH_DBG_ANY,
2858 "received PCI PERR interrupt\n");
2860 *masked |= ATH9K_INT_FATAL;
2862 if (sync_cause & AR_INTR_SYNC_RADM_CPL_TIMEOUT) {
2863 ath_print(common, ATH_DBG_INTERRUPT,
2864 "AR_INTR_SYNC_RADM_CPL_TIMEOUT\n");
2865 REG_WRITE(ah, AR_RC, AR_RC_HOSTIF);
2866 REG_WRITE(ah, AR_RC, 0);
2867 *masked |= ATH9K_INT_FATAL;
2869 if (sync_cause & AR_INTR_SYNC_LOCAL_TIMEOUT) {
2870 ath_print(common, ATH_DBG_INTERRUPT,
2871 "AR_INTR_SYNC_LOCAL_TIMEOUT\n");
/* Ack all handled sync causes; read back to flush the write */
2874 REG_WRITE(ah, AR_INTR_SYNC_CAUSE_CLR, sync_cause);
2875 (void) REG_READ(ah, AR_INTR_SYNC_CAUSE_CLR);
2880 EXPORT_SYMBOL(ath9k_hw_getisr);
/*
 * ath9k_hw_set_interrupts() - translate the driver's ATH9K_INT_* mask
 * @ints into hardware IMR/IMR_S2/IMR_S5 bits and (re)enable the
 * interrupt enable registers. Interrupts are globally disabled first
 * (with read-backs to flush posted writes), the new masks are written,
 * and IER/async/sync enables are restored when ATH9K_INT_GLOBAL is set.
 *
 * NOTE(review): some lines are missing from this extraction (dropped
 * braces/else-arms); verify against canonical hw.c.
 */
2882 enum ath9k_int ath9k_hw_set_interrupts(struct ath_hw *ah, enum ath9k_int ints)
2884 u32 omask = ah->mask_reg;
2886 struct ath9k_hw_capabilities *pCap = &ah->caps;
2887 struct ath_common *common = ath9k_hw_common(ah);
2889 ath_print(common, ATH_DBG_INTERRUPT, "0x%x => 0x%x\n", omask, ints);
/* Quiesce all interrupt sources before touching the masks */
2891 if (omask & ATH9K_INT_GLOBAL) {
2892 ath_print(common, ATH_DBG_INTERRUPT, "disable IER\n");
2893 REG_WRITE(ah, AR_IER, AR_IER_DISABLE);
2894 (void) REG_READ(ah, AR_IER);
2895 if (!AR_SREV_9100(ah)) {
2896 REG_WRITE(ah, AR_INTR_ASYNC_ENABLE, 0);
2897 (void) REG_READ(ah, AR_INTR_ASYNC_ENABLE);
2899 REG_WRITE(ah, AR_INTR_SYNC_ENABLE, 0);
2900 (void) REG_READ(ah, AR_INTR_SYNC_ENABLE);
2904 mask = ints & ATH9K_INT_COMMON;
/* Per-type TX completion interrupts follow the per-queue masks */
2907 if (ints & ATH9K_INT_TX) {
2908 if (ah->txok_interrupt_mask)
2909 mask |= AR_IMR_TXOK;
2910 if (ah->txdesc_interrupt_mask)
2911 mask |= AR_IMR_TXDESC;
2912 if (ah->txerr_interrupt_mask)
2913 mask |= AR_IMR_TXERR;
2914 if (ah->txeol_interrupt_mask)
2915 mask |= AR_IMR_TXEOL;
2917 if (ints & ATH9K_INT_RX) {
2918 mask |= AR_IMR_RXERR;
2919 if (ah->config.intr_mitigation)
2920 mask |= AR_IMR_RXMINTR | AR_IMR_RXINTM;
2922 mask |= AR_IMR_RXOK | AR_IMR_RXDESC;
2923 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP))
2924 mask |= AR_IMR_GENTMR;
/* Beacon-misc causes are demuxed from IMR_S2 */
2927 if (ints & (ATH9K_INT_BMISC)) {
2928 mask |= AR_IMR_BCNMISC;
2929 if (ints & ATH9K_INT_TIM)
2930 mask2 |= AR_IMR_S2_TIM;
2931 if (ints & ATH9K_INT_DTIM)
2932 mask2 |= AR_IMR_S2_DTIM;
2933 if (ints & ATH9K_INT_DTIMSYNC)
2934 mask2 |= AR_IMR_S2_DTIMSYNC;
2935 if (ints & ATH9K_INT_CABEND)
2936 mask2 |= AR_IMR_S2_CABEND;
2937 if (ints & ATH9K_INT_TSFOOR)
2938 mask2 |= AR_IMR_S2_TSFOOR;
2941 if (ints & (ATH9K_INT_GTT | ATH9K_INT_CST)) {
2942 mask |= AR_IMR_BCNMISC;
2943 if (ints & ATH9K_INT_GTT)
2944 mask2 |= AR_IMR_S2_GTT;
2945 if (ints & ATH9K_INT_CST)
2946 mask2 |= AR_IMR_S2_CST;
2949 ath_print(common, ATH_DBG_INTERRUPT, "new IMR 0x%x\n", mask);
2950 REG_WRITE(ah, AR_IMR, mask);
/* Preserve unrelated IMR_S2 bits, merge in the new secondary mask */
2951 mask = REG_READ(ah, AR_IMR_S2) & ~(AR_IMR_S2_TIM |
2953 AR_IMR_S2_DTIMSYNC |
2957 AR_IMR_S2_GTT | AR_IMR_S2_CST);
2958 REG_WRITE(ah, AR_IMR_S2, mask | mask2);
2959 ah->mask_reg = ints;
2961 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) {
2962 if (ints & ATH9K_INT_TIM_TIMER)
2963 REG_SET_BIT(ah, AR_IMR_S5, AR_IMR_S5_TIM_TIMER);
2965 REG_CLR_BIT(ah, AR_IMR_S5, AR_IMR_S5_TIM_TIMER);
2968 if (ints & ATH9K_INT_GLOBAL) {
2969 ath_print(common, ATH_DBG_INTERRUPT, "enable IER\n");
2970 REG_WRITE(ah, AR_IER, AR_IER_ENABLE);
2971 if (!AR_SREV_9100(ah)) {
2972 REG_WRITE(ah, AR_INTR_ASYNC_ENABLE,
2974 REG_WRITE(ah, AR_INTR_ASYNC_MASK, AR_INTR_MAC_IRQ);
2977 REG_WRITE(ah, AR_INTR_SYNC_ENABLE,
2978 AR_INTR_SYNC_DEFAULT);
2979 REG_WRITE(ah, AR_INTR_SYNC_MASK,
2980 AR_INTR_SYNC_DEFAULT);
2982 ath_print(common, ATH_DBG_INTERRUPT, "AR_IMR 0x%x IER 0x%x\n",
2983 REG_READ(ah, AR_IMR), REG_READ(ah, AR_IER));
2988 EXPORT_SYMBOL(ath9k_hw_set_interrupts);
2990 /*******************/
2991 /* Beacon Handling */
2992 /*******************/
/*
 * ath9k_hw_beaconinit() - program the beacon timers for the current
 * operating mode. STA/monitor arm only the TBTT timer; IBSS/mesh set
 * the ATIM/NDP timer and then fall through to the AP setup, which arms
 * TBTT, DMA-beacon-alert and SWBA relative to @next_beacon (TU).
 * All four period registers are programmed from @beacon_period.
 *
 * NOTE(review): extraction dropped lines here (e.g. the ADHOC
 * atim_window expression and some break statements); the ADHOC->AP
 * fallthrough visible at case NL80211_IFTYPE_AP is deliberate upstream
 * behavior — verify before editing.
 */
2994 void ath9k_hw_beaconinit(struct ath_hw *ah, u32 next_beacon, u32 beacon_period)
2998 ah->beacon_interval = beacon_period;
3000 switch (ah->opmode) {
3001 case NL80211_IFTYPE_STATION:
3002 case NL80211_IFTYPE_MONITOR:
3003 REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(next_beacon));
3004 REG_WRITE(ah, AR_NEXT_DMA_BEACON_ALERT, 0xffff);
3005 REG_WRITE(ah, AR_NEXT_SWBA, 0x7ffff);
3006 flags |= AR_TBTT_TIMER_EN;
3008 case NL80211_IFTYPE_ADHOC:
3009 case NL80211_IFTYPE_MESH_POINT:
3010 REG_SET_BIT(ah, AR_TXCFG,
3011 AR_TXCFG_ADHOC_BEACON_ATIM_TX_POLICY);
3012 REG_WRITE(ah, AR_NEXT_NDP_TIMER,
3013 TU_TO_USEC(next_beacon +
3014 (ah->atim_window ? ah->
3016 flags |= AR_NDP_TIMER_EN;
3017 case NL80211_IFTYPE_AP:
3018 REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(next_beacon));
3019 REG_WRITE(ah, AR_NEXT_DMA_BEACON_ALERT,
3020 TU_TO_USEC(next_beacon -
3022 dma_beacon_response_time));
3023 REG_WRITE(ah, AR_NEXT_SWBA,
3024 TU_TO_USEC(next_beacon -
3026 sw_beacon_response_time));
3028 AR_TBTT_TIMER_EN | AR_DBA_TIMER_EN | AR_SWBA_TIMER_EN;
3031 ath_print(ath9k_hw_common(ah), ATH_DBG_BEACON,
3032 "%s: unsupported opmode: %d\n",
3033 __func__, ah->opmode);
3038 REG_WRITE(ah, AR_BEACON_PERIOD, TU_TO_USEC(beacon_period));
3039 REG_WRITE(ah, AR_DMA_BEACON_PERIOD, TU_TO_USEC(beacon_period));
3040 REG_WRITE(ah, AR_SWBA_PERIOD, TU_TO_USEC(beacon_period));
3041 REG_WRITE(ah, AR_NDP_PERIOD, TU_TO_USEC(beacon_period));
3043 beacon_period &= ~ATH9K_BEACON_ENA;
3044 if (beacon_period & ATH9K_BEACON_RESET_TSF) {
3045 ath9k_hw_reset_tsf(ah);
3048 REG_SET_BIT(ah, AR_TIMER_MODE, flags);
3050 EXPORT_SYMBOL(ath9k_hw_beaconinit);
/*
 * ath9k_hw_set_sta_beacon_timers() - program station-mode sleep/beacon
 * timers from @bs: next TBTT, beacon/DTIM periods, beacon-miss
 * threshold, TIM/DTIM wake points (advanced by SLEEP_SLOP), CAB and
 * beacon timeouts, and the TSF out-of-range threshold. The effective
 * beacon/DTIM intervals are stretched to at least bs_sleepduration.
 */
3052 void ath9k_hw_set_sta_beacon_timers(struct ath_hw *ah,
3053 const struct ath9k_beacon_state *bs)
3055 u32 nextTbtt, beaconintval, dtimperiod, beacontimeout;
3056 struct ath9k_hw_capabilities *pCap = &ah->caps;
3057 struct ath_common *common = ath9k_hw_common(ah);
3059 REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(bs->bs_nexttbtt));
3061 REG_WRITE(ah, AR_BEACON_PERIOD,
3062 TU_TO_USEC(bs->bs_intval & ATH9K_BEACON_PERIOD));
3063 REG_WRITE(ah, AR_DMA_BEACON_PERIOD,
3064 TU_TO_USEC(bs->bs_intval & ATH9K_BEACON_PERIOD));
3066 REG_RMW_FIELD(ah, AR_RSSI_THR,
3067 AR_RSSI_THR_BM_THR, bs->bs_bmissthreshold);
/* Never sleep for less than the configured sleep duration */
3069 beaconintval = bs->bs_intval & ATH9K_BEACON_PERIOD;
3071 if (bs->bs_sleepduration > beaconintval)
3072 beaconintval = bs->bs_sleepduration;
3074 dtimperiod = bs->bs_dtimperiod;
3075 if (bs->bs_sleepduration > dtimperiod)
3076 dtimperiod = bs->bs_sleepduration;
/* If every beacon is a DTIM, wake at the DTIM point */
3078 if (beaconintval == dtimperiod)
3079 nextTbtt = bs->bs_nextdtim;
3081 nextTbtt = bs->bs_nexttbtt;
3083 ath_print(common, ATH_DBG_BEACON, "next DTIM %d\n", bs->bs_nextdtim);
3084 ath_print(common, ATH_DBG_BEACON, "next beacon %d\n", nextTbtt);
3085 ath_print(common, ATH_DBG_BEACON, "beacon period %d\n", beaconintval);
3086 ath_print(common, ATH_DBG_BEACON, "DTIM period %d\n", dtimperiod);
3088 REG_WRITE(ah, AR_NEXT_DTIM,
3089 TU_TO_USEC(bs->bs_nextdtim - SLEEP_SLOP));
3090 REG_WRITE(ah, AR_NEXT_TIM, TU_TO_USEC(nextTbtt - SLEEP_SLOP));
3092 REG_WRITE(ah, AR_SLEEP1,
3093 SM((CAB_TIMEOUT_VAL << 3), AR_SLEEP1_CAB_TIMEOUT)
3094 | AR_SLEEP1_ASSUME_DTIM);
3096 if (pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)
3097 beacontimeout = (BEACON_TIMEOUT_VAL << 3);
3099 beacontimeout = MIN_BEACON_TIMEOUT_VAL;
3101 REG_WRITE(ah, AR_SLEEP2,
3102 SM(beacontimeout, AR_SLEEP2_BEACON_TIMEOUT));
3104 REG_WRITE(ah, AR_TIM_PERIOD, TU_TO_USEC(beaconintval));
3105 REG_WRITE(ah, AR_DTIM_PERIOD, TU_TO_USEC(dtimperiod));
3107 REG_SET_BIT(ah, AR_TIMER_MODE,
3108 AR_TBTT_TIMER_EN | AR_TIM_TIMER_EN |
3111 /* TSF Out of Range Threshold */
3112 REG_WRITE(ah, AR_TSFOOR_THRESHOLD, bs->bs_tsfoor_threshold);
3114 EXPORT_SYMBOL(ath9k_hw_set_sta_beacon_timers);
3116 /*******************/
3117 /* HW Capabilities */
3118 /*******************/
/*
 * ath9k_hw_fill_cap_info() - populate ah->caps, the regulatory state
 * and the btcoex configuration from EEPROM contents and chip revision:
 * regdomain words (with subvendor-specific remapping), supported
 * wireless modes (11a/11g plus HT20/HT40 when enabled), TX/RX chain
 * masks, cipher/MIC capabilities, queue and keycache sizes, GPIO
 * counts, RF-kill settings, regulatory capability flags, antenna
 * configuration counts and the bluetooth-coex scheme.
 *
 * NOTE(review): this extraction has dropped lines (missing braces,
 * rfkill GPIO assignment, AUTOSLEEP condition, etc.); verify against
 * canonical hw.c before editing.
 */
3120 void ath9k_hw_fill_cap_info(struct ath_hw *ah)
3122 struct ath9k_hw_capabilities *pCap = &ah->caps;
3123 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
3124 struct ath_common *common = ath9k_hw_common(ah);
3125 struct ath_btcoex_hw *btcoex_hw = &ah->btcoex_hw;
3127 u16 capField = 0, eeval;
3129 eeval = ah->eep_ops->get_eeprom(ah, EEP_REG_0);
3130 regulatory->current_rd = eeval;
3132 eeval = ah->eep_ops->get_eeprom(ah, EEP_REG_1);
3133 if (AR_SREV_9285_10_OR_LATER(ah))
3134 eeval |= AR9285_RDEXT_DEFAULT;
3135 regulatory->current_rd_ext = eeval;
3137 capField = ah->eep_ops->get_eeprom(ah, EEP_OP_CAP);
/* Remap some regdomain codes for NEW_A subvendor non-AP use */
3139 if (ah->opmode != NL80211_IFTYPE_AP &&
3140 ah->hw_version.subvendorid == AR_SUBVENDOR_ID_NEW_A) {
3141 if (regulatory->current_rd == 0x64 ||
3142 regulatory->current_rd == 0x65)
3143 regulatory->current_rd += 5;
3144 else if (regulatory->current_rd == 0x41)
3145 regulatory->current_rd = 0x43;
3146 ath_print(common, ATH_DBG_REGULATORY,
3147 "regdomain mapped to 0x%x\n", regulatory->current_rd);
3150 eeval = ah->eep_ops->get_eeprom(ah, EEP_OP_MODE);
3151 bitmap_zero(pCap->wireless_modes, ATH9K_MODE_MAX);
/* 5 GHz: 11a plus HT modes unless the EEPROM disables them */
3153 if (eeval & AR5416_OPFLAGS_11A) {
3154 set_bit(ATH9K_MODE_11A, pCap->wireless_modes);
3155 if (ah->config.ht_enable) {
3156 if (!(eeval & AR5416_OPFLAGS_N_5G_HT20))
3157 set_bit(ATH9K_MODE_11NA_HT20,
3158 pCap->wireless_modes);
3159 if (!(eeval & AR5416_OPFLAGS_N_5G_HT40)) {
3160 set_bit(ATH9K_MODE_11NA_HT40PLUS,
3161 pCap->wireless_modes);
3162 set_bit(ATH9K_MODE_11NA_HT40MINUS,
3163 pCap->wireless_modes);
/* 2.4 GHz: 11g plus HT modes unless the EEPROM disables them */
3168 if (eeval & AR5416_OPFLAGS_11G) {
3169 set_bit(ATH9K_MODE_11G, pCap->wireless_modes);
3170 if (ah->config.ht_enable) {
3171 if (!(eeval & AR5416_OPFLAGS_N_2G_HT20))
3172 set_bit(ATH9K_MODE_11NG_HT20,
3173 pCap->wireless_modes);
3174 if (!(eeval & AR5416_OPFLAGS_N_2G_HT40)) {
3175 set_bit(ATH9K_MODE_11NG_HT40PLUS,
3176 pCap->wireless_modes);
3177 set_bit(ATH9K_MODE_11NG_HT40MINUS,
3178 pCap->wireless_modes);
3183 pCap->tx_chainmask = ah->eep_ops->get_eeprom(ah, EEP_TX_MASK);
3185 * For AR9271 we will temporarilly uses the rx chainmax as read from
3188 if ((ah->hw_version.devid == AR5416_DEVID_PCI) &&
3189 !(eeval & AR5416_OPFLAGS_11A) &&
3190 !(AR_SREV_9271(ah)))
3191 /* CB71: GPIO 0 is pulled down to indicate 3 rx chains */
3192 pCap->rx_chainmask = ath9k_hw_gpio_get(ah, 0) ? 0x5 : 0x7;
3194 /* Use rx_chainmask from EEPROM. */
3195 pCap->rx_chainmask = ah->eep_ops->get_eeprom(ah, EEP_RX_MASK);
3197 if (!(AR_SREV_9280(ah) && (ah->hw_version.macRev == 0)))
3198 ah->misc_mode |= AR_PCU_MIC_NEW_LOC_ENA;
/* Supported channel ranges (MHz) */
3200 pCap->low_2ghz_chan = 2312;
3201 pCap->high_2ghz_chan = 2732;
3203 pCap->low_5ghz_chan = 4920;
3204 pCap->high_5ghz_chan = 6100;
3206 pCap->hw_caps &= ~ATH9K_HW_CAP_CIPHER_CKIP;
3207 pCap->hw_caps |= ATH9K_HW_CAP_CIPHER_TKIP;
3208 pCap->hw_caps |= ATH9K_HW_CAP_CIPHER_AESCCM;
3210 pCap->hw_caps &= ~ATH9K_HW_CAP_MIC_CKIP;
3211 pCap->hw_caps |= ATH9K_HW_CAP_MIC_TKIP;
3212 pCap->hw_caps |= ATH9K_HW_CAP_MIC_AESCCM;
3214 if (ah->config.ht_enable)
3215 pCap->hw_caps |= ATH9K_HW_CAP_HT;
3217 pCap->hw_caps &= ~ATH9K_HW_CAP_HT;
3219 pCap->hw_caps |= ATH9K_HW_CAP_GTT;
3220 pCap->hw_caps |= ATH9K_HW_CAP_VEOL;
3221 pCap->hw_caps |= ATH9K_HW_CAP_BSSIDMASK;
3222 pCap->hw_caps &= ~ATH9K_HW_CAP_MCAST_KEYSEARCH;
3224 if (capField & AR_EEPROM_EEPCAP_MAXQCU)
3225 pCap->total_queues =
3226 MS(capField, AR_EEPROM_EEPCAP_MAXQCU);
3228 pCap->total_queues = ATH9K_NUM_TX_QUEUES;
3230 if (capField & AR_EEPROM_EEPCAP_KC_ENTRIES)
3231 pCap->keycache_size =
3232 1 << MS(capField, AR_EEPROM_EEPCAP_KC_ENTRIES);
3234 pCap->keycache_size = AR_KEYTABLE_SIZE;
3236 pCap->hw_caps |= ATH9K_HW_CAP_FASTCC;
3237 pCap->tx_triglevel_max = MAX_TX_FIFO_THRESHOLD;
3239 if (AR_SREV_9285_10_OR_LATER(ah))
3240 pCap->num_gpio_pins = AR9285_NUM_GPIO;
3241 else if (AR_SREV_9280_10_OR_LATER(ah))
3242 pCap->num_gpio_pins = AR928X_NUM_GPIO;
3244 pCap->num_gpio_pins = AR_NUM_GPIO;
3246 if (AR_SREV_9160_10_OR_LATER(ah) || AR_SREV_9100(ah)) {
3247 pCap->hw_caps |= ATH9K_HW_CAP_CST;
3248 pCap->rts_aggr_limit = ATH_AMPDU_LIMIT_MAX;
3250 pCap->rts_aggr_limit = (8 * 1024);
3253 pCap->hw_caps |= ATH9K_HW_CAP_ENHANCEDPM;
3255 #if defined(CONFIG_RFKILL) || defined(CONFIG_RFKILL_MODULE)
3256 ah->rfsilent = ah->eep_ops->get_eeprom(ah, EEP_RF_SILENT);
3257 if (ah->rfsilent & EEP_RFSILENT_ENABLED) {
3259 MS(ah->rfsilent, EEP_RFSILENT_GPIO_SEL);
3260 ah->rfkill_polarity =
3261 MS(ah->rfsilent, EEP_RFSILENT_POLARITY);
3263 pCap->hw_caps |= ATH9K_HW_CAP_RFSILENT;
3267 pCap->hw_caps &= ~ATH9K_HW_CAP_AUTOSLEEP;
3269 if (AR_SREV_9280(ah) || AR_SREV_9285(ah))
3270 pCap->hw_caps &= ~ATH9K_HW_CAP_4KB_SPLITTRANS;
3272 pCap->hw_caps |= ATH9K_HW_CAP_4KB_SPLITTRANS;
/* Regulatory capability flags depend on the extension regdomain bits */
3274 if (regulatory->current_rd_ext & (1 << REG_EXT_JAPAN_MIDBAND)) {
3276 AR_EEPROM_EEREGCAP_EN_KK_NEW_11A |
3277 AR_EEPROM_EEREGCAP_EN_KK_U1_EVEN |
3278 AR_EEPROM_EEREGCAP_EN_KK_U2 |
3279 AR_EEPROM_EEREGCAP_EN_KK_MIDBAND;
3282 AR_EEPROM_EEREGCAP_EN_KK_NEW_11A |
3283 AR_EEPROM_EEREGCAP_EN_KK_U1_EVEN;
3286 /* Advertise midband for AR5416 with FCC midband set in eeprom */
3287 if (regulatory->current_rd_ext & (1 << REG_EXT_FCC_MIDBAND) &&
3289 pCap->reg_cap |= AR_EEPROM_EEREGCAP_EN_FCC_MIDBAND;
3291 pCap->num_antcfg_5ghz =
3292 ah->eep_ops->get_num_ant_config(ah, ATH9K_HAL_FREQ_BAND_5GHZ);
3293 pCap->num_antcfg_2ghz =
3294 ah->eep_ops->get_num_ant_config(ah, ATH9K_HAL_FREQ_BAND_2GHZ);
/* Bluetooth coexistence wiring: 3-wire on AR9285, 2-wire on AR9280+ */
3296 if (AR_SREV_9280_10_OR_LATER(ah) &&
3297 ath9k_hw_btcoex_supported(ah)) {
3298 btcoex_hw->btactive_gpio = ATH_BTACTIVE_GPIO;
3299 btcoex_hw->wlanactive_gpio = ATH_WLANACTIVE_GPIO;
3301 if (AR_SREV_9285(ah)) {
3302 btcoex_hw->scheme = ATH_BTCOEX_CFG_3WIRE;
3303 btcoex_hw->btpriority_gpio = ATH_BTPRIORITY_GPIO;
3305 btcoex_hw->scheme = ATH_BTCOEX_CFG_2WIRE;
3308 btcoex_hw->scheme = ATH_BTCOEX_CFG_NONE;
/*
 * ath9k_hw_getcapability() - query a hardware capability @type with a
 * @capability sub-selector; scalar answers go into *result, yes/no
 * answers are the return value. Covers supported ciphers, TKIP MIC and
 * key split, antenna diversity, multicast key search, TX power limits
 * and (at the end) the dual-stream capability via EEP_RC_CHAIN_MASK.
 *
 * NOTE(review): the switch scaffolding (case labels for sub-selectors,
 * returns, default arms) is partly missing in this extraction; verify
 * against canonical hw.c.
 */
3312 bool ath9k_hw_getcapability(struct ath_hw *ah, enum ath9k_capability_type type,
3313 u32 capability, u32 *result)
3315 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
3317 case ATH9K_CAP_CIPHER:
3318 switch (capability) {
3319 case ATH9K_CIPHER_AES_CCM:
3320 case ATH9K_CIPHER_AES_OCB:
3321 case ATH9K_CIPHER_TKIP:
3322 case ATH9K_CIPHER_WEP:
3323 case ATH9K_CIPHER_MIC:
3324 case ATH9K_CIPHER_CLR:
3329 case ATH9K_CAP_TKIP_MIC:
3330 switch (capability) {
3334 return (ah->sta_id1_defaults &
3335 AR_STA_ID1_CRPT_MIC_ENABLE) ? true :
3338 case ATH9K_CAP_TKIP_SPLIT:
3339 return (ah->misc_mode & AR_PCU_MIC_NEW_LOC_ENA) ?
3341 case ATH9K_CAP_DIVERSITY:
3342 return (REG_READ(ah, AR_PHY_CCK_DETECT) &
3343 AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV) ?
3345 case ATH9K_CAP_MCAST_KEYSRCH:
3346 switch (capability) {
3350 if (REG_READ(ah, AR_STA_ID1) & AR_STA_ID1_ADHOC) {
3353 return (ah->sta_id1_defaults &
3354 AR_STA_ID1_MCAST_KSRCH) ? true :
3359 case ATH9K_CAP_TXPOW:
3360 switch (capability) {
3364 *result = regulatory->power_limit;
3367 *result = regulatory->max_power_level;
3370 *result = regulatory->tp_scale;
3375 return (AR_SREV_9280_20_OR_LATER(ah) &&
3376 (ah->eep_ops->get_eeprom(ah, EEP_RC_CHAIN_MASK) == 1))
3382 EXPORT_SYMBOL(ath9k_hw_getcapability);
/*
 * ath9k_hw_setcapability() - toggle a settable capability: TKIP MIC
 * (sta_id1_defaults CRPT_MIC_ENABLE), antenna fast diversity (PHY CCK
 * detect register bit), or multicast key search (sta_id1_defaults
 * MCAST_KSRCH). @setting non-zero enables, zero disables.
 *
 * NOTE(review): the switch header, 'if (setting)' lines and returns
 * are missing from this extraction; verify against canonical hw.c.
 */
3384 bool ath9k_hw_setcapability(struct ath_hw *ah, enum ath9k_capability_type type,
3385 u32 capability, u32 setting, int *status)
3390 case ATH9K_CAP_TKIP_MIC:
3392 ah->sta_id1_defaults |=
3393 AR_STA_ID1_CRPT_MIC_ENABLE;
3395 ah->sta_id1_defaults &=
3396 ~AR_STA_ID1_CRPT_MIC_ENABLE;
3398 case ATH9K_CAP_DIVERSITY:
3399 v = REG_READ(ah, AR_PHY_CCK_DETECT);
3401 v |= AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV;
3403 v &= ~AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV;
3404 REG_WRITE(ah, AR_PHY_CCK_DETECT, v);
3406 case ATH9K_CAP_MCAST_KEYSRCH:
3408 ah->sta_id1_defaults |= AR_STA_ID1_MCAST_KSRCH;
3410 ah->sta_id1_defaults &= ~AR_STA_ID1_MCAST_KSRCH;
3416 EXPORT_SYMBOL(ath9k_hw_setcapability);
3418 /****************************/
3419 /* GPIO / RFKILL / Antennae */
3420 /****************************/
3422 static void ath9k_hw_gpio_cfg_output_mux(struct ath_hw *ah,
3426 u32 gpio_shift, tmp;
3429 addr = AR_GPIO_OUTPUT_MUX3;
3431 addr = AR_GPIO_OUTPUT_MUX2;
3433 addr = AR_GPIO_OUTPUT_MUX1;
3435 gpio_shift = (gpio % 6) * 5;
3437 if (AR_SREV_9280_20_OR_LATER(ah)
3438 || (addr != AR_GPIO_OUTPUT_MUX1)) {
3439 REG_RMW(ah, addr, (type << gpio_shift),
3440 (0x1f << gpio_shift));
3442 tmp = REG_READ(ah, addr);
3443 tmp = ((tmp & 0x1F0) << 1) | (tmp & ~0x1F0);
3444 tmp &= ~(0x1f << gpio_shift);
3445 tmp |= (type << gpio_shift);
3446 REG_WRITE(ah, addr, tmp);
3450 void ath9k_hw_cfg_gpio_input(struct ath_hw *ah, u32 gpio)
3454 BUG_ON(gpio >= ah->caps.num_gpio_pins);
3456 gpio_shift = gpio << 1;
3460 (AR_GPIO_OE_OUT_DRV_NO << gpio_shift),
3461 (AR_GPIO_OE_OUT_DRV << gpio_shift));
3463 EXPORT_SYMBOL(ath9k_hw_cfg_gpio_input);
3465 u32 ath9k_hw_gpio_get(struct ath_hw *ah, u32 gpio)
3467 #define MS_REG_READ(x, y) \
3468 (MS(REG_READ(ah, AR_GPIO_IN_OUT), x##_GPIO_IN_VAL) & (AR_GPIO_BIT(y)))
3470 if (gpio >= ah->caps.num_gpio_pins)
3473 if (AR_SREV_9287_10_OR_LATER(ah))
3474 return MS_REG_READ(AR9287, gpio) != 0;
3475 else if (AR_SREV_9285_10_OR_LATER(ah))
3476 return MS_REG_READ(AR9285, gpio) != 0;
3477 else if (AR_SREV_9280_10_OR_LATER(ah))
3478 return MS_REG_READ(AR928X, gpio) != 0;
3480 return MS_REG_READ(AR, gpio) != 0;
3482 EXPORT_SYMBOL(ath9k_hw_gpio_get);
3484 void ath9k_hw_cfg_output(struct ath_hw *ah, u32 gpio,
3489 ath9k_hw_gpio_cfg_output_mux(ah, gpio, ah_signal_type);
3491 gpio_shift = 2 * gpio;
3495 (AR_GPIO_OE_OUT_DRV_ALL << gpio_shift),
3496 (AR_GPIO_OE_OUT_DRV << gpio_shift));
3498 EXPORT_SYMBOL(ath9k_hw_cfg_output);
3500 void ath9k_hw_set_gpio(struct ath_hw *ah, u32 gpio, u32 val)
3502 REG_RMW(ah, AR_GPIO_IN_OUT, ((val & 1) << gpio),
3505 EXPORT_SYMBOL(ath9k_hw_set_gpio);
3507 u32 ath9k_hw_getdefantenna(struct ath_hw *ah)
3509 return REG_READ(ah, AR_DEF_ANTENNA) & 0x7;
3511 EXPORT_SYMBOL(ath9k_hw_getdefantenna);
3513 void ath9k_hw_setantenna(struct ath_hw *ah, u32 antenna)
3515 REG_WRITE(ah, AR_DEF_ANTENNA, (antenna & 0x7));
3517 EXPORT_SYMBOL(ath9k_hw_setantenna);
/*
 * ath9k_hw_setantennaswitch() - apply a fixed-A / fixed-B / variable
 * antenna setting. Only AR9280 gets chainmask rewriting: the original
 * tx/rx chainmasks are latched in function-local statics on first use
 * so ATH9K_ANT_VARIABLE can restore them later. Always records the
 * chosen setting in ah->config.diversity_control.
 *
 * NOTE(review): parameters after 'chan', several case/brace lines and
 * the return are missing from this extraction; verify against
 * canonical hw.c. The static locals also make this non-reentrant and
 * shared across all device instances — confirm that is acceptable.
 */
3519 bool ath9k_hw_setantennaswitch(struct ath_hw *ah,
3520 enum ath9k_ant_setting settings,
3521 struct ath9k_channel *chan,
3526 static u8 tx_chainmask_cfg, rx_chainmask_cfg;
3528 if (AR_SREV_9280(ah)) {
3529 if (!tx_chainmask_cfg) {
3531 tx_chainmask_cfg = *tx_chainmask;
3532 rx_chainmask_cfg = *rx_chainmask;
3536 case ATH9K_ANT_FIXED_A:
3537 *tx_chainmask = ATH9K_ANTENNA0_CHAINMASK;
3538 *rx_chainmask = ATH9K_ANTENNA0_CHAINMASK;
3539 *antenna_cfgd = true;
3541 case ATH9K_ANT_FIXED_B:
3542 if (ah->caps.tx_chainmask >
3543 ATH9K_ANTENNA1_CHAINMASK) {
3544 *tx_chainmask = ATH9K_ANTENNA1_CHAINMASK;
3546 *rx_chainmask = ATH9K_ANTENNA1_CHAINMASK;
3547 *antenna_cfgd = true;
3549 case ATH9K_ANT_VARIABLE:
3550 *tx_chainmask = tx_chainmask_cfg;
3551 *rx_chainmask = rx_chainmask_cfg;
3552 *antenna_cfgd = true;
3558 ah->config.diversity_control = settings;
3564 /*********************/
3565 /* General Operation */
3566 /*********************/
3568 u32 ath9k_hw_getrxfilter(struct ath_hw *ah)
3570 u32 bits = REG_READ(ah, AR_RX_FILTER);
3571 u32 phybits = REG_READ(ah, AR_PHY_ERR);
3573 if (phybits & AR_PHY_ERR_RADAR)
3574 bits |= ATH9K_RX_FILTER_PHYRADAR;
3575 if (phybits & (AR_PHY_ERR_OFDM_TIMING | AR_PHY_ERR_CCK_TIMING))
3576 bits |= ATH9K_RX_FILTER_PHYERR;
3580 EXPORT_SYMBOL(ath9k_hw_getrxfilter);
3582 void ath9k_hw_setrxfilter(struct ath_hw *ah, u32 bits)
3586 REG_WRITE(ah, AR_RX_FILTER, bits);
3589 if (bits & ATH9K_RX_FILTER_PHYRADAR)
3590 phybits |= AR_PHY_ERR_RADAR;
3591 if (bits & ATH9K_RX_FILTER_PHYERR)
3592 phybits |= AR_PHY_ERR_OFDM_TIMING | AR_PHY_ERR_CCK_TIMING;
3593 REG_WRITE(ah, AR_PHY_ERR, phybits);
3596 REG_WRITE(ah, AR_RXCFG,
3597 REG_READ(ah, AR_RXCFG) | AR_RXCFG_ZLFDMA);
3599 REG_WRITE(ah, AR_RXCFG,
3600 REG_READ(ah, AR_RXCFG) & ~AR_RXCFG_ZLFDMA);
3602 EXPORT_SYMBOL(ath9k_hw_setrxfilter);
3604 bool ath9k_hw_phy_disable(struct ath_hw *ah)
3606 if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_WARM))
3609 ath9k_hw_init_pll(ah, NULL);
3612 EXPORT_SYMBOL(ath9k_hw_phy_disable);
3614 bool ath9k_hw_disable(struct ath_hw *ah)
3616 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
3619 if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_COLD))
3622 ath9k_hw_init_pll(ah, NULL);
3625 EXPORT_SYMBOL(ath9k_hw_disable);
3627 void ath9k_hw_set_txpowerlimit(struct ath_hw *ah, u32 limit)
3629 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
3630 struct ath9k_channel *chan = ah->curchan;
3631 struct ieee80211_channel *channel = chan->chan;
3633 regulatory->power_limit = min(limit, (u32) MAX_RATE_POWER);
3635 ah->eep_ops->set_txpower(ah, chan,
3636 ath9k_regd_get_ctl(regulatory, chan),
3637 channel->max_antenna_gain * 2,
3638 channel->max_power * 2,
3639 min((u32) MAX_RATE_POWER,
3640 (u32) regulatory->power_limit));
3642 EXPORT_SYMBOL(ath9k_hw_set_txpowerlimit);
3644 void ath9k_hw_setmac(struct ath_hw *ah, const u8 *mac)
3646 memcpy(ath9k_hw_common(ah)->macaddr, mac, ETH_ALEN);
3648 EXPORT_SYMBOL(ath9k_hw_setmac);
3650 void ath9k_hw_setopmode(struct ath_hw *ah)
3652 ath9k_hw_set_operating_mode(ah, ah->opmode);
3654 EXPORT_SYMBOL(ath9k_hw_setopmode);
3656 void ath9k_hw_setmcastfilter(struct ath_hw *ah, u32 filter0, u32 filter1)
3658 REG_WRITE(ah, AR_MCAST_FIL0, filter0);
3659 REG_WRITE(ah, AR_MCAST_FIL1, filter1);
3661 EXPORT_SYMBOL(ath9k_hw_setmcastfilter);
3663 void ath9k_hw_write_associd(struct ath_hw *ah)
3665 struct ath_common *common = ath9k_hw_common(ah);
3667 REG_WRITE(ah, AR_BSS_ID0, get_unaligned_le32(common->curbssid));
3668 REG_WRITE(ah, AR_BSS_ID1, get_unaligned_le16(common->curbssid + 4) |
3669 ((common->curaid & 0x3fff) << AR_BSS_ID1_AID_S));
3671 EXPORT_SYMBOL(ath9k_hw_write_associd);
3673 u64 ath9k_hw_gettsf64(struct ath_hw *ah)
3677 tsf = REG_READ(ah, AR_TSF_U32);
3678 tsf = (tsf << 32) | REG_READ(ah, AR_TSF_L32);
3682 EXPORT_SYMBOL(ath9k_hw_gettsf64);
3684 void ath9k_hw_settsf64(struct ath_hw *ah, u64 tsf64)
3686 REG_WRITE(ah, AR_TSF_L32, tsf64 & 0xffffffff);
3687 REG_WRITE(ah, AR_TSF_U32, (tsf64 >> 32) & 0xffffffff);
3689 EXPORT_SYMBOL(ath9k_hw_settsf64);
3691 void ath9k_hw_reset_tsf(struct ath_hw *ah)
3693 if (!ath9k_hw_wait(ah, AR_SLP32_MODE, AR_SLP32_TSF_WRITE_STATUS, 0,
3694 AH_TSF_WRITE_TIMEOUT))
3695 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET,
3696 "AR_SLP32_TSF_WRITE_STATUS limit exceeded\n");
3698 REG_WRITE(ah, AR_RESET_TSF, AR_RESET_TSF_ONCE);
3700 EXPORT_SYMBOL(ath9k_hw_reset_tsf);
3702 void ath9k_hw_set_tsfadjust(struct ath_hw *ah, u32 setting)
3705 ah->misc_mode |= AR_PCU_TX_ADD_TSF;
3707 ah->misc_mode &= ~AR_PCU_TX_ADD_TSF;
3709 EXPORT_SYMBOL(ath9k_hw_set_tsfadjust);
3711 bool ath9k_hw_setslottime(struct ath_hw *ah, u32 us)
3713 if (us < ATH9K_SLOT_TIME_9 || us > ath9k_hw_mac_to_usec(ah, 0xffff)) {
3714 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET,
3715 "bad slot time %u\n", us);
3716 ah->slottime = (u32) -1;
3719 REG_WRITE(ah, AR_D_GBL_IFS_SLOT, ath9k_hw_mac_to_clks(ah, us));
3724 EXPORT_SYMBOL(ath9k_hw_setslottime);
3726 void ath9k_hw_set11nmac2040(struct ath_hw *ah)
3728 struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;
3731 if (conf_is_ht40(conf) && !ah->config.cwm_ignore_extcca)
3732 macmode = AR_2040_JOINED_RX_CLEAR;
3736 REG_WRITE(ah, AR_2040_MODE, macmode);
3739 /* HW Generic timers configuration */
/*
 * Per-slot register layout for the hardware generic timers. Slots 0-7
 * all alias the single NDP timer (mode bit 0x0080); slots 8-15 map to
 * the eight NDP2 timers, whose next/period registers are laid out at
 * consecutive 4-byte offsets with one mode bit each.
 */
3741 static const struct ath_gen_timer_configuration gen_tmr_configuration[] =
3743 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
3744 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
3745 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
3746 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
3747 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
3748 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
3749 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
3750 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
3751 {AR_NEXT_NDP2_TIMER, AR_NDP2_PERIOD, AR_NDP2_TIMER_MODE, 0x0001},
3752 {AR_NEXT_NDP2_TIMER + 1*4, AR_NDP2_PERIOD + 1*4,
3753 AR_NDP2_TIMER_MODE, 0x0002},
3754 {AR_NEXT_NDP2_TIMER + 2*4, AR_NDP2_PERIOD + 2*4,
3755 AR_NDP2_TIMER_MODE, 0x0004},
3756 {AR_NEXT_NDP2_TIMER + 3*4, AR_NDP2_PERIOD + 3*4,
3757 AR_NDP2_TIMER_MODE, 0x0008},
3758 {AR_NEXT_NDP2_TIMER + 4*4, AR_NDP2_PERIOD + 4*4,
3759 AR_NDP2_TIMER_MODE, 0x0010},
3760 {AR_NEXT_NDP2_TIMER + 5*4, AR_NDP2_PERIOD + 5*4,
3761 AR_NDP2_TIMER_MODE, 0x0020},
3762 {AR_NEXT_NDP2_TIMER + 6*4, AR_NDP2_PERIOD + 6*4,
3763 AR_NDP2_TIMER_MODE, 0x0040},
3764 {AR_NEXT_NDP2_TIMER + 7*4, AR_NDP2_PERIOD + 7*4,
3765 AR_NDP2_TIMER_MODE, 0x0080}
/* HW generic timer primitives */

/* compute and clear index of rightmost 1 */
3771 static u32 rightmost_index(struct ath_gen_timer_table *timer_table, u32 *mask)
3781 return timer_table->gen_timer_index[b];
3784 u32 ath9k_hw_gettsf32(struct ath_hw *ah)
3786 return REG_READ(ah, AR_TSF_L32);
3788 EXPORT_SYMBOL(ath9k_hw_gettsf32);
3790 struct ath_gen_timer *ath_gen_timer_alloc(struct ath_hw *ah,
3791 void (*trigger)(void *),
3792 void (*overflow)(void *),
3796 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
3797 struct ath_gen_timer *timer;
3799 timer = kzalloc(sizeof(struct ath_gen_timer), GFP_KERNEL);
3801 if (timer == NULL) {
3802 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
3803 "Failed to allocate memory"
3804 "for hw timer[%d]\n", timer_index);
3808 /* allocate a hardware generic timer slot */
3809 timer_table->timers[timer_index] = timer;
3810 timer->index = timer_index;
3811 timer->trigger = trigger;
3812 timer->overflow = overflow;
3817 EXPORT_SYMBOL(ath_gen_timer_alloc);
3819 void ath9k_hw_gen_timer_start(struct ath_hw *ah,
3820 struct ath_gen_timer *timer,
3824 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
3827 BUG_ON(!timer_period);
3829 set_bit(timer->index, &timer_table->timer_mask.timer_bits);
3831 tsf = ath9k_hw_gettsf32(ah);
3833 ath_print(ath9k_hw_common(ah), ATH_DBG_HWTIMER,
3834 "curent tsf %x period %x"
3835 "timer_next %x\n", tsf, timer_period, timer_next);
3838 * Pull timer_next forward if the current TSF already passed it
3839 * because of software latency
3841 if (timer_next < tsf)
3842 timer_next = tsf + timer_period;
3845 * Program generic timer registers
3847 REG_WRITE(ah, gen_tmr_configuration[timer->index].next_addr,
3849 REG_WRITE(ah, gen_tmr_configuration[timer->index].period_addr,
3851 REG_SET_BIT(ah, gen_tmr_configuration[timer->index].mode_addr,
3852 gen_tmr_configuration[timer->index].mode_mask);
3854 /* Enable both trigger and thresh interrupt masks */
3855 REG_SET_BIT(ah, AR_IMR_S5,
3856 (SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_THRESH) |
3857 SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_TRIG)));
3859 EXPORT_SYMBOL(ath9k_hw_gen_timer_start);
3861 void ath9k_hw_gen_timer_stop(struct ath_hw *ah, struct ath_gen_timer *timer)
3863 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
3865 if ((timer->index < AR_FIRST_NDP_TIMER) ||
3866 (timer->index >= ATH_MAX_GEN_TIMER)) {
3870 /* Clear generic timer enable bits. */
3871 REG_CLR_BIT(ah, gen_tmr_configuration[timer->index].mode_addr,
3872 gen_tmr_configuration[timer->index].mode_mask);
3874 /* Disable both trigger and thresh interrupt masks */
3875 REG_CLR_BIT(ah, AR_IMR_S5,
3876 (SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_THRESH) |
3877 SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_TRIG)));
3879 clear_bit(timer->index, &timer_table->timer_mask.timer_bits);
3881 EXPORT_SYMBOL(ath9k_hw_gen_timer_stop);
3883 void ath_gen_timer_free(struct ath_hw *ah, struct ath_gen_timer *timer)
3885 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
3887 /* free the hardware generic timer slot */
3888 timer_table->timers[timer->index] = NULL;
3891 EXPORT_SYMBOL(ath_gen_timer_free);
/*
 * Generic Timer Interrupts handling
 */
3896 void ath_gen_timer_isr(struct ath_hw *ah)
3898 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
3899 struct ath_gen_timer *timer;
3900 struct ath_common *common = ath9k_hw_common(ah);
3901 u32 trigger_mask, thresh_mask, index;
3903 /* get hardware generic timer interrupt status */
3904 trigger_mask = ah->intr_gen_timer_trigger;
3905 thresh_mask = ah->intr_gen_timer_thresh;
3906 trigger_mask &= timer_table->timer_mask.val;
3907 thresh_mask &= timer_table->timer_mask.val;
3909 trigger_mask &= ~thresh_mask;
3911 while (thresh_mask) {
3912 index = rightmost_index(timer_table, &thresh_mask);
3913 timer = timer_table->timers[index];
3915 ath_print(common, ATH_DBG_HWTIMER,
3916 "TSF overflow for Gen timer %d\n", index);
3917 timer->overflow(timer->arg);
3920 while (trigger_mask) {
3921 index = rightmost_index(timer_table, &trigger_mask);
3922 timer = timer_table->timers[index];
3924 ath_print(common, ATH_DBG_HWTIMER,
3925 "Gen timer[%d] trigger\n", index);
3926 timer->trigger(timer->arg);
3929 EXPORT_SYMBOL(ath_gen_timer_isr);
3934 } ath_mac_bb_names[] = {
3935 /* Devices with external radios */
3936 { AR_SREV_VERSION_5416_PCI, "5416" },
3937 { AR_SREV_VERSION_5416_PCIE, "5418" },
3938 { AR_SREV_VERSION_9100, "9100" },
3939 { AR_SREV_VERSION_9160, "9160" },
3940 /* Single-chip solutions */
3941 { AR_SREV_VERSION_9280, "9280" },
3942 { AR_SREV_VERSION_9285, "9285" },
3943 { AR_SREV_VERSION_9287, "9287" },
3944 { AR_SREV_VERSION_9271, "9271" },
/* For devices with external radios */
3951 } ath_rf_names[] = {
3953 { AR_RAD5133_SREV_MAJOR, "5133" },
3954 { AR_RAD5122_SREV_MAJOR, "5122" },
3955 { AR_RAD2133_SREV_MAJOR, "2133" },
3956 { AR_RAD2122_SREV_MAJOR, "2122" }
/*
 * Return the MAC/BB name. "????" is returned if the MAC/BB is unknown.
 */
3962 static const char *ath9k_hw_mac_bb_name(u32 mac_bb_version)
3966 for (i=0; i<ARRAY_SIZE(ath_mac_bb_names); i++) {
3967 if (ath_mac_bb_names[i].version == mac_bb_version) {
3968 return ath_mac_bb_names[i].name;
/*
 * Return the RF name. "????" is returned if the RF is unknown.
 * Used for devices with external radios.
 */
3979 static const char *ath9k_hw_rf_name(u16 rf_version)
3983 for (i=0; i<ARRAY_SIZE(ath_rf_names); i++) {
3984 if (ath_rf_names[i].version == rf_version) {
3985 return ath_rf_names[i].name;
3992 void ath9k_hw_name(struct ath_hw *ah, char *hw_name, size_t len)
3996 /* chipsets >= AR9280 are single-chip */
3997 if (AR_SREV_9280_10_OR_LATER(ah)) {
3998 used = snprintf(hw_name, len,
3999 "Atheros AR%s Rev:%x",
4000 ath9k_hw_mac_bb_name(ah->hw_version.macVersion),
4001 ah->hw_version.macRev);
4004 used = snprintf(hw_name, len,
4005 "Atheros AR%s MAC/BB Rev:%x AR%s RF Rev:%x",
4006 ath9k_hw_mac_bb_name(ah->hw_version.macVersion),
4007 ah->hw_version.macRev,
4008 ath9k_hw_rf_name((ah->hw_version.analog5GhzRev &
4009 AR_RADIO_SREV_MAJOR)),
4010 ah->hw_version.phyRev);
4013 hw_name[used] = '\0';
4015 EXPORT_SYMBOL(ath9k_hw_name);