2 * Copyright (c) 2008-2009 Atheros Communications Inc.
4 * Permission to use, copy, modify, and/or distribute this software for any
5 * purpose with or without fee is hereby granted, provided that the above
6 * copyright notice and this permission notice appear in all copies.
8 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
9 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
10 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
11 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
12 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
13 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
14 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
18 #include <asm/unaligned.h>
24 #define ATH9K_CLOCK_RATE_CCK 22
25 #define ATH9K_CLOCK_RATE_5GHZ_OFDM 40
26 #define ATH9K_CLOCK_RATE_2GHZ_OFDM 44
28 static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type);
29 static void ath9k_hw_set_regs(struct ath_hw *ah, struct ath9k_channel *chan);
30 static u32 ath9k_hw_ini_fixup(struct ath_hw *ah,
31 struct ar5416_eeprom_def *pEepData,
33 static void ath9k_hw_9280_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan);
34 static void ath9k_hw_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan);
36 MODULE_AUTHOR("Atheros Communications");
37 MODULE_DESCRIPTION("Support for Atheros 802.11n wireless LAN cards.");
38 MODULE_SUPPORTED_DEVICE("Atheros 802.11n WLAN cards");
39 MODULE_LICENSE("Dual BSD/GPL");
/* Module load hook: nothing to do here; devices attach via bus probe. */
static int __init ath9k_init(void)
module_init(ath9k_init);
/* Module unload hook: likewise nothing to tear down at module scope. */
static void __exit ath9k_exit(void)
module_exit(ath9k_exit);
53 /********************/
54 /* Helper Functions */
55 /********************/
57 static u32 ath9k_hw_mac_usec(struct ath_hw *ah, u32 clks)
59 struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;
61 if (!ah->curchan) /* should really check for CCK instead */
62 return clks / ATH9K_CLOCK_RATE_CCK;
63 if (conf->channel->band == IEEE80211_BAND_2GHZ)
64 return clks / ATH9K_CLOCK_RATE_2GHZ_OFDM;
66 return clks / ATH9K_CLOCK_RATE_5GHZ_OFDM;
69 static u32 ath9k_hw_mac_to_usec(struct ath_hw *ah, u32 clks)
71 struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;
73 if (conf_is_ht40(conf))
74 return ath9k_hw_mac_usec(ah, clks) / 2;
76 return ath9k_hw_mac_usec(ah, clks);
79 static u32 ath9k_hw_mac_clks(struct ath_hw *ah, u32 usecs)
81 struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;
83 if (!ah->curchan) /* should really check for CCK instead */
84 return usecs *ATH9K_CLOCK_RATE_CCK;
85 if (conf->channel->band == IEEE80211_BAND_2GHZ)
86 return usecs *ATH9K_CLOCK_RATE_2GHZ_OFDM;
87 return usecs *ATH9K_CLOCK_RATE_5GHZ_OFDM;
90 static u32 ath9k_hw_mac_to_clks(struct ath_hw *ah, u32 usecs)
92 struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;
94 if (conf_is_ht40(conf))
95 return ath9k_hw_mac_clks(ah, usecs) * 2;
97 return ath9k_hw_mac_clks(ah, usecs);
100 bool ath9k_hw_wait(struct ath_hw *ah, u32 reg, u32 mask, u32 val, u32 timeout)
104 BUG_ON(timeout < AH_TIME_QUANTUM);
106 for (i = 0; i < (timeout / AH_TIME_QUANTUM); i++) {
107 if ((REG_READ(ah, reg) & mask) == val)
110 udelay(AH_TIME_QUANTUM);
113 ath_print(ath9k_hw_common(ah), ATH_DBG_ANY,
114 "timeout (%d us) on reg 0x%x: 0x%08x & 0x%08x != 0x%08x\n",
115 timeout, reg, REG_READ(ah, reg), mask, val);
119 EXPORT_SYMBOL(ath9k_hw_wait);
121 u32 ath9k_hw_reverse_bits(u32 val, u32 n)
126 for (i = 0, retval = 0; i < n; i++) {
127 retval = (retval << 1) | (val & 1);
/*
 * Report the tuning limits the hardware supports for the band selected
 * by the flags argument, returning the low/high channel bounds from the
 * cached capability structure.
 */
bool ath9k_get_channel_edges(struct ath_hw *ah,
	struct ath9k_hw_capabilities *pCap = &ah->caps;

	/* 5 GHz band limits */
	if (flags & CHANNEL_5GHZ) {
		*low = pCap->low_5ghz_chan;
		*high = pCap->high_5ghz_chan;
	/* 2.4 GHz band limits */
	if ((flags & CHANNEL_2GHZ)) {
		*low = pCap->low_2ghz_chan;
		*high = pCap->high_2ghz_chan;
/*
 * Compute the air time (in microseconds, including SIFS and preamble)
 * of a frame of @frameLen bytes sent at rate table index @rateix.
 * OFDM timing is adjusted for half- and quarter-rate channels.
 */
u16 ath9k_hw_computetxtime(struct ath_hw *ah,
			   const struct ath_rate_table *rates,
			   u32 frameLen, u16 rateix,
	u32 bitsPerSymbol, numBits, numSymbols, phyTime, txTime;

	kbps = rates->info[rateix].ratekbps;

	switch (rates->info[rateix].phy) {
	case WLAN_RC_PHY_CCK:
		phyTime = CCK_PREAMBLE_BITS + CCK_PLCP_BITS;
		/* short preamble only when the rate supports it */
		if (shortPreamble && rates->info[rateix].short_preamble)
		numBits = frameLen << 3;
		txTime = CCK_SIFS_TIME + phyTime + ((numBits * 1000) / kbps);
	case WLAN_RC_PHY_OFDM:
		if (ah->curchan && IS_CHAN_QUARTER_RATE(ah->curchan)) {
			bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME_QUARTER) / 1000;
			numBits = OFDM_PLCP_BITS + (frameLen << 3);
			numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
			txTime = OFDM_SIFS_TIME_QUARTER
				+ OFDM_PREAMBLE_TIME_QUARTER
				+ (numSymbols * OFDM_SYMBOL_TIME_QUARTER);
		} else if (ah->curchan &&
			   IS_CHAN_HALF_RATE(ah->curchan)) {
			bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME_HALF) / 1000;
			numBits = OFDM_PLCP_BITS + (frameLen << 3);
			numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
			txTime = OFDM_SIFS_TIME_HALF +
				OFDM_PREAMBLE_TIME_HALF
				+ (numSymbols * OFDM_SYMBOL_TIME_HALF);
			/* full-rate OFDM */
			bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME) / 1000;
			numBits = OFDM_PLCP_BITS + (frameLen << 3);
			numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
			txTime = OFDM_SIFS_TIME + OFDM_PREAMBLE_TIME
				+ (numSymbols * OFDM_SYMBOL_TIME);
		ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
			  "Unknown phy %u (rate ix %u)\n",
			  rates->info[rateix].phy, rateix);
EXPORT_SYMBOL(ath9k_hw_computetxtime);
/*
 * Fill @centers with the control, extension and synthesizer channel
 * centers for @chan.  For non-HT40 channels all three coincide with
 * the channel frequency; for HT40 the synthesizer sits
 * HT40_CHANNEL_CENTER_SHIFT MHz above (PLUS) or below (MINUS) the
 * control channel, with the extension channel on the opposite side.
 */
void ath9k_hw_get_channel_centers(struct ath_hw *ah,
				  struct ath9k_channel *chan,
				  struct chan_centers *centers)
	if (!IS_CHAN_HT40(chan)) {
		centers->ctl_center = centers->ext_center =
			centers->synth_center = chan->channel;
	if ((chan->chanmode == CHANNEL_A_HT40PLUS) ||
	    (chan->chanmode == CHANNEL_G_HT40PLUS)) {
		centers->synth_center =
			chan->channel + HT40_CHANNEL_CENTER_SHIFT;
		centers->synth_center =
			chan->channel - HT40_CHANNEL_CENTER_SHIFT;

	centers->ctl_center =
		centers->synth_center - (extoff * HT40_CHANNEL_CENTER_SHIFT);
	/* 25 MHz spacing is supported by hw but not on upper layers */
	centers->ext_center =
		centers->synth_center + (extoff * HT40_CHANNEL_CENTER_SHIFT);
/*
 * Decode the MAC version/revision from the AR_SREV register and note
 * whether the part is PCI-Express attached.  Newer silicon uses the
 * extended SREV field layout (VERSION2/REVISION2).
 */
static void ath9k_hw_read_revisions(struct ath_hw *ah)
	val = REG_READ(ah, AR_SREV) & AR_SREV_ID;

	/* extended SREV layout */
	val = REG_READ(ah, AR_SREV);
	ah->hw_version.macVersion =
		(val & AR_SREV_VERSION2) >> AR_SREV_TYPE2_S;
	ah->hw_version.macRev = MS(val, AR_SREV_REVISION2);
	ah->is_pciexpress = (val & AR_SREV_TYPE2_HOST_MODE) ? 0 : 1;

	/* legacy SREV layout */
	if (!AR_SREV_9100(ah))
		ah->hw_version.macVersion = MS(val, AR_SREV_VERSION);
	ah->hw_version.macRev = val & AR_SREV_REVISION;

	if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCIE)
		ah->is_pciexpress = true;
/*
 * Read the analog radio revision: latch it via a sequence of PHY
 * register writes, then swap the nibbles and bit-reverse the byte
 * the hardware returns.
 */
static int ath9k_hw_get_radiorev(struct ath_hw *ah)
	REG_WRITE(ah, AR_PHY(0x36), 0x00007058);

	for (i = 0; i < 8; i++)
		REG_WRITE(ah, AR_PHY(0x20), 0x00010000);
	val = (REG_READ(ah, AR_PHY(256)) >> 24) & 0xff;
	/* swap nibbles before the final bit reversal */
	val = ((val & 0xf0) >> 4) | ((val & 0x0f) << 4);

	return ath9k_hw_reverse_bits(val, 8);
281 /************************************/
282 /* HW Attach, Detach, Init Routines */
283 /************************************/
/*
 * Program the PCI-E SERDES with the "powersave disabled" magic
 * sequence.  Not applicable to the AHB-attached AR9100, which bails
 * out early.
 */
static void ath9k_hw_disablepcie(struct ath_hw *ah)
	if (AR_SREV_9100(ah))

	REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fc00);
	REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
	REG_WRITE(ah, AR_PCIE_SERDES, 0x28000029);
	REG_WRITE(ah, AR_PCIE_SERDES, 0x57160824);
	REG_WRITE(ah, AR_PCIE_SERDES, 0x25980579);
	REG_WRITE(ah, AR_PCIE_SERDES, 0x00000000);
	REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
	REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
	REG_WRITE(ah, AR_PCIE_SERDES, 0x000e1007);

	/* latch the sequence */
	REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
/*
 * Confidence test of register accessibility: walk two scratch
 * registers with a counting pattern and with fixed patterns, verifying
 * every readback, then restore the registers' original contents.
 */
static bool ath9k_hw_chip_test(struct ath_hw *ah)
	struct ath_common *common = ath9k_hw_common(ah);
	u32 regAddr[2] = { AR_STA_ID0, AR_PHY_BASE + (8 << 2) };
	u32 patternData[4] = { 0x55555555,

	for (i = 0; i < 2; i++) {
		u32 addr = regAddr[i];

		regHold[i] = REG_READ(ah, addr);
		/* counting pattern: value j mirrored into both halves */
		for (j = 0; j < 0x100; j++) {
			wrData = (j << 16) | j;
			REG_WRITE(ah, addr, wrData);
			rdData = REG_READ(ah, addr);
			if (rdData != wrData) {
				ath_print(common, ATH_DBG_FATAL,
					  "address test failed "
					  "addr: 0x%08x - wr:0x%08x != "
					  addr, wrData, rdData);
		/* fixed bit patterns */
		for (j = 0; j < 4; j++) {
			wrData = patternData[j];
			REG_WRITE(ah, addr, wrData);
			rdData = REG_READ(ah, addr);
			if (wrData != rdData) {
				ath_print(common, ATH_DBG_FATAL,
					  "address test failed "
					  "addr: 0x%08x - wr:0x%08x != "
					  addr, wrData, rdData);
		/* restore the original register value */
		REG_WRITE(ah, regAddr[i], regHold[i]);
352 static const char *ath9k_hw_devname(u16 devid)
355 case AR5416_DEVID_PCI:
356 return "Atheros 5416";
357 case AR5416_DEVID_PCIE:
358 return "Atheros 5418";
359 case AR9160_DEVID_PCI:
360 return "Atheros 9160";
361 case AR5416_AR9100_DEVID:
362 return "Atheros 9100";
363 case AR9280_DEVID_PCI:
364 case AR9280_DEVID_PCIE:
365 return "Atheros 9280";
366 case AR9285_DEVID_PCIE:
367 return "Atheros 9285";
368 case AR5416_DEVID_AR9287_PCI:
369 case AR5416_DEVID_AR9287_PCIE:
370 return "Atheros 9287";
/*
 * Establish the driver's default configuration knobs, applied before
 * EEPROM contents and capabilities are read.
 */
static void ath9k_hw_init_config(struct ath_hw *ah)
	ah->config.dma_beacon_response_time = 2;
	ah->config.sw_beacon_response_time = 10;
	ah->config.additional_swba_backoff = 0;
	ah->config.ack_6mb = 0x0;
	ah->config.cwm_ignore_extcca = 0;
	ah->config.pcie_powersave_enable = 0;
	ah->config.pcie_clock_req = 0;
	ah->config.pcie_waen = 0;
	ah->config.analog_shiftreg = 1;
	ah->config.ht_enable = 1;
	ah->config.ofdm_trig_low = 200;
	ah->config.ofdm_trig_high = 500;
	ah->config.cck_trig_high = 200;
	ah->config.cck_trig_low = 100;
	ah->config.enable_ani = 1;
	ah->config.diversity_control = ATH9K_ANT_VARIABLE;
	ah->config.antenna_switch_swap = 0;

	/* no spur channels configured by default */
	for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
		ah->config.spurchans[i][0] = AR_NO_SPUR;
		ah->config.spurchans[i][1] = AR_NO_SPUR;

	ah->config.intr_mitigation = true;

	/*
	 * We need this for PCI devices only (Cardbus, PCI, miniPCI)
	 * _and_ if on non-uniprocessor systems (Multiprocessor/HT).
	 * This means we use it for all AR5416 devices, and the few
	 * minor PCI AR9280 devices out there.
	 *
	 * Serialization is required because these devices do not handle
	 * well the case of two concurrent reads/writes due to the latency
	 * involved. During one read/write another read/write can be issued
	 * on another CPU while the previous read/write may still be working
	 * on our hardware, if we hit this case the hardware poops in a loop.
	 * We prevent this by serializing reads and writes.
	 *
	 * This issue is not present on PCI-Express devices or pre-AR5416
	 * devices (legacy, 802.11abg).
	 */
	if (num_possible_cpus() > 1)
		ah->config.serialize_regmode = SER_REG_MODE_AUTO;
/* NOTE(review): exports ath9k_hw_init(), which is defined further below. */
EXPORT_SYMBOL(ath9k_hw_init);
/* Reset per-device software state to sane defaults before hw probing. */
static void ath9k_hw_init_defaults(struct ath_hw *ah)
	struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);

	regulatory->country_code = CTRY_DEFAULT;
	regulatory->power_limit = MAX_RATE_POWER;
	regulatory->tp_scale = ATH9K_TP_SCALE_MAX;

	ah->hw_version.magic = AR5416_MAGIC;
	ah->hw_version.subvendorid = 0;

	if (ah->hw_version.devid == AR5416_AR9100_DEVID)
		ah->hw_version.macVersion = AR_SREV_VERSION_9100;
	/* all but AR9100 read calibration data from an EEPROM */
	if (!AR_SREV_9100(ah))
		ah->ah_flags = AH_USE_EEPROM;

	ah->sta_id1_defaults = AR_STA_ID1_CRPT_MIC_ENABLE;
	ah->beacon_interval = 100;
	ah->enable_32kHz_clock = DONT_USE_32KHZ;
	/* (u32) -1 marks the timeout fields as "not yet configured" */
	ah->slottime = (u32) -1;
	ah->acktimeout = (u32) -1;
	ah->ctstimeout = (u32) -1;
	ah->globaltxtimeout = (u32) -1;

	ah->gbeacon_rate = 0;

	ah->power_mode = ATH9K_PM_UNDEFINED;
/*
 * Identify the analog radio revision, verify it is one this driver
 * supports, and record it in hw_version.analog5GhzRev.
 */
static int ath9k_hw_rf_claim(struct ath_hw *ah)
	REG_WRITE(ah, AR_PHY(0), 0x00000007);

	val = ath9k_hw_get_radiorev(ah);
	switch (val & AR_RADIO_SREV_MAJOR) {
		/* legacy parts reporting no major rev: treat as 5133 */
		val = AR_RAD5133_SREV_MAJOR;
	case AR_RAD5133_SREV_MAJOR:
	case AR_RAD5122_SREV_MAJOR:
	case AR_RAD2133_SREV_MAJOR:
	case AR_RAD2122_SREV_MAJOR:
		ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
			  "Radio Chip Rev 0x%02X not supported\n",
			  val & AR_RADIO_SREV_MAJOR);

	ah->hw_version.analog5GhzRev = val;
/*
 * Read the MAC address from EEPROM, one 16-bit word at a time, into
 * common->macaddr.  Returns -EADDRNOTAVAIL when the EEPROM words are
 * all-zero or all-ones (no valid address programmed).
 */
static int ath9k_hw_init_macaddr(struct ath_hw *ah)
	struct ath_common *common = ath9k_hw_common(ah);

	for (i = 0; i < 3; i++) {
		eeval = ah->eep_ops->get_eeprom(ah, AR_EEPROM_MAC(i));
		/* high byte first, then low byte */
		common->macaddr[2 * i] = eeval >> 8;
		common->macaddr[2 * i + 1] = eeval & 0xff;
	if (sum == 0 || sum == 0xffff * 3)
		return -EADDRNOTAVAIL;
/*
 * Select the AR9280 2.0 RX gain initval table from the EEPROM RX gain
 * type (available from minor rev 17 on); older EEPROMs get the
 * original table.
 */
static void ath9k_hw_init_rxgain_ini(struct ath_hw *ah)
	if (ah->eep_ops->get_eeprom(ah, EEP_MINOR_REV) >= AR5416_EEP_MINOR_VER_17) {
		rxgain_type = ah->eep_ops->get_eeprom(ah, EEP_RXGAIN_TYPE);

		if (rxgain_type == AR5416_EEP_RXGAIN_13DB_BACKOFF)
			INIT_INI_ARRAY(&ah->iniModesRxGain,
				       ar9280Modes_backoff_13db_rxgain_9280_2,
				       ARRAY_SIZE(ar9280Modes_backoff_13db_rxgain_9280_2), 6);
		else if (rxgain_type == AR5416_EEP_RXGAIN_23DB_BACKOFF)
			INIT_INI_ARRAY(&ah->iniModesRxGain,
				       ar9280Modes_backoff_23db_rxgain_9280_2,
				       ARRAY_SIZE(ar9280Modes_backoff_23db_rxgain_9280_2), 6);
			/* unknown gain type: fall back to original table */
			INIT_INI_ARRAY(&ah->iniModesRxGain,
				       ar9280Modes_original_rxgain_9280_2,
				       ARRAY_SIZE(ar9280Modes_original_rxgain_9280_2), 6);
		/* pre-v17 EEPROM: original table */
		INIT_INI_ARRAY(&ah->iniModesRxGain,
			       ar9280Modes_original_rxgain_9280_2,
			       ARRAY_SIZE(ar9280Modes_original_rxgain_9280_2), 6);
/*
 * Select the AR9280 2.0 TX gain initval table from the EEPROM TX gain
 * type (available from minor rev 19 on); older EEPROMs get the
 * original table.
 */
static void ath9k_hw_init_txgain_ini(struct ath_hw *ah)
	if (ah->eep_ops->get_eeprom(ah, EEP_MINOR_REV) >= AR5416_EEP_MINOR_VER_19) {
		txgain_type = ah->eep_ops->get_eeprom(ah, EEP_TXGAIN_TYPE);

		if (txgain_type == AR5416_EEP_TXGAIN_HIGH_POWER)
			INIT_INI_ARRAY(&ah->iniModesTxGain,
				       ar9280Modes_high_power_tx_gain_9280_2,
				       ARRAY_SIZE(ar9280Modes_high_power_tx_gain_9280_2), 6);
			/* default gain type: original table */
			INIT_INI_ARRAY(&ah->iniModesTxGain,
				       ar9280Modes_original_tx_gain_9280_2,
				       ARRAY_SIZE(ar9280Modes_original_tx_gain_9280_2), 6);
		/* pre-v19 EEPROM: original table */
		INIT_INI_ARRAY(&ah->iniModesTxGain,
			       ar9280Modes_original_tx_gain_9280_2,
			       ARRAY_SIZE(ar9280Modes_original_tx_gain_9280_2), 6);
/*
 * Late initialization: sanity-test register access, claim the radio,
 * bring up the EEPROM layer, allocate external analog banks on
 * pre-AR9280 parts and set up ANI (except on AR9100).
 */
static int ath9k_hw_post_init(struct ath_hw *ah)
	if (!ath9k_hw_chip_test(ah))

	ecode = ath9k_hw_rf_claim(ah);

	ecode = ath9k_hw_eeprom_init(ah);

	ath_print(ath9k_hw_common(ah), ATH_DBG_CONFIG,
		  "Eeprom VER: %d, REV: %d\n",
		  ah->eep_ops->get_eeprom_ver(ah),
		  ah->eep_ops->get_eeprom_rev(ah));

	/* single-chip (AR9280+) parts have no external analog banks */
	if (!AR_SREV_9280_10_OR_LATER(ah)) {
		ecode = ath9k_hw_rf_alloc_ext_banks(ah);
			ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
				  "Failed allocating banks for "

	if (!AR_SREV_9100(ah)) {
		ath9k_hw_ani_setup(ah);
		ath9k_hw_ani_init(ah);
/* True when @devid is one of the PCI/PCIe device IDs this driver handles. */
static bool ath9k_hw_devid_supported(u16 devid)
	case AR5416_DEVID_PCI:
	case AR5416_DEVID_PCIE:
	case AR5416_AR9100_DEVID:
	case AR9160_DEVID_PCI:
	case AR9280_DEVID_PCI:
	case AR9280_DEVID_PCIE:
	case AR9285_DEVID_PCIE:
	case AR5416_DEVID_AR9287_PCI:
	case AR5416_DEVID_AR9287_PCIE:
/* True when @macversion is a MAC silicon revision this driver supports. */
static bool ath9k_hw_macversion_supported(u32 macversion)
	switch (macversion) {
	case AR_SREV_VERSION_5416_PCI:
	case AR_SREV_VERSION_5416_PCIE:
	case AR_SREV_VERSION_9160:
	case AR_SREV_VERSION_9100:
	case AR_SREV_VERSION_9280:
	case AR_SREV_VERSION_9285:
	case AR_SREV_VERSION_9287:
	case AR_SREV_VERSION_9271:
/*
 * Pick the calibration data sets: AR9280 and later can calibrate from
 * a single sample, older AR9160-class parts use multi-sample
 * averaging.  Also records which calibrations the part supports.
 */
static void ath9k_hw_init_cal_settings(struct ath_hw *ah)
	if (AR_SREV_9160_10_OR_LATER(ah)) {
		if (AR_SREV_9280_10_OR_LATER(ah)) {
			ah->iq_caldata.calData = &iq_cal_single_sample;
			ah->adcgain_caldata.calData =
				&adc_gain_cal_single_sample;
			ah->adcdc_caldata.calData =
				&adc_dc_cal_single_sample;
			ah->adcdc_calinitdata.calData =
			/* pre-AR9280: multi-sample calibration */
			ah->iq_caldata.calData = &iq_cal_multi_sample;
			ah->adcgain_caldata.calData =
				&adc_gain_cal_multi_sample;
			ah->adcdc_caldata.calData =
				&adc_dc_cal_multi_sample;
			ah->adcdc_calinitdata.calData =
		ah->supp_cals = ADC_GAIN_CAL | ADC_DC_CAL | IQ_MISMATCH_CAL;
/*
 * Register the per-chip initvalue tables (mode-dependent, common and,
 * where applicable, PCIe SERDES and RF bank arrays) matching the
 * detected MAC revision.  Branches are ordered newest chip first.
 */
static void ath9k_hw_init_mode_regs(struct ath_hw *ah)
	/* AR9271 */
	if (AR_SREV_9271(ah)) {
		INIT_INI_ARRAY(&ah->iniModes, ar9271Modes_9271,
			       ARRAY_SIZE(ar9271Modes_9271), 6);
		INIT_INI_ARRAY(&ah->iniCommon, ar9271Common_9271,
			       ARRAY_SIZE(ar9271Common_9271), 2);
		INIT_INI_ARRAY(&ah->iniModes_9271_1_0_only,
			       ar9271Modes_9271_1_0_only,
			       ARRAY_SIZE(ar9271Modes_9271_1_0_only), 6);

	/* AR9287 1.1 and later */
	if (AR_SREV_9287_11_OR_LATER(ah)) {
		INIT_INI_ARRAY(&ah->iniModes, ar9287Modes_9287_1_1,
			       ARRAY_SIZE(ar9287Modes_9287_1_1), 6);
		INIT_INI_ARRAY(&ah->iniCommon, ar9287Common_9287_1_1,
			       ARRAY_SIZE(ar9287Common_9287_1_1), 2);
		if (ah->config.pcie_clock_req)
			INIT_INI_ARRAY(&ah->iniPcieSerdes,
				       ar9287PciePhy_clkreq_off_L1_9287_1_1,
				       ARRAY_SIZE(ar9287PciePhy_clkreq_off_L1_9287_1_1), 2);
			INIT_INI_ARRAY(&ah->iniPcieSerdes,
				       ar9287PciePhy_clkreq_always_on_L1_9287_1_1,
				       ARRAY_SIZE(ar9287PciePhy_clkreq_always_on_L1_9287_1_1),
	} else if (AR_SREV_9287_10_OR_LATER(ah)) {
		/* AR9287 1.0 */
		INIT_INI_ARRAY(&ah->iniModes, ar9287Modes_9287_1_0,
			       ARRAY_SIZE(ar9287Modes_9287_1_0), 6);
		INIT_INI_ARRAY(&ah->iniCommon, ar9287Common_9287_1_0,
			       ARRAY_SIZE(ar9287Common_9287_1_0), 2);

		if (ah->config.pcie_clock_req)
			INIT_INI_ARRAY(&ah->iniPcieSerdes,
				       ar9287PciePhy_clkreq_off_L1_9287_1_0,
				       ARRAY_SIZE(ar9287PciePhy_clkreq_off_L1_9287_1_0), 2);
			INIT_INI_ARRAY(&ah->iniPcieSerdes,
				       ar9287PciePhy_clkreq_always_on_L1_9287_1_0,
				       ARRAY_SIZE(ar9287PciePhy_clkreq_always_on_L1_9287_1_0),
	} else if (AR_SREV_9285_12_OR_LATER(ah)) {
		/* AR9285 1.2 and later */
		INIT_INI_ARRAY(&ah->iniModes, ar9285Modes_9285_1_2,
			       ARRAY_SIZE(ar9285Modes_9285_1_2), 6);
		INIT_INI_ARRAY(&ah->iniCommon, ar9285Common_9285_1_2,
			       ARRAY_SIZE(ar9285Common_9285_1_2), 2);

		if (ah->config.pcie_clock_req) {
			INIT_INI_ARRAY(&ah->iniPcieSerdes,
				       ar9285PciePhy_clkreq_off_L1_9285_1_2,
				       ARRAY_SIZE(ar9285PciePhy_clkreq_off_L1_9285_1_2), 2);
			INIT_INI_ARRAY(&ah->iniPcieSerdes,
				       ar9285PciePhy_clkreq_always_on_L1_9285_1_2,
				       ARRAY_SIZE(ar9285PciePhy_clkreq_always_on_L1_9285_1_2),
	} else if (AR_SREV_9285_10_OR_LATER(ah)) {
		/* AR9285 1.0/1.1 */
		INIT_INI_ARRAY(&ah->iniModes, ar9285Modes_9285,
			       ARRAY_SIZE(ar9285Modes_9285), 6);
		INIT_INI_ARRAY(&ah->iniCommon, ar9285Common_9285,
			       ARRAY_SIZE(ar9285Common_9285), 2);

		if (ah->config.pcie_clock_req) {
			INIT_INI_ARRAY(&ah->iniPcieSerdes,
				       ar9285PciePhy_clkreq_off_L1_9285,
				       ARRAY_SIZE(ar9285PciePhy_clkreq_off_L1_9285), 2);
			INIT_INI_ARRAY(&ah->iniPcieSerdes,
				       ar9285PciePhy_clkreq_always_on_L1_9285,
				       ARRAY_SIZE(ar9285PciePhy_clkreq_always_on_L1_9285), 2);
	} else if (AR_SREV_9280_20_OR_LATER(ah)) {
		/* AR9280 2.0, with extra fast-clock tables */
		INIT_INI_ARRAY(&ah->iniModes, ar9280Modes_9280_2,
			       ARRAY_SIZE(ar9280Modes_9280_2), 6);
		INIT_INI_ARRAY(&ah->iniCommon, ar9280Common_9280_2,
			       ARRAY_SIZE(ar9280Common_9280_2), 2);

		if (ah->config.pcie_clock_req) {
			INIT_INI_ARRAY(&ah->iniPcieSerdes,
				       ar9280PciePhy_clkreq_off_L1_9280,
				       ARRAY_SIZE(ar9280PciePhy_clkreq_off_L1_9280),2);
			INIT_INI_ARRAY(&ah->iniPcieSerdes,
				       ar9280PciePhy_clkreq_always_on_L1_9280,
				       ARRAY_SIZE(ar9280PciePhy_clkreq_always_on_L1_9280), 2);
		INIT_INI_ARRAY(&ah->iniModesAdditional,
			       ar9280Modes_fast_clock_9280_2,
			       ARRAY_SIZE(ar9280Modes_fast_clock_9280_2), 3);
	} else if (AR_SREV_9280_10_OR_LATER(ah)) {
		/* AR9280 1.0 */
		INIT_INI_ARRAY(&ah->iniModes, ar9280Modes_9280,
			       ARRAY_SIZE(ar9280Modes_9280), 6);
		INIT_INI_ARRAY(&ah->iniCommon, ar9280Common_9280,
			       ARRAY_SIZE(ar9280Common_9280), 2);
	} else if (AR_SREV_9160_10_OR_LATER(ah)) {
		/* AR9160: includes external RF bank tables */
		INIT_INI_ARRAY(&ah->iniModes, ar5416Modes_9160,
			       ARRAY_SIZE(ar5416Modes_9160), 6);
		INIT_INI_ARRAY(&ah->iniCommon, ar5416Common_9160,
			       ARRAY_SIZE(ar5416Common_9160), 2);
		INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0_9160,
			       ARRAY_SIZE(ar5416Bank0_9160), 2);
		INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain_9160,
			       ARRAY_SIZE(ar5416BB_RfGain_9160), 3);
		INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1_9160,
			       ARRAY_SIZE(ar5416Bank1_9160), 2);
		INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2_9160,
			       ARRAY_SIZE(ar5416Bank2_9160), 2);
		INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3_9160,
			       ARRAY_SIZE(ar5416Bank3_9160), 3);
		INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6_9160,
			       ARRAY_SIZE(ar5416Bank6_9160), 3);
		INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC_9160,
			       ARRAY_SIZE(ar5416Bank6TPC_9160), 3);
		INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7_9160,
			       ARRAY_SIZE(ar5416Bank7_9160), 2);
		if (AR_SREV_9160_11(ah)) {
			INIT_INI_ARRAY(&ah->iniAddac,
				       ARRAY_SIZE(ar5416Addac_91601_1), 2);
			INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac_9160,
				       ARRAY_SIZE(ar5416Addac_9160), 2);
	} else if (AR_SREV_9100_OR_LATER(ah)) {
		/* AR9100 */
		INIT_INI_ARRAY(&ah->iniModes, ar5416Modes_9100,
			       ARRAY_SIZE(ar5416Modes_9100), 6);
		INIT_INI_ARRAY(&ah->iniCommon, ar5416Common_9100,
			       ARRAY_SIZE(ar5416Common_9100), 2);
		INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0_9100,
			       ARRAY_SIZE(ar5416Bank0_9100), 2);
		INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain_9100,
			       ARRAY_SIZE(ar5416BB_RfGain_9100), 3);
		INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1_9100,
			       ARRAY_SIZE(ar5416Bank1_9100), 2);
		INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2_9100,
			       ARRAY_SIZE(ar5416Bank2_9100), 2);
		INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3_9100,
			       ARRAY_SIZE(ar5416Bank3_9100), 3);
		INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6_9100,
			       ARRAY_SIZE(ar5416Bank6_9100), 3);
		INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC_9100,
			       ARRAY_SIZE(ar5416Bank6TPC_9100), 3);
		INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7_9100,
			       ARRAY_SIZE(ar5416Bank7_9100), 2);
		INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac_9100,
			       ARRAY_SIZE(ar5416Addac_9100), 2);
		/* baseline AR5416 */
		INIT_INI_ARRAY(&ah->iniModes, ar5416Modes,
			       ARRAY_SIZE(ar5416Modes), 6);
		INIT_INI_ARRAY(&ah->iniCommon, ar5416Common,
			       ARRAY_SIZE(ar5416Common), 2);
		INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0,
			       ARRAY_SIZE(ar5416Bank0), 2);
		INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain,
			       ARRAY_SIZE(ar5416BB_RfGain), 3);
		INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1,
			       ARRAY_SIZE(ar5416Bank1), 2);
		INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2,
			       ARRAY_SIZE(ar5416Bank2), 2);
		INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3,
			       ARRAY_SIZE(ar5416Bank3), 3);
		INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6,
			       ARRAY_SIZE(ar5416Bank6), 3);
		INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC,
			       ARRAY_SIZE(ar5416Bank6TPC), 3);
		INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7,
			       ARRAY_SIZE(ar5416Bank7), 2);
		INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac,
			       ARRAY_SIZE(ar5416Addac), 2);
/*
 * Register the RX and TX gain initvalue tables.  AR9287 parts use
 * fixed tables per revision; AR9280 2.0 and AR9285 1.2+ select a
 * table based on the EEPROM gain type.
 */
static void ath9k_hw_init_mode_gain_regs(struct ath_hw *ah)
	if (AR_SREV_9287_11_OR_LATER(ah))
		INIT_INI_ARRAY(&ah->iniModesRxGain,
			       ar9287Modes_rx_gain_9287_1_1,
			       ARRAY_SIZE(ar9287Modes_rx_gain_9287_1_1), 6);
	else if (AR_SREV_9287_10(ah))
		INIT_INI_ARRAY(&ah->iniModesRxGain,
			       ar9287Modes_rx_gain_9287_1_0,
			       ARRAY_SIZE(ar9287Modes_rx_gain_9287_1_0), 6);
	else if (AR_SREV_9280_20(ah))
		ath9k_hw_init_rxgain_ini(ah);

	if (AR_SREV_9287_11_OR_LATER(ah)) {
		INIT_INI_ARRAY(&ah->iniModesTxGain,
			       ar9287Modes_tx_gain_9287_1_1,
			       ARRAY_SIZE(ar9287Modes_tx_gain_9287_1_1), 6);
	} else if (AR_SREV_9287_10(ah)) {
		INIT_INI_ARRAY(&ah->iniModesTxGain,
			       ar9287Modes_tx_gain_9287_1_0,
			       ARRAY_SIZE(ar9287Modes_tx_gain_9287_1_0), 6);
	} else if (AR_SREV_9280_20(ah)) {
		ath9k_hw_init_txgain_ini(ah);
	} else if (AR_SREV_9285_12_OR_LATER(ah)) {
		u32 txgain_type = ah->eep_ops->get_eeprom(ah, EEP_TXGAIN_TYPE);

		if (txgain_type == AR5416_EEP_TXGAIN_HIGH_POWER) {
			INIT_INI_ARRAY(&ah->iniModesTxGain,
				       ar9285Modes_high_power_tx_gain_9285_1_2,
				       ARRAY_SIZE(ar9285Modes_high_power_tx_gain_9285_1_2), 6);
			INIT_INI_ARRAY(&ah->iniModesTxGain,
				       ar9285Modes_original_tx_gain_9285_1_2,
				       ARRAY_SIZE(ar9285Modes_original_tx_gain_9285_1_2), 6);
/*
 * AR9280 PCI cards with 11a support need selected mode initvals
 * patched: walk the whole mode table and run each value through
 * ath9k_hw_ini_fixup().
 */
static void ath9k_hw_init_11a_eeprom_fix(struct ath_hw *ah)
	if ((ah->hw_version.devid == AR9280_DEVID_PCI) &&
	    test_bit(ATH9K_MODE_11A, ah->caps.wireless_modes)) {

		for (i = 0; i < ah->iniModes.ia_rows; i++) {
			u32 reg = INI_RA(&ah->iniModes, i, 0);

			/* column 0 is the register address; patch the rest */
			for (j = 1; j < ah->iniModes.ia_columns; j++) {
				u32 val = INI_RA(&ah->iniModes, i, j);

				INI_RA(&ah->iniModes, i, j) =
					ath9k_hw_ini_fixup(ah,
/*
 * Main device initialization: validate the device and MAC IDs, perform
 * a power-on reset, choose the register serialization mode, load the
 * per-chip initvals and calibration settings, read the MAC address and
 * fill the capability structure.  Returns 0 on success or a negative
 * errno.
 */
int ath9k_hw_init(struct ath_hw *ah)
	struct ath_common *common = ath9k_hw_common(ah);

	if (!ath9k_hw_devid_supported(ah->hw_version.devid)) {
		ath_print(common, ATH_DBG_FATAL,
			  "Unsupported device ID: 0x%0x\n",
			  ah->hw_version.devid);

	ath9k_hw_init_defaults(ah);
	ath9k_hw_init_config(ah);

	if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON)) {
		ath_print(common, ATH_DBG_FATAL,
			  "Couldn't reset chip\n");

	if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE)) {
		ath_print(common, ATH_DBG_FATAL, "Couldn't wakeup chip\n");

	/* see ath9k_hw_init_config() for why serialization is needed */
	if (ah->config.serialize_regmode == SER_REG_MODE_AUTO) {
		if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCI ||
		    (AR_SREV_9280(ah) && !ah->is_pciexpress)) {
			ah->config.serialize_regmode =
			ah->config.serialize_regmode =

	ath_print(common, ATH_DBG_RESET, "serialize_regmode is %d\n",
		  ah->config.serialize_regmode);

	if (!ath9k_hw_macversion_supported(ah->hw_version.macVersion)) {
		ath_print(common, ATH_DBG_FATAL,
			  "Mac Chip Rev 0x%02x.%x is not supported by "
			  "this driver\n", ah->hw_version.macVersion,
			  ah->hw_version.macRev);

	if (AR_SREV_9100(ah)) {
		ah->iq_caldata.calData = &iq_cal_multi_sample;
		ah->supp_cals = IQ_MISMATCH_CAL;
		ah->is_pciexpress = false;

	if (AR_SREV_9271(ah))
		ah->is_pciexpress = false;

	ah->hw_version.phyRev = REG_READ(ah, AR_PHY_CHIP_ID);

	ath9k_hw_init_cal_settings(ah);

	ah->ani_function = ATH9K_ANI_ALL;
	if (AR_SREV_9280_10_OR_LATER(ah)) {
		ah->ani_function &= ~ATH9K_ANI_NOISE_IMMUNITY_LEVEL;
		ah->ath9k_hw_rf_set_freq = &ath9k_hw_ar9280_set_channel;
		ah->ath9k_hw_rf_set_freq = &ath9k_hw_set_channel;

	ath9k_hw_init_mode_regs(ah);

	if (ah->is_pciexpress)
		ath9k_hw_configpcipowersave(ah, 0, 0);
		ath9k_hw_disablepcie(ah);

	/* Support for Japan ch.14 (2484) spread */
	if (AR_SREV_9287_11_OR_LATER(ah)) {
		INIT_INI_ARRAY(&ah->iniCckfirNormal,
			       ar9287Common_normal_cck_fir_coeff_92871_1,
			       ARRAY_SIZE(ar9287Common_normal_cck_fir_coeff_92871_1), 2);
		INIT_INI_ARRAY(&ah->iniCckfirJapan2484,
			       ar9287Common_japan_2484_cck_fir_coeff_92871_1,
			       ARRAY_SIZE(ar9287Common_japan_2484_cck_fir_coeff_92871_1), 2);

	r = ath9k_hw_post_init(ah);

	ath9k_hw_init_mode_gain_regs(ah);
	ath9k_hw_fill_cap_info(ah);
	ath9k_hw_init_11a_eeprom_fix(ah);

	r = ath9k_hw_init_macaddr(ah);
		ath_print(common, ATH_DBG_FATAL,
			  "Failed to initialize MAC address\n");

	/* smaller TX FIFO trigger level on AR9285/AR9271 */
	if (AR_SREV_9285(ah) || AR_SREV_9271(ah))
		ah->tx_trig_level = (AR_FTRIG_256B >> AR_FTRIG_S);
		ah->tx_trig_level = (AR_FTRIG_512B >> AR_FTRIG_S);

	ath9k_init_nfcal_hist_buffer(ah);

	common->state = ATH_HW_INITIALIZED;
/*
 * Activate the baseband and wait long enough for the synthesizer to
 * settle.  CCK channels scale the programmed RX delay by 4/22.
 */
static void ath9k_hw_init_bb(struct ath_hw *ah,
			     struct ath9k_channel *chan)
	synthDelay = REG_READ(ah, AR_PHY_RX_DELAY) & AR_PHY_RX_DELAY_DELAY;
	if (IS_CHAN_B(chan))
		synthDelay = (4 * synthDelay) / 22;

	REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);

	udelay(synthDelay + BASE_ACTIVATE_DELAY);
/* Program the static QoS/TXOP control registers to their defaults. */
static void ath9k_hw_init_qos(struct ath_hw *ah)
	REG_WRITE(ah, AR_MIC_QOS_CONTROL, 0x100aa);
	REG_WRITE(ah, AR_MIC_QOS_SELECT, 0x3210);

	REG_WRITE(ah, AR_QOS_NO_ACK,
		  SM(2, AR_QOS_NO_ACK_TWO_BIT) |
		  SM(5, AR_QOS_NO_ACK_BIT_OFF) |
		  SM(0, AR_QOS_NO_ACK_BYTE_OFF));

	REG_WRITE(ah, AR_TXOP_X, AR_TXOP_X_VAL);
	/* unlimited TXOP for all TIDs */
	REG_WRITE(ah, AR_TXOP_0_3, 0xFFFFFFFF);
	REG_WRITE(ah, AR_TXOP_4_7, 0xFFFFFFFF);
	REG_WRITE(ah, AR_TXOP_8_11, 0xFFFFFFFF);
	REG_WRITE(ah, AR_TXOP_12_15, 0xFFFFFFFF);
/*
 * Reprogram the UART divisor latch so the console keeps running at
 * @baud after the core clock changes to @freq MHz.
 * NOTE(review): the lines that toggle the LCR (presumably the DLAB
 * bit) around the divisor writes are elided in this extract — verify
 * against the full source.
 */
static void ath9k_hw_change_target_baud(struct ath_hw *ah, u32 freq, u32 baud)
	/* standard 16x UART oversampling divisor */
	u32 baud_divider = freq * 1000 * 1000 / 16 / baud;

	lcr = REG_READ(ah , 0x5100c);
	REG_WRITE(ah, 0x5100c, lcr);
	REG_WRITE(ah, 0x51004, (baud_divider >> 8));
	REG_WRITE(ah, 0x51000, (baud_divider & 0xff));
	REG_WRITE(ah, 0x5100c, lcr);
/*
 * Program the RTC PLL for the target channel: band and half/quarter
 * rate select the divider and clock-select fields.  After writing the
 * PLL, wait for it to settle and force the derived sleep clock.
 */
static void ath9k_hw_init_pll(struct ath_hw *ah,
			      struct ath9k_channel *chan)
	if (AR_SREV_9100(ah)) {
		if (chan && IS_CHAN_5GHZ(chan))

	if (AR_SREV_9280_10_OR_LATER(ah)) {
		pll = SM(0x5, AR_RTC_9160_PLL_REFDIV);

		if (chan && IS_CHAN_HALF_RATE(chan))
			pll |= SM(0x1, AR_RTC_9160_PLL_CLKSEL);
		else if (chan && IS_CHAN_QUARTER_RATE(chan))
			pll |= SM(0x2, AR_RTC_9160_PLL_CLKSEL);

		if (chan && IS_CHAN_5GHZ(chan)) {
			pll |= SM(0x28, AR_RTC_9160_PLL_DIV);

			if (AR_SREV_9280_20(ah)) {
				if (((chan->channel % 20) == 0)
				    || ((chan->channel % 10) == 0))
			/* 2.4 GHz divider */
			pll |= SM(0x2c, AR_RTC_9160_PLL_DIV);
	} else if (AR_SREV_9160_10_OR_LATER(ah)) {

		pll = SM(0x5, AR_RTC_9160_PLL_REFDIV);

		if (chan && IS_CHAN_HALF_RATE(chan))
			pll |= SM(0x1, AR_RTC_9160_PLL_CLKSEL);
		else if (chan && IS_CHAN_QUARTER_RATE(chan))
			pll |= SM(0x2, AR_RTC_9160_PLL_CLKSEL);

		if (chan && IS_CHAN_5GHZ(chan))
			pll |= SM(0x50, AR_RTC_9160_PLL_DIV);
			pll |= SM(0x58, AR_RTC_9160_PLL_DIV);
		/* legacy AR5416 PLL layout */
		pll = AR_RTC_PLL_REFDIV_5 | AR_RTC_PLL_DIV2;

		if (chan && IS_CHAN_HALF_RATE(chan))
			pll |= SM(0x1, AR_RTC_PLL_CLKSEL);
		else if (chan && IS_CHAN_QUARTER_RATE(chan))
			pll |= SM(0x2, AR_RTC_PLL_CLKSEL);

		if (chan && IS_CHAN_5GHZ(chan))
			pll |= SM(0xa, AR_RTC_PLL_DIV);
			pll |= SM(0xb, AR_RTC_PLL_DIV);

	REG_WRITE(ah, AR_RTC_PLL_CONTROL, pll);

	/* Switch the core clock for ar9271 to 117Mhz */
	if (AR_SREV_9271(ah)) {
		if ((pll == 0x142c) || (pll == 0x2850) ) {
			/* set CLKOBS to output AHB clock */
			REG_WRITE(ah, 0x7020, 0xe);
			/*
			 * 0x304: 117Mhz, ahb_ratio: 1x1
			 * 0x306: 40Mhz, ahb_ratio: 1x1
			 */
			REG_WRITE(ah, 0x50040, 0x304);
			/*
			 * makes adjustments for the baud divisor to keep the
			 * targeted baud rate based on the used core clock.
			 */
			ath9k_hw_change_target_baud(ah, AR9271_CORE_CLOCK,
						    AR9271_TARGET_BAUD_RATE);

	udelay(RTC_PLL_SETTLE_DELAY);

	REG_WRITE(ah, AR_RTC_SLEEP_CLK, AR_RTC_FORCE_DERIVED_CLK);
/*
 * Program the RX/TX chain masks, including the analog chain-swap
 * workaround for mask 0x5 and the forced 3-chain calibration mask on
 * pre-AR9160 parts.
 */
static void ath9k_hw_init_chain_masks(struct ath_hw *ah)
	int rx_chainmask, tx_chainmask;

	rx_chainmask = ah->rxchainmask;
	tx_chainmask = ah->txchainmask;

	switch (rx_chainmask) {
		REG_SET_BIT(ah, AR_PHY_ANALOG_SWAP,
			    AR_PHY_SWAP_ALT_CHAIN);
		/* older MACs calibrate with all three chains enabled */
		if (((ah)->hw_version.macVersion <= AR_SREV_VERSION_9160)) {
			REG_WRITE(ah, AR_PHY_RX_CHAINMASK, 0x7);
			REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, 0x7);
		REG_WRITE(ah, AR_PHY_RX_CHAINMASK, rx_chainmask);
		REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, rx_chainmask);

	REG_WRITE(ah, AR_SELFGEN_MASK, tx_chainmask);
	if (tx_chainmask == 0x5) {
		REG_SET_BIT(ah, AR_PHY_ANALOG_SWAP,
			    AR_PHY_SWAP_ALT_CHAIN);
	if (AR_SREV_9100(ah))
		REG_WRITE(ah, AR_PHY_ANALOG_SWAP,
			  REG_READ(ah, AR_PHY_ANALOG_SWAP) | 0x00000001);
/*
 * Build and program the interrupt mask for @opmode; synchronous (host
 * bus error) interrupt registers exist only on non-AR9100 parts.
 */
static void ath9k_hw_init_interrupt_masks(struct ath_hw *ah,
					  enum nl80211_iftype opmode)
	ah->mask_reg = AR_IMR_TXERR |

	/* mitigation replaces the per-frame RX-OK interrupt */
	if (ah->config.intr_mitigation)
		ah->mask_reg |= AR_IMR_RXINTM | AR_IMR_RXMINTR;
		ah->mask_reg |= AR_IMR_RXOK;

	ah->mask_reg |= AR_IMR_TXOK;

	/* MIB interrupts only enabled in AP mode */
	if (opmode == NL80211_IFTYPE_AP)
		ah->mask_reg |= AR_IMR_MIB;

	REG_WRITE(ah, AR_IMR, ah->mask_reg);
	REG_WRITE(ah, AR_IMR_S2, REG_READ(ah, AR_IMR_S2) | AR_IMR_S2_GTT);

	if (!AR_SREV_9100(ah)) {
		REG_WRITE(ah, AR_INTR_SYNC_CAUSE, 0xFFFFFFFF);
		REG_WRITE(ah, AR_INTR_SYNC_ENABLE, AR_INTR_SYNC_DEFAULT);
		REG_WRITE(ah, AR_INTR_SYNC_MASK, 0);
/*
 * Program the ACK timeout (@us microseconds), rejecting values that
 * would overflow the AR_TIME_OUT_ACK field; the cached value is set to
 * (u32) -1 on rejection.
 */
static bool ath9k_hw_set_ack_timeout(struct ath_hw *ah, u32 us)
	if (us > ath9k_hw_mac_to_usec(ah, MS(0xffffffff, AR_TIME_OUT_ACK))) {
		ath_print(ath9k_hw_common(ah), ATH_DBG_RESET,
			  "bad ack timeout %u\n", us);
		ah->acktimeout = (u32) -1;
	REG_RMW_FIELD(ah, AR_TIME_OUT,
		      AR_TIME_OUT_ACK, ath9k_hw_mac_to_clks(ah, us));
	ah->acktimeout = us;
1222 static bool ath9k_hw_set_cts_timeout(struct ath_hw *ah, u32 us)
1224 if (us > ath9k_hw_mac_to_usec(ah, MS(0xffffffff, AR_TIME_OUT_CTS))) {
1225 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET,
1226 "bad cts timeout %u\n", us);
1227 ah->ctstimeout = (u32) -1;
1230 REG_RMW_FIELD(ah, AR_TIME_OUT,
1231 AR_TIME_OUT_CTS, ath9k_hw_mac_to_clks(ah, us));
1232 ah->ctstimeout = us;
/*
 * Set the global transmit timeout (in TUs) in AR_GTXTO and cache it in
 * ah->globaltxtimeout.  Returns false when tu is out of range for the
 * AR_GTXTO_TIMEOUT_LIMIT field.
 * NOTE(review): extraction dropped the range-check `if` line, the
 * braces and the return statements.
 */
1237 static bool ath9k_hw_set_global_txtimeout(struct ath_hw *ah, u32 tu)
1240 		ath_print(ath9k_hw_common(ah), ATH_DBG_XMIT,
1241 			  "bad global tx timeout %u\n", tu);
/* (u32)-1 marks "not set" — see ath9k_hw_init_user_settings() */
1242 		ah->globaltxtimeout = (u32) -1;
1245 		REG_RMW_FIELD(ah, AR_GTXTO, AR_GTXTO_TIMEOUT_LIMIT, tu);
1246 		ah->globaltxtimeout = tu;
1251 static void ath9k_hw_init_user_settings(struct ath_hw *ah)
1253 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET, "ah->misc_mode 0x%x\n",
1256 if (ah->misc_mode != 0)
1257 REG_WRITE(ah, AR_PCU_MISC,
1258 REG_READ(ah, AR_PCU_MISC) | ah->misc_mode);
1259 if (ah->slottime != (u32) -1)
1260 ath9k_hw_setslottime(ah, ah->slottime);
1261 if (ah->acktimeout != (u32) -1)
1262 ath9k_hw_set_ack_timeout(ah, ah->acktimeout);
1263 if (ah->ctstimeout != (u32) -1)
1264 ath9k_hw_set_cts_timeout(ah, ah->ctstimeout);
1265 if (ah->globaltxtimeout != (u32) -1)
1266 ath9k_hw_set_global_txtimeout(ah, ah->globaltxtimeout);
1269 const char *ath9k_hw_probe(u16 vendorid, u16 devid)
1271 return vendorid == ATHEROS_VENDOR_ID ?
1272 ath9k_hw_devname(devid) : NULL;
/*
 * Tear down the hardware layer: disable ANI, put the chip into full
 * sleep, and release RF banks on pre-AR9280 parts.
 * NOTE(review): extraction dropped lines here (the early-exit path for
 * a not-fully-initialized device and the final free of ah).
 */
1275 void ath9k_hw_detach(struct ath_hw *ah)
1277 	struct ath_common *common = ath9k_hw_common(ah);
/* device never completed init — skip the hardware shutdown steps */
1279 	if (common->state <= ATH_HW_INITIALIZED)
/* AR9100 has no ANI block to disable */
1282 	if (!AR_SREV_9100(ah))
1283 		ath9k_hw_ani_disable(ah);
1285 	ath9k_hw_setpower(ah, ATH9K_PM_FULL_SLEEP);
/* external RF banks exist only on pre-AR9280 radios */
1288 	if (!AR_SREV_9280_10_OR_LATER(ah))
1289 		ath9k_hw_rf_free_ext_banks(ah);
1293 EXPORT_SYMBOL(ath9k_hw_detach);
/*
 * Apply chip-revision-specific fixups after the INI tables have been
 * written, before the channel is programmed.
 * NOTE(review): extraction dropped lines (the `u32 val;` declaration,
 * an early return after the AR9271 handling, braces).
 */
1299 static void ath9k_hw_override_ini(struct ath_hw *ah,
1300 				  struct ath9k_channel *chan)
1304 	if (AR_SREV_9271(ah)) {
1306 		 * Enable spectral scan as a workaround for stuck-beacon
1307 		 * issues on AR9271 1.0.  The stuck-beacon issue is not seen on
1310 		if (AR_SREV_9271_10(ah)) {
1311 			val = REG_READ(ah, AR_PHY_SPECTRAL_SCAN) |
1312 			      AR_PHY_SPECTRAL_SCAN_ENABLE;
1313 			REG_WRITE(ah, AR_PHY_SPECTRAL_SCAN, val);
1315 		else if (AR_SREV_9271_11(ah))
1317 			 * change AR_PHY_RF_CTL3 setting to fix MAC issue
1318 			 * present on AR9271 1.1
1320 			REG_WRITE(ah, AR_PHY_RF_CTL3, 0x3a020001);
1325 	 * Set the RX_ABORT and RX_DIS and clear it off only after
1326 	 * RXE is set for MAC.  This prevents frames with corrupted
1327 	 * descriptor status.
1329 	REG_SET_BIT(ah, AR_DIAG_SW, (AR_DIAG_RX_DIS | AR_DIAG_RX_ABORT));
/* clear hardware-workaround bits that the INI tables may have set */
1331 	if (AR_SREV_9280_10_OR_LATER(ah)) {
1332 		val = REG_READ(ah, AR_PCU_MISC_MODE2) &
1333 		      (~AR_PCU_MISC_MODE2_HWWAR1);
1335 		if (AR_SREV_9287_10_OR_LATER(ah))
1336 			val = val & (~AR_PCU_MISC_MODE2_HWWAR2);
1338 		REG_WRITE(ah, AR_PCU_MISC_MODE2, val);
1341 	if (!AR_SREV_5416_20_OR_LATER(ah) ||
1342 	    AR_SREV_9280_10_OR_LATER(ah))
1345 	 * Disable BB clock gating
1346 	 * Necessary to avoid issues on AR5416 2.0
1348 	REG_WRITE(ah, 0x9800 + (651 << 2), 0x11);
/*
 * Patch a single INI register value based on EEPROM contents for the
 * "def" (default) EEPROM map.  Currently only register 0x7894 on
 * AR9280 PCI needs fixing: newer EEPROM revisions carry a PWDCLKIND
 * field that must be folded into AR_AN_TOP2.
 * Returns the (possibly modified) value to write.
 * NOTE(review): extraction dropped lines (the reg/value parameters in
 * the signature, break/default in the switch, the final return).
 */
1351 static u32 ath9k_hw_def_ini_fixup(struct ath_hw *ah,
1352 				  struct ar5416_eeprom_def *pEepData,
1355 	struct base_eep_header *pBase = &(pEepData->baseEepHeader);
1356 	struct ath_common *common = ath9k_hw_common(ah);
1358 	switch (ah->hw_version.devid) {
1359 	case AR9280_DEVID_PCI:
1360 		if (reg == 0x7894) {
1361 			ath_print(common, ATH_DBG_EEPROM,
1362 				  "ini VAL: %x  EEPROM: %x\n", value,
1363 				  (pBase->version & 0xff));
/* EEPROM rev > 0x0a: honor the EEPROM's pwdclkind setting */
1365 			if ((pBase->version & 0xff) > 0x0a) {
1366 				ath_print(common, ATH_DBG_EEPROM,
1369 				value &= ~AR_AN_TOP2_PWDCLKIND;
1370 				value |= AR_AN_TOP2_PWDCLKIND &
1371 					(pBase->pwdclkind << AR_AN_TOP2_PWDCLKIND_S);
1373 				ath_print(common, ATH_DBG_EEPROM,
1374 					  "PWDCLKIND Earlier Rev\n");
1377 			ath_print(common, ATH_DBG_EEPROM,
1378 				  "final ini VAL: %x\n", value);
/*
 * Dispatch INI fixups by EEPROM map type.  The 4K-bit map takes the
 * other branch (dropped by extraction); everything else goes through
 * ath9k_hw_def_ini_fixup().
 * NOTE(review): extraction dropped the reg/value parameters and the
 * 4KBITS-branch return statement.
 */
1386 static u32 ath9k_hw_ini_fixup(struct ath_hw *ah,
1387 			      struct ar5416_eeprom_def *pEepData,
1390 	if (ah->eep_map == EEP_MAP_4KBITS)
1393 		return ath9k_hw_def_ini_fixup(ah, pEepData, reg, value);
/*
 * Initialize open-loop power control.  AR9287 1.0+ uses the on-die
 * temperature sensor; older OLC parts instead snapshot the TX gain
 * table (the else branch — its tail was dropped by extraction, along
 * with the `u32 i;` declaration and the second MS() argument).
 */
1396 static void ath9k_olc_init(struct ath_hw *ah)
1400 	if (OLC_FOR_AR9287_10_LATER) {
1401 		REG_SET_BIT(ah, AR_PHY_TX_PWRCTRL9,
1402 			    AR_PHY_TX_PWRCTRL9_RES_DC_REMOVAL);
1403 		ath9k_hw_analog_shift_rmw(ah, AR9287_AN_TXPC0,
1404 					  AR9287_AN_TXPC0_TXPCMODE,
1405 					  AR9287_AN_TXPC0_TXPCMODE_S,
1406 					  AR9287_AN_TXPC0_TXPCMODE_TEMPSENSE);
/* non-AR9287 OLC: remember the boot-time per-entry TX gain values */
1409 		for (i = 0; i < AR9280_TX_GAIN_TABLE_SIZE; i++)
1410 			ah->originalGain[i] =
1411 				MS(REG_READ(ah, AR_PHY_TX_GAIN_TBL1 + i * 4),
1417 static u32 ath9k_regd_get_ctl(struct ath_regulatory *reg,
1418 struct ath9k_channel *chan)
1420 u32 ctl = ath_regd_get_band_ctl(reg, chan->chan->band);
1422 if (IS_CHAN_B(chan))
1424 else if (IS_CHAN_G(chan))
/*
 * Write the full INI register set for a channel/mode: mode-indexed
 * tables, ADDAC, common table, per-chip gain tables, then fixups,
 * chain masks and TX power.  Returns 0 on success, negative on error.
 * NOTE(review): extraction dropped many lines — the modesIndex /
 * freqIndex assignments inside the switch, several braces/breaks/
 * returns, the addacSize declaration and the analog-shiftreg delay
 * bodies.  Restore from the upstream driver before building.
 */
1432 static int ath9k_hw_process_ini(struct ath_hw *ah,
1433 				struct ath9k_channel *chan)
1435 	struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
1436 	int i, regWrites = 0;
1437 	struct ieee80211_channel *channel = chan->chan;
1438 	u32 modesIndex, freqIndex;
/* map the channel mode onto INI column (modesIndex) and freq index */
1440 	switch (chan->chanmode) {
1442 	case CHANNEL_A_HT20:
1446 	case CHANNEL_A_HT40PLUS:
1447 	case CHANNEL_A_HT40MINUS:
1452 	case CHANNEL_G_HT20:
1457 	case CHANNEL_G_HT40PLUS:
1458 	case CHANNEL_G_HT40MINUS:
/* reset the PHY and select the external radio before ADDAC setup */
1467 	REG_WRITE(ah, AR_PHY(0), 0x00000007);
1468 	REG_WRITE(ah, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_EXTERNAL_RADIO);
1469 	ah->eep_ops->set_addac(ah, chan);
1471 	if (AR_SREV_5416_22_OR_LATER(ah)) {
1472 		REG_WRITE_ARRAY(&ah->iniAddac, 1, regWrites);
/* AR5416 < 2.2: patch one ADDAC table entry (row 31, column 1) to 0 */
1474 		struct ar5416IniArray temp;
1476 			sizeof(u32) * ah->iniAddac.ia_rows *
1477 			ah->iniAddac.ia_columns;
1479 		memcpy(ah->addac5416_21,
1480 		       ah->iniAddac.ia_array, addacSize);
1482 		(ah->addac5416_21)[31 * ah->iniAddac.ia_columns + 1] = 0;
1484 		temp.ia_array = ah->addac5416_21;
1485 		temp.ia_columns = ah->iniAddac.ia_columns;
1486 		temp.ia_rows = ah->iniAddac.ia_rows;
1487 		REG_WRITE_ARRAY(&temp, 1, regWrites);
1490 	REG_WRITE(ah, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_INTERNAL_ADDAC);
/* mode-dependent table: column 0 is the register, modesIndex the value */
1492 	for (i = 0; i < ah->iniModes.ia_rows; i++) {
1493 		u32 reg = INI_RA(&ah->iniModes, i, 0);
1494 		u32 val = INI_RA(&ah->iniModes, i, modesIndex);
1496 		REG_WRITE(ah, reg, val);
/* analog shift registers (0x7800-0x789f) need a settling delay */
1498 		if (reg >= 0x7800 && reg < 0x78a0
1499 		    && ah->config.analog_shiftreg) {
1503 		DO_DELAY(regWrites);
1506 	if (AR_SREV_9280(ah) || AR_SREV_9287_10_OR_LATER(ah))
1507 		REG_WRITE_ARRAY(&ah->iniModesRxGain, modesIndex, regWrites);
1509 	if (AR_SREV_9280(ah) || AR_SREV_9285_12_OR_LATER(ah) ||
1510 	    AR_SREV_9287_10_OR_LATER(ah))
1511 		REG_WRITE_ARRAY(&ah->iniModesTxGain, modesIndex, regWrites);
/* common table: column 1 is the single value column */
1513 	for (i = 0; i < ah->iniCommon.ia_rows; i++) {
1514 		u32 reg = INI_RA(&ah->iniCommon, i, 0);
1515 		u32 val = INI_RA(&ah->iniCommon, i, 1);
1517 		REG_WRITE(ah, reg, val);
1519 		if (reg >= 0x7800 && reg < 0x78a0
1520 		    && ah->config.analog_shiftreg) {
1524 		DO_DELAY(regWrites);
1527 	ath9k_hw_write_regs(ah, modesIndex, freqIndex, regWrites);
1529 	if (AR_SREV_9271_10(ah))
1530 		REG_WRITE_ARRAY(&ah->iniModes_9271_1_0_only,
1531 				modesIndex, regWrites);
1533 	if (AR_SREV_9280_20(ah) && IS_CHAN_A_5MHZ_SPACED(chan)) {
1534 		REG_WRITE_ARRAY(&ah->iniModesAdditional, modesIndex,
/* post-INI fixups, MAC register setup, chain masks, TX power */
1538 	ath9k_hw_override_ini(ah, chan);
1539 	ath9k_hw_set_regs(ah, chan);
1540 	ath9k_hw_init_chain_masks(ah);
1542 	if (OLC_FOR_AR9280_20_LATER)
1545 	ah->eep_ops->set_txpower(ah, chan,
1546 				 ath9k_regd_get_ctl(regulatory, chan),
1547 				 channel->max_antenna_gain * 2,
1548 				 channel->max_power * 2,
1549 				 min((u32) MAX_RATE_POWER,
1550 				     (u32) regulatory->power_limit));
1552 	if (!ath9k_hw_set_rf_regs(ah, chan, freqIndex)) {
1553 		ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
1554 			  "ar5416SetRfRegs failed\n");
1561 /****************************************/
1562 /* Reset and Channel Switching Routines */
1563 /****************************************/
1565 static void ath9k_hw_set_rfmode(struct ath_hw *ah, struct ath9k_channel *chan)
1572 rfMode |= (IS_CHAN_B(chan) || IS_CHAN_G(chan))
1573 ? AR_PHY_MODE_DYNAMIC : AR_PHY_MODE_OFDM;
1575 if (!AR_SREV_9280_10_OR_LATER(ah))
1576 rfMode |= (IS_CHAN_5GHZ(chan)) ?
1577 AR_PHY_MODE_RF5GHZ : AR_PHY_MODE_RF2GHZ;
1579 if (AR_SREV_9280_20(ah) && IS_CHAN_A_5MHZ_SPACED(chan))
1580 rfMode |= (AR_PHY_MODE_DYNAMIC | AR_PHY_MODE_DYN_CCK_DISABLE);
1582 REG_WRITE(ah, AR_PHY_MODE, rfMode);
1585 static void ath9k_hw_mark_phy_inactive(struct ath_hw *ah)
1587 REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_DIS);
1590 static inline void ath9k_hw_set_dma(struct ath_hw *ah)
1595 * set AHB_MODE not to do cacheline prefetches
1597 regval = REG_READ(ah, AR_AHB_MODE);
1598 REG_WRITE(ah, AR_AHB_MODE, regval | AR_AHB_PREFETCH_RD_EN);
1601 * let mac dma reads be in 128 byte chunks
1603 regval = REG_READ(ah, AR_TXCFG) & ~AR_TXCFG_DMASZ_MASK;
1604 REG_WRITE(ah, AR_TXCFG, regval | AR_TXCFG_DMASZ_128B);
1607 * Restore TX Trigger Level to its pre-reset value.
1608 * The initial value depends on whether aggregation is enabled, and is
1609 * adjusted whenever underruns are detected.
1611 REG_RMW_FIELD(ah, AR_TXCFG, AR_FTRIG, ah->tx_trig_level);
1614 * let mac dma writes be in 128 byte chunks
1616 regval = REG_READ(ah, AR_RXCFG) & ~AR_RXCFG_DMASZ_MASK;
1617 REG_WRITE(ah, AR_RXCFG, regval | AR_RXCFG_DMASZ_128B);
1620 * Setup receive FIFO threshold to hold off TX activities
1622 REG_WRITE(ah, AR_RXFIFO_CFG, 0x200);
1625 * reduce the number of usable entries in PCU TXBUF to avoid
1626 * wrap around issues.
1628 if (AR_SREV_9285(ah)) {
1629 /* For AR9285 the number of Fifos are reduced to half.
1630 * So set the usable tx buf size also to half to
1631 * avoid data/delimiter underruns
1633 REG_WRITE(ah, AR_PCU_TXBUF_CTRL,
1634 AR_9285_PCU_TXBUF_CTRL_USABLE_SIZE);
1635 } else if (!AR_SREV_9271(ah)) {
1636 REG_WRITE(ah, AR_PCU_TXBUF_CTRL,
1637 AR_PCU_TXBUF_CTRL_USABLE_SIZE);
1641 static void ath9k_hw_set_operating_mode(struct ath_hw *ah, int opmode)
1645 val = REG_READ(ah, AR_STA_ID1);
1646 val &= ~(AR_STA_ID1_STA_AP | AR_STA_ID1_ADHOC);
1648 case NL80211_IFTYPE_AP:
1649 REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_STA_AP
1650 | AR_STA_ID1_KSRCH_MODE);
1651 REG_CLR_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
1653 case NL80211_IFTYPE_ADHOC:
1654 case NL80211_IFTYPE_MESH_POINT:
1655 REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_ADHOC
1656 | AR_STA_ID1_KSRCH_MODE);
1657 REG_SET_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
1659 case NL80211_IFTYPE_STATION:
1660 case NL80211_IFTYPE_MONITOR:
1661 REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_KSRCH_MODE);
1666 static inline void ath9k_hw_get_delta_slope_vals(struct ath_hw *ah,
1671 u32 coef_exp, coef_man;
1673 for (coef_exp = 31; coef_exp > 0; coef_exp--)
1674 if ((coef_scaled >> coef_exp) & 0x1)
1677 coef_exp = 14 - (coef_exp - COEF_SCALE_S);
1679 coef_man = coef_scaled + (1 << (COEF_SCALE_S - coef_exp - 1));
1681 *coef_mantissa = coef_man >> (COEF_SCALE_S - coef_exp);
1682 *coef_exponent = coef_exp - 16;
1685 static void ath9k_hw_set_delta_slope(struct ath_hw *ah,
1686 struct ath9k_channel *chan)
1688 u32 coef_scaled, ds_coef_exp, ds_coef_man;
1689 u32 clockMhzScaled = 0x64000000;
1690 struct chan_centers centers;
1692 if (IS_CHAN_HALF_RATE(chan))
1693 clockMhzScaled = clockMhzScaled >> 1;
1694 else if (IS_CHAN_QUARTER_RATE(chan))
1695 clockMhzScaled = clockMhzScaled >> 2;
1697 ath9k_hw_get_channel_centers(ah, chan, ¢ers);
1698 coef_scaled = clockMhzScaled / centers.synth_center;
1700 ath9k_hw_get_delta_slope_vals(ah, coef_scaled, &ds_coef_man,
1703 REG_RMW_FIELD(ah, AR_PHY_TIMING3,
1704 AR_PHY_TIMING3_DSC_MAN, ds_coef_man);
1705 REG_RMW_FIELD(ah, AR_PHY_TIMING3,
1706 AR_PHY_TIMING3_DSC_EXP, ds_coef_exp);
1708 coef_scaled = (9 * coef_scaled) / 10;
1710 ath9k_hw_get_delta_slope_vals(ah, coef_scaled, &ds_coef_man,
1713 REG_RMW_FIELD(ah, AR_PHY_HALFGI,
1714 AR_PHY_HALFGI_DSC_MAN, ds_coef_man);
1715 REG_RMW_FIELD(ah, AR_PHY_HALFGI,
1716 AR_PHY_HALFGI_DSC_EXP, ds_coef_exp);
/*
 * Perform a warm or cold MAC reset via the RTC reset-control register.
 * Returns false if the RTC does not come out of reset.
 * NOTE(review): extraction dropped lines here (declarations of
 * rst_flags/tmpReg, the `if (tmpReg & ...)` condition head, udelay()
 * settle delays, the else branch, return statements and braces).
 */
1719 static bool ath9k_hw_set_reset(struct ath_hw *ah, int type)
/* AR9100: slow the derived clock before resetting */
1724 	if (AR_SREV_9100(ah)) {
1725 		u32 val = REG_READ(ah, AR_RTC_DERIVED_CLK);
1726 		val &= ~AR_RTC_DERIVED_CLK_PERIOD;
1727 		val |= SM(1, AR_RTC_DERIVED_CLK_PERIOD);
1728 		REG_WRITE(ah, AR_RTC_DERIVED_CLK, val);
/* read back to flush the posted write */
1729 		(void)REG_READ(ah, AR_RTC_DERIVED_CLK);
1732 	REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN |
1733 		  AR_RTC_FORCE_WAKE_ON_INT);
1735 	if (AR_SREV_9100(ah)) {
1736 		rst_flags = AR_RTC_RC_MAC_WARM | AR_RTC_RC_MAC_COLD |
1737 			AR_RTC_RC_COLD_RESET | AR_RTC_RC_WARM_RESET;
/* non-AR9100: reset the AHB (and host interface on bus timeouts) */
1739 		tmpReg = REG_READ(ah, AR_INTR_SYNC_CAUSE);
1741 		    (AR_INTR_SYNC_LOCAL_TIMEOUT |
1742 		     AR_INTR_SYNC_RADM_CPL_TIMEOUT)) {
1743 			REG_WRITE(ah, AR_INTR_SYNC_ENABLE, 0);
1744 			REG_WRITE(ah, AR_RC, AR_RC_AHB | AR_RC_HOSTIF);
1746 			REG_WRITE(ah, AR_RC, AR_RC_AHB);
1749 		rst_flags = AR_RTC_RC_MAC_WARM;
1750 		if (type == ATH9K_RESET_COLD)
1751 			rst_flags |= AR_RTC_RC_MAC_COLD;
1754 	REG_WRITE(ah, AR_RTC_RC, rst_flags);
/* release reset and wait for the RTC to report idle */
1757 	REG_WRITE(ah, AR_RTC_RC, 0);
1758 	if (!ath9k_hw_wait(ah, AR_RTC_RC, AR_RTC_RC_M, 0, AH_WAIT_TIMEOUT)) {
1759 		ath_print(ath9k_hw_common(ah), ATH_DBG_RESET,
1760 			  "RTC stuck in MAC reset\n");
1764 	if (!AR_SREV_9100(ah))
1765 		REG_WRITE(ah, AR_RC, 0);
1767 	if (AR_SREV_9100(ah))
/*
 * Power-on reset: pulse AR_RTC_RESET low/high, wait for the RTC to
 * wake, then finish with a warm reset.  Returns false if the RTC
 * never signals wake-up.
 * NOTE(review): extraction dropped lines (udelay() calls between the
 * reset toggles, the ath9k_hw_wait() argument list, return false).
 */
1773 static bool ath9k_hw_set_reset_power_on(struct ath_hw *ah)
1775 	REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN |
1776 		  AR_RTC_FORCE_WAKE_ON_INT);
1778 	if (!AR_SREV_9100(ah))
1779 		REG_WRITE(ah, AR_RC, AR_RC_AHB);
/* assert RTC reset ... */
1781 	REG_WRITE(ah, AR_RTC_RESET, 0);
1784 	if (!AR_SREV_9100(ah))
1785 		REG_WRITE(ah, AR_RC, 0);
/* ... then release it */
1787 	REG_WRITE(ah, AR_RTC_RESET, 1);
1789 	if (!ath9k_hw_wait(ah,
1794 		ath_print(ath9k_hw_common(ah), ATH_DBG_RESET,
1795 			  "RTC not waking up\n");
/* revisions are only readable once the chip is awake */
1799 	ath9k_hw_read_revisions(ah);
1801 	return ath9k_hw_set_reset(ah, ATH9K_RESET_WARM);
1804 static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type)
1806 REG_WRITE(ah, AR_RTC_FORCE_WAKE,
1807 AR_RTC_FORCE_WAKE_EN | AR_RTC_FORCE_WAKE_ON_INT);
1810 case ATH9K_RESET_POWER_ON:
1811 return ath9k_hw_set_reset_power_on(ah);
1812 case ATH9K_RESET_WARM:
1813 case ATH9K_RESET_COLD:
1814 return ath9k_hw_set_reset(ah, type);
1820 static void ath9k_hw_set_regs(struct ath_hw *ah, struct ath9k_channel *chan)
1823 u32 enableDacFifo = 0;
1825 if (AR_SREV_9285_10_OR_LATER(ah))
1826 enableDacFifo = (REG_READ(ah, AR_PHY_TURBO) &
1827 AR_PHY_FC_ENABLE_DAC_FIFO);
1829 phymode = AR_PHY_FC_HT_EN | AR_PHY_FC_SHORT_GI_40
1830 | AR_PHY_FC_SINGLE_HT_LTF1 | AR_PHY_FC_WALSH | enableDacFifo;
1832 if (IS_CHAN_HT40(chan)) {
1833 phymode |= AR_PHY_FC_DYN2040_EN;
1835 if ((chan->chanmode == CHANNEL_A_HT40PLUS) ||
1836 (chan->chanmode == CHANNEL_G_HT40PLUS))
1837 phymode |= AR_PHY_FC_DYN2040_PRI_CH;
1840 REG_WRITE(ah, AR_PHY_TURBO, phymode);
1842 ath9k_hw_set11nmac2040(ah);
1844 REG_WRITE(ah, AR_GTXTO, 25 << AR_GTXTO_TIMEOUT_LIMIT_S);
1845 REG_WRITE(ah, AR_CST, 0xF << AR_CST_TIMEOUT_LIMIT_S);
1848 static bool ath9k_hw_chip_reset(struct ath_hw *ah,
1849 struct ath9k_channel *chan)
1851 if (AR_SREV_9280(ah) && ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL)) {
1852 if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON))
1854 } else if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_WARM))
1857 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
1860 ah->chip_fullsleep = false;
1861 ath9k_hw_init_pll(ah, chan);
1862 ath9k_hw_set_rfmode(ah, chan);
/*
 * Fast channel change (no full chip reset): refuse if TX is pending,
 * park the RF bus, retune the synthesizer, reprogram TX power and
 * delta-slope/spur registers.  Returns true on success.
 * NOTE(review): extraction dropped lines (the `int r;` declaration,
 * `return false;` statements, `else` before the generic spur call,
 * braces and the final return).
 */
1867 static bool ath9k_hw_channel_change(struct ath_hw *ah,
1868 				    struct ath9k_channel *chan)
1870 	struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
1871 	struct ath_common *common = ath9k_hw_common(ah);
1872 	struct ieee80211_channel *channel = chan->chan;
1873 	u32 synthDelay, qnum;
/* a fast channel change is unsafe while any queue still has frames */
1876 	for (qnum = 0; qnum < AR_NUM_QCU; qnum++) {
1877 		if (ath9k_hw_numtxpending(ah, qnum)) {
1878 			ath_print(common, ATH_DBG_QUEUE,
1879 				  "Transmit frames pending on "
1880 				  "queue %d\n", qnum);
/* request the RF bus and wait for the baseband to grant it */
1885 	REG_WRITE(ah, AR_PHY_RFBUS_REQ, AR_PHY_RFBUS_REQ_EN);
1886 	if (!ath9k_hw_wait(ah, AR_PHY_RFBUS_GRANT, AR_PHY_RFBUS_GRANT_EN,
1887 			   AR_PHY_RFBUS_GRANT_EN, AH_WAIT_TIMEOUT)) {
1888 		ath_print(common, ATH_DBG_FATAL,
1889 			  "Could not kill baseband RX\n");
1893 	ath9k_hw_set_regs(ah, chan);
/* chip-specific synthesizer programming */
1895 	r = ah->ath9k_hw_rf_set_freq(ah, chan);
1897 		ath_print(common, ATH_DBG_FATAL,
1898 			  "Failed to set channel\n");
1902 	ah->eep_ops->set_txpower(ah, chan,
1903 				 ath9k_regd_get_ctl(regulatory, chan),
1904 				 channel->max_antenna_gain * 2,
1905 				 channel->max_power * 2,
1906 				 min((u32) MAX_RATE_POWER,
1907 				     (u32) regulatory->power_limit));
/* wait for the synth to settle (CCK uses a 4/22 scaling) */
1909 	synthDelay = REG_READ(ah, AR_PHY_RX_DELAY) & AR_PHY_RX_DELAY_DELAY;
1910 	if (IS_CHAN_B(chan))
1911 		synthDelay = (4 * synthDelay) / 22;
1915 	udelay(synthDelay + BASE_ACTIVATE_DELAY);
/* release the RF bus */
1917 	REG_WRITE(ah, AR_PHY_RFBUS_REQ, 0);
1919 	if (IS_CHAN_OFDM(chan) || IS_CHAN_HT(chan))
1920 		ath9k_hw_set_delta_slope(ah, chan);
1922 	if (AR_SREV_9280_10_OR_LATER(ah))
1923 		ath9k_hw_9280_spur_mitigate(ah, chan);
1925 		ath9k_hw_spur_mitigate(ah, chan);
1927 	if (!chan->oneTimeCalsDone)
1928 		chan->oneTimeCalsDone = true;
/*
 * AR9280+ spur mitigation: find the first EEPROM spur channel close
 * enough to the synth center, then program the spur RSSI/filter
 * registers, the pilot/channel bin masks and the Viterbi bin masks
 * around the spur frequency.
 * NOTE(review): extraction dropped many lines in this function
 * (declarations of freq/bin/mask variables, the HT40 sub-channel
 * selection conditionals, the pilot/chan mask loop bodies, braces).
 */
1933 static void ath9k_hw_9280_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan)
1935 	int bb_spur = AR_NO_SPUR;
1938 	int bb_spur_off, spur_subchannel_sd;
1940 	int spur_delta_phase;
1942 	int upper, lower, cur_vit_mask;
1945 	int pilot_mask_reg[4] = { AR_PHY_TIMING7, AR_PHY_TIMING8,
1946 				  AR_PHY_PILOT_MASK_01_30, AR_PHY_PILOT_MASK_31_60
1948 	int chan_mask_reg[4] = { AR_PHY_TIMING9, AR_PHY_TIMING10,
1949 				 AR_PHY_CHANNEL_MASK_01_30, AR_PHY_CHANNEL_MASK_31_60
1951 	int inc[4] = { 0, 100, 0, 0 };
1952 	struct chan_centers centers;
1959 	bool is2GHz = IS_CHAN_2GHZ(chan);
1961 	memset(&mask_m, 0, sizeof(int8_t) * 123);
1962 	memset(&mask_p, 0, sizeof(int8_t) * 123);
/*
 * NOTE(review): the next line originally reads
 * ath9k_hw_get_channel_centers(ah, chan, &centers); the '¢' is a
 * mojibake of "&c" introduced by extraction — must be repaired.
 */
1964 	ath9k_hw_get_channel_centers(ah, chan, ¢ers);
1965 	freq = centers.synth_center;
1967 	ah->config.spurmode = SPUR_ENABLE_EEPROM;
/* scan EEPROM spur channels for one within range of this channel */
1968 	for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
1969 		cur_bb_spur = ah->eep_ops->get_spur_channel(ah, i, is2GHz);
/* spur channels are stored in tenths of MHz relative to the band base */
1972 			cur_bb_spur = (cur_bb_spur / 10) + AR_BASE_FREQ_2GHZ;
1974 			cur_bb_spur = (cur_bb_spur / 10) + AR_BASE_FREQ_5GHZ;
1976 		if (AR_NO_SPUR == cur_bb_spur)
1978 		cur_bb_spur = cur_bb_spur - freq;
1980 		if (IS_CHAN_HT40(chan)) {
1981 			if ((cur_bb_spur > -AR_SPUR_FEEQ_BOUND_HT40) &&
1982 			    (cur_bb_spur < AR_SPUR_FEEQ_BOUND_HT40)) {
1983 				bb_spur = cur_bb_spur;
1986 		} else if ((cur_bb_spur > -AR_SPUR_FEEQ_BOUND_HT20) &&
1987 			   (cur_bb_spur < AR_SPUR_FEEQ_BOUND_HT20)) {
1988 			bb_spur = cur_bb_spur;
/* no spur in range: make sure the CCK MRC clock override is off */
1993 	if (AR_NO_SPUR == bb_spur) {
1994 		REG_CLR_BIT(ah, AR_PHY_FORCE_CLKEN_CCK,
1995 			    AR_PHY_FORCE_CLKEN_CCK_MRC_MUX);
1998 		REG_CLR_BIT(ah, AR_PHY_FORCE_CLKEN_CCK,
1999 			    AR_PHY_FORCE_CLKEN_CCK_MRC_MUX);
2002 	bin = bb_spur * 320;
2004 	tmp = REG_READ(ah, AR_PHY_TIMING_CTRL4(0));
/* enable spur RSSI measurement, spur filter and both bin masks */
2006 	newVal = tmp | (AR_PHY_TIMING_CTRL4_ENABLE_SPUR_RSSI |
2007 			AR_PHY_TIMING_CTRL4_ENABLE_SPUR_FILTER |
2008 			AR_PHY_TIMING_CTRL4_ENABLE_CHAN_MASK |
2009 			AR_PHY_TIMING_CTRL4_ENABLE_PILOT_MASK);
2010 	REG_WRITE(ah, AR_PHY_TIMING_CTRL4(0), newVal);
2012 	newVal = (AR_PHY_SPUR_REG_MASK_RATE_CNTL |
2013 		  AR_PHY_SPUR_REG_ENABLE_MASK_PPM |
2014 		  AR_PHY_SPUR_REG_MASK_RATE_SELECT |
2015 		  AR_PHY_SPUR_REG_ENABLE_VIT_SPUR_RSSI |
2016 		  SM(SPUR_RSSI_THRESH, AR_PHY_SPUR_REG_SPUR_RSSI_THRESH));
2017 	REG_WRITE(ah, AR_PHY_SPUR_REG, newVal);
/* HT40: pick the sub-channel the spur falls in and re-center it */
2019 	if (IS_CHAN_HT40(chan)) {
2021 			spur_subchannel_sd = 1;
2022 			bb_spur_off = bb_spur + 10;
2024 			spur_subchannel_sd = 0;
2025 			bb_spur_off = bb_spur - 10;
2028 		spur_subchannel_sd = 0;
2029 		bb_spur_off = bb_spur;
2032 	if (IS_CHAN_HT40(chan))
2034 			((bb_spur * 262144) /
2035 			 10) & AR_PHY_TIMING11_SPUR_DELTA_PHASE;
2038 			((bb_spur * 524288) /
2039 			 10) & AR_PHY_TIMING11_SPUR_DELTA_PHASE;
/* denominator is the band-specific OFDM clock rate (MHz) */
2041 	denominator = IS_CHAN_2GHZ(chan) ? 44 : 40;
2042 	spur_freq_sd = ((bb_spur_off * 2048) / denominator) & 0x3ff;
2044 	newVal = (AR_PHY_TIMING11_USE_SPUR_IN_AGC |
2045 		  SM(spur_freq_sd, AR_PHY_TIMING11_SPUR_FREQ_SD) |
2046 		  SM(spur_delta_phase, AR_PHY_TIMING11_SPUR_DELTA_PHASE));
2047 	REG_WRITE(ah, AR_PHY_TIMING11, newVal);
2049 	newVal = spur_subchannel_sd << AR_PHY_SFCORR_SPUR_SUBCHNL_SD_S;
2050 	REG_WRITE(ah, AR_PHY_SFCORR_EXT, newVal);
/* build the 4 x 30-bit pilot/channel masks around the spur bin */
2056 	for (i = 0; i < 4; i++) {
2060 		for (bp = 0; bp < 30; bp++) {
2061 			if ((cur_bin > lower) && (cur_bin < upper)) {
2062 				pilot_mask = pilot_mask | 0x1 << bp;
2063 				chan_mask = chan_mask | 0x1 << bp;
2068 		REG_WRITE(ah, pilot_mask_reg[i], pilot_mask);
2069 		REG_WRITE(ah, chan_mask_reg[i], chan_mask);
/* build the Viterbi bin masks (mask_m below, mask_p above the spur) */
2072 	cur_vit_mask = 6100;
2076 	for (i = 0; i < 123; i++) {
2077 		if ((cur_vit_mask > lower) && (cur_vit_mask < upper)) {
2079 			/* workaround for gcc bug #37014 */
2080 			volatile int tmp_v = abs(cur_vit_mask - bin);
2086 			if (cur_vit_mask < 0)
2087 				mask_m[abs(cur_vit_mask / 100)] = mask_amt;
2089 				mask_p[cur_vit_mask / 100] = mask_amt;
2091 		cur_vit_mask -= 100;
/* pack the 2-bit mask entries into the hardware mask registers */
2094 	tmp_mask = (mask_m[46] << 30) | (mask_m[47] << 28)
2095 		| (mask_m[48] << 26) | (mask_m[49] << 24)
2096 		| (mask_m[50] << 22) | (mask_m[51] << 20)
2097 		| (mask_m[52] << 18) | (mask_m[53] << 16)
2098 		| (mask_m[54] << 14) | (mask_m[55] << 12)
2099 		| (mask_m[56] << 10) | (mask_m[57] << 8)
2100 		| (mask_m[58] << 6) | (mask_m[59] << 4)
2101 		| (mask_m[60] << 2) | (mask_m[61] << 0);
2102 	REG_WRITE(ah, AR_PHY_BIN_MASK_1, tmp_mask);
2103 	REG_WRITE(ah, AR_PHY_VIT_MASK2_M_46_61, tmp_mask);
2105 	tmp_mask = (mask_m[31] << 28)
2106 		| (mask_m[32] << 26) | (mask_m[33] << 24)
2107 		| (mask_m[34] << 22) | (mask_m[35] << 20)
2108 		| (mask_m[36] << 18) | (mask_m[37] << 16)
/*
 * NOTE(review): mask_m[48] below breaks the 31..45 sequence — looks
 * like a typo for mask_m[38]; it matches the vendor HAL, so confirm
 * against hardware documentation before changing.
 */
2109 		| (mask_m[48] << 14) | (mask_m[39] << 12)
2110 		| (mask_m[40] << 10) | (mask_m[41] << 8)
2111 		| (mask_m[42] << 6) | (mask_m[43] << 4)
2112 		| (mask_m[44] << 2) | (mask_m[45] << 0);
2113 	REG_WRITE(ah, AR_PHY_BIN_MASK_2, tmp_mask);
2114 	REG_WRITE(ah, AR_PHY_MASK2_M_31_45, tmp_mask);
/*
 * NOTE(review): indices 16/18/20/22/24 are each used twice where the
 * odd indices 17/19/21/23 would complete the sequence — also present
 * in the vendor HAL; verify intent before "fixing".
 */
2116 	tmp_mask = (mask_m[16] << 30) | (mask_m[16] << 28)
2117 		| (mask_m[18] << 26) | (mask_m[18] << 24)
2118 		| (mask_m[20] << 22) | (mask_m[20] << 20)
2119 		| (mask_m[22] << 18) | (mask_m[22] << 16)
2120 		| (mask_m[24] << 14) | (mask_m[24] << 12)
2121 		| (mask_m[25] << 10) | (mask_m[26] << 8)
2122 		| (mask_m[27] << 6) | (mask_m[28] << 4)
2123 		| (mask_m[29] << 2) | (mask_m[30] << 0);
2124 	REG_WRITE(ah, AR_PHY_BIN_MASK_3, tmp_mask);
2125 	REG_WRITE(ah, AR_PHY_MASK2_M_16_30, tmp_mask);
2127 	tmp_mask = (mask_m[0] << 30) | (mask_m[1] << 28)
2128 		| (mask_m[2] << 26) | (mask_m[3] << 24)
2129 		| (mask_m[4] << 22) | (mask_m[5] << 20)
2130 		| (mask_m[6] << 18) | (mask_m[7] << 16)
2131 		| (mask_m[8] << 14) | (mask_m[9] << 12)
2132 		| (mask_m[10] << 10) | (mask_m[11] << 8)
2133 		| (mask_m[12] << 6) | (mask_m[13] << 4)
2134 		| (mask_m[14] << 2) | (mask_m[15] << 0);
2135 	REG_WRITE(ah, AR_PHY_MASK_CTL, tmp_mask);
2136 	REG_WRITE(ah, AR_PHY_MASK2_M_00_15, tmp_mask);
2138 	tmp_mask = (mask_p[15] << 28)
2139 		| (mask_p[14] << 26) | (mask_p[13] << 24)
2140 		| (mask_p[12] << 22) | (mask_p[11] << 20)
2141 		| (mask_p[10] << 18) | (mask_p[9] << 16)
2142 		| (mask_p[8] << 14) | (mask_p[7] << 12)
2143 		| (mask_p[6] << 10) | (mask_p[5] << 8)
2144 		| (mask_p[4] << 6) | (mask_p[3] << 4)
2145 		| (mask_p[2] << 2) | (mask_p[1] << 0);
2146 	REG_WRITE(ah, AR_PHY_BIN_MASK2_1, tmp_mask);
2147 	REG_WRITE(ah, AR_PHY_MASK2_P_15_01, tmp_mask);
2149 	tmp_mask = (mask_p[30] << 28)
2150 		| (mask_p[29] << 26) | (mask_p[28] << 24)
2151 		| (mask_p[27] << 22) | (mask_p[26] << 20)
2152 		| (mask_p[25] << 18) | (mask_p[24] << 16)
2153 		| (mask_p[23] << 14) | (mask_p[22] << 12)
2154 		| (mask_p[21] << 10) | (mask_p[20] << 8)
2155 		| (mask_p[19] << 6) | (mask_p[18] << 4)
2156 		| (mask_p[17] << 2) | (mask_p[16] << 0);
2157 	REG_WRITE(ah, AR_PHY_BIN_MASK2_2, tmp_mask);
2158 	REG_WRITE(ah, AR_PHY_MASK2_P_30_16, tmp_mask);
2160 	tmp_mask = (mask_p[45] << 28)
2161 		| (mask_p[44] << 26) | (mask_p[43] << 24)
2162 		| (mask_p[42] << 22) | (mask_p[41] << 20)
2163 		| (mask_p[40] << 18) | (mask_p[39] << 16)
2164 		| (mask_p[38] << 14) | (mask_p[37] << 12)
2165 		| (mask_p[36] << 10) | (mask_p[35] << 8)
2166 		| (mask_p[34] << 6) | (mask_p[33] << 4)
2167 		| (mask_p[32] << 2) | (mask_p[31] << 0);
2168 	REG_WRITE(ah, AR_PHY_BIN_MASK2_3, tmp_mask);
2169 	REG_WRITE(ah, AR_PHY_MASK2_P_45_31, tmp_mask);
2171 	tmp_mask = (mask_p[61] << 30) | (mask_p[60] << 28)
2172 		| (mask_p[59] << 26) | (mask_p[58] << 24)
2173 		| (mask_p[57] << 22) | (mask_p[56] << 20)
2174 		| (mask_p[55] << 18) | (mask_p[54] << 16)
2175 		| (mask_p[53] << 14) | (mask_p[52] << 12)
2176 		| (mask_p[51] << 10) | (mask_p[50] << 8)
2177 		| (mask_p[49] << 6) | (mask_p[48] << 4)
2178 		| (mask_p[47] << 2) | (mask_p[46] << 0);
2179 	REG_WRITE(ah, AR_PHY_BIN_MASK2_4, tmp_mask);
2180 	REG_WRITE(ah, AR_PHY_MASK2_P_61_45, tmp_mask);
/*
 * Legacy (pre-AR9280) spur mitigation.  Same structure as the 9280
 * variant but the spur offset is computed against chan->channel*10
 * directly and there is no HT40 sub-channel handling.
 * NOTE(review): extraction dropped declarations, loop bodies, braces
 * and the early-return for the no-spur case.
 */
2183 static void ath9k_hw_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan)
2185 	int bb_spur = AR_NO_SPUR;
2188 	int spur_delta_phase;
2190 	int upper, lower, cur_vit_mask;
2193 	int pilot_mask_reg[4] = { AR_PHY_TIMING7, AR_PHY_TIMING8,
2194 				  AR_PHY_PILOT_MASK_01_30, AR_PHY_PILOT_MASK_31_60
2196 	int chan_mask_reg[4] = { AR_PHY_TIMING9, AR_PHY_TIMING10,
2197 				  AR_PHY_CHANNEL_MASK_01_30, AR_PHY_CHANNEL_MASK_31_60
2199 	int inc[4] = { 0, 100, 0, 0 };
2206 	bool is2GHz = IS_CHAN_2GHZ(chan);
2208 	memset(&mask_m, 0, sizeof(int8_t) * 123);
2209 	memset(&mask_p, 0, sizeof(int8_t) * 123);
/* find the first EEPROM spur within ±9.5 MHz of the channel */
2211 	for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
2212 		cur_bb_spur = ah->eep_ops->get_spur_channel(ah, i, is2GHz);
2213 		if (AR_NO_SPUR == cur_bb_spur)
2215 		cur_bb_spur = cur_bb_spur - (chan->channel * 10);
2216 		if ((cur_bb_spur > -95) && (cur_bb_spur < 95)) {
2217 			bb_spur = cur_bb_spur;
2222 	if (AR_NO_SPUR == bb_spur)
/* enable spur RSSI measurement, spur filter and both bin masks */
2227 	tmp = REG_READ(ah, AR_PHY_TIMING_CTRL4(0));
2228 	new = tmp | (AR_PHY_TIMING_CTRL4_ENABLE_SPUR_RSSI |
2229 		     AR_PHY_TIMING_CTRL4_ENABLE_SPUR_FILTER |
2230 		     AR_PHY_TIMING_CTRL4_ENABLE_CHAN_MASK |
2231 		     AR_PHY_TIMING_CTRL4_ENABLE_PILOT_MASK);
2233 	REG_WRITE(ah, AR_PHY_TIMING_CTRL4(0), new);
2235 	new = (AR_PHY_SPUR_REG_MASK_RATE_CNTL |
2236 	       AR_PHY_SPUR_REG_ENABLE_MASK_PPM |
2237 	       AR_PHY_SPUR_REG_MASK_RATE_SELECT |
2238 	       AR_PHY_SPUR_REG_ENABLE_VIT_SPUR_RSSI |
2239 	       SM(SPUR_RSSI_THRESH, AR_PHY_SPUR_REG_SPUR_RSSI_THRESH));
2240 	REG_WRITE(ah, AR_PHY_SPUR_REG, new);
2242 	spur_delta_phase = ((bb_spur * 524288) / 100) &
2243 		AR_PHY_TIMING11_SPUR_DELTA_PHASE;
/* denominator is 10x the band-specific OFDM clock rate */
2245 	denominator = IS_CHAN_2GHZ(chan) ? 440 : 400;
2246 	spur_freq_sd = ((bb_spur * 2048) / denominator) & 0x3ff;
2248 	new = (AR_PHY_TIMING11_USE_SPUR_IN_AGC |
2249 	       SM(spur_freq_sd, AR_PHY_TIMING11_SPUR_FREQ_SD) |
2250 	       SM(spur_delta_phase, AR_PHY_TIMING11_SPUR_DELTA_PHASE));
2251 	REG_WRITE(ah, AR_PHY_TIMING11, new);
/* build the 4 x 30-bit pilot/channel masks around the spur bin */
2257 	for (i = 0; i < 4; i++) {
2261 		for (bp = 0; bp < 30; bp++) {
2262 			if ((cur_bin > lower) && (cur_bin < upper)) {
2263 				pilot_mask = pilot_mask | 0x1 << bp;
2264 				chan_mask = chan_mask | 0x1 << bp;
2269 		REG_WRITE(ah, pilot_mask_reg[i], pilot_mask);
2270 		REG_WRITE(ah, chan_mask_reg[i], chan_mask);
/* build the Viterbi bin masks (mask_m below, mask_p above the spur) */
2273 	cur_vit_mask = 6100;
2277 	for (i = 0; i < 123; i++) {
2278 		if ((cur_vit_mask > lower) && (cur_vit_mask < upper)) {
2280 			/* workaround for gcc bug #37014 */
2281 			volatile int tmp_v = abs(cur_vit_mask - bin);
2287 			if (cur_vit_mask < 0)
2288 				mask_m[abs(cur_vit_mask / 100)] = mask_amt;
2290 				mask_p[cur_vit_mask / 100] = mask_amt;
2292 		cur_vit_mask -= 100;
/* pack the 2-bit mask entries into the hardware mask registers */
2295 	tmp_mask = (mask_m[46] << 30) | (mask_m[47] << 28)
2296 		| (mask_m[48] << 26) | (mask_m[49] << 24)
2297 		| (mask_m[50] << 22) | (mask_m[51] << 20)
2298 		| (mask_m[52] << 18) | (mask_m[53] << 16)
2299 		| (mask_m[54] << 14) | (mask_m[55] << 12)
2300 		| (mask_m[56] << 10) | (mask_m[57] << 8)
2301 		| (mask_m[58] << 6) | (mask_m[59] << 4)
2302 		| (mask_m[60] << 2) | (mask_m[61] << 0);
2303 	REG_WRITE(ah, AR_PHY_BIN_MASK_1, tmp_mask);
2304 	REG_WRITE(ah, AR_PHY_VIT_MASK2_M_46_61, tmp_mask);
2306 	tmp_mask = (mask_m[31] << 28)
2307 		| (mask_m[32] << 26) | (mask_m[33] << 24)
2308 		| (mask_m[34] << 22) | (mask_m[35] << 20)
2309 		| (mask_m[36] << 18) | (mask_m[37] << 16)
/*
 * NOTE(review): mask_m[48] below breaks the 31..45 sequence — looks
 * like a typo for mask_m[38]; same pattern exists in the 9280 variant
 * and the vendor HAL, so confirm before changing.
 */
2310 		| (mask_m[48] << 14) | (mask_m[39] << 12)
2311 		| (mask_m[40] << 10) | (mask_m[41] << 8)
2312 		| (mask_m[42] << 6) | (mask_m[43] << 4)
2313 		| (mask_m[44] << 2) | (mask_m[45] << 0);
2314 	REG_WRITE(ah, AR_PHY_BIN_MASK_2, tmp_mask);
2315 	REG_WRITE(ah, AR_PHY_MASK2_M_31_45, tmp_mask);
/*
 * NOTE(review): indices 16/18/20/22/24 doubled where odd indices
 * 17/19/21/23 would complete the sequence — matches the 9280 variant;
 * verify intent before "fixing".
 */
2317 	tmp_mask = (mask_m[16] << 30) | (mask_m[16] << 28)
2318 		| (mask_m[18] << 26) | (mask_m[18] << 24)
2319 		| (mask_m[20] << 22) | (mask_m[20] << 20)
2320 		| (mask_m[22] << 18) | (mask_m[22] << 16)
2321 		| (mask_m[24] << 14) | (mask_m[24] << 12)
2322 		| (mask_m[25] << 10) | (mask_m[26] << 8)
2323 		| (mask_m[27] << 6) | (mask_m[28] << 4)
2324 		| (mask_m[29] << 2) | (mask_m[30] << 0);
2325 	REG_WRITE(ah, AR_PHY_BIN_MASK_3, tmp_mask);
2326 	REG_WRITE(ah, AR_PHY_MASK2_M_16_30, tmp_mask);
2328 	tmp_mask = (mask_m[0] << 30) | (mask_m[1] << 28)
2329 		| (mask_m[2] << 26) | (mask_m[3] << 24)
2330 		| (mask_m[4] << 22) | (mask_m[5] << 20)
2331 		| (mask_m[6] << 18) | (mask_m[7] << 16)
2332 		| (mask_m[8] << 14) | (mask_m[9] << 12)
2333 		| (mask_m[10] << 10) | (mask_m[11] << 8)
2334 		| (mask_m[12] << 6) | (mask_m[13] << 4)
2335 		| (mask_m[14] << 2) | (mask_m[15] << 0);
2336 	REG_WRITE(ah, AR_PHY_MASK_CTL, tmp_mask);
2337 	REG_WRITE(ah, AR_PHY_MASK2_M_00_15, tmp_mask);
2339 	tmp_mask = (mask_p[15] << 28)
2340 		| (mask_p[14] << 26) | (mask_p[13] << 24)
2341 		| (mask_p[12] << 22) | (mask_p[11] << 20)
2342 		| (mask_p[10] << 18) | (mask_p[9] << 16)
2343 		| (mask_p[8] << 14) | (mask_p[7] << 12)
2344 		| (mask_p[6] << 10) | (mask_p[5] << 8)
2345 		| (mask_p[4] << 6) | (mask_p[3] << 4)
2346 		| (mask_p[2] << 2) | (mask_p[1] << 0);
2347 	REG_WRITE(ah, AR_PHY_BIN_MASK2_1, tmp_mask);
2348 	REG_WRITE(ah, AR_PHY_MASK2_P_15_01, tmp_mask);
2350 	tmp_mask = (mask_p[30] << 28)
2351 		| (mask_p[29] << 26) | (mask_p[28] << 24)
2352 		| (mask_p[27] << 22) | (mask_p[26] << 20)
2353 		| (mask_p[25] << 18) | (mask_p[24] << 16)
2354 		| (mask_p[23] << 14) | (mask_p[22] << 12)
2355 		| (mask_p[21] << 10) | (mask_p[20] << 8)
2356 		| (mask_p[19] << 6) | (mask_p[18] << 4)
2357 		| (mask_p[17] << 2) | (mask_p[16] << 0);
2358 	REG_WRITE(ah, AR_PHY_BIN_MASK2_2, tmp_mask);
2359 	REG_WRITE(ah, AR_PHY_MASK2_P_30_16, tmp_mask);
2361 	tmp_mask = (mask_p[45] << 28)
2362 		| (mask_p[44] << 26) | (mask_p[43] << 24)
2363 		| (mask_p[42] << 22) | (mask_p[41] << 20)
2364 		| (mask_p[40] << 18) | (mask_p[39] << 16)
2365 		| (mask_p[38] << 14) | (mask_p[37] << 12)
2366 		| (mask_p[36] << 10) | (mask_p[35] << 8)
2367 		| (mask_p[34] << 6) | (mask_p[33] << 4)
2368 		| (mask_p[32] << 2) | (mask_p[31] << 0);
2369 	REG_WRITE(ah, AR_PHY_BIN_MASK2_3, tmp_mask);
2370 	REG_WRITE(ah, AR_PHY_MASK2_P_45_31, tmp_mask);
2372 	tmp_mask = (mask_p[61] << 30) | (mask_p[60] << 28)
2373 		| (mask_p[59] << 26) | (mask_p[58] << 24)
2374 		| (mask_p[57] << 22) | (mask_p[56] << 20)
2375 		| (mask_p[55] << 18) | (mask_p[54] << 16)
2376 		| (mask_p[53] << 14) | (mask_p[52] << 12)
2377 		| (mask_p[51] << 10) | (mask_p[50] << 8)
2378 		| (mask_p[49] << 6) | (mask_p[48] << 4)
2379 		| (mask_p[47] << 2) | (mask_p[46] << 0);
2380 	REG_WRITE(ah, AR_PHY_BIN_MASK2_4, tmp_mask);
2381 	REG_WRITE(ah, AR_PHY_MASK2_P_61_45, tmp_mask);
2384 static void ath9k_enable_rfkill(struct ath_hw *ah)
2386 REG_SET_BIT(ah, AR_GPIO_INPUT_EN_VAL,
2387 AR_GPIO_INPUT_EN_VAL_RFSILENT_BB);
2389 REG_CLR_BIT(ah, AR_GPIO_INPUT_MUX2,
2390 AR_GPIO_INPUT_MUX2_RFSILENT);
2392 ath9k_hw_cfg_gpio_input(ah, ah->rfkill_gpio);
2393 REG_SET_BIT(ah, AR_PHY_TEST, RFSILENT_BB);
/*
 * ath9k_hw_reset() - full hardware reset and (re)programming for @chan.
 * @ah: hardware state
 * @chan: target channel
 * @bChannelChange: true when this reset is only for a channel change;
 *	a faster "channel change" path may then be taken.
 *
 * Wakes the chip, optionally takes the fast channel-change path,
 * otherwise performs a chip reset followed by INI processing, crypto/MFP
 * setup, MAC address and BSSID programming, RF synthesizer programming,
 * queue and interrupt initialization, baseband init and calibration.
 * Returns an int status (error paths are partially elided in this
 * extraction — TODO confirm exact return values against the full file).
 *
 * NOTE(review): extraction is lossy; local declarations (saveDefAntenna,
 * macStaId1, tsf, saveLedState, mask) and several braces/returns are not
 * visible.  Code is kept byte-identical.
 */
2396 int ath9k_hw_reset(struct ath_hw *ah, struct ath9k_channel *chan,
2397 bool bChannelChange)
2399 struct ath_common *common = ath9k_hw_common(ah);
2401 struct ath9k_channel *curchan = ah->curchan;
2405 int i, rx_chainmask, r;
2407 ah->txchainmask = common->tx_chainmask;
2408 ah->rxchainmask = common->rx_chainmask;
/* The chip must be awake before any register access. */
2410 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
/* Capture the current channel's noise floor before it is lost. */
2413 if (curchan && !ah->chip_fullsleep)
2414 ath9k_hw_getnf(ah, curchan);
/*
 * Fast channel-change path: only for a pure channel change with
 * identical channel flags, when not coming out of full sleep, and not
 * on AR9280 or 5 MHz-spaced channels, which require a full reset.
 */
2416 if (bChannelChange &&
2417 (ah->chip_fullsleep != true) &&
2418 (ah->curchan != NULL) &&
2419 (chan->channel != ah->curchan->channel) &&
2420 ((chan->channelFlags & CHANNEL_ALL) ==
2421 (ah->curchan->channelFlags & CHANNEL_ALL)) &&
2422 !(AR_SREV_9280(ah) || IS_CHAN_A_5MHZ_SPACED(chan) ||
2423 IS_CHAN_A_5MHZ_SPACED(ah->curchan))) {
2425 if (ath9k_hw_channel_change(ah, chan)) {
2426 ath9k_hw_loadnf(ah, ah->curchan);
2427 ath9k_hw_start_nfcal(ah);
/* Save state that must survive the full chip reset below. */
2432 saveDefAntenna = REG_READ(ah, AR_DEF_ANTENNA);
2433 if (saveDefAntenna == 0)
2436 macStaId1 = REG_READ(ah, AR_STA_ID1) & AR_STA_ID1_BASE_RATE_11B;
2438 /* For chips on which RTC reset is done, save TSF before it gets cleared */
2439 if (AR_SREV_9280(ah) && ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL))
2440 tsf = ath9k_hw_gettsf64(ah);
2442 saveLedState = REG_READ(ah, AR_CFG_LED) &
2443 (AR_CFG_LED_ASSOC_CTL | AR_CFG_LED_MODE_SEL |
2444 AR_CFG_LED_BLINK_THRESH_SEL | AR_CFG_LED_BLINK_SLOW);
2446 ath9k_hw_mark_phy_inactive(ah);
/* AR9271 (USB target) first init: reset the RF before the chip reset. */
2448 if (AR_SREV_9271(ah) && ah->htc_reset_init) {
2450 AR9271_RESET_POWER_DOWN_CONTROL,
2451 AR9271_RADIO_RF_RST);
2455 if (!ath9k_hw_chip_reset(ah, chan)) {
2456 ath_print(common, ATH_DBG_FATAL, "Chip reset failed\n");
2460 if (AR_SREV_9271(ah) && ah->htc_reset_init) {
2461 ah->htc_reset_init = false;
2463 AR9271_RESET_POWER_DOWN_CONTROL,
2464 AR9271_GATE_MAC_CTL);
/* Restore the TSF saved above, if one was taken. */
2469 if (tsf && AR_SREV_9280(ah) && ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL))
2470 ath9k_hw_settsf64(ah, tsf);
2472 if (AR_SREV_9280_10_OR_LATER(ah))
2473 REG_SET_BIT(ah, AR_GPIO_INPUT_EN_VAL, AR_GPIO_JTAG_DISABLE);
2475 if (AR_SREV_9287_12_OR_LATER(ah)) {
2476 /* Enable ASYNC FIFO */
2477 REG_SET_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
2478 AR_MAC_PCU_ASYNC_FIFO_REG3_DATAPATH_SEL);
2479 REG_SET_BIT(ah, AR_PHY_MODE, AR_PHY_MODE_ASYNCFIFO);
2480 REG_CLR_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
2481 AR_MAC_PCU_ASYNC_FIFO_REG3_SOFT_RESET);
2482 REG_SET_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
2483 AR_MAC_PCU_ASYNC_FIFO_REG3_SOFT_RESET);
/* Load mode/channel-dependent register initialization values. */
2485 r = ath9k_hw_process_ini(ah, chan);
2489 /* Setup MFP options for CCMP */
2490 if (AR_SREV_9280_20_OR_LATER(ah)) {
2491 /* Mask Retry(b11), PwrMgt(b12), MoreData(b13) to 0 in mgmt
2492 * frames when constructing CCMP AAD. */
2493 REG_RMW_FIELD(ah, AR_AES_MUTE_MASK1, AR_AES_MUTE_MASK1_FC_MGMT,
2495 ah->sw_mgmt_crypto = false;
2496 } else if (AR_SREV_9160_10_OR_LATER(ah)) {
2497 /* Disable hardware crypto for management frames */
2498 REG_CLR_BIT(ah, AR_PCU_MISC_MODE2,
2499 AR_PCU_MISC_MODE2_MGMT_CRYPTO_ENABLE);
2500 REG_SET_BIT(ah, AR_PCU_MISC_MODE2,
2501 AR_PCU_MISC_MODE2_NO_CRYPTO_FOR_NON_DATA_PKT);
2502 ah->sw_mgmt_crypto = true;
2504 ah->sw_mgmt_crypto = true;
2506 if (IS_CHAN_OFDM(chan) || IS_CHAN_HT(chan))
2507 ath9k_hw_set_delta_slope(ah, chan);
/* Spur mitigation: AR9280+ has its own chip-specific variant. */
2509 if (AR_SREV_9280_10_OR_LATER(ah))
2510 ath9k_hw_9280_spur_mitigate(ah, chan);
2512 ath9k_hw_spur_mitigate(ah, chan);
2514 ah->eep_ops->set_board_values(ah, chan);
2516 ath9k_hw_decrease_chain_power(ah, chan);
/* Program the station MAC address (low word, then high half + flags). */
2518 REG_WRITE(ah, AR_STA_ID0, get_unaligned_le32(common->macaddr));
2519 REG_WRITE(ah, AR_STA_ID1, get_unaligned_le16(common->macaddr + 4)
2521 | AR_STA_ID1_RTS_USE_DEF
2523 ack_6mb ? AR_STA_ID1_ACKCTS_6MB : 0)
2524 | ah->sta_id1_defaults);
2525 ath9k_hw_set_operating_mode(ah, ah->opmode);
2527 ath_hw_setbssidmask(common);
2529 REG_WRITE(ah, AR_DEF_ANTENNA, saveDefAntenna);
2531 ath9k_hw_write_associd(ah);
/* Acknowledge all pending interrupts. */
2533 REG_WRITE(ah, AR_ISR, ~0);
2535 REG_WRITE(ah, AR_RSSI_THR, INIT_RSSI_THR);
/* Program the RF synthesizer for the target channel. */
2537 r = ah->ath9k_hw_rf_set_freq(ah, chan);
/* Give each DCU its own QCU mask, then reset every tx queue. */
2541 for (i = 0; i < AR_NUM_DCU; i++)
2542 REG_WRITE(ah, AR_DQCUMASK(i), 1 << i);
2545 for (i = 0; i < ah->caps.total_queues; i++)
2546 ath9k_hw_resettxqueue(ah, i);
2548 ath9k_hw_init_interrupt_masks(ah, ah->opmode);
2549 ath9k_hw_init_qos(ah);
2551 if (ah->caps.hw_caps & ATH9K_HW_CAP_RFSILENT)
2552 ath9k_enable_rfkill(ah);
2554 ath9k_hw_init_user_settings(ah);
/* ASYNC FIFO timing constants for AR9287 1.2 and later. */
2556 if (AR_SREV_9287_12_OR_LATER(ah)) {
2557 REG_WRITE(ah, AR_D_GBL_IFS_SIFS,
2558 AR_D_GBL_IFS_SIFS_ASYNC_FIFO_DUR);
2559 REG_WRITE(ah, AR_D_GBL_IFS_SLOT,
2560 AR_D_GBL_IFS_SLOT_ASYNC_FIFO_DUR);
2561 REG_WRITE(ah, AR_D_GBL_IFS_EIFS,
2562 AR_D_GBL_IFS_EIFS_ASYNC_FIFO_DUR);
2564 REG_WRITE(ah, AR_TIME_OUT, AR_TIME_OUT_ACK_CTS_ASYNC_FIFO_DUR);
2565 REG_WRITE(ah, AR_USEC, AR_USEC_ASYNC_FIFO_DUR);
2567 REG_SET_BIT(ah, AR_MAC_PCU_LOGIC_ANALYZER,
2568 AR_MAC_PCU_LOGIC_ANALYZER_DISBUG20768);
2569 REG_RMW_FIELD(ah, AR_AHB_MODE, AR_AHB_CUSTOM_BURST_EN,
2570 AR_AHB_CUSTOM_BURST_ASYNC_FIFO_VAL);
2572 if (AR_SREV_9287_12_OR_LATER(ah)) {
2573 REG_SET_BIT(ah, AR_PCU_MISC_MODE2,
2574 AR_PCU_MISC_MODE2_ENABLE_AGGWEP);
2577 REG_WRITE(ah, AR_STA_ID1,
2578 REG_READ(ah, AR_STA_ID1) | AR_STA_ID1_PRESERVE_SEQNUM);
2580 ath9k_hw_set_dma(ah);
2582 REG_WRITE(ah, AR_OBS, 8);
/* RX interrupt mitigation thresholds. */
2584 if (ah->config.intr_mitigation) {
2585 REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_LAST, 500);
2586 REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_FIRST, 2000);
2589 ath9k_hw_init_bb(ah, chan);
2591 if (!ath9k_hw_init_cal(ah, chan))
/* 0x5 / 0x3 chainmasks need explicit RX/CAL chainmask programming. */
2594 rx_chainmask = ah->rxchainmask;
2595 if ((rx_chainmask == 0x5) || (rx_chainmask == 0x3)) {
2596 REG_WRITE(ah, AR_PHY_RX_CHAINMASK, rx_chainmask);
2597 REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, rx_chainmask);
/* Restore the LED state saved before the reset. */
2600 REG_WRITE(ah, AR_CFG_LED, saveLedState | AR_CFG_SCLK_32KHZ);
2603 * For big endian systems turn on swapping for descriptors
2605 if (AR_SREV_9100(ah)) {
2607 mask = REG_READ(ah, AR_CFG);
2608 if (mask & (AR_CFG_SWRB | AR_CFG_SWTB | AR_CFG_SWRG)) {
2609 ath_print(common, ATH_DBG_RESET,
2610 "CFG Byte Swap Set 0x%x\n", mask);
2613 INIT_CONFIG_STATUS | AR_CFG_SWRB | AR_CFG_SWTB;
2614 REG_WRITE(ah, AR_CFG, mask);
2615 ath_print(common, ATH_DBG_RESET,
2616 "Setting CFG 0x%x\n", REG_READ(ah, AR_CFG));
2619 /* Configure AR9271 target WLAN */
2620 if (AR_SREV_9271(ah))
2621 REG_WRITE(ah, AR_CFG, AR_CFG_SWRB | AR_CFG_SWTB);
2624 REG_WRITE(ah, AR_CFG, AR_CFG_SWTD | AR_CFG_SWRD);
/* Re-enable bluetooth coexistence if it was active before the reset. */
2628 if (ah->btcoex_hw.enabled)
2629 ath9k_hw_btcoex_enable(ah);
2633 EXPORT_SYMBOL(ath9k_hw_reset);
2635 /************************/
2636 /* Key Cache Management */
2637 /************************/
/*
 * ath9k_hw_keyreset() - clear one key cache slot.
 * @entry: key cache index, range-checked against caps.keycache_size.
 *
 * Zeroes the five key words, sets the type to CLR, and clears the MAC
 * address registers of @entry.  If the slot held a TKIP key and MIC
 * support is enabled, the companion MIC slot at @entry + 64 is zeroed
 * as well.  Returns bool (returns elided in this extraction).
 *
 * NOTE(review): "keychache" typo below is in a runtime log string and
 * therefore left untouched here.
 */
2639 bool ath9k_hw_keyreset(struct ath_hw *ah, u16 entry)
2643 if (entry >= ah->caps.keycache_size) {
2644 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
2645 "keychache entry %u out of range\n", entry);
/* Remember the old type so a TKIP slot's MIC companion is also wiped. */
2649 keyType = REG_READ(ah, AR_KEYTABLE_TYPE(entry));
2651 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), 0);
2652 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), 0);
2653 REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), 0);
2654 REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), 0);
2655 REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), 0);
2656 REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), AR_KEYTABLE_TYPE_CLR);
2657 REG_WRITE(ah, AR_KEYTABLE_MAC0(entry), 0);
2658 REG_WRITE(ah, AR_KEYTABLE_MAC1(entry), 0);
2660 if (keyType == AR_KEYTABLE_TYPE_TKIP && ATH9K_IS_MIC_ENABLED(ah)) {
2661 u16 micentry = entry + 64;
2663 REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), 0);
2664 REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), 0);
2665 REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), 0);
2666 REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), 0);
2672 EXPORT_SYMBOL(ath9k_hw_keyreset);
/*
 * ath9k_hw_keysetmac() - program the peer MAC address for a key slot.
 *
 * Splits @mac into the MAC0/MAC1 register pair and writes MAC1 with
 * AR_KEYTABLE_VALID set.  Bit 0 of macHi is folded into bit 31 of
 * macLo — presumably because the hardware stores the address shifted
 * right by one bit; the shift lines are not visible in this extraction,
 * so confirm against the full file.
 *
 * NOTE(review): the continuation of the macLo expression and the
 * NULL-@mac branch are missing from view.  "keychache" typo is in a
 * runtime string and left as-is.
 */
2674 bool ath9k_hw_keysetmac(struct ath_hw *ah, u16 entry, const u8 *mac)
2678 if (entry >= ah->caps.keycache_size) {
2679 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
2680 "keychache entry %u out of range\n", entry);
2685 macHi = (mac[5] << 8) | mac[4];
2686 macLo = (mac[3] << 24) |
2691 macLo |= (macHi & 1) << 31;
2696 REG_WRITE(ah, AR_KEYTABLE_MAC0(entry), macLo);
2697 REG_WRITE(ah, AR_KEYTABLE_MAC1(entry), macHi | AR_KEYTABLE_VALID);
2701 EXPORT_SYMBOL(ath9k_hw_keysetmac);
/*
 * ath9k_hw_set_keycache_entry() - install a key (and optional MIC key)
 * into the hardware key cache.
 * @entry: main key cache index (range-checked).
 * @k: key material; kv_type selects the hardware key type, kv_val holds
 *	the cipher key, kv_mic/kv_txmic hold TKIP Michael MIC keys.
 * @mac: peer MAC address, forwarded to ath9k_hw_keysetmac().
 *
 * Maps the ath9k cipher to an AR_KEYTABLE type (rejecting AES-CCM when
 * the chip lacks it and TKIP entries whose +64 MIC companion would fall
 * outside the cache), unpacks the key into the 32/16-bit register
 * pattern the hardware expects, and writes the registers in the
 * documented pairwise order.  Returns bool (returns elided here).
 *
 * NOTE(review): extraction is lossy (break statements, some closing
 * braces and a few lines are missing from view); code kept byte-identical.
 */
2703 bool ath9k_hw_set_keycache_entry(struct ath_hw *ah, u16 entry,
2704 const struct ath9k_keyval *k,
2707 const struct ath9k_hw_capabilities *pCap = &ah->caps;
2708 struct ath_common *common = ath9k_hw_common(ah);
2709 u32 key0, key1, key2, key3, key4;
2712 if (entry >= pCap->keycache_size) {
2713 ath_print(common, ATH_DBG_FATAL,
2714 "keycache entry %u out of range\n", entry);
/* Translate the driver cipher type into a hardware key-table type. */
2718 switch (k->kv_type) {
2719 case ATH9K_CIPHER_AES_OCB:
2720 keyType = AR_KEYTABLE_TYPE_AES;
2722 case ATH9K_CIPHER_AES_CCM:
2723 if (!(pCap->hw_caps & ATH9K_HW_CAP_CIPHER_AESCCM)) {
2724 ath_print(common, ATH_DBG_ANY,
2725 "AES-CCM not supported by mac rev 0x%x\n",
2726 ah->hw_version.macRev);
2729 keyType = AR_KEYTABLE_TYPE_CCM;
2731 case ATH9K_CIPHER_TKIP:
2732 keyType = AR_KEYTABLE_TYPE_TKIP;
/* TKIP needs the +64 MIC companion slot to exist too. */
2733 if (ATH9K_IS_MIC_ENABLED(ah)
2734 && entry + 64 >= pCap->keycache_size) {
2735 ath_print(common, ATH_DBG_ANY,
2736 "entry %u inappropriate for TKIP\n", entry);
2740 case ATH9K_CIPHER_WEP:
2741 if (k->kv_len < WLAN_KEY_LEN_WEP40) {
2742 ath_print(common, ATH_DBG_ANY,
2743 "WEP key length %u too small\n", k->kv_len);
2746 if (k->kv_len <= WLAN_KEY_LEN_WEP40)
2747 keyType = AR_KEYTABLE_TYPE_40;
2748 else if (k->kv_len <= WLAN_KEY_LEN_WEP104)
2749 keyType = AR_KEYTABLE_TYPE_104;
2751 keyType = AR_KEYTABLE_TYPE_128;
2753 case ATH9K_CIPHER_CLR:
2754 keyType = AR_KEYTABLE_TYPE_CLR;
2757 ath_print(common, ATH_DBG_FATAL,
2758 "cipher %u not supported\n", k->kv_type);
/* Unpack the key bytes into the hardware's 32/16-bit word layout. */
2762 key0 = get_unaligned_le32(k->kv_val + 0);
2763 key1 = get_unaligned_le16(k->kv_val + 4);
2764 key2 = get_unaligned_le32(k->kv_val + 6);
2765 key3 = get_unaligned_le16(k->kv_val + 10);
2766 key4 = get_unaligned_le32(k->kv_val + 12);
2767 if (k->kv_len <= WLAN_KEY_LEN_WEP104)
2771 * Note: Key cache registers access special memory area that requires
2772 * two 32-bit writes to actually update the values in the internal
2773 * memory. Consequently, the exact order and pairs used here must be
2777 if (keyType == AR_KEYTABLE_TYPE_TKIP && ATH9K_IS_MIC_ENABLED(ah)) {
2778 u16 micentry = entry + 64;
2781 * Write inverted key[47:0] first to avoid Michael MIC errors
2782 * on frames that could be sent or received at the same time.
2783 * The correct key will be written in the end once everything
2786 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), ~key0);
2787 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), ~key1);
2789 /* Write key[95:48] */
2790 REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), key2);
2791 REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), key3);
2793 /* Write key[127:96] and key type */
2794 REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), key4);
2795 REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), keyType);
2797 /* Write MAC address for the entry */
2798 (void) ath9k_hw_keysetmac(ah, entry, mac);
2800 if (ah->misc_mode & AR_PCU_MIC_NEW_LOC_ENA) {
2802 * TKIP uses two key cache entries:
2803 * Michael MIC TX/RX keys in the same key cache entry
2804 * (idx = main index + 64):
2805 * key0 [31:0] = RX key [31:0]
2806 * key1 [15:0] = TX key [31:16]
2807 * key1 [31:16] = reserved
2808 * key2 [31:0] = RX key [63:32]
2809 * key3 [15:0] = TX key [15:0]
2810 * key3 [31:16] = reserved
2811 * key4 [31:0] = TX key [63:32]
2813 u32 mic0, mic1, mic2, mic3, mic4;
2815 mic0 = get_unaligned_le32(k->kv_mic + 0);
2816 mic2 = get_unaligned_le32(k->kv_mic + 4);
2817 mic1 = get_unaligned_le16(k->kv_txmic + 2) & 0xffff;
2818 mic3 = get_unaligned_le16(k->kv_txmic + 0) & 0xffff;
2819 mic4 = get_unaligned_le32(k->kv_txmic + 4);
2821 /* Write RX[31:0] and TX[31:16] */
2822 REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), mic0);
2823 REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), mic1);
2825 /* Write RX[63:32] and TX[15:0] */
2826 REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), mic2);
2827 REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), mic3);
2829 /* Write TX[63:32] and keyType(reserved) */
2830 REG_WRITE(ah, AR_KEYTABLE_KEY4(micentry), mic4);
2831 REG_WRITE(ah, AR_KEYTABLE_TYPE(micentry),
2832 AR_KEYTABLE_TYPE_CLR);
2836 * TKIP uses four key cache entries (two for group
2838 * Michael MIC TX/RX keys are in different key cache
2839 * entries (idx = main index + 64 for TX and
2840 * main index + 32 + 96 for RX):
2841 * key0 [31:0] = TX/RX MIC key [31:0]
2842 * key1 [31:0] = reserved
2843 * key2 [31:0] = TX/RX MIC key [63:32]
2844 * key3 [31:0] = reserved
2845 * key4 [31:0] = reserved
2847 * Upper layer code will call this function separately
2848 * for TX and RX keys when these registers offsets are
2853 mic0 = get_unaligned_le32(k->kv_mic + 0);
2854 mic2 = get_unaligned_le32(k->kv_mic + 4);
2856 /* Write MIC key[31:0] */
2857 REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), mic0);
2858 REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), 0);
2860 /* Write MIC key[63:32] */
2861 REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), mic2);
2862 REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), 0);
2864 /* Write TX[63:32] and keyType(reserved) */
2865 REG_WRITE(ah, AR_KEYTABLE_KEY4(micentry), 0);
2866 REG_WRITE(ah, AR_KEYTABLE_TYPE(micentry),
2867 AR_KEYTABLE_TYPE_CLR);
2870 /* MAC address registers are reserved for the MIC entry */
2871 REG_WRITE(ah, AR_KEYTABLE_MAC0(micentry), 0);
2872 REG_WRITE(ah, AR_KEYTABLE_MAC1(micentry), 0);
2875 * Write the correct (un-inverted) key[47:0] last to enable
2876 * TKIP now that all other registers are set with correct
2879 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), key0);
2880 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), key1);
/* Non-TKIP (or MIC-disabled) path: straightforward paired writes. */
2882 /* Write key[47:0] */
2883 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), key0);
2884 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), key1);
2886 /* Write key[95:48] */
2887 REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), key2);
2888 REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), key3);
2890 /* Write key[127:96] and key type */
2891 REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), key4);
2892 REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), keyType);
2894 /* Write MAC address for the entry */
2895 (void) ath9k_hw_keysetmac(ah, entry, mac);
2900 EXPORT_SYMBOL(ath9k_hw_set_keycache_entry);
/*
 * ath9k_hw_keyisvalid() - report whether a key cache slot is in use.
 *
 * Reads AR_KEYTABLE_MAC1 for @entry and tests AR_KEYTABLE_VALID (the
 * bit set by ath9k_hw_keysetmac()).  Out-of-range entries are treated
 * as invalid.  NOTE(review): the return statements are elided in this
 * extraction; code kept byte-identical.
 */
2902 bool ath9k_hw_keyisvalid(struct ath_hw *ah, u16 entry)
2904 if (entry < ah->caps.keycache_size) {
2905 u32 val = REG_READ(ah, AR_KEYTABLE_MAC1(entry));
2906 if (val & AR_KEYTABLE_VALID)
2911 EXPORT_SYMBOL(ath9k_hw_keyisvalid);
2913 /******************************/
2914 /* Power Management (Chipset) */
2915 /******************************/
/*
 * ath9k_set_power_sleep() - put the chip into full sleep.
 *
 * Sets the power-save bit in STA_ID1, drops the RTC force-wake enable,
 * asserts AHB/host-interface reset on non-AR9100 parts, and clears the
 * RTC reset bit on non-AR5416 parts (continuation elided in this
 * extraction).  @setChip gating is presumably around the register
 * writes — the enclosing braces are not visible; confirm in full file.
 */
2917 static void ath9k_set_power_sleep(struct ath_hw *ah, int setChip)
2919 REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2921 REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE,
2922 AR_RTC_FORCE_WAKE_EN);
2923 if (!AR_SREV_9100(ah))
2924 REG_WRITE(ah, AR_RC, AR_RC_AHB | AR_RC_HOSTIF);
2926 if(!AR_SREV_5416(ah))
2927 REG_CLR_BIT(ah, (AR_RTC_RESET),
/*
 * ath9k_set_power_network_sleep() - enter network sleep.
 *
 * Sets the STA_ID1 power-save bit.  When the chip lacks auto-sleep
 * (ATH9K_HW_CAP_AUTOSLEEP), it instead arms wake-on-interrupt and
 * clears the force-wake enable so the RTC can doze between interrupts.
 */
2932 static void ath9k_set_power_network_sleep(struct ath_hw *ah, int setChip)
2934 REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2936 struct ath9k_hw_capabilities *pCap = &ah->caps;
2938 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) {
2939 REG_WRITE(ah, AR_RTC_FORCE_WAKE,
2940 AR_RTC_FORCE_WAKE_ON_INT);
2942 REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE,
2943 AR_RTC_FORCE_WAKE_EN);
/*
 * ath9k_hw_set_power_awake() - wake the chip from sleep.
 *
 * If the RTC reports SHUTDOWN, a POWER_ON reset and PLL init are done
 * first (AR9100 additionally sets a bit in AR_RTC_RESET).  Force-wake
 * is then asserted and the RTC status polled within a POWER_UP_TIME
 * budget until it reads ON; a timeout is logged as fatal.  On success
 * the STA_ID1 power-save bit is cleared.  Returns bool (returns and
 * udelay calls are elided in this extraction).
 */
2948 static bool ath9k_hw_set_power_awake(struct ath_hw *ah, int setChip)
2954 if ((REG_READ(ah, AR_RTC_STATUS) &
2955 AR_RTC_STATUS_M) == AR_RTC_STATUS_SHUTDOWN) {
2956 if (ath9k_hw_set_reset_reg(ah,
2957 ATH9K_RESET_POWER_ON) != true) {
2960 ath9k_hw_init_pll(ah, NULL);
2962 if (AR_SREV_9100(ah))
2963 REG_SET_BIT(ah, AR_RTC_RESET,
2966 REG_SET_BIT(ah, AR_RTC_FORCE_WAKE,
2967 AR_RTC_FORCE_WAKE_EN);
/* Poll for the RTC to report ON, re-asserting force-wake each pass. */
2970 for (i = POWER_UP_TIME / 50; i > 0; i--) {
2971 val = REG_READ(ah, AR_RTC_STATUS) & AR_RTC_STATUS_M;
2972 if (val == AR_RTC_STATUS_ON)
2975 REG_SET_BIT(ah, AR_RTC_FORCE_WAKE,
2976 AR_RTC_FORCE_WAKE_EN);
2979 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
2980 "Failed to wakeup in %uus\n",
2981 POWER_UP_TIME / 20);
2986 REG_CLR_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
/*
 * ath9k_hw_setpower() - switch between AWAKE / FULL_SLEEP / NETWORK_SLEEP.
 *
 * Short-circuits when the chip is already in @mode.  AWAKE delegates to
 * ath9k_hw_set_power_awake() (the only transition whose status can
 * fail); FULL_SLEEP also sets chip_fullsleep so the next reset skips
 * the fast channel-change path.  Records the new mode in ah->power_mode
 * and returns the status.
 */
2991 bool ath9k_hw_setpower(struct ath_hw *ah, enum ath9k_power_mode mode)
2993 struct ath_common *common = ath9k_hw_common(ah);
2994 int status = true, setChip = true;
2995 static const char *modes[] = {
3002 if (ah->power_mode == mode)
3005 ath_print(common, ATH_DBG_RESET, "%s -> %s\n",
3006 modes[ah->power_mode], modes[mode]);
3009 case ATH9K_PM_AWAKE:
3010 status = ath9k_hw_set_power_awake(ah, setChip);
3012 case ATH9K_PM_FULL_SLEEP:
3013 ath9k_set_power_sleep(ah, setChip);
3014 ah->chip_fullsleep = true;
3016 case ATH9K_PM_NETWORK_SLEEP:
3017 ath9k_set_power_network_sleep(ah, setChip);
3020 ath_print(common, ATH_DBG_FATAL,
3021 "Unknown power mode %u\n", mode);
3024 ah->power_mode = mode;
3028 EXPORT_SYMBOL(ath9k_hw_setpower);
3031 * Helper for ASPM support.
3033 * Disable PLL when in L0s as well as receiver clock when in L1.
3034 * This power saving option must be enabled through the SerDes.
3036 * Programming the SerDes must go through the same 288 bit serial shift
3037 * register as the other analog registers. Hence the 9 writes.
/*
 * ath9k_hw_configpcipowersave() - program PCIe SerDes / ASPM power saving.
 * @restore: non-zero when restoring after sleep; SerDes values are then
 *	left alone and only the AR_WA handling at the end applies.
 * @power_off: selects how the AR_WA (workaround) register is programmed
 *	for D3 (power off) versus coming back to D0.
 *
 * No-op unless the device is PCI Express; entirely skipped when
 * pcie_powersave_enable == 2 ("do not touch SerDes").  AR9280 2.0+
 * replays the chipset-specific iniPcieSerdes table; older parts get a
 * hard-coded 9-write SerDes sequence (see the comment block above this
 * function: the SerDes is a 288-bit serial shift register).
 */
3039 void ath9k_hw_configpcipowersave(struct ath_hw *ah, int restore, int power_off)
3044 if (ah->is_pciexpress != true)
3047 /* Do not touch SerDes registers */
3048 if (ah->config.pcie_powersave_enable == 2)
3051 /* Nothing to do on restore for 11N */
3053 if (AR_SREV_9280_20_OR_LATER(ah)) {
3055 * AR9280 2.0 or later chips use SerDes values from the
3056 * initvals.h initialized depending on chipset during
3059 for (i = 0; i < ah->iniPcieSerdes.ia_rows; i++) {
3060 REG_WRITE(ah, INI_RA(&ah->iniPcieSerdes, i, 0),
3061 INI_RA(&ah->iniPcieSerdes, i, 1));
3063 } else if (AR_SREV_9280(ah) &&
3064 (ah->hw_version.macRev == AR_SREV_REVISION_9280_10)) {
3065 REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fd00);
3066 REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
3068 /* RX shut off when elecidle is asserted */
3069 REG_WRITE(ah, AR_PCIE_SERDES, 0xa8000019);
3070 REG_WRITE(ah, AR_PCIE_SERDES, 0x13160820);
3071 REG_WRITE(ah, AR_PCIE_SERDES, 0xe5980560);
3073 /* Shut off CLKREQ active in L1 */
3074 if (ah->config.pcie_clock_req)
3075 REG_WRITE(ah, AR_PCIE_SERDES, 0x401deffc);
3077 REG_WRITE(ah, AR_PCIE_SERDES, 0x401deffd);
3079 REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
3080 REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
3081 REG_WRITE(ah, AR_PCIE_SERDES, 0x00043007);
3083 /* Load the new settings */
3084 REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
/* Fallback sequence for pre-AR9280 PCIe parts. */
3087 REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fc00);
3088 REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
3090 /* RX shut off when elecidle is asserted */
3091 REG_WRITE(ah, AR_PCIE_SERDES, 0x28000039);
3092 REG_WRITE(ah, AR_PCIE_SERDES, 0x53160824);
3093 REG_WRITE(ah, AR_PCIE_SERDES, 0xe5980579);
3096 * Ignore ah->ah_config.pcie_clock_req setting for
3099 REG_WRITE(ah, AR_PCIE_SERDES, 0x001defff);
3101 REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
3102 REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
3103 REG_WRITE(ah, AR_PCIE_SERDES, 0x000e3007);
3105 /* Load the new settings */
3106 REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
3111 /* set bit 19 to allow forcing of pcie core into L1 state */
3112 REG_SET_BIT(ah, AR_PCIE_PM_CTRL, AR_PCIE_PM_CTRL_ENA);
3114 /* Several PCIe workarounds to ensure proper behaviour */
3115 if (ah->config.pcie_waen) {
3116 val = ah->config.pcie_waen;
3118 val &= (~AR_WA_D3_L1_DISABLE);
3120 if (AR_SREV_9285(ah) || AR_SREV_9271(ah) ||
3122 val = AR9285_WA_DEFAULT;
3124 val &= (~AR_WA_D3_L1_DISABLE);
3125 } else if (AR_SREV_9280(ah)) {
3127 * On AR9280 chips bit 22 of 0x4004 needs to be
3128 * set otherwise card may disappear.
3130 val = AR9280_WA_DEFAULT;
3132 val &= (~AR_WA_D3_L1_DISABLE);
3134 val = AR_WA_DEFAULT;
3137 REG_WRITE(ah, AR_WA, val);
3142 * Set PCIe workaround bits
3143 * bit 14 in WA register (disable L1) should only
3144 * be set when device enters D3 and be cleared
3145 * when device comes back to D0.
3147 if (ah->config.pcie_waen) {
3148 if (ah->config.pcie_waen & AR_WA_D3_L1_DISABLE)
3149 REG_SET_BIT(ah, AR_WA, AR_WA_D3_L1_DISABLE);
3151 if (((AR_SREV_9285(ah) || AR_SREV_9271(ah) ||
3152 AR_SREV_9287(ah)) &&
3153 (AR9285_WA_DEFAULT & AR_WA_D3_L1_DISABLE)) ||
3154 (AR_SREV_9280(ah) &&
3155 (AR9280_WA_DEFAULT & AR_WA_D3_L1_DISABLE))) {
3156 REG_SET_BIT(ah, AR_WA, AR_WA_D3_L1_DISABLE);
3161 EXPORT_SYMBOL(ath9k_hw_configpcipowersave);
3163 /**********************/
3164 /* Interrupt Handling */
3165 /**********************/
/*
 * ath9k_hw_intrpend() - cheap check for a pending interrupt from us.
 *
 * AR9100 is handled up-front (branch body elided in this extraction).
 * Otherwise the async and sync interrupt-cause registers are consulted;
 * a raw value equal to AR_INTR_SPURIOUS is treated as not ours.
 * Returns bool (return statements elided from view).
 */
3167 bool ath9k_hw_intrpend(struct ath_hw *ah)
3171 if (AR_SREV_9100(ah))
3174 host_isr = REG_READ(ah, AR_INTR_ASYNC_CAUSE);
3175 if ((host_isr & AR_INTR_MAC_IRQ) && (host_isr != AR_INTR_SPURIOUS))
3178 host_isr = REG_READ(ah, AR_INTR_SYNC_CAUSE);
3179 if ((host_isr & AR_INTR_SYNC_DEFAULT)
3180 && (host_isr != AR_INTR_SPURIOUS))
3185 EXPORT_SYMBOL(ath9k_hw_intrpend);
/*
 * ath9k_hw_getisr() - read and decode the interrupt status registers.
 * @masked: out-parameter; receives the decoded ATH9K_INT_* bit set.
 *
 * On non-AR9100 parts the primary ISR is only read when the async cause
 * shows a MAC IRQ and the RTC is ON; sync causes (PCI fatal/parity,
 * RADM/local timeouts) are collected separately and folded in as
 * ATH9K_INT_FATAL where appropriate.  Secondary status registers
 * (AR_ISR_S2/S0_S/S1_S/S5_S) expand beacon-misc, per-queue TX and
 * generic-timer detail.  Returns bool (returns elided in this
 * extraction; several closing braces are also missing from view).
 */
3187 bool ath9k_hw_getisr(struct ath_hw *ah, enum ath9k_int *masked)
3191 struct ath9k_hw_capabilities *pCap = &ah->caps;
3193 bool fatal_int = false;
3194 struct ath_common *common = ath9k_hw_common(ah);
3196 if (!AR_SREV_9100(ah)) {
3197 if (REG_READ(ah, AR_INTR_ASYNC_CAUSE) & AR_INTR_MAC_IRQ) {
3198 if ((REG_READ(ah, AR_RTC_STATUS) & AR_RTC_STATUS_M)
3199 == AR_RTC_STATUS_ON) {
3200 isr = REG_READ(ah, AR_ISR);
3204 sync_cause = REG_READ(ah, AR_INTR_SYNC_CAUSE) &
3205 AR_INTR_SYNC_DEFAULT;
3209 if (!isr && !sync_cause)
/* AR9100: no async/sync cause registers, read the ISR directly. */
3213 isr = REG_READ(ah, AR_ISR);
/* Expand the beacon-misc secondary status into ATH9K_INT_* bits. */
3217 if (isr & AR_ISR_BCNMISC) {
3219 isr2 = REG_READ(ah, AR_ISR_S2);
3220 if (isr2 & AR_ISR_S2_TIM)
3221 mask2 |= ATH9K_INT_TIM;
3222 if (isr2 & AR_ISR_S2_DTIM)
3223 mask2 |= ATH9K_INT_DTIM;
3224 if (isr2 & AR_ISR_S2_DTIMSYNC)
3225 mask2 |= ATH9K_INT_DTIMSYNC;
3226 if (isr2 & (AR_ISR_S2_CABEND))
3227 mask2 |= ATH9K_INT_CABEND;
3228 if (isr2 & AR_ISR_S2_GTT)
3229 mask2 |= ATH9K_INT_GTT;
3230 if (isr2 & AR_ISR_S2_CST)
3231 mask2 |= ATH9K_INT_CST;
3232 if (isr2 & AR_ISR_S2_TSFOOR)
3233 mask2 |= ATH9K_INT_TSFOOR;
/* Read-and-clear variant; 0xffffffff means the device is gone. */
3236 isr = REG_READ(ah, AR_ISR_RAC);
3237 if (isr == 0xffffffff) {
3242 *masked = isr & ATH9K_INT_COMMON;
3244 if (ah->config.intr_mitigation) {
3245 if (isr & (AR_ISR_RXMINTR | AR_ISR_RXINTM))
3246 *masked |= ATH9K_INT_RX;
3249 if (isr & (AR_ISR_RXOK | AR_ISR_RXERR))
3250 *masked |= ATH9K_INT_RX;
3252 (AR_ISR_TXOK | AR_ISR_TXDESC | AR_ISR_TXERR |
3256 *masked |= ATH9K_INT_TX;
/* Accumulate per-QCU TX status for the caller. */
3258 s0_s = REG_READ(ah, AR_ISR_S0_S);
3259 ah->intr_txqs |= MS(s0_s, AR_ISR_S0_QCU_TXOK);
3260 ah->intr_txqs |= MS(s0_s, AR_ISR_S0_QCU_TXDESC);
3262 s1_s = REG_READ(ah, AR_ISR_S1_S);
3263 ah->intr_txqs |= MS(s1_s, AR_ISR_S1_QCU_TXERR);
3264 ah->intr_txqs |= MS(s1_s, AR_ISR_S1_QCU_TXEOL);
3267 if (isr & AR_ISR_RXORN) {
3268 ath_print(common, ATH_DBG_INTERRUPT,
3269 "receive FIFO overrun interrupt\n");
3272 if (!AR_SREV_9100(ah)) {
3273 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) {
3274 u32 isr5 = REG_READ(ah, AR_ISR_S5_S);
3275 if (isr5 & AR_ISR_S5_TIM_TIMER)
3276 *masked |= ATH9K_INT_TIM_TIMER;
3283 if (AR_SREV_9100(ah))
/* Generic timer interrupts: record trigger/threshold bitmaps. */
3286 if (isr & AR_ISR_GENTMR) {
3289 s5_s = REG_READ(ah, AR_ISR_S5_S);
3290 if (isr & AR_ISR_GENTMR) {
3291 ah->intr_gen_timer_trigger =
3292 MS(s5_s, AR_ISR_S5_GENTIMER_TRIG);
3294 ah->intr_gen_timer_thresh =
3295 MS(s5_s, AR_ISR_S5_GENTIMER_THRESH);
3297 if (ah->intr_gen_timer_trigger)
3298 *masked |= ATH9K_INT_GENTIMER;
/* Sync (PCI-side) causes: fatal/parity errors and bus timeouts. */
3306 (AR_INTR_SYNC_HOST1_FATAL | AR_INTR_SYNC_HOST1_PERR))
3310 if (sync_cause & AR_INTR_SYNC_HOST1_FATAL) {
3311 ath_print(common, ATH_DBG_ANY,
3312 "received PCI FATAL interrupt\n");
3314 if (sync_cause & AR_INTR_SYNC_HOST1_PERR) {
3315 ath_print(common, ATH_DBG_ANY,
3316 "received PCI PERR interrupt\n");
3318 *masked |= ATH9K_INT_FATAL;
3320 if (sync_cause & AR_INTR_SYNC_RADM_CPL_TIMEOUT) {
3321 ath_print(common, ATH_DBG_INTERRUPT,
3322 "AR_INTR_SYNC_RADM_CPL_TIMEOUT\n");
3323 REG_WRITE(ah, AR_RC, AR_RC_HOSTIF);
3324 REG_WRITE(ah, AR_RC, 0);
3325 *masked |= ATH9K_INT_FATAL;
3327 if (sync_cause & AR_INTR_SYNC_LOCAL_TIMEOUT) {
3328 ath_print(common, ATH_DBG_INTERRUPT,
3329 "AR_INTR_SYNC_LOCAL_TIMEOUT\n");
/* Acknowledge the handled sync causes (read back to flush the write). */
3332 REG_WRITE(ah, AR_INTR_SYNC_CAUSE_CLR, sync_cause);
3333 (void) REG_READ(ah, AR_INTR_SYNC_CAUSE_CLR);
3338 EXPORT_SYMBOL(ath9k_hw_getisr);
/*
 * ath9k_hw_set_interrupts() - program the interrupt mask registers.
 * @ints: desired ATH9K_INT_* set; ATH9K_INT_GLOBAL gates the IER.
 *
 * Disables the IER first (with read-backs to flush posted writes) when
 * interrupts were globally enabled, translates @ints into AR_IMR /
 * AR_IMR_S2 bits — honouring the per-queue TX sub-masks and the RX
 * interrupt-mitigation configuration — then re-enables the IER and the
 * async/sync enables if ATH9K_INT_GLOBAL is requested.  The previous
 * mask is kept in ah->mask_reg.  Return value elided in this
 * extraction (TODO confirm: presumably the old @ints).
 */
3340 enum ath9k_int ath9k_hw_set_interrupts(struct ath_hw *ah, enum ath9k_int ints)
3342 u32 omask = ah->mask_reg;
3344 struct ath9k_hw_capabilities *pCap = &ah->caps;
3345 struct ath_common *common = ath9k_hw_common(ah);
3347 ath_print(common, ATH_DBG_INTERRUPT, "0x%x => 0x%x\n", omask, ints);
/* Quiesce: disable IER before rewriting the masks. */
3349 if (omask & ATH9K_INT_GLOBAL) {
3350 ath_print(common, ATH_DBG_INTERRUPT, "disable IER\n");
3351 REG_WRITE(ah, AR_IER, AR_IER_DISABLE);
3352 (void) REG_READ(ah, AR_IER);
3353 if (!AR_SREV_9100(ah)) {
3354 REG_WRITE(ah, AR_INTR_ASYNC_ENABLE, 0);
3355 (void) REG_READ(ah, AR_INTR_ASYNC_ENABLE);
3357 REG_WRITE(ah, AR_INTR_SYNC_ENABLE, 0);
3358 (void) REG_READ(ah, AR_INTR_SYNC_ENABLE);
3362 mask = ints & ATH9K_INT_COMMON;
/* TX: enable only the sub-causes callers registered masks for. */
3365 if (ints & ATH9K_INT_TX) {
3366 if (ah->txok_interrupt_mask)
3367 mask |= AR_IMR_TXOK;
3368 if (ah->txdesc_interrupt_mask)
3369 mask |= AR_IMR_TXDESC;
3370 if (ah->txerr_interrupt_mask)
3371 mask |= AR_IMR_TXERR;
3372 if (ah->txeol_interrupt_mask)
3373 mask |= AR_IMR_TXEOL;
/* RX: mitigated interrupts or plain OK/DESC, plus GENTMR when needed. */
3375 if (ints & ATH9K_INT_RX) {
3376 mask |= AR_IMR_RXERR;
3377 if (ah->config.intr_mitigation)
3378 mask |= AR_IMR_RXMINTR | AR_IMR_RXINTM;
3380 mask |= AR_IMR_RXOK | AR_IMR_RXDESC;
3381 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP))
3382 mask |= AR_IMR_GENTMR;
/* Beacon-misc causes live in the secondary IMR_S2 register. */
3385 if (ints & (ATH9K_INT_BMISC)) {
3386 mask |= AR_IMR_BCNMISC;
3387 if (ints & ATH9K_INT_TIM)
3388 mask2 |= AR_IMR_S2_TIM;
3389 if (ints & ATH9K_INT_DTIM)
3390 mask2 |= AR_IMR_S2_DTIM;
3391 if (ints & ATH9K_INT_DTIMSYNC)
3392 mask2 |= AR_IMR_S2_DTIMSYNC;
3393 if (ints & ATH9K_INT_CABEND)
3394 mask2 |= AR_IMR_S2_CABEND;
3395 if (ints & ATH9K_INT_TSFOOR)
3396 mask2 |= AR_IMR_S2_TSFOOR;
3399 if (ints & (ATH9K_INT_GTT | ATH9K_INT_CST)) {
3400 mask |= AR_IMR_BCNMISC;
3401 if (ints & ATH9K_INT_GTT)
3402 mask2 |= AR_IMR_S2_GTT;
3403 if (ints & ATH9K_INT_CST)
3404 mask2 |= AR_IMR_S2_CST;
3407 ath_print(common, ATH_DBG_INTERRUPT, "new IMR 0x%x\n", mask);
3408 REG_WRITE(ah, AR_IMR, mask);
/* Preserve unrelated IMR_S2 bits, replace only the ones we own. */
3409 mask = REG_READ(ah, AR_IMR_S2) & ~(AR_IMR_S2_TIM |
3411 AR_IMR_S2_DTIMSYNC |
3415 AR_IMR_S2_GTT | AR_IMR_S2_CST);
3416 REG_WRITE(ah, AR_IMR_S2, mask | mask2);
3417 ah->mask_reg = ints;
3419 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) {
3420 if (ints & ATH9K_INT_TIM_TIMER)
3421 REG_SET_BIT(ah, AR_IMR_S5, AR_IMR_S5_TIM_TIMER);
3423 REG_CLR_BIT(ah, AR_IMR_S5, AR_IMR_S5_TIM_TIMER);
/* Re-enable the interrupt enable registers if globally requested. */
3426 if (ints & ATH9K_INT_GLOBAL) {
3427 ath_print(common, ATH_DBG_INTERRUPT, "enable IER\n");
3428 REG_WRITE(ah, AR_IER, AR_IER_ENABLE);
3429 if (!AR_SREV_9100(ah)) {
3430 REG_WRITE(ah, AR_INTR_ASYNC_ENABLE,
3432 REG_WRITE(ah, AR_INTR_ASYNC_MASK, AR_INTR_MAC_IRQ);
3435 REG_WRITE(ah, AR_INTR_SYNC_ENABLE,
3436 AR_INTR_SYNC_DEFAULT);
3437 REG_WRITE(ah, AR_INTR_SYNC_MASK,
3438 AR_INTR_SYNC_DEFAULT);
3440 ath_print(common, ATH_DBG_INTERRUPT, "AR_IMR 0x%x IER 0x%x\n",
3441 REG_READ(ah, AR_IMR), REG_READ(ah, AR_IER));
3446 EXPORT_SYMBOL(ath9k_hw_set_interrupts);
3448 /*******************/
3449 /* Beacon Handling */
3450 /*******************/
/*
 * ath9k_hw_beaconinit() - program beacon timers for the current opmode.
 * @next_beacon: next beacon time (TU); @beacon_period: interval (TU),
 *	possibly carrying ATH9K_BEACON_ENA / ATH9K_BEACON_RESET_TSF flags.
 *
 * STA/monitor arm only the TBTT timer; IBSS/mesh additionally program
 * the NDP (ATIM) timer and — note, with no break visible before the AP
 * case — fall through to the AP programming of DMA-beacon-alert and
 * SWBA timers (presumably intentional; confirm against the full file).
 * Finally the four period registers are written and the selected timers
 * enabled via AR_TIMER_MODE.
 */
3452 void ath9k_hw_beaconinit(struct ath_hw *ah, u32 next_beacon, u32 beacon_period)
3456 ah->beacon_interval = beacon_period;
3458 switch (ah->opmode) {
3459 case NL80211_IFTYPE_STATION:
3460 case NL80211_IFTYPE_MONITOR:
3461 REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(next_beacon));
3462 REG_WRITE(ah, AR_NEXT_DMA_BEACON_ALERT, 0xffff);
3463 REG_WRITE(ah, AR_NEXT_SWBA, 0x7ffff);
3464 flags |= AR_TBTT_TIMER_EN;
3466 case NL80211_IFTYPE_ADHOC:
3467 case NL80211_IFTYPE_MESH_POINT:
3468 REG_SET_BIT(ah, AR_TXCFG,
3469 AR_TXCFG_ADHOC_BEACON_ATIM_TX_POLICY);
3470 REG_WRITE(ah, AR_NEXT_NDP_TIMER,
3471 TU_TO_USEC(next_beacon +
3472 (ah->atim_window ? ah->
3474 flags |= AR_NDP_TIMER_EN;
/* fall through (no break in source): also program the AP timers */
3475 case NL80211_IFTYPE_AP:
3476 REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(next_beacon));
3477 REG_WRITE(ah, AR_NEXT_DMA_BEACON_ALERT,
3478 TU_TO_USEC(next_beacon -
3480 dma_beacon_response_time));
3481 REG_WRITE(ah, AR_NEXT_SWBA,
3482 TU_TO_USEC(next_beacon -
3484 sw_beacon_response_time));
3486 AR_TBTT_TIMER_EN | AR_DBA_TIMER_EN | AR_SWBA_TIMER_EN;
3489 ath_print(ath9k_hw_common(ah), ATH_DBG_BEACON,
3490 "%s: unsupported opmode: %d\n",
3491 __func__, ah->opmode);
3496 REG_WRITE(ah, AR_BEACON_PERIOD, TU_TO_USEC(beacon_period));
3497 REG_WRITE(ah, AR_DMA_BEACON_PERIOD, TU_TO_USEC(beacon_period));
3498 REG_WRITE(ah, AR_SWBA_PERIOD, TU_TO_USEC(beacon_period));
3499 REG_WRITE(ah, AR_NDP_PERIOD, TU_TO_USEC(beacon_period));
/* Strip the enable flag, honour a requested TSF reset. */
3501 beacon_period &= ~ATH9K_BEACON_ENA;
3502 if (beacon_period & ATH9K_BEACON_RESET_TSF) {
3503 ath9k_hw_reset_tsf(ah);
3506 REG_SET_BIT(ah, AR_TIMER_MODE, flags);
/*
 * ath9k_hw_set_sta_beacon_timers() - program station-mode sleep/beacon
 * timers from @bs.
 *
 * Sets TBTT and beacon periods, the beacon-miss threshold, then derives
 * the effective listen interval: both the beacon interval and the DTIM
 * period are stretched to at least bs_sleepduration, and the next TIM
 * target collapses to the next DTIM when the two intervals coincide.
 * SLEEP1/SLEEP2 get the CAB and beacon timeouts (the beacon timeout
 * depends on ATH9K_HW_CAP_AUTOSLEEP), and the TBTT/TIM/DTIM timers are
 * enabled.  Finally the TSF out-of-range threshold is written.
 */
3510 void ath9k_hw_set_sta_beacon_timers(struct ath_hw *ah,
3511 const struct ath9k_beacon_state *bs)
3513 u32 nextTbtt, beaconintval, dtimperiod, beacontimeout;
3514 struct ath9k_hw_capabilities *pCap = &ah->caps;
3515 struct ath_common *common = ath9k_hw_common(ah);
3517 REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(bs->bs_nexttbtt));
3519 REG_WRITE(ah, AR_BEACON_PERIOD,
3520 TU_TO_USEC(bs->bs_intval & ATH9K_BEACON_PERIOD));
3521 REG_WRITE(ah, AR_DMA_BEACON_PERIOD,
3522 TU_TO_USEC(bs->bs_intval & ATH9K_BEACON_PERIOD));
3524 REG_RMW_FIELD(ah, AR_RSSI_THR,
3525 AR_RSSI_THR_BM_THR, bs->bs_bmissthreshold);
/* Listen interval must be at least the requested sleep duration. */
3527 beaconintval = bs->bs_intval & ATH9K_BEACON_PERIOD;
3529 if (bs->bs_sleepduration > beaconintval)
3530 beaconintval = bs->bs_sleepduration;
3532 dtimperiod = bs->bs_dtimperiod;
3533 if (bs->bs_sleepduration > dtimperiod)
3534 dtimperiod = bs->bs_sleepduration;
3536 if (beaconintval == dtimperiod)
3537 nextTbtt = bs->bs_nextdtim;
3539 nextTbtt = bs->bs_nexttbtt;
3541 ath_print(common, ATH_DBG_BEACON, "next DTIM %d\n", bs->bs_nextdtim);
3542 ath_print(common, ATH_DBG_BEACON, "next beacon %d\n", nextTbtt);
3543 ath_print(common, ATH_DBG_BEACON, "beacon period %d\n", beaconintval);
3544 ath_print(common, ATH_DBG_BEACON, "DTIM period %d\n", dtimperiod);
/* Wake SLEEP_SLOP TUs early to be sure of catching the beacon. */
3546 REG_WRITE(ah, AR_NEXT_DTIM,
3547 TU_TO_USEC(bs->bs_nextdtim - SLEEP_SLOP));
3548 REG_WRITE(ah, AR_NEXT_TIM, TU_TO_USEC(nextTbtt - SLEEP_SLOP));
3550 REG_WRITE(ah, AR_SLEEP1,
3551 SM((CAB_TIMEOUT_VAL << 3), AR_SLEEP1_CAB_TIMEOUT)
3552 | AR_SLEEP1_ASSUME_DTIM);
3554 if (pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)
3555 beacontimeout = (BEACON_TIMEOUT_VAL << 3);
3557 beacontimeout = MIN_BEACON_TIMEOUT_VAL;
3559 REG_WRITE(ah, AR_SLEEP2,
3560 SM(beacontimeout, AR_SLEEP2_BEACON_TIMEOUT));
3562 REG_WRITE(ah, AR_TIM_PERIOD, TU_TO_USEC(beaconintval));
3563 REG_WRITE(ah, AR_DTIM_PERIOD, TU_TO_USEC(dtimperiod));
3565 REG_SET_BIT(ah, AR_TIMER_MODE,
3566 AR_TBTT_TIMER_EN | AR_TIM_TIMER_EN |
3569 /* TSF Out of Range Threshold */
3570 REG_WRITE(ah, AR_TSFOOR_THRESHOLD, bs->bs_tsfoor_threshold);
3574 /*******************/
3575 /* HW Capabilities */
3576 /*******************/
3578 void ath9k_hw_fill_cap_info(struct ath_hw *ah)
3580 struct ath9k_hw_capabilities *pCap = &ah->caps;
3581 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
3582 struct ath_common *common = ath9k_hw_common(ah);
3583 struct ath_btcoex_hw *btcoex_hw = &ah->btcoex_hw;
3585 u16 capField = 0, eeval;
3587 eeval = ah->eep_ops->get_eeprom(ah, EEP_REG_0);
3588 regulatory->current_rd = eeval;
3590 eeval = ah->eep_ops->get_eeprom(ah, EEP_REG_1);
3591 if (AR_SREV_9285_10_OR_LATER(ah))
3592 eeval |= AR9285_RDEXT_DEFAULT;
3593 regulatory->current_rd_ext = eeval;
3595 capField = ah->eep_ops->get_eeprom(ah, EEP_OP_CAP);
3597 if (ah->opmode != NL80211_IFTYPE_AP &&
3598 ah->hw_version.subvendorid == AR_SUBVENDOR_ID_NEW_A) {
3599 if (regulatory->current_rd == 0x64 ||
3600 regulatory->current_rd == 0x65)
3601 regulatory->current_rd += 5;
3602 else if (regulatory->current_rd == 0x41)
3603 regulatory->current_rd = 0x43;
3604 ath_print(common, ATH_DBG_REGULATORY,
3605 "regdomain mapped to 0x%x\n", regulatory->current_rd);
3608 eeval = ah->eep_ops->get_eeprom(ah, EEP_OP_MODE);
3609 bitmap_zero(pCap->wireless_modes, ATH9K_MODE_MAX);
3611 if (eeval & AR5416_OPFLAGS_11A) {
3612 set_bit(ATH9K_MODE_11A, pCap->wireless_modes);
3613 if (ah->config.ht_enable) {
3614 if (!(eeval & AR5416_OPFLAGS_N_5G_HT20))
3615 set_bit(ATH9K_MODE_11NA_HT20,
3616 pCap->wireless_modes);
3617 if (!(eeval & AR5416_OPFLAGS_N_5G_HT40)) {
3618 set_bit(ATH9K_MODE_11NA_HT40PLUS,
3619 pCap->wireless_modes);
3620 set_bit(ATH9K_MODE_11NA_HT40MINUS,
3621 pCap->wireless_modes);
3626 if (eeval & AR5416_OPFLAGS_11G) {
3627 set_bit(ATH9K_MODE_11G, pCap->wireless_modes);
3628 if (ah->config.ht_enable) {
3629 if (!(eeval & AR5416_OPFLAGS_N_2G_HT20))
3630 set_bit(ATH9K_MODE_11NG_HT20,
3631 pCap->wireless_modes);
3632 if (!(eeval & AR5416_OPFLAGS_N_2G_HT40)) {
3633 set_bit(ATH9K_MODE_11NG_HT40PLUS,
3634 pCap->wireless_modes);
3635 set_bit(ATH9K_MODE_11NG_HT40MINUS,
3636 pCap->wireless_modes);
3641 pCap->tx_chainmask = ah->eep_ops->get_eeprom(ah, EEP_TX_MASK);
3643 * For AR9271 we will temporarilly uses the rx chainmax as read from
3646 if ((ah->hw_version.devid == AR5416_DEVID_PCI) &&
3647 !(eeval & AR5416_OPFLAGS_11A) &&
3648 !(AR_SREV_9271(ah)))
3649 /* CB71: GPIO 0 is pulled down to indicate 3 rx chains */
3650 pCap->rx_chainmask = ath9k_hw_gpio_get(ah, 0) ? 0x5 : 0x7;
3652 /* Use rx_chainmask from EEPROM. */
3653 pCap->rx_chainmask = ah->eep_ops->get_eeprom(ah, EEP_RX_MASK);
3655 if (!(AR_SREV_9280(ah) && (ah->hw_version.macRev == 0)))
3656 ah->misc_mode |= AR_PCU_MIC_NEW_LOC_ENA;
3658 pCap->low_2ghz_chan = 2312;
3659 pCap->high_2ghz_chan = 2732;
3661 pCap->low_5ghz_chan = 4920;
3662 pCap->high_5ghz_chan = 6100;
3664 pCap->hw_caps &= ~ATH9K_HW_CAP_CIPHER_CKIP;
3665 pCap->hw_caps |= ATH9K_HW_CAP_CIPHER_TKIP;
3666 pCap->hw_caps |= ATH9K_HW_CAP_CIPHER_AESCCM;
3668 pCap->hw_caps &= ~ATH9K_HW_CAP_MIC_CKIP;
3669 pCap->hw_caps |= ATH9K_HW_CAP_MIC_TKIP;
3670 pCap->hw_caps |= ATH9K_HW_CAP_MIC_AESCCM;
3672 if (ah->config.ht_enable)
3673 pCap->hw_caps |= ATH9K_HW_CAP_HT;
3675 pCap->hw_caps &= ~ATH9K_HW_CAP_HT;
3677 pCap->hw_caps |= ATH9K_HW_CAP_GTT;
3678 pCap->hw_caps |= ATH9K_HW_CAP_VEOL;
3679 pCap->hw_caps |= ATH9K_HW_CAP_BSSIDMASK;
3680 pCap->hw_caps &= ~ATH9K_HW_CAP_MCAST_KEYSEARCH;
3682 if (capField & AR_EEPROM_EEPCAP_MAXQCU)
3683 pCap->total_queues =
3684 MS(capField, AR_EEPROM_EEPCAP_MAXQCU);
3686 pCap->total_queues = ATH9K_NUM_TX_QUEUES;
3688 if (capField & AR_EEPROM_EEPCAP_KC_ENTRIES)
3689 pCap->keycache_size =
3690 1 << MS(capField, AR_EEPROM_EEPCAP_KC_ENTRIES);
3692 pCap->keycache_size = AR_KEYTABLE_SIZE;
3694 pCap->hw_caps |= ATH9K_HW_CAP_FASTCC;
3695 pCap->tx_triglevel_max = MAX_TX_FIFO_THRESHOLD;
3697 if (AR_SREV_9285_10_OR_LATER(ah))
3698 pCap->num_gpio_pins = AR9285_NUM_GPIO;
3699 else if (AR_SREV_9280_10_OR_LATER(ah))
3700 pCap->num_gpio_pins = AR928X_NUM_GPIO;
3702 pCap->num_gpio_pins = AR_NUM_GPIO;
3704 if (AR_SREV_9160_10_OR_LATER(ah) || AR_SREV_9100(ah)) {
3705 pCap->hw_caps |= ATH9K_HW_CAP_CST;
3706 pCap->rts_aggr_limit = ATH_AMPDU_LIMIT_MAX;
3708 pCap->rts_aggr_limit = (8 * 1024);
3711 pCap->hw_caps |= ATH9K_HW_CAP_ENHANCEDPM;
3713 #if defined(CONFIG_RFKILL) || defined(CONFIG_RFKILL_MODULE)
3714 ah->rfsilent = ah->eep_ops->get_eeprom(ah, EEP_RF_SILENT);
3715 if (ah->rfsilent & EEP_RFSILENT_ENABLED) {
3717 MS(ah->rfsilent, EEP_RFSILENT_GPIO_SEL);
3718 ah->rfkill_polarity =
3719 MS(ah->rfsilent, EEP_RFSILENT_POLARITY);
3721 pCap->hw_caps |= ATH9K_HW_CAP_RFSILENT;
3725 pCap->hw_caps &= ~ATH9K_HW_CAP_AUTOSLEEP;
3727 if (AR_SREV_9280(ah) || AR_SREV_9285(ah))
3728 pCap->hw_caps &= ~ATH9K_HW_CAP_4KB_SPLITTRANS;
3730 pCap->hw_caps |= ATH9K_HW_CAP_4KB_SPLITTRANS;
3732 if (regulatory->current_rd_ext & (1 << REG_EXT_JAPAN_MIDBAND)) {
3734 AR_EEPROM_EEREGCAP_EN_KK_NEW_11A |
3735 AR_EEPROM_EEREGCAP_EN_KK_U1_EVEN |
3736 AR_EEPROM_EEREGCAP_EN_KK_U2 |
3737 AR_EEPROM_EEREGCAP_EN_KK_MIDBAND;
3740 AR_EEPROM_EEREGCAP_EN_KK_NEW_11A |
3741 AR_EEPROM_EEREGCAP_EN_KK_U1_EVEN;
3744 /* Advertise midband for AR5416 with FCC midband set in eeprom */
3745 if (regulatory->current_rd_ext & (1 << REG_EXT_FCC_MIDBAND) &&
3747 pCap->reg_cap |= AR_EEPROM_EEREGCAP_EN_FCC_MIDBAND;
3749 pCap->num_antcfg_5ghz =
3750 ah->eep_ops->get_num_ant_config(ah, ATH9K_HAL_FREQ_BAND_5GHZ);
3751 pCap->num_antcfg_2ghz =
3752 ah->eep_ops->get_num_ant_config(ah, ATH9K_HAL_FREQ_BAND_2GHZ);
3754 if (AR_SREV_9280_10_OR_LATER(ah) &&
3755 ath9k_hw_btcoex_supported(ah)) {
3756 btcoex_hw->btactive_gpio = ATH_BTACTIVE_GPIO;
3757 btcoex_hw->wlanactive_gpio = ATH_WLANACTIVE_GPIO;
3759 if (AR_SREV_9285(ah)) {
3760 btcoex_hw->scheme = ATH_BTCOEX_CFG_3WIRE;
3761 btcoex_hw->btpriority_gpio = ATH_BTPRIORITY_GPIO;
3763 btcoex_hw->scheme = ATH_BTCOEX_CFG_2WIRE;
3766 btcoex_hw->scheme = ATH_BTCOEX_CFG_NONE;
3770 bool ath9k_hw_getcapability(struct ath_hw *ah, enum ath9k_capability_type type,
3771 u32 capability, u32 *result)
3773 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
3775 case ATH9K_CAP_CIPHER:
3776 switch (capability) {
3777 case ATH9K_CIPHER_AES_CCM:
3778 case ATH9K_CIPHER_AES_OCB:
3779 case ATH9K_CIPHER_TKIP:
3780 case ATH9K_CIPHER_WEP:
3781 case ATH9K_CIPHER_MIC:
3782 case ATH9K_CIPHER_CLR:
3787 case ATH9K_CAP_TKIP_MIC:
3788 switch (capability) {
3792 return (ah->sta_id1_defaults &
3793 AR_STA_ID1_CRPT_MIC_ENABLE) ? true :
3796 case ATH9K_CAP_TKIP_SPLIT:
3797 return (ah->misc_mode & AR_PCU_MIC_NEW_LOC_ENA) ?
3799 case ATH9K_CAP_DIVERSITY:
3800 return (REG_READ(ah, AR_PHY_CCK_DETECT) &
3801 AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV) ?
3803 case ATH9K_CAP_MCAST_KEYSRCH:
3804 switch (capability) {
3808 if (REG_READ(ah, AR_STA_ID1) & AR_STA_ID1_ADHOC) {
3811 return (ah->sta_id1_defaults &
3812 AR_STA_ID1_MCAST_KSRCH) ? true :
3817 case ATH9K_CAP_TXPOW:
3818 switch (capability) {
3822 *result = regulatory->power_limit;
3825 *result = regulatory->max_power_level;
3828 *result = regulatory->tp_scale;
3833 return (AR_SREV_9280_20_OR_LATER(ah) &&
3834 (ah->eep_ops->get_eeprom(ah, EEP_RC_CHAIN_MASK) == 1))
3840 EXPORT_SYMBOL(ath9k_hw_getcapability);
3842 bool ath9k_hw_setcapability(struct ath_hw *ah, enum ath9k_capability_type type,
3843 u32 capability, u32 setting, int *status)
3848 case ATH9K_CAP_TKIP_MIC:
3850 ah->sta_id1_defaults |=
3851 AR_STA_ID1_CRPT_MIC_ENABLE;
3853 ah->sta_id1_defaults &=
3854 ~AR_STA_ID1_CRPT_MIC_ENABLE;
3856 case ATH9K_CAP_DIVERSITY:
3857 v = REG_READ(ah, AR_PHY_CCK_DETECT);
3859 v |= AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV;
3861 v &= ~AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV;
3862 REG_WRITE(ah, AR_PHY_CCK_DETECT, v);
3864 case ATH9K_CAP_MCAST_KEYSRCH:
3866 ah->sta_id1_defaults |= AR_STA_ID1_MCAST_KSRCH;
3868 ah->sta_id1_defaults &= ~AR_STA_ID1_MCAST_KSRCH;
3874 EXPORT_SYMBOL(ath9k_hw_setcapability);
3876 /****************************/
3877 /* GPIO / RFKILL / Antennae */
3878 /****************************/
3880 static void ath9k_hw_gpio_cfg_output_mux(struct ath_hw *ah,
3884 u32 gpio_shift, tmp;
3887 addr = AR_GPIO_OUTPUT_MUX3;
3889 addr = AR_GPIO_OUTPUT_MUX2;
3891 addr = AR_GPIO_OUTPUT_MUX1;
3893 gpio_shift = (gpio % 6) * 5;
3895 if (AR_SREV_9280_20_OR_LATER(ah)
3896 || (addr != AR_GPIO_OUTPUT_MUX1)) {
3897 REG_RMW(ah, addr, (type << gpio_shift),
3898 (0x1f << gpio_shift));
3900 tmp = REG_READ(ah, addr);
3901 tmp = ((tmp & 0x1F0) << 1) | (tmp & ~0x1F0);
3902 tmp &= ~(0x1f << gpio_shift);
3903 tmp |= (type << gpio_shift);
3904 REG_WRITE(ah, addr, tmp);
3908 void ath9k_hw_cfg_gpio_input(struct ath_hw *ah, u32 gpio)
3912 BUG_ON(gpio >= ah->caps.num_gpio_pins);
3914 gpio_shift = gpio << 1;
3918 (AR_GPIO_OE_OUT_DRV_NO << gpio_shift),
3919 (AR_GPIO_OE_OUT_DRV << gpio_shift));
3921 EXPORT_SYMBOL(ath9k_hw_cfg_gpio_input);
3923 u32 ath9k_hw_gpio_get(struct ath_hw *ah, u32 gpio)
3925 #define MS_REG_READ(x, y) \
3926 (MS(REG_READ(ah, AR_GPIO_IN_OUT), x##_GPIO_IN_VAL) & (AR_GPIO_BIT(y)))
3928 if (gpio >= ah->caps.num_gpio_pins)
3931 if (AR_SREV_9287_10_OR_LATER(ah))
3932 return MS_REG_READ(AR9287, gpio) != 0;
3933 else if (AR_SREV_9285_10_OR_LATER(ah))
3934 return MS_REG_READ(AR9285, gpio) != 0;
3935 else if (AR_SREV_9280_10_OR_LATER(ah))
3936 return MS_REG_READ(AR928X, gpio) != 0;
3938 return MS_REG_READ(AR, gpio) != 0;
3940 EXPORT_SYMBOL(ath9k_hw_gpio_get);
3942 void ath9k_hw_cfg_output(struct ath_hw *ah, u32 gpio,
3947 ath9k_hw_gpio_cfg_output_mux(ah, gpio, ah_signal_type);
3949 gpio_shift = 2 * gpio;
3953 (AR_GPIO_OE_OUT_DRV_ALL << gpio_shift),
3954 (AR_GPIO_OE_OUT_DRV << gpio_shift));
3956 EXPORT_SYMBOL(ath9k_hw_cfg_output);
3958 void ath9k_hw_set_gpio(struct ath_hw *ah, u32 gpio, u32 val)
3960 REG_RMW(ah, AR_GPIO_IN_OUT, ((val & 1) << gpio),
3963 EXPORT_SYMBOL(ath9k_hw_set_gpio);
3965 u32 ath9k_hw_getdefantenna(struct ath_hw *ah)
3967 return REG_READ(ah, AR_DEF_ANTENNA) & 0x7;
3969 EXPORT_SYMBOL(ath9k_hw_getdefantenna);
3971 void ath9k_hw_setantenna(struct ath_hw *ah, u32 antenna)
3973 REG_WRITE(ah, AR_DEF_ANTENNA, (antenna & 0x7));
3975 EXPORT_SYMBOL(ath9k_hw_setantenna);
3977 bool ath9k_hw_setantennaswitch(struct ath_hw *ah,
3978 enum ath9k_ant_setting settings,
3979 struct ath9k_channel *chan,
3984 static u8 tx_chainmask_cfg, rx_chainmask_cfg;
3986 if (AR_SREV_9280(ah)) {
3987 if (!tx_chainmask_cfg) {
3989 tx_chainmask_cfg = *tx_chainmask;
3990 rx_chainmask_cfg = *rx_chainmask;
3994 case ATH9K_ANT_FIXED_A:
3995 *tx_chainmask = ATH9K_ANTENNA0_CHAINMASK;
3996 *rx_chainmask = ATH9K_ANTENNA0_CHAINMASK;
3997 *antenna_cfgd = true;
3999 case ATH9K_ANT_FIXED_B:
4000 if (ah->caps.tx_chainmask >
4001 ATH9K_ANTENNA1_CHAINMASK) {
4002 *tx_chainmask = ATH9K_ANTENNA1_CHAINMASK;
4004 *rx_chainmask = ATH9K_ANTENNA1_CHAINMASK;
4005 *antenna_cfgd = true;
4007 case ATH9K_ANT_VARIABLE:
4008 *tx_chainmask = tx_chainmask_cfg;
4009 *rx_chainmask = rx_chainmask_cfg;
4010 *antenna_cfgd = true;
4016 ah->config.diversity_control = settings;
4022 /*********************/
4023 /* General Operation */
4024 /*********************/
4026 u32 ath9k_hw_getrxfilter(struct ath_hw *ah)
4028 u32 bits = REG_READ(ah, AR_RX_FILTER);
4029 u32 phybits = REG_READ(ah, AR_PHY_ERR);
4031 if (phybits & AR_PHY_ERR_RADAR)
4032 bits |= ATH9K_RX_FILTER_PHYRADAR;
4033 if (phybits & (AR_PHY_ERR_OFDM_TIMING | AR_PHY_ERR_CCK_TIMING))
4034 bits |= ATH9K_RX_FILTER_PHYERR;
4038 EXPORT_SYMBOL(ath9k_hw_getrxfilter);
4040 void ath9k_hw_setrxfilter(struct ath_hw *ah, u32 bits)
4044 REG_WRITE(ah, AR_RX_FILTER, bits);
4047 if (bits & ATH9K_RX_FILTER_PHYRADAR)
4048 phybits |= AR_PHY_ERR_RADAR;
4049 if (bits & ATH9K_RX_FILTER_PHYERR)
4050 phybits |= AR_PHY_ERR_OFDM_TIMING | AR_PHY_ERR_CCK_TIMING;
4051 REG_WRITE(ah, AR_PHY_ERR, phybits);
4054 REG_WRITE(ah, AR_RXCFG,
4055 REG_READ(ah, AR_RXCFG) | AR_RXCFG_ZLFDMA);
4057 REG_WRITE(ah, AR_RXCFG,
4058 REG_READ(ah, AR_RXCFG) & ~AR_RXCFG_ZLFDMA);
4060 EXPORT_SYMBOL(ath9k_hw_setrxfilter);
4062 bool ath9k_hw_phy_disable(struct ath_hw *ah)
4064 if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_WARM))
4067 ath9k_hw_init_pll(ah, NULL);
4070 EXPORT_SYMBOL(ath9k_hw_phy_disable);
4072 bool ath9k_hw_disable(struct ath_hw *ah)
4074 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
4077 if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_COLD))
4080 ath9k_hw_init_pll(ah, NULL);
4083 EXPORT_SYMBOL(ath9k_hw_disable);
4085 void ath9k_hw_set_txpowerlimit(struct ath_hw *ah, u32 limit)
4087 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
4088 struct ath9k_channel *chan = ah->curchan;
4089 struct ieee80211_channel *channel = chan->chan;
4091 regulatory->power_limit = min(limit, (u32) MAX_RATE_POWER);
4093 ah->eep_ops->set_txpower(ah, chan,
4094 ath9k_regd_get_ctl(regulatory, chan),
4095 channel->max_antenna_gain * 2,
4096 channel->max_power * 2,
4097 min((u32) MAX_RATE_POWER,
4098 (u32) regulatory->power_limit));
4100 EXPORT_SYMBOL(ath9k_hw_set_txpowerlimit);
4102 void ath9k_hw_setmac(struct ath_hw *ah, const u8 *mac)
4104 memcpy(ath9k_hw_common(ah)->macaddr, mac, ETH_ALEN);
4106 EXPORT_SYMBOL(ath9k_hw_setmac);
4108 void ath9k_hw_setopmode(struct ath_hw *ah)
4110 ath9k_hw_set_operating_mode(ah, ah->opmode);
4112 EXPORT_SYMBOL(ath9k_hw_setopmode);
4114 void ath9k_hw_setmcastfilter(struct ath_hw *ah, u32 filter0, u32 filter1)
4116 REG_WRITE(ah, AR_MCAST_FIL0, filter0);
4117 REG_WRITE(ah, AR_MCAST_FIL1, filter1);
4119 EXPORT_SYMBOL(ath9k_hw_setmcastfilter);
4121 void ath9k_hw_write_associd(struct ath_hw *ah)
4123 struct ath_common *common = ath9k_hw_common(ah);
4125 REG_WRITE(ah, AR_BSS_ID0, get_unaligned_le32(common->curbssid));
4126 REG_WRITE(ah, AR_BSS_ID1, get_unaligned_le16(common->curbssid + 4) |
4127 ((common->curaid & 0x3fff) << AR_BSS_ID1_AID_S));
4129 EXPORT_SYMBOL(ath9k_hw_write_associd);
4131 u64 ath9k_hw_gettsf64(struct ath_hw *ah)
4135 tsf = REG_READ(ah, AR_TSF_U32);
4136 tsf = (tsf << 32) | REG_READ(ah, AR_TSF_L32);
4140 EXPORT_SYMBOL(ath9k_hw_gettsf64);
4142 void ath9k_hw_settsf64(struct ath_hw *ah, u64 tsf64)
4144 REG_WRITE(ah, AR_TSF_L32, tsf64 & 0xffffffff);
4145 REG_WRITE(ah, AR_TSF_U32, (tsf64 >> 32) & 0xffffffff);
4147 EXPORT_SYMBOL(ath9k_hw_settsf64);
4149 void ath9k_hw_reset_tsf(struct ath_hw *ah)
4151 if (!ath9k_hw_wait(ah, AR_SLP32_MODE, AR_SLP32_TSF_WRITE_STATUS, 0,
4152 AH_TSF_WRITE_TIMEOUT))
4153 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET,
4154 "AR_SLP32_TSF_WRITE_STATUS limit exceeded\n");
4156 REG_WRITE(ah, AR_RESET_TSF, AR_RESET_TSF_ONCE);
4158 EXPORT_SYMBOL(ath9k_hw_reset_tsf);
4160 void ath9k_hw_set_tsfadjust(struct ath_hw *ah, u32 setting)
4163 ah->misc_mode |= AR_PCU_TX_ADD_TSF;
4165 ah->misc_mode &= ~AR_PCU_TX_ADD_TSF;
4167 EXPORT_SYMBOL(ath9k_hw_set_tsfadjust);
4169 bool ath9k_hw_setslottime(struct ath_hw *ah, u32 us)
4171 if (us < ATH9K_SLOT_TIME_9 || us > ath9k_hw_mac_to_usec(ah, 0xffff)) {
4172 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET,
4173 "bad slot time %u\n", us);
4174 ah->slottime = (u32) -1;
4177 REG_WRITE(ah, AR_D_GBL_IFS_SLOT, ath9k_hw_mac_to_clks(ah, us));
4182 EXPORT_SYMBOL(ath9k_hw_setslottime);
4184 void ath9k_hw_set11nmac2040(struct ath_hw *ah)
4186 struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;
4189 if (conf_is_ht40(conf) && !ah->config.cwm_ignore_extcca)
4190 macmode = AR_2040_JOINED_RX_CLEAR;
4194 REG_WRITE(ah, AR_2040_MODE, macmode);
4197 /* HW Generic timers configuration */
4199 static const struct ath_gen_timer_configuration gen_tmr_configuration[] =
4201 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4202 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4203 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4204 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4205 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4206 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4207 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4208 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4209 {AR_NEXT_NDP2_TIMER, AR_NDP2_PERIOD, AR_NDP2_TIMER_MODE, 0x0001},
4210 {AR_NEXT_NDP2_TIMER + 1*4, AR_NDP2_PERIOD + 1*4,
4211 AR_NDP2_TIMER_MODE, 0x0002},
4212 {AR_NEXT_NDP2_TIMER + 2*4, AR_NDP2_PERIOD + 2*4,
4213 AR_NDP2_TIMER_MODE, 0x0004},
4214 {AR_NEXT_NDP2_TIMER + 3*4, AR_NDP2_PERIOD + 3*4,
4215 AR_NDP2_TIMER_MODE, 0x0008},
4216 {AR_NEXT_NDP2_TIMER + 4*4, AR_NDP2_PERIOD + 4*4,
4217 AR_NDP2_TIMER_MODE, 0x0010},
4218 {AR_NEXT_NDP2_TIMER + 5*4, AR_NDP2_PERIOD + 5*4,
4219 AR_NDP2_TIMER_MODE, 0x0020},
4220 {AR_NEXT_NDP2_TIMER + 6*4, AR_NDP2_PERIOD + 6*4,
4221 AR_NDP2_TIMER_MODE, 0x0040},
4222 {AR_NEXT_NDP2_TIMER + 7*4, AR_NDP2_PERIOD + 7*4,
4223 AR_NDP2_TIMER_MODE, 0x0080}
4226 /* HW generic timer primitives */
4228 /* compute and clear index of rightmost 1 */
4229 static u32 rightmost_index(struct ath_gen_timer_table *timer_table, u32 *mask)
4239 return timer_table->gen_timer_index[b];
4242 u32 ath9k_hw_gettsf32(struct ath_hw *ah)
4244 return REG_READ(ah, AR_TSF_L32);
4246 EXPORT_SYMBOL(ath9k_hw_gettsf32);
4248 struct ath_gen_timer *ath_gen_timer_alloc(struct ath_hw *ah,
4249 void (*trigger)(void *),
4250 void (*overflow)(void *),
4254 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
4255 struct ath_gen_timer *timer;
4257 timer = kzalloc(sizeof(struct ath_gen_timer), GFP_KERNEL);
4259 if (timer == NULL) {
4260 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
4261 "Failed to allocate memory"
4262 "for hw timer[%d]\n", timer_index);
4266 /* allocate a hardware generic timer slot */
4267 timer_table->timers[timer_index] = timer;
4268 timer->index = timer_index;
4269 timer->trigger = trigger;
4270 timer->overflow = overflow;
4275 EXPORT_SYMBOL(ath_gen_timer_alloc);
4277 void ath9k_hw_gen_timer_start(struct ath_hw *ah,
4278 struct ath_gen_timer *timer,
4282 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
4285 BUG_ON(!timer_period);
4287 set_bit(timer->index, &timer_table->timer_mask.timer_bits);
4289 tsf = ath9k_hw_gettsf32(ah);
4291 ath_print(ath9k_hw_common(ah), ATH_DBG_HWTIMER,
4292 "curent tsf %x period %x"
4293 "timer_next %x\n", tsf, timer_period, timer_next);
4296 * Pull timer_next forward if the current TSF already passed it
4297 * because of software latency
4299 if (timer_next < tsf)
4300 timer_next = tsf + timer_period;
4303 * Program generic timer registers
4305 REG_WRITE(ah, gen_tmr_configuration[timer->index].next_addr,
4307 REG_WRITE(ah, gen_tmr_configuration[timer->index].period_addr,
4309 REG_SET_BIT(ah, gen_tmr_configuration[timer->index].mode_addr,
4310 gen_tmr_configuration[timer->index].mode_mask);
4312 /* Enable both trigger and thresh interrupt masks */
4313 REG_SET_BIT(ah, AR_IMR_S5,
4314 (SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_THRESH) |
4315 SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_TRIG)));
4317 EXPORT_SYMBOL(ath9k_hw_gen_timer_start);
4319 void ath9k_hw_gen_timer_stop(struct ath_hw *ah, struct ath_gen_timer *timer)
4321 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
4323 if ((timer->index < AR_FIRST_NDP_TIMER) ||
4324 (timer->index >= ATH_MAX_GEN_TIMER)) {
4328 /* Clear generic timer enable bits. */
4329 REG_CLR_BIT(ah, gen_tmr_configuration[timer->index].mode_addr,
4330 gen_tmr_configuration[timer->index].mode_mask);
4332 /* Disable both trigger and thresh interrupt masks */
4333 REG_CLR_BIT(ah, AR_IMR_S5,
4334 (SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_THRESH) |
4335 SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_TRIG)));
4337 clear_bit(timer->index, &timer_table->timer_mask.timer_bits);
4339 EXPORT_SYMBOL(ath9k_hw_gen_timer_stop);
4341 void ath_gen_timer_free(struct ath_hw *ah, struct ath_gen_timer *timer)
4343 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
4345 /* free the hardware generic timer slot */
4346 timer_table->timers[timer->index] = NULL;
4349 EXPORT_SYMBOL(ath_gen_timer_free);
4352 * Generic Timer Interrupts handling
4354 void ath_gen_timer_isr(struct ath_hw *ah)
4356 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
4357 struct ath_gen_timer *timer;
4358 struct ath_common *common = ath9k_hw_common(ah);
4359 u32 trigger_mask, thresh_mask, index;
4361 /* get hardware generic timer interrupt status */
4362 trigger_mask = ah->intr_gen_timer_trigger;
4363 thresh_mask = ah->intr_gen_timer_thresh;
4364 trigger_mask &= timer_table->timer_mask.val;
4365 thresh_mask &= timer_table->timer_mask.val;
4367 trigger_mask &= ~thresh_mask;
4369 while (thresh_mask) {
4370 index = rightmost_index(timer_table, &thresh_mask);
4371 timer = timer_table->timers[index];
4373 ath_print(common, ATH_DBG_HWTIMER,
4374 "TSF overflow for Gen timer %d\n", index);
4375 timer->overflow(timer->arg);
4378 while (trigger_mask) {
4379 index = rightmost_index(timer_table, &trigger_mask);
4380 timer = timer_table->timers[index];
4382 ath_print(common, ATH_DBG_HWTIMER,
4383 "Gen timer[%d] trigger\n", index);
4384 timer->trigger(timer->arg);
4387 EXPORT_SYMBOL(ath_gen_timer_isr);
4392 } ath_mac_bb_names[] = {
4393 /* Devices with external radios */
4394 { AR_SREV_VERSION_5416_PCI, "5416" },
4395 { AR_SREV_VERSION_5416_PCIE, "5418" },
4396 { AR_SREV_VERSION_9100, "9100" },
4397 { AR_SREV_VERSION_9160, "9160" },
4398 /* Single-chip solutions */
4399 { AR_SREV_VERSION_9280, "9280" },
4400 { AR_SREV_VERSION_9285, "9285" },
4401 { AR_SREV_VERSION_9287, "9287" },
4402 { AR_SREV_VERSION_9271, "9271" },
4405 /* For devices with external radios */
4409 } ath_rf_names[] = {
4411 { AR_RAD5133_SREV_MAJOR, "5133" },
4412 { AR_RAD5122_SREV_MAJOR, "5122" },
4413 { AR_RAD2133_SREV_MAJOR, "2133" },
4414 { AR_RAD2122_SREV_MAJOR, "2122" }
4418 * Return the MAC/BB name. "????" is returned if the MAC/BB is unknown.
4420 static const char *ath9k_hw_mac_bb_name(u32 mac_bb_version)
4424 for (i=0; i<ARRAY_SIZE(ath_mac_bb_names); i++) {
4425 if (ath_mac_bb_names[i].version == mac_bb_version) {
4426 return ath_mac_bb_names[i].name;
4434 * Return the RF name. "????" is returned if the RF is unknown.
4435 * Used for devices with external radios.
4437 static const char *ath9k_hw_rf_name(u16 rf_version)
4441 for (i=0; i<ARRAY_SIZE(ath_rf_names); i++) {
4442 if (ath_rf_names[i].version == rf_version) {
4443 return ath_rf_names[i].name;
4450 void ath9k_hw_name(struct ath_hw *ah, char *hw_name, size_t len)
4454 /* chipsets >= AR9280 are single-chip */
4455 if (AR_SREV_9280_10_OR_LATER(ah)) {
4456 used = snprintf(hw_name, len,
4457 "Atheros AR%s Rev:%x",
4458 ath9k_hw_mac_bb_name(ah->hw_version.macVersion),
4459 ah->hw_version.macRev);
4462 used = snprintf(hw_name, len,
4463 "Atheros AR%s MAC/BB Rev:%x AR%s RF Rev:%x",
4464 ath9k_hw_mac_bb_name(ah->hw_version.macVersion),
4465 ah->hw_version.macRev,
4466 ath9k_hw_rf_name((ah->hw_version.analog5GhzRev &
4467 AR_RADIO_SREV_MAJOR)),
4468 ah->hw_version.phyRev);
4471 hw_name[used] = '\0';
4473 EXPORT_SYMBOL(ath9k_hw_name);