Merge tag 'mt76-for-kvalo-2018-10-13' of https://github.com/nbd168/wireless

mt76 patches for 4.20

* mt76x0 fixes
* mt76x0e improvements (should be usable now)
* usb support improvements
* more mt76x0/mt76x2 unification work
* minor fix for aggregation + powersave clients
Kalle Valo 2018-10-14 11:40:41 +03:00
commit 6bfa6975f1
47 changed files with 875 additions and 932 deletions
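Most of the mt76x0/mt76x2 unification in the hunks below hangs off a small bus abstraction: `struct mt76_bus_ops` gains an `enum mt76_bus_type`, and shared code branches with the new `mt76_is_usb()`/`mt76_is_mmio()` helpers instead of duplicating USB and PCIe paths. A minimal, self-contained sketch of that pattern follows; the structure layouts are simplified stand-ins, and only the names that also appear in the hunks below are taken from the driver.

```c
#include <stdbool.h>

/* Simplified stand-ins for the driver structures touched by this series;
 * only the fields needed to show the bus-type check are kept. */
enum mt76_bus_type {
	MT76_BUS_MMIO,
	MT76_BUS_USB,
};

struct mt76_bus_ops {
	enum mt76_bus_type type;
};

struct mt76_dev {
	const struct mt76_bus_ops *bus;
};

struct mt76x02_dev {
	struct mt76_dev mt76;
};

/* Same shape as the helpers the diff adds to mt76.h. */
#define mt76_is_usb(dev)  ((dev)->mt76.bus->type == MT76_BUS_USB)
#define mt76_is_mmio(dev) ((dev)->mt76.bus->type == MT76_BUS_MMIO)

/* Illustrative only: branch the way the mt76x0 PHY code does when it
 * chooses between the MCU register-pair path (USB) and direct RF CSR
 * access (MMIO). */
static bool use_mcu_reg_pairs(const struct mt76x02_dev *dev)
{
	return mt76_is_usb(dev);
}
```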


@ -79,6 +79,7 @@ void mt76_mmio_init(struct mt76_dev *dev, void __iomem *regs)
.copy = mt76_mmio_copy,
.wr_rp = mt76_mmio_wr_rp,
.rd_rp = mt76_mmio_rd_rp,
.type = MT76_BUS_MMIO,
};
dev->bus = &mt76_mmio_ops;


@ -38,6 +38,11 @@ struct mt76_reg_pair {
u32 value;
};
enum mt76_bus_type {
MT76_BUS_MMIO,
MT76_BUS_USB,
};
struct mt76_bus_ops {
u32 (*rr)(struct mt76_dev *dev, u32 offset);
void (*wr)(struct mt76_dev *dev, u32 offset, u32 val);
@ -48,8 +53,12 @@ struct mt76_bus_ops {
const struct mt76_reg_pair *rp, int len);
int (*rd_rp)(struct mt76_dev *dev, u32 base,
struct mt76_reg_pair *rp, int len);
enum mt76_bus_type type;
};
#define mt76_is_usb(dev) ((dev)->mt76.bus->type == MT76_BUS_USB)
#define mt76_is_mmio(dev) ((dev)->mt76.bus->type == MT76_BUS_MMIO)
enum mt76_txq_id {
MT_TXQ_VO = IEEE80211_AC_VO,
MT_TXQ_VI = IEEE80211_AC_VI,


@ -1,126 +0,0 @@
/*
* Copyright (C) 2014 Felix Fietkau <nbd@openwrt.org>
* Copyright (C) 2015 Jakub Kicinski <kubakici@wp.pl>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2
* as published by the Free Software Foundation
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*/
#ifndef __MT76X0U_DMA_H
#define __MT76X0U_DMA_H
#include <asm/unaligned.h>
#include <linux/skbuff.h>
#define MT_DMA_HDR_LEN 4
#define MT_RX_INFO_LEN 4
#define MT_FCE_INFO_LEN 4
#define MT_DMA_HDRS (MT_DMA_HDR_LEN + MT_RX_INFO_LEN)
/* Common Tx DMA descriptor fields */
#define MT_TXD_INFO_LEN GENMASK(15, 0)
#define MT_TXD_INFO_D_PORT GENMASK(29, 27)
#define MT_TXD_INFO_TYPE GENMASK(31, 30)
/* Tx DMA MCU command specific flags */
#define MT_TXD_CMD_SEQ GENMASK(19, 16)
#define MT_TXD_CMD_TYPE GENMASK(26, 20)
enum mt76_msg_port {
WLAN_PORT,
CPU_RX_PORT,
CPU_TX_PORT,
HOST_PORT,
VIRTUAL_CPU_RX_PORT,
VIRTUAL_CPU_TX_PORT,
DISCARD,
};
enum mt76_info_type {
DMA_PACKET,
DMA_COMMAND,
};
/* Tx DMA packet specific flags */
#define MT_TXD_PKT_INFO_NEXT_VLD BIT(16)
#define MT_TXD_PKT_INFO_TX_BURST BIT(17)
#define MT_TXD_PKT_INFO_80211 BIT(19)
#define MT_TXD_PKT_INFO_TSO BIT(20)
#define MT_TXD_PKT_INFO_CSO BIT(21)
#define MT_TXD_PKT_INFO_WIV BIT(24)
#define MT_TXD_PKT_INFO_QSEL GENMASK(26, 25)
enum mt76_qsel {
MT_QSEL_MGMT,
MT_QSEL_HCCA,
MT_QSEL_EDCA,
MT_QSEL_EDCA_2,
};
static inline int mt76x0_dma_skb_wrap(struct sk_buff *skb,
enum mt76_msg_port d_port,
enum mt76_info_type type, u32 flags)
{
u32 info;
/* Buffer layout:
* | 4B | xfer len | pad | 4B |
* | TXINFO | pkt/cmd | zero pad to 4B | zero |
*
* length field of TXINFO should be set to 'xfer len'.
*/
info = flags |
FIELD_PREP(MT_TXD_INFO_LEN, round_up(skb->len, 4)) |
FIELD_PREP(MT_TXD_INFO_D_PORT, d_port) |
FIELD_PREP(MT_TXD_INFO_TYPE, type);
put_unaligned_le32(info, skb_push(skb, sizeof(info)));
return skb_put_padto(skb, round_up(skb->len, 4) + 4);
}
static inline int
mt76x0_dma_skb_wrap_pkt(struct sk_buff *skb, enum mt76_qsel qsel, u32 flags)
{
flags |= FIELD_PREP(MT_TXD_PKT_INFO_QSEL, qsel);
return mt76x0_dma_skb_wrap(skb, WLAN_PORT, DMA_PACKET, flags);
}
/* Common Rx DMA descriptor fields */
#define MT_RXD_INFO_LEN GENMASK(13, 0)
#define MT_RXD_INFO_PCIE_INTR BIT(24)
#define MT_RXD_INFO_QSEL GENMASK(26, 25)
#define MT_RXD_INFO_PORT GENMASK(29, 27)
#define MT_RXD_INFO_TYPE GENMASK(31, 30)
/* Rx DMA packet specific flags */
#define MT_RXD_PKT_INFO_UDP_ERR BIT(16)
#define MT_RXD_PKT_INFO_TCP_ERR BIT(17)
#define MT_RXD_PKT_INFO_IP_ERR BIT(18)
#define MT_RXD_PKT_INFO_PKT_80211 BIT(19)
#define MT_RXD_PKT_INFO_L3L4_DONE BIT(20)
#define MT_RXD_PKT_INFO_MAC_LEN GENMASK(23, 21)
/* Rx DMA MCU command specific flags */
#define MT_RXD_CMD_INFO_SELF_GEN BIT(15)
#define MT_RXD_CMD_INFO_CMD_SEQ GENMASK(19, 16)
#define MT_RXD_CMD_INFO_EVT_TYPE GENMASK(23, 20)
enum mt76_evt_type {
CMD_DONE,
CMD_ERROR,
CMD_RETRY,
EVENT_PWR_RSP,
EVENT_WOW_RSP,
EVENT_CARRIER_DETECT_RSP,
EVENT_DFS_DETECT_RSP,
};
#endif


@ -31,8 +31,8 @@ mt76x0_efuse_physical_size_check(struct mt76x02_dev *dev)
int ret, i;
u32 start = 0, end = 0, cnt_free;
ret = mt76x02_get_efuse_data(&dev->mt76, MT_EE_USAGE_MAP_START,
data, sizeof(data), MT_EE_PHYSICAL_READ);
ret = mt76x02_get_efuse_data(dev, MT_EE_USAGE_MAP_START, data,
sizeof(data), MT_EE_PHYSICAL_READ);
if (ret)
return ret;
@ -55,10 +55,10 @@ mt76x0_efuse_physical_size_check(struct mt76x02_dev *dev)
static void mt76x0_set_chip_cap(struct mt76x02_dev *dev)
{
u16 nic_conf0 = mt76x02_eeprom_get(&dev->mt76, MT_EE_NIC_CONF_0);
u16 nic_conf1 = mt76x02_eeprom_get(&dev->mt76, MT_EE_NIC_CONF_1);
u16 nic_conf0 = mt76x02_eeprom_get(dev, MT_EE_NIC_CONF_0);
u16 nic_conf1 = mt76x02_eeprom_get(dev, MT_EE_NIC_CONF_1);
mt76x02_eeprom_parse_hw_cap(&dev->mt76);
mt76x02_eeprom_parse_hw_cap(dev);
dev_dbg(dev->mt76.dev, "2GHz %d 5GHz %d\n",
dev->mt76.cap.has_2ghz, dev->mt76.cap.has_5ghz);
@ -86,7 +86,7 @@ static void mt76x0_set_temp_offset(struct mt76x02_dev *dev)
{
u8 val;
val = mt76x02_eeprom_get(&dev->mt76, MT_EE_2G_TARGET_POWER) >> 8;
val = mt76x02_eeprom_get(dev, MT_EE_2G_TARGET_POWER) >> 8;
if (mt76x02_field_valid(val))
dev->cal.rx.temp_offset = mt76x02_sign_extend(val, 8);
else
@ -98,12 +98,12 @@ static void mt76x0_set_freq_offset(struct mt76x02_dev *dev)
struct mt76x02_rx_freq_cal *caldata = &dev->cal.rx;
u8 val;
val = mt76x02_eeprom_get(&dev->mt76, MT_EE_FREQ_OFFSET);
val = mt76x02_eeprom_get(dev, MT_EE_FREQ_OFFSET);
if (!mt76x02_field_valid(val))
val = 0;
caldata->freq_offset = val;
val = mt76x02_eeprom_get(&dev->mt76, MT_EE_TSSI_BOUND4) >> 8;
val = mt76x02_eeprom_get(dev, MT_EE_TSSI_BOUND4) >> 8;
if (!mt76x02_field_valid(val))
val = 0;
@ -118,10 +118,8 @@ void mt76x0_read_rx_gain(struct mt76x02_dev *dev)
u16 rssi_offset;
int i;
mt76x02_get_rx_gain(&dev->mt76, chan->band, &rssi_offset,
&lna_2g, lna_5g);
caldata->lna_gain = mt76x02_get_lna_gain(&dev->mt76, &lna_2g,
lna_5g, chan);
mt76x02_get_rx_gain(dev, chan->band, &rssi_offset, &lna_2g, lna_5g);
caldata->lna_gain = mt76x02_get_lna_gain(dev, &lna_2g, lna_5g, chan);
for (i = 0; i < ARRAY_SIZE(caldata->rssi_offset); i++) {
val = rssi_offset >> (8 * i);
@ -132,12 +130,12 @@ void mt76x0_read_rx_gain(struct mt76x02_dev *dev)
}
}
static s8 mt76x0_get_delta(struct mt76_dev *dev)
static s8 mt76x0_get_delta(struct mt76x02_dev *dev)
{
struct cfg80211_chan_def *chandef = &dev->chandef;
struct cfg80211_chan_def *chandef = &dev->mt76.chandef;
u8 val;
if (mt76x02_tssi_enabled(dev))
if (mt76x0_tssi_enabled(dev))
return 0;
if (chandef->width == NL80211_CHAN_WIDTH_80) {
@ -162,54 +160,54 @@ void mt76x0_get_tx_power_per_rate(struct mt76x02_dev *dev)
struct ieee80211_channel *chan = dev->mt76.chandef.chan;
bool is_2ghz = chan->band == NL80211_BAND_2GHZ;
struct mt76_rate_power *t = &dev->mt76.rate_power;
s8 delta = mt76x0_get_delta(&dev->mt76);
s8 delta = mt76x0_get_delta(dev);
u16 val, addr;
memset(t, 0, sizeof(*t));
/* cck 1M, 2M, 5.5M, 11M */
val = mt76x02_eeprom_get(&dev->mt76, MT_EE_TX_POWER_BYRATE_BASE);
val = mt76x02_eeprom_get(dev, MT_EE_TX_POWER_BYRATE_BASE);
t->cck[0] = t->cck[1] = s6_to_s8(val);
t->cck[2] = t->cck[3] = s6_to_s8(val >> 8);
/* ofdm 6M, 9M, 12M, 18M */
addr = is_2ghz ? MT_EE_TX_POWER_BYRATE_BASE + 2 : 0x120;
val = mt76x02_eeprom_get(&dev->mt76, addr);
val = mt76x02_eeprom_get(dev, addr);
t->ofdm[0] = t->ofdm[1] = s6_to_s8(val);
t->ofdm[2] = t->ofdm[3] = s6_to_s8(val >> 8);
/* ofdm 24M, 36M, 48M, 54M */
addr = is_2ghz ? MT_EE_TX_POWER_BYRATE_BASE + 4 : 0x122;
val = mt76x02_eeprom_get(&dev->mt76, addr);
val = mt76x02_eeprom_get(dev, addr);
t->ofdm[4] = t->ofdm[5] = s6_to_s8(val);
t->ofdm[6] = t->ofdm[7] = s6_to_s8(val >> 8);
/* ht-vht mcs 1ss 0, 1, 2, 3 */
addr = is_2ghz ? MT_EE_TX_POWER_BYRATE_BASE + 6 : 0x124;
val = mt76x02_eeprom_get(&dev->mt76, addr);
val = mt76x02_eeprom_get(dev, addr);
t->ht[0] = t->ht[1] = t->vht[0] = t->vht[1] = s6_to_s8(val);
t->ht[2] = t->ht[3] = t->vht[2] = t->vht[3] = s6_to_s8(val >> 8);
/* ht-vht mcs 1ss 4, 5, 6 */
addr = is_2ghz ? MT_EE_TX_POWER_BYRATE_BASE + 8 : 0x126;
val = mt76x02_eeprom_get(&dev->mt76, addr);
val = mt76x02_eeprom_get(dev, addr);
t->ht[4] = t->ht[5] = t->vht[4] = t->vht[5] = s6_to_s8(val);
t->ht[6] = t->vht[6] = s6_to_s8(val >> 8);
/* ht-vht mcs 1ss 0, 1, 2, 3 stbc */
addr = is_2ghz ? MT_EE_TX_POWER_BYRATE_BASE + 14 : 0xec;
val = mt76x02_eeprom_get(&dev->mt76, addr);
val = mt76x02_eeprom_get(dev, addr);
t->stbc[0] = t->stbc[1] = s6_to_s8(val);
t->stbc[2] = t->stbc[3] = s6_to_s8(val >> 8);
/* ht-vht mcs 1ss 4, 5, 6 stbc */
addr = is_2ghz ? MT_EE_TX_POWER_BYRATE_BASE + 16 : 0xee;
val = mt76x02_eeprom_get(&dev->mt76, addr);
val = mt76x02_eeprom_get(dev, addr);
t->stbc[4] = t->stbc[5] = s6_to_s8(val);
t->stbc[6] = t->stbc[7] = s6_to_s8(val >> 8);
/* vht mcs 8, 9 5GHz */
val = mt76x02_eeprom_get(&dev->mt76, 0x132);
val = mt76x02_eeprom_get(dev, 0x132);
t->vht[7] = s6_to_s8(val);
t->vht[8] = s6_to_s8(val >> 8);
@ -266,7 +264,7 @@ void mt76x0_get_power_info(struct mt76x02_dev *dev, u8 *info)
addr = MT_EE_TX_POWER_0_GRP4_TSSI_SLOPE + 2 + offset;
}
data = mt76x02_eeprom_get(&dev->mt76, addr);
data = mt76x02_eeprom_get(dev, addr);
info[0] = data;
if (!info[0] || info[0] > 0x3f)
@ -312,7 +310,7 @@ static int mt76x0_load_eeprom(struct mt76x02_dev *dev)
if (found < 0)
return found;
return mt76x02_get_efuse_data(&dev->mt76, 0, dev->mt76.eeprom.data,
return mt76x02_get_efuse_data(dev, 0, dev->mt76.eeprom.data,
MT76X0_EEPROM_SIZE, MT_EE_READ);
}
@ -326,7 +324,7 @@ int mt76x0_eeprom_init(struct mt76x02_dev *dev)
if (err < 0)
return err;
data = mt76x02_eeprom_get(&dev->mt76, MT_EE_VERSION);
data = mt76x02_eeprom_get(dev, MT_EE_VERSION);
version = data >> 8;
fae = data;
@ -337,8 +335,7 @@ int mt76x0_eeprom_init(struct mt76x02_dev *dev)
dev_info(dev->mt76.dev, "EEPROM ver:%02hhx fae:%02hhx\n",
version, fae);
mt76x02_mac_setaddr(&dev->mt76,
dev->mt76.eeprom.data + MT_EE_MAC_ADDR);
mt76x02_mac_setaddr(dev, dev->mt76.eeprom.data + MT_EE_MAC_ADDR);
mt76x0_set_chip_cap(dev);
mt76x0_set_freq_offset(dev);
mt76x0_set_temp_offset(dev);


@ -37,4 +37,10 @@ static inline s8 s6_to_s8(u32 val)
return ret;
}
static inline bool mt76x0_tssi_enabled(struct mt76x02_dev *dev)
{
return (mt76x02_eeprom_get(dev, MT_EE_NIC_CONF_1) &
MT_EE_NIC_CONF_1_TX_ALC_EN);
}
#endif


@ -138,7 +138,7 @@ static void mt76x0_init_mac_registers(struct mt76x02_dev *dev)
RANDOM_WRITE(dev, common_mac_reg_table);
mt76x02_set_beacon_offsets(&dev->mt76);
mt76x02_set_beacon_offsets(dev);
/* Enable PBF and MAC clock SYS_CTRL[11:10] = 0x3 */
RANDOM_WRITE(dev, mt76x0_mac_reg_table);
@ -280,7 +280,7 @@ int mt76x0_init_hardware(struct mt76x02_dev *dev)
return -ETIMEDOUT;
mt76x0_reset_csr_bbp(dev);
ret = mt76x02_mcu_function_select(&dev->mt76, Q_SELECT, 1, false);
ret = mt76x02_mcu_function_select(dev, Q_SELECT, 1, false);
if (ret)
return ret;
@ -368,7 +368,10 @@ int mt76x0_register_device(struct mt76x02_dev *dev)
hw->max_rates = 1;
hw->max_report_rates = 7;
hw->max_rate_tries = 1;
hw->extra_tx_headroom = sizeof(struct mt76x02_txwi) + 4 + 2;
hw->extra_tx_headroom = 2;
if (mt76_is_usb(dev))
hw->extra_tx_headroom += sizeof(struct mt76x02_txwi) +
MT_DMA_HDR_LEN;
hw->sta_data_size = sizeof(struct mt76x02_sta);
hw->vif_data_size = sizeof(struct mt76x02_vif);


@ -16,6 +16,20 @@
#include <linux/etherdevice.h>
#include "mt76x0.h"
static int
mt76x0_set_channel(struct mt76x02_dev *dev, struct cfg80211_chan_def *chandef)
{
int ret;
cancel_delayed_work_sync(&dev->cal_work);
mt76_set_channel(&dev->mt76);
ret = mt76x0_phy_set_channel(dev, chandef);
mt76_txq_schedule_all(&dev->mt76);
return ret;
}
int mt76x0_config(struct ieee80211_hw *hw, u32 changed)
{
struct mt76x02_dev *dev = hw->priv;
@ -25,7 +39,7 @@ int mt76x0_config(struct ieee80211_hw *hw, u32 changed)
if (changed & IEEE80211_CONF_CHANGE_CHANNEL) {
ieee80211_stop_queues(hw);
ret = mt76x0_phy_set_channel(dev, &hw->conf.chandef);
ret = mt76x0_set_channel(dev, &hw->conf.chandef);
ieee80211_wake_queues(hw);
}
@ -114,8 +128,6 @@ void mt76x0_sw_scan(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
{
struct mt76x02_dev *dev = hw->priv;
cancel_delayed_work_sync(&dev->cal_work);
mt76x0_agc_save(dev);
set_bit(MT76_SCANNING, &dev->mt76.state);
}
EXPORT_SYMBOL_GPL(mt76x0_sw_scan);
@ -125,11 +137,7 @@ void mt76x0_sw_scan_complete(struct ieee80211_hw *hw,
{
struct mt76x02_dev *dev = hw->priv;
mt76x0_agc_restore(dev);
clear_bit(MT76_SCANNING, &dev->mt76.state);
ieee80211_queue_delayed_work(dev->mt76.hw, &dev->cal_work,
MT_CALIBRATE_INTERVAL);
}
EXPORT_SYMBOL_GPL(mt76x0_sw_scan_complete);


@ -39,6 +39,9 @@ enum mcu_calibrate {
MCU_CAL_TXDCOC,
MCU_CAL_RX_GROUP_DELAY,
MCU_CAL_TX_GROUP_DELAY,
MCU_CAL_VCO,
MCU_CAL_NO_SIGNAL = 0xfe,
MCU_CAL_FULL = 0xff,
};
int mt76x0e_mcu_init(struct mt76x02_dev *dev);


@ -66,12 +66,11 @@ int mt76x0_set_rts_threshold(struct ieee80211_hw *hw, u32 value);
/* PHY */
void mt76x0_phy_init(struct mt76x02_dev *dev);
int mt76x0_wait_bbp_ready(struct mt76x02_dev *dev);
void mt76x0_agc_save(struct mt76x02_dev *dev);
void mt76x0_agc_restore(struct mt76x02_dev *dev);
int mt76x0_phy_set_channel(struct mt76x02_dev *dev,
struct cfg80211_chan_def *chandef);
void mt76x0_phy_recalibrate_after_assoc(struct mt76x02_dev *dev);
void mt76x0_phy_set_txpower(struct mt76x02_dev *dev);
void mt76x0_phy_calibrate(struct mt76x02_dev *dev, bool power_on);
/* MAC */
void mt76x0_mac_work(struct work_struct *work);


@ -28,6 +28,7 @@ static int mt76x0e_start(struct ieee80211_hw *hw)
mutex_lock(&dev->mt76.mutex);
mt76x02_mac_start(dev);
mt76x0_phy_calibrate(dev, true);
ieee80211_queue_delayed_work(dev->mt76.hw, &dev->mac_work,
MT_CALIBRATE_INTERVAL);
ieee80211_queue_delayed_work(dev->mt76.hw, &dev->cal_work,
@ -71,10 +72,19 @@ static const struct ieee80211_ops mt76x0e_ops = {
.tx = mt76x02_tx,
.start = mt76x0e_start,
.stop = mt76x0e_stop,
.config = mt76x0_config,
.add_interface = mt76x02_add_interface,
.remove_interface = mt76x02_remove_interface,
.config = mt76x0_config,
.configure_filter = mt76x02_configure_filter,
.sta_add = mt76x02_sta_add,
.sta_remove = mt76x02_sta_remove,
.set_key = mt76x02_set_key,
.conf_tx = mt76x02_conf_tx,
.sw_scan_start = mt76x0_sw_scan,
.sw_scan_complete = mt76x0_sw_scan_complete,
.ampdu_action = mt76x02_ampdu_action,
.sta_rate_tbl_update = mt76x02_sta_rate_tbl_update,
.wake_tx_queue = mt76_wake_tx_queue,
};
static int mt76x0e_register_device(struct mt76x02_dev *dev)
@ -102,28 +112,34 @@ static int mt76x0e_register_device(struct mt76x02_dev *dev)
u16 val;
mt76_clear(dev, MT_COEXCFG0, BIT(0));
val = mt76x02_eeprom_get(&dev->mt76, MT_EE_NIC_CONF_0);
if (val & MT_EE_NIC_CONF_0_PA_IO_CURRENT) {
u32 data;
/* set external external PA I/O
* current to 16mA
*/
data = mt76_rr(dev, 0x11c);
val |= 0xc03;
mt76_wr(dev, 0x11c, val);
}
val = mt76x02_eeprom_get(dev, MT_EE_NIC_CONF_0);
if (!(val & MT_EE_NIC_CONF_0_PA_IO_CURRENT))
mt76_set(dev, MT_XO_CTRL7, 0xc03);
}
mt76_clear(dev, 0x110, BIT(9));
mt76_set(dev, MT_MAX_LEN_CFG, BIT(13));
err = mt76x0_register_device(dev);
if (err < 0)
return err;
set_bit(MT76_STATE_INITIALIZED, &dev->mt76.state);
return 0;
}
static int
mt76x0e_probe(struct pci_dev *pdev, const struct pci_device_id *id)
{
static const struct mt76_driver_ops drv_ops = {
.txwi_size = sizeof(struct mt76x02_txwi),
.tx_prepare_skb = mt76x02_tx_prepare_skb,
.tx_complete_skb = mt76x02_tx_complete_skb,
.rx_skb = mt76x02_queue_rx_skb,
.rx_poll_complete = mt76x02_rx_poll_complete,
};
struct mt76x02_dev *dev;
int ret;
@ -141,7 +157,7 @@ mt76x0e_probe(struct pci_dev *pdev, const struct pci_device_id *id)
if (ret)
return ret;
dev = mt76x0_alloc_device(&pdev->dev, NULL, &mt76x0e_ops);
dev = mt76x0_alloc_device(&pdev->dev, &drv_ops, &mt76x0e_ops);
if (!dev)
return -ENOMEM;
@ -150,6 +166,11 @@ mt76x0e_probe(struct pci_dev *pdev, const struct pci_device_id *id)
dev->mt76.rev = mt76_rr(dev, MT_ASIC_VERSION);
dev_info(dev->mt76.dev, "ASIC revision: %08x\n", dev->mt76.rev);
ret = devm_request_irq(dev->mt76.dev, pdev->irq, mt76x02_irq_handler,
IRQF_SHARED, KBUILD_MODNAME, dev);
if (ret)
goto error;
ret = mt76x0e_register_device(dev);
if (ret < 0)
goto error;
@ -167,7 +188,7 @@ static void mt76x0e_cleanup(struct mt76x02_dev *dev)
mt76x0_chip_onoff(dev, false, false);
mt76x0e_stop_hw(dev);
mt76x02_dma_cleanup(dev);
mt76x02_mcu_cleanup(&dev->mt76);
mt76x02_mcu_cleanup(dev);
}
static void


@ -116,6 +116,7 @@ static int mt76x0e_load_firmware(struct mt76x02_dev *dev)
goto out;
}
mt76x02_set_ethtool_fwver(dev, hdr);
dev_dbg(dev->mt76.dev, "Firmware running!\n");
out:


@ -14,6 +14,9 @@
* GNU General Public License for more details.
*/
#include <linux/kernel.h>
#include <linux/etherdevice.h>
#include "mt76x0.h"
#include "mcu.h"
#include "eeprom.h"
@ -23,8 +26,6 @@
#include "initvals_phy.h"
#include "../mt76x02_phy.h"
#include <linux/etherdevice.h>
static int
mt76x0_rf_csr_wr(struct mt76x02_dev *dev, u32 offset, u8 value)
{
@ -37,7 +38,7 @@ mt76x0_rf_csr_wr(struct mt76x02_dev *dev, u32 offset, u8 value)
bank = MT_RF_BANK(offset);
reg = MT_RF_REG(offset);
if (WARN_ON_ONCE(reg > 64) || WARN_ON_ONCE(bank) > 8)
if (WARN_ON_ONCE(reg > 127) || WARN_ON_ONCE(bank > 8))
return -EINVAL;
mutex_lock(&dev->phy_mutex);
@ -76,7 +77,7 @@ static int mt76x0_rf_csr_rr(struct mt76x02_dev *dev, u32 offset)
bank = MT_RF_BANK(offset);
reg = MT_RF_REG(offset);
if (WARN_ON_ONCE(reg > 64) || WARN_ON_ONCE(bank) > 8)
if (WARN_ON_ONCE(reg > 127) || WARN_ON_ONCE(bank > 8))
return -EINVAL;
mutex_lock(&dev->phy_mutex);
@ -111,15 +112,16 @@ static int mt76x0_rf_csr_rr(struct mt76x02_dev *dev, u32 offset)
static int
rf_wr(struct mt76x02_dev *dev, u32 offset, u8 val)
{
if (test_bit(MT76_STATE_MCU_RUNNING, &dev->mt76.state)) {
if (mt76_is_usb(dev)) {
struct mt76_reg_pair pair = {
.reg = offset,
.value = val,
};
WARN_ON_ONCE(!test_bit(MT76_STATE_MCU_RUNNING,
&dev->mt76.state));
return mt76_wr_rp(dev, MT_MCU_MEMMAP_RF, &pair, 1);
} else {
WARN_ON_ONCE(1);
return mt76x0_rf_csr_wr(dev, offset, val);
}
}
@ -130,15 +132,16 @@ rf_rr(struct mt76x02_dev *dev, u32 offset)
int ret;
u32 val;
if (test_bit(MT76_STATE_MCU_RUNNING, &dev->mt76.state)) {
if (mt76_is_usb(dev)) {
struct mt76_reg_pair pair = {
.reg = offset,
};
WARN_ON_ONCE(!test_bit(MT76_STATE_MCU_RUNNING,
&dev->mt76.state));
ret = mt76_rd_rp(dev, MT_MCU_MEMMAP_RF, &pair, 1);
val = pair.value;
} else {
WARN_ON_ONCE(1);
ret = val = mt76x0_rf_csr_rr(dev, offset);
}
@ -175,9 +178,22 @@ rf_clear(struct mt76x02_dev *dev, u32 offset, u8 mask)
}
#endif
#define RF_RANDOM_WRITE(dev, tab) \
mt76_wr_rp(dev, MT_MCU_MEMMAP_RF, \
tab, ARRAY_SIZE(tab))
static void
mt76x0_rf_csr_wr_rp(struct mt76x02_dev *dev, const struct mt76_reg_pair *data,
int n)
{
while (n-- > 0) {
mt76x0_rf_csr_wr(dev, data->reg, data->value);
data++;
}
}
#define RF_RANDOM_WRITE(dev, tab) do { \
if (mt76_is_mmio(dev)) \
mt76x0_rf_csr_wr_rp(dev, tab, ARRAY_SIZE(tab)); \
else \
mt76_wr_rp(dev, MT_MCU_MEMMAP_RF, tab, ARRAY_SIZE(tab));\
} while (0)
int mt76x0_wait_bbp_ready(struct mt76x02_dev *dev)
{
@ -186,7 +202,6 @@ int mt76x0_wait_bbp_ready(struct mt76x02_dev *dev)
do {
val = mt76_rr(dev, MT_BBP(CORE, 0));
printk("BBP version %08x\n", val);
if (val && ~val)
break;
} while (--i);
@ -196,36 +211,10 @@ int mt76x0_wait_bbp_ready(struct mt76x02_dev *dev)
return -EIO;
}
dev_dbg(dev->mt76.dev, "BBP version %08x\n", val);
return 0;
}
static void
mt76x0_bbp_set_ctrlch(struct mt76x02_dev *dev, enum nl80211_chan_width width,
u8 ctrl)
{
int core_val, agc_val;
switch (width) {
case NL80211_CHAN_WIDTH_80:
core_val = 3;
agc_val = 7;
break;
case NL80211_CHAN_WIDTH_40:
core_val = 2;
agc_val = 3;
break;
default:
core_val = 0;
agc_val = 1;
break;
}
mt76_rmw_field(dev, MT_BBP(CORE, 1), MT_BBP_CORE_R1_BW, core_val);
mt76_rmw_field(dev, MT_BBP(AGC, 0), MT_BBP_AGC_R0_BW, agc_val);
mt76_rmw_field(dev, MT_BBP(AGC, 0), MT_BBP_AGC_R0_CTRL_CHAN, ctrl);
mt76_rmw_field(dev, MT_BBP(TXBE, 0), MT_BBP_TXBE_R0_CTRL_CHAN, ctrl);
}
static void mt76x0_vco_cal(struct mt76x02_dev *dev, u8 channel)
{
u8 val;
@ -282,13 +271,6 @@ static void mt76x0_vco_cal(struct mt76x02_dev *dev, u8 channel)
msleep(2);
}
static void
mt76x0_mac_set_ctrlch(struct mt76x02_dev *dev, bool primary_upper)
{
mt76_rmw_field(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_UPPER_40M,
primary_upper);
}
static void
mt76x0_phy_set_band(struct mt76x02_dev *dev, enum nl80211_band band)
{
@ -299,9 +281,6 @@ mt76x0_phy_set_band(struct mt76x02_dev *dev, enum nl80211_band band)
rf_wr(dev, MT_RF(5, 0), 0x45);
rf_wr(dev, MT_RF(6, 0), 0x44);
mt76_set(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_2G);
mt76_clear(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_5G);
mt76_wr(dev, MT_TX_ALC_VGA3, 0x00050007);
mt76_wr(dev, MT_TX0_RF_GAIN_CORR, 0x003E0002);
break;
@ -311,9 +290,6 @@ mt76x0_phy_set_band(struct mt76x02_dev *dev, enum nl80211_band band)
rf_wr(dev, MT_RF(5, 0), 0x44);
rf_wr(dev, MT_RF(6, 0), 0x45);
mt76_clear(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_2G);
mt76_set(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_5G);
mt76_wr(dev, MT_TX_ALC_VGA3, 0x00000005);
mt76_wr(dev, MT_TX0_RF_GAIN_CORR, 0x01010102);
break;
@ -475,7 +451,7 @@ mt76x0_phy_set_chan_rf_params(struct mt76x02_dev *dev, u8 channel, u16 rf_bw_ban
mt76_wr(dev, MT_RF_MISC, mac_reg);
band = (rf_band & RF_G_BAND) ? NL80211_BAND_2GHZ : NL80211_BAND_5GHZ;
if (mt76x02_ext_pa_enabled(&dev->mt76, band)) {
if (mt76x02_ext_pa_enabled(dev, band)) {
/*
MT_RF_MISC (offset: 0x0518)
[2]1'b1: enable external A band PA, 1'b0: disable external A band PA
@ -514,7 +490,7 @@ mt76x0_phy_set_chan_rf_params(struct mt76x02_dev *dev, u8 channel, u16 rf_bw_ban
}
static void
mt76x0_phy_set_chan_bbp_params(struct mt76x02_dev *dev, u8 channel, u16 rf_bw_band)
mt76x0_phy_set_chan_bbp_params(struct mt76x02_dev *dev, u16 rf_bw_band)
{
int i;
@ -587,7 +563,7 @@ mt76x0_bbp_set_bw(struct mt76x02_dev *dev, enum nl80211_chan_width width)
return ;
}
mt76x02_mcu_function_select(&dev->mt76, BW_SETTING, bw, false);
mt76x02_mcu_function_select(dev, BW_SETTING, bw, false);
}
void mt76x0_phy_set_txpower(struct mt76x02_dev *dev)
@ -603,9 +579,51 @@ void mt76x0_phy_set_txpower(struct mt76x02_dev *dev)
dev->mt76.txpower_cur = mt76x02_get_max_rate_power(t);
mt76x02_add_rate_power_offset(t, -info[0]);
mt76x02_phy_set_txpower(&dev->mt76, info[0], info[1]);
mt76x02_phy_set_txpower(dev, info[0], info[1]);
}
void mt76x0_phy_calibrate(struct mt76x02_dev *dev, bool power_on)
{
struct ieee80211_channel *chan = dev->mt76.chandef.chan;
u32 val, tx_alc, reg_val;
if (power_on) {
mt76x02_mcu_calibrate(dev, MCU_CAL_R, 0, false);
mt76x02_mcu_calibrate(dev, MCU_CAL_VCO, chan->hw_value,
false);
usleep_range(10, 20);
/* XXX: tssi */
}
tx_alc = mt76_rr(dev, MT_TX_ALC_CFG_0);
mt76_wr(dev, MT_TX_ALC_CFG_0, 0);
usleep_range(500, 700);
reg_val = mt76_rr(dev, MT_BBP(IBI, 9));
mt76_wr(dev, MT_BBP(IBI, 9), 0xffffff7e);
if (chan->band == NL80211_BAND_5GHZ) {
if (chan->hw_value < 100)
val = 0x701;
else if (chan->hw_value < 140)
val = 0x801;
else
val = 0x901;
} else {
val = 0x600;
}
mt76x02_mcu_calibrate(dev, MCU_CAL_FULL, val, false);
msleep(350);
mt76x02_mcu_calibrate(dev, MCU_CAL_LC, 1, false);
usleep_range(15000, 20000);
mt76_wr(dev, MT_BBP(IBI, 9), reg_val);
mt76_wr(dev, MT_TX_ALC_CFG_0, tx_alc);
mt76x02_mcu_calibrate(dev, MCU_CAL_RXDCOC, 1, false);
}
EXPORT_SYMBOL_GPL(mt76x0_phy_calibrate);
int mt76x0_phy_set_channel(struct mt76x02_dev *dev,
struct cfg80211_chan_def *chandef)
{
@ -665,9 +683,19 @@ int mt76x0_phy_set_channel(struct mt76x02_dev *dev,
break;
}
mt76x0_bbp_set_bw(dev, chandef->width);
mt76x0_bbp_set_ctrlch(dev, chandef->width, ch_group_index);
mt76x0_mac_set_ctrlch(dev, ch_group_index & 1);
if (mt76_is_usb(dev)) {
mt76x0_bbp_set_bw(dev, chandef->width);
} else {
if (chandef->width == NL80211_CHAN_WIDTH_80 ||
chandef->width == NL80211_CHAN_WIDTH_40)
val = 0x201;
else
val = 0x601;
mt76_wr(dev, MT_TX_SW_CFG0, val);
}
mt76x02_phy_set_bw(dev, chandef->width, ch_group_index);
mt76x02_phy_set_band(dev, chandef->chan->band,
ch_group_index & 1);
mt76x0_ant_select(dev);
mt76_rmw(dev, MT_EXT_CCA_CFG,
@ -680,7 +708,6 @@ int mt76x0_phy_set_channel(struct mt76x02_dev *dev,
mt76x0_phy_set_band(dev, chandef->chan->band);
mt76x0_phy_set_chan_rf_params(dev, channel, rf_bw_band);
mt76x0_read_rx_gain(dev);
/* set Japan Tx filter at channel 14 */
val = mt76_rr(dev, MT_BBP(CORE, 1));
@ -690,17 +717,27 @@ int mt76x0_phy_set_channel(struct mt76x02_dev *dev,
val &= ~0x20;
mt76_wr(dev, MT_BBP(CORE, 1), val);
mt76x0_phy_set_chan_bbp_params(dev, channel, rf_bw_band);
mt76x0_read_rx_gain(dev);
mt76x0_phy_set_chan_bbp_params(dev, rf_bw_band);
mt76x02_init_agc_gain(dev);
/* Vendor driver don't do it */
/* mt76x0_phy_set_tx_power(dev, channel, rf_bw_band); */
if (mt76_is_usb(dev)) {
mt76x0_vco_cal(dev, channel);
} else {
/* enable vco */
rf_set(dev, MT_RF(0, 4), BIT(7));
}
mt76x0_vco_cal(dev, channel);
if (scan)
mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_RXDCOC, 1, false);
return 0;
if (mt76_is_mmio(dev))
mt76x0_phy_calibrate(dev, false);
mt76x0_phy_set_txpower(dev);
ieee80211_queue_delayed_work(dev->mt76.hw, &dev->cal_work,
MT_CALIBRATE_INTERVAL);
return 0;
}
@ -710,7 +747,7 @@ void mt76x0_phy_recalibrate_after_assoc(struct mt76x02_dev *dev)
u8 channel = dev->mt76.chandef.chan->hw_value;
int is_5ghz = (dev->mt76.chandef.chan->band == NL80211_BAND_5GHZ) ? 1 : 0;
mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_R, 0, false);
mt76x02_mcu_calibrate(dev, MCU_CAL_R, 0, false);
mt76x0_vco_cal(dev, channel);
@ -718,109 +755,113 @@ void mt76x0_phy_recalibrate_after_assoc(struct mt76x02_dev *dev)
mt76_wr(dev, MT_TX_ALC_CFG_0, 0);
usleep_range(500, 700);
reg_val = mt76_rr(dev, 0x2124);
reg_val &= 0xffffff7e;
mt76_wr(dev, 0x2124, reg_val);
reg_val = mt76_rr(dev, MT_BBP(IBI, 9));
mt76_wr(dev, MT_BBP(IBI, 9), 0xffffff7e);
mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_RXDCOC, 0, false);
mt76x02_mcu_calibrate(dev, MCU_CAL_RXDCOC, 0, false);
mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_LC, is_5ghz, false);
mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_LOFT, is_5ghz, false);
mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_TXIQ, is_5ghz, false);
mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_TX_GROUP_DELAY,
is_5ghz, false);
mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_RXIQ, is_5ghz, false);
mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_RX_GROUP_DELAY,
is_5ghz, false);
mt76x02_mcu_calibrate(dev, MCU_CAL_LC, is_5ghz, false);
mt76x02_mcu_calibrate(dev, MCU_CAL_LOFT, is_5ghz, false);
mt76x02_mcu_calibrate(dev, MCU_CAL_TXIQ, is_5ghz, false);
mt76x02_mcu_calibrate(dev, MCU_CAL_TX_GROUP_DELAY, is_5ghz, false);
mt76x02_mcu_calibrate(dev, MCU_CAL_RXIQ, is_5ghz, false);
mt76x02_mcu_calibrate(dev, MCU_CAL_RX_GROUP_DELAY, is_5ghz, false);
mt76_wr(dev, 0x2124, reg_val);
mt76_wr(dev, MT_BBP(IBI, 9), reg_val);
mt76_wr(dev, MT_TX_ALC_CFG_0, tx_alc);
msleep(100);
mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_RXDCOC, 1, false);
}
void mt76x0_agc_save(struct mt76x02_dev *dev)
{
/* Only one RX path */
dev->agc_save = FIELD_GET(MT_BBP_AGC_GAIN, mt76_rr(dev, MT_BBP(AGC, 8)));
}
void mt76x0_agc_restore(struct mt76x02_dev *dev)
{
mt76_rmw_field(dev, MT_BBP(AGC, 8), MT_BBP_AGC_GAIN, dev->agc_save);
mt76x02_mcu_calibrate(dev, MCU_CAL_RXDCOC, 1, false);
}
static void mt76x0_temp_sensor(struct mt76x02_dev *dev)
{
u8 rf_b7_73, rf_b0_66, rf_b0_67;
int cycle, temp;
u32 val;
s32 sval;
s8 val;
rf_b7_73 = rf_rr(dev, MT_RF(7, 73));
rf_b0_66 = rf_rr(dev, MT_RF(0, 66));
rf_b0_67 = rf_rr(dev, MT_RF(0, 73));
rf_b0_67 = rf_rr(dev, MT_RF(0, 67));
rf_wr(dev, MT_RF(7, 73), 0x02);
rf_wr(dev, MT_RF(0, 66), 0x23);
rf_wr(dev, MT_RF(0, 73), 0x01);
rf_wr(dev, MT_RF(0, 67), 0x01);
mt76_wr(dev, MT_BBP(CORE, 34), 0x00080055);
for (cycle = 0; cycle < 2000; cycle++) {
val = mt76_rr(dev, MT_BBP(CORE, 34));
if (!(val & 0x10))
break;
udelay(3);
}
if (cycle >= 2000) {
val &= 0x10;
mt76_wr(dev, MT_BBP(CORE, 34), val);
if (!mt76_poll(dev, MT_BBP(CORE, 34), BIT(4), 0, 2000)) {
mt76_clear(dev, MT_BBP(CORE, 34), BIT(4));
goto done;
}
sval = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;
if (!(sval & 0x80))
sval &= 0x7f; /* Positive */
else
sval |= 0xffffff00; /* Negative */
val = mt76_rr(dev, MT_BBP(CORE, 35));
val = (35 * (val - dev->cal.rx.temp_offset)) / 10 + 25;
temp = (35 * (sval - dev->cal.rx.temp_offset)) / 10 + 25;
if (abs(val - dev->cal.temp_vco) > 20) {
mt76x02_mcu_calibrate(dev, MCU_CAL_VCO,
dev->mt76.chandef.chan->hw_value,
false);
dev->cal.temp_vco = val;
}
if (abs(val - dev->cal.temp) > 30) {
mt76x0_phy_calibrate(dev, false);
dev->cal.temp = val;
}
done:
rf_wr(dev, MT_RF(7, 73), rf_b7_73);
rf_wr(dev, MT_RF(0, 66), rf_b0_66);
rf_wr(dev, MT_RF(0, 73), rf_b0_67);
rf_wr(dev, MT_RF(0, 67), rf_b0_67);
}
static void mt76x0_dynamic_vga_tuning(struct mt76x02_dev *dev)
static void mt76x0_phy_set_gain_val(struct mt76x02_dev *dev)
{
struct cfg80211_chan_def *chandef = &dev->mt76.chandef;
u32 val, init_vga;
int avg_rssi;
u8 gain = dev->cal.agc_gain_cur[0] - dev->cal.agc_gain_adjust;
u32 val = 0x122c << 16 | 0xf2;
init_vga = chandef->chan->band == NL80211_BAND_5GHZ ? 0x54 : 0x4E;
avg_rssi = mt76x02_phy_get_min_avg_rssi(&dev->mt76);
if (avg_rssi > -60)
init_vga -= 0x20;
else if (avg_rssi > -70)
init_vga -= 0x10;
val = mt76_rr(dev, MT_BBP(AGC, 8));
val &= 0xFFFF80FF;
val |= init_vga << 8;
mt76_wr(dev, MT_BBP(AGC,8), val);
mt76_wr(dev, MT_BBP(AGC, 8),
val | FIELD_PREP(MT_BBP_AGC_GAIN, gain));
}
static void mt76x0_phy_calibrate(struct work_struct *work)
static void
mt76x0_phy_update_channel_gain(struct mt76x02_dev *dev)
{
bool gain_change;
u8 gain_delta;
int low_gain;
dev->cal.avg_rssi_all = mt76x02_phy_get_min_avg_rssi(dev);
low_gain = (dev->cal.avg_rssi_all > mt76x02_get_rssi_gain_thresh(dev)) +
(dev->cal.avg_rssi_all > mt76x02_get_low_rssi_gain_thresh(dev));
gain_change = (dev->cal.low_gain & 2) ^ (low_gain & 2);
dev->cal.low_gain = low_gain;
if (!gain_change) {
if (mt76x02_phy_adjust_vga_gain(dev))
mt76x0_phy_set_gain_val(dev);
return;
}
dev->cal.agc_gain_adjust = (low_gain == 2) ? 0 : 10;
gain_delta = (low_gain == 2) ? 10 : 0;
dev->cal.agc_gain_cur[0] = dev->cal.agc_gain_init[0] - gain_delta;
mt76x0_phy_set_gain_val(dev);
/* clear false CCA counters */
mt76_rr(dev, MT_RX_STAT_1);
}
static void mt76x0_phy_calibration_work(struct work_struct *work)
{
struct mt76x02_dev *dev = container_of(work, struct mt76x02_dev,
cal_work.work);
mt76x0_dynamic_vga_tuning(dev);
mt76x0_temp_sensor(dev);
mt76x0_phy_update_channel_gain(dev);
if (!mt76x0_tssi_enabled(dev))
mt76x0_temp_sensor(dev);
ieee80211_queue_delayed_work(dev->mt76.hw, &dev->cal_work,
MT_CALIBRATE_INTERVAL);
@ -881,9 +922,9 @@ static void mt76x0_rf_init(struct mt76x02_dev *dev)
void mt76x0_phy_init(struct mt76x02_dev *dev)
{
INIT_DELAYED_WORK(&dev->cal_work, mt76x0_phy_calibrate);
INIT_DELAYED_WORK(&dev->cal_work, mt76x0_phy_calibration_work);
mt76x0_rf_init(dev);
mt76x02_phy_set_rxpath(&dev->mt76);
mt76x02_phy_set_txdac(&dev->mt76);
mt76x02_phy_set_rxpath(dev);
mt76x02_phy_set_txdac(dev);
}


@ -40,8 +40,7 @@ mt76x0u_upload_firmware(struct mt76x02_dev *dev,
ilm_len = le32_to_cpu(hdr->ilm_len) - MT_MCU_IVB_SIZE;
dev_dbg(dev->mt76.dev, "loading FW - ILM %u + IVB %u\n",
ilm_len, MT_MCU_IVB_SIZE);
err = mt76x02u_mcu_fw_send_data(&dev->mt76,
fw_payload + MT_MCU_IVB_SIZE,
err = mt76x02u_mcu_fw_send_data(dev, fw_payload + MT_MCU_IVB_SIZE,
ilm_len, MCU_FW_URB_MAX_PAYLOAD,
MT_MCU_IVB_SIZE);
if (err)
@ -49,7 +48,7 @@ mt76x0u_upload_firmware(struct mt76x02_dev *dev,
dlm_len = le32_to_cpu(hdr->dlm_len);
dev_dbg(dev->mt76.dev, "loading FW - DLM %u\n", dlm_len);
err = mt76x02u_mcu_fw_send_data(&dev->mt76,
err = mt76x02u_mcu_fw_send_data(dev,
fw_payload + le32_to_cpu(hdr->ilm_len),
dlm_len, MCU_FW_URB_MAX_PAYLOAD,
MT_MCU_DLM_OFFSET);
@ -121,7 +120,7 @@ static int mt76x0u_load_firmware(struct mt76x02_dev *dev)
mt76_set(dev, MT_USB_DMA_CFG,
(MT_USB_DMA_CFG_RX_BULK_EN | MT_USB_DMA_CFG_TX_BULK_EN) |
FIELD_PREP(MT_USB_DMA_CFG_RX_BULK_AGG_TOUT, 0x20));
mt76x02u_mcu_fw_reset(&dev->mt76);
mt76x02u_mcu_fw_reset(dev);
usleep_range(5000, 6000);
/*
mt76x0_rmw(dev, MT_PBF_CFG, 0, (MT_PBF_CFG_TX0Q_EN |


@ -55,7 +55,8 @@ struct mt76x02_calibration {
s8 agc_gain_adjust;
s8 low_gain;
u8 temp;
s8 temp_vco;
s8 temp;
bool init_cal_done;
bool tssi_cal_done;
@ -101,8 +102,6 @@ struct mt76x02_dev {
bool no_2ghz;
u8 agc_save;
u8 coverage_class;
u8 slottime;
@ -119,8 +118,8 @@ int mt76x02_sta_add(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
int mt76x02_sta_remove(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
struct ieee80211_sta *sta);
void mt76x02_vif_init(struct mt76_dev *dev, struct ieee80211_vif *vif,
unsigned int idx);
void mt76x02_vif_init(struct mt76x02_dev *dev, struct ieee80211_vif *vif,
unsigned int idx);
int mt76x02_add_interface(struct ieee80211_hw *hw,
struct ieee80211_vif *vif);
void mt76x02_remove_interface(struct ieee80211_hw *hw,
@ -136,14 +135,15 @@ int mt76x02_conf_tx(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
void mt76x02_sta_rate_tbl_update(struct ieee80211_hw *hw,
struct ieee80211_vif *vif,
struct ieee80211_sta *sta);
s8 mt76x02_tx_get_max_txpwr_adj(struct mt76_dev *dev,
s8 mt76x02_tx_get_max_txpwr_adj(struct mt76x02_dev *dev,
const struct ieee80211_tx_rate *rate);
s8 mt76x02_tx_get_txpwr_adj(struct mt76_dev *mdev, s8 txpwr, s8 max_txpwr_adj);
s8 mt76x02_tx_get_txpwr_adj(struct mt76x02_dev *dev, s8 txpwr,
s8 max_txpwr_adj);
void mt76x02_tx_set_txpwr_auto(struct mt76x02_dev *dev, s8 txpwr);
int mt76x02_insert_hdr_pad(struct sk_buff *skb);
void mt76x02_remove_hdr_pad(struct sk_buff *skb, int len);
void mt76x02_tx_complete(struct mt76_dev *dev, struct sk_buff *skb);
bool mt76x02_tx_status_data(struct mt76_dev *dev, u8 *update);
bool mt76x02_tx_status_data(struct mt76_dev *mdev, u8 *update);
void mt76x02_queue_rx_skb(struct mt76_dev *mdev, enum mt76_rxq_id q,
struct sk_buff *skb);
void mt76x02_rx_poll_complete(struct mt76_dev *mdev, enum mt76_rxq_id q);
@ -156,10 +156,17 @@ int mt76x02_tx_prepare_skb(struct mt76_dev *mdev, void *txwi,
u32 *tx_info);
extern const u16 mt76x02_beacon_offsets[16];
void mt76x02_set_beacon_offsets(struct mt76_dev *dev);
void mt76x02_set_beacon_offsets(struct mt76x02_dev *dev);
void mt76x02_set_irq_mask(struct mt76x02_dev *dev, u32 clear, u32 set);
void mt76x02_mac_start(struct mt76x02_dev *dev);
static inline bool is_mt76x2(struct mt76x02_dev *dev)
{
return mt76_chip(&dev->mt76) == 0x7612 ||
mt76_chip(&dev->mt76) == 0x7662 ||
mt76_chip(&dev->mt76) == 0x7602;
}
static inline void mt76x02_irq_enable(struct mt76x02_dev *dev, u32 mask)
{
mt76x02_set_irq_mask(dev, 0, mask);


@ -17,46 +17,43 @@
#include <asm/unaligned.h>
#include "mt76.h"
#include "mt76x02_eeprom.h"
#include "mt76x02_regs.h"
static int
mt76x02_efuse_read(struct mt76_dev *dev, u16 addr, u8 *data,
mt76x02_efuse_read(struct mt76x02_dev *dev, u16 addr, u8 *data,
enum mt76x02_eeprom_modes mode)
{
u32 val;
int i;
val = __mt76_rr(dev, MT_EFUSE_CTRL);
val = mt76_rr(dev, MT_EFUSE_CTRL);
val &= ~(MT_EFUSE_CTRL_AIN |
MT_EFUSE_CTRL_MODE);
val |= FIELD_PREP(MT_EFUSE_CTRL_AIN, addr & ~0xf);
val |= FIELD_PREP(MT_EFUSE_CTRL_MODE, mode);
val |= MT_EFUSE_CTRL_KICK;
__mt76_wr(dev, MT_EFUSE_CTRL, val);
mt76_wr(dev, MT_EFUSE_CTRL, val);
if (!__mt76_poll_msec(dev, MT_EFUSE_CTRL, MT_EFUSE_CTRL_KICK,
0, 1000))
if (!mt76_poll_msec(dev, MT_EFUSE_CTRL, MT_EFUSE_CTRL_KICK, 0, 1000))
return -ETIMEDOUT;
udelay(2);
val = __mt76_rr(dev, MT_EFUSE_CTRL);
val = mt76_rr(dev, MT_EFUSE_CTRL);
if ((val & MT_EFUSE_CTRL_AOUT) == MT_EFUSE_CTRL_AOUT) {
memset(data, 0xff, 16);
return 0;
}
for (i = 0; i < 4; i++) {
val = __mt76_rr(dev, MT_EFUSE_DATA(i));
val = mt76_rr(dev, MT_EFUSE_DATA(i));
put_unaligned_le32(val, data + 4 * i);
}
return 0;
}
int mt76x02_get_efuse_data(struct mt76_dev *dev, u16 base, void *buf,
int mt76x02_get_efuse_data(struct mt76x02_dev *dev, u16 base, void *buf,
int len, enum mt76x02_eeprom_modes mode)
{
int ret, i;
@ -71,26 +68,26 @@ int mt76x02_get_efuse_data(struct mt76_dev *dev, u16 base, void *buf,
}
EXPORT_SYMBOL_GPL(mt76x02_get_efuse_data);
void mt76x02_eeprom_parse_hw_cap(struct mt76_dev *dev)
void mt76x02_eeprom_parse_hw_cap(struct mt76x02_dev *dev)
{
u16 val = mt76x02_eeprom_get(dev, MT_EE_NIC_CONF_0);
switch (FIELD_GET(MT_EE_NIC_CONF_0_BOARD_TYPE, val)) {
case BOARD_TYPE_5GHZ:
dev->cap.has_5ghz = true;
dev->mt76.cap.has_5ghz = true;
break;
case BOARD_TYPE_2GHZ:
dev->cap.has_2ghz = true;
dev->mt76.cap.has_2ghz = true;
break;
default:
dev->cap.has_2ghz = true;
dev->cap.has_5ghz = true;
dev->mt76.cap.has_2ghz = true;
dev->mt76.cap.has_5ghz = true;
break;
}
}
EXPORT_SYMBOL_GPL(mt76x02_eeprom_parse_hw_cap);
bool mt76x02_ext_pa_enabled(struct mt76_dev *dev, enum nl80211_band band)
bool mt76x02_ext_pa_enabled(struct mt76x02_dev *dev, enum nl80211_band band)
{
u16 conf0 = mt76x02_eeprom_get(dev, MT_EE_NIC_CONF_0);
@ -101,7 +98,7 @@ bool mt76x02_ext_pa_enabled(struct mt76_dev *dev, enum nl80211_band band)
}
EXPORT_SYMBOL_GPL(mt76x02_ext_pa_enabled);
void mt76x02_get_rx_gain(struct mt76_dev *dev, enum nl80211_band band,
void mt76x02_get_rx_gain(struct mt76x02_dev *dev, enum nl80211_band band,
u16 *rssi_offset, s8 *lna_2g, s8 *lna_5g)
{
u16 val;
@ -129,7 +126,7 @@ void mt76x02_get_rx_gain(struct mt76_dev *dev, enum nl80211_band band,
}
EXPORT_SYMBOL_GPL(mt76x02_get_rx_gain);
u8 mt76x02_get_lna_gain(struct mt76_dev *dev,
u8 mt76x02_get_lna_gain(struct mt76x02_dev *dev,
s8 *lna_2g, s8 *lna_5g,
struct ieee80211_channel *chan)
{


@ -18,6 +18,8 @@
#ifndef __MT76x02_EEPROM_H
#define __MT76x02_EEPROM_H
#include "mt76x02.h"
enum mt76x02_eeprom_field {
MT_EE_CHIP_ID = 0x000,
MT_EE_VERSION = 0x002,
@ -168,44 +170,23 @@ static inline s8 mt76x02_rate_power_val(u8 val)
}
static inline int
mt76x02_eeprom_get(struct mt76_dev *dev,
mt76x02_eeprom_get(struct mt76x02_dev *dev,
enum mt76x02_eeprom_field field)
{
if ((field & 1) || field >= __MT_EE_MAX)
return -1;
return get_unaligned_le16(dev->eeprom.data + field);
return get_unaligned_le16(dev->mt76.eeprom.data + field);
}
static inline bool
mt76x02_temp_tx_alc_enabled(struct mt76_dev *dev)
{
u16 val;
val = mt76x02_eeprom_get(dev, MT_EE_TX_POWER_EXT_PA_5G);
if (!(val & BIT(15)))
return false;
return mt76x02_eeprom_get(dev, MT_EE_NIC_CONF_1) &
MT_EE_NIC_CONF_1_TEMP_TX_ALC;
}
static inline bool
mt76x02_tssi_enabled(struct mt76_dev *dev)
{
return !mt76x02_temp_tx_alc_enabled(dev) &&
(mt76x02_eeprom_get(dev, MT_EE_NIC_CONF_1) &
MT_EE_NIC_CONF_1_TX_ALC_EN);
}
bool mt76x02_ext_pa_enabled(struct mt76_dev *dev, enum nl80211_band band);
int mt76x02_get_efuse_data(struct mt76_dev *dev, u16 base, void *buf,
bool mt76x02_ext_pa_enabled(struct mt76x02_dev *dev, enum nl80211_band band);
int mt76x02_get_efuse_data(struct mt76x02_dev *dev, u16 base, void *buf,
int len, enum mt76x02_eeprom_modes mode);
void mt76x02_get_rx_gain(struct mt76_dev *dev, enum nl80211_band band,
void mt76x02_get_rx_gain(struct mt76x02_dev *dev, enum nl80211_band band,
u16 *rssi_offset, s8 *lna_2g, s8 *lna_5g);
u8 mt76x02_get_lna_gain(struct mt76_dev *dev,
u8 mt76x02_get_lna_gain(struct mt76x02_dev *dev,
s8 *lna_2g, s8 *lna_5g,
struct ieee80211_channel *chan);
void mt76x02_eeprom_parse_hw_cap(struct mt76_dev *dev);
void mt76x02_eeprom_parse_hw_cap(struct mt76x02_dev *dev);
#endif /* __MT76x02_EEPROM_H */


@ -45,8 +45,8 @@ mt76x02_mac_get_key_info(struct ieee80211_key_conf *key, u8 *key_data)
}
EXPORT_SYMBOL_GPL(mt76x02_mac_get_key_info);
int mt76x02_mac_shared_key_setup(struct mt76_dev *dev, u8 vif_idx, u8 key_idx,
struct ieee80211_key_conf *key)
int mt76x02_mac_shared_key_setup(struct mt76x02_dev *dev, u8 vif_idx,
u8 key_idx, struct ieee80211_key_conf *key)
{
enum mt76x02_cipher_type cipher;
u8 key_data[32];
@ -56,20 +56,20 @@ int mt76x02_mac_shared_key_setup(struct mt76_dev *dev, u8 vif_idx, u8 key_idx,
if (cipher == MT_CIPHER_NONE && key)
return -EOPNOTSUPP;
val = __mt76_rr(dev, MT_SKEY_MODE(vif_idx));
val = mt76_rr(dev, MT_SKEY_MODE(vif_idx));
val &= ~(MT_SKEY_MODE_MASK << MT_SKEY_MODE_SHIFT(vif_idx, key_idx));
val |= cipher << MT_SKEY_MODE_SHIFT(vif_idx, key_idx);
__mt76_wr(dev, MT_SKEY_MODE(vif_idx), val);
mt76_wr(dev, MT_SKEY_MODE(vif_idx), val);
__mt76_wr_copy(dev, MT_SKEY(vif_idx, key_idx), key_data,
sizeof(key_data));
mt76_wr_copy(dev, MT_SKEY(vif_idx, key_idx), key_data,
sizeof(key_data));
return 0;
}
EXPORT_SYMBOL_GPL(mt76x02_mac_shared_key_setup);
int mt76x02_mac_wcid_set_key(struct mt76_dev *dev, u8 idx,
struct ieee80211_key_conf *key)
int mt76x02_mac_wcid_set_key(struct mt76x02_dev *dev, u8 idx,
struct ieee80211_key_conf *key)
{
enum mt76x02_cipher_type cipher;
u8 key_data[32];
@ -79,25 +79,26 @@ int mt76x02_mac_wcid_set_key(struct mt76_dev *dev, u8 idx,
if (cipher == MT_CIPHER_NONE && key)
return -EOPNOTSUPP;
__mt76_wr_copy(dev, MT_WCID_KEY(idx), key_data, sizeof(key_data));
__mt76_rmw_field(dev, MT_WCID_ATTR(idx), MT_WCID_ATTR_PKEY_MODE, cipher);
mt76_wr_copy(dev, MT_WCID_KEY(idx), key_data, sizeof(key_data));
mt76_rmw_field(dev, MT_WCID_ATTR(idx), MT_WCID_ATTR_PKEY_MODE, cipher);
memset(iv_data, 0, sizeof(iv_data));
if (key) {
__mt76_rmw_field(dev, MT_WCID_ATTR(idx), MT_WCID_ATTR_PAIRWISE,
!!(key->flags & IEEE80211_KEY_FLAG_PAIRWISE));
mt76_rmw_field(dev, MT_WCID_ATTR(idx), MT_WCID_ATTR_PAIRWISE,
!!(key->flags & IEEE80211_KEY_FLAG_PAIRWISE));
iv_data[3] = key->keyidx << 6;
if (cipher >= MT_CIPHER_TKIP)
iv_data[3] |= 0x20;
}
__mt76_wr_copy(dev, MT_WCID_IV(idx), iv_data, sizeof(iv_data));
mt76_wr_copy(dev, MT_WCID_IV(idx), iv_data, sizeof(iv_data));
return 0;
}
EXPORT_SYMBOL_GPL(mt76x02_mac_wcid_set_key);
void mt76x02_mac_wcid_setup(struct mt76_dev *dev, u8 idx, u8 vif_idx, u8 *mac)
void mt76x02_mac_wcid_setup(struct mt76x02_dev *dev, u8 idx,
u8 vif_idx, u8 *mac)
{
struct mt76_wcid_addr addr = {};
u32 attr;
@ -105,10 +106,10 @@ void mt76x02_mac_wcid_setup(struct mt76_dev *dev, u8 idx, u8 vif_idx, u8 *mac)
attr = FIELD_PREP(MT_WCID_ATTR_BSS_IDX, vif_idx & 7) |
FIELD_PREP(MT_WCID_ATTR_BSS_IDX_EXT, !!(vif_idx & 8));
__mt76_wr(dev, MT_WCID_ATTR(idx), attr);
mt76_wr(dev, MT_WCID_ATTR(idx), attr);
__mt76_wr(dev, MT_WCID_TX_RATE(idx), 0);
__mt76_wr(dev, MT_WCID_TX_RATE(idx) + 4, 0);
mt76_wr(dev, MT_WCID_TX_RATE(idx), 0);
mt76_wr(dev, MT_WCID_TX_RATE(idx) + 4, 0);
if (idx >= 128)
return;
@ -116,22 +117,22 @@ void mt76x02_mac_wcid_setup(struct mt76_dev *dev, u8 idx, u8 vif_idx, u8 *mac)
if (mac)
memcpy(addr.macaddr, mac, ETH_ALEN);
__mt76_wr_copy(dev, MT_WCID_ADDR(idx), &addr, sizeof(addr));
mt76_wr_copy(dev, MT_WCID_ADDR(idx), &addr, sizeof(addr));
}
EXPORT_SYMBOL_GPL(mt76x02_mac_wcid_setup);
void mt76x02_mac_wcid_set_drop(struct mt76_dev *dev, u8 idx, bool drop)
void mt76x02_mac_wcid_set_drop(struct mt76x02_dev *dev, u8 idx, bool drop)
{
u32 val = __mt76_rr(dev, MT_WCID_DROP(idx));
u32 val = mt76_rr(dev, MT_WCID_DROP(idx));
u32 bit = MT_WCID_DROP_MASK(idx);
/* prevent unnecessary writes */
if ((val & bit) != (bit * drop))
__mt76_wr(dev, MT_WCID_DROP(idx), (val & ~bit) | (bit * drop));
mt76_wr(dev, MT_WCID_DROP(idx), (val & ~bit) | (bit * drop));
}
EXPORT_SYMBOL_GPL(mt76x02_mac_wcid_set_drop);
void mt76x02_txq_init(struct mt76_dev *dev, struct ieee80211_txq *txq)
void mt76x02_txq_init(struct mt76x02_dev *dev, struct ieee80211_txq *txq)
{
struct mt76_txq *mtxq;
@ -151,55 +152,13 @@ void mt76x02_txq_init(struct mt76_dev *dev, struct ieee80211_txq *txq)
mtxq->wcid = &mvif->group_wcid;
}
mt76_txq_init(dev, txq);
mt76_txq_init(&dev->mt76, txq);
}
EXPORT_SYMBOL_GPL(mt76x02_txq_init);
static void
mt76x02_mac_fill_txwi(struct mt76x02_txwi *txwi, struct sk_buff *skb,
struct ieee80211_sta *sta, int len, u8 nss)
{
struct ieee80211_tx_info *info = IEEE80211_SKB_CB(skb);
struct ieee80211_hdr *hdr = (struct ieee80211_hdr *) skb->data;
u16 txwi_flags = 0;
if (info->flags & IEEE80211_TX_CTL_LDPC)
txwi->rate |= cpu_to_le16(MT_RXWI_RATE_LDPC);
if ((info->flags & IEEE80211_TX_CTL_STBC) && nss == 1)
txwi->rate |= cpu_to_le16(MT_RXWI_RATE_STBC);
if (nss > 1 && sta && sta->smps_mode == IEEE80211_SMPS_DYNAMIC)
txwi_flags |= MT_TXWI_FLAGS_MMPS;
if (!(info->flags & IEEE80211_TX_CTL_NO_ACK))
txwi->ack_ctl |= MT_TXWI_ACK_CTL_REQ;
if (info->flags & IEEE80211_TX_CTL_ASSIGN_SEQ)
txwi->ack_ctl |= MT_TXWI_ACK_CTL_NSEQ;
if (info->flags & IEEE80211_TX_CTL_RATE_CTRL_PROBE)
txwi->pktid |= MT_TXWI_PKTID_PROBE;
if ((info->flags & IEEE80211_TX_CTL_AMPDU) && sta) {
u8 ba_size = IEEE80211_MIN_AMPDU_BUF;
ba_size <<= sta->ht_cap.ampdu_factor;
ba_size = min_t(int, 63, ba_size - 1);
if (info->flags & IEEE80211_TX_CTL_RATE_CTRL_PROBE)
ba_size = 0;
txwi->ack_ctl |= FIELD_PREP(MT_TXWI_ACK_CTL_BA_WINDOW, ba_size);
txwi_flags |= MT_TXWI_FLAGS_AMPDU |
FIELD_PREP(MT_TXWI_FLAGS_MPDU_DENSITY,
sta->ht_cap.ampdu_density);
}
if (ieee80211_is_probe_resp(hdr->frame_control) ||
ieee80211_is_beacon(hdr->frame_control))
txwi_flags |= MT_TXWI_FLAGS_TS;
txwi->flags |= cpu_to_le16(txwi_flags);
txwi->len_ctl = cpu_to_le16(len);
}
static __le16
mt76x02_mac_tx_rate_val(struct mt76_dev *dev,
const struct ieee80211_tx_rate *rate, u8 *nss_val)
mt76x02_mac_tx_rate_val(struct mt76x02_dev *dev,
const struct ieee80211_tx_rate *rate, u8 *nss_val)
{
u16 rateval;
u8 phy, rate_idx;
@ -224,10 +183,10 @@ mt76x02_mac_tx_rate_val(struct mt76_dev *dev,
bw = 1;
} else {
const struct ieee80211_rate *r;
int band = dev->chandef.chan->band;
int band = dev->mt76.chandef.chan->band;
u16 val;
r = &dev->hw->wiphy->bands[band]->bitrates[rate->idx];
r = &dev->mt76.hw->wiphy->bands[band]->bitrates[rate->idx];
if (rate->flags & IEEE80211_TX_RC_USE_SHORT_PREAMBLE)
val = r->hw_value_short;
else
@ -248,22 +207,22 @@ mt76x02_mac_tx_rate_val(struct mt76_dev *dev,
return cpu_to_le16(rateval);
}
void mt76x02_mac_wcid_set_rate(struct mt76_dev *dev, struct mt76_wcid *wcid,
const struct ieee80211_tx_rate *rate)
void mt76x02_mac_wcid_set_rate(struct mt76x02_dev *dev, struct mt76_wcid *wcid,
const struct ieee80211_tx_rate *rate)
{
spin_lock_bh(&dev->lock);
spin_lock_bh(&dev->mt76.lock);
wcid->tx_rate = mt76x02_mac_tx_rate_val(dev, rate, &wcid->tx_rate_nss);
wcid->tx_rate_set = true;
spin_unlock_bh(&dev->lock);
spin_unlock_bh(&dev->mt76.lock);
}
bool mt76x02_mac_load_tx_status(struct mt76_dev *dev,
struct mt76x02_tx_status *stat)
bool mt76x02_mac_load_tx_status(struct mt76x02_dev *dev,
struct mt76x02_tx_status *stat)
{
u32 stat1, stat2;
stat2 = __mt76_rr(dev, MT_TX_STAT_FIFO_EXT);
stat1 = __mt76_rr(dev, MT_TX_STAT_FIFO);
stat2 = mt76_rr(dev, MT_TX_STAT_FIFO_EXT);
stat1 = mt76_rr(dev, MT_TX_STAT_FIFO);
stat->valid = !!(stat1 & MT_TX_STAT_FIFO_VALID);
if (!stat->valid)
@ -339,17 +298,19 @@ mt76x02_mac_process_tx_rate(struct ieee80211_tx_rate *txrate, u16 rate,
return 0;
}
void mt76x02_mac_write_txwi(struct mt76_dev *dev, struct mt76x02_txwi *txwi,
void mt76x02_mac_write_txwi(struct mt76x02_dev *dev, struct mt76x02_txwi *txwi,
struct sk_buff *skb, struct mt76_wcid *wcid,
struct ieee80211_sta *sta, int len)
{
struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)skb->data;
struct ieee80211_tx_info *info = IEEE80211_SKB_CB(skb);
struct ieee80211_tx_rate *rate = &info->control.rates[0];
struct ieee80211_key_conf *key = info->control.hw_key;
u16 rate_ht_mask = FIELD_PREP(MT_RXWI_RATE_PHY, BIT(1) | BIT(2));
u16 txwi_flags = 0;
u8 nss;
s8 txpwr_adj, max_txpwr_adj;
u8 ccmp_pn[8], nstreams = dev->chainmask & 0xf;
u8 ccmp_pn[8], nstreams = dev->mt76.chainmask & 0xf;
memset(txwi, 0, sizeof(*txwi));
@ -374,7 +335,7 @@ void mt76x02_mac_write_txwi(struct mt76_dev *dev, struct mt76x02_txwi *txwi,
txwi->eiv = *((__le32 *)&ccmp_pn[1]);
}
spin_lock_bh(&dev->lock);
spin_lock_bh(&dev->mt76.lock);
if (wcid && (rate->idx < 0 || !rate->count)) {
txwi->rate = wcid->tx_rate;
max_txpwr_adj = wcid->max_txpwr_adj;
@ -383,26 +344,57 @@ void mt76x02_mac_write_txwi(struct mt76_dev *dev, struct mt76x02_txwi *txwi,
txwi->rate = mt76x02_mac_tx_rate_val(dev, rate, &nss);
max_txpwr_adj = mt76x02_tx_get_max_txpwr_adj(dev, rate);
}
spin_unlock_bh(&dev->lock);
spin_unlock_bh(&dev->mt76.lock);
txpwr_adj = mt76x02_tx_get_txpwr_adj(dev, dev->txpower_conf,
txpwr_adj = mt76x02_tx_get_txpwr_adj(dev, dev->mt76.txpower_conf,
max_txpwr_adj);
txwi->ctl2 = FIELD_PREP(MT_TX_PWR_ADJ, txpwr_adj);
if (nstreams > 1 && mt76_rev(dev) >= MT76XX_REV_E4)
if (nstreams > 1 && mt76_rev(&dev->mt76) >= MT76XX_REV_E4)
txwi->txstream = 0x13;
else if (nstreams > 1 && mt76_rev(dev) >= MT76XX_REV_E3 &&
else if (nstreams > 1 && mt76_rev(&dev->mt76) >= MT76XX_REV_E3 &&
!(txwi->rate & cpu_to_le16(rate_ht_mask)))
txwi->txstream = 0x93;
mt76x02_mac_fill_txwi(txwi, skb, sta, len, nss);
if (is_mt76x2(dev) && (info->flags & IEEE80211_TX_CTL_LDPC))
txwi->rate |= cpu_to_le16(MT_RXWI_RATE_LDPC);
if ((info->flags & IEEE80211_TX_CTL_STBC) && nss == 1)
txwi->rate |= cpu_to_le16(MT_RXWI_RATE_STBC);
if (nss > 1 && sta && sta->smps_mode == IEEE80211_SMPS_DYNAMIC)
txwi_flags |= MT_TXWI_FLAGS_MMPS;
if (!(info->flags & IEEE80211_TX_CTL_NO_ACK))
txwi->ack_ctl |= MT_TXWI_ACK_CTL_REQ;
if (info->flags & IEEE80211_TX_CTL_ASSIGN_SEQ)
txwi->ack_ctl |= MT_TXWI_ACK_CTL_NSEQ;
if (info->flags & IEEE80211_TX_CTL_RATE_CTRL_PROBE)
txwi->pktid |= MT_TXWI_PKTID_PROBE;
if ((info->flags & IEEE80211_TX_CTL_AMPDU) && sta) {
u8 ba_size = IEEE80211_MIN_AMPDU_BUF;
ba_size <<= sta->ht_cap.ampdu_factor;
ba_size = min_t(int, 63, ba_size - 1);
if (info->flags & IEEE80211_TX_CTL_RATE_CTRL_PROBE)
ba_size = 0;
txwi->ack_ctl |= FIELD_PREP(MT_TXWI_ACK_CTL_BA_WINDOW, ba_size);
txwi_flags |= MT_TXWI_FLAGS_AMPDU |
FIELD_PREP(MT_TXWI_FLAGS_MPDU_DENSITY,
sta->ht_cap.ampdu_density);
}
if (ieee80211_is_probe_resp(hdr->frame_control) ||
ieee80211_is_beacon(hdr->frame_control))
txwi_flags |= MT_TXWI_FLAGS_TS;
txwi->flags |= cpu_to_le16(txwi_flags);
txwi->len_ctl = cpu_to_le16(len);
}
EXPORT_SYMBOL_GPL(mt76x02_mac_write_txwi);
static void
mt76x02_mac_fill_tx_status(struct mt76_dev *dev,
struct ieee80211_tx_info *info,
struct mt76x02_tx_status *st, int n_frames)
mt76x02_mac_fill_tx_status(struct mt76x02_dev *dev,
struct ieee80211_tx_info *info,
struct mt76x02_tx_status *st, int n_frames)
{
struct ieee80211_tx_rate *rate = info->status.rates;
int cur_idx, last_rate;
@ -413,7 +405,7 @@ mt76x02_mac_fill_tx_status(struct mt76_dev *dev,
last_rate = min_t(int, st->retry, IEEE80211_TX_MAX_RATES - 1);
mt76x02_mac_process_tx_rate(&rate[last_rate], st->rate,
dev->chandef.chan->band);
dev->mt76.chandef.chan->band);
if (last_rate < IEEE80211_TX_MAX_RATES - 1)
rate[last_rate + 1].idx = -1;
@ -441,8 +433,8 @@ mt76x02_mac_fill_tx_status(struct mt76_dev *dev,
info->flags |= IEEE80211_TX_STAT_ACK;
}
void mt76x02_send_tx_status(struct mt76_dev *dev,
struct mt76x02_tx_status *stat, u8 *update)
void mt76x02_send_tx_status(struct mt76x02_dev *dev,
struct mt76x02_tx_status *stat, u8 *update)
{
struct ieee80211_tx_info info = {};
struct ieee80211_sta *sta = NULL;
@ -450,8 +442,8 @@ void mt76x02_send_tx_status(struct mt76_dev *dev,
struct mt76x02_sta *msta = NULL;
rcu_read_lock();
if (stat->wcid < ARRAY_SIZE(dev->wcid))
wcid = rcu_dereference(dev->wcid[stat->wcid]);
if (stat->wcid < ARRAY_SIZE(dev->mt76.wcid))
wcid = rcu_dereference(dev->mt76.wcid[stat->wcid]);
if (wcid) {
void *priv;
@ -476,7 +468,7 @@ void mt76x02_send_tx_status(struct mt76_dev *dev,
}
mt76x02_mac_fill_tx_status(dev, &info, &msta->status,
msta->n_frames);
msta->n_frames);
msta->status = *stat;
msta->n_frames = 1;
@ -486,7 +478,7 @@ void mt76x02_send_tx_status(struct mt76_dev *dev,
*update = 1;
}
ieee80211_tx_status_noskb(dev->hw, sta, &info);
ieee80211_tx_status_noskb(dev->mt76.hw, sta, &info);
out:
rcu_read_unlock();
@ -561,21 +553,21 @@ mt76x02_mac_process_rate(struct mt76_rx_status *status, u16 rate)
}
EXPORT_SYMBOL_GPL(mt76x02_mac_process_rate);
void mt76x02_mac_setaddr(struct mt76_dev *dev, u8 *addr)
void mt76x02_mac_setaddr(struct mt76x02_dev *dev, u8 *addr)
{
ether_addr_copy(dev->macaddr, addr);
ether_addr_copy(dev->mt76.macaddr, addr);
if (!is_valid_ether_addr(dev->macaddr)) {
eth_random_addr(dev->macaddr);
dev_info(dev->dev,
if (!is_valid_ether_addr(dev->mt76.macaddr)) {
eth_random_addr(dev->mt76.macaddr);
dev_info(dev->mt76.dev,
"Invalid MAC address, using random address %pM\n",
dev->macaddr);
dev->mt76.macaddr);
}
__mt76_wr(dev, MT_MAC_ADDR_DW0, get_unaligned_le32(dev->macaddr));
__mt76_wr(dev, MT_MAC_ADDR_DW1,
get_unaligned_le16(dev->macaddr + 4) |
FIELD_PREP(MT_MAC_ADDR_DW1_U2ME_MASK, 0xff));
mt76_wr(dev, MT_MAC_ADDR_DW0, get_unaligned_le32(dev->mt76.macaddr));
mt76_wr(dev, MT_MAC_ADDR_DW1,
get_unaligned_le16(dev->mt76.macaddr + 4) |
FIELD_PREP(MT_MAC_ADDR_DW1_U2ME_MASK, 0xff));
}
EXPORT_SYMBOL_GPL(mt76x02_mac_setaddr);
@ -697,7 +689,7 @@ void mt76x02_mac_poll_tx_status(struct mt76x02_dev *dev, bool irq)
while (!irq || !kfifo_is_full(&dev->txstatus_fifo)) {
spin_lock_irqsave(&dev->mt76.mmio.irq_lock, flags);
ret = mt76x02_mac_load_tx_status(&dev->mt76, &stat);
ret = mt76x02_mac_load_tx_status(dev, &stat);
spin_unlock_irqrestore(&dev->mt76.mmio.irq_lock, flags);
if (!ret)
@ -706,7 +698,7 @@ void mt76x02_mac_poll_tx_status(struct mt76x02_dev *dev, bool irq)
trace_mac_txstat_fetch(dev, &stat);
if (!irq) {
mt76x02_send_tx_status(&dev->mt76, &stat, &update);
mt76x02_send_tx_status(dev, &stat, &update);
continue;
}


@ -198,28 +198,29 @@ mt76x02_skb_tx_info(struct sk_buff *skb)
return (void *)info->status.status_driver_data;
}
void mt76x02_txq_init(struct mt76_dev *dev, struct ieee80211_txq *txq);
void mt76x02_txq_init(struct mt76x02_dev *dev, struct ieee80211_txq *txq);
enum mt76x02_cipher_type
mt76x02_mac_get_key_info(struct ieee80211_key_conf *key, u8 *key_data);
int mt76x02_mac_shared_key_setup(struct mt76_dev *dev, u8 vif_idx, u8 key_idx,
struct ieee80211_key_conf *key);
int mt76x02_mac_wcid_set_key(struct mt76_dev *dev, u8 idx,
struct ieee80211_key_conf *key);
void mt76x02_mac_wcid_setup(struct mt76_dev *dev, u8 idx, u8 vif_idx, u8 *mac);
void mt76x02_mac_wcid_set_drop(struct mt76_dev *dev, u8 idx, bool drop);
void mt76x02_mac_wcid_set_rate(struct mt76_dev *dev, struct mt76_wcid *wcid,
const struct ieee80211_tx_rate *rate);
bool mt76x02_mac_load_tx_status(struct mt76_dev *dev,
struct mt76x02_tx_status *stat);
void mt76x02_send_tx_status(struct mt76_dev *dev,
struct mt76x02_tx_status *stat, u8 *update);
int mt76x02_mac_shared_key_setup(struct mt76x02_dev *dev, u8 vif_idx,
u8 key_idx, struct ieee80211_key_conf *key);
int mt76x02_mac_wcid_set_key(struct mt76x02_dev *dev, u8 idx,
struct ieee80211_key_conf *key);
void mt76x02_mac_wcid_setup(struct mt76x02_dev *dev, u8 idx, u8 vif_idx,
u8 *mac);
void mt76x02_mac_wcid_set_drop(struct mt76x02_dev *dev, u8 idx, bool drop);
void mt76x02_mac_wcid_set_rate(struct mt76x02_dev *dev, struct mt76_wcid *wcid,
const struct ieee80211_tx_rate *rate);
bool mt76x02_mac_load_tx_status(struct mt76x02_dev *dev,
struct mt76x02_tx_status *stat);
void mt76x02_send_tx_status(struct mt76x02_dev *dev,
struct mt76x02_tx_status *stat, u8 *update);
int mt76x02_mac_process_rx(struct mt76x02_dev *dev, struct sk_buff *skb,
void *rxi);
int
mt76x02_mac_process_rate(struct mt76_rx_status *status, u16 rate);
void mt76x02_mac_setaddr(struct mt76_dev *dev, u8 *addr);
void mt76x02_mac_write_txwi(struct mt76_dev *dev, struct mt76x02_txwi *txwi,
void mt76x02_mac_setaddr(struct mt76x02_dev *dev, u8 *addr);
void mt76x02_mac_write_txwi(struct mt76x02_dev *dev, struct mt76x02_txwi *txwi,
struct sk_buff *skb, struct mt76_wcid *wcid,
struct ieee80211_sta *sta, int len);
void mt76x02_mac_poll_tx_status(struct mt76x02_dev *dev, bool irq);


@ -19,9 +19,7 @@
#include <linux/firmware.h>
#include <linux/delay.h>
#include "mt76.h"
#include "mt76x02_mcu.h"
#include "mt76x02_dma.h"
struct sk_buff *mt76x02_mcu_msg_alloc(const void *data, int len)
{
@ -37,7 +35,7 @@ struct sk_buff *mt76x02_mcu_msg_alloc(const void *data, int len)
EXPORT_SYMBOL_GPL(mt76x02_mcu_msg_alloc);
static struct sk_buff *
mt76x02_mcu_get_response(struct mt76_dev *dev, unsigned long expires)
mt76x02_mcu_get_response(struct mt76x02_dev *dev, unsigned long expires)
{
unsigned long timeout;
@ -45,17 +43,17 @@ mt76x02_mcu_get_response(struct mt76_dev *dev, unsigned long expires)
return NULL;
timeout = expires - jiffies;
wait_event_timeout(dev->mmio.mcu.wait,
!skb_queue_empty(&dev->mmio.mcu.res_q),
wait_event_timeout(dev->mt76.mmio.mcu.wait,
!skb_queue_empty(&dev->mt76.mmio.mcu.res_q),
timeout);
return skb_dequeue(&dev->mmio.mcu.res_q);
return skb_dequeue(&dev->mt76.mmio.mcu.res_q);
}
static int
mt76x02_tx_queue_mcu(struct mt76_dev *dev, enum mt76_txq_id qid,
mt76x02_tx_queue_mcu(struct mt76x02_dev *dev, enum mt76_txq_id qid,
struct sk_buff *skb, int cmd, int seq)
{
struct mt76_queue *q = &dev->q_tx[qid];
struct mt76_queue *q = &dev->mt76.q_tx[qid];
struct mt76_queue_buf buf;
dma_addr_t addr;
u32 tx_info;
@ -66,24 +64,26 @@ mt76x02_tx_queue_mcu(struct mt76_dev *dev, enum mt76_txq_id qid,
FIELD_PREP(MT_MCU_MSG_PORT, CPU_TX_PORT) |
FIELD_PREP(MT_MCU_MSG_LEN, skb->len);
addr = dma_map_single(dev->dev, skb->data, skb->len,
addr = dma_map_single(dev->mt76.dev, skb->data, skb->len,
DMA_TO_DEVICE);
if (dma_mapping_error(dev->dev, addr))
if (dma_mapping_error(dev->mt76.dev, addr))
return -ENOMEM;
buf.addr = addr;
buf.len = skb->len;
spin_lock_bh(&q->lock);
dev->queue_ops->add_buf(dev, q, &buf, 1, tx_info, skb, NULL);
dev->queue_ops->kick(dev, q);
mt76_queue_add_buf(dev, q, &buf, 1, tx_info, skb, NULL);
mt76_queue_kick(dev, q);
spin_unlock_bh(&q->lock);
return 0;
}
int mt76x02_mcu_msg_send(struct mt76_dev *dev, struct sk_buff *skb,
int mt76x02_mcu_msg_send(struct mt76_dev *mdev, struct sk_buff *skb,
int cmd, bool wait_resp)
{
struct mt76x02_dev *dev = container_of(mdev, struct mt76x02_dev, mt76);
unsigned long expires = jiffies + HZ;
int ret;
u8 seq;
@ -91,11 +91,11 @@ int mt76x02_mcu_msg_send(struct mt76_dev *dev, struct sk_buff *skb,
if (!skb)
return -EINVAL;
mutex_lock(&dev->mmio.mcu.mutex);
mutex_lock(&mdev->mmio.mcu.mutex);
seq = ++dev->mmio.mcu.msg_seq & 0xf;
seq = ++mdev->mmio.mcu.msg_seq & 0xf;
if (!seq)
seq = ++dev->mmio.mcu.msg_seq & 0xf;
seq = ++mdev->mmio.mcu.msg_seq & 0xf;
ret = mt76x02_tx_queue_mcu(dev, MT_TXQ_MCU, skb, cmd, seq);
if (ret)
@ -107,7 +107,7 @@ int mt76x02_mcu_msg_send(struct mt76_dev *dev, struct sk_buff *skb,
skb = mt76x02_mcu_get_response(dev, expires);
if (!skb) {
dev_err(dev->dev,
dev_err(mdev->dev,
"MCU message %d (seq %d) timed out\n", cmd,
seq);
ret = -ETIMEDOUT;
@ -125,13 +125,13 @@ int mt76x02_mcu_msg_send(struct mt76_dev *dev, struct sk_buff *skb,
}
out:
mutex_unlock(&dev->mmio.mcu.mutex);
mutex_unlock(&mdev->mmio.mcu.mutex);
return ret;
}
EXPORT_SYMBOL_GPL(mt76x02_mcu_msg_send);
int mt76x02_mcu_function_select(struct mt76_dev *dev,
int mt76x02_mcu_function_select(struct mt76x02_dev *dev,
enum mcu_function func,
u32 val, bool wait_resp)
{
@ -144,13 +144,12 @@ int mt76x02_mcu_function_select(struct mt76_dev *dev,
.value = cpu_to_le32(val),
};
skb = dev->mcu_ops->mcu_msg_alloc(&msg, sizeof(msg));
return dev->mcu_ops->mcu_send_msg(dev, skb, CMD_FUN_SET_OP,
wait_resp);
skb = mt76_mcu_msg_alloc(dev, &msg, sizeof(msg));
return mt76_mcu_send_msg(dev, skb, CMD_FUN_SET_OP, wait_resp);
}
EXPORT_SYMBOL_GPL(mt76x02_mcu_function_select);
int mt76x02_mcu_set_radio_state(struct mt76_dev *dev, bool on,
int mt76x02_mcu_set_radio_state(struct mt76x02_dev *dev, bool on,
bool wait_resp)
{
struct sk_buff *skb;
@ -162,13 +161,12 @@ int mt76x02_mcu_set_radio_state(struct mt76_dev *dev, bool on,
.level = cpu_to_le32(0),
};
skb = dev->mcu_ops->mcu_msg_alloc(&msg, sizeof(msg));
return dev->mcu_ops->mcu_send_msg(dev, skb, CMD_POWER_SAVING_OP,
wait_resp);
skb = mt76_mcu_msg_alloc(dev, &msg, sizeof(msg));
return mt76_mcu_send_msg(dev, skb, CMD_POWER_SAVING_OP, wait_resp);
}
EXPORT_SYMBOL_GPL(mt76x02_mcu_set_radio_state);
int mt76x02_mcu_calibrate(struct mt76_dev *dev, int type,
int mt76x02_mcu_calibrate(struct mt76x02_dev *dev, int type,
u32 param, bool wait)
{
struct sk_buff *skb;
@ -182,44 +180,44 @@ int mt76x02_mcu_calibrate(struct mt76_dev *dev, int type,
int ret;
if (wait)
dev->bus->rmw(dev, MT_MCU_COM_REG0, BIT(31), 0);
mt76_rmw(dev, MT_MCU_COM_REG0, BIT(31), 0);
skb = dev->mcu_ops->mcu_msg_alloc(&msg, sizeof(msg));
ret = dev->mcu_ops->mcu_send_msg(dev, skb, CMD_CALIBRATION_OP, true);
skb = mt76_mcu_msg_alloc(dev, &msg, sizeof(msg));
ret = mt76_mcu_send_msg(dev, skb, CMD_CALIBRATION_OP, true);
if (ret)
return ret;
if (wait &&
WARN_ON(!__mt76_poll_msec(dev, MT_MCU_COM_REG0,
BIT(31), BIT(31), 100)))
WARN_ON(!mt76_poll_msec(dev, MT_MCU_COM_REG0,
BIT(31), BIT(31), 100)))
return -ETIMEDOUT;
return 0;
}
EXPORT_SYMBOL_GPL(mt76x02_mcu_calibrate);
int mt76x02_mcu_cleanup(struct mt76_dev *dev)
int mt76x02_mcu_cleanup(struct mt76x02_dev *dev)
{
struct sk_buff *skb;
dev->bus->wr(dev, MT_MCU_INT_LEVEL, 1);
mt76_wr(dev, MT_MCU_INT_LEVEL, 1);
usleep_range(20000, 30000);
while ((skb = skb_dequeue(&dev->mmio.mcu.res_q)) != NULL)
while ((skb = skb_dequeue(&dev->mt76.mmio.mcu.res_q)) != NULL)
dev_kfree_skb(skb);
return 0;
}
EXPORT_SYMBOL_GPL(mt76x02_mcu_cleanup);
void mt76x02_set_ethtool_fwver(struct mt76_dev *dev,
void mt76x02_set_ethtool_fwver(struct mt76x02_dev *dev,
const struct mt76x02_fw_header *h)
{
u16 bld = le16_to_cpu(h->build_ver);
u16 ver = le16_to_cpu(h->fw_ver);
snprintf(dev->hw->wiphy->fw_version,
sizeof(dev->hw->wiphy->fw_version),
snprintf(dev->mt76.hw->wiphy->fw_version,
sizeof(dev->mt76.hw->wiphy->fw_version),
"%d.%d.%02d-b%x",
(ver >> 12) & 0xf, (ver >> 8) & 0xf, ver & 0xf, bld);
}
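
Most of the churn in this file is the switch from struct mt76_dev to the chip-specific struct mt76x02_dev. mt76x02_mcu_msg_send() keeps the generic signature because it is installed as a bus-level callback, and recovers its private device with container_of(), which works because struct mt76x02_dev embeds the generic device as its mt76 member; the same conversion is what lets the other helpers take mt76x02_dev directly and use the mt76_rr()/mt76_wr() wrappers instead of dev->bus->rr(). A minimal userspace sketch of that pattern (the real structures carry far more state; container_of() here is the simplified form without the kernel's type checking):

/*
 * Minimal userspace sketch of the embedded-device pattern used throughout
 * this series. Illustrative only, not the kernel definitions.
 */
#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct mt76_dev {		/* generic, bus-independent state */
	int dummy;
};

struct mt76x02_dev {		/* chip-specific state embedding the generic one */
	struct mt76_dev mt76;
	int chip_private;
};

/* Bus-level callbacks still receive struct mt76_dev *; the driver recovers
 * its private device the same way mt76x02_mcu_msg_send() does above.
 */
static struct mt76x02_dev *to_mt76x02(struct mt76_dev *mdev)
{
	return container_of(mdev, struct mt76x02_dev, mt76);
}

int main(void)
{
	struct mt76x02_dev dev = { .chip_private = 42 };

	printf("%d\n", to_mt76x02(&dev.mt76)->chip_private);
	return 0;
}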


@ -17,6 +17,8 @@
#ifndef __MT76x02_MCU_H
#define __MT76x02_MCU_H
#include "mt76x02.h"
#define MT_MCU_RESET_CTL 0x070C
#define MT_MCU_INT_LEVEL 0x0718
#define MT_MCU_COM_REG0 0x0730
@ -94,18 +96,18 @@ struct mt76x02_patch_header {
u8 pad[2];
};
int mt76x02_mcu_cleanup(struct mt76_dev *dev);
int mt76x02_mcu_calibrate(struct mt76_dev *dev, int type,
int mt76x02_mcu_cleanup(struct mt76x02_dev *dev);
int mt76x02_mcu_calibrate(struct mt76x02_dev *dev, int type,
u32 param, bool wait);
struct sk_buff *mt76x02_mcu_msg_alloc(const void *data, int len);
int mt76x02_mcu_msg_send(struct mt76_dev *dev, struct sk_buff *skb,
int mt76x02_mcu_msg_send(struct mt76_dev *mdev, struct sk_buff *skb,
int cmd, bool wait_resp);
int mt76x02_mcu_function_select(struct mt76_dev *dev,
int mt76x02_mcu_function_select(struct mt76x02_dev *dev,
enum mcu_function func,
u32 val, bool wait_resp);
int mt76x02_mcu_set_radio_state(struct mt76_dev *dev, bool on,
int mt76x02_mcu_set_radio_state(struct mt76x02_dev *dev, bool on,
bool wait_resp);
void mt76x02_set_ethtool_fwver(struct mt76_dev *dev,
void mt76x02_set_ethtool_fwver(struct mt76x02_dev *dev,
const struct mt76x02_fw_header *h);
#endif /* __MT76x02_MCU_H */


@ -65,7 +65,7 @@ static void mt76x02_process_tx_status_fifo(struct mt76x02_dev *dev)
u8 update = 1;
while (kfifo_get(&dev->txstatus_fifo, &stat))
mt76x02_send_tx_status(&dev->mt76, &stat, &update);
mt76x02_send_tx_status(dev, &stat, &update);
}
static void mt76x02_tx_tasklet(unsigned long data)


@ -17,18 +17,17 @@
#include <linux/kernel.h>
#include "mt76.h"
#include "mt76x02.h"
#include "mt76x02_phy.h"
#include "mt76x02_mac.h"
void mt76x02_phy_set_rxpath(struct mt76_dev *dev)
void mt76x02_phy_set_rxpath(struct mt76x02_dev *dev)
{
u32 val;
val = __mt76_rr(dev, MT_BBP(AGC, 0));
val = mt76_rr(dev, MT_BBP(AGC, 0));
val &= ~BIT(4);
switch (dev->chainmask & 0xf) {
switch (dev->mt76.chainmask & 0xf) {
case 2:
val |= BIT(3);
break;
@ -37,23 +36,23 @@ void mt76x02_phy_set_rxpath(struct mt76_dev *dev)
break;
}
__mt76_wr(dev, MT_BBP(AGC, 0), val);
mt76_wr(dev, MT_BBP(AGC, 0), val);
mb();
val = __mt76_rr(dev, MT_BBP(AGC, 0));
val = mt76_rr(dev, MT_BBP(AGC, 0));
}
EXPORT_SYMBOL_GPL(mt76x02_phy_set_rxpath);
void mt76x02_phy_set_txdac(struct mt76_dev *dev)
void mt76x02_phy_set_txdac(struct mt76x02_dev *dev)
{
int txpath;
txpath = (dev->chainmask >> 8) & 0xf;
txpath = (dev->mt76.chainmask >> 8) & 0xf;
switch (txpath) {
case 2:
__mt76_set(dev, MT_BBP(TXBE, 5), 0x3);
mt76_set(dev, MT_BBP(TXBE, 5), 0x3);
break;
default:
__mt76_clear(dev, MT_BBP(TXBE, 5), 0x3);
mt76_clear(dev, MT_BBP(TXBE, 5), 0x3);
break;
}
}
@ -102,40 +101,38 @@ void mt76x02_add_rate_power_offset(struct mt76_rate_power *r, int offset)
}
EXPORT_SYMBOL_GPL(mt76x02_add_rate_power_offset);
void mt76x02_phy_set_txpower(struct mt76_dev *dev, int txp_0, int txp_1)
void mt76x02_phy_set_txpower(struct mt76x02_dev *dev, int txp_0, int txp_1)
{
struct mt76_rate_power *t = &dev->rate_power;
struct mt76_rate_power *t = &dev->mt76.rate_power;
__mt76_rmw_field(dev, MT_TX_ALC_CFG_0, MT_TX_ALC_CFG_0_CH_INIT_0,
txp_0);
__mt76_rmw_field(dev, MT_TX_ALC_CFG_0, MT_TX_ALC_CFG_0_CH_INIT_1,
txp_1);
mt76_rmw_field(dev, MT_TX_ALC_CFG_0, MT_TX_ALC_CFG_0_CH_INIT_0, txp_0);
mt76_rmw_field(dev, MT_TX_ALC_CFG_0, MT_TX_ALC_CFG_0_CH_INIT_1, txp_1);
__mt76_wr(dev, MT_TX_PWR_CFG_0,
mt76x02_tx_power_mask(t->cck[0], t->cck[2], t->ofdm[0],
t->ofdm[2]));
__mt76_wr(dev, MT_TX_PWR_CFG_1,
mt76x02_tx_power_mask(t->ofdm[4], t->ofdm[6], t->ht[0],
t->ht[2]));
__mt76_wr(dev, MT_TX_PWR_CFG_2,
mt76x02_tx_power_mask(t->ht[4], t->ht[6], t->ht[8],
t->ht[10]));
__mt76_wr(dev, MT_TX_PWR_CFG_3,
mt76x02_tx_power_mask(t->ht[12], t->ht[14], t->stbc[0],
t->stbc[2]));
__mt76_wr(dev, MT_TX_PWR_CFG_4,
mt76x02_tx_power_mask(t->stbc[4], t->stbc[6], 0, 0));
__mt76_wr(dev, MT_TX_PWR_CFG_7,
mt76x02_tx_power_mask(t->ofdm[7], t->vht[8], t->ht[7],
t->vht[9]));
__mt76_wr(dev, MT_TX_PWR_CFG_8,
mt76x02_tx_power_mask(t->ht[14], 0, t->vht[8], t->vht[9]));
__mt76_wr(dev, MT_TX_PWR_CFG_9,
mt76x02_tx_power_mask(t->ht[7], 0, t->stbc[8], t->stbc[9]));
mt76_wr(dev, MT_TX_PWR_CFG_0,
mt76x02_tx_power_mask(t->cck[0], t->cck[2], t->ofdm[0],
t->ofdm[2]));
mt76_wr(dev, MT_TX_PWR_CFG_1,
mt76x02_tx_power_mask(t->ofdm[4], t->ofdm[6], t->ht[0],
t->ht[2]));
mt76_wr(dev, MT_TX_PWR_CFG_2,
mt76x02_tx_power_mask(t->ht[4], t->ht[6], t->ht[8],
t->ht[10]));
mt76_wr(dev, MT_TX_PWR_CFG_3,
mt76x02_tx_power_mask(t->ht[12], t->ht[14], t->stbc[0],
t->stbc[2]));
mt76_wr(dev, MT_TX_PWR_CFG_4,
mt76x02_tx_power_mask(t->stbc[4], t->stbc[6], 0, 0));
mt76_wr(dev, MT_TX_PWR_CFG_7,
mt76x02_tx_power_mask(t->ofdm[7], t->vht[8], t->ht[7],
t->vht[9]));
mt76_wr(dev, MT_TX_PWR_CFG_8,
mt76x02_tx_power_mask(t->ht[14], 0, t->vht[8], t->vht[9]));
mt76_wr(dev, MT_TX_PWR_CFG_9,
mt76x02_tx_power_mask(t->ht[7], 0, t->stbc[8], t->stbc[9]));
}
EXPORT_SYMBOL_GPL(mt76x02_phy_set_txpower);
int mt76x02_phy_get_min_avg_rssi(struct mt76_dev *dev)
int mt76x02_phy_get_min_avg_rssi(struct mt76x02_dev *dev)
{
struct mt76x02_sta *sta;
struct mt76_wcid *wcid;
@ -145,8 +142,8 @@ int mt76x02_phy_get_min_avg_rssi(struct mt76_dev *dev)
local_bh_disable();
rcu_read_lock();
for (i = 0; i < ARRAY_SIZE(dev->wcid_mask); i++) {
unsigned long mask = dev->wcid_mask[i];
for (i = 0; i < ARRAY_SIZE(dev->mt76.wcid_mask); i++) {
unsigned long mask = dev->mt76.wcid_mask[i];
if (!mask)
continue;
@ -155,17 +152,17 @@ int mt76x02_phy_get_min_avg_rssi(struct mt76_dev *dev)
if (!(mask & 1))
continue;
wcid = rcu_dereference(dev->wcid[j]);
wcid = rcu_dereference(dev->mt76.wcid[j]);
if (!wcid)
continue;
sta = container_of(wcid, struct mt76x02_sta, wcid);
spin_lock(&dev->rx_lock);
spin_lock(&dev->mt76.rx_lock);
if (sta->inactive_count++ < 5)
cur_rssi = ewma_signal_read(&sta->rssi);
else
cur_rssi = 0;
spin_unlock(&dev->rx_lock);
spin_unlock(&dev->mt76.rx_lock);
if (cur_rssi < min_rssi)
min_rssi = cur_rssi;
@ -181,3 +178,81 @@ int mt76x02_phy_get_min_avg_rssi(struct mt76_dev *dev)
return min_rssi;
}
EXPORT_SYMBOL_GPL(mt76x02_phy_get_min_avg_rssi);
void mt76x02_phy_set_bw(struct mt76x02_dev *dev, int width, u8 ctrl)
{
int core_val, agc_val;
switch (width) {
case NL80211_CHAN_WIDTH_80:
core_val = 3;
agc_val = 7;
break;
case NL80211_CHAN_WIDTH_40:
core_val = 2;
agc_val = 3;
break;
default:
core_val = 0;
agc_val = 1;
break;
}
mt76_rmw_field(dev, MT_BBP(CORE, 1), MT_BBP_CORE_R1_BW, core_val);
mt76_rmw_field(dev, MT_BBP(AGC, 0), MT_BBP_AGC_R0_BW, agc_val);
mt76_rmw_field(dev, MT_BBP(AGC, 0), MT_BBP_AGC_R0_CTRL_CHAN, ctrl);
mt76_rmw_field(dev, MT_BBP(TXBE, 0), MT_BBP_TXBE_R0_CTRL_CHAN, ctrl);
}
EXPORT_SYMBOL_GPL(mt76x02_phy_set_bw);
void mt76x02_phy_set_band(struct mt76x02_dev *dev, int band,
bool primary_upper)
{
switch (band) {
case NL80211_BAND_2GHZ:
mt76_set(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_2G);
mt76_clear(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_5G);
break;
case NL80211_BAND_5GHZ:
mt76_clear(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_2G);
mt76_set(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_5G);
break;
}
mt76_rmw_field(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_UPPER_40M,
primary_upper);
}
EXPORT_SYMBOL_GPL(mt76x02_phy_set_band);
bool mt76x02_phy_adjust_vga_gain(struct mt76x02_dev *dev)
{
u8 limit = dev->cal.low_gain > 0 ? 16 : 4;
bool ret = false;
u32 false_cca;
false_cca = FIELD_GET(MT_RX_STAT_1_CCA_ERRORS, mt76_rr(dev, MT_RX_STAT_1));
dev->cal.false_cca = false_cca;
if (false_cca > 800 && dev->cal.agc_gain_adjust < limit) {
dev->cal.agc_gain_adjust += 2;
ret = true;
} else if ((false_cca < 10 && dev->cal.agc_gain_adjust > 0) ||
(dev->cal.agc_gain_adjust >= limit && false_cca < 500)) {
dev->cal.agc_gain_adjust -= 2;
ret = true;
}
return ret;
}
EXPORT_SYMBOL_GPL(mt76x02_phy_adjust_vga_gain);
void mt76x02_init_agc_gain(struct mt76x02_dev *dev)
{
dev->cal.agc_gain_init[0] = mt76_get_field(dev, MT_BBP(AGC, 8),
MT_BBP_AGC_GAIN);
dev->cal.agc_gain_init[1] = mt76_get_field(dev, MT_BBP(AGC, 9),
MT_BBP_AGC_GAIN);
memcpy(dev->cal.agc_gain_cur, dev->cal.agc_gain_init,
sizeof(dev->cal.agc_gain_cur));
dev->cal.low_gain = -1;
}
EXPORT_SYMBOL_GPL(mt76x02_init_agc_gain);


@ -19,12 +19,43 @@
#include "mt76x02_regs.h"
static inline int
mt76x02_get_rssi_gain_thresh(struct mt76x02_dev *dev)
{
switch (dev->mt76.chandef.width) {
case NL80211_CHAN_WIDTH_80:
return -62;
case NL80211_CHAN_WIDTH_40:
return -65;
default:
return -68;
}
}
static inline int
mt76x02_get_low_rssi_gain_thresh(struct mt76x02_dev *dev)
{
switch (dev->mt76.chandef.width) {
case NL80211_CHAN_WIDTH_80:
return -76;
case NL80211_CHAN_WIDTH_40:
return -79;
default:
return -82;
}
}
void mt76x02_add_rate_power_offset(struct mt76_rate_power *r, int offset);
void mt76x02_phy_set_txpower(struct mt76_dev *dev, int txp_0, int txp_2);
void mt76x02_phy_set_txpower(struct mt76x02_dev *dev, int txp_0, int txp_2);
void mt76x02_limit_rate_power(struct mt76_rate_power *r, int limit);
int mt76x02_get_max_rate_power(struct mt76_rate_power *r);
void mt76x02_phy_set_rxpath(struct mt76_dev *dev);
void mt76x02_phy_set_txdac(struct mt76_dev *dev);
int mt76x02_phy_get_min_avg_rssi(struct mt76_dev *dev);
void mt76x02_phy_set_rxpath(struct mt76x02_dev *dev);
void mt76x02_phy_set_txdac(struct mt76x02_dev *dev);
int mt76x02_phy_get_min_avg_rssi(struct mt76x02_dev *dev);
void mt76x02_phy_set_bw(struct mt76x02_dev *dev, int width, u8 ctrl);
void mt76x02_phy_set_band(struct mt76x02_dev *dev, int band,
bool primary_upper);
bool mt76x02_phy_adjust_vga_gain(struct mt76x02_dev *dev);
void mt76x02_init_agc_gain(struct mt76x02_dev *dev);
#endif /* __MT76x02_PHY_H */


@ -205,8 +205,8 @@
#define MT_TXQ_STA 0x0434
#define MT_RF_CSR_CFG 0x0500
#define MT_RF_CSR_CFG_DATA GENMASK(7, 0)
#define MT_RF_CSR_CFG_REG_ID GENMASK(13, 8)
#define MT_RF_CSR_CFG_REG_BANK GENMASK(17, 14)
#define MT_RF_CSR_CFG_REG_ID GENMASK(14, 8)
#define MT_RF_CSR_CFG_REG_BANK GENMASK(17, 15)
#define MT_RF_CSR_CFG_WR BIT(30)
#define MT_RF_CSR_CFG_KICK BIT(31)
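
The hunk above fixes the RF CSR field layout: MT_RF_CSR_CFG_REG_ID grows to seven bits (14:8) and MT_RF_CSR_CFG_REG_BANK shrinks to three bits (17:15). A throwaway userspace re-implementation of GENMASK()/FIELD_PREP() (the real macros live in linux/bits.h and linux/bitfield.h; the bank/register/data values below are made up) shows how the corrected fields pack into the 32-bit register:

/* Userspace stand-ins for the kernel bitfield helpers, for illustration. */
#include <stdio.h>

#define GENMASK(h, l)		(((~0u) << (l)) & (~0u >> (31 - (h))))
#define FIELD_PREP(mask, val)	(((val) << __builtin_ctz(mask)) & (mask))

#define MT_RF_CSR_CFG_DATA	GENMASK(7, 0)
#define MT_RF_CSR_CFG_REG_ID	GENMASK(14, 8)	/* 7 bits after the fix */
#define MT_RF_CSR_CFG_REG_BANK	GENMASK(17, 15)	/* 3 bits after the fix */

int main(void)
{
	/* Example values only. */
	unsigned int val = FIELD_PREP(MT_RF_CSR_CFG_REG_BANK, 5) |
			   FIELD_PREP(MT_RF_CSR_CFG_REG_ID, 0x42) |
			   FIELD_PREP(MT_RF_CSR_CFG_DATA, 0xaa);

	printf("0x%08x\n", val);
	return 0;
}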


@ -71,7 +71,7 @@ void mt76x02_queue_rx_skb(struct mt76_dev *mdev, enum mt76_rxq_id q,
}
EXPORT_SYMBOL_GPL(mt76x02_queue_rx_skb);
s8 mt76x02_tx_get_max_txpwr_adj(struct mt76_dev *dev,
s8 mt76x02_tx_get_max_txpwr_adj(struct mt76x02_dev *dev,
const struct ieee80211_tx_rate *rate)
{
s8 max_txpwr;
@ -80,23 +80,23 @@ s8 mt76x02_tx_get_max_txpwr_adj(struct mt76_dev *dev,
u8 mcs = ieee80211_rate_get_vht_mcs(rate);
if (mcs == 8 || mcs == 9) {
max_txpwr = dev->rate_power.vht[8];
max_txpwr = dev->mt76.rate_power.vht[8];
} else {
u8 nss, idx;
nss = ieee80211_rate_get_vht_nss(rate);
idx = ((nss - 1) << 3) + mcs;
max_txpwr = dev->rate_power.ht[idx & 0xf];
max_txpwr = dev->mt76.rate_power.ht[idx & 0xf];
}
} else if (rate->flags & IEEE80211_TX_RC_MCS) {
max_txpwr = dev->rate_power.ht[rate->idx & 0xf];
max_txpwr = dev->mt76.rate_power.ht[rate->idx & 0xf];
} else {
enum nl80211_band band = dev->chandef.chan->band;
enum nl80211_band band = dev->mt76.chandef.chan->band;
if (band == NL80211_BAND_2GHZ) {
const struct ieee80211_rate *r;
struct wiphy *wiphy = dev->hw->wiphy;
struct mt76_rate_power *rp = &dev->rate_power;
struct wiphy *wiphy = dev->mt76.hw->wiphy;
struct mt76_rate_power *rp = &dev->mt76.rate_power;
r = &wiphy->bands[band]->bitrates[rate->idx];
if (r->flags & IEEE80211_RATE_SHORT_PREAMBLE)
@ -104,7 +104,7 @@ s8 mt76x02_tx_get_max_txpwr_adj(struct mt76_dev *dev,
else
max_txpwr = rp->ofdm[r->hw_value & 0x7];
} else {
max_txpwr = dev->rate_power.ofdm[rate->idx & 0x7];
max_txpwr = dev->mt76.rate_power.ofdm[rate->idx & 0x7];
}
}
@ -112,10 +112,8 @@ s8 mt76x02_tx_get_max_txpwr_adj(struct mt76_dev *dev,
}
EXPORT_SYMBOL_GPL(mt76x02_tx_get_max_txpwr_adj);
s8 mt76x02_tx_get_txpwr_adj(struct mt76_dev *mdev, s8 txpwr, s8 max_txpwr_adj)
s8 mt76x02_tx_get_txpwr_adj(struct mt76x02_dev *dev, s8 txpwr, s8 max_txpwr_adj)
{
struct mt76x02_dev *dev = container_of(mdev, struct mt76x02_dev, mt76);
txpwr = min_t(s8, txpwr, dev->mt76.txpower_conf);
txpwr -= (dev->target_power + dev->target_power_delta[0]);
txpwr = min_t(s8, txpwr, max_txpwr_adj);
@ -133,7 +131,7 @@ void mt76x02_tx_set_txpwr_auto(struct mt76x02_dev *dev, s8 txpwr)
{
s8 txpwr_adj;
txpwr_adj = mt76x02_tx_get_txpwr_adj(&dev->mt76, txpwr,
txpwr_adj = mt76x02_tx_get_txpwr_adj(dev, txpwr,
dev->mt76.rate_power.ofdm[4]);
mt76_rmw_field(dev, MT_PROT_AUTO_TX_CFG,
MT_PROT_AUTO_TX_CFG_PROT_PADJ, txpwr_adj);
@ -157,8 +155,9 @@ void mt76x02_tx_complete(struct mt76_dev *dev, struct sk_buff *skb)
}
EXPORT_SYMBOL_GPL(mt76x02_tx_complete);
bool mt76x02_tx_status_data(struct mt76_dev *dev, u8 *update)
bool mt76x02_tx_status_data(struct mt76_dev *mdev, u8 *update)
{
struct mt76x02_dev *dev = container_of(mdev, struct mt76x02_dev, mt76);
struct mt76x02_tx_status stat;
if (!mt76x02_mac_load_tx_status(dev, &stat))
@ -181,9 +180,9 @@ int mt76x02_tx_prepare_skb(struct mt76_dev *mdev, void *txwi,
int ret;
if (q == &dev->mt76.q_tx[MT_TXQ_PSD] && wcid && wcid->idx < 128)
mt76x02_mac_wcid_set_drop(&dev->mt76, wcid->idx, false);
mt76x02_mac_wcid_set_drop(dev, wcid->idx, false);
mt76x02_mac_write_txwi(mdev, txwi, skb, wcid, sta, skb->len);
mt76x02_mac_write_txwi(dev, txwi, skb, wcid, sta, skb->len);
ret = mt76x02_insert_hdr_pad(skb);
if (ret < 0)


@ -17,15 +17,15 @@
#ifndef __MT76x02_USB_H
#define __MT76x02_USB_H
#include "mt76.h"
#include "mt76x02.h"
void mt76x02u_init_mcu(struct mt76_dev *dev);
void mt76x02u_mcu_fw_reset(struct mt76_dev *dev);
int mt76x02u_mcu_fw_send_data(struct mt76_dev *dev, const void *data,
void mt76x02u_mcu_fw_reset(struct mt76x02_dev *dev);
int mt76x02u_mcu_fw_send_data(struct mt76x02_dev *dev, const void *data,
int data_len, u32 max_payload, u32 offset);
int mt76x02u_skb_dma_info(struct sk_buff *skb, int port, u32 flags);
int mt76x02u_tx_prepare_skb(struct mt76_dev *dev, void *data,
int mt76x02u_tx_prepare_skb(struct mt76_dev *mdev, void *data,
struct sk_buff *skb, struct mt76_queue *q,
struct mt76_wcid *wcid, struct ieee80211_sta *sta,
u32 *tx_info);


@ -34,17 +34,6 @@ void mt76x02u_tx_complete_skb(struct mt76_dev *mdev, struct mt76_queue *q,
}
EXPORT_SYMBOL_GPL(mt76x02u_tx_complete_skb);
static int mt76x02u_check_skb_rooms(struct sk_buff *skb)
{
int hdr_len = ieee80211_get_hdrlen_from_skb(skb);
u32 need_head;
need_head = sizeof(struct mt76x02_txwi) + MT_DMA_HDR_LEN;
if (hdr_len % 4)
need_head += 2;
return skb_cow(skb, need_head);
}
int mt76x02u_skb_dma_info(struct sk_buff *skb, int port, u32 flags)
{
struct sk_buff *iter, *last = skb;
@ -99,17 +88,14 @@ mt76x02u_set_txinfo(struct sk_buff *skb, struct mt76_wcid *wcid, u8 ep)
return mt76x02u_skb_dma_info(skb, WLAN_PORT, flags);
}
int mt76x02u_tx_prepare_skb(struct mt76_dev *dev, void *data,
int mt76x02u_tx_prepare_skb(struct mt76_dev *mdev, void *data,
struct sk_buff *skb, struct mt76_queue *q,
struct mt76_wcid *wcid, struct ieee80211_sta *sta,
u32 *tx_info)
{
struct mt76x02_dev *dev = container_of(mdev, struct mt76x02_dev, mt76);
struct mt76x02_txwi *txwi;
int err, len = skb->len;
err = mt76x02u_check_skb_rooms(skb);
if (err < 0)
return -ENOMEM;
int len = skb->len;
mt76x02_insert_hdr_pad(skb);


@ -17,8 +17,7 @@
#include <linux/module.h>
#include <linux/firmware.h>
#include "mt76.h"
#include "mt76x02_dma.h"
#include "mt76x02.h"
#include "mt76x02_mcu.h"
#include "mt76x02_usb.h"
@ -255,16 +254,16 @@ mt76x02u_mcu_rd_rp(struct mt76_dev *dev, u32 base,
return ret;
}
void mt76x02u_mcu_fw_reset(struct mt76_dev *dev)
void mt76x02u_mcu_fw_reset(struct mt76x02_dev *dev)
{
mt76u_vendor_request(dev, MT_VEND_DEV_MODE,
mt76u_vendor_request(&dev->mt76, MT_VEND_DEV_MODE,
USB_DIR_OUT | USB_TYPE_VENDOR,
0x1, 0, NULL, 0);
}
EXPORT_SYMBOL_GPL(mt76x02u_mcu_fw_reset);
static int
__mt76x02u_mcu_fw_send_data(struct mt76_dev *dev, struct mt76u_buf *buf,
__mt76x02u_mcu_fw_send_data(struct mt76x02_dev *dev, struct mt76u_buf *buf,
const void *fw_data, int len, u32 dst_addr)
{
u8 *data = sg_virt(&buf->urb->sg[0]);
@ -281,14 +280,14 @@ __mt76x02u_mcu_fw_send_data(struct mt76_dev *dev, struct mt76u_buf *buf,
memcpy(data + sizeof(info), fw_data, len);
memset(data + sizeof(info) + len, 0, 4);
mt76u_single_wr(dev, MT_VEND_WRITE_FCE,
mt76u_single_wr(&dev->mt76, MT_VEND_WRITE_FCE,
MT_FCE_DMA_ADDR, dst_addr);
len = roundup(len, 4);
mt76u_single_wr(dev, MT_VEND_WRITE_FCE,
mt76u_single_wr(&dev->mt76, MT_VEND_WRITE_FCE,
MT_FCE_DMA_LEN, len << 16);
buf->len = MT_CMD_HDR_LEN + len + sizeof(info);
err = mt76u_submit_buf(dev, USB_DIR_OUT,
err = mt76u_submit_buf(&dev->mt76, USB_DIR_OUT,
MT_EP_OUT_INBAND_CMD,
buf, GFP_KERNEL,
mt76u_mcu_complete_urb, &cmpl);
@ -297,31 +296,31 @@ __mt76x02u_mcu_fw_send_data(struct mt76_dev *dev, struct mt76u_buf *buf,
if (!wait_for_completion_timeout(&cmpl,
msecs_to_jiffies(1000))) {
dev_err(dev->dev, "firmware upload timed out\n");
dev_err(dev->mt76.dev, "firmware upload timed out\n");
usb_kill_urb(buf->urb);
return -ETIMEDOUT;
}
if (mt76u_urb_error(buf->urb)) {
dev_err(dev->dev, "firmware upload failed: %d\n",
dev_err(dev->mt76.dev, "firmware upload failed: %d\n",
buf->urb->status);
return buf->urb->status;
}
val = mt76u_rr(dev, MT_TX_CPU_FROM_FCE_CPU_DESC_IDX);
val = mt76_rr(dev, MT_TX_CPU_FROM_FCE_CPU_DESC_IDX);
val++;
mt76u_wr(dev, MT_TX_CPU_FROM_FCE_CPU_DESC_IDX, val);
mt76_wr(dev, MT_TX_CPU_FROM_FCE_CPU_DESC_IDX, val);
return 0;
}
int mt76x02u_mcu_fw_send_data(struct mt76_dev *dev, const void *data,
int mt76x02u_mcu_fw_send_data(struct mt76x02_dev *dev, const void *data,
int data_len, u32 max_payload, u32 offset)
{
int err, len, pos = 0, max_len = max_payload - 8;
struct mt76u_buf buf;
err = mt76u_buf_alloc(dev, &buf, 1, max_payload, max_payload,
err = mt76u_buf_alloc(&dev->mt76, &buf, 1, max_payload, max_payload,
GFP_KERNEL);
if (err < 0)
return err;
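
For the firmware upload path, max_len = max_payload - 8 reserves the per-chunk header and trailing padding, and the image is pushed chunk by chunk through __mt76x02u_mcu_fw_send_data(). The loop itself is outside this hunk, so the sketch below only illustrates the chunking arithmetic, with made-up sizes and a stub send routine:

/* Illustrative chunked-upload loop; not the kernel implementation. */
#include <stdio.h>

static int send_chunk(const unsigned char *data, int len, unsigned int dst)
{
	printf("send %d bytes to 0x%08x\n", len, dst);
	return 0;	/* pretend the USB transfer succeeded */
}

static int fw_send_data(const unsigned char *data, int data_len,
			int max_payload, unsigned int offset)
{
	int max_len = max_payload - 8, pos = 0, err = 0;

	while (data_len > 0) {
		int len = data_len < max_len ? data_len : max_len;

		err = send_chunk(data + pos, len, offset + pos);
		if (err < 0)
			break;

		data_len -= len;
		pos += len;
	}
	return err;
}

int main(void)
{
	unsigned char image[10000] = { 0 };

	return fw_send_data(image, sizeof(image), 4096, 0x80000);
}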


@ -48,21 +48,21 @@ struct ieee80211_rate mt76x02_rates[] = {
EXPORT_SYMBOL_GPL(mt76x02_rates);
void mt76x02_configure_filter(struct ieee80211_hw *hw,
unsigned int changed_flags,
unsigned int *total_flags, u64 multicast)
unsigned int changed_flags,
unsigned int *total_flags, u64 multicast)
{
struct mt76_dev *dev = hw->priv;
struct mt76x02_dev *dev = hw->priv;
u32 flags = 0;
#define MT76_FILTER(_flag, _hw) do { \
flags |= *total_flags & FIF_##_flag; \
dev->rxfilter &= ~(_hw); \
dev->rxfilter |= !(flags & FIF_##_flag) * (_hw); \
dev->mt76.rxfilter &= ~(_hw); \
dev->mt76.rxfilter |= !(flags & FIF_##_flag) * (_hw); \
} while (0)
mutex_lock(&dev->mutex);
mutex_lock(&dev->mt76.mutex);
dev->rxfilter &= ~MT_RX_FILTR_CFG_OTHER_BSS;
dev->mt76.rxfilter &= ~MT_RX_FILTR_CFG_OTHER_BSS;
MT76_FILTER(FCSFAIL, MT_RX_FILTR_CFG_CRC_ERR);
MT76_FILTER(PLCPFAIL, MT_RX_FILTR_CFG_PHY_ERR);
@ -75,25 +75,25 @@ void mt76x02_configure_filter(struct ieee80211_hw *hw,
MT76_FILTER(PSPOLL, MT_RX_FILTR_CFG_PSPOLL);
*total_flags = flags;
dev->bus->wr(dev, MT_RX_FILTR_CFG, dev->rxfilter);
mt76_wr(dev, MT_RX_FILTR_CFG, dev->mt76.rxfilter);
mutex_unlock(&dev->mutex);
mutex_unlock(&dev->mt76.mutex);
}
EXPORT_SYMBOL_GPL(mt76x02_configure_filter);
int mt76x02_sta_add(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
struct ieee80211_sta *sta)
struct ieee80211_sta *sta)
{
struct mt76_dev *dev = hw->priv;
struct mt76x02_sta *msta = (struct mt76x02_sta *) sta->drv_priv;
struct mt76x02_vif *mvif = (struct mt76x02_vif *) vif->drv_priv;
struct mt76x02_dev *dev = hw->priv;
struct mt76x02_sta *msta = (struct mt76x02_sta *)sta->drv_priv;
struct mt76x02_vif *mvif = (struct mt76x02_vif *)vif->drv_priv;
int ret = 0;
int idx = 0;
int i;
mutex_lock(&dev->mutex);
mutex_lock(&dev->mt76.mutex);
idx = mt76_wcid_alloc(dev->wcid_mask, ARRAY_SIZE(dev->wcid));
idx = mt76_wcid_alloc(dev->mt76.wcid_mask, ARRAY_SIZE(dev->mt76.wcid));
if (idx < 0) {
ret = -ENOSPC;
goto out;
@ -113,40 +113,40 @@ int mt76x02_sta_add(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
ewma_signal_init(&msta->rssi);
rcu_assign_pointer(dev->wcid[idx], &msta->wcid);
rcu_assign_pointer(dev->mt76.wcid[idx], &msta->wcid);
out:
mutex_unlock(&dev->mutex);
mutex_unlock(&dev->mt76.mutex);
return ret;
}
EXPORT_SYMBOL_GPL(mt76x02_sta_add);
int mt76x02_sta_remove(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
struct ieee80211_sta *sta)
struct ieee80211_sta *sta)
{
struct mt76_dev *dev = hw->priv;
struct mt76x02_sta *msta = (struct mt76x02_sta *) sta->drv_priv;
struct mt76x02_dev *dev = hw->priv;
struct mt76x02_sta *msta = (struct mt76x02_sta *)sta->drv_priv;
int idx = msta->wcid.idx;
int i;
mutex_lock(&dev->mutex);
rcu_assign_pointer(dev->wcid[idx], NULL);
mutex_lock(&dev->mt76.mutex);
rcu_assign_pointer(dev->mt76.wcid[idx], NULL);
for (i = 0; i < ARRAY_SIZE(sta->txq); i++)
mt76_txq_remove(dev, sta->txq[i]);
mt76_txq_remove(&dev->mt76, sta->txq[i]);
mt76x02_mac_wcid_set_drop(dev, idx, true);
mt76_wcid_free(dev->wcid_mask, idx);
mt76_wcid_free(dev->mt76.wcid_mask, idx);
mt76x02_mac_wcid_setup(dev, idx, 0, NULL);
mutex_unlock(&dev->mutex);
mutex_unlock(&dev->mt76.mutex);
return 0;
}
EXPORT_SYMBOL_GPL(mt76x02_sta_remove);
void mt76x02_vif_init(struct mt76_dev *dev, struct ieee80211_vif *vif,
unsigned int idx)
void mt76x02_vif_init(struct mt76x02_dev *dev, struct ieee80211_vif *vif,
unsigned int idx)
{
struct mt76x02_vif *mvif = (struct mt76x02_vif *) vif->drv_priv;
struct mt76x02_vif *mvif = (struct mt76x02_vif *)vif->drv_priv;
mvif->idx = idx;
mvif->group_wcid.idx = MT_VIF_WCID(idx);
@ -158,11 +158,11 @@ EXPORT_SYMBOL_GPL(mt76x02_vif_init);
int
mt76x02_add_interface(struct ieee80211_hw *hw, struct ieee80211_vif *vif)
{
struct mt76_dev *dev = hw->priv;
struct mt76x02_dev *dev = hw->priv;
unsigned int idx = 0;
if (vif->addr[0] & BIT(1))
idx = 1 + (((dev->macaddr[0] ^ vif->addr[0]) >> 2) & 7);
idx = 1 + (((dev->mt76.macaddr[0] ^ vif->addr[0]) >> 2) & 7);
/*
* Client mode typically only has one configurable BSSID register,
@ -186,20 +186,20 @@ mt76x02_add_interface(struct ieee80211_hw *hw, struct ieee80211_vif *vif)
EXPORT_SYMBOL_GPL(mt76x02_add_interface);
void mt76x02_remove_interface(struct ieee80211_hw *hw,
struct ieee80211_vif *vif)
struct ieee80211_vif *vif)
{
struct mt76_dev *dev = hw->priv;
struct mt76x02_dev *dev = hw->priv;
mt76_txq_remove(dev, vif->txq);
mt76_txq_remove(&dev->mt76, vif->txq);
}
EXPORT_SYMBOL_GPL(mt76x02_remove_interface);
int mt76x02_ampdu_action(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
struct ieee80211_ampdu_params *params)
struct ieee80211_ampdu_params *params)
{
enum ieee80211_ampdu_mlme_action action = params->action;
struct ieee80211_sta *sta = params->sta;
struct mt76_dev *dev = hw->priv;
struct mt76x02_dev *dev = hw->priv;
struct mt76x02_sta *msta = (struct mt76x02_sta *) sta->drv_priv;
struct ieee80211_txq *txq = sta->txq[params->tid];
u16 tid = params->tid;
@ -213,12 +213,14 @@ int mt76x02_ampdu_action(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
switch (action) {
case IEEE80211_AMPDU_RX_START:
mt76_rx_aggr_start(dev, &msta->wcid, tid, *ssn, params->buf_size);
__mt76_set(dev, MT_WCID_ADDR(msta->wcid.idx) + 4, BIT(16 + tid));
mt76_rx_aggr_start(&dev->mt76, &msta->wcid, tid,
*ssn, params->buf_size);
mt76_set(dev, MT_WCID_ADDR(msta->wcid.idx) + 4, BIT(16 + tid));
break;
case IEEE80211_AMPDU_RX_STOP:
mt76_rx_aggr_stop(dev, &msta->wcid, tid);
__mt76_clear(dev, MT_WCID_ADDR(msta->wcid.idx) + 4, BIT(16 + tid));
mt76_rx_aggr_stop(&dev->mt76, &msta->wcid, tid);
mt76_clear(dev, MT_WCID_ADDR(msta->wcid.idx) + 4,
BIT(16 + tid));
break;
case IEEE80211_AMPDU_TX_OPERATIONAL:
mtxq->aggr = true;
@ -245,11 +247,11 @@ int mt76x02_ampdu_action(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
EXPORT_SYMBOL_GPL(mt76x02_ampdu_action);
int mt76x02_set_key(struct ieee80211_hw *hw, enum set_key_cmd cmd,
struct ieee80211_vif *vif, struct ieee80211_sta *sta,
struct ieee80211_key_conf *key)
struct ieee80211_vif *vif, struct ieee80211_sta *sta,
struct ieee80211_key_conf *key)
{
struct mt76_dev *dev = hw->priv;
struct mt76x02_vif *mvif = (struct mt76x02_vif *) vif->drv_priv;
struct mt76x02_dev *dev = hw->priv;
struct mt76x02_vif *mvif = (struct mt76x02_vif *)vif->drv_priv;
struct mt76x02_sta *msta;
struct mt76_wcid *wcid;
int idx = key->keyidx;
@ -295,7 +297,7 @@ int mt76x02_set_key(struct ieee80211_hw *hw, enum set_key_cmd cmd,
key = NULL;
}
mt76_wcid_key_setup(dev, wcid, key);
mt76_wcid_key_setup(&dev->mt76, wcid, key);
if (!msta) {
if (key || wcid->hw_key_idx == idx) {
@ -312,13 +314,13 @@ int mt76x02_set_key(struct ieee80211_hw *hw, enum set_key_cmd cmd,
EXPORT_SYMBOL_GPL(mt76x02_set_key);
int mt76x02_conf_tx(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
u16 queue, const struct ieee80211_tx_queue_params *params)
u16 queue, const struct ieee80211_tx_queue_params *params)
{
struct mt76_dev *dev = hw->priv;
struct mt76x02_dev *dev = hw->priv;
u8 cw_min = 5, cw_max = 10, qid;
u32 val;
qid = dev->q_tx[queue].hw_idx;
qid = dev->mt76.q_tx[queue].hw_idx;
if (params->cw_min)
cw_min = fls(params->cw_min);
@ -329,27 +331,27 @@ int mt76x02_conf_tx(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
FIELD_PREP(MT_EDCA_CFG_AIFSN, params->aifs) |
FIELD_PREP(MT_EDCA_CFG_CWMIN, cw_min) |
FIELD_PREP(MT_EDCA_CFG_CWMAX, cw_max);
__mt76_wr(dev, MT_EDCA_CFG_AC(qid), val);
mt76_wr(dev, MT_EDCA_CFG_AC(qid), val);
val = __mt76_rr(dev, MT_WMM_TXOP(qid));
val = mt76_rr(dev, MT_WMM_TXOP(qid));
val &= ~(MT_WMM_TXOP_MASK << MT_WMM_TXOP_SHIFT(qid));
val |= params->txop << MT_WMM_TXOP_SHIFT(qid);
__mt76_wr(dev, MT_WMM_TXOP(qid), val);
mt76_wr(dev, MT_WMM_TXOP(qid), val);
val = __mt76_rr(dev, MT_WMM_AIFSN);
val = mt76_rr(dev, MT_WMM_AIFSN);
val &= ~(MT_WMM_AIFSN_MASK << MT_WMM_AIFSN_SHIFT(qid));
val |= params->aifs << MT_WMM_AIFSN_SHIFT(qid);
__mt76_wr(dev, MT_WMM_AIFSN, val);
mt76_wr(dev, MT_WMM_AIFSN, val);
val = __mt76_rr(dev, MT_WMM_CWMIN);
val = mt76_rr(dev, MT_WMM_CWMIN);
val &= ~(MT_WMM_CWMIN_MASK << MT_WMM_CWMIN_SHIFT(qid));
val |= cw_min << MT_WMM_CWMIN_SHIFT(qid);
__mt76_wr(dev, MT_WMM_CWMIN, val);
mt76_wr(dev, MT_WMM_CWMIN, val);
val = __mt76_rr(dev, MT_WMM_CWMAX);
val = mt76_rr(dev, MT_WMM_CWMAX);
val &= ~(MT_WMM_CWMAX_MASK << MT_WMM_CWMAX_SHIFT(qid));
val |= cw_max << MT_WMM_CWMAX_SHIFT(qid);
__mt76_wr(dev, MT_WMM_CWMAX, val);
mt76_wr(dev, MT_WMM_CWMAX, val);
return 0;
}
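
Note the cw_min = fls(params->cw_min) conversion above: mac80211 hands over contention windows as 2^n - 1 values, and the EDCA registers evidently take the exponent, so a window of 15 is written as 4 and the defaults of 5 and 10 correspond to windows of 31 and 1023. A small standalone check of that mapping (fls() approximated with __builtin_clz for nonzero input):

/* Standalone check of the CW -> exponent conversion done with fls() above. */
#include <stdio.h>

static int fls32(unsigned int v)	/* index of highest set bit, 1-based */
{
	return v ? 32 - __builtin_clz(v) : 0;
}

int main(void)
{
	printf("cw 15   -> exponent %d\n", fls32(15));	/* 4 */
	printf("cw 1023 -> exponent %d\n", fls32(1023));	/* 10 */
	return 0;
}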
@ -359,7 +361,7 @@ void mt76x02_sta_rate_tbl_update(struct ieee80211_hw *hw,
struct ieee80211_vif *vif,
struct ieee80211_sta *sta)
{
struct mt76_dev *dev = hw->priv;
struct mt76x02_dev *dev = hw->priv;
struct mt76x02_sta *msta = (struct mt76x02_sta *) sta->drv_priv;
struct ieee80211_sta_rates *rates = rcu_dereference(sta->rates);
struct ieee80211_tx_rate rate = {};
@ -425,7 +427,7 @@ const u16 mt76x02_beacon_offsets[16] = {
};
EXPORT_SYMBOL_GPL(mt76x02_beacon_offsets);
void mt76x02_set_beacon_offsets(struct mt76_dev *dev)
void mt76x02_set_beacon_offsets(struct mt76x02_dev *dev)
{
u16 val, base = MT_BEACON_BASE;
u32 regs[4] = {};
@ -437,7 +439,7 @@ void mt76x02_set_beacon_offsets(struct mt76_dev *dev)
}
for (i = 0; i < 4; i++)
__mt76_wr(dev, MT_BCN_OFFSET(i), regs[i]);
mt76_wr(dev, MT_BCN_OFFSET(i), regs[i]);
}
EXPORT_SYMBOL_GPL(mt76x02_set_beacon_offsets);


@ -177,8 +177,8 @@ mt76x2_eeprom_load(struct mt76x02_dev *dev)
efuse = dev->mt76.otp.data;
if (mt76x02_get_efuse_data(&dev->mt76, 0, efuse,
MT7662_EEPROM_SIZE, MT_EE_READ))
if (mt76x02_get_efuse_data(dev, 0, efuse, MT7662_EEPROM_SIZE,
MT_EE_READ))
goto out;
if (found) {
@ -248,22 +248,22 @@ mt76x2_get_5g_rx_gain(struct mt76x02_dev *dev, u8 channel)
group = mt76x2_get_cal_channel_group(channel);
switch (group) {
case MT_CH_5G_JAPAN:
return mt76x02_eeprom_get(&dev->mt76,
return mt76x02_eeprom_get(dev,
MT_EE_RF_5G_GRP0_1_RX_HIGH_GAIN);
case MT_CH_5G_UNII_1:
return mt76x02_eeprom_get(&dev->mt76,
return mt76x02_eeprom_get(dev,
MT_EE_RF_5G_GRP0_1_RX_HIGH_GAIN) >> 8;
case MT_CH_5G_UNII_2:
return mt76x02_eeprom_get(&dev->mt76,
return mt76x02_eeprom_get(dev,
MT_EE_RF_5G_GRP2_3_RX_HIGH_GAIN);
case MT_CH_5G_UNII_2E_1:
return mt76x02_eeprom_get(&dev->mt76,
return mt76x02_eeprom_get(dev,
MT_EE_RF_5G_GRP2_3_RX_HIGH_GAIN) >> 8;
case MT_CH_5G_UNII_2E_2:
return mt76x02_eeprom_get(&dev->mt76,
return mt76x02_eeprom_get(dev,
MT_EE_RF_5G_GRP4_5_RX_HIGH_GAIN);
default:
return mt76x02_eeprom_get(&dev->mt76,
return mt76x02_eeprom_get(dev,
MT_EE_RF_5G_GRP4_5_RX_HIGH_GAIN) >> 8;
}
}
@ -277,14 +277,13 @@ void mt76x2_read_rx_gain(struct mt76x02_dev *dev)
u16 val;
if (chan->band == NL80211_BAND_2GHZ)
val = mt76x02_eeprom_get(&dev->mt76,
MT_EE_RF_2G_RX_HIGH_GAIN) >> 8;
val = mt76x02_eeprom_get(dev, MT_EE_RF_2G_RX_HIGH_GAIN) >> 8;
else
val = mt76x2_get_5g_rx_gain(dev, channel);
mt76x2_set_rx_gain_group(dev, val);
mt76x02_get_rx_gain(&dev->mt76, chan->band, &val, &lna_2g, lna_5g);
mt76x02_get_rx_gain(dev, chan->band, &val, &lna_2g, lna_5g);
mt76x2_set_rssi_offset(dev, 0, val);
mt76x2_set_rssi_offset(dev, 1, val >> 8);
@ -293,7 +292,7 @@ void mt76x2_read_rx_gain(struct mt76x02_dev *dev)
dev->cal.rx.mcu_gain |= (lna_5g[1] & 0xff) << 16;
dev->cal.rx.mcu_gain |= (lna_5g[2] & 0xff) << 24;
lna = mt76x02_get_lna_gain(&dev->mt76, &lna_2g, lna_5g, chan);
lna = mt76x02_get_lna_gain(dev, &lna_2g, lna_5g, chan);
dev->cal.rx.lna_gain = mt76x02_sign_extend(lna, 8);
}
EXPORT_SYMBOL_GPL(mt76x2_read_rx_gain);
@ -308,53 +307,49 @@ void mt76x2_get_rate_power(struct mt76x02_dev *dev, struct mt76_rate_power *t,
memset(t, 0, sizeof(*t));
val = mt76x02_eeprom_get(&dev->mt76, MT_EE_TX_POWER_CCK);
val = mt76x02_eeprom_get(dev, MT_EE_TX_POWER_CCK);
t->cck[0] = t->cck[1] = mt76x02_rate_power_val(val);
t->cck[2] = t->cck[3] = mt76x02_rate_power_val(val >> 8);
if (is_5ghz)
val = mt76x02_eeprom_get(&dev->mt76,
MT_EE_TX_POWER_OFDM_5G_6M);
val = mt76x02_eeprom_get(dev, MT_EE_TX_POWER_OFDM_5G_6M);
else
val = mt76x02_eeprom_get(&dev->mt76,
MT_EE_TX_POWER_OFDM_2G_6M);
val = mt76x02_eeprom_get(dev, MT_EE_TX_POWER_OFDM_2G_6M);
t->ofdm[0] = t->ofdm[1] = mt76x02_rate_power_val(val);
t->ofdm[2] = t->ofdm[3] = mt76x02_rate_power_val(val >> 8);
if (is_5ghz)
val = mt76x02_eeprom_get(&dev->mt76,
MT_EE_TX_POWER_OFDM_5G_24M);
val = mt76x02_eeprom_get(dev, MT_EE_TX_POWER_OFDM_5G_24M);
else
val = mt76x02_eeprom_get(&dev->mt76,
MT_EE_TX_POWER_OFDM_2G_24M);
val = mt76x02_eeprom_get(dev, MT_EE_TX_POWER_OFDM_2G_24M);
t->ofdm[4] = t->ofdm[5] = mt76x02_rate_power_val(val);
t->ofdm[6] = t->ofdm[7] = mt76x02_rate_power_val(val >> 8);
val = mt76x02_eeprom_get(&dev->mt76, MT_EE_TX_POWER_HT_MCS0);
val = mt76x02_eeprom_get(dev, MT_EE_TX_POWER_HT_MCS0);
t->ht[0] = t->ht[1] = mt76x02_rate_power_val(val);
t->ht[2] = t->ht[3] = mt76x02_rate_power_val(val >> 8);
val = mt76x02_eeprom_get(&dev->mt76, MT_EE_TX_POWER_HT_MCS4);
val = mt76x02_eeprom_get(dev, MT_EE_TX_POWER_HT_MCS4);
t->ht[4] = t->ht[5] = mt76x02_rate_power_val(val);
t->ht[6] = t->ht[7] = mt76x02_rate_power_val(val >> 8);
val = mt76x02_eeprom_get(&dev->mt76, MT_EE_TX_POWER_HT_MCS8);
val = mt76x02_eeprom_get(dev, MT_EE_TX_POWER_HT_MCS8);
t->ht[8] = t->ht[9] = mt76x02_rate_power_val(val);
t->ht[10] = t->ht[11] = mt76x02_rate_power_val(val >> 8);
val = mt76x02_eeprom_get(&dev->mt76, MT_EE_TX_POWER_HT_MCS12);
val = mt76x02_eeprom_get(dev, MT_EE_TX_POWER_HT_MCS12);
t->ht[12] = t->ht[13] = mt76x02_rate_power_val(val);
t->ht[14] = t->ht[15] = mt76x02_rate_power_val(val >> 8);
val = mt76x02_eeprom_get(&dev->mt76, MT_EE_TX_POWER_VHT_MCS0);
val = mt76x02_eeprom_get(dev, MT_EE_TX_POWER_VHT_MCS0);
t->vht[0] = t->vht[1] = mt76x02_rate_power_val(val);
t->vht[2] = t->vht[3] = mt76x02_rate_power_val(val >> 8);
val = mt76x02_eeprom_get(&dev->mt76, MT_EE_TX_POWER_VHT_MCS4);
val = mt76x02_eeprom_get(dev, MT_EE_TX_POWER_VHT_MCS4);
t->vht[4] = t->vht[5] = mt76x02_rate_power_val(val);
t->vht[6] = t->vht[7] = mt76x02_rate_power_val(val >> 8);
val = mt76x02_eeprom_get(&dev->mt76, MT_EE_TX_POWER_VHT_MCS8);
val = mt76x02_eeprom_get(dev, MT_EE_TX_POWER_VHT_MCS8);
if (!is_5ghz)
val >>= 8;
t->vht[8] = t->vht[9] = mt76x02_rate_power_val(val >> 8);
@ -390,7 +385,7 @@ mt76x2_get_power_info_2g(struct mt76x02_dev *dev,
t->chain[chain].target_power = data[2];
t->chain[chain].delta = mt76x02_sign_extend_optional(data[delta_idx], 7);
val = mt76x02_eeprom_get(&dev->mt76, MT_EE_RF_2G_TSSI_OFF_TXPOWER);
val = mt76x02_eeprom_get(dev, MT_EE_RF_2G_TSSI_OFF_TXPOWER);
t->target_power = val >> 8;
}
@ -441,7 +436,7 @@ mt76x2_get_power_info_5g(struct mt76x02_dev *dev,
t->chain[chain].target_power = data[2];
t->chain[chain].delta = mt76x02_sign_extend_optional(data[delta_idx], 7);
val = mt76x02_eeprom_get(&dev->mt76, MT_EE_RF_2G_RX_HIGH_GAIN);
val = mt76x02_eeprom_get(dev, MT_EE_RF_2G_RX_HIGH_GAIN);
t->target_power = val & 0xff;
}
@ -453,8 +448,8 @@ void mt76x2_get_power_info(struct mt76x02_dev *dev,
memset(t, 0, sizeof(*t));
bw40 = mt76x02_eeprom_get(&dev->mt76, MT_EE_TX_POWER_DELTA_BW40);
bw80 = mt76x02_eeprom_get(&dev->mt76, MT_EE_TX_POWER_DELTA_BW80);
bw40 = mt76x02_eeprom_get(dev, MT_EE_TX_POWER_DELTA_BW40);
bw80 = mt76x02_eeprom_get(dev, MT_EE_TX_POWER_DELTA_BW80);
if (chan->band == NL80211_BAND_5GHZ) {
bw40 >>= 8;
@ -469,7 +464,7 @@ void mt76x2_get_power_info(struct mt76x02_dev *dev,
MT_EE_TX_POWER_1_START_2G);
}
if (mt76x02_tssi_enabled(&dev->mt76) ||
if (mt76x2_tssi_enabled(dev) ||
!mt76x02_field_valid(t->target_power))
t->target_power = t->chain[0].target_power;
@ -486,23 +481,20 @@ int mt76x2_get_temp_comp(struct mt76x02_dev *dev, struct mt76x2_temp_comp *t)
memset(t, 0, sizeof(*t));
if (!mt76x02_temp_tx_alc_enabled(&dev->mt76))
if (!mt76x2_temp_tx_alc_enabled(dev))
return -EINVAL;
if (!mt76x02_ext_pa_enabled(&dev->mt76, band))
if (!mt76x02_ext_pa_enabled(dev, band))
return -EINVAL;
val = mt76x02_eeprom_get(&dev->mt76, MT_EE_TX_POWER_EXT_PA_5G) >> 8;
val = mt76x02_eeprom_get(dev, MT_EE_TX_POWER_EXT_PA_5G) >> 8;
t->temp_25_ref = val & 0x7f;
if (band == NL80211_BAND_5GHZ) {
slope = mt76x02_eeprom_get(&dev->mt76,
MT_EE_RF_TEMP_COMP_SLOPE_5G);
bounds = mt76x02_eeprom_get(&dev->mt76,
MT_EE_TX_POWER_EXT_PA_5G);
slope = mt76x02_eeprom_get(dev, MT_EE_RF_TEMP_COMP_SLOPE_5G);
bounds = mt76x02_eeprom_get(dev, MT_EE_TX_POWER_EXT_PA_5G);
} else {
slope = mt76x02_eeprom_get(&dev->mt76,
MT_EE_RF_TEMP_COMP_SLOPE_2G);
bounds = mt76x02_eeprom_get(&dev->mt76,
slope = mt76x02_eeprom_get(dev, MT_EE_RF_TEMP_COMP_SLOPE_2G);
bounds = mt76x02_eeprom_get(dev,
MT_EE_TX_POWER_DELTA_BW80) >> 8;
}
@ -523,7 +515,7 @@ int mt76x2_eeprom_init(struct mt76x02_dev *dev)
if (ret)
return ret;
mt76x02_eeprom_parse_hw_cap(&dev->mt76);
mt76x02_eeprom_parse_hw_cap(dev);
mt76x2_eeprom_get_macaddr(dev);
mt76_eeprom_override(&dev->mt76);
dev->mt76.macaddr[0] &= ~BIT(1);


@ -62,7 +62,7 @@ void mt76x2_read_rx_gain(struct mt76x02_dev *dev);
static inline bool
mt76x2_has_ext_lna(struct mt76x02_dev *dev)
{
u32 val = mt76x02_eeprom_get(&dev->mt76, MT_EE_NIC_CONF_1);
u32 val = mt76x02_eeprom_get(dev, MT_EE_NIC_CONF_1);
if (dev->mt76.chandef.chan->band == NL80211_BAND_2GHZ)
return val & MT_EE_NIC_CONF_1_LNA_EXT_2G;
@ -70,4 +70,25 @@ mt76x2_has_ext_lna(struct mt76x02_dev *dev)
return val & MT_EE_NIC_CONF_1_LNA_EXT_5G;
}
static inline bool
mt76x2_temp_tx_alc_enabled(struct mt76x02_dev *dev)
{
u16 val;
val = mt76x02_eeprom_get(dev, MT_EE_TX_POWER_EXT_PA_5G);
if (!(val & BIT(15)))
return false;
return mt76x02_eeprom_get(dev, MT_EE_NIC_CONF_1) &
MT_EE_NIC_CONF_1_TEMP_TX_ALC;
}
static inline bool
mt76x2_tssi_enabled(struct mt76x02_dev *dev)
{
return !mt76x2_temp_tx_alc_enabled(dev) &&
(mt76x02_eeprom_get(dev, MT_EE_NIC_CONF_1) &
MT_EE_NIC_CONF_1_TX_ALC_EN);
}
#endif


@ -167,6 +167,9 @@ void mt76x2_init_device(struct mt76x02_dev *dev)
hw->max_report_rates = 7;
hw->max_rate_tries = 1;
hw->extra_tx_headroom = 2;
if (mt76_is_usb(dev))
hw->extra_tx_headroom += sizeof(struct mt76x02_txwi) +
MT_DMA_HDR_LEN;
hw->sta_data_size = sizeof(struct mt76x02_sta);
hw->vif_data_size = sizeof(struct mt76x02_vif);


@ -59,7 +59,6 @@ EXPORT_SYMBOL_GPL(mt76x2_mcu_set_channel);
int mt76x2_mcu_load_cr(struct mt76x02_dev *dev, u8 type, u8 temp_level,
u8 channel)
{
struct mt76_dev *mdev = &dev->mt76;
struct sk_buff *skb;
struct {
u8 cr_mode;
@ -76,8 +75,8 @@ int mt76x2_mcu_load_cr(struct mt76x02_dev *dev, u8 type, u8 temp_level,
u32 val;
val = BIT(31);
val |= (mt76x02_eeprom_get(mdev, MT_EE_NIC_CONF_0) >> 8) & 0x00ff;
val |= (mt76x02_eeprom_get(mdev, MT_EE_NIC_CONF_1) << 8) & 0xff00;
val |= (mt76x02_eeprom_get(dev, MT_EE_NIC_CONF_0) >> 8) & 0x00ff;
val |= (mt76x02_eeprom_get(dev, MT_EE_NIC_CONF_1) << 8) & 0xff00;
msg.cfg = cpu_to_le32(val);
/* first set the channel without the extension channel info */

View File

@ -100,8 +100,6 @@ void mt76x2_phy_set_txpower_regs(struct mt76x02_dev *dev,
enum nl80211_band band);
void mt76x2_configure_tx_delay(struct mt76x02_dev *dev,
enum nl80211_band band, u8 bw);
void mt76x2_phy_set_bw(struct mt76x02_dev *dev, int width, u8 ctrl);
void mt76x2_phy_set_band(struct mt76x02_dev *dev, int band, bool primary_upper);
void mt76x2_apply_gain_adj(struct mt76x02_dev *dev);
#endif


@ -43,7 +43,7 @@ mt76x2_fixup_xtal(struct mt76x02_dev *dev)
u16 eep_val;
s8 offset = 0;
eep_val = mt76x02_eeprom_get(&dev->mt76, MT_EE_XTAL_TRIM_2);
eep_val = mt76x02_eeprom_get(dev, MT_EE_XTAL_TRIM_2);
offset = eep_val & 0x7f;
if ((eep_val & 0xff) == 0xff)
@ -53,7 +53,7 @@ mt76x2_fixup_xtal(struct mt76x02_dev *dev)
eep_val >>= 8;
if (eep_val == 0x00 || eep_val == 0xff) {
eep_val = mt76x02_eeprom_get(&dev->mt76, MT_EE_XTAL_TRIM_1);
eep_val = mt76x02_eeprom_get(dev, MT_EE_XTAL_TRIM_1);
eep_val &= 0xff;
if (eep_val == 0x00 || eep_val == 0xff)
@ -64,7 +64,7 @@ mt76x2_fixup_xtal(struct mt76x02_dev *dev)
mt76_rmw_field(dev, MT_XO_CTRL5, MT_XO_CTRL5_C2_VAL, eep_val + offset);
mt76_set(dev, MT_XO_CTRL6, MT_XO_CTRL6_C2_CTRL);
eep_val = mt76x02_eeprom_get(&dev->mt76, MT_EE_NIC_CONF_2);
eep_val = mt76x02_eeprom_get(dev, MT_EE_NIC_CONF_2);
switch (FIELD_GET(MT_EE_NIC_CONF_2_XTAL_OPTION, eep_val)) {
case 0:
mt76_wr(dev, MT_XO_CTRL7, 0x5c1fee80);
@ -143,14 +143,14 @@ static int mt76x2_mac_reset(struct mt76x02_dev *dev, bool hard)
mt76_wr(dev, MT_WCID_DROP_BASE + i * 4, 0);
for (i = 0; i < 256; i++)
mt76x02_mac_wcid_setup(&dev->mt76, i, 0, NULL);
mt76x02_mac_wcid_setup(dev, i, 0, NULL);
for (i = 0; i < MT_MAX_VIFS; i++)
mt76x02_mac_wcid_setup(&dev->mt76, MT_VIF_WCID(i), i, NULL);
mt76x02_mac_wcid_setup(dev, MT_VIF_WCID(i), i, NULL);
for (i = 0; i < 16; i++)
for (k = 0; k < 4; k++)
mt76x02_mac_shared_key_setup(&dev->mt76, i, k, NULL);
mt76x02_mac_shared_key_setup(dev, i, k, NULL);
for (i = 0; i < 8; i++) {
mt76x2_mac_set_bssid(dev, i, null_addr);
@ -168,7 +168,7 @@ static int mt76x2_mac_reset(struct mt76x02_dev *dev, bool hard)
MT_CH_TIME_CFG_EIFS_AS_BUSY |
FIELD_PREP(MT_CH_TIME_CFG_CH_TIMER_CLR, 1));
mt76x02_set_beacon_offsets(&dev->mt76);
mt76x02_set_beacon_offsets(dev);
mt76x2_set_tx_ackto(dev);
@ -337,7 +337,7 @@ void mt76x2_stop_hardware(struct mt76x02_dev *dev)
{
cancel_delayed_work_sync(&dev->cal_work);
cancel_delayed_work_sync(&dev->mac_work);
mt76x02_mcu_set_radio_state(&dev->mt76, false, true);
mt76x02_mcu_set_radio_state(dev, false, true);
mt76x2_mac_stop(dev, false);
}
@ -347,7 +347,7 @@ void mt76x2_cleanup(struct mt76x02_dev *dev)
tasklet_disable(&dev->pre_tbtt_tasklet);
mt76x2_stop_hardware(dev);
mt76x02_dma_cleanup(dev);
mt76x02_mcu_cleanup(&dev->mt76);
mt76x02_mcu_cleanup(dev);
}
struct mt76x02_dev *mt76x2_alloc_device(struct device *pdev)


@ -36,7 +36,7 @@ mt76_write_beacon(struct mt76x02_dev *dev, int offset, struct sk_buff *skb)
if (WARN_ON_ONCE(beacon_len < skb->len + sizeof(struct mt76x02_txwi)))
return -ENOSPC;
mt76x02_mac_write_txwi(&dev->mt76, &txwi, skb, NULL, NULL, skb->len);
mt76x02_mac_write_txwi(dev, &txwi, skb, NULL, NULL, skb->len);
mt76_wr_copy(dev, offset, &txwi, sizeof(txwi));
offset += sizeof(txwi);


@ -172,7 +172,7 @@ mt76x2_sta_ps(struct mt76_dev *mdev, struct ieee80211_sta *sta, bool ps)
int idx = msta->wcid.idx;
mt76_stop_tx_queues(&dev->mt76, sta, true);
mt76x02_mac_wcid_set_drop(&dev->mt76, idx, ps);
mt76x02_mac_wcid_set_drop(dev, idx, ps);
}
static void


@ -140,7 +140,7 @@ mt76pci_load_firmware(struct mt76x02_dev *dev)
mt76_wr(dev, MT_MCU_PCIE_REMAP_BASE4, 0);
val = mt76x02_eeprom_get(&dev->mt76, MT_EE_NIC_CONF_2);
val = mt76x02_eeprom_get(dev, MT_EE_NIC_CONF_2);
if (FIELD_GET(MT_EE_NIC_CONF_2_XTAL_OPTION, val) == 1)
mt76_set(dev, MT_MCU_COM_REG0, BIT(30));
@ -152,8 +152,8 @@ mt76pci_load_firmware(struct mt76x02_dev *dev)
return -ETIMEDOUT;
}
mt76x02_set_ethtool_fwver(dev, hdr);
dev_info(dev->mt76.dev, "Firmware running!\n");
mt76x02_set_ethtool_fwver(&dev->mt76, hdr);
release_firmware(fw);
@ -183,6 +183,6 @@ int mt76x2_mcu_init(struct mt76x02_dev *dev)
if (ret)
return ret;
mt76x02_mcu_function_select(&dev->mt76, Q_SELECT, 1, true);
mt76x02_mcu_function_select(dev, Q_SELECT, 1, true);
return 0;
}


@ -26,7 +26,7 @@ mt76x2_phy_tssi_init_cal(struct mt76x02_dev *dev)
struct ieee80211_channel *chan = dev->mt76.chandef.chan;
u32 flag = 0;
if (!mt76x02_tssi_enabled(&dev->mt76))
if (!mt76x2_tssi_enabled(dev))
return false;
if (mt76x2_channel_silent(dev))
@ -35,10 +35,10 @@ mt76x2_phy_tssi_init_cal(struct mt76x02_dev *dev)
if (chan->band == NL80211_BAND_5GHZ)
flag |= BIT(0);
if (mt76x02_ext_pa_enabled(&dev->mt76, chan->band))
if (mt76x02_ext_pa_enabled(dev, chan->band))
flag |= BIT(8);
mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_TSSI, flag, true);
mt76x02_mcu_calibrate(dev, MCU_CAL_TSSI, flag, true);
dev->cal.tssi_cal_done = true;
return true;
}
@ -62,13 +62,13 @@ mt76x2_phy_channel_calibrate(struct mt76x02_dev *dev, bool mac_stopped)
mt76x2_mac_stop(dev, false);
if (is_5ghz)
mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_LC, 0, true);
mt76x02_mcu_calibrate(dev, MCU_CAL_LC, 0, true);
mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_TX_LOFT, is_5ghz, true);
mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_TXIQ, is_5ghz, true);
mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_RXIQC_FI, is_5ghz, true);
mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_TEMP_SENSOR, 0, true);
mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_TX_SHAPING, 0, true);
mt76x02_mcu_calibrate(dev, MCU_CAL_TX_LOFT, is_5ghz, true);
mt76x02_mcu_calibrate(dev, MCU_CAL_TXIQ, is_5ghz, true);
mt76x02_mcu_calibrate(dev, MCU_CAL_RXIQC_FI, is_5ghz, true);
mt76x02_mcu_calibrate(dev, MCU_CAL_TEMP_SENSOR, 0, true);
mt76x02_mcu_calibrate(dev, MCU_CAL_TX_SHAPING, 0, true);
if (!mac_stopped)
mt76x2_mac_resume(dev);
@ -124,39 +124,6 @@ void mt76x2_phy_set_antenna(struct mt76x02_dev *dev)
mt76_wr(dev, MT_BBP(AGC, 0), val);
}
static void
mt76x2_get_agc_gain(struct mt76x02_dev *dev, u8 *dest)
{
dest[0] = mt76_get_field(dev, MT_BBP(AGC, 8), MT_BBP_AGC_GAIN);
dest[1] = mt76_get_field(dev, MT_BBP(AGC, 9), MT_BBP_AGC_GAIN);
}
static int
mt76x2_get_rssi_gain_thresh(struct mt76x02_dev *dev)
{
switch (dev->mt76.chandef.width) {
case NL80211_CHAN_WIDTH_80:
return -62;
case NL80211_CHAN_WIDTH_40:
return -65;
default:
return -68;
}
}
static int
mt76x2_get_low_rssi_gain_thresh(struct mt76x02_dev *dev)
{
switch (dev->mt76.chandef.width) {
case NL80211_CHAN_WIDTH_80:
return -76;
case NL80211_CHAN_WIDTH_40:
return -79;
default:
return -82;
}
}
static void
mt76x2_phy_set_gain_val(struct mt76x02_dev *dev)
{
@ -182,25 +149,6 @@ mt76x2_phy_set_gain_val(struct mt76x02_dev *dev)
mt76x2_dfs_adjust_agc(dev);
}
static void
mt76x2_phy_adjust_vga_gain(struct mt76x02_dev *dev)
{
u32 false_cca;
u8 limit = dev->cal.low_gain > 0 ? 16 : 4;
false_cca = FIELD_GET(MT_RX_STAT_1_CCA_ERRORS, mt76_rr(dev, MT_RX_STAT_1));
dev->cal.false_cca = false_cca;
if (false_cca > 800 && dev->cal.agc_gain_adjust < limit)
dev->cal.agc_gain_adjust += 2;
else if ((false_cca < 10 && dev->cal.agc_gain_adjust > 0) ||
(dev->cal.agc_gain_adjust >= limit && false_cca < 500))
dev->cal.agc_gain_adjust -= 2;
else
return;
mt76x2_phy_set_gain_val(dev);
}
static void
mt76x2_phy_update_channel_gain(struct mt76x02_dev *dev)
{
@ -210,16 +158,17 @@ mt76x2_phy_update_channel_gain(struct mt76x02_dev *dev)
int low_gain;
u32 val;
dev->cal.avg_rssi_all = mt76x02_phy_get_min_avg_rssi(&dev->mt76);
dev->cal.avg_rssi_all = mt76x02_phy_get_min_avg_rssi(dev);
low_gain = (dev->cal.avg_rssi_all > mt76x2_get_rssi_gain_thresh(dev)) +
(dev->cal.avg_rssi_all > mt76x2_get_low_rssi_gain_thresh(dev));
low_gain = (dev->cal.avg_rssi_all > mt76x02_get_rssi_gain_thresh(dev)) +
(dev->cal.avg_rssi_all > mt76x02_get_low_rssi_gain_thresh(dev));
gain_change = (dev->cal.low_gain & 2) ^ (low_gain & 2);
dev->cal.low_gain = low_gain;
if (!gain_change) {
mt76x2_phy_adjust_vga_gain(dev);
if (mt76x02_phy_adjust_vga_gain(dev))
mt76x2_phy_set_gain_val(dev);
return;
}
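
The low_gain value computed above is just the count of thresholds the averaged RSSI clears, so it ranges 0..2, and the full gain tables are only reprogrammed when bit 1 flips, i.e. when the signal crosses the upper threshold now shared via mt76x02_get_rssi_gain_thresh(); otherwise the driver falls back to the incremental +/-2 VGA adjustment. A worked example using the 80 MHz thresholds of -62 and -76 dBm (RSSI values are illustrative only):

/* Worked example of the low_gain bookkeeping above. */
#include <stdio.h>

static int low_gain(int avg_rssi)
{
	return (avg_rssi > -62) + (avg_rssi > -76);
}

int main(void)
{
	int prev = low_gain(-70);	/* 1: mid range */
	int cur  = low_gain(-50);	/* 2: strong signal */

	/* Bit 1 flipped, so the full gain tables would be reprogrammed;
	 * otherwise only the +/-2 VGA nudge runs.
	 */
	printf("gain_change=%d\n", (prev & 2) ^ (cur & 2));
	return 0;
}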
@ -337,8 +286,8 @@ int mt76x2_phy_set_channel(struct mt76x02_dev *dev,
mt76x2_configure_tx_delay(dev, band, bw);
mt76x2_phy_set_txpower(dev);
mt76x2_phy_set_band(dev, chan->band, ch_group_index & 1);
mt76x2_phy_set_bw(dev, chandef->width, ch_group_index);
mt76x02_phy_set_band(dev, chan->band, ch_group_index & 1);
mt76x02_phy_set_bw(dev, chandef->width, ch_group_index);
mt76_rmw(dev, MT_EXT_CCA_CFG,
(MT_EXT_CCA_CFG_CCA0 |
@ -361,17 +310,17 @@ int mt76x2_phy_set_channel(struct mt76x02_dev *dev,
mt76_set(dev, MT_BBP(RXO, 13), BIT(10));
if (!dev->cal.init_cal_done) {
u8 val = mt76x02_eeprom_get(&dev->mt76, MT_EE_BT_RCAL_RESULT);
u8 val = mt76x02_eeprom_get(dev, MT_EE_BT_RCAL_RESULT);
if (val != 0xff)
mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_R, 0, true);
mt76x02_mcu_calibrate(dev, MCU_CAL_R, 0, true);
}
mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_RXDCOC, channel, true);
mt76x02_mcu_calibrate(dev, MCU_CAL_RXDCOC, channel, true);
/* Rx LPF calibration */
if (!dev->cal.init_cal_done)
mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_RC, 0, true);
mt76x02_mcu_calibrate(dev, MCU_CAL_RC, 0, true);
dev->cal.init_cal_done = true;
@ -384,14 +333,11 @@ int mt76x2_phy_set_channel(struct mt76x02_dev *dev,
if (scan)
return 0;
dev->cal.low_gain = -1;
mt76x2_phy_channel_calibrate(dev, true);
mt76x2_get_agc_gain(dev, dev->cal.agc_gain_init);
memcpy(dev->cal.agc_gain_cur, dev->cal.agc_gain_init,
sizeof(dev->cal.agc_gain_cur));
mt76x02_init_agc_gain(dev);
/* init default values for temp compensation */
if (mt76x02_tssi_enabled(&dev->mt76)) {
if (mt76x2_tssi_enabled(dev)) {
mt76_rmw_field(dev, MT_TX_ALC_CFG_1, MT_TX_ALC_CFG_1_TEMP_COMP,
0x38);
mt76_rmw_field(dev, MT_TX_ALC_CFG_2, MT_TX_ALC_CFG_2_TEMP_COMP,
@ -449,7 +395,7 @@ int mt76x2_phy_start(struct mt76x02_dev *dev)
{
int ret;
ret = mt76x02_mcu_set_radio_state(&dev->mt76, true, true);
ret = mt76x02_mcu_set_radio_state(dev, true, true);
if (ret)
return ret;


@ -65,7 +65,7 @@ void mt76x2_phy_set_txpower_regs(struct mt76x02_dev *dev,
mt76_wr(dev, MT_TX_ALC_CFG_2, 0x35160a00);
mt76_wr(dev, MT_TX_ALC_CFG_3, 0x35160a06);
if (mt76x02_ext_pa_enabled(&dev->mt76, band)) {
if (mt76x02_ext_pa_enabled(dev, band)) {
mt76_wr(dev, MT_RF_PA_MODE_ADJ0, 0x0000ec00);
mt76_wr(dev, MT_RF_PA_MODE_ADJ1, 0x0000ec00);
} else {
@ -76,7 +76,7 @@ void mt76x2_phy_set_txpower_regs(struct mt76x02_dev *dev,
pa_mode[0] = 0x0000ffff;
pa_mode[1] = 0x00ff00ff;
if (mt76x02_ext_pa_enabled(&dev->mt76, band)) {
if (mt76x02_ext_pa_enabled(dev, band)) {
mt76_wr(dev, MT_TX_ALC_CFG_2, 0x2f0f0400);
mt76_wr(dev, MT_TX_ALC_CFG_3, 0x2f0f0476);
} else {
@ -84,7 +84,7 @@ void mt76x2_phy_set_txpower_regs(struct mt76x02_dev *dev,
mt76_wr(dev, MT_TX_ALC_CFG_3, 0x1b0f0476);
}
if (mt76x02_ext_pa_enabled(&dev->mt76, band))
if (mt76x02_ext_pa_enabled(dev, band))
pa_mode_adj = 0x04000000;
else
pa_mode_adj = 0;
@ -98,7 +98,7 @@ void mt76x2_phy_set_txpower_regs(struct mt76x02_dev *dev,
mt76_wr(dev, MT_RF_PA_MODE_CFG0, pa_mode[0]);
mt76_wr(dev, MT_RF_PA_MODE_CFG1, pa_mode[1]);
if (mt76x02_ext_pa_enabled(&dev->mt76, band)) {
if (mt76x02_ext_pa_enabled(dev, band)) {
u32 val;
if (band == NL80211_BAND_2GHZ)
@ -187,7 +187,7 @@ void mt76x2_phy_set_txpower(struct mt76x02_dev *dev)
dev->target_power_delta[1] = txp_1 - txp.chain[0].target_power;
dev->mt76.rate_power = t;
mt76x02_phy_set_txpower(&dev->mt76, txp_0, txp_1);
mt76x02_phy_set_txpower(dev, txp_0, txp_1);
}
EXPORT_SYMBOL_GPL(mt76x2_phy_set_txpower);
@ -196,7 +196,7 @@ void mt76x2_configure_tx_delay(struct mt76x02_dev *dev,
{
u32 cfg0, cfg1;
if (mt76x02_ext_pa_enabled(&dev->mt76, band)) {
if (mt76x02_ext_pa_enabled(dev, band)) {
cfg0 = bw ? 0x000b0c01 : 0x00101101;
cfg1 = 0x00011414;
} else {
@ -210,50 +210,6 @@ void mt76x2_configure_tx_delay(struct mt76x02_dev *dev,
}
EXPORT_SYMBOL_GPL(mt76x2_configure_tx_delay);
void mt76x2_phy_set_bw(struct mt76x02_dev *dev, int width, u8 ctrl)
{
int core_val, agc_val;
switch (width) {
case NL80211_CHAN_WIDTH_80:
core_val = 3;
agc_val = 7;
break;
case NL80211_CHAN_WIDTH_40:
core_val = 2;
agc_val = 3;
break;
default:
core_val = 0;
agc_val = 1;
break;
}
mt76_rmw_field(dev, MT_BBP(CORE, 1), MT_BBP_CORE_R1_BW, core_val);
mt76_rmw_field(dev, MT_BBP(AGC, 0), MT_BBP_AGC_R0_BW, agc_val);
mt76_rmw_field(dev, MT_BBP(AGC, 0), MT_BBP_AGC_R0_CTRL_CHAN, ctrl);
mt76_rmw_field(dev, MT_BBP(TXBE, 0), MT_BBP_TXBE_R0_CTRL_CHAN, ctrl);
}
EXPORT_SYMBOL_GPL(mt76x2_phy_set_bw);
void mt76x2_phy_set_band(struct mt76x02_dev *dev, int band, bool primary_upper)
{
switch (band) {
case NL80211_BAND_2GHZ:
mt76_set(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_2G);
mt76_clear(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_5G);
break;
case NL80211_BAND_5GHZ:
mt76_clear(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_2G);
mt76_set(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_5G);
break;
}
mt76_rmw_field(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_UPPER_40M,
primary_upper);
}
EXPORT_SYMBOL_GPL(mt76x2_phy_set_band);
void mt76x2_phy_tssi_compensate(struct mt76x02_dev *dev, bool wait)
{
struct ieee80211_channel *chan = dev->mt76.chandef.chan;
@ -275,7 +231,7 @@ void mt76x2_phy_tssi_compensate(struct mt76x02_dev *dev, bool wait)
dev->cal.tssi_comp_pending = false;
mt76x2_get_power_info(dev, &txp, chan);
if (mt76x02_ext_pa_enabled(&dev->mt76, chan->band))
if (mt76x02_ext_pa_enabled(dev, chan->band))
t.pa_mode = 1;
t.cal_mode = BIT(1);
@ -289,8 +245,7 @@ void mt76x2_phy_tssi_compensate(struct mt76x02_dev *dev, bool wait)
return;
usleep_range(10000, 20000);
mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_DPD,
chan->hw_value, wait);
mt76x02_mcu_calibrate(dev, MCU_CAL_DPD, chan->hw_value, wait);
dev->cal.dpd_cal_done = true;
}
}


@ -130,7 +130,7 @@ static int mt76x2u_init_eeprom(struct mt76x02_dev *dev)
put_unaligned_le32(val, dev->mt76.eeprom.data + i);
}
mt76x02_eeprom_parse_hw_cap(&dev->mt76);
mt76x02_eeprom_parse_hw_cap(dev);
return 0;
}
@ -204,8 +204,7 @@ int mt76x2u_init_hardware(struct mt76x02_dev *dev)
if (err < 0)
return err;
mt76x02_mac_setaddr(&dev->mt76,
dev->mt76.eeprom.data + MT_EE_MAC_ADDR);
mt76x02_mac_setaddr(dev, dev->mt76.eeprom.data + MT_EE_MAC_ADDR);
dev->mt76.rxfilter = mt76_rr(dev, MT_RX_FILTR_CFG);
mt76x2u_init_beacon_offsets(dev);
@ -237,8 +236,8 @@ int mt76x2u_init_hardware(struct mt76x02_dev *dev)
if (err < 0)
return err;
mt76x02_phy_set_rxpath(&dev->mt76);
mt76x02_phy_set_txdac(&dev->mt76);
mt76x02_phy_set_rxpath(dev);
mt76x02_phy_set_txdac(dev);
return mt76x2u_mac_stop(dev);
}
@ -303,7 +302,7 @@ void mt76x2u_stop_hw(struct mt76x02_dev *dev)
void mt76x2u_cleanup(struct mt76x02_dev *dev)
{
mt76x02_mcu_set_radio_state(&dev->mt76, false, false);
mt76x02_mcu_set_radio_state(dev, false, false);
mt76x2u_stop_hw(dev);
mt76u_queues_deinit(&dev->mt76);
mt76u_mcu_deinit(&dev->mt76);


@ -32,7 +32,7 @@ static void mt76x2u_mac_fixup_xtal(struct mt76x02_dev *dev)
s8 offset = 0;
u16 eep_val;
eep_val = mt76x02_eeprom_get(&dev->mt76, MT_EE_XTAL_TRIM_2);
eep_val = mt76x02_eeprom_get(dev, MT_EE_XTAL_TRIM_2);
offset = eep_val & 0x7f;
if ((eep_val & 0xff) == 0xff)
@ -42,7 +42,7 @@ static void mt76x2u_mac_fixup_xtal(struct mt76x02_dev *dev)
eep_val >>= 8;
if (eep_val == 0x00 || eep_val == 0xff) {
eep_val = mt76x02_eeprom_get(&dev->mt76, MT_EE_XTAL_TRIM_1);
eep_val = mt76x02_eeprom_get(dev, MT_EE_XTAL_TRIM_1);
eep_val &= 0xff;
if (eep_val == 0x00 || eep_val == 0xff)
@ -67,7 +67,7 @@ static void mt76x2u_mac_fixup_xtal(struct mt76x02_dev *dev)
/* init fce */
mt76_clear(dev, MT_FCE_L2_STUFF, MT_FCE_L2_STUFF_WR_MPDU_LEN_EN);
eep_val = mt76x02_eeprom_get(&dev->mt76, MT_EE_NIC_CONF_2);
eep_val = mt76x02_eeprom_get(dev, MT_EE_NIC_CONF_2);
switch (FIELD_GET(MT_EE_NIC_CONF_2_XTAL_OPTION, eep_val)) {
case 0:
mt76_wr(dev, MT_XO_CTRL7, 0x5c1fee80);

View File

@@ -50,9 +50,9 @@ static int mt76x2u_add_interface(struct ieee80211_hw *hw,
struct mt76x02_dev *dev = hw->priv;
if (!ether_addr_equal(dev->mt76.macaddr, vif->addr))
mt76x02_mac_setaddr(&dev->mt76, vif->addr);
mt76x02_mac_setaddr(dev, vif->addr);
mt76x02_vif_init(&dev->mt76, vif, 0);
mt76x02_vif_init(dev, vif, 0);
return 0;
}

View File

@@ -137,7 +137,7 @@ static int mt76x2u_mcu_load_rom_patch(struct mt76x02_dev *dev)
mt76_wr(dev, MT_VEND_ADDR(CFG, MT_USB_U3DMA_CFG), val);
/* vendor reset */
mt76x02u_mcu_fw_reset(&dev->mt76);
mt76x02u_mcu_fw_reset(dev);
usleep_range(5000, 10000);
/* enable FCE to send in-band cmd */
@@ -151,7 +151,7 @@ static int mt76x2u_mcu_load_rom_patch(struct mt76x02_dev *dev)
/* FCE skip_fs_en */
mt76_wr(dev, MT_FCE_SKIP_FS, 0x3);
err = mt76x02u_mcu_fw_send_data(&dev->mt76, fw->data + sizeof(*hdr),
err = mt76x02u_mcu_fw_send_data(dev, fw->data + sizeof(*hdr),
fw->size - sizeof(*hdr),
MCU_ROM_PATCH_MAX_PAYLOAD,
MT76U_MCU_ROM_PATCH_OFFSET);
@@ -210,7 +210,7 @@ static int mt76x2u_mcu_load_firmware(struct mt76x02_dev *dev)
dev_info(dev->mt76.dev, "Build Time: %.16s\n", hdr->build_time);
/* vendor reset */
mt76x02u_mcu_fw_reset(&dev->mt76);
mt76x02u_mcu_fw_reset(dev);
usleep_range(5000, 10000);
/* enable USB_DMA_CFG */
@@ -230,7 +230,7 @@ static int mt76x2u_mcu_load_firmware(struct mt76x02_dev *dev)
mt76_wr(dev, MT_FCE_SKIP_FS, 0x3);
/* load ILM */
err = mt76x02u_mcu_fw_send_data(&dev->mt76, fw->data + sizeof(*hdr),
err = mt76x02u_mcu_fw_send_data(dev, fw->data + sizeof(*hdr),
ilm_len, MCU_FW_URB_MAX_PAYLOAD,
MT76U_MCU_ILM_OFFSET);
if (err < 0) {
@@ -241,8 +241,7 @@ static int mt76x2u_mcu_load_firmware(struct mt76x02_dev *dev)
/* load DLM */
if (mt76xx_rev(dev) >= MT76XX_REV_E3)
dlm_offset += 0x800;
err = mt76x02u_mcu_fw_send_data(&dev->mt76,
fw->data + sizeof(*hdr) + ilm_len,
err = mt76x02u_mcu_fw_send_data(dev, fw->data + sizeof(*hdr) + ilm_len,
dlm_len, MCU_FW_URB_MAX_PAYLOAD,
dlm_offset);
if (err < 0) {
@@ -260,8 +259,8 @@ static int mt76x2u_mcu_load_firmware(struct mt76x02_dev *dev)
mt76_set(dev, MT_MCU_COM_REG0, BIT(1));
/* enable FCE to send in-band cmd */
mt76_wr(dev, MT_FCE_PSE_CTRL, 0x1);
mt76x02_set_ethtool_fwver(dev, hdr);
dev_dbg(dev->mt76.dev, "firmware running\n");
mt76x02_set_ethtool_fwver(&dev->mt76, hdr);
out:
release_firmware(fw);
@@ -283,10 +282,9 @@ int mt76x2u_mcu_init(struct mt76x02_dev *dev)
{
int err;
err = mt76x02_mcu_function_select(&dev->mt76, Q_SELECT,
1, false);
err = mt76x02_mcu_function_select(dev, Q_SELECT, 1, false);
if (err < 0)
return err;
return mt76x02_mcu_set_radio_state(&dev->mt76, true, false);
return mt76x02_mcu_set_radio_state(dev, true, false);
}

View File

@@ -29,12 +29,12 @@ void mt76x2u_phy_channel_calibrate(struct mt76x02_dev *dev)
mt76x2u_mac_stop(dev);
if (is_5ghz)
mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_LC, 0, false);
mt76x02_mcu_calibrate(dev, MCU_CAL_LC, 0, false);
mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_TX_LOFT, is_5ghz, false);
mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_TXIQ, is_5ghz, false);
mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_RXIQC_FI, is_5ghz, false);
mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_TEMP_SENSOR, 0, false);
mt76x02_mcu_calibrate(dev, MCU_CAL_TX_LOFT, is_5ghz, false);
mt76x02_mcu_calibrate(dev, MCU_CAL_TXIQ, is_5ghz, false);
mt76x02_mcu_calibrate(dev, MCU_CAL_RXIQC_FI, is_5ghz, false);
mt76x02_mcu_calibrate(dev, MCU_CAL_TEMP_SENSOR, 0, false);
mt76x2u_mac_resume(dev);
}
@@ -69,7 +69,7 @@ mt76x2u_phy_update_channel_gain(struct mt76x02_dev *dev)
break;
}
dev->cal.avg_rssi_all = mt76x02_phy_get_min_avg_rssi(&dev->mt76);
dev->cal.avg_rssi_all = mt76x02_phy_get_min_avg_rssi(dev);
false_cca = FIELD_GET(MT_RX_STAT_1_CCA_ERRORS,
mt76_rr(dev, MT_RX_STAT_1));
@@ -155,8 +155,8 @@ int mt76x2u_phy_set_channel(struct mt76x02_dev *dev,
mt76x2_configure_tx_delay(dev, chan->band, bw);
mt76x2_phy_set_txpower(dev);
mt76x2_phy_set_band(dev, chan->band, ch_group_index & 1);
mt76x2_phy_set_bw(dev, chandef->width, ch_group_index);
mt76x02_phy_set_band(dev, chan->band, ch_group_index & 1);
mt76x02_phy_set_bw(dev, chandef->width, ch_group_index);
mt76_rmw(dev, MT_EXT_CCA_CFG,
(MT_EXT_CCA_CFG_CCA0 |
@@ -177,18 +177,17 @@ int mt76x2u_phy_set_channel(struct mt76x02_dev *dev,
mt76_set(dev, MT_BBP(RXO, 13), BIT(10));
if (!dev->cal.init_cal_done) {
u8 val = mt76x02_eeprom_get(&dev->mt76, MT_EE_BT_RCAL_RESULT);
u8 val = mt76x02_eeprom_get(dev, MT_EE_BT_RCAL_RESULT);
if (val != 0xff)
mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_R,
0, false);
mt76x02_mcu_calibrate(dev, MCU_CAL_R, 0, false);
}
mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_RXDCOC, channel, false);
mt76x02_mcu_calibrate(dev, MCU_CAL_RXDCOC, channel, false);
/* Rx LPF calibration */
if (!dev->cal.init_cal_done)
mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_RC, 0, false);
mt76x02_mcu_calibrate(dev, MCU_CAL_RC, 0, false);
dev->cal.init_cal_done = true;
mt76_wr(dev, MT_BBP(AGC, 61), 0xff64a4e2);
@@ -203,7 +202,7 @@ int mt76x2u_phy_set_channel(struct mt76x02_dev *dev,
if (scan)
return 0;
if (mt76x02_tssi_enabled(&dev->mt76)) {
if (mt76x2_tssi_enabled(dev)) {
/* init default values for temp compensation */
mt76_rmw_field(dev, MT_TX_ALC_CFG_1, MT_TX_ALC_CFG_1_TEMP_COMP,
0x38);
@@ -218,10 +217,9 @@ int mt76x2u_phy_set_channel(struct mt76x02_dev *dev,
chan = dev->mt76.chandef.chan;
if (chan->band == NL80211_BAND_5GHZ)
flag |= BIT(0);
if (mt76x02_ext_pa_enabled(&dev->mt76, chan->band))
if (mt76x02_ext_pa_enabled(dev, chan->band))
flag |= BIT(8);
mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_TSSI,
flag, false);
mt76x02_mcu_calibrate(dev, MCU_CAL_TSSI, flag, false);
dev->cal.tssi_cal_done = true;
}
}

View File

@@ -96,7 +96,8 @@ mt76_check_agg_ssn(struct mt76_txq *mtxq, struct sk_buff *skb)
{
struct ieee80211_hdr *hdr = (struct ieee80211_hdr *) skb->data;
if (!ieee80211_is_data_qos(hdr->frame_control))
if (!ieee80211_is_data_qos(hdr->frame_control) ||
!ieee80211_is_data_present(hdr->frame_control))
return;
mtxq->agg_ssn = le16_to_cpu(hdr->seq_ctrl) + 0x10;

View File

@@ -862,6 +862,7 @@ int mt76u_init(struct mt76_dev *dev,
.copy = mt76u_copy,
.wr_rp = mt76u_wr_rp,
.rd_rp = mt76u_rd_rp,
.type = MT76_BUS_USB,
};
struct mt76_usb *usb = &dev->usb;